├── .circleci └── config.yml ├── .flake8 ├── .github ├── ISSUE_TEMPLATE │ ├── BUG_REPORT.yml │ └── feature_request.md ├── PULL_REQUEST_TEMPLATE ├── dependabot.yml └── workflows │ └── semantic.yml ├── .gitignore ├── .readthedocs.yml ├── CHANGELOG.md ├── LICENSE ├── MIGRATION_GUIDE.rst ├── Makefile ├── README.md ├── codecov.yml ├── conda └── meta.yaml ├── docker └── aws_lambda_layer │ ├── Dockerfile │ └── README.md ├── docs ├── Makefile ├── _static │ ├── custom.css │ └── rst2html.css ├── api.rst ├── api_async.rst ├── conf.py ├── development.rst ├── images │ ├── realtime-result.gif │ ├── stock-price-prediction-results.png │ └── stock-price-prediction.gif ├── index.rst ├── migration.rst ├── requirements.txt └── usage.rst ├── examples ├── README.md ├── asynchronous.py ├── asynchronous_batching.py ├── asynchronous_management.py ├── asynchronous_retry.py ├── authorizations.py ├── bucket_schemas.py ├── buckets_management.py ├── connection_check.py ├── example.py ├── http_error_handling.py ├── import_data_set.py ├── import_data_set_multiprocessing.py ├── import_data_set_sync_batching.py ├── import_parquet.py ├── influx_cloud.py ├── influxdb_18_example.py ├── ingest_dataframe_default_tags.py ├── ingest_large_dataframe.py ├── invokable_scripts.py ├── iot_sensor.py ├── logging_handler.py ├── monitoring_and_alerting.py ├── nanosecond_precision.py ├── query.flux ├── query.py ├── query_from_file.py ├── query_response_to_json.py ├── query_with_profilers.py ├── rx_playground.py ├── task_example.py ├── templates_management.py ├── vix-daily.csv ├── write_api_callbacks.py ├── write_batching_by_bytes_count.py └── write_structured_data.py ├── influxdb_client ├── __init__.py ├── _async │ ├── __init__.py │ ├── api_client.py │ └── rest.py ├── _sync │ ├── __init__.py │ ├── api_client.py │ └── rest.py ├── client │ ├── __init__.py │ ├── _base.py │ ├── _pages.py │ ├── authorizations_api.py │ ├── bucket_api.py │ ├── delete_api.py │ ├── delete_api_async.py │ ├── exceptions.py │ ├── flux_csv_parser.py │ ├── flux_table.py │ ├── influxdb_client.py │ ├── influxdb_client_async.py │ ├── invokable_scripts_api.py │ ├── labels_api.py │ ├── logging_handler.py │ ├── organizations_api.py │ ├── query_api.py │ ├── query_api_async.py │ ├── tasks_api.py │ ├── users_api.py │ ├── util │ │ ├── __init__.py │ │ ├── date_utils.py │ │ ├── date_utils_pandas.py │ │ ├── helpers.py │ │ └── multiprocessing_helper.py │ ├── warnings.py │ ├── write │ │ ├── __init__.py │ │ ├── dataframe_serializer.py │ │ ├── point.py │ │ └── retry.py │ ├── write_api.py │ └── write_api_async.py ├── configuration.py ├── domain │ ├── __init__.py │ ├── add_resource_member_request_body.py │ ├── analyze_query_response.py │ ├── analyze_query_response_errors.py │ ├── array_expression.py │ ├── ast_response.py │ ├── authorization.py │ ├── authorization_post_request.py │ ├── authorization_update_request.py │ ├── authorizations.py │ ├── axes.py │ ├── axis.py │ ├── axis_scale.py │ ├── bad_statement.py │ ├── band_view_properties.py │ ├── binary_expression.py │ ├── block.py │ ├── boolean_literal.py │ ├── bucket.py │ ├── bucket_links.py │ ├── bucket_metadata_manifest.py │ ├── bucket_retention_rules.py │ ├── bucket_shard_mapping.py │ ├── buckets.py │ ├── builder_aggregate_function_type.py │ ├── builder_config.py │ ├── builder_config_aggregate_window.py │ ├── builder_functions_type.py │ ├── builder_tags_type.py │ ├── builtin_statement.py │ ├── call_expression.py │ ├── cell.py │ ├── cell_links.py │ ├── cell_update.py │ ├── cell_with_view_properties.py │ ├── check.py │ ├── check_base.py 
│ ├── check_base_links.py │ ├── check_discriminator.py │ ├── check_patch.py │ ├── check_status_level.py │ ├── check_view_properties.py │ ├── checks.py │ ├── column_data_type.py │ ├── column_semantic_type.py │ ├── conditional_expression.py │ ├── config.py │ ├── constant_variable_properties.py │ ├── create_cell.py │ ├── create_dashboard_request.py │ ├── custom_check.py │ ├── dashboard.py │ ├── dashboard_color.py │ ├── dashboard_query.py │ ├── dashboard_with_view_properties.py │ ├── dashboards.py │ ├── date_time_literal.py │ ├── dbr_ps.py │ ├── dbrp.py │ ├── dbrp_create.py │ ├── dbrp_get.py │ ├── dbrp_update.py │ ├── deadman_check.py │ ├── decimal_places.py │ ├── delete_predicate_request.py │ ├── dialect.py │ ├── dict_expression.py │ ├── dict_item.py │ ├── duration.py │ ├── duration_literal.py │ ├── error.py │ ├── expression.py │ ├── expression_statement.py │ ├── field.py │ ├── file.py │ ├── float_literal.py │ ├── flux_response.py │ ├── flux_suggestion.py │ ├── flux_suggestions.py │ ├── function_expression.py │ ├── gauge_view_properties.py │ ├── greater_threshold.py │ ├── health_check.py │ ├── heatmap_view_properties.py │ ├── histogram_view_properties.py │ ├── http_notification_endpoint.py │ ├── http_notification_rule.py │ ├── http_notification_rule_base.py │ ├── identifier.py │ ├── import_declaration.py │ ├── index_expression.py │ ├── integer_literal.py │ ├── is_onboarding.py │ ├── label.py │ ├── label_create_request.py │ ├── label_mapping.py │ ├── label_response.py │ ├── label_update.py │ ├── labels_response.py │ ├── language_request.py │ ├── legacy_authorization_post_request.py │ ├── lesser_threshold.py │ ├── line_plus_single_stat_properties.py │ ├── line_protocol_error.py │ ├── line_protocol_length_error.py │ ├── links.py │ ├── list_stacks_response.py │ ├── log_event.py │ ├── logical_expression.py │ ├── logs.py │ ├── map_variable_properties.py │ ├── markdown_view_properties.py │ ├── measurement_schema.py │ ├── measurement_schema_column.py │ ├── measurement_schema_create_request.py │ ├── measurement_schema_list.py │ ├── measurement_schema_update_request.py │ ├── member_assignment.py │ ├── member_expression.py │ ├── metadata_backup.py │ ├── model_property.py │ ├── mosaic_view_properties.py │ ├── node.py │ ├── notification_endpoint.py │ ├── notification_endpoint_base.py │ ├── notification_endpoint_base_links.py │ ├── notification_endpoint_discriminator.py │ ├── notification_endpoint_type.py │ ├── notification_endpoint_update.py │ ├── notification_endpoints.py │ ├── notification_rule.py │ ├── notification_rule_base.py │ ├── notification_rule_base_links.py │ ├── notification_rule_discriminator.py │ ├── notification_rule_update.py │ ├── notification_rules.py │ ├── object_expression.py │ ├── onboarding_request.py │ ├── onboarding_response.py │ ├── option_statement.py │ ├── organization.py │ ├── organization_links.py │ ├── organizations.py │ ├── package.py │ ├── package_clause.py │ ├── pager_duty_notification_endpoint.py │ ├── pager_duty_notification_rule.py │ ├── pager_duty_notification_rule_base.py │ ├── paren_expression.py │ ├── password_reset_body.py │ ├── patch_bucket_request.py │ ├── patch_dashboard_request.py │ ├── patch_organization_request.py │ ├── patch_retention_rule.py │ ├── patch_stack_request.py │ ├── patch_stack_request_additional_resources.py │ ├── permission.py │ ├── permission_resource.py │ ├── pipe_expression.py │ ├── pipe_literal.py │ ├── post_bucket_request.py │ ├── post_check.py │ ├── post_notification_endpoint.py │ ├── post_notification_rule.py │ ├── 
post_organization_request.py │ ├── post_restore_kv_response.py │ ├── post_stack_request.py │ ├── property_key.py │ ├── query.py │ ├── query_edit_mode.py │ ├── query_variable_properties.py │ ├── query_variable_properties_values.py │ ├── range_threshold.py │ ├── ready.py │ ├── regexp_literal.py │ ├── remote_connection.py │ ├── remote_connection_creation_request.py │ ├── remote_connection_update_request.py │ ├── remote_connections.py │ ├── renamable_field.py │ ├── replication.py │ ├── replication_creation_request.py │ ├── replication_update_request.py │ ├── replications.py │ ├── resource_member.py │ ├── resource_members.py │ ├── resource_members_links.py │ ├── resource_owner.py │ ├── resource_owners.py │ ├── restored_bucket_mappings.py │ ├── retention_policy_manifest.py │ ├── return_statement.py │ ├── routes.py │ ├── routes_external.py │ ├── routes_query.py │ ├── routes_system.py │ ├── rule_status_level.py │ ├── run.py │ ├── run_links.py │ ├── run_manually.py │ ├── runs.py │ ├── scatter_view_properties.py │ ├── schema_type.py │ ├── scraper_target_request.py │ ├── scraper_target_response.py │ ├── scraper_target_responses.py │ ├── script.py │ ├── script_create_request.py │ ├── script_invocation_params.py │ ├── script_language.py │ ├── script_update_request.py │ ├── scripts.py │ ├── secret_keys.py │ ├── secret_keys_response.py │ ├── shard_group_manifest.py │ ├── shard_manifest.py │ ├── shard_owner.py │ ├── simple_table_view_properties.py │ ├── single_stat_view_properties.py │ ├── slack_notification_endpoint.py │ ├── slack_notification_rule.py │ ├── slack_notification_rule_base.py │ ├── smtp_notification_rule.py │ ├── smtp_notification_rule_base.py │ ├── source.py │ ├── source_links.py │ ├── sources.py │ ├── stack.py │ ├── stack_associations.py │ ├── stack_events.py │ ├── stack_links.py │ ├── stack_resources.py │ ├── statement.py │ ├── static_legend.py │ ├── status_rule.py │ ├── string_literal.py │ ├── subscription_manifest.py │ ├── table_view_properties.py │ ├── table_view_properties_table_options.py │ ├── tag_rule.py │ ├── task.py │ ├── task_create_request.py │ ├── task_links.py │ ├── task_status_type.py │ ├── task_update_request.py │ ├── tasks.py │ ├── telegraf.py │ ├── telegraf_plugin.py │ ├── telegraf_plugin_request.py │ ├── telegraf_plugin_request_plugins.py │ ├── telegraf_plugins.py │ ├── telegraf_request.py │ ├── telegraf_request_metadata.py │ ├── telegrafs.py │ ├── telegram_notification_endpoint.py │ ├── telegram_notification_rule.py │ ├── telegram_notification_rule_base.py │ ├── template_apply.py │ ├── template_apply_remotes.py │ ├── template_apply_template.py │ ├── template_chart.py │ ├── template_export_by_id.py │ ├── template_export_by_id_org_ids.py │ ├── template_export_by_id_resource_filters.py │ ├── template_export_by_id_resources.py │ ├── template_export_by_name.py │ ├── template_export_by_name_resources.py │ ├── template_kind.py │ ├── template_summary.py │ ├── template_summary_diff.py │ ├── template_summary_diff_buckets.py │ ├── template_summary_diff_buckets_new_old.py │ ├── template_summary_diff_checks.py │ ├── template_summary_diff_dashboards.py │ ├── template_summary_diff_dashboards_new_old.py │ ├── template_summary_diff_label_mappings.py │ ├── template_summary_diff_labels.py │ ├── template_summary_diff_labels_new_old.py │ ├── template_summary_diff_notification_endpoints.py │ ├── template_summary_diff_notification_rules.py │ ├── template_summary_diff_notification_rules_new_old.py │ ├── template_summary_diff_tasks.py │ ├── template_summary_diff_tasks_new_old.py │ ├── 
template_summary_diff_telegraf_configs.py │ ├── template_summary_diff_variables.py │ ├── template_summary_diff_variables_new_old.py │ ├── template_summary_errors.py │ ├── template_summary_label.py │ ├── template_summary_label_properties.py │ ├── template_summary_summary.py │ ├── template_summary_summary_buckets.py │ ├── template_summary_summary_dashboards.py │ ├── template_summary_summary_label_mappings.py │ ├── template_summary_summary_notification_rules.py │ ├── template_summary_summary_status_rules.py │ ├── template_summary_summary_tag_rules.py │ ├── template_summary_summary_tasks.py │ ├── template_summary_summary_variables.py │ ├── test_statement.py │ ├── threshold.py │ ├── threshold_base.py │ ├── threshold_check.py │ ├── unary_expression.py │ ├── unsigned_integer_literal.py │ ├── user.py │ ├── user_response.py │ ├── user_response_links.py │ ├── users.py │ ├── variable.py │ ├── variable_assignment.py │ ├── variable_links.py │ ├── variable_properties.py │ ├── variables.py │ ├── view.py │ ├── view_links.py │ ├── view_properties.py │ ├── views.py │ ├── write_precision.py │ ├── xy_geom.py │ └── xy_view_properties.py ├── extras.py ├── py.typed ├── rest.py ├── service │ ├── __init__.py │ ├── _base_service.py │ ├── authorizations_service.py │ ├── backup_service.py │ ├── bucket_schemas_service.py │ ├── buckets_service.py │ ├── cells_service.py │ ├── checks_service.py │ ├── config_service.py │ ├── dashboards_service.py │ ├── dbr_ps_service.py │ ├── delete_service.py │ ├── health_service.py │ ├── invokable_scripts_service.py │ ├── labels_service.py │ ├── legacy_authorizations_service.py │ ├── metrics_service.py │ ├── notification_endpoints_service.py │ ├── notification_rules_service.py │ ├── organizations_service.py │ ├── ping_service.py │ ├── query_service.py │ ├── ready_service.py │ ├── remote_connections_service.py │ ├── replications_service.py │ ├── resources_service.py │ ├── restore_service.py │ ├── routes_service.py │ ├── rules_service.py │ ├── scraper_targets_service.py │ ├── secrets_service.py │ ├── setup_service.py │ ├── signin_service.py │ ├── signout_service.py │ ├── sources_service.py │ ├── tasks_service.py │ ├── telegraf_plugins_service.py │ ├── telegrafs_service.py │ ├── templates_service.py │ ├── users_service.py │ ├── variables_service.py │ ├── views_service.py │ └── write_service.py └── version.py ├── notebooks ├── realtime-stream.ipynb ├── stock-predictions.ipynb ├── stock_predictions_import_data.py └── telegraf.conf ├── pyproject.toml ├── scripts ├── ci-test.sh ├── generate-sources.sh ├── influxdb-onboarding.sh └── influxdb-restart.sh ├── setup.py └── tests ├── __init__.py ├── base_test.py ├── config-disabled-ssl.ini ├── config-enabled-proxy.ini ├── config-ssl-ca-cert.ini ├── config-ssl-mtls-certs.ini ├── config.ini ├── config.json ├── config.toml ├── config2.ini ├── query_output.json ├── server.pem ├── test_AuthorizationApi.py ├── test_BucketsApi.py ├── test_Dashboards.py ├── test_DateHelper.py ├── test_DeleteApi.py ├── test_FluxCSVParser.py ├── test_FluxObject.py ├── test_Helpers.py ├── test_InfluxDBClient.py ├── test_InfluxDBClientAsync.py ├── test_InfluxDBClientAuthorization.py ├── test_InfluxDBError.py ├── test_LabelsApi.py ├── test_LoggingHandler.py ├── test_MultiprocessingWriter.py ├── test_OrganizationsApi.py ├── test_PandasDateTimeHelper.py ├── test_QueryApi.py ├── test_QueryApiDataFrame.py ├── test_QueryApiStream.py ├── test_TasksApi.py ├── test_Thresholds.py ├── test_UsersApi.py ├── test_Warnings.py ├── test_WriteApi.py ├── test_WriteApiBatching.py ├── 
test_WriteApiDataFrame.py ├── test_WriteApiPickle.py ├── test_WriteOptions.py ├── test_WritesRetry.py ├── test_gzip.py ├── test_health.py └── test_point.py /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | count = True 3 | max-line-length = 120 4 | 5 | # W504: Line break occurred after a binary operator 6 | ignore = W504 7 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/BUG_REPORT.yml: -------------------------------------------------------------------------------- 1 | name: Bug Report 2 | description: Create a bug report to help us improve 3 | labels: ["bug"] 4 | body: 5 | - type: markdown 6 | attributes: 7 | value: | 8 | Thanks for taking the time to fill out this bug report! We reserve this repository's issues for bugs with reproducible problems. 9 | Please redirect any questions about Python client usage to our [Community Slack](https://app.slack.com/client/TH8RGQX5Z/CHQ5VG6F8) or [Community Page](https://community.influxdata.com/); we have a lot of talented community members there who can help answer your question more quickly. 10 | 11 | * Please add a :+1: or comment on a similar existing bug report instead of opening a new one. 12 | * Please check whether the bug can be reproduced with the latest release. 13 | - type: textarea 14 | id: specifications 15 | attributes: 16 | label: Specifications 17 | description: Describe your environment (client version, InfluxDB version, platform). 18 | value: | 19 | * Client Version: 20 | * InfluxDB Version: 21 | * Platform: 22 | validations: 23 | required: true 24 | - type: textarea 25 | id: reproduce 26 | attributes: 27 | label: Code sample to reproduce problem 28 | description: Provide a code sample that reproduces the problem 29 | value: | 30 | ```python 31 | ``` 32 | validations: 33 | required: true 34 | - type: textarea 35 | id: expected-behavior 36 | attributes: 37 | label: Expected behavior 38 | description: Describe what you expected to happen when you performed the above steps. 39 | validations: 40 | required: true 41 | - type: textarea 42 | id: actual-behavior 43 | attributes: 44 | label: Actual behavior 45 | description: Describe what actually happened when you performed the above steps. 46 | validations: 47 | required: true 48 | - type: textarea 49 | id: additional-info 50 | attributes: 51 | label: Additional info 52 | description: Include a gist of relevant config, logs, etc. 53 | validations: 54 | required: false 55 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Opening a feature request kicks off a discussion 4 | --- 5 | 6 | 14 | 15 | __Proposal:__ 16 | Short summary of the feature. 17 | 18 | __Current behavior:__ 19 | Describe what currently happens. 20 | 21 | __Desired behavior:__ 22 | Describe what you want. 23 | 24 | __Alternatives considered:__ 25 | Describe other solutions or features you considered. 26 | 27 | __Use case:__ 28 | Why is this important (helps with prioritizing requests)? 
29 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE: -------------------------------------------------------------------------------- 1 | Closes # 2 | 3 | ## Proposed Changes 4 | 5 | _Briefly describe your proposed changes:_ 6 | 7 | ## Checklist 8 | 9 | 10 | 11 | - [ ] CHANGELOG.md updated 12 | - [ ] Rebased/mergeable 13 | - [ ] A test has been added if appropriate 14 | - [ ] `pytest tests` completes successfully 15 | - [ ] Commit messages are [conventional](https://www.conventionalcommits.org/en/v1.0.0/) 16 | - [ ] Sign [CLA](https://www.influxdata.com/legal/cla/) (if not already signed) 17 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "pip" 4 | directory: "/" 5 | schedule: 6 | interval: "daily" 7 | open-pull-requests-limit: 10 8 | ignore: 9 | - dependency-name: "docutils" # pinned version for readthedocs.org - https://github.com/influxdata/influxdb-client-python/pull/361 10 | -------------------------------------------------------------------------------- /.github/workflows/semantic.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: "Semantic PR and Commit Messages" 3 | 4 | on: 5 | pull_request: 6 | types: [opened, reopened, synchronize, edited] 7 | branches: 8 | - master 9 | 10 | jobs: 11 | semantic: 12 | uses: influxdata/validate-semantic-github-messages/.github/workflows/semantic.yml@main 13 | with: 14 | CHECK_PR_TITLE_OR_ONE_COMMIT: true 15 | 16 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | influxdb2_test/*.csv 50 | 51 | # Translations 52 | *.mo 53 | *.pot 54 | 55 | # Django stuff: 56 | *.log 57 | local_settings.py 58 | db.sqlite3 59 | 60 | # Flask stuff: 61 | instance/ 62 | .webassets-cache 63 | 64 | # Scrapy stuff: 65 | .scrapy 66 | 67 | # Sphinx documentation 68 | docs/_build/ 69 | 70 | # PyBuilder 71 | target/ 72 | 73 | # Jupyter Notebook 74 | .ipynb_checkpoints 75 | 76 | # pyenv 77 | .python-version 78 | 79 | # celery beat schedule file 80 | celerybeat-schedule 81 | 82 | # SageMath parsed files 83 | *.sage.py 84 | 85 | # Environments 86 | .env 87 | .venv 88 | env/ 89 | venv/ 90 | venv-*/ 91 | ENV/ 92 | env.bak/ 93 | venv.bak/ 94 | 95 | # Spyder project settings 96 | .spyderproject 97 | .spyproject 98 | 99 | # Rope project settings 100 | .ropeproject 101 | 102 | # mkdocs documentation 103 | /site 104 | 105 | # mypy 106 | .mypy_cache/ 107 | 108 | sandbox 109 | 110 | # PyCharm 111 | **/.idea 112 | **/*.iml 113 | 114 | # OpenAPI-generator 115 | /.openapi-generator* 116 | **/writer.pickle 117 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | formats: all 4 | 5 | build: 6 | os: ubuntu-22.04 7 | tools: 8 | python: "3.7" 9 | 10 | sphinx: 11 | configuration: docs/conf.py 12 | 13 | python: 14 | install: 15 | - requirements: docs/requirements.txt 16 | - method: pip 17 | path: . 18 | extra_requirements: 19 | - async 20 | - extra 21 | - ciso -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 InfluxData, Inc. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: all 2 | all: help 3 | 4 | .PHONY: clean 5 | clean: 6 | rm -f .coverage coverage.xml writer.pickle 7 | rm -rf .pytest_cache build dist htmlcov test-reports docs/_build 8 | 9 | .PHONY: docs 10 | docs: 11 | cd docs && python -m sphinx -T -E -b html -d _build/doctrees -D language=en . _build/html 12 | 13 | .PHONY: help 14 | help: 15 | @echo 'Makefile Targets' 16 | @echo ' clean clean up local files' 17 | @echo ' docs build docs locally' 18 | @echo ' help print this help output' 19 | @echo ' install install library as editable with all dependencies' 20 | @echo ' lint execute flake8 against source code' 21 | @echo ' test execute all tests' 22 | 23 | .PHONY: install 24 | install: 25 | pip install --editable ".[test,extra,ciso,async]" 26 | 27 | .PHONY: lint 28 | lint: 29 | flake8 setup.py influxdb_client/ 30 | 31 | .PHONY: test 32 | test: 33 | pytest tests \ 34 | --cov=./ \ 35 | --cov-report html:htmlcov \ 36 | --cov-report xml:coverage.xml 37 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | ignore: 2 | - "influxdb_client/domain/*.py" 3 | - "influxdb_client/service/*.py" 4 | - "tests/*.py" 5 | - "setup.py" -------------------------------------------------------------------------------- /conda/meta.yaml: -------------------------------------------------------------------------------- 1 | {% set name = "influxdb_client" %} 2 | {% set version = "1.49.0" %} 3 | 4 | 5 | package: 6 | name: {{ name|lower }} 7 | version: {{ version }} 8 | 9 | source: 10 | url: https://files.pythonhosted.org/packages/2a/f3/9c418215cf399529175ed5b198d15a21c2e29f28d90932107634b375c9ee/influxdb_client-1.49.0.tar.gz 11 | sha256: 4a53a218adef6ac9458bfbd31fa08c76194f70310c6b4e01f53d804bd2c48e03 12 | 13 | build: 14 | number: 0 15 | noarch: python 16 | script: {{ PYTHON }} -m pip install . -vv 17 | 18 | requirements: 19 | host: 20 | - python >=3.7 21 | - pip 22 | run: 23 | - python >=3.7 24 | - setuptools 25 | 26 | about: 27 | home: https://github.com/influxdata/influxdb-client-python 28 | license: MIT License 29 | license_file: ../LICENSE 30 | summary: The Python client library for the InfluxDB 2.0. 31 | dev_url: https://github.com/influxdata/influxdb-client-python 32 | 33 | -------------------------------------------------------------------------------- /docker/aws_lambda_layer/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM amazonlinux:2 2 | RUN mkdir /install 3 | WORKDIR /install 4 | RUN yum install -y amazon-linux-extras 5 | RUN amazon-linux-extras enable python3.8 6 | RUN yum install -y python38 python38-devel python3-pip zip gcc 7 | RUN python3.8 -m pip install --no-cache-dir --upgrade pip && \ 8 | python3.8 -m pip install --no-cache-dir virtualenv 9 | RUN python3.8 -m venv lambda 10 | RUN source lambda/bin/activate 11 | # Python dependencies to be included in output zip file: 12 | RUN python3.8 -m pip install --no-cache-dir influxdb-client[ciso] -t /install/python 13 | # Create zip file 14 | WORKDIR /install/python 15 | RUN zip -r ../python.zip . 
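# The finished layer archive now lives at /install/python.zip; copy it out of a container built
# from this image (see README.md) and publish it as a Lambda layer. A hedged example using the
# AWS CLI - the layer name "influxdb-client-layer" is only an illustrative placeholder:
#   aws lambda publish-layer-version --layer-name influxdb-client-layer \
#     --zip-file fileb://python.zip --compatible-runtimes python3.8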
16 | VOLUME ["/install"] 17 | -------------------------------------------------------------------------------- /docker/aws_lambda_layer/README.md: -------------------------------------------------------------------------------- 1 | ## AWS Lambda Layer Docker image 2 | Docker image which allows a user to create a zip file with all Python dependencies for a custom AWS Lambda function (by default with influxdb-client). This should stay within the 10 MB limit for a single browser upload (~3.5 MB with just the influxdb-client-python library). If the zip archive is uploaded to a custom Lambda Layer, the user can keep using the Console IDE to edit the main Lambda function. 3 | ### Build image: 4 | `docker build -t lambdalayer:latest .` 5 | ### Create container: 6 | `docker create --name lambdalayer lambdalayer:latest` 7 | ### Copy zip from container: 8 | `docker cp lambdalayer:/install/python.zip .` 9 | ### Upload zip to AWS Lambda 10 | Use the AWS CLI or AWS Console to create a custom Lambda Layer and upload the archive to it. Then import those dependencies in the Lambda function as usual. 11 | ``` 12 | ... 13 | from influxdb_client import InfluxDBClient, Point 14 | ... 15 | ``` 16 | ### Reference: 17 | https://docs.aws.amazon.com/lambda/latest/dg/python-package.html#python-package-dependencies 18 | https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html#configuration-layers-using -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SPHINXPROJ = influxdb_client 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -------------------------------------------------------------------------------- /docs/_static/custom.css: -------------------------------------------------------------------------------- 1 | .wy-table-responsive table td { 2 | white-space: normal; 3 | } 4 | -------------------------------------------------------------------------------- /docs/api.rst: -------------------------------------------------------------------------------- 1 | API Reference 2 | ============= 3 | 4 | .. contents:: 5 | :local: 6 | 7 | InfluxDBClient 8 | """""""""""""" 9 | .. autoclass:: influxdb_client.InfluxDBClient 10 | :members: 11 | 12 | QueryApi 13 | """""""" 14 | .. autoclass:: influxdb_client.QueryApi 15 | :members: 16 | 17 | .. autoclass:: influxdb_client.client.flux_table.FluxTable 18 | :members: 19 | 20 | .. autoclass:: influxdb_client.client.flux_table.FluxRecord 21 | :members: 22 | 23 | .. autoclass:: influxdb_client.client.flux_table.TableList 24 | :members: 25 | 26 | .. autoclass:: influxdb_client.client.flux_table.CSVIterator 27 | :members: 28 | 29 | WriteApi 30 | """""""" 31 | .. autoclass:: influxdb_client.WriteApi 32 | :members: 33 | 34 | .. autoclass:: influxdb_client.client.write.point.Point 35 | :members: 36 | 37 | .. 
autoclass:: influxdb_client.domain.write_precision.WritePrecision 38 | :members: 39 | 40 | BucketsApi 41 | """""""""" 42 | .. autoclass:: influxdb_client.BucketsApi 43 | :members: 44 | 45 | .. autoclass:: influxdb_client.domain.Bucket 46 | :members: 47 | 48 | LabelsApi 49 | """"""""" 50 | .. autoclass:: influxdb_client.LabelsApi 51 | :members: 52 | 53 | OrganizationsApi 54 | """""""""""""""" 55 | .. autoclass:: influxdb_client.OrganizationsApi 56 | :members: 57 | 58 | .. autoclass:: influxdb_client.domain.Organization 59 | :members: 60 | 61 | UsersApi 62 | """""""" 63 | .. autoclass:: influxdb_client.UsersApi 64 | :members: 65 | 66 | .. autoclass:: influxdb_client.domain.User 67 | :members: 68 | 69 | TasksApi 70 | """""""" 71 | .. autoclass:: influxdb_client.TasksApi 72 | :members: 73 | 74 | .. autoclass:: influxdb_client.domain.Task 75 | :members: 76 | 77 | InvokableScriptsApi 78 | """"""""""""""""""" 79 | .. autoclass:: influxdb_client.InvokableScriptsApi 80 | :members: 81 | 82 | .. autoclass:: influxdb_client.domain.Script 83 | :members: 84 | 85 | .. autoclass:: influxdb_client.domain.ScriptCreateRequest 86 | :members: 87 | 88 | DeleteApi 89 | """"""""" 90 | .. autoclass:: influxdb_client.DeleteApi 91 | :members: 92 | 93 | .. autoclass:: influxdb_client.domain.DeletePredicateRequest 94 | :members: 95 | 96 | Helpers 97 | """"""" 98 | .. autoclass:: influxdb_client.client.util.date_utils.DateHelper 99 | :members: 100 | 101 | .. autoclass:: influxdb_client.client.util.date_utils_pandas.PandasDateTimeHelper 102 | :members: 103 | 104 | .. autoclass:: influxdb_client.client.util.multiprocessing_helper.MultiprocessingWriter 105 | :members: 106 | 107 | -------------------------------------------------------------------------------- /docs/api_async.rst: -------------------------------------------------------------------------------- 1 | Async API Reference 2 | =================== 3 | 4 | .. contents:: 5 | :local: 6 | 7 | InfluxDBClientAsync 8 | """"""""""""""""""" 9 | .. autoclass:: influxdb_client.client.influxdb_client_async.InfluxDBClientAsync 10 | :members: 11 | 12 | QueryApiAsync 13 | """"""""""""" 14 | .. autoclass:: influxdb_client.client.query_api_async.QueryApiAsync 15 | :members: 16 | 17 | WriteApiAsync 18 | """"""""""""" 19 | .. autoclass:: influxdb_client.client.write_api_async.WriteApiAsync 20 | :members: 21 | 22 | DeleteApiAsync 23 | """""""""""""" 24 | .. 
autoclass:: influxdb_client.client.delete_api_async.DeleteApiAsync 25 | :members: 26 | -------------------------------------------------------------------------------- /docs/images/realtime-result.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/influxdata/influxdb-client-python/feb97eef067013881e798b322f90a83e27d07366/docs/images/realtime-result.gif -------------------------------------------------------------------------------- /docs/images/stock-price-prediction-results.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/influxdata/influxdb-client-python/feb97eef067013881e798b322f90a83e27d07366/docs/images/stock-price-prediction-results.png -------------------------------------------------------------------------------- /docs/images/stock-price-prediction.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/influxdata/influxdb-client-python/feb97eef067013881e798b322f90a83e27d07366/docs/images/stock-price-prediction.gif -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | 2 | InfluxDB 2.0 python client 3 | ========================== 4 | 5 | .. toctree:: 6 | :maxdepth: 3 7 | :caption: Contents: 8 | 9 | usage 10 | api 11 | api_async 12 | migration 13 | development 14 | 15 | .. include:: ../README.md 16 | :parser: myst_parser.sphinx_ 17 | :start-after: 18 | :end-before: 19 | 20 | Indices and tables 21 | ================== 22 | 23 | * :ref:`genindex` 24 | * :ref:`modindex` 25 | * :ref:`search` 26 | -------------------------------------------------------------------------------- /docs/migration.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../MIGRATION_GUIDE.rst -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | sphinx>=5.0.0 2 | sphinx_rtd_theme==2.0.0 3 | myst_parser>=0.19.2 4 | -------------------------------------------------------------------------------- /docs/usage.rst: -------------------------------------------------------------------------------- 1 | User Guide 2 | ========== 3 | 4 | .. contents:: 5 | :local: 6 | 7 | Query 8 | ^^^^^ 9 | .. include:: ../README.md 10 | :parser: myst_parser.sphinx_ 11 | :start-after: 12 | :end-before: 13 | 14 | Write 15 | ^^^^^ 16 | .. include:: ../README.md 17 | :parser: myst_parser.sphinx_ 18 | :start-after: 19 | :end-before: 20 | 21 | Delete data 22 | ^^^^^^^^^^^ 23 | .. include:: ../README.md 24 | :parser: myst_parser.sphinx_ 25 | :start-after: 26 | :end-before: 27 | 28 | Pandas DataFrame 29 | ^^^^^^^^^^^^^^^^ 30 | .. include:: ../README.md 31 | :parser: myst_parser.sphinx_ 32 | :start-after: 33 | :end-before: 34 | 35 | How to use Asyncio 36 | ^^^^^^^^^^^^^^^^^^ 37 | .. include:: ../README.md 38 | :parser: myst_parser.sphinx_ 39 | :start-after: 40 | :end-before: 41 | 42 | Gzip support 43 | ^^^^^^^^^^^^ 44 | .. include:: ../README.md 45 | :parser: myst_parser.sphinx_ 46 | :start-after: 47 | :end-before: 48 | 49 | Proxy configuration 50 | ^^^^^^^^^^^^^^^^^^^ 51 | .. include:: ../README.md 52 | :parser: myst_parser.sphinx_ 53 | :start-after: 54 | :end-before: 55 | 56 | Authentication 57 | ^^^^^^^^^^^^^^ 58 | .. 
include:: ../README.md 59 | :parser: myst_parser.sphinx_ 60 | :start-after: 61 | :end-before: 62 | 63 | Nanosecond precision 64 | ^^^^^^^^^^^^^^^^^^^^ 65 | .. include:: ../README.md 66 | :parser: myst_parser.sphinx_ 67 | :start-after: 68 | :end-before: 69 | 70 | Handling Errors 71 | ^^^^^^^^^^^^^^^ 72 | .. include:: ../README.md 73 | :parser: myst_parser.sphinx_ 74 | :start-after: 75 | :end-before: 76 | 77 | Logging 78 | ^^^^^^^ 79 | 80 | .. include:: ../README.md 81 | :parser: myst_parser.sphinx_ 82 | :start-after: 83 | :end-before: 84 | 85 | Examples 86 | ^^^^^^^^ 87 | .. include:: ../README.md 88 | :parser: myst_parser.sphinx_ 89 | :start-after: 90 | :end-before: 91 | -------------------------------------------------------------------------------- /examples/asynchronous_batching.py: -------------------------------------------------------------------------------- 1 | """ 2 | How to use RxPY to prepare batches for asyncio client. 3 | """ 4 | import asyncio 5 | from csv import DictReader 6 | 7 | import reactivex as rx 8 | from reactivex import operators as ops 9 | from reactivex.scheduler.eventloop import AsyncIOScheduler 10 | 11 | from influxdb_client import Point 12 | from influxdb_client.client.influxdb_client_async import InfluxDBClientAsync 13 | 14 | 15 | def csv_to_generator(csv_file_path): 16 | """ 17 | Parse your CSV file into generator 18 | """ 19 | for row in DictReader(open(csv_file_path, 'r')): 20 | point = Point('financial-analysis') \ 21 | .tag('type', 'vix-daily') \ 22 | .field('open', float(row['VIX Open'])) \ 23 | .field('high', float(row['VIX High'])) \ 24 | .field('low', float(row['VIX Low'])) \ 25 | .field('close', float(row['VIX Close'])) \ 26 | .time(row['Date']) 27 | yield point 28 | 29 | 30 | async def main(): 31 | async with InfluxDBClientAsync(url='http://localhost:8086', token='my-token', org='my-org') as client: 32 | write_api = client.write_api() 33 | 34 | """ 35 | Async write 36 | """ 37 | 38 | async def async_write(batch): 39 | """ 40 | Prepare async task 41 | """ 42 | await write_api.write(bucket='my-bucket', record=batch) 43 | return batch 44 | 45 | """ 46 | Prepare batches from generator 47 | """ 48 | batches = rx \ 49 | .from_iterable(csv_to_generator('vix-daily.csv')) \ 50 | .pipe(ops.buffer_with_count(500)) \ 51 | .pipe(ops.map(lambda batch: rx.from_future(asyncio.ensure_future(async_write(batch)))), ops.merge_all()) 52 | 53 | done = asyncio.Future() 54 | 55 | """ 56 | Write batches by subscribing to Rx generator 57 | """ 58 | batches.subscribe(on_next=lambda batch: print(f'Written batch... 
{len(batch)}'), 59 | on_error=lambda ex: print(f'Unexpected error: {ex}'), 60 | on_completed=lambda: done.set_result(0), 61 | scheduler=AsyncIOScheduler(asyncio.get_event_loop())) 62 | """ 63 | Wait to finish all writes 64 | """ 65 | await done 66 | 67 | 68 | if __name__ == "__main__": 69 | asyncio.run(main()) 70 | -------------------------------------------------------------------------------- /examples/asynchronous_management.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from influxdb_client import OrganizationsService 4 | from influxdb_client.client.influxdb_client_async import InfluxDBClientAsync 5 | 6 | 7 | async def main(): 8 | async with InfluxDBClientAsync(url='http://localhost:8086', token='my-token', org='my-org') as client: 9 | # Initialize async OrganizationsService 10 | organizations_service = OrganizationsService(api_client=client.api_client) 11 | 12 | # Find organization with name 'my-org' 13 | organizations = await organizations_service.get_orgs_async(org='my-org') 14 | for organization in organizations.orgs: 15 | print(f'name: {organization.name}, id: {organization.id}') 16 | 17 | 18 | if __name__ == "__main__": 19 | asyncio.run(main()) 20 | -------------------------------------------------------------------------------- /examples/asynchronous_retry.py: -------------------------------------------------------------------------------- 1 | """ 2 | How to use `aiohttp-retry` with async client. 3 | 4 | This example depends on `aiohttp_retry `_. 5 | Install ``aiohttp_retry`` by: pip install aiohttp-retry. 6 | 7 | """ 8 | import asyncio 9 | 10 | from aiohttp_retry import ExponentialRetry, RetryClient 11 | 12 | from influxdb_client import Point 13 | from influxdb_client.client.influxdb_client_async import InfluxDBClientAsync 14 | 15 | 16 | async def main(): 17 | """ 18 | Configure Retries - for more info see https://github.com/inyutin/aiohttp_retry 19 | """ 20 | retry_options = ExponentialRetry(attempts=3) 21 | async with InfluxDBClientAsync(url="http://localhost:8086", token="my-token", org="my-org", 22 | client_session_type=RetryClient, 23 | client_session_kwargs={"retry_options": retry_options}) as client: 24 | """ 25 | Write data: 26 | """ 27 | print(f"\n------- Written data: -------\n") 28 | write_api = client.write_api() 29 | _point1 = Point("async_m").tag("location", "Prague").field("temperature", 25.3) 30 | _point2 = Point("async_m").tag("location", "New York").field("temperature", 24.3) 31 | successfully = await write_api.write(bucket="my-bucket", record=[_point1, _point2]) 32 | print(f" > successfully: {successfully}") 33 | 34 | """ 35 | Query: Stream of FluxRecords 36 | """ 37 | print(f"\n------- Query: Stream of FluxRecords -------\n") 38 | query_api = client.query_api() 39 | records = await query_api.query_stream('from(bucket:"my-bucket") ' 40 | '|> range(start: -10m) ' 41 | '|> filter(fn: (r) => r["_measurement"] == "async_m")') 42 | async for record in records: 43 | print(record) 44 | 45 | 46 | if __name__ == "__main__": 47 | asyncio.run(main()) 48 | -------------------------------------------------------------------------------- /examples/buckets_management.py: -------------------------------------------------------------------------------- 1 | """ 2 | How to create, list and delete Buckets. 
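The example below also updates the created bucket's description before deleting it.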
3 | """ 4 | 5 | from influxdb_client import InfluxDBClient, BucketRetentionRules 6 | 7 | """ 8 | Define credentials 9 | """ 10 | url = "http://localhost:8086" 11 | token = "my-token" 12 | org = "my-org" 13 | 14 | with InfluxDBClient(url=url, token=token) as client: 15 | buckets_api = client.buckets_api() 16 | 17 | """ 18 | Create Bucket with retention policy set to 3600 seconds and name "bucket-by-python" 19 | """ 20 | print(f"------- Create -------\n") 21 | retention_rules = BucketRetentionRules(type="expire", every_seconds=3600) 22 | created_bucket = buckets_api.create_bucket(bucket_name="bucket-by-python", 23 | retention_rules=retention_rules, 24 | org=org) 25 | print(created_bucket) 26 | 27 | """ 28 | Update Bucket 29 | """ 30 | print(f"------- Update -------\n") 31 | created_bucket.description = "Update description" 32 | created_bucket = buckets_api.update_bucket(bucket=created_bucket) 33 | print(created_bucket) 34 | 35 | """ 36 | List all Buckets 37 | """ 38 | print(f"\n------- List -------\n") 39 | buckets = buckets_api.find_buckets_iter() 40 | print("\n".join([f" ---\n ID: {bucket.id}\n Name: {bucket.name}\n Retention: {bucket.retention_rules}" 41 | for bucket in buckets])) 42 | print("---") 43 | 44 | """ 45 | Delete previously created bucket 46 | """ 47 | print(f"------- Delete -------\n") 48 | buckets_api.delete_bucket(created_bucket) 49 | print(f" successfully deleted bucket: {created_bucket.name}") 50 | -------------------------------------------------------------------------------- /examples/connection_check.py: -------------------------------------------------------------------------------- 1 | """ 2 | How to check that connection credentials are suitable for queries and writes from/into specified bucket. 3 | """ 4 | 5 | from influxdb_client import InfluxDBClient 6 | from influxdb_client.client.write_api import SYNCHRONOUS 7 | from influxdb_client.rest import ApiException 8 | 9 | """ 10 | Define credentials 11 | """ 12 | url = "http://localhost:8086" 13 | token = "my-token" 14 | org = "my-org" 15 | bucket = "my-bucket" 16 | 17 | 18 | def check_connection(): 19 | """Check that the InfluxDB is running.""" 20 | print("> Checking connection ...", end=" ") 21 | client.api_client.call_api('/ping', 'GET') 22 | print("ok") 23 | 24 | 25 | def check_query(): 26 | """Check that the credentials has permission to query from the Bucket""" 27 | print("> Checking credentials for query ...", end=" ") 28 | try: 29 | client.query_api().query(f"from(bucket:\"{bucket}\") |> range(start: -1m) |> limit(n:1)", org) 30 | except ApiException as e: 31 | # missing credentials 32 | if e.status == 404: 33 | raise Exception(f"The specified token doesn't have sufficient credentials to read from '{bucket}' " 34 | f"or specified bucket doesn't exists.") from e 35 | raise 36 | print("ok") 37 | 38 | 39 | def check_write(): 40 | """Check that the credentials has permission to write into the Bucket""" 41 | print("> Checking credentials for write ...", end=" ") 42 | try: 43 | client.write_api(write_options=SYNCHRONOUS).write(bucket, org, b"") 44 | except ApiException as e: 45 | # bucket does not exist 46 | if e.status == 404: 47 | raise Exception(f"The specified bucket does not exist.") from e 48 | # insufficient permissions 49 | if e.status == 403: 50 | raise Exception(f"The specified token does not have sufficient credentials to write to '{bucket}'.") from e 51 | # 400 (BadRequest) caused by empty LineProtocol 52 | if e.status != 400: 53 | raise 54 | print("ok") 55 | 56 | 57 | with InfluxDBClient(url=url, 
token=token, org=org) as client: 58 | check_connection() 59 | check_query() 60 | check_write() 61 | pass 62 | -------------------------------------------------------------------------------- /examples/example.py: -------------------------------------------------------------------------------- 1 | import codecs 2 | from datetime import datetime, timezone 3 | 4 | from influxdb_client import WritePrecision, InfluxDBClient, Point 5 | from influxdb_client.client.write_api import SYNCHRONOUS 6 | 7 | with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org", debug=False) as client: 8 | query_api = client.query_api() 9 | 10 | p = Point("my_measurement").tag("location", "Prague").field("temperature", 25.3) \ 11 | .time(datetime.now(tz=timezone.utc), WritePrecision.MS) 12 | write_api = client.write_api(write_options=SYNCHRONOUS) 13 | 14 | # write using point structure 15 | write_api.write(bucket="my-bucket", record=p) 16 | 17 | line_protocol = p.to_line_protocol() 18 | print(line_protocol) 19 | 20 | # write using line protocol string 21 | write_api.write(bucket="my-bucket", record=line_protocol) 22 | 23 | # using Table structure 24 | tables = query_api.query('from(bucket:"my-bucket") |> range(start: -10m)') 25 | for table in tables: 26 | print(table) 27 | for record in table.records: 28 | # process record 29 | print(record.values) 30 | 31 | # using csv library 32 | csv_result = query_api.query_csv('from(bucket:"my-bucket") |> range(start: -10m)') 33 | val_count = 0 34 | for record in csv_result: 35 | for cell in record: 36 | val_count += 1 37 | print("val count: ", val_count) 38 | 39 | response = query_api.query_raw('from(bucket:"my-bucket") |> range(start: -10m)') 40 | print(codecs.decode(response.data)) 41 | -------------------------------------------------------------------------------- /examples/import_data_set.py: -------------------------------------------------------------------------------- 1 | """ 2 | Import VIX - CBOE Volatility Index - from "vix-daily.csv" file into InfluxDB 2.0 3 | 4 | https://datahub.io/core/finance-vix#data 5 | """ 6 | 7 | from collections import OrderedDict 8 | from csv import DictReader 9 | 10 | import reactivex as rx 11 | from reactivex import operators as ops 12 | 13 | from influxdb_client import Point, InfluxDBClient, WriteOptions 14 | 15 | 16 | def parse_row(row: OrderedDict): 17 | """Parse row of CSV file into Point with structure: 18 | 19 | financial-analysis,type=vix-daily close=18.47,high=19.82,low=18.28,open=19.82 1198195200000000000 20 | 21 | CSV format: 22 | Date,VIX Open,VIX High,VIX Low,VIX Close\n 23 | 2004-01-02,17.96,18.68,17.54,18.22\n 24 | 2004-01-05,18.45,18.49,17.44,17.49\n 25 | 2004-01-06,17.66,17.67,16.19,16.73\n 26 | 2004-01-07,16.72,16.75,15.5,15.5\n 27 | 2004-01-08,15.42,15.68,15.32,15.61\n 28 | 2004-01-09,16.15,16.88,15.57,16.75\n 29 | ... 
30 | 31 | :param row: the row of CSV file 32 | :return: Parsed csv row to [Point] 33 | """ 34 | 35 | """ 36 | For better performance is sometimes useful directly create a LineProtocol to avoid unnecessary escaping overhead: 37 | """ 38 | # from datetime import timezone 39 | # import ciso8601 40 | # from influxdb_client.client.write.point import EPOCH 41 | # 42 | # time = (ciso8601.parse_datetime(row["Date"]).replace(tzinfo=timezone.utc) - EPOCH).total_seconds() * 1e9 43 | # return f"financial-analysis,type=vix-daily" \ 44 | # f" close={float(row['VIX Close'])},high={float(row['VIX High'])},low={float(row['VIX Low'])},open={float(row['VIX Open'])} " \ 45 | # f" {int(time)}" 46 | 47 | return Point("financial-analysis") \ 48 | .tag("type", "vix-daily") \ 49 | .field("open", float(row['VIX Open'])) \ 50 | .field("high", float(row['VIX High'])) \ 51 | .field("low", float(row['VIX Low'])) \ 52 | .field("close", float(row['VIX Close'])) \ 53 | .time(row['Date']) 54 | 55 | 56 | """ 57 | Converts vix-daily.csv into sequence of data point 58 | """ 59 | data = rx \ 60 | .from_iterable(DictReader(open('vix-daily.csv', 'r'))) \ 61 | .pipe(ops.map(lambda row: parse_row(row))) 62 | 63 | with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org", debug=True) as client: 64 | 65 | """ 66 | Create client that writes data in batches with 50_000 items. 67 | """ 68 | with client.write_api(write_options=WriteOptions(batch_size=50_000, flush_interval=10_000)) as write_api: 69 | 70 | """ 71 | Write data into InfluxDB 72 | """ 73 | write_api.write(bucket="my-bucket", record=data) 74 | 75 | """ 76 | Querying max value of CBOE Volatility Index 77 | """ 78 | query = 'from(bucket:"my-bucket")' \ 79 | ' |> range(start: 0, stop: now())' \ 80 | ' |> filter(fn: (r) => r._measurement == "financial-analysis")' \ 81 | ' |> max()' 82 | result = client.query_api().query(query=query) 83 | 84 | """ 85 | Processing results 86 | """ 87 | print() 88 | print("=== results ===") 89 | print() 90 | for table in result: 91 | for record in table.records: 92 | print('max {0:5} = {1}'.format(record.get_field(), record.get_value())) 93 | -------------------------------------------------------------------------------- /examples/import_data_set_sync_batching.py: -------------------------------------------------------------------------------- 1 | """ 2 | How to use RxPY to prepare batches for synchronous write into InfluxDB 3 | """ 4 | 5 | from csv import DictReader 6 | 7 | import reactivex as rx 8 | from reactivex import operators as ops 9 | 10 | from influxdb_client import InfluxDBClient, Point 11 | from influxdb_client.client.write.retry import WritesRetry 12 | from influxdb_client.client.write_api import SYNCHRONOUS 13 | 14 | 15 | def csv_to_generator(csv_file_path): 16 | """ 17 | Parse your CSV file into generator 18 | """ 19 | for row in DictReader(open(csv_file_path, 'r')): 20 | point = Point('financial-analysis') \ 21 | .tag('type', 'vix-daily') \ 22 | .field('open', float(row['VIX Open'])) \ 23 | .field('high', float(row['VIX High'])) \ 24 | .field('low', float(row['VIX Low'])) \ 25 | .field('close', float(row['VIX Close'])) \ 26 | .time(row['Date']) 27 | yield point 28 | 29 | 30 | """ 31 | Define Retry strategy - 3 attempts => 2, 4, 8 32 | """ 33 | retries = WritesRetry(total=3, retry_interval=1, exponential_base=2) 34 | with InfluxDBClient(url='http://localhost:8086', token='my-token', org='my-org', retries=retries) as client: 35 | 36 | """ 37 | Use synchronous version of WriteApi to strongly depends on result of 
write 38 | """ 39 | write_api = client.write_api(write_options=SYNCHRONOUS) 40 | 41 | """ 42 | Prepare batches from generator 43 | """ 44 | batches = rx \ 45 | .from_iterable(csv_to_generator('vix-daily.csv')) \ 46 | .pipe(ops.buffer_with_count(500)) 47 | 48 | 49 | def write_batch(batch): 50 | """ 51 | Synchronous write 52 | """ 53 | print(f'Writing... {len(batch)}') 54 | write_api.write(bucket='my-bucket', record=batch) 55 | 56 | 57 | """ 58 | Write batches 59 | """ 60 | batches.subscribe(on_next=lambda batch: write_batch(batch), 61 | on_error=lambda ex: print(f'Unexpected error: {ex}'), 62 | on_completed=lambda: print('Import finished!')) 63 | -------------------------------------------------------------------------------- /examples/import_parquet.py: -------------------------------------------------------------------------------- 1 | import pyarrow.parquet as pq 2 | 3 | from influxdb_client import InfluxDBClient, WriteOptions 4 | 5 | with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org", timeout=0, debug=False) as client: 6 | """ 7 | You can download NYC TLC Trip Record Data parquet file from https://www1.nyc.gov/site/tlc/about/tlc-trip-record-data.page 8 | """ 9 | table = pq.read_table('fhvhv_tripdata_2022-01.parquet') 10 | with client.write_api(write_options=WriteOptions(batch_size=50_000)) as write_api: 11 | 12 | dataframe = table.to_pandas() 13 | """ 14 | Keep only interesting columns 15 | """ 16 | keep_df = dataframe[ 17 | ['dispatching_base_num', "PULocationID", "DOLocationID", "pickup_datetime", "dropoff_datetime", "shared_request_flag"]] 18 | print(keep_df.tail().to_string()) 19 | 20 | write_api.write(bucket="my-bucket", record=keep_df, data_frame_measurement_name="taxi-trip-data", 21 | data_frame_tag_columns=['dispatching_base_num', "shared_request_flag"], 22 | data_frame_timestamp_column="pickup_datetime") 23 | 24 | """ 25 | Querying 10 pickups from dispatching 'B03404' 26 | """ 27 | query = ''' 28 | from(bucket:"my-bucket") 29 | |> range(start: 2022-01-01T00:00:00Z, stop: now()) 30 | |> filter(fn: (r) => r._measurement == "taxi-trip-data") 31 | |> filter(fn: (r) => r.dispatching_base_num == "B03404") 32 | |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value") 33 | |> rename(columns: {_time: "pickup_datetime"}) 34 | |> drop(columns: ["_start", "_stop"]) 35 | |> limit(n:10, offset: 0) 36 | ''' 37 | 38 | result = client.query_api().query(query=query) 39 | 40 | """ 41 | Processing results 42 | """ 43 | print() 44 | print("=== Querying 10 pickups from dispatching 'B03404' ===") 45 | print() 46 | for table in result: 47 | for record in table.records: 48 | print( 49 | f'Dispatching: {record["dispatching_base_num"]} pickup: {record["pickup_datetime"]} dropoff: {record["dropoff_datetime"]}') 50 | -------------------------------------------------------------------------------- /examples/influx_cloud.py: -------------------------------------------------------------------------------- 1 | """ 2 | Connect to InfluxDB 2.0 - write data and query them 3 | """ 4 | 5 | from datetime import datetime, timezone 6 | 7 | from influxdb_client import Point, InfluxDBClient 8 | from influxdb_client.client.write_api import SYNCHRONOUS 9 | 10 | """ 11 | Configure credentials 12 | """ 13 | influx_cloud_url = 'https://us-west-2-1.aws.cloud2.influxdata.com' 14 | influx_cloud_token = '...' 15 | bucket = '...' 16 | org = '...' 
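# NOTE: the URL, token, bucket and org above are placeholders - replace them with your own
# InfluxDB Cloud credentials before running this example.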
17 | 18 | with InfluxDBClient(url=influx_cloud_url, token=influx_cloud_token) as client: 19 | kind = 'temperature' 20 | host = 'host1' 21 | device = 'opt-123' 22 | 23 | """ 24 | Write data by Point structure 25 | """ 26 | point = Point(kind).tag('host', host).tag('device', device).field('value', 25.3) \ 27 | .time(time=datetime.now(tz=timezone.utc)) 28 | 29 | print(f'Writing to InfluxDB cloud: {point.to_line_protocol()} ...') 30 | 31 | write_api = client.write_api(write_options=SYNCHRONOUS) 32 | write_api.write(bucket=bucket, org=org, record=point) 33 | 34 | print() 35 | print('success') 36 | print() 37 | print() 38 | 39 | """ 40 | Query written data 41 | """ 42 | query = f'from(bucket: "{bucket}") |> range(start: -1d) |> filter(fn: (r) => r._measurement == "{kind}")' 43 | print(f'Querying from InfluxDB cloud: "{query}" ...') 44 | print() 45 | 46 | query_api = client.query_api() 47 | tables = query_api.query(query=query, org=org) 48 | 49 | for table in tables: 50 | for row in table.records: 51 | print(f'{row.values["_time"]}: host={row.values["host"]},device={row.values["device"]} ' 52 | f'{row.values["_value"]} °C') 53 | 54 | print() 55 | print('success') 56 | -------------------------------------------------------------------------------- /examples/influxdb_18_example.py: -------------------------------------------------------------------------------- 1 | from influxdb_client import InfluxDBClient, Point 2 | 3 | username = 'username' 4 | password = 'password' 5 | 6 | database = 'telegraf' 7 | retention_policy = 'autogen' 8 | 9 | bucket = f'{database}/{retention_policy}' 10 | 11 | with InfluxDBClient(url='http://localhost:8086', token=f'{username}:{password}', org='-') as client: 12 | 13 | with client.write_api() as write_api: 14 | print('*** Write Points ***') 15 | 16 | point = Point("mem").tag("host", "host1").field("used_percent", 25.43234543) 17 | print(point.to_line_protocol()) 18 | 19 | write_api.write(bucket=bucket, record=point) 20 | 21 | print('*** Query Points ***') 22 | 23 | query_api = client.query_api() 24 | query = f'from(bucket: \"{bucket}\") |> range(start: -1h)' 25 | tables = query_api.query(query) 26 | for record in tables[0].records: 27 | print(f'#{record.get_time()} #{record.get_measurement()}: #{record.get_field()} #{record.get_value()}') 28 | 29 | -------------------------------------------------------------------------------- /examples/ingest_dataframe_default_tags.py: -------------------------------------------------------------------------------- 1 | """ 2 | How to ingest DataFrame with default tags. 
3 | """ 4 | 5 | import pandas as pd 6 | 7 | from influxdb_client import InfluxDBClient 8 | from influxdb_client.client.write_api import SYNCHRONOUS, PointSettings 9 | 10 | """ 11 | Load DataFrame from CSV file 12 | """ 13 | df = pd.read_csv("vix-daily.csv") 14 | print(df.head()) 15 | 16 | with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client: 17 | """ 18 | Ingest DataFrame with default tags 19 | """ 20 | point_settings = PointSettings(**{"type": "vix-daily"}) 21 | point_settings.add_default_tag("example-name", "ingest-data-frame") 22 | 23 | write_api = client.write_api(write_options=SYNCHRONOUS, point_settings=point_settings) 24 | write_api.write(bucket="my-bucket", record=df, data_frame_measurement_name="financial-analysis-df") 25 | 26 | """ 27 | Querying ingested data 28 | """ 29 | query = 'from(bucket:"my-bucket")' \ 30 | ' |> range(start: 0, stop: now())' \ 31 | ' |> filter(fn: (r) => r._measurement == "financial-analysis-df")' \ 32 | ' |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")' \ 33 | ' |> limit(n:10, offset: 0)' 34 | result = client.query_api().query(query=query) 35 | 36 | """ 37 | Processing results 38 | """ 39 | print() 40 | print("=== results ===") 41 | print() 42 | for table in result: 43 | for record in table.records: 44 | print('{4}: Open {0}, Close {1}, High {2}, Low {3}'.format(record["VIX Open"], record["VIX Close"], 45 | record["VIX High"], record["VIX Low"], 46 | record["type"])) 47 | -------------------------------------------------------------------------------- /examples/ingest_large_dataframe.py: -------------------------------------------------------------------------------- 1 | """ 2 | How to ingest a large DataFrame by splitting it into chunks. 3 | """ 4 | import logging 5 | import random 6 | from datetime import datetime 7 | 8 | from influxdb_client import InfluxDBClient 9 | from influxdb_client.extras import pd, np 10 | 11 | """ 12 | Enable logging for DataFrame serializer 13 | """ 14 | loggerSerializer = logging.getLogger('influxdb_client.client.write.dataframe_serializer') 15 | loggerSerializer.setLevel(level=logging.DEBUG) 16 | handler = logging.StreamHandler() 17 | handler.setFormatter(logging.Formatter('%(asctime)s | %(message)s')) 18 | loggerSerializer.addHandler(handler) 19 | 20 | """ 21 | Configuration 22 | """ 23 | url = 'http://localhost:8086' 24 | token = 'my-token' 25 | org = 'my-org' 26 | bucket = 'my-bucket' 27 | 28 | """ 29 | Generate DataFrame 30 | """ 31 | print() 32 | print("=== Generating DataFrame ===") 33 | print() 34 | dataframe_rows_count = 150_000 35 | 36 | col_data = { 37 | 'time': np.arange(0, dataframe_rows_count, 1, dtype=int), 38 | 'tag': np.random.choice(['tag_a', 'tag_b', 'test_c'], size=(dataframe_rows_count,)), 39 | } 40 | for n in range(2, 2999): 41 | col_data[f'col{n}'] = random.randint(1, 10) 42 | 43 | data_frame = pd.DataFrame(data=col_data).set_index('time') 44 | print(data_frame) 45 | 46 | """ 47 | Ingest DataFrame 48 | """ 49 | print() 50 | print("=== Ingesting DataFrame via batching API ===") 51 | print() 52 | startTime = datetime.now() 53 | 54 | with InfluxDBClient(url=url, token=token, org=org) as client: 55 | 56 | """ 57 | Use batching API 58 | """ 59 | with client.write_api() as write_api: 60 | write_api.write(bucket=bucket, record=data_frame, 61 | data_frame_tag_columns=['tag'], 62 | data_frame_measurement_name="measurement_name") 63 | print() 64 | print("Waiting to finish ingesting DataFrame...") 65 | print() 66 | 67 | print() 68 | print(f'Import finished 
in: {datetime.now() - startTime}') 69 | print() 70 | -------------------------------------------------------------------------------- /examples/iot_sensor.py: -------------------------------------------------------------------------------- 1 | """ 2 | Efficiently write data from an IoT sensor - write the changed temperature every minute 3 | """ 4 | import atexit 5 | import platform 6 | from datetime import timedelta 7 | 8 | import psutil as psutil 9 | import reactivex as rx 10 | from reactivex import operators as ops 11 | 12 | from influxdb_client import WriteApi, WriteOptions 13 | from influxdb_client.client.influxdb_client import InfluxDBClient 14 | 15 | 16 | def on_exit(db_client: InfluxDBClient, write_api: WriteApi): 17 | """Close clients after the script terminates. 18 | 19 | :param db_client: InfluxDB client 20 | :param write_api: WriteApi 21 | :return: nothing 22 | """ 23 | write_api.close() 24 | db_client.close() 25 | 26 | 27 | def sensor_temperature(): 28 | """Read the CPU temperature. [psutil] doesn't support macOS, so we use [sysctl]. 29 | 30 | :return: actual CPU temperature 31 | """ 32 | os_name = platform.system() 33 | if os_name == 'Darwin': 34 | from subprocess import check_output 35 | output = check_output(["sysctl", "machdep.xcpm.cpu_thermal_level"]) 36 | import re 37 | return re.findall(r'\d+', str(output))[0] 38 | else: 39 | return psutil.sensors_temperatures()["coretemp"][0] 40 | 41 | 42 | def line_protocol(temperature): 43 | """Create an InfluxDB line protocol record with the structure: 44 | 45 | iot_sensor,hostname=mine_sensor_12,type=temperature value=68 46 | 47 | :param temperature: the sensor temperature 48 | :return: Line protocol to write into InfluxDB 49 | """ 50 | 51 | import socket 52 | return 'iot_sensor,hostname={},type=temperature value={}'.format(socket.gethostname(), temperature) 53 | 54 | 55 | """ 56 | Read the temperature every minute; distinct_until_changed - produce only if the temperature changes 57 | """ 58 | data = rx \ 59 | .interval(period=timedelta(seconds=60)) \ 60 | .pipe(ops.map(lambda t: sensor_temperature()), 61 | ops.distinct_until_changed(), 62 | ops.map(lambda temperature: line_protocol(temperature))) 63 | 64 | _db_client = InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org", debug=True) 65 | 66 | """ 67 | Create a client that writes data into InfluxDB 68 | """ 69 | _write_api = _db_client.write_api(write_options=WriteOptions(batch_size=1)) 70 | _write_api.write(bucket="my-bucket", record=data) 71 | 72 | """ 73 | Call on_exit when the script terminates 74 | """ 75 | atexit.register(on_exit, _db_client, _write_api) 76 | 77 | input() 78 | -------------------------------------------------------------------------------- /examples/logging_handler.py: -------------------------------------------------------------------------------- 1 | """ 2 | Show the usage of InfluxDB with Python's native logging. 3 | 4 | This is useful if you 5 | * want to log to InfluxDB and a local file. 6 | * want to set up InfluxDB logging in a project without specifying it in submodules 7 | """ 8 | import datetime 9 | import logging 10 | import time 11 | 12 | from influxdb_client import InfluxLoggingHandler, WritePrecision, Point 13 | from influxdb_client.client.write_api import SYNCHRONOUS 14 | 15 | DATA_LOGGER_NAME = '…' 16 | 17 | 18 | def setup_logger(): 19 | """ 20 | Set up the data logger with the influx logging handler. 21 | 22 | This can happen in your core module. 
23 | """ 24 | influx_logging_handler = InfluxLoggingHandler( 25 | url="http://localhost:8086", token="my-token", org="my-org", bucket="my-bucket", 26 | client_args={'timeout': 30_000}, # optional configuration of the client 27 | write_api_args={'write_options': SYNCHRONOUS}) # optional configuration of the write api 28 | influx_logging_handler.setLevel(logging.DEBUG) 29 | 30 | data_logger = logging.getLogger(DATA_LOGGER_NAME) 31 | data_logger.setLevel(logging.DEBUG) 32 | data_logger.addHandler(influx_logging_handler) 33 | # feel free to add other handlers here. 34 | # if you find yourself writing filters e.g. to only log points to influx, think about adding a PR :) 35 | 36 | 37 | def use_logger(): 38 | """Use the logger. This can happen in any submodule.""" 39 | # `data_logger` will have the influx_logging_handler attached if setup_logger was called somewhere. 40 | data_logger = logging.getLogger(DATA_LOGGER_NAME) 41 | # write a line yourself 42 | data_logger.debug(f"my-measurement,host=host1 temperature=25.3 {int(time.time() * 1e9)}") 43 | # or make use of the influxdb helpers like Point 44 | data_logger.debug( 45 | Point('my-measurement') 46 | .tag('host', 'host1') 47 | .field('temperature', 25.3) 48 | .time(datetime.datetime.now(tz=datetime.timezone.utc), WritePrecision.MS) 49 | ) 50 | 51 | 52 | if __name__ == "__main__": 53 | setup_logger() 54 | use_logger() 55 | -------------------------------------------------------------------------------- /examples/nanosecond_precision.py: -------------------------------------------------------------------------------- 1 | from influxdb_client import Point, InfluxDBClient 2 | from influxdb_client.client.util.date_utils_pandas import PandasDateTimeHelper 3 | from influxdb_client.client.write_api import SYNCHRONOUS 4 | 5 | """ 6 | Set PandasDate helper which supports nanoseconds. 7 | """ 8 | import influxdb_client.client.util.date_utils as date_utils 9 | 10 | date_utils.date_helper = PandasDateTimeHelper() 11 | 12 | """ 13 | Prepare client. 
14 | """ 15 | with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client: 16 | 17 | write_api = client.write_api(write_options=SYNCHRONOUS) 18 | """ 19 | Prepare data 20 | """ 21 | 22 | point = Point("h2o_feet") \ 23 | .field("water_level", 10) \ 24 | .tag("location", "pacific") \ 25 | .time('1996-02-25T21:20:00.001001231Z') 26 | 27 | print(f'Time serialized with nanosecond precision: {point.to_line_protocol()}') 28 | print() 29 | 30 | write_api.write(bucket="my-bucket", record=point) 31 | 32 | query_api = client.query_api() 33 | 34 | """ 35 | Query: using Stream 36 | """ 37 | query = ''' 38 | from(bucket:"my-bucket") 39 | |> range(start: 0, stop: now()) 40 | |> filter(fn: (r) => r._measurement == "h2o_feet") 41 | ''' 42 | records = query_api.query_stream(query) 43 | 44 | for record in records: 45 | print(f'Temperature in {record["location"]} is {record["_value"]} at time: {record["_time"]}') 46 | 47 | -------------------------------------------------------------------------------- /examples/query.flux: -------------------------------------------------------------------------------- 1 | import "date" 2 | 3 | from(bucket: "my-bucket") 4 | |> range(start: -50d) 5 | |> filter(fn: (r) => r["_measurement"] == "weather" and r["_field"] == "temperature") 6 | |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value") 7 | |> map(fn: (r) => ({ r with weekDay: date.weekDay(t: r._time) })) 8 | -------------------------------------------------------------------------------- /examples/query_from_file.py: -------------------------------------------------------------------------------- 1 | """ 2 | How to load and execute query that is stored in file. 3 | """ 4 | import calendar 5 | import random 6 | from datetime import datetime, timedelta, timezone 7 | 8 | from influxdb_client import InfluxDBClient, Point 9 | from influxdb_client.client.write_api import SYNCHRONOUS 10 | 11 | with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client: 12 | 13 | write_api = client.write_api(write_options=SYNCHRONOUS) 14 | """ 15 | Prepare data 16 | """ 17 | 18 | _points = [] 19 | now = datetime.now(timezone.utc).replace(hour=13, minute=20, second=15, microsecond=0) 20 | for i in range(50): 21 | _point = Point("weather")\ 22 | .tag("location", "New York")\ 23 | .field("temperature", random.randint(-10, 30))\ 24 | .time(now - timedelta(days=i)) 25 | _points.append(_point) 26 | 27 | write_api.write(bucket="my-bucket", record=_points) 28 | 29 | query_api = client.query_api() 30 | 31 | """ 32 | Query: using Flux from file 33 | """ 34 | with open('query.flux', 'r') as file: 35 | query = file.read() 36 | 37 | tables = query_api.query(query) 38 | 39 | for table in tables: 40 | for record in table.records: 41 | day_name = calendar.day_name[record["weekDay"]] 42 | print(f'Temperature in {record["location"]} is {record["temperature"]}°C at {day_name}') 43 | 44 | 45 | -------------------------------------------------------------------------------- /examples/query_response_to_json.py: -------------------------------------------------------------------------------- 1 | from influxdb_client import InfluxDBClient, Point 2 | from influxdb_client.client.write_api import SYNCHRONOUS 3 | 4 | with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client: 5 | 6 | """ 7 | Prepare data 8 | """ 9 | _point1 = Point("my_measurement").tag("location", "Prague").field("temperature", 25.3) 10 | _point2 = Point("my_measurement").tag("location", "New 
York").field("temperature", 24.3) 11 | 12 | client.write_api(write_options=SYNCHRONOUS).write(bucket="my-bucket", record=[_point1, _point2]) 13 | 14 | """ 15 | Query: using Table structure 16 | """ 17 | tables = client.query_api().query('from(bucket:"my-bucket") |> range(start: -10m)') 18 | 19 | """ 20 | Serialize to JSON 21 | """ 22 | output = tables.to_json(indent=5) 23 | print(output) 24 | -------------------------------------------------------------------------------- /examples/query_with_profilers.py: -------------------------------------------------------------------------------- 1 | from influxdb_client import InfluxDBClient, Point 2 | from influxdb_client.client.query_api import QueryOptions 3 | from influxdb_client.client.write_api import SYNCHRONOUS 4 | 5 | with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org", debug=True) as client: 6 | 7 | """ 8 | Define callback to process profiler results. 9 | """ 10 | class ProfilersCallback(object): 11 | def __init__(self): 12 | self.records = [] 13 | 14 | def __call__(self, flux_record): 15 | self.records.append(flux_record.values) 16 | 17 | 18 | callback = ProfilersCallback() 19 | 20 | write_api = client.write_api(write_options=SYNCHRONOUS) 21 | 22 | """ 23 | Prepare data 24 | """ 25 | _point1 = Point("my_measurement").tag("location", "Prague").field("temperature", 25.3) 26 | _point2 = Point("my_measurement").tag("location", "New York").field("temperature", 24.3) 27 | write_api.write(bucket="my-bucket", record=[_point1, _point2]) 28 | 29 | """ 30 | Pass callback to QueryOptions 31 | """ 32 | query_api = client.query_api( 33 | query_options=QueryOptions(profilers=["query", "operator"], profiler_callback=callback)) 34 | 35 | """ 36 | Perform query 37 | """ 38 | tables = query_api.query('from(bucket:"my-bucket") |> range(start: -10m)') 39 | 40 | for profiler in callback.records: 41 | print(f'Custom processing of profiler result: {profiler}') 42 | -------------------------------------------------------------------------------- /examples/task_example.py: -------------------------------------------------------------------------------- 1 | from influxdb_client import InfluxDBClient, TaskCreateRequest 2 | 3 | url = "http://localhost:8086" 4 | org = "my-org" 5 | bucket = "my-bucket" 6 | token = "my-token" 7 | 8 | with InfluxDBClient(url=url, token=token, org=org, debug=True) as client: 9 | tasks_api = client.tasks_api() 10 | 11 | flux = \ 12 | ''' 13 | option task = {{ 14 | name: "{task_name}", 15 | every: 1d 16 | }} 17 | 18 | from(bucket: "{from_bucket}") 19 | |> range(start: -task.every) 20 | |> filter(fn: (r) => (r._measurement == "m")) 21 | |> aggregateWindow(every: 1h, fn: mean) 22 | |> to(bucket: "{to_bucket}", org: "{org}") 23 | '''.format(task_name="my-task", from_bucket=bucket, to_bucket="to-my-bucket", org=org) 24 | 25 | task_request = TaskCreateRequest(flux=flux, org=org, description="Task Description", status="active") 26 | task = tasks_api.create_task(task_create_request=task_request) 27 | print(task) 28 | 29 | tasks = tasks_api.find_tasks_iter() 30 | 31 | # print all tasks id 32 | for task in tasks: 33 | print(task.id) 34 | -------------------------------------------------------------------------------- /examples/templates_management.py: -------------------------------------------------------------------------------- 1 | """ 2 | How to use Templates and Stack API. 
3 | """ 4 | import datetime 5 | 6 | from influxdb_client import InfluxDBClient, TemplatesService, TemplateApply, TemplateApplyRemotes, PatchStackRequest, \ 7 | TemplateApplyTemplate 8 | 9 | """ 10 | Define credentials 11 | """ 12 | url = 'http://localhost:8086' 13 | token = 'my-token' 14 | bucket_name = 'my-bucket' 15 | org_name = 'my-org' 16 | 17 | with InfluxDBClient(url=url, token=token, org=org_name, debug=True) as client: 18 | uniqueId = str(datetime.datetime.now()) 19 | """ 20 | Find Organization ID by Organization API. 21 | """ 22 | org = client.organizations_api().find_organizations(org=org_name)[0] 23 | 24 | """ 25 | Initialize Template service 26 | """ 27 | templates_service = TemplatesService(api_client=client.api_client) 28 | 29 | """ 30 | Apply 'Linux System Monitoring Template' 31 | """ 32 | template_yaml_url = "https://raw.githubusercontent.com/influxdata/community-templates/master/linux_system/linux_system.yml" # noqa: E501 33 | template_linux = templates_service.apply_template( 34 | template_apply=TemplateApply(dry_run=False, 35 | org_id=org.id, 36 | remotes=[TemplateApplyRemotes(url=template_yaml_url)])) 37 | """ 38 | Set Stack name 39 | """ 40 | templates_service.update_stack(stack_id=template_linux.stack_id, 41 | patch_stack_request=PatchStackRequest(name="linux_system")) 42 | 43 | """ 44 | Create template as an inline definition 45 | """ 46 | template_definition = { 47 | "apiVersion": "influxdata.com/v2alpha1", 48 | "kind": "Bucket", 49 | "metadata": {"name": "template-bucket"}, 50 | "spec": {"description": "bucket 1 description"} 51 | } 52 | template_inline = templates_service.apply_template( 53 | template_apply=TemplateApply(dry_run=False, 54 | org_id=org.id, 55 | template=TemplateApplyTemplate(content_type="json", 56 | contents=[template_definition]))) 57 | """ 58 | Set Stack name 59 | """ 60 | templates_service.update_stack(stack_id=template_inline.stack_id, 61 | patch_stack_request=PatchStackRequest(name="inline_stack")) 62 | 63 | """ 64 | List installed stacks 65 | """ 66 | print(f"\n------- List -------\n") 67 | stacks = templates_service.list_stacks(org_id=org.id).stacks 68 | print("\n".join([f" ---\n ID: {it.id}\n Stack: {it}" for it in stacks])) 69 | print("---") 70 | 71 | """ 72 | Delete previously created Stack 73 | """ 74 | print(f"------- Delete -------\n") 75 | templates_service.delete_stack(stack_id=template_linux.stack_id, org_id=org.id) 76 | print(f" Successfully deleted stack: '{template_linux.stack_id}'") 77 | -------------------------------------------------------------------------------- /examples/write_api_callbacks.py: -------------------------------------------------------------------------------- 1 | """ 2 | How to use WriteApi's callbacks to notify about state of background batches. 
3 | """ 4 | 5 | from influxdb_client import InfluxDBClient, Point 6 | from influxdb_client.client.exceptions import InfluxDBError 7 | 8 | """ 9 | Configuration 10 | """ 11 | url = 'http://localhost:8086' 12 | token = 'my-token' 13 | org = 'my-org' 14 | bucket = 'my-bucket' 15 | 16 | """ 17 | Data 18 | """ 19 | points = [Point("my-temperature").tag("location", "Prague").field("temperature", 25.3), 20 | Point("my-temperature").tag("location", "New York").field("temperature", 18.4)] 21 | 22 | 23 | class BatchingCallback(object): 24 | 25 | def success(self, conf: (str, str, str), data: str): 26 | """Successfully written batch.""" 27 | print(f"Written batch: {conf}, data: {data}") 28 | 29 | def error(self, conf: (str, str, str), data: str, exception: InfluxDBError): 30 | """Unsuccessfully written batch.""" 31 | print(f"Cannot write batch: {conf}, data: {data} due to: {exception}") 32 | 33 | def retry(self, conf: (str, str, str), data: str, exception: InfluxDBError): 34 | """Retryable error.""" 35 | print(f"Retryable error occurs for batch: {conf}, data: {data} retry: {exception}") 36 | 37 | 38 | callback = BatchingCallback() 39 | with InfluxDBClient(url=url, token=token, org=org) as client: 40 | """ 41 | Use batching API 42 | """ 43 | with client.write_api(success_callback=callback.success, 44 | error_callback=callback.error, 45 | retry_callback=callback.retry) as write_api: 46 | write_api.write(bucket=bucket, record=points) 47 | print() 48 | print("Waiting to finish ingesting...") 49 | print() 50 | -------------------------------------------------------------------------------- /examples/write_structured_data.py: -------------------------------------------------------------------------------- 1 | from collections import namedtuple 2 | from dataclasses import dataclass 3 | from datetime import datetime, timezone 4 | 5 | from influxdb_client import InfluxDBClient 6 | from influxdb_client.client.write_api import SYNCHRONOUS 7 | 8 | 9 | class Sensor(namedtuple('Sensor', ['name', 'location', 'version', 'pressure', 'temperature', 'timestamp'])): 10 | """ 11 | Named structure - Sensor 12 | """ 13 | pass 14 | 15 | 16 | @dataclass 17 | class Car: 18 | """ 19 | DataClass structure - Car 20 | """ 21 | engine: str 22 | type: str 23 | speed: float 24 | 25 | 26 | """ 27 | Initialize client 28 | """ 29 | with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client: 30 | write_api = client.write_api(write_options=SYNCHRONOUS) 31 | 32 | """ 33 | Sensor "current" state 34 | """ 35 | sensor = Sensor(name="sensor_pt859", 36 | location="warehouse_125", 37 | version="2021.06.05.5874", 38 | pressure=125, 39 | temperature=10, 40 | timestamp=datetime.now(tz=timezone.utc)) 41 | print(sensor) 42 | 43 | """ 44 | Synchronous write 45 | """ 46 | write_api.write(bucket="my-bucket", 47 | record=sensor, 48 | record_measurement_key="name", 49 | record_time_key="timestamp", 50 | record_tag_keys=["location", "version"], 51 | record_field_keys=["pressure", "temperature"]) 52 | 53 | """ 54 | Car "current" speed 55 | """ 56 | car = Car('12V-BT', 'sport-cars', 125.25) 57 | print(car) 58 | 59 | """ 60 | Synchronous write 61 | """ 62 | write_api.write(bucket="my-bucket", 63 | record=car, 64 | record_measurement_name="performance", 65 | record_tag_keys=["engine", "type"], 66 | record_field_keys=["speed"]) 67 | -------------------------------------------------------------------------------- /influxdb_client/_async/__init__.py: -------------------------------------------------------------------------------- 1 | 
"""Asynchronous REST APIs.""" 2 | -------------------------------------------------------------------------------- /influxdb_client/_sync/__init__.py: -------------------------------------------------------------------------------- 1 | """Synchronous REST APIs.""" 2 | -------------------------------------------------------------------------------- /influxdb_client/client/__init__.py: -------------------------------------------------------------------------------- 1 | # flake8: noqa 2 | 3 | """ 4 | InfluxDB OSS API Service. 5 | 6 | The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 7 | 8 | OpenAPI spec version: 2.0.0 9 | Generated by: https://openapi-generator.tech 10 | """ 11 | 12 | 13 | from __future__ import absolute_import 14 | 15 | # import apis into api package 16 | from influxdb_client.service.authorizations_service import AuthorizationsService 17 | from influxdb_client.service.backup_service import BackupService 18 | from influxdb_client.service.bucket_schemas_service import BucketSchemasService 19 | from influxdb_client.service.buckets_service import BucketsService 20 | from influxdb_client.service.cells_service import CellsService 21 | from influxdb_client.service.checks_service import ChecksService 22 | from influxdb_client.service.config_service import ConfigService 23 | from influxdb_client.service.dbr_ps_service import DBRPsService 24 | from influxdb_client.service.dashboards_service import DashboardsService 25 | from influxdb_client.service.delete_service import DeleteService 26 | from influxdb_client.service.health_service import HealthService 27 | from influxdb_client.service.invokable_scripts_service import InvokableScriptsService 28 | from influxdb_client.service.labels_service import LabelsService 29 | from influxdb_client.service.legacy_authorizations_service import LegacyAuthorizationsService 30 | from influxdb_client.service.metrics_service import MetricsService 31 | from influxdb_client.service.notification_endpoints_service import NotificationEndpointsService 32 | from influxdb_client.service.notification_rules_service import NotificationRulesService 33 | from influxdb_client.service.organizations_service import OrganizationsService 34 | from influxdb_client.service.ping_service import PingService 35 | from influxdb_client.service.query_service import QueryService 36 | from influxdb_client.service.ready_service import ReadyService 37 | from influxdb_client.service.remote_connections_service import RemoteConnectionsService 38 | from influxdb_client.service.replications_service import ReplicationsService 39 | from influxdb_client.service.resources_service import ResourcesService 40 | from influxdb_client.service.restore_service import RestoreService 41 | from influxdb_client.service.routes_service import RoutesService 42 | from influxdb_client.service.rules_service import RulesService 43 | from influxdb_client.service.scraper_targets_service import ScraperTargetsService 44 | from influxdb_client.service.secrets_service import SecretsService 45 | from influxdb_client.service.setup_service import SetupService 46 | from influxdb_client.service.signin_service import SigninService 47 | from influxdb_client.service.signout_service import SignoutService 48 | from influxdb_client.service.sources_service import SourcesService 49 | from influxdb_client.service.tasks_service import TasksService 50 | from influxdb_client.service.telegraf_plugins_service import 
TelegrafPluginsService 51 | from influxdb_client.service.telegrafs_service import TelegrafsService 52 | from influxdb_client.service.templates_service import TemplatesService 53 | from influxdb_client.service.users_service import UsersService 54 | from influxdb_client.service.variables_service import VariablesService 55 | from influxdb_client.service.views_service import ViewsService 56 | from influxdb_client.service.write_service import WriteService 57 | -------------------------------------------------------------------------------- /influxdb_client/client/_pages.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | class _Page: 4 | def __init__(self, values, has_next, next_after): 5 | self.has_next = has_next 6 | self.values = values 7 | self.next_after = next_after 8 | 9 | @staticmethod 10 | def empty(): 11 | return _Page([], False, None) 12 | 13 | @staticmethod 14 | def initial(after): 15 | return _Page([], True, after) 16 | 17 | 18 | class _PageIterator: 19 | def __init__(self, page: _Page, get_next_page): 20 | self.page = page 21 | self.get_next_page = get_next_page 22 | 23 | def __iter__(self): 24 | return self 25 | 26 | def __next__(self): 27 | if not self.page.values: 28 | if self.page.has_next: 29 | self.page = self.get_next_page(self.page) 30 | if not self.page.values: 31 | raise StopIteration 32 | return self.page.values.pop(0) 33 | 34 | 35 | class _Paginated: 36 | def __init__(self, paginated_getter, pluck_page_resources_from_response): 37 | self.paginated_getter = paginated_getter 38 | self.pluck_page_resources_from_response = pluck_page_resources_from_response 39 | 40 | def find_iter(self, **kwargs): 41 | """Iterate over resources with pagination. 42 | 43 | :key str org: The organization name. 44 | :key str org_id: The organization ID. 45 | :key str after: The last resource ID from which to seek from (but not including). 
46 | :key int limit: the maximum number of items per page 47 | :return: resources iterator 48 | """ 49 | 50 | def get_next_page(page: _Page): 51 | return self._find_next_page(page, **kwargs) 52 | 53 | return iter(_PageIterator(_Page.initial(kwargs.get('after')), get_next_page)) 54 | 55 | def _find_next_page(self, page: _Page, **kwargs): 56 | if not page.has_next: 57 | return _Page.empty() 58 | 59 | kw_args = {**kwargs, 'after': page.next_after} if page.next_after is not None else kwargs 60 | response = self.paginated_getter(**kw_args) 61 | 62 | resources = self.pluck_page_resources_from_response(response) 63 | has_next = response.links.next is not None 64 | last_id = resources[-1].id if resources else None 65 | 66 | return _Page(resources, has_next, last_id) 67 | -------------------------------------------------------------------------------- /influxdb_client/client/delete_api.py: -------------------------------------------------------------------------------- 1 | """Delete time series data from InfluxDB.""" 2 | 3 | from datetime import datetime 4 | from typing import Union 5 | 6 | from influxdb_client import Organization 7 | from influxdb_client.client._base import _BaseDeleteApi 8 | from influxdb_client.client.util.helpers import get_org_query_param 9 | 10 | 11 | class DeleteApi(_BaseDeleteApi): 12 | """Implementation for '/api/v2/delete' endpoint.""" 13 | 14 | def __init__(self, influxdb_client): 15 | """Initialize defaults.""" 16 | super().__init__(influxdb_client) 17 | 18 | def delete(self, start: Union[str, datetime], stop: Union[str, datetime], predicate: str, bucket: str, 19 | org: Union[str, Organization, None] = None) -> None: 20 | """ 21 | Delete Time series data from InfluxDB. 22 | 23 | :param str, datetime.datetime start: start time 24 | :param str, datetime.datetime stop: stop time 25 | :param str predicate: predicate 26 | :param str bucket: bucket id or name from which data will be deleted 27 | :param str, Organization org: specifies the organization to delete data from. 28 | Take the ``ID``, ``Name`` or ``Organization``. 29 | If not specified the default value from ``InfluxDBClient.org`` is used. 30 | :return: 31 | """ 32 | predicate_request = self._prepare_predicate_request(start, stop, predicate) 33 | org_param = get_org_query_param(org=org, client=self._influxdb_client, required_id=False) 34 | 35 | return self._service.post_delete(delete_predicate_request=predicate_request, bucket=bucket, org=org_param) 36 | -------------------------------------------------------------------------------- /influxdb_client/client/delete_api_async.py: -------------------------------------------------------------------------------- 1 | """Delete time series data from InfluxDB.""" 2 | 3 | from datetime import datetime 4 | from typing import Union 5 | 6 | from influxdb_client import Organization 7 | from influxdb_client.client._base import _BaseDeleteApi 8 | from influxdb_client.client.util.helpers import get_org_query_param 9 | 10 | 11 | class DeleteApiAsync(_BaseDeleteApi): 12 | """Async implementation for '/api/v2/delete' endpoint.""" 13 | 14 | def __init__(self, influxdb_client): 15 | """Initialize defaults.""" 16 | super().__init__(influxdb_client) 17 | 18 | async def delete(self, start: Union[str, datetime], stop: Union[str, datetime], predicate: str, bucket: str, 19 | org: Union[str, Organization, None] = None) -> bool: 20 | """ 21 | Delete Time series data from InfluxDB. 
22 | 23 | :param str, datetime.datetime start: start time 24 | :param str, datetime.datetime stop: stop time 25 | :param str predicate: predicate 26 | :param str bucket: bucket id or name from which data will be deleted 27 | :param str, Organization org: specifies the organization to delete data from. 28 | Take the ``ID``, ``Name`` or ``Organization``. 29 | If not specified the default value from ``InfluxDBClientAsync.org`` is used. 30 | :return: ``True`` for successfully deleted data, otherwise raise an exception 31 | """ 32 | predicate_request = self._prepare_predicate_request(start, stop, predicate) 33 | org_param = get_org_query_param(org=org, client=self._influxdb_client, required_id=False) 34 | 35 | response = await self._service.post_delete_async(delete_predicate_request=predicate_request, bucket=bucket, 36 | org=org_param, _return_http_data_only=False) 37 | return response[1] == 204 38 | -------------------------------------------------------------------------------- /influxdb_client/client/exceptions.py: -------------------------------------------------------------------------------- 1 | """Exceptions utils for InfluxDB.""" 2 | 3 | import logging 4 | 5 | from urllib3 import HTTPResponse 6 | 7 | logger = logging.getLogger('influxdb_client.client.exceptions') 8 | 9 | 10 | class InfluxDBError(Exception): 11 | """Raised when a server error occurs.""" 12 | 13 | def __init__(self, response: HTTPResponse = None, message: str = None): 14 | """Initialize the InfluxDBError handler.""" 15 | if response is not None: 16 | self.response = response 17 | self.message = self._get_message(response) 18 | if isinstance(response, HTTPResponse): # response is HTTPResponse 19 | self.headers = response.headers 20 | self.retry_after = response.headers.get('Retry-After') 21 | else: # response is RESTResponse 22 | self.headers = response.getheaders() 23 | self.retry_after = response.getheader('Retry-After') 24 | else: 25 | self.response = None 26 | self.message = message or 'no response' 27 | self.retry_after = None 28 | super().__init__(self.message) 29 | 30 | def _get_message(self, response): 31 | # Body 32 | if response.data: 33 | import json 34 | try: 35 | return json.loads(response.data)["message"] 36 | except Exception as e: 37 | logging.debug(f"Cannot parse error response to JSON: {response.data}, {e}") 38 | return response.data 39 | 40 | # Header 41 | for header_key in ["X-Platform-Error-Code", "X-Influx-Error", "X-InfluxDb-Error"]: 42 | header_value = response.getheader(header_key) 43 | if header_value is not None: 44 | return header_value 45 | 46 | # Http Status 47 | return response.reason 48 | -------------------------------------------------------------------------------- /influxdb_client/client/labels_api.py: -------------------------------------------------------------------------------- 1 | """Labels are a way to add visual metadata to dashboards, tasks, and other items in the InfluxDB UI.""" 2 | 3 | from typing import List, Dict, Union 4 | 5 | from influxdb_client import LabelsService, LabelCreateRequest, Label, LabelUpdate 6 | 7 | 8 | class LabelsApi(object): 9 | """Implementation for '/api/v2/labels' endpoint.""" 10 | 11 | def __init__(self, influxdb_client): 12 | """Initialize defaults.""" 13 | self._influxdb_client = influxdb_client 14 | self._service = LabelsService(influxdb_client.api_client) 15 | 16 | def create_label(self, name: str, org_id: str, properties: Dict[str, str] = None) -> Label: 17 | """ 18 | Create a new label. 
19 | 20 | :param name: label name 21 | :param org_id: organization id 22 | :param properties: optional label properties 23 | :return: created label 24 | """ 25 | label_request = LabelCreateRequest(org_id=org_id, name=name, properties=properties) 26 | return self._service.post_labels(label_create_request=label_request).label 27 | 28 | def update_label(self, label: Label): 29 | """ 30 | Update an existing label name and properties. 31 | 32 | :param label: label 33 | :return: the updated label 34 | """ 35 | label_update = LabelUpdate() 36 | label_update.properties = label.properties 37 | label_update.name = label.name 38 | return self._service.patch_labels_id(label_id=label.id, label_update=label_update).label 39 | 40 | def delete_label(self, label: Union[str, Label]): 41 | """ 42 | Delete the label. 43 | 44 | :param label: label id or Label 45 | """ 46 | label_id = None 47 | 48 | if isinstance(label, str): 49 | label_id = label 50 | 51 | if isinstance(label, Label): 52 | label_id = label.id 53 | 54 | return self._service.delete_labels_id(label_id=label_id) 55 | 56 | def clone_label(self, cloned_name: str, label: Label) -> Label: 57 | """ 58 | Create a new label as a copy of an existing label. 59 | 60 | :param cloned_name: new label name 61 | :param label: existing label 62 | :return: cloned Label 63 | """ 64 | cloned_properties = None 65 | if label.properties is not None: 66 | cloned_properties = label.properties.copy() 67 | 68 | return self.create_label(name=cloned_name, properties=cloned_properties, org_id=label.org_id) 69 | 70 | def find_labels(self, **kwargs) -> List['Label']: 71 | """ 72 | Get all available labels. 73 | 74 | :key str org_id: The organization ID. 75 | 76 | :return: labels 77 | """ 78 | return self._service.get_labels(**kwargs).labels 79 | 80 | def find_label_by_id(self, label_id: str): 81 | """ 82 | Retrieve the label by ID. 83 | 84 | :param label_id: 85 | :return: Label 86 | """ 87 | return self._service.get_labels_id(label_id=label_id).label 88 | 89 | def find_label_by_org(self, org_id) -> List['Label']: 90 | """ 91 | Get the list of all labels for a given organization. 92 | 93 | :param org_id: organization id 94 | :return: list of labels 95 | """ 96 | return self._service.get_labels(org_id=org_id).labels 97 | -------------------------------------------------------------------------------- /influxdb_client/client/logging_handler.py: -------------------------------------------------------------------------------- 1 | """Use the influxdb_client with python native logging.""" 2 | import logging 3 | 4 | from influxdb_client import InfluxDBClient 5 | 6 | 7 | class InfluxLoggingHandler(logging.Handler): 8 | """ 9 | InfluxLoggingHandler instances dispatch logging events to influx. 10 | 11 | There is no need to set a Formatter. 12 | The raw input will be passed on to the influx write api. 13 | """ 14 | 15 | DEFAULT_LOG_RECORD_KEYS = list(logging.makeLogRecord({}).__dict__.keys()) + ['message'] 16 | 17 | def __init__(self, *, url, token, org, bucket, client_args=None, write_api_args=None): 18 | """ 19 | Initialize defaults. 20 | 21 | The arguments `client_args` and `write_api_args` can be dicts of kwargs. 22 | They are passed on to the InfluxDBClient and write_api calls respectively. 
23 | """ 24 | super().__init__() 25 | 26 | self.bucket = bucket 27 | 28 | client_args = {} if client_args is None else client_args 29 | self.client = InfluxDBClient(url=url, token=token, org=org, **client_args) 30 | 31 | write_api_args = {} if write_api_args is None else write_api_args 32 | self.write_api = self.client.write_api(**write_api_args) 33 | 34 | def __del__(self): 35 | """Make sure all resources are closed.""" 36 | self.close() 37 | 38 | def close(self) -> None: 39 | """Close the write_api, client and logger.""" 40 | self.write_api.close() 41 | self.client.close() 42 | super().close() 43 | 44 | def emit(self, record: logging.LogRecord) -> None: 45 | """Emit a record via the influxDB WriteApi.""" 46 | try: 47 | message = self.format(record) 48 | extra = self._get_extra_values(record) 49 | return self.write_api.write(record=message, **extra) 50 | except (KeyboardInterrupt, SystemExit): 51 | raise 52 | except (Exception,): 53 | self.handleError(record) 54 | 55 | def _get_extra_values(self, record: logging.LogRecord) -> dict: 56 | """ 57 | Extract all items from the record that were injected via extra. 58 | 59 | Example: `logging.debug(msg, extra={key: value, ...})`. 60 | """ 61 | extra = {'bucket': self.bucket} 62 | extra.update({key: value for key, value in record.__dict__.items() 63 | if key not in self.DEFAULT_LOG_RECORD_KEYS}) 64 | return extra 65 | -------------------------------------------------------------------------------- /influxdb_client/client/organizations_api.py: -------------------------------------------------------------------------------- 1 | """ 2 | An organization is a workspace for a group of users. 3 | 4 | All dashboards, tasks, buckets, members, etc., belong to an organization. 5 | """ 6 | 7 | from influxdb_client import OrganizationsService, UsersService, Organization, PatchOrganizationRequest 8 | 9 | 10 | class OrganizationsApi(object): 11 | """Implementation for '/api/v2/orgs' endpoint.""" 12 | 13 | def __init__(self, influxdb_client): 14 | """Initialize defaults.""" 15 | self._influxdb_client = influxdb_client 16 | self._organizations_service = OrganizationsService(influxdb_client.api_client) 17 | self._users_service = UsersService(influxdb_client.api_client) 18 | 19 | def me(self): 20 | """Return the current authenticated user.""" 21 | user = self._users_service.get_me() 22 | return user 23 | 24 | def find_organization(self, org_id): 25 | """Retrieve an organization.""" 26 | return self._organizations_service.get_orgs_id(org_id=org_id) 27 | 28 | def find_organizations(self, **kwargs): 29 | """ 30 | List all organizations. 31 | 32 | :key int offset: Offset for pagination 33 | :key int limit: Limit for pagination 34 | :key bool descending: 35 | :key str org: Filter organizations to a specific organization name. 36 | :key str org_id: Filter organizations to a specific organization ID. 37 | :key str user_id: Filter organizations to a specific user ID. 38 | """ 39 | return self._organizations_service.get_orgs(**kwargs).orgs 40 | 41 | def create_organization(self, name: str = None, organization: Organization = None) -> Organization: 42 | """Create an organization.""" 43 | if organization is None: 44 | organization = Organization(name=name) 45 | return self._organizations_service.post_orgs(post_organization_request=organization) 46 | 47 | def update_organization(self, organization: Organization) -> Organization: 48 | """Update an organization. 
49 | 50 | :param organization: Organization update to apply (required) 51 | :return: Organization 52 | """ 53 | request = PatchOrganizationRequest(name=organization.name, 54 | description=organization.description) 55 | 56 | return self._organizations_service.patch_orgs_id(org_id=organization.id, patch_organization_request=request) 57 | 58 | def delete_organization(self, org_id: str): 59 | """Delete an organization.""" 60 | return self._organizations_service.delete_orgs_id(org_id=org_id) 61 | -------------------------------------------------------------------------------- /influxdb_client/client/users_api.py: -------------------------------------------------------------------------------- 1 | """ 2 | Users are those with access to InfluxDB. 3 | 4 | To grant a user permission to access data, add them as a member of an organization 5 | and provide them with an authentication token. 6 | """ 7 | 8 | from typing import Union 9 | from influxdb_client import UsersService, User, Users, UserResponse, PasswordResetBody 10 | 11 | 12 | class UsersApi(object): 13 | """Implementation for '/api/v2/users' endpoint.""" 14 | 15 | def __init__(self, influxdb_client): 16 | """Initialize defaults.""" 17 | self._influxdb_client = influxdb_client 18 | self._service = UsersService(influxdb_client.api_client) 19 | 20 | def me(self) -> User: 21 | """Return the current authenticated user.""" 22 | user = self._service.get_me() 23 | return user 24 | 25 | def create_user(self, name: str) -> User: 26 | """Create a user.""" 27 | user = User(name=name) 28 | 29 | return self._service.post_users(user=user) 30 | 31 | def update_user(self, user: User) -> UserResponse: 32 | """Update a user. 33 | 34 | :param user: User update to apply (required) 35 | :return: User 36 | """ 37 | return self._service.patch_users_id(user_id=user.id, user=user) 38 | 39 | def update_password(self, user: Union[str, User, UserResponse], password: str) -> None: 40 | """Update a password. 41 | 42 | :param user: User to update password (required) 43 | :param password: New password (required) 44 | :return: None 45 | """ 46 | user_id = self._user_id(user) 47 | 48 | return self._service.post_users_id_password(user_id=user_id, password_reset_body=PasswordResetBody(password)) 49 | 50 | def delete_user(self, user: Union[str, User, UserResponse]) -> None: 51 | """Delete a user. 52 | 53 | :param user: user id or User 54 | :return: None 55 | """ 56 | user_id = self._user_id(user) 57 | 58 | return self._service.delete_users_id(user_id=user_id) 59 | 60 | def find_users(self, **kwargs) -> Users: 61 | """List all users. 62 | 63 | :key int offset: The offset for pagination. The number of records to skip. 64 | :key int limit: Limits the number of records returned. Default is `20`. 65 | :key str after: The last resource ID from which to seek from (but not including). 66 | This is to be used instead of `offset`. 67 | :key str name: The user name. 68 | :key str id: The user ID. 
69 | :return: Users 70 | """ 71 | return self._service.get_users(**kwargs) 72 | 73 | def _user_id(self, user: Union[str, User, UserResponse]): 74 | if isinstance(user, User): 75 | user_id = user.id 76 | elif isinstance(user, UserResponse): 77 | user_id = user.id 78 | else: 79 | user_id = user 80 | return user_id 81 | -------------------------------------------------------------------------------- /influxdb_client/client/util/__init__.py: -------------------------------------------------------------------------------- 1 | """Utils package.""" 2 | -------------------------------------------------------------------------------- /influxdb_client/client/util/date_utils_pandas.py: -------------------------------------------------------------------------------- 1 | """Pandas date utils.""" 2 | from influxdb_client.client.util.date_utils import DateHelper 3 | from influxdb_client.extras import pd 4 | 5 | 6 | class PandasDateTimeHelper(DateHelper): 7 | """DateHelper that uses the Pandas library with nanosecond precision.""" 8 | 9 | def parse_date(self, date_string: str): 10 | """Parse date string into `class 'pandas._libs.tslibs.timestamps.Timestamp`.""" 11 | return pd.to_datetime(date_string) 12 | 13 | def to_nanoseconds(self, delta): 14 | """Get number of nanoseconds with nanos precision.""" 15 | return super().to_nanoseconds(delta) + (delta.nanoseconds if hasattr(delta, 'nanoseconds') else 0) 16 | -------------------------------------------------------------------------------- /influxdb_client/client/util/helpers.py: -------------------------------------------------------------------------------- 1 | """Functions to share utility across client classes.""" 2 | from influxdb_client.rest import ApiException 3 | 4 | 5 | def _is_id(value): 6 | """ 7 | Check if the value is a valid InfluxDB ID. 8 | 9 | :param value: to check 10 | :return: True if the provided parameter is a valid InfluxDB ID. 11 | """ 12 | if value and len(value) == 16: 13 | try: 14 | int(value, 16) 15 | return True 16 | except ValueError: 17 | return False 18 | return False 19 | 20 | 21 | def get_org_query_param(org, client, required_id=False): 22 | """ 23 | Get the required type of the org query parameter. 24 | 25 | :param str, Organization org: value provided as a parameter into API (optional) 26 | :param InfluxDBClient client: with default value for Org parameter 27 | :param bool required_id: true if the query param has to be an ID 28 | :return: request type of org query parameter or None 29 | """ 30 | _org = client.org if org is None else org 31 | if 'Organization' in type(_org).__name__: 32 | _org = _org.id 33 | if required_id and _org and not _is_id(_org): 34 | try: 35 | organizations = client.organizations_api().find_organizations(org=_org) 36 | if len(organizations) < 1: 37 | from influxdb_client.client.exceptions import InfluxDBError 38 | message = f"The client cannot find organization with name: '{_org}' " \ 39 | "to determine their ID. Are you using token with sufficient permission?" 40 | raise InfluxDBError(response=None, message=message) 41 | return organizations[0].id 42 | except ApiException as e: 43 | if e.status == 404: 44 | from influxdb_client.client.exceptions import InfluxDBError 45 | message = f"The client cannot find organization with name: '{_org}' " \ 46 | "to determine their ID."
47 | raise InfluxDBError(response=None, message=message) 48 | raise e 49 | 50 | return _org 51 | -------------------------------------------------------------------------------- /influxdb_client/client/warnings.py: -------------------------------------------------------------------------------- 1 | """The warnings message definition.""" 2 | import warnings 3 | 4 | 5 | class MissingPivotFunction(UserWarning): 6 | """User warning about missing pivot() function.""" 7 | 8 | @staticmethod 9 | def print_warning(query: str): 10 | """Print warning about missing pivot() function and how to deal with that.""" 11 | if 'fieldsAsCols' in query or 'pivot' in query: 12 | return 13 | 14 | message = f"""The query doesn't contains the pivot() function. 15 | 16 | The result will not be shaped to optimal processing by pandas.DataFrame. Use the pivot() function by: 17 | 18 | {query} |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value") 19 | 20 | You can disable this warning by: 21 | import warnings 22 | from influxdb_client.client.warnings import MissingPivotFunction 23 | 24 | warnings.simplefilter("ignore", MissingPivotFunction) 25 | 26 | For more info see: 27 | - https://docs.influxdata.com/resources/videos/pivots-in-flux/ 28 | - https://docs.influxdata.com/flux/latest/stdlib/universe/pivot/ 29 | - https://docs.influxdata.com/flux/latest/stdlib/influxdata/influxdb/schema/fieldsascols/ 30 | """ 31 | warnings.warn(message, MissingPivotFunction) 32 | 33 | 34 | class CloudOnlyWarning(UserWarning): 35 | """User warning about availability only on the InfluxDB Cloud.""" 36 | 37 | @staticmethod 38 | def print_warning(api_name: str, doc_url: str): 39 | """Print warning about availability only on the InfluxDB Cloud.""" 40 | message = f"""The '{api_name}' is available only on the InfluxDB Cloud. 41 | 42 | For more info see: 43 | - {doc_url} 44 | - https://docs.influxdata.com/influxdb/cloud/ 45 | 46 | You can disable this warning by: 47 | import warnings 48 | from influxdb_client.client.warnings import CloudOnlyWarning 49 | 50 | warnings.simplefilter("ignore", CloudOnlyWarning) 51 | """ 52 | warnings.warn(message, CloudOnlyWarning) 53 | -------------------------------------------------------------------------------- /influxdb_client/client/write/__init__.py: -------------------------------------------------------------------------------- 1 | # flake8: noqa 2 | 3 | """ 4 | InfluxDB OSS API Service. 5 | 6 | The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. 
# noqa: E501 7 | 8 | OpenAPI spec version: 2.0.0 9 | Generated by: https://openapi-generator.tech 10 | """ 11 | 12 | 13 | from __future__ import absolute_import 14 | 15 | # import apis into api package 16 | from influxdb_client.service.authorizations_service import AuthorizationsService 17 | from influxdb_client.service.backup_service import BackupService 18 | from influxdb_client.service.bucket_schemas_service import BucketSchemasService 19 | from influxdb_client.service.buckets_service import BucketsService 20 | from influxdb_client.service.cells_service import CellsService 21 | from influxdb_client.service.checks_service import ChecksService 22 | from influxdb_client.service.config_service import ConfigService 23 | from influxdb_client.service.dbr_ps_service import DBRPsService 24 | from influxdb_client.service.dashboards_service import DashboardsService 25 | from influxdb_client.service.delete_service import DeleteService 26 | from influxdb_client.service.health_service import HealthService 27 | from influxdb_client.service.invokable_scripts_service import InvokableScriptsService 28 | from influxdb_client.service.labels_service import LabelsService 29 | from influxdb_client.service.legacy_authorizations_service import LegacyAuthorizationsService 30 | from influxdb_client.service.metrics_service import MetricsService 31 | from influxdb_client.service.notification_endpoints_service import NotificationEndpointsService 32 | from influxdb_client.service.notification_rules_service import NotificationRulesService 33 | from influxdb_client.service.organizations_service import OrganizationsService 34 | from influxdb_client.service.ping_service import PingService 35 | from influxdb_client.service.query_service import QueryService 36 | from influxdb_client.service.ready_service import ReadyService 37 | from influxdb_client.service.remote_connections_service import RemoteConnectionsService 38 | from influxdb_client.service.replications_service import ReplicationsService 39 | from influxdb_client.service.resources_service import ResourcesService 40 | from influxdb_client.service.restore_service import RestoreService 41 | from influxdb_client.service.routes_service import RoutesService 42 | from influxdb_client.service.rules_service import RulesService 43 | from influxdb_client.service.scraper_targets_service import ScraperTargetsService 44 | from influxdb_client.service.secrets_service import SecretsService 45 | from influxdb_client.service.setup_service import SetupService 46 | from influxdb_client.service.signin_service import SigninService 47 | from influxdb_client.service.signout_service import SignoutService 48 | from influxdb_client.service.sources_service import SourcesService 49 | from influxdb_client.service.tasks_service import TasksService 50 | from influxdb_client.service.telegraf_plugins_service import TelegrafPluginsService 51 | from influxdb_client.service.telegrafs_service import TelegrafsService 52 | from influxdb_client.service.templates_service import TemplatesService 53 | from influxdb_client.service.users_service import UsersService 54 | from influxdb_client.service.variables_service import VariablesService 55 | from influxdb_client.service.views_service import ViewsService 56 | from influxdb_client.service.write_service import WriteService 57 | -------------------------------------------------------------------------------- /influxdb_client/domain/ast_response.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 
| InfluxDB OSS API Service. 5 | 6 | The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 7 | 8 | OpenAPI spec version: 2.0.0 9 | Generated by: https://openapi-generator.tech 10 | """ 11 | 12 | 13 | import pprint 14 | import re # noqa: F401 15 | 16 | 17 | class ASTResponse(object): 18 | """NOTE: This class is auto generated by OpenAPI Generator. 19 | 20 | Ref: https://openapi-generator.tech 21 | 22 | Do not edit the class manually. 23 | """ 24 | 25 | """ 26 | Attributes: 27 | openapi_types (dict): The key is attribute name 28 | and the value is attribute type. 29 | attribute_map (dict): The key is attribute name 30 | and the value is json key in definition. 31 | """ 32 | openapi_types = { 33 | 'ast': 'Package' 34 | } 35 | 36 | attribute_map = { 37 | 'ast': 'ast' 38 | } 39 | 40 | def __init__(self, ast=None): # noqa: E501,D401,D403 41 | """ASTResponse - a model defined in OpenAPI.""" # noqa: E501 42 | self._ast = None 43 | self.discriminator = None 44 | 45 | if ast is not None: 46 | self.ast = ast 47 | 48 | @property 49 | def ast(self): 50 | """Get the ast of this ASTResponse. 51 | 52 | :return: The ast of this ASTResponse. 53 | :rtype: Package 54 | """ # noqa: E501 55 | return self._ast 56 | 57 | @ast.setter 58 | def ast(self, ast): 59 | """Set the ast of this ASTResponse. 60 | 61 | :param ast: The ast of this ASTResponse. 62 | :type: Package 63 | """ # noqa: E501 64 | self._ast = ast 65 | 66 | def to_dict(self): 67 | """Return the model properties as a dict.""" 68 | result = {} 69 | 70 | for attr, _ in self.openapi_types.items(): 71 | value = getattr(self, attr) 72 | if isinstance(value, list): 73 | result[attr] = list(map( 74 | lambda x: x.to_dict() if hasattr(x, "to_dict") else x, 75 | value 76 | )) 77 | elif hasattr(value, "to_dict"): 78 | result[attr] = value.to_dict() 79 | elif isinstance(value, dict): 80 | result[attr] = dict(map( 81 | lambda item: (item[0], item[1].to_dict()) 82 | if hasattr(item[1], "to_dict") else item, 83 | value.items() 84 | )) 85 | else: 86 | result[attr] = value 87 | 88 | return result 89 | 90 | def to_str(self): 91 | """Return the string representation of the model.""" 92 | return pprint.pformat(self.to_dict()) 93 | 94 | def __repr__(self): 95 | """For `print` and `pprint`.""" 96 | return self.to_str() 97 | 98 | def __eq__(self, other): 99 | """Return true if both objects are equal.""" 100 | if not isinstance(other, ASTResponse): 101 | return False 102 | 103 | return self.__dict__ == other.__dict__ 104 | 105 | def __ne__(self, other): 106 | """Return true if both objects are not equal.""" 107 | return not self == other 108 | -------------------------------------------------------------------------------- /influxdb_client/domain/axis_scale.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | InfluxDB OSS API Service. 5 | 6 | The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 7 | 8 | OpenAPI spec version: 2.0.0 9 | Generated by: https://openapi-generator.tech 10 | """ 11 | 12 | 13 | import pprint 14 | import re # noqa: F401 15 | 16 | 17 | class AxisScale(object): 18 | """NOTE: This class is auto generated by OpenAPI Generator. 19 | 20 | Ref: https://openapi-generator.tech 21 | 22 | Do not edit the class manually. 
23 | """ 24 | 25 | """ 26 | allowed enum values 27 | """ 28 | LOG = "log" 29 | LINEAR = "linear" 30 | 31 | """ 32 | Attributes: 33 | openapi_types (dict): The key is attribute name 34 | and the value is attribute type. 35 | attribute_map (dict): The key is attribute name 36 | and the value is json key in definition. 37 | """ 38 | openapi_types = { 39 | } 40 | 41 | attribute_map = { 42 | } 43 | 44 | def __init__(self): # noqa: E501,D401,D403 45 | """AxisScale - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None 46 | 47 | def to_dict(self): 48 | """Return the model properties as a dict.""" 49 | result = {} 50 | 51 | for attr, _ in self.openapi_types.items(): 52 | value = getattr(self, attr) 53 | if isinstance(value, list): 54 | result[attr] = list(map( 55 | lambda x: x.to_dict() if hasattr(x, "to_dict") else x, 56 | value 57 | )) 58 | elif hasattr(value, "to_dict"): 59 | result[attr] = value.to_dict() 60 | elif isinstance(value, dict): 61 | result[attr] = dict(map( 62 | lambda item: (item[0], item[1].to_dict()) 63 | if hasattr(item[1], "to_dict") else item, 64 | value.items() 65 | )) 66 | else: 67 | result[attr] = value 68 | 69 | return result 70 | 71 | def to_str(self): 72 | """Return the string representation of the model.""" 73 | return pprint.pformat(self.to_dict()) 74 | 75 | def __repr__(self): 76 | """For `print` and `pprint`.""" 77 | return self.to_str() 78 | 79 | def __eq__(self, other): 80 | """Return true if both objects are equal.""" 81 | if not isinstance(other, AxisScale): 82 | return False 83 | 84 | return self.__dict__ == other.__dict__ 85 | 86 | def __ne__(self, other): 87 | """Return true if both objects are not equal.""" 88 | return not self == other 89 | -------------------------------------------------------------------------------- /influxdb_client/domain/builder_aggregate_function_type.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | InfluxDB OSS API Service. 5 | 6 | The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 7 | 8 | OpenAPI spec version: 2.0.0 9 | Generated by: https://openapi-generator.tech 10 | """ 11 | 12 | 13 | import pprint 14 | import re # noqa: F401 15 | 16 | 17 | class BuilderAggregateFunctionType(object): 18 | """NOTE: This class is auto generated by OpenAPI Generator. 19 | 20 | Ref: https://openapi-generator.tech 21 | 22 | Do not edit the class manually. 23 | """ 24 | 25 | """ 26 | allowed enum values 27 | """ 28 | FILTER = "filter" 29 | GROUP = "group" 30 | 31 | """ 32 | Attributes: 33 | openapi_types (dict): The key is attribute name 34 | and the value is attribute type. 35 | attribute_map (dict): The key is attribute name 36 | and the value is json key in definition. 
37 | """ 38 | openapi_types = { 39 | } 40 | 41 | attribute_map = { 42 | } 43 | 44 | def __init__(self): # noqa: E501,D401,D403 45 | """BuilderAggregateFunctionType - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None 46 | 47 | def to_dict(self): 48 | """Return the model properties as a dict.""" 49 | result = {} 50 | 51 | for attr, _ in self.openapi_types.items(): 52 | value = getattr(self, attr) 53 | if isinstance(value, list): 54 | result[attr] = list(map( 55 | lambda x: x.to_dict() if hasattr(x, "to_dict") else x, 56 | value 57 | )) 58 | elif hasattr(value, "to_dict"): 59 | result[attr] = value.to_dict() 60 | elif isinstance(value, dict): 61 | result[attr] = dict(map( 62 | lambda item: (item[0], item[1].to_dict()) 63 | if hasattr(item[1], "to_dict") else item, 64 | value.items() 65 | )) 66 | else: 67 | result[attr] = value 68 | 69 | return result 70 | 71 | def to_str(self): 72 | """Return the string representation of the model.""" 73 | return pprint.pformat(self.to_dict()) 74 | 75 | def __repr__(self): 76 | """For `print` and `pprint`.""" 77 | return self.to_str() 78 | 79 | def __eq__(self, other): 80 | """Return true if both objects are equal.""" 81 | if not isinstance(other, BuilderAggregateFunctionType): 82 | return False 83 | 84 | return self.__dict__ == other.__dict__ 85 | 86 | def __ne__(self, other): 87 | """Return true if both objects are not equal.""" 88 | return not self == other 89 | -------------------------------------------------------------------------------- /influxdb_client/domain/check_status_level.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | InfluxDB OSS API Service. 5 | 6 | The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 7 | 8 | OpenAPI spec version: 2.0.0 9 | Generated by: https://openapi-generator.tech 10 | """ 11 | 12 | 13 | import pprint 14 | import re # noqa: F401 15 | 16 | 17 | class CheckStatusLevel(object): 18 | """NOTE: This class is auto generated by OpenAPI Generator. 19 | 20 | Ref: https://openapi-generator.tech 21 | 22 | Do not edit the class manually. 23 | """ 24 | 25 | """ 26 | allowed enum values 27 | """ 28 | UNKNOWN = "UNKNOWN" 29 | OK = "OK" 30 | INFO = "INFO" 31 | CRIT = "CRIT" 32 | WARN = "WARN" 33 | 34 | """ 35 | Attributes: 36 | openapi_types (dict): The key is attribute name 37 | and the value is attribute type. 38 | attribute_map (dict): The key is attribute name 39 | and the value is json key in definition. 
40 | """ 41 | openapi_types = { 42 | } 43 | 44 | attribute_map = { 45 | } 46 | 47 | def __init__(self): # noqa: E501,D401,D403 48 | """CheckStatusLevel - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None 49 | 50 | def to_dict(self): 51 | """Return the model properties as a dict.""" 52 | result = {} 53 | 54 | for attr, _ in self.openapi_types.items(): 55 | value = getattr(self, attr) 56 | if isinstance(value, list): 57 | result[attr] = list(map( 58 | lambda x: x.to_dict() if hasattr(x, "to_dict") else x, 59 | value 60 | )) 61 | elif hasattr(value, "to_dict"): 62 | result[attr] = value.to_dict() 63 | elif isinstance(value, dict): 64 | result[attr] = dict(map( 65 | lambda item: (item[0], item[1].to_dict()) 66 | if hasattr(item[1], "to_dict") else item, 67 | value.items() 68 | )) 69 | else: 70 | result[attr] = value 71 | 72 | return result 73 | 74 | def to_str(self): 75 | """Return the string representation of the model.""" 76 | return pprint.pformat(self.to_dict()) 77 | 78 | def __repr__(self): 79 | """For `print` and `pprint`.""" 80 | return self.to_str() 81 | 82 | def __eq__(self, other): 83 | """Return true if both objects are equal.""" 84 | if not isinstance(other, CheckStatusLevel): 85 | return False 86 | 87 | return self.__dict__ == other.__dict__ 88 | 89 | def __ne__(self, other): 90 | """Return true if both objects are not equal.""" 91 | return not self == other 92 | -------------------------------------------------------------------------------- /influxdb_client/domain/column_data_type.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | InfluxDB OSS API Service. 5 | 6 | The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 7 | 8 | OpenAPI spec version: 2.0.0 9 | Generated by: https://openapi-generator.tech 10 | """ 11 | 12 | 13 | import pprint 14 | import re # noqa: F401 15 | 16 | 17 | class ColumnDataType(object): 18 | """NOTE: This class is auto generated by OpenAPI Generator. 19 | 20 | Ref: https://openapi-generator.tech 21 | 22 | Do not edit the class manually. 23 | """ 24 | 25 | """ 26 | allowed enum values 27 | """ 28 | INTEGER = "integer" 29 | FLOAT = "float" 30 | BOOLEAN = "boolean" 31 | STRING = "string" 32 | UNSIGNED = "unsigned" 33 | 34 | """ 35 | Attributes: 36 | openapi_types (dict): The key is attribute name 37 | and the value is attribute type. 38 | attribute_map (dict): The key is attribute name 39 | and the value is json key in definition. 
40 | """ 41 | openapi_types = { 42 | } 43 | 44 | attribute_map = { 45 | } 46 | 47 | def __init__(self): # noqa: E501,D401,D403 48 | """ColumnDataType - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None 49 | 50 | def to_dict(self): 51 | """Return the model properties as a dict.""" 52 | result = {} 53 | 54 | for attr, _ in self.openapi_types.items(): 55 | value = getattr(self, attr) 56 | if isinstance(value, list): 57 | result[attr] = list(map( 58 | lambda x: x.to_dict() if hasattr(x, "to_dict") else x, 59 | value 60 | )) 61 | elif hasattr(value, "to_dict"): 62 | result[attr] = value.to_dict() 63 | elif isinstance(value, dict): 64 | result[attr] = dict(map( 65 | lambda item: (item[0], item[1].to_dict()) 66 | if hasattr(item[1], "to_dict") else item, 67 | value.items() 68 | )) 69 | else: 70 | result[attr] = value 71 | 72 | return result 73 | 74 | def to_str(self): 75 | """Return the string representation of the model.""" 76 | return pprint.pformat(self.to_dict()) 77 | 78 | def __repr__(self): 79 | """For `print` and `pprint`.""" 80 | return self.to_str() 81 | 82 | def __eq__(self, other): 83 | """Return true if both objects are equal.""" 84 | if not isinstance(other, ColumnDataType): 85 | return False 86 | 87 | return self.__dict__ == other.__dict__ 88 | 89 | def __ne__(self, other): 90 | """Return true if both objects are not equal.""" 91 | return not self == other 92 | -------------------------------------------------------------------------------- /influxdb_client/domain/column_semantic_type.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | InfluxDB OSS API Service. 5 | 6 | The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 7 | 8 | OpenAPI spec version: 2.0.0 9 | Generated by: https://openapi-generator.tech 10 | """ 11 | 12 | 13 | import pprint 14 | import re # noqa: F401 15 | 16 | 17 | class ColumnSemanticType(object): 18 | """NOTE: This class is auto generated by OpenAPI Generator. 19 | 20 | Ref: https://openapi-generator.tech 21 | 22 | Do not edit the class manually. 23 | """ 24 | 25 | """ 26 | allowed enum values 27 | """ 28 | TIMESTAMP = "timestamp" 29 | TAG = "tag" 30 | FIELD = "field" 31 | 32 | """ 33 | Attributes: 34 | openapi_types (dict): The key is attribute name 35 | and the value is attribute type. 36 | attribute_map (dict): The key is attribute name 37 | and the value is json key in definition. 
38 | """ 39 | openapi_types = { 40 | } 41 | 42 | attribute_map = { 43 | } 44 | 45 | def __init__(self): # noqa: E501,D401,D403 46 | """ColumnSemanticType - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None 47 | 48 | def to_dict(self): 49 | """Return the model properties as a dict.""" 50 | result = {} 51 | 52 | for attr, _ in self.openapi_types.items(): 53 | value = getattr(self, attr) 54 | if isinstance(value, list): 55 | result[attr] = list(map( 56 | lambda x: x.to_dict() if hasattr(x, "to_dict") else x, 57 | value 58 | )) 59 | elif hasattr(value, "to_dict"): 60 | result[attr] = value.to_dict() 61 | elif isinstance(value, dict): 62 | result[attr] = dict(map( 63 | lambda item: (item[0], item[1].to_dict()) 64 | if hasattr(item[1], "to_dict") else item, 65 | value.items() 66 | )) 67 | else: 68 | result[attr] = value 69 | 70 | return result 71 | 72 | def to_str(self): 73 | """Return the string representation of the model.""" 74 | return pprint.pformat(self.to_dict()) 75 | 76 | def __repr__(self): 77 | """For `print` and `pprint`.""" 78 | return self.to_str() 79 | 80 | def __eq__(self, other): 81 | """Return true if both objects are equal.""" 82 | if not isinstance(other, ColumnSemanticType): 83 | return False 84 | 85 | return self.__dict__ == other.__dict__ 86 | 87 | def __ne__(self, other): 88 | """Return true if both objects are not equal.""" 89 | return not self == other 90 | -------------------------------------------------------------------------------- /influxdb_client/domain/expression.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | InfluxDB OSS API Service. 5 | 6 | The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 7 | 8 | OpenAPI spec version: 2.0.0 9 | Generated by: https://openapi-generator.tech 10 | """ 11 | 12 | 13 | import pprint 14 | import re # noqa: F401 15 | 16 | from influxdb_client.domain.node import Node 17 | 18 | 19 | class Expression(Node): 20 | """NOTE: This class is auto generated by OpenAPI Generator. 21 | 22 | Ref: https://openapi-generator.tech 23 | 24 | Do not edit the class manually. 25 | """ 26 | 27 | """ 28 | Attributes: 29 | openapi_types (dict): The key is attribute name 30 | and the value is attribute type. 31 | attribute_map (dict): The key is attribute name 32 | and the value is json key in definition. 
33 | """ 34 | openapi_types = { 35 | } 36 | 37 | attribute_map = { 38 | } 39 | 40 | def __init__(self): # noqa: E501,D401,D403 41 | """Expression - a model defined in OpenAPI.""" # noqa: E501 42 | Node.__init__(self) # noqa: E501 43 | self.discriminator = None 44 | 45 | def to_dict(self): 46 | """Return the model properties as a dict.""" 47 | result = {} 48 | 49 | for attr, _ in self.openapi_types.items(): 50 | value = getattr(self, attr) 51 | if isinstance(value, list): 52 | result[attr] = list(map( 53 | lambda x: x.to_dict() if hasattr(x, "to_dict") else x, 54 | value 55 | )) 56 | elif hasattr(value, "to_dict"): 57 | result[attr] = value.to_dict() 58 | elif isinstance(value, dict): 59 | result[attr] = dict(map( 60 | lambda item: (item[0], item[1].to_dict()) 61 | if hasattr(item[1], "to_dict") else item, 62 | value.items() 63 | )) 64 | else: 65 | result[attr] = value 66 | 67 | return result 68 | 69 | def to_str(self): 70 | """Return the string representation of the model.""" 71 | return pprint.pformat(self.to_dict()) 72 | 73 | def __repr__(self): 74 | """For `print` and `pprint`.""" 75 | return self.to_str() 76 | 77 | def __eq__(self, other): 78 | """Return true if both objects are equal.""" 79 | if not isinstance(other, Expression): 80 | return False 81 | 82 | return self.__dict__ == other.__dict__ 83 | 84 | def __ne__(self, other): 85 | """Return true if both objects are not equal.""" 86 | return not self == other 87 | -------------------------------------------------------------------------------- /influxdb_client/domain/node.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | InfluxDB OSS API Service. 5 | 6 | The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 7 | 8 | OpenAPI spec version: 2.0.0 9 | Generated by: https://openapi-generator.tech 10 | """ 11 | 12 | 13 | import pprint 14 | import re # noqa: F401 15 | 16 | 17 | class Node(object): 18 | """NOTE: This class is auto generated by OpenAPI Generator. 19 | 20 | Ref: https://openapi-generator.tech 21 | 22 | Do not edit the class manually. 23 | """ 24 | 25 | """ 26 | Attributes: 27 | openapi_types (dict): The key is attribute name 28 | and the value is attribute type. 29 | attribute_map (dict): The key is attribute name 30 | and the value is json key in definition. 
31 | """ 32 | openapi_types = { 33 | } 34 | 35 | attribute_map = { 36 | } 37 | 38 | def __init__(self): # noqa: E501,D401,D403 39 | """Node - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None 40 | 41 | def to_dict(self): 42 | """Return the model properties as a dict.""" 43 | result = {} 44 | 45 | for attr, _ in self.openapi_types.items(): 46 | value = getattr(self, attr) 47 | if isinstance(value, list): 48 | result[attr] = list(map( 49 | lambda x: x.to_dict() if hasattr(x, "to_dict") else x, 50 | value 51 | )) 52 | elif hasattr(value, "to_dict"): 53 | result[attr] = value.to_dict() 54 | elif isinstance(value, dict): 55 | result[attr] = dict(map( 56 | lambda item: (item[0], item[1].to_dict()) 57 | if hasattr(item[1], "to_dict") else item, 58 | value.items() 59 | )) 60 | else: 61 | result[attr] = value 62 | 63 | return result 64 | 65 | def to_str(self): 66 | """Return the string representation of the model.""" 67 | return pprint.pformat(self.to_dict()) 68 | 69 | def __repr__(self): 70 | """For `print` and `pprint`.""" 71 | return self.to_str() 72 | 73 | def __eq__(self, other): 74 | """Return true if both objects are equal.""" 75 | if not isinstance(other, Node): 76 | return False 77 | 78 | return self.__dict__ == other.__dict__ 79 | 80 | def __ne__(self, other): 81 | """Return true if both objects are not equal.""" 82 | return not self == other 83 | -------------------------------------------------------------------------------- /influxdb_client/domain/notification_endpoint_type.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | InfluxDB OSS API Service. 5 | 6 | The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 7 | 8 | OpenAPI spec version: 2.0.0 9 | Generated by: https://openapi-generator.tech 10 | """ 11 | 12 | 13 | import pprint 14 | import re # noqa: F401 15 | 16 | 17 | class NotificationEndpointType(object): 18 | """NOTE: This class is auto generated by OpenAPI Generator. 19 | 20 | Ref: https://openapi-generator.tech 21 | 22 | Do not edit the class manually. 23 | """ 24 | 25 | """ 26 | allowed enum values 27 | """ 28 | SLACK = "slack" 29 | PAGERDUTY = "pagerduty" 30 | HTTP = "http" 31 | TELEGRAM = "telegram" 32 | 33 | """ 34 | Attributes: 35 | openapi_types (dict): The key is attribute name 36 | and the value is attribute type. 37 | attribute_map (dict): The key is attribute name 38 | and the value is json key in definition. 
39 | """ 40 | openapi_types = { 41 | } 42 | 43 | attribute_map = { 44 | } 45 | 46 | def __init__(self): # noqa: E501,D401,D403 47 | """NotificationEndpointType - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None 48 | 49 | def to_dict(self): 50 | """Return the model properties as a dict.""" 51 | result = {} 52 | 53 | for attr, _ in self.openapi_types.items(): 54 | value = getattr(self, attr) 55 | if isinstance(value, list): 56 | result[attr] = list(map( 57 | lambda x: x.to_dict() if hasattr(x, "to_dict") else x, 58 | value 59 | )) 60 | elif hasattr(value, "to_dict"): 61 | result[attr] = value.to_dict() 62 | elif isinstance(value, dict): 63 | result[attr] = dict(map( 64 | lambda item: (item[0], item[1].to_dict()) 65 | if hasattr(item[1], "to_dict") else item, 66 | value.items() 67 | )) 68 | else: 69 | result[attr] = value 70 | 71 | return result 72 | 73 | def to_str(self): 74 | """Return the string representation of the model.""" 75 | return pprint.pformat(self.to_dict()) 76 | 77 | def __repr__(self): 78 | """For `print` and `pprint`.""" 79 | return self.to_str() 80 | 81 | def __eq__(self, other): 82 | """Return true if both objects are equal.""" 83 | if not isinstance(other, NotificationEndpointType): 84 | return False 85 | 86 | return self.__dict__ == other.__dict__ 87 | 88 | def __ne__(self, other): 89 | """Return true if both objects are not equal.""" 90 | return not self == other 91 | -------------------------------------------------------------------------------- /influxdb_client/domain/property_key.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | InfluxDB OSS API Service. 5 | 6 | The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 7 | 8 | OpenAPI spec version: 2.0.0 9 | Generated by: https://openapi-generator.tech 10 | """ 11 | 12 | 13 | import pprint 14 | import re # noqa: F401 15 | 16 | from influxdb_client.domain.expression import Expression 17 | 18 | 19 | class PropertyKey(Expression): 20 | """NOTE: This class is auto generated by OpenAPI Generator. 21 | 22 | Ref: https://openapi-generator.tech 23 | 24 | Do not edit the class manually. 25 | """ 26 | 27 | """ 28 | Attributes: 29 | openapi_types (dict): The key is attribute name 30 | and the value is attribute type. 31 | attribute_map (dict): The key is attribute name 32 | and the value is json key in definition. 
33 | """ 34 | openapi_types = { 35 | } 36 | 37 | attribute_map = { 38 | } 39 | 40 | def __init__(self): # noqa: E501,D401,D403 41 | """PropertyKey - a model defined in OpenAPI.""" # noqa: E501 42 | Expression.__init__(self) # noqa: E501 43 | self.discriminator = None 44 | 45 | def to_dict(self): 46 | """Return the model properties as a dict.""" 47 | result = {} 48 | 49 | for attr, _ in self.openapi_types.items(): 50 | value = getattr(self, attr) 51 | if isinstance(value, list): 52 | result[attr] = list(map( 53 | lambda x: x.to_dict() if hasattr(x, "to_dict") else x, 54 | value 55 | )) 56 | elif hasattr(value, "to_dict"): 57 | result[attr] = value.to_dict() 58 | elif isinstance(value, dict): 59 | result[attr] = dict(map( 60 | lambda item: (item[0], item[1].to_dict()) 61 | if hasattr(item[1], "to_dict") else item, 62 | value.items() 63 | )) 64 | else: 65 | result[attr] = value 66 | 67 | return result 68 | 69 | def to_str(self): 70 | """Return the string representation of the model.""" 71 | return pprint.pformat(self.to_dict()) 72 | 73 | def __repr__(self): 74 | """For `print` and `pprint`.""" 75 | return self.to_str() 76 | 77 | def __eq__(self, other): 78 | """Return true if both objects are equal.""" 79 | if not isinstance(other, PropertyKey): 80 | return False 81 | 82 | return self.__dict__ == other.__dict__ 83 | 84 | def __ne__(self, other): 85 | """Return true if both objects are not equal.""" 86 | return not self == other 87 | -------------------------------------------------------------------------------- /influxdb_client/domain/query_edit_mode.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | InfluxDB OSS API Service. 5 | 6 | The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 7 | 8 | OpenAPI spec version: 2.0.0 9 | Generated by: https://openapi-generator.tech 10 | """ 11 | 12 | 13 | import pprint 14 | import re # noqa: F401 15 | 16 | 17 | class QueryEditMode(object): 18 | """NOTE: This class is auto generated by OpenAPI Generator. 19 | 20 | Ref: https://openapi-generator.tech 21 | 22 | Do not edit the class manually. 23 | """ 24 | 25 | """ 26 | allowed enum values 27 | """ 28 | BUILDER = "builder" 29 | ADVANCED = "advanced" 30 | 31 | """ 32 | Attributes: 33 | openapi_types (dict): The key is attribute name 34 | and the value is attribute type. 35 | attribute_map (dict): The key is attribute name 36 | and the value is json key in definition. 
37 | """ 38 | openapi_types = { 39 | } 40 | 41 | attribute_map = { 42 | } 43 | 44 | def __init__(self): # noqa: E501,D401,D403 45 | """QueryEditMode - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None 46 | 47 | def to_dict(self): 48 | """Return the model properties as a dict.""" 49 | result = {} 50 | 51 | for attr, _ in self.openapi_types.items(): 52 | value = getattr(self, attr) 53 | if isinstance(value, list): 54 | result[attr] = list(map( 55 | lambda x: x.to_dict() if hasattr(x, "to_dict") else x, 56 | value 57 | )) 58 | elif hasattr(value, "to_dict"): 59 | result[attr] = value.to_dict() 60 | elif isinstance(value, dict): 61 | result[attr] = dict(map( 62 | lambda item: (item[0], item[1].to_dict()) 63 | if hasattr(item[1], "to_dict") else item, 64 | value.items() 65 | )) 66 | else: 67 | result[attr] = value 68 | 69 | return result 70 | 71 | def to_str(self): 72 | """Return the string representation of the model.""" 73 | return pprint.pformat(self.to_dict()) 74 | 75 | def __repr__(self): 76 | """For `print` and `pprint`.""" 77 | return self.to_str() 78 | 79 | def __eq__(self, other): 80 | """Return true if both objects are equal.""" 81 | if not isinstance(other, QueryEditMode): 82 | return False 83 | 84 | return self.__dict__ == other.__dict__ 85 | 86 | def __ne__(self, other): 87 | """Return true if both objects are not equal.""" 88 | return not self == other 89 | -------------------------------------------------------------------------------- /influxdb_client/domain/rule_status_level.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | InfluxDB OSS API Service. 5 | 6 | The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 7 | 8 | OpenAPI spec version: 2.0.0 9 | Generated by: https://openapi-generator.tech 10 | """ 11 | 12 | 13 | import pprint 14 | import re # noqa: F401 15 | 16 | 17 | class RuleStatusLevel(object): 18 | """NOTE: This class is auto generated by OpenAPI Generator. 19 | 20 | Ref: https://openapi-generator.tech 21 | 22 | Do not edit the class manually. 23 | """ 24 | 25 | """ 26 | allowed enum values 27 | """ 28 | UNKNOWN = "UNKNOWN" 29 | OK = "OK" 30 | INFO = "INFO" 31 | CRIT = "CRIT" 32 | WARN = "WARN" 33 | ANY = "ANY" 34 | 35 | """ 36 | Attributes: 37 | openapi_types (dict): The key is attribute name 38 | and the value is attribute type. 39 | attribute_map (dict): The key is attribute name 40 | and the value is json key in definition. 
41 | """ 42 | openapi_types = { 43 | } 44 | 45 | attribute_map = { 46 | } 47 | 48 | def __init__(self): # noqa: E501,D401,D403 49 | """RuleStatusLevel - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None 50 | 51 | def to_dict(self): 52 | """Return the model properties as a dict.""" 53 | result = {} 54 | 55 | for attr, _ in self.openapi_types.items(): 56 | value = getattr(self, attr) 57 | if isinstance(value, list): 58 | result[attr] = list(map( 59 | lambda x: x.to_dict() if hasattr(x, "to_dict") else x, 60 | value 61 | )) 62 | elif hasattr(value, "to_dict"): 63 | result[attr] = value.to_dict() 64 | elif isinstance(value, dict): 65 | result[attr] = dict(map( 66 | lambda item: (item[0], item[1].to_dict()) 67 | if hasattr(item[1], "to_dict") else item, 68 | value.items() 69 | )) 70 | else: 71 | result[attr] = value 72 | 73 | return result 74 | 75 | def to_str(self): 76 | """Return the string representation of the model.""" 77 | return pprint.pformat(self.to_dict()) 78 | 79 | def __repr__(self): 80 | """For `print` and `pprint`.""" 81 | return self.to_str() 82 | 83 | def __eq__(self, other): 84 | """Return true if both objects are equal.""" 85 | if not isinstance(other, RuleStatusLevel): 86 | return False 87 | 88 | return self.__dict__ == other.__dict__ 89 | 90 | def __ne__(self, other): 91 | """Return true if both objects are not equal.""" 92 | return not self == other 93 | -------------------------------------------------------------------------------- /influxdb_client/domain/schema_type.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | InfluxDB OSS API Service. 5 | 6 | The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 7 | 8 | OpenAPI spec version: 2.0.0 9 | Generated by: https://openapi-generator.tech 10 | """ 11 | 12 | 13 | import pprint 14 | import re # noqa: F401 15 | 16 | 17 | class SchemaType(object): 18 | """NOTE: This class is auto generated by OpenAPI Generator. 19 | 20 | Ref: https://openapi-generator.tech 21 | 22 | Do not edit the class manually. 23 | """ 24 | 25 | """ 26 | allowed enum values 27 | """ 28 | IMPLICIT = "implicit" 29 | EXPLICIT = "explicit" 30 | 31 | """ 32 | Attributes: 33 | openapi_types (dict): The key is attribute name 34 | and the value is attribute type. 35 | attribute_map (dict): The key is attribute name 36 | and the value is json key in definition. 
37 | """ 38 | openapi_types = { 39 | } 40 | 41 | attribute_map = { 42 | } 43 | 44 | def __init__(self): # noqa: E501,D401,D403 45 | """SchemaType - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None 46 | 47 | def to_dict(self): 48 | """Return the model properties as a dict.""" 49 | result = {} 50 | 51 | for attr, _ in self.openapi_types.items(): 52 | value = getattr(self, attr) 53 | if isinstance(value, list): 54 | result[attr] = list(map( 55 | lambda x: x.to_dict() if hasattr(x, "to_dict") else x, 56 | value 57 | )) 58 | elif hasattr(value, "to_dict"): 59 | result[attr] = value.to_dict() 60 | elif isinstance(value, dict): 61 | result[attr] = dict(map( 62 | lambda item: (item[0], item[1].to_dict()) 63 | if hasattr(item[1], "to_dict") else item, 64 | value.items() 65 | )) 66 | else: 67 | result[attr] = value 68 | 69 | return result 70 | 71 | def to_str(self): 72 | """Return the string representation of the model.""" 73 | return pprint.pformat(self.to_dict()) 74 | 75 | def __repr__(self): 76 | """For `print` and `pprint`.""" 77 | return self.to_str() 78 | 79 | def __eq__(self, other): 80 | """Return true if both objects are equal.""" 81 | if not isinstance(other, SchemaType): 82 | return False 83 | 84 | return self.__dict__ == other.__dict__ 85 | 86 | def __ne__(self, other): 87 | """Return true if both objects are not equal.""" 88 | return not self == other 89 | -------------------------------------------------------------------------------- /influxdb_client/domain/script_language.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | InfluxDB OSS API Service. 5 | 6 | The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 7 | 8 | OpenAPI spec version: 2.0.0 9 | Generated by: https://openapi-generator.tech 10 | """ 11 | 12 | 13 | import pprint 14 | import re # noqa: F401 15 | 16 | 17 | class ScriptLanguage(object): 18 | """NOTE: This class is auto generated by OpenAPI Generator. 19 | 20 | Ref: https://openapi-generator.tech 21 | 22 | Do not edit the class manually. 23 | """ 24 | 25 | """ 26 | allowed enum values 27 | """ 28 | FLUX = "flux" 29 | SQL = "sql" 30 | INFLUXQL = "influxql" 31 | 32 | """ 33 | Attributes: 34 | openapi_types (dict): The key is attribute name 35 | and the value is attribute type. 36 | attribute_map (dict): The key is attribute name 37 | and the value is json key in definition. 
38 | """ 39 | openapi_types = { 40 | } 41 | 42 | attribute_map = { 43 | } 44 | 45 | def __init__(self): # noqa: E501,D401,D403 46 | """ScriptLanguage - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None 47 | 48 | def to_dict(self): 49 | """Return the model properties as a dict.""" 50 | result = {} 51 | 52 | for attr, _ in self.openapi_types.items(): 53 | value = getattr(self, attr) 54 | if isinstance(value, list): 55 | result[attr] = list(map( 56 | lambda x: x.to_dict() if hasattr(x, "to_dict") else x, 57 | value 58 | )) 59 | elif hasattr(value, "to_dict"): 60 | result[attr] = value.to_dict() 61 | elif isinstance(value, dict): 62 | result[attr] = dict(map( 63 | lambda item: (item[0], item[1].to_dict()) 64 | if hasattr(item[1], "to_dict") else item, 65 | value.items() 66 | )) 67 | else: 68 | result[attr] = value 69 | 70 | return result 71 | 72 | def to_str(self): 73 | """Return the string representation of the model.""" 74 | return pprint.pformat(self.to_dict()) 75 | 76 | def __repr__(self): 77 | """For `print` and `pprint`.""" 78 | return self.to_str() 79 | 80 | def __eq__(self, other): 81 | """Return true if both objects are equal.""" 82 | if not isinstance(other, ScriptLanguage): 83 | return False 84 | 85 | return self.__dict__ == other.__dict__ 86 | 87 | def __ne__(self, other): 88 | """Return true if both objects are not equal.""" 89 | return not self == other 90 | -------------------------------------------------------------------------------- /influxdb_client/domain/statement.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | InfluxDB OSS API Service. 5 | 6 | The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 7 | 8 | OpenAPI spec version: 2.0.0 9 | Generated by: https://openapi-generator.tech 10 | """ 11 | 12 | 13 | import pprint 14 | import re # noqa: F401 15 | 16 | 17 | class Statement(object): 18 | """NOTE: This class is auto generated by OpenAPI Generator. 19 | 20 | Ref: https://openapi-generator.tech 21 | 22 | Do not edit the class manually. 23 | """ 24 | 25 | """ 26 | Attributes: 27 | openapi_types (dict): The key is attribute name 28 | and the value is attribute type. 29 | attribute_map (dict): The key is attribute name 30 | and the value is json key in definition. 
31 | """ 32 | openapi_types = { 33 | } 34 | 35 | attribute_map = { 36 | } 37 | 38 | def __init__(self): # noqa: E501,D401,D403 39 | """Statement - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None 40 | 41 | def to_dict(self): 42 | """Return the model properties as a dict.""" 43 | result = {} 44 | 45 | for attr, _ in self.openapi_types.items(): 46 | value = getattr(self, attr) 47 | if isinstance(value, list): 48 | result[attr] = list(map( 49 | lambda x: x.to_dict() if hasattr(x, "to_dict") else x, 50 | value 51 | )) 52 | elif hasattr(value, "to_dict"): 53 | result[attr] = value.to_dict() 54 | elif isinstance(value, dict): 55 | result[attr] = dict(map( 56 | lambda item: (item[0], item[1].to_dict()) 57 | if hasattr(item[1], "to_dict") else item, 58 | value.items() 59 | )) 60 | else: 61 | result[attr] = value 62 | 63 | return result 64 | 65 | def to_str(self): 66 | """Return the string representation of the model.""" 67 | return pprint.pformat(self.to_dict()) 68 | 69 | def __repr__(self): 70 | """For `print` and `pprint`.""" 71 | return self.to_str() 72 | 73 | def __eq__(self, other): 74 | """Return true if both objects are equal.""" 75 | if not isinstance(other, Statement): 76 | return False 77 | 78 | return self.__dict__ == other.__dict__ 79 | 80 | def __ne__(self, other): 81 | """Return true if both objects are not equal.""" 82 | return not self == other 83 | -------------------------------------------------------------------------------- /influxdb_client/domain/task_status_type.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | InfluxDB OSS API Service. 5 | 6 | The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 7 | 8 | OpenAPI spec version: 2.0.0 9 | Generated by: https://openapi-generator.tech 10 | """ 11 | 12 | 13 | import pprint 14 | import re # noqa: F401 15 | 16 | 17 | class TaskStatusType(object): 18 | """NOTE: This class is auto generated by OpenAPI Generator. 19 | 20 | Ref: https://openapi-generator.tech 21 | 22 | Do not edit the class manually. 23 | """ 24 | 25 | """ 26 | allowed enum values 27 | """ 28 | ACTIVE = "active" 29 | INACTIVE = "inactive" 30 | 31 | """ 32 | Attributes: 33 | openapi_types (dict): The key is attribute name 34 | and the value is attribute type. 35 | attribute_map (dict): The key is attribute name 36 | and the value is json key in definition. 
37 | """ 38 | openapi_types = { 39 | } 40 | 41 | attribute_map = { 42 | } 43 | 44 | def __init__(self): # noqa: E501,D401,D403 45 | """TaskStatusType - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None 46 | 47 | def to_dict(self): 48 | """Return the model properties as a dict.""" 49 | result = {} 50 | 51 | for attr, _ in self.openapi_types.items(): 52 | value = getattr(self, attr) 53 | if isinstance(value, list): 54 | result[attr] = list(map( 55 | lambda x: x.to_dict() if hasattr(x, "to_dict") else x, 56 | value 57 | )) 58 | elif hasattr(value, "to_dict"): 59 | result[attr] = value.to_dict() 60 | elif isinstance(value, dict): 61 | result[attr] = dict(map( 62 | lambda item: (item[0], item[1].to_dict()) 63 | if hasattr(item[1], "to_dict") else item, 64 | value.items() 65 | )) 66 | else: 67 | result[attr] = value 68 | 69 | return result 70 | 71 | def to_str(self): 72 | """Return the string representation of the model.""" 73 | return pprint.pformat(self.to_dict()) 74 | 75 | def __repr__(self): 76 | """For `print` and `pprint`.""" 77 | return self.to_str() 78 | 79 | def __eq__(self, other): 80 | """Return true if both objects are equal.""" 81 | if not isinstance(other, TaskStatusType): 82 | return False 83 | 84 | return self.__dict__ == other.__dict__ 85 | 86 | def __ne__(self, other): 87 | """Return true if both objects are not equal.""" 88 | return not self == other 89 | -------------------------------------------------------------------------------- /influxdb_client/domain/template_kind.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | InfluxDB OSS API Service. 5 | 6 | The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 7 | 8 | OpenAPI spec version: 2.0.0 9 | Generated by: https://openapi-generator.tech 10 | """ 11 | 12 | 13 | import pprint 14 | import re # noqa: F401 15 | 16 | 17 | class TemplateKind(object): 18 | """NOTE: This class is auto generated by OpenAPI Generator. 19 | 20 | Ref: https://openapi-generator.tech 21 | 22 | Do not edit the class manually. 23 | """ 24 | 25 | """ 26 | allowed enum values 27 | """ 28 | BUCKET = "Bucket" 29 | CHECK = "Check" 30 | CHECKDEADMAN = "CheckDeadman" 31 | CHECKTHRESHOLD = "CheckThreshold" 32 | DASHBOARD = "Dashboard" 33 | LABEL = "Label" 34 | NOTIFICATIONENDPOINT = "NotificationEndpoint" 35 | NOTIFICATIONENDPOINTHTTP = "NotificationEndpointHTTP" 36 | NOTIFICATIONENDPOINTPAGERDUTY = "NotificationEndpointPagerDuty" 37 | NOTIFICATIONENDPOINTSLACK = "NotificationEndpointSlack" 38 | NOTIFICATIONRULE = "NotificationRule" 39 | TASK = "Task" 40 | TELEGRAF = "Telegraf" 41 | VARIABLE = "Variable" 42 | 43 | """ 44 | Attributes: 45 | openapi_types (dict): The key is attribute name 46 | and the value is attribute type. 47 | attribute_map (dict): The key is attribute name 48 | and the value is json key in definition. 
49 | """ 50 | openapi_types = { 51 | } 52 | 53 | attribute_map = { 54 | } 55 | 56 | def __init__(self): # noqa: E501,D401,D403 57 | """TemplateKind - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None 58 | 59 | def to_dict(self): 60 | """Return the model properties as a dict.""" 61 | result = {} 62 | 63 | for attr, _ in self.openapi_types.items(): 64 | value = getattr(self, attr) 65 | if isinstance(value, list): 66 | result[attr] = list(map( 67 | lambda x: x.to_dict() if hasattr(x, "to_dict") else x, 68 | value 69 | )) 70 | elif hasattr(value, "to_dict"): 71 | result[attr] = value.to_dict() 72 | elif isinstance(value, dict): 73 | result[attr] = dict(map( 74 | lambda item: (item[0], item[1].to_dict()) 75 | if hasattr(item[1], "to_dict") else item, 76 | value.items() 77 | )) 78 | else: 79 | result[attr] = value 80 | 81 | return result 82 | 83 | def to_str(self): 84 | """Return the string representation of the model.""" 85 | return pprint.pformat(self.to_dict()) 86 | 87 | def __repr__(self): 88 | """For `print` and `pprint`.""" 89 | return self.to_str() 90 | 91 | def __eq__(self, other): 92 | """Return true if both objects are equal.""" 93 | if not isinstance(other, TemplateKind): 94 | return False 95 | 96 | return self.__dict__ == other.__dict__ 97 | 98 | def __ne__(self, other): 99 | """Return true if both objects are not equal.""" 100 | return not self == other 101 | -------------------------------------------------------------------------------- /influxdb_client/domain/variable_properties.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | InfluxDB OSS API Service. 5 | 6 | The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 7 | 8 | OpenAPI spec version: 2.0.0 9 | Generated by: https://openapi-generator.tech 10 | """ 11 | 12 | 13 | import pprint 14 | import re # noqa: F401 15 | 16 | 17 | class VariableProperties(object): 18 | """NOTE: This class is auto generated by OpenAPI Generator. 19 | 20 | Ref: https://openapi-generator.tech 21 | 22 | Do not edit the class manually. 23 | """ 24 | 25 | """ 26 | Attributes: 27 | openapi_types (dict): The key is attribute name 28 | and the value is attribute type. 29 | attribute_map (dict): The key is attribute name 30 | and the value is json key in definition. 
31 | """ 32 | openapi_types = { 33 | } 34 | 35 | attribute_map = { 36 | } 37 | 38 | def __init__(self): # noqa: E501,D401,D403 39 | """VariableProperties - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None 40 | 41 | def to_dict(self): 42 | """Return the model properties as a dict.""" 43 | result = {} 44 | 45 | for attr, _ in self.openapi_types.items(): 46 | value = getattr(self, attr) 47 | if isinstance(value, list): 48 | result[attr] = list(map( 49 | lambda x: x.to_dict() if hasattr(x, "to_dict") else x, 50 | value 51 | )) 52 | elif hasattr(value, "to_dict"): 53 | result[attr] = value.to_dict() 54 | elif isinstance(value, dict): 55 | result[attr] = dict(map( 56 | lambda item: (item[0], item[1].to_dict()) 57 | if hasattr(item[1], "to_dict") else item, 58 | value.items() 59 | )) 60 | else: 61 | result[attr] = value 62 | 63 | return result 64 | 65 | def to_str(self): 66 | """Return the string representation of the model.""" 67 | return pprint.pformat(self.to_dict()) 68 | 69 | def __repr__(self): 70 | """For `print` and `pprint`.""" 71 | return self.to_str() 72 | 73 | def __eq__(self, other): 74 | """Return true if both objects are equal.""" 75 | if not isinstance(other, VariableProperties): 76 | return False 77 | 78 | return self.__dict__ == other.__dict__ 79 | 80 | def __ne__(self, other): 81 | """Return true if both objects are not equal.""" 82 | return not self == other 83 | -------------------------------------------------------------------------------- /influxdb_client/domain/view_links.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | InfluxDB OSS API Service. 5 | 6 | The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 7 | 8 | OpenAPI spec version: 2.0.0 9 | Generated by: https://openapi-generator.tech 10 | """ 11 | 12 | 13 | import pprint 14 | import re # noqa: F401 15 | 16 | 17 | class ViewLinks(object): 18 | """NOTE: This class is auto generated by OpenAPI Generator. 19 | 20 | Ref: https://openapi-generator.tech 21 | 22 | Do not edit the class manually. 23 | """ 24 | 25 | """ 26 | Attributes: 27 | openapi_types (dict): The key is attribute name 28 | and the value is attribute type. 29 | attribute_map (dict): The key is attribute name 30 | and the value is json key in definition. 31 | """ 32 | openapi_types = { 33 | '_self': 'str' 34 | } 35 | 36 | attribute_map = { 37 | '_self': 'self' 38 | } 39 | 40 | def __init__(self, _self=None): # noqa: E501,D401,D403 41 | """ViewLinks - a model defined in OpenAPI.""" # noqa: E501 42 | self.__self = None 43 | self.discriminator = None 44 | 45 | if _self is not None: 46 | self._self = _self 47 | 48 | @property 49 | def _self(self): 50 | """Get the _self of this ViewLinks. 51 | 52 | :return: The _self of this ViewLinks. 53 | :rtype: str 54 | """ # noqa: E501 55 | return self.__self 56 | 57 | @_self.setter 58 | def _self(self, _self): 59 | """Set the _self of this ViewLinks. 60 | 61 | :param _self: The _self of this ViewLinks. 
62 | :type: str 63 | """ # noqa: E501 64 | self.__self = _self 65 | 66 | def to_dict(self): 67 | """Return the model properties as a dict.""" 68 | result = {} 69 | 70 | for attr, _ in self.openapi_types.items(): 71 | value = getattr(self, attr) 72 | if isinstance(value, list): 73 | result[attr] = list(map( 74 | lambda x: x.to_dict() if hasattr(x, "to_dict") else x, 75 | value 76 | )) 77 | elif hasattr(value, "to_dict"): 78 | result[attr] = value.to_dict() 79 | elif isinstance(value, dict): 80 | result[attr] = dict(map( 81 | lambda item: (item[0], item[1].to_dict()) 82 | if hasattr(item[1], "to_dict") else item, 83 | value.items() 84 | )) 85 | else: 86 | result[attr] = value 87 | 88 | return result 89 | 90 | def to_str(self): 91 | """Return the string representation of the model.""" 92 | return pprint.pformat(self.to_dict()) 93 | 94 | def __repr__(self): 95 | """For `print` and `pprint`.""" 96 | return self.to_str() 97 | 98 | def __eq__(self, other): 99 | """Return true if both objects are equal.""" 100 | if not isinstance(other, ViewLinks): 101 | return False 102 | 103 | return self.__dict__ == other.__dict__ 104 | 105 | def __ne__(self, other): 106 | """Return true if both objects are not equal.""" 107 | return not self == other 108 | -------------------------------------------------------------------------------- /influxdb_client/domain/view_properties.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | InfluxDB OSS API Service. 5 | 6 | The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 7 | 8 | OpenAPI spec version: 2.0.0 9 | Generated by: https://openapi-generator.tech 10 | """ 11 | 12 | 13 | import pprint 14 | import re # noqa: F401 15 | 16 | 17 | class ViewProperties(object): 18 | """NOTE: This class is auto generated by OpenAPI Generator. 19 | 20 | Ref: https://openapi-generator.tech 21 | 22 | Do not edit the class manually. 23 | """ 24 | 25 | """ 26 | Attributes: 27 | openapi_types (dict): The key is attribute name 28 | and the value is attribute type. 29 | attribute_map (dict): The key is attribute name 30 | and the value is json key in definition. 
31 | """ 32 | openapi_types = { 33 | } 34 | 35 | attribute_map = { 36 | } 37 | 38 | def __init__(self): # noqa: E501,D401,D403 39 | """ViewProperties - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None 40 | 41 | def to_dict(self): 42 | """Return the model properties as a dict.""" 43 | result = {} 44 | 45 | for attr, _ in self.openapi_types.items(): 46 | value = getattr(self, attr) 47 | if isinstance(value, list): 48 | result[attr] = list(map( 49 | lambda x: x.to_dict() if hasattr(x, "to_dict") else x, 50 | value 51 | )) 52 | elif hasattr(value, "to_dict"): 53 | result[attr] = value.to_dict() 54 | elif isinstance(value, dict): 55 | result[attr] = dict(map( 56 | lambda item: (item[0], item[1].to_dict()) 57 | if hasattr(item[1], "to_dict") else item, 58 | value.items() 59 | )) 60 | else: 61 | result[attr] = value 62 | 63 | return result 64 | 65 | def to_str(self): 66 | """Return the string representation of the model.""" 67 | return pprint.pformat(self.to_dict()) 68 | 69 | def __repr__(self): 70 | """For `print` and `pprint`.""" 71 | return self.to_str() 72 | 73 | def __eq__(self, other): 74 | """Return true if both objects are equal.""" 75 | if not isinstance(other, ViewProperties): 76 | return False 77 | 78 | return self.__dict__ == other.__dict__ 79 | 80 | def __ne__(self, other): 81 | """Return true if both objects are not equal.""" 82 | return not self == other 83 | -------------------------------------------------------------------------------- /influxdb_client/domain/write_precision.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | InfluxDB OSS API Service. 5 | 6 | The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 7 | 8 | OpenAPI spec version: 2.0.0 9 | Generated by: https://openapi-generator.tech 10 | """ 11 | 12 | 13 | import pprint 14 | import re # noqa: F401 15 | 16 | 17 | class WritePrecision(object): 18 | """NOTE: This class is auto generated by OpenAPI Generator. 19 | 20 | Ref: https://openapi-generator.tech 21 | 22 | Do not edit the class manually. 23 | """ 24 | 25 | """ 26 | allowed enum values 27 | """ 28 | MS = "ms" 29 | S = "s" 30 | US = "us" 31 | NS = "ns" 32 | 33 | """ 34 | Attributes: 35 | openapi_types (dict): The key is attribute name 36 | and the value is attribute type. 37 | attribute_map (dict): The key is attribute name 38 | and the value is json key in definition. 
39 | """ 40 | openapi_types = { 41 | } 42 | 43 | attribute_map = { 44 | } 45 | 46 | def __init__(self): # noqa: E501,D401,D403 47 | """WritePrecision - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None 48 | 49 | def to_dict(self): 50 | """Return the model properties as a dict.""" 51 | result = {} 52 | 53 | for attr, _ in self.openapi_types.items(): 54 | value = getattr(self, attr) 55 | if isinstance(value, list): 56 | result[attr] = list(map( 57 | lambda x: x.to_dict() if hasattr(x, "to_dict") else x, 58 | value 59 | )) 60 | elif hasattr(value, "to_dict"): 61 | result[attr] = value.to_dict() 62 | elif isinstance(value, dict): 63 | result[attr] = dict(map( 64 | lambda item: (item[0], item[1].to_dict()) 65 | if hasattr(item[1], "to_dict") else item, 66 | value.items() 67 | )) 68 | else: 69 | result[attr] = value 70 | 71 | return result 72 | 73 | def to_str(self): 74 | """Return the string representation of the model.""" 75 | return pprint.pformat(self.to_dict()) 76 | 77 | def __repr__(self): 78 | """For `print` and `pprint`.""" 79 | return self.to_str() 80 | 81 | def __eq__(self, other): 82 | """Return true if both objects are equal.""" 83 | if not isinstance(other, WritePrecision): 84 | return False 85 | 86 | return self.__dict__ == other.__dict__ 87 | 88 | def __ne__(self, other): 89 | """Return true if both objects are not equal.""" 90 | return not self == other 91 | -------------------------------------------------------------------------------- /influxdb_client/domain/xy_geom.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | InfluxDB OSS API Service. 5 | 6 | The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 7 | 8 | OpenAPI spec version: 2.0.0 9 | Generated by: https://openapi-generator.tech 10 | """ 11 | 12 | 13 | import pprint 14 | import re # noqa: F401 15 | 16 | 17 | class XYGeom(object): 18 | """NOTE: This class is auto generated by OpenAPI Generator. 19 | 20 | Ref: https://openapi-generator.tech 21 | 22 | Do not edit the class manually. 23 | """ 24 | 25 | """ 26 | allowed enum values 27 | """ 28 | LINE = "line" 29 | STEP = "step" 30 | STACKED = "stacked" 31 | BAR = "bar" 32 | MONOTONEX = "monotoneX" 33 | STEPBEFORE = "stepBefore" 34 | STEPAFTER = "stepAfter" 35 | 36 | """ 37 | Attributes: 38 | openapi_types (dict): The key is attribute name 39 | and the value is attribute type. 40 | attribute_map (dict): The key is attribute name 41 | and the value is json key in definition. 
42 | """ 43 | openapi_types = { 44 | } 45 | 46 | attribute_map = { 47 | } 48 | 49 | def __init__(self): # noqa: E501,D401,D403 50 | """XYGeom - a model defined in OpenAPI.""" # noqa: E501 self.discriminator = None 51 | 52 | def to_dict(self): 53 | """Return the model properties as a dict.""" 54 | result = {} 55 | 56 | for attr, _ in self.openapi_types.items(): 57 | value = getattr(self, attr) 58 | if isinstance(value, list): 59 | result[attr] = list(map( 60 | lambda x: x.to_dict() if hasattr(x, "to_dict") else x, 61 | value 62 | )) 63 | elif hasattr(value, "to_dict"): 64 | result[attr] = value.to_dict() 65 | elif isinstance(value, dict): 66 | result[attr] = dict(map( 67 | lambda item: (item[0], item[1].to_dict()) 68 | if hasattr(item[1], "to_dict") else item, 69 | value.items() 70 | )) 71 | else: 72 | result[attr] = value 73 | 74 | return result 75 | 76 | def to_str(self): 77 | """Return the string representation of the model.""" 78 | return pprint.pformat(self.to_dict()) 79 | 80 | def __repr__(self): 81 | """For `print` and `pprint`.""" 82 | return self.to_str() 83 | 84 | def __eq__(self, other): 85 | """Return true if both objects are equal.""" 86 | if not isinstance(other, XYGeom): 87 | return False 88 | 89 | return self.__dict__ == other.__dict__ 90 | 91 | def __ne__(self, other): 92 | """Return true if both objects are not equal.""" 93 | return not self == other 94 | -------------------------------------------------------------------------------- /influxdb_client/extras.py: -------------------------------------------------------------------------------- 1 | """Extras to selectively import Pandas or NumPy.""" 2 | 3 | try: 4 | import pandas as pd 5 | except ModuleNotFoundError as err: 6 | raise ImportError(f"`query_data_frame` requires Pandas which couldn't be imported due: {err}") 7 | 8 | try: 9 | import numpy as np 10 | except ModuleNotFoundError as err: 11 | raise ImportError(f"`data_frame` requires numpy which couldn't be imported due: {err}") 12 | 13 | __all__ = ['pd', 'np'] 14 | -------------------------------------------------------------------------------- /influxdb_client/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/influxdata/influxdb-client-python/feb97eef067013881e798b322f90a83e27d07366/influxdb_client/py.typed -------------------------------------------------------------------------------- /influxdb_client/rest.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | """ 4 | InfluxDB OSS API Service. 5 | 6 | The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501 7 | 8 | OpenAPI spec version: 2.0.0 9 | Generated by: https://openapi-generator.tech 10 | """ 11 | 12 | from __future__ import absolute_import 13 | 14 | import logging 15 | from typing import Dict 16 | from urllib3 import HTTPResponse 17 | from influxdb_client.client.exceptions import InfluxDBError 18 | from influxdb_client.configuration import Configuration 19 | 20 | _UTF_8_encoding = 'utf-8' 21 | 22 | 23 | class ApiException(InfluxDBError): 24 | """NOTE: This class is auto generated by OpenAPI Generator. 25 | 26 | Ref: https://openapi-generator.tech 27 | Do not edit the class manually. 
28 | """ 29 | 30 | def __init__(self, status=None, reason=None, http_resp=None): 31 | """Initialize with HTTP response.""" 32 | super().__init__(response=http_resp) 33 | if http_resp: 34 | self.status = http_resp.status 35 | self.reason = http_resp.reason 36 | self.body = http_resp.data 37 | if isinstance(http_resp, HTTPResponse): # response is HTTPResponse 38 | self.headers = http_resp.headers 39 | else: # response is RESTResponse 40 | self.headers = http_resp.getheaders() 41 | else: 42 | self.status = status 43 | self.reason = reason 44 | self.body = None 45 | self.headers = None 46 | 47 | def __str__(self): 48 | """Get custom error messages for exception.""" 49 | error_message = "({0})\n" \ 50 | "Reason: {1}\n".format(self.status, self.reason) 51 | if self.headers: 52 | error_message += "HTTP response headers: {0}\n".format( 53 | self.headers) 54 | 55 | if self.body: 56 | error_message += "HTTP response body: {0}\n".format(self.body) 57 | 58 | return error_message 59 | 60 | 61 | class _BaseRESTClient(object): 62 | logger = logging.getLogger('influxdb_client.client.http') 63 | 64 | @staticmethod 65 | def log_request(method: str, url: str): 66 | _BaseRESTClient.logger.debug(f">>> Request: '{method} {url}'") 67 | 68 | @staticmethod 69 | def log_response(status: str): 70 | _BaseRESTClient.logger.debug(f"<<< Response: {status}") 71 | 72 | @staticmethod 73 | def log_body(body: object, prefix: str): 74 | _BaseRESTClient.logger.debug(f"{prefix} Body: {body}") 75 | 76 | @staticmethod 77 | def log_headers(headers: Dict[str, str], prefix: str): 78 | for key, v in headers.items(): 79 | value = v 80 | if 'authorization' == key.lower(): 81 | value = '***' 82 | _BaseRESTClient.logger.debug(f"{prefix} {key}: {value}") 83 | 84 | 85 | def _requires_create_user_session(configuration: Configuration, cookie: str, resource_path: str): 86 | _unauthorized = ['/api/v2/signin', '/api/v2/signout'] 87 | return configuration.username and configuration.password and not cookie and resource_path not in _unauthorized 88 | 89 | 90 | def _requires_expire_user_session(configuration: Configuration, cookie: str): 91 | return configuration.username and configuration.password and cookie 92 | -------------------------------------------------------------------------------- /influxdb_client/service/__init__.py: -------------------------------------------------------------------------------- 1 | # flake8: noqa 2 | 3 | """ 4 | InfluxDB OSS API Service. 5 | 6 | The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. 
# noqa: E501 7 | 8 | OpenAPI spec version: 2.0.0 9 | Generated by: https://openapi-generator.tech 10 | """ 11 | 12 | 13 | from __future__ import absolute_import 14 | 15 | # import apis into api package 16 | from influxdb_client.service.authorizations_service import AuthorizationsService 17 | from influxdb_client.service.backup_service import BackupService 18 | from influxdb_client.service.bucket_schemas_service import BucketSchemasService 19 | from influxdb_client.service.buckets_service import BucketsService 20 | from influxdb_client.service.cells_service import CellsService 21 | from influxdb_client.service.checks_service import ChecksService 22 | from influxdb_client.service.config_service import ConfigService 23 | from influxdb_client.service.dbr_ps_service import DBRPsService 24 | from influxdb_client.service.dashboards_service import DashboardsService 25 | from influxdb_client.service.delete_service import DeleteService 26 | from influxdb_client.service.health_service import HealthService 27 | from influxdb_client.service.invokable_scripts_service import InvokableScriptsService 28 | from influxdb_client.service.labels_service import LabelsService 29 | from influxdb_client.service.legacy_authorizations_service import LegacyAuthorizationsService 30 | from influxdb_client.service.metrics_service import MetricsService 31 | from influxdb_client.service.notification_endpoints_service import NotificationEndpointsService 32 | from influxdb_client.service.notification_rules_service import NotificationRulesService 33 | from influxdb_client.service.organizations_service import OrganizationsService 34 | from influxdb_client.service.ping_service import PingService 35 | from influxdb_client.service.query_service import QueryService 36 | from influxdb_client.service.ready_service import ReadyService 37 | from influxdb_client.service.remote_connections_service import RemoteConnectionsService 38 | from influxdb_client.service.replications_service import ReplicationsService 39 | from influxdb_client.service.resources_service import ResourcesService 40 | from influxdb_client.service.restore_service import RestoreService 41 | from influxdb_client.service.routes_service import RoutesService 42 | from influxdb_client.service.rules_service import RulesService 43 | from influxdb_client.service.scraper_targets_service import ScraperTargetsService 44 | from influxdb_client.service.secrets_service import SecretsService 45 | from influxdb_client.service.setup_service import SetupService 46 | from influxdb_client.service.signin_service import SigninService 47 | from influxdb_client.service.signout_service import SignoutService 48 | from influxdb_client.service.sources_service import SourcesService 49 | from influxdb_client.service.tasks_service import TasksService 50 | from influxdb_client.service.telegraf_plugins_service import TelegrafPluginsService 51 | from influxdb_client.service.telegrafs_service import TelegrafsService 52 | from influxdb_client.service.templates_service import TemplatesService 53 | from influxdb_client.service.users_service import UsersService 54 | from influxdb_client.service.variables_service import VariablesService 55 | from influxdb_client.service.views_service import ViewsService 56 | from influxdb_client.service.write_service import WriteService 57 | -------------------------------------------------------------------------------- /influxdb_client/service/_base_service.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | # noinspection 
PyMethodMayBeStatic 4 | class _BaseService(object): 5 | 6 | def __init__(self, api_client=None): 7 | """Init common services operation.""" 8 | if api_client is None: 9 | raise ValueError("Invalid value for `api_client`, must be defined.") 10 | self.api_client = api_client 11 | self._build_type = None 12 | 13 | def _check_operation_params(self, operation_id, supported_params, local_params): 14 | supported_params.append('async_req') 15 | supported_params.append('_return_http_data_only') 16 | supported_params.append('_preload_content') 17 | supported_params.append('_request_timeout') 18 | supported_params.append('urlopen_kw') 19 | for key, val in local_params['kwargs'].items(): 20 | if key not in supported_params: 21 | raise TypeError( 22 | f"Got an unexpected keyword argument '{key}'" 23 | f" to method {operation_id}" 24 | ) 25 | local_params[key] = val 26 | del local_params['kwargs'] 27 | 28 | def _is_cloud_instance(self) -> bool: 29 | if not self._build_type: 30 | self._build_type = self.build_type() 31 | return 'cloud' in self._build_type.lower() 32 | 33 | async def _is_cloud_instance_async(self) -> bool: 34 | if not self._build_type: 35 | self._build_type = await self.build_type_async() 36 | return 'cloud' in self._build_type.lower() 37 | 38 | def build_type(self) -> str: 39 | """ 40 | Return the build type of the connected InfluxDB Server. 41 | 42 | :return: The type of InfluxDB build. 43 | """ 44 | from influxdb_client import PingService 45 | ping_service = PingService(self.api_client) 46 | 47 | response = ping_service.get_ping_with_http_info(_return_http_data_only=False) 48 | return self.response_header(response, header_name='X-Influxdb-Build') 49 | 50 | async def build_type_async(self) -> str: 51 | """ 52 | Return the build type of the connected InfluxDB Server. 53 | 54 | :return: The type of InfluxDB build. 
55 | """ 56 | from influxdb_client import PingService 57 | ping_service = PingService(self.api_client) 58 | 59 | response = await ping_service.get_ping_async(_return_http_data_only=False) 60 | return self.response_header(response, header_name='X-Influxdb-Build') 61 | 62 | def response_header(self, response, header_name='X-Influxdb-Version') -> str: 63 | if response is not None and len(response) >= 3: 64 | if header_name in response[2]: 65 | return response[2][header_name] 66 | 67 | return "unknown" 68 | -------------------------------------------------------------------------------- /influxdb_client/version.py: -------------------------------------------------------------------------------- 1 | """Version of the Client that is used in User-Agent header.""" 2 | 3 | VERSION = '1.50.0dev0' 4 | -------------------------------------------------------------------------------- /notebooks/stock_predictions_import_data.py: -------------------------------------------------------------------------------- 1 | """ 2 | Import VIX - CBOE Volatility Index - from "vix-daily.csv" file into InfluxDB 2.0 3 | 4 | https://datahub.io/core/finance-vix#data 5 | """ 6 | from collections import OrderedDict 7 | from csv import DictReader 8 | from datetime import timezone 9 | 10 | import ciso8601 11 | import requests 12 | import reactivex as rx 13 | from reactivex import operators as ops 14 | 15 | from influxdb_client import InfluxDBClient, WriteOptions 16 | from influxdb_client.client.write.point import EPOCH 17 | 18 | _progress = 0 19 | 20 | 21 | def parse_row(row: OrderedDict): 22 | """Parse row of CSV file into LineProtocol with structure: 23 | 24 | CSV format: 25 | date,symbol,open,close,low,high,volume 26 | 2016-01-05,WLTW,123.43,125.839996,122.309998,126.25,2163600.0 27 | 2016-01-06,WLTW,125.239998,119.980003,119.940002,125.540001,2386400.0 28 | 2016-01-07,WLTW,116.379997,114.949997,114.93,119.739998,2489500.0 29 | 2016-01-08,WLTW,115.480003,116.620003,113.5,117.440002,2006300.0 30 | 2016-01-11,WLTW,117.010002,114.970001,114.089996,117.330002,1408600.0 31 | 2016-01-12,WLTW,115.510002,115.550003,114.5,116.059998,1098000.0 32 | 2016-01-13,WLTW,116.459999,112.849998,112.589996,117.07,949600.0 33 | ... 
34 | 35 | :param row: the row of CSV file 36 | :return: Parsed csv row to LineProtocol 37 | """ 38 | global _progress 39 | _progress += 1 40 | 41 | if _progress % 10000 == 0: 42 | print(_progress) 43 | 44 | time = (ciso8601.parse_datetime(row["date"]).replace(tzinfo=timezone.utc) - EPOCH).total_seconds() * 1e9 45 | 46 | return f'financial-analysis,symbol={row["symbol"]} ' \ 47 | f'close={row["close"]},high={row["high"]},low={row["low"]},open={row["open"]} ' \ 48 | f'{int(time)}' 49 | 50 | 51 | def main(): 52 | parse_row.progress = 0 53 | 54 | url = "https://github.com/influxdata/influxdb-client-python/wiki/data/stock-prices-example.csv" 55 | response = requests.get(url, stream=True) 56 | data = rx \ 57 | .from_iterable(DictReader(response.iter_lines(decode_unicode=True))) \ 58 | .pipe(ops.map(lambda row: parse_row(row))) 59 | 60 | client = InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org", debug=False) 61 | write_api = client.write_api(write_options=WriteOptions(batch_size=50_000, flush_interval=10_000)) 62 | 63 | write_api.write(bucket="my-bucket", record=data) 64 | write_api.close() 65 | 66 | query = ''' 67 | from(bucket:"my-bucket") 68 | |> range(start: 0, stop: now()) 69 | |> filter(fn: (r) => r._measurement == "financial-analysis") 70 | |> filter(fn: (r) => r.symbol == "AAPL") 71 | |> filter(fn: (r) => r._field == "close") 72 | |> drop(columns: ["_start", "_stop", "table", "_field","_measurement"]) 73 | ''' 74 | 75 | result = client.query_api().query_data_frame(query=query) 76 | print(result.head(100)) 77 | 78 | """ 79 | Close client 80 | """ 81 | client.close() 82 | # %% 83 | 84 | 85 | if __name__ == '__main__': 86 | main() 87 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=21.0.0"] 3 | build-backend = "setuptools.build_meta" -------------------------------------------------------------------------------- /scripts/ci-test.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | 5 | ENABLED_CISO_8601="${ENABLED_CISO_8601:-true}" 6 | 7 | # 8 | # Install requirements 9 | # 10 | python --version 11 | pip install . 
--user 12 | pip install .\[extra\] --user 13 | pip install .\[test\] --user 14 | pip install .\[async\] --user 15 | if [ "$ENABLED_CISO_8601" = true ] ; then 16 | echo "ciso8601 is enabled" 17 | pip install .\[ciso\] --user 18 | else 19 | echo "ciso8601 is disabled" 20 | fi 21 | pip install pytest pytest-cov --user 22 | 23 | # 24 | # Prepare for test results 25 | # 26 | mkdir test-reports || true 27 | 28 | # 29 | # Test 30 | # 31 | pytest tests --junitxml=test-reports/junit.xml --cov=./ --cov-report xml:coverage.xml 32 | 33 | -------------------------------------------------------------------------------- /scripts/generate-sources.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # 4 | # How to run script from ROOT path: 5 | # docker run --rm -it -v "${PWD}":/code/client -v ~/.m2:/root/.m2 -w /code maven:3-openjdk-8 /code/client/scripts/generate-sources.sh 6 | # 7 | 8 | # 9 | # Download customized generator 10 | # 11 | git clone --single-branch --branch master https://github.com/bonitoo-io/influxdb-clients-apigen "/code/influxdb-clients-apigen" 12 | mkdir -p /code/influxdb-clients-apigen/build/ 13 | ln -s /code/client /code/influxdb-clients-apigen/build/influxdb-client-python 14 | cd /code/influxdb-clients-apigen/ || exit 15 | 16 | # 17 | # Download APIs contracts 18 | # 19 | wget https://raw.githubusercontent.com/influxdata/openapi/master/contracts/oss.yml -O "/code/influxdb-clients-apigen/oss.yml" 20 | wget https://raw.githubusercontent.com/influxdata/openapi/master/contracts/cloud.yml -O "/code/influxdb-clients-apigen/cloud.yml" 21 | wget https://raw.githubusercontent.com/influxdata/openapi/master/contracts/invocable-scripts.yml -O "/code/influxdb-clients-apigen/invocable-scripts.yml" 22 | 23 | # 24 | # Build generator 25 | # 26 | mvn -DskipTests -f /code/influxdb-clients-apigen/openapi-generator/pom.xml clean install 27 | 28 | # 29 | # Prepare customized contract 30 | # 31 | mvn -f /code/influxdb-clients-apigen/openapi-generator/pom.xml compile exec:java -Dexec.mainClass="com.influxdb.AppendCloudDefinitions" -Dexec.args="oss.yml cloud.yml" 32 | mvn -f /code/influxdb-clients-apigen/openapi-generator/pom.xml compile exec:java -Dexec.mainClass="com.influxdb.MergeContracts" -Dexec.args="oss.yml invocable-scripts.yml" 33 | 34 | # 35 | # Generate sources 36 | # 37 | ./generate-python.sh 38 | -------------------------------------------------------------------------------- /scripts/influxdb-onboarding.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # The MIT License 4 | # 5 | # Permission is hereby granted, free of charge, to any person obtaining a copy 6 | # of this software and associated documentation files (the "Software"), to deal 7 | # in the Software without restriction, including without limitation the rights 8 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | # copies of the Software, and to permit persons to whom the Software is 10 | # furnished to do so, subject to the following conditions: 11 | # 12 | # The above copyright notice and this permission notice shall be included in 13 | # all copies or substantial portions of the Software. 14 | # 15 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | # THE SOFTWARE. 22 | # 23 | 24 | set -e 25 | 26 | echo "Wait to start InfluxDB 2.0" 27 | wget -S --spider --tries=20 --retry-connrefused --waitretry=5 http://localhost:8086/metrics 28 | 29 | echo 30 | echo "Post onBoarding request, to setup initial user (my-user@my-password), org (my-org) and bucketSetup (my-bucket)" 31 | echo 32 | curl -i -X POST http://localhost:8086/api/v2/setup -H 'accept: application/json' \ 33 | -d '{ 34 | "username": "my-user", 35 | "password": "my-password", 36 | "org": "my-org", 37 | "bucket": "my-bucket", 38 | "token": "my-token" 39 | }' 40 | -------------------------------------------------------------------------------- /scripts/influxdb-restart.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # The MIT License 4 | # 5 | # Permission is hereby granted, free of charge, to any person obtaining a copy 6 | # of this software and associated documentation files (the "Software"), to deal 7 | # in the Software without restriction, including without limitation the rights 8 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | # copies of the Software, and to permit persons to whom the Software is 10 | # furnished to do so, subject to the following conditions: 11 | # 12 | # The above copyright notice and this permission notice shall be included in 13 | # all copies or substantial portions of the Software. 14 | # 15 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | # THE SOFTWARE. 22 | # 23 | 24 | set -e 25 | 26 | DEFAULT_INFLUXDB_V2_VERSION="latest" 27 | INFLUXDB_V2_VERSION="${INFLUXDB_V2_VERSION:-$DEFAULT_INFLUXDB_V2_VERSION}" 28 | INFLUXDB_V2_IMAGE=influxdb:${INFLUXDB_V2_VERSION} 29 | 30 | SCRIPT_PATH="$( cd "$(dirname "$0")" ; pwd -P )" 31 | 32 | docker kill influxdb_v2 || true 33 | docker rm influxdb_v2 || true 34 | docker network rm influx_network || true 35 | docker network create -d bridge influx_network --subnet 192.168.0.0/24 --gateway 192.168.0.1 36 | 37 | # 38 | # InfluxDB 2.0 39 | # 40 | echo 41 | echo "Restarting InfluxDB 2.0 [${INFLUXDB_V2_IMAGE}] ... 
" 42 | echo 43 | 44 | docker pull ${INFLUXDB_V2_IMAGE} || true 45 | docker run \ 46 | --detach \ 47 | --env INFLUXD_HTTP_BIND_ADDRESS=:8086 \ 48 | --name influxdb_v2 \ 49 | --network influx_network \ 50 | --publish 8086:8086 \ 51 | ${INFLUXDB_V2_IMAGE} 52 | 53 | echo "Wait to start InfluxDB 2.0" 54 | wget -S --spider --tries=20 --retry-connrefused --waitretry=5 http://localhost:8086/metrics 55 | 56 | # 57 | # Post onBoarding request to InfluxDB 2 58 | # 59 | "${SCRIPT_PATH}"/influxdb-onboarding.sh 60 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from pathlib import Path 4 | 5 | from setuptools import setup, find_packages # noqa: H301 6 | 7 | requires = [ 8 | 'reactivex >= 4.0.4', 9 | 'certifi >= 14.05.14', 10 | 'python_dateutil >= 2.5.3', 11 | 'urllib3 >= 1.26.0' 12 | ] 13 | 14 | test_requires = [ 15 | 'flake8>=5.0.3', 16 | 'coverage>=4.0.3', 17 | 'nose>=1.3.7', 18 | 'pluggy>=0.3.1', 19 | 'py>=1.4.31', 20 | 'randomize>=0.13', 21 | 'pytest>=5.0.0', 22 | 'pytest-cov>=3.0.0', 23 | 'pytest-timeout>=2.1.0', 24 | 'httpretty==1.0.5', 25 | 'psutil>=5.6.3', 26 | 'aioresponses>=0.7.3', 27 | 'sphinx==1.8.5', 28 | 'sphinx_rtd_theme', 29 | 'jinja2>=3.1.4' 30 | ] 31 | 32 | extra_requires = [ 33 | 'pandas>=1.0.0', 34 | 'numpy' 35 | ] 36 | 37 | ciso_requires = [ 38 | 'ciso8601>=2.1.1' 39 | ] 40 | 41 | async_requires = [ 42 | 'aiohttp>=3.8.1', 43 | 'aiocsv>=1.2.2' 44 | ] 45 | 46 | this_directory = Path(__file__).parent 47 | long_description = (this_directory / "README.md").read_text() 48 | 49 | NAME = "influxdb_client" 50 | 51 | meta = {} 52 | with open(Path(__file__).parent / 'influxdb_client' / 'version.py') as f: 53 | exec('\n'.join(line for line in f if line.startswith('VERSION')), meta) 54 | 55 | setup( 56 | name=NAME, 57 | version=meta['VERSION'], 58 | description="InfluxDB 2.0 Python client library", 59 | long_description=long_description, 60 | url="https://github.com/influxdata/influxdb-client-python", 61 | keywords=["InfluxDB", "InfluxDB Python Client"], 62 | tests_require=test_requires, 63 | install_requires=requires, 64 | extras_require={'extra': extra_requires, 'ciso': ciso_requires, 'async': async_requires, 'test': test_requires}, 65 | long_description_content_type="text/markdown", 66 | packages=find_packages(exclude=('tests*',)), 67 | package_data={'influxdb_client': ['py.typed']}, 68 | test_suite='tests', 69 | python_requires='>=3.7', 70 | include_package_data=True, 71 | classifiers=[ 72 | 'Development Status :: 4 - Beta', 73 | 'Intended Audience :: Developers', 74 | 'License :: OSI Approved :: MIT License', 75 | 'Programming Language :: Python :: 3.7', 76 | 'Programming Language :: Python :: 3.8', 77 | 'Programming Language :: Python :: 3.9', 78 | 'Programming Language :: Python :: 3.10', 79 | 'Programming Language :: Python :: 3.11', 80 | 'Programming Language :: Python :: 3.12', 81 | 'Topic :: Database', 82 | 'Topic :: Software Development :: Libraries', 83 | 'Topic :: Software Development :: Libraries :: Python Modules', 84 | ]) 85 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | # flake8: noqa 2 | 3 | """ 4 | InfluxDB OSS API Service. 5 | 6 | The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. 
# noqa: E501 7 | 8 | OpenAPI spec version: 2.0.0 9 | Generated by: https://openapi-generator.tech 10 | """ 11 | 12 | 13 | from __future__ import absolute_import 14 | 15 | # import apis into api package 16 | from influxdb_client.service.authorizations_service import AuthorizationsService 17 | from influxdb_client.service.backup_service import BackupService 18 | from influxdb_client.service.bucket_schemas_service import BucketSchemasService 19 | from influxdb_client.service.buckets_service import BucketsService 20 | from influxdb_client.service.cells_service import CellsService 21 | from influxdb_client.service.checks_service import ChecksService 22 | from influxdb_client.service.config_service import ConfigService 23 | from influxdb_client.service.dbr_ps_service import DBRPsService 24 | from influxdb_client.service.dashboards_service import DashboardsService 25 | from influxdb_client.service.delete_service import DeleteService 26 | from influxdb_client.service.health_service import HealthService 27 | from influxdb_client.service.invokable_scripts_service import InvokableScriptsService 28 | from influxdb_client.service.labels_service import LabelsService 29 | from influxdb_client.service.legacy_authorizations_service import LegacyAuthorizationsService 30 | from influxdb_client.service.metrics_service import MetricsService 31 | from influxdb_client.service.notification_endpoints_service import NotificationEndpointsService 32 | from influxdb_client.service.notification_rules_service import NotificationRulesService 33 | from influxdb_client.service.organizations_service import OrganizationsService 34 | from influxdb_client.service.ping_service import PingService 35 | from influxdb_client.service.query_service import QueryService 36 | from influxdb_client.service.ready_service import ReadyService 37 | from influxdb_client.service.remote_connections_service import RemoteConnectionsService 38 | from influxdb_client.service.replications_service import ReplicationsService 39 | from influxdb_client.service.resources_service import ResourcesService 40 | from influxdb_client.service.restore_service import RestoreService 41 | from influxdb_client.service.routes_service import RoutesService 42 | from influxdb_client.service.rules_service import RulesService 43 | from influxdb_client.service.scraper_targets_service import ScraperTargetsService 44 | from influxdb_client.service.secrets_service import SecretsService 45 | from influxdb_client.service.setup_service import SetupService 46 | from influxdb_client.service.signin_service import SigninService 47 | from influxdb_client.service.signout_service import SignoutService 48 | from influxdb_client.service.sources_service import SourcesService 49 | from influxdb_client.service.tasks_service import TasksService 50 | from influxdb_client.service.telegraf_plugins_service import TelegrafPluginsService 51 | from influxdb_client.service.telegrafs_service import TelegrafsService 52 | from influxdb_client.service.templates_service import TemplatesService 53 | from influxdb_client.service.users_service import UsersService 54 | from influxdb_client.service.variables_service import VariablesService 55 | from influxdb_client.service.views_service import ViewsService 56 | from influxdb_client.service.write_service import WriteService 57 | -------------------------------------------------------------------------------- /tests/base_test.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import os 3 | import re 4 | 
import time 5 | import unittest 6 | 7 | import influxdb_client 8 | from influxdb_client import BucketRetentionRules, Organization, InfluxDBClient 9 | 10 | current_milli_time = lambda: int(round(time.time() * 1000)) 11 | 12 | 13 | def generate_bucket_name(): 14 | return generate_name(key="bucket") 15 | 16 | 17 | def generate_name(key: str): 18 | return f"test_{key}_" + str(datetime.datetime.now().timestamp()) + "_IT" 19 | 20 | 21 | class BaseTest(unittest.TestCase): 22 | 23 | def setUp(self) -> None: 24 | self.conf = influxdb_client.configuration.Configuration() 25 | 26 | self.host = os.getenv('INFLUXDB_V2_URL', "http://localhost:8086") 27 | self.debug = False 28 | self.auth_token = os.getenv('INFLUXDB_V2_TOKEN', "my-token") 29 | self.org = os.getenv('INFLUXDB_V2_ORG', "my-org") 30 | 31 | self.client = InfluxDBClient(url=self.host, token=self.auth_token, debug=self.debug, org=self.org) 32 | self.api_client = self.client.api_client 33 | 34 | self.query_api = self.client.query_api() 35 | self.buckets_api = self.client.buckets_api() 36 | self.users_api = self.client.users_api() 37 | self.organizations_api = self.client.organizations_api() 38 | self.authorizations_api = self.client.authorizations_api() 39 | self.labels_api = self.client.labels_api() 40 | 41 | self.my_organization = self.find_my_org() 42 | 43 | def tearDown(self) -> None: 44 | self.client.close() 45 | 46 | def create_test_bucket(self): 47 | bucket_name = generate_bucket_name() 48 | bucket = self.buckets_api.create_bucket(bucket_name=bucket_name, org=self.my_organization, 49 | description=bucket_name + "description") 50 | return bucket 51 | 52 | def delete_test_bucket(self, bucket): 53 | return self.buckets_api.delete_bucket(bucket) 54 | 55 | def find_my_org(self) -> Organization: 56 | return self.client.organizations_api().find_organizations(org=self.org)[0] 57 | 58 | @staticmethod 59 | def log(args): 60 | print(">>>", args) 61 | 62 | @staticmethod 63 | def generate_name(prefix): 64 | assert prefix != "" or prefix is not None 65 | return prefix + str(datetime.datetime.now().timestamp()) + "-IT" 66 | 67 | @classmethod 68 | def retention_rule(cls) -> BucketRetentionRules: 69 | return BucketRetentionRules(type='expire', every_seconds=3600) 70 | 71 | def assertEqualIgnoringWhitespace(self, first, second, msg=None) -> None: 72 | whitespace_pattern = re.compile(r"\s+") 73 | self.assertEqual(whitespace_pattern.sub("", first), whitespace_pattern.sub("", second), msg=msg) 74 | -------------------------------------------------------------------------------- /tests/config-disabled-ssl.ini: -------------------------------------------------------------------------------- 1 | [influx2] 2 | url=http://localhost:8086 3 | org=my-org 4 | token=my-token 5 | timeout=6000 6 | verify_ssl=False 7 | 8 | [tags] 9 | id = 132-987-655 10 | customer = California Miner 11 | data_center = ${env.data_center} -------------------------------------------------------------------------------- /tests/config-enabled-proxy.ini: -------------------------------------------------------------------------------- 1 | [influx2] 2 | url=http://localhost:8086 3 | org=my-org 4 | token=my-token 5 | timeout=6000 6 | connection_pool_maxsize=55 7 | auth_basic=false 8 | profilers=query, operator 9 | proxy=http://proxy.domain.org:8080 10 | 11 | [tags] 12 | id = 132-987-655 13 | customer = California Miner 14 | data_center = ${env.data_center} -------------------------------------------------------------------------------- /tests/config-ssl-ca-cert.ini: 
-------------------------------------------------------------------------------- 1 | [influx2] 2 | url=http://localhost:8086 3 | org=my-org 4 | token=my-token 5 | timeout=6000 6 | ssl_ca_cert=/path/to/my/cert 7 | 8 | [tags] 9 | id = 132-987-655 10 | customer = California Miner 11 | data_center = ${env.data_center} -------------------------------------------------------------------------------- /tests/config-ssl-mtls-certs.ini: -------------------------------------------------------------------------------- 1 | [influx2] 2 | url=http://localhost:8086 3 | org=my-org 4 | token=my-token 5 | timeout=6000 6 | ssl_ca_cert=/path/to/my/cert 7 | cert_file=/path/to/my/cert 8 | cert_key_file=/path/to/my/key 9 | cert_key_password=test 10 | 11 | [tags] 12 | id = 132-987-655 13 | customer = California Miner 14 | data_center = ${env.data_center} -------------------------------------------------------------------------------- /tests/config.ini: -------------------------------------------------------------------------------- 1 | [influx2] 2 | url=http://localhost:8086 3 | org=my-org 4 | token=my-token 5 | timeout=6000 6 | connection_pool_maxsize=55 7 | auth_basic=false 8 | profilers=query, operator 9 | 10 | [tags] 11 | id = 132-987-655 12 | customer = California Miner 13 | data_center = ${env.data_center} -------------------------------------------------------------------------------- /tests/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "url": "http://localhost:8086", 3 | "token": "my-token", 4 | "org": "my-org", 5 | "active": true, 6 | "timeout": 6000, 7 | "connection_pool_maxsize": 55, 8 | "auth_basic": false, 9 | "profilers": "query, operator", 10 | "tags": { 11 | "id": "132-987-655", 12 | "customer": "California Miner", 13 | "data_center": "${env.data_center}" 14 | } 15 | } -------------------------------------------------------------------------------- /tests/config.toml: -------------------------------------------------------------------------------- 1 | [influx2] 2 | url = "http://localhost:8086" 3 | token = "my-token" 4 | org = "my-org" 5 | active = true 6 | timeout = 6000 7 | connection_pool_maxsize = 55 8 | auth_basic = False 9 | profilers = "query, operator" 10 | 11 | [tags] 12 | id = "132-987-655" 13 | customer = "California Miner" 14 | data_center = "${env.data_center}" 15 | -------------------------------------------------------------------------------- /tests/config2.ini: -------------------------------------------------------------------------------- 1 | [test_name] 2 | url=http://localhost:8086 3 | org=my-org 4 | token=my-token 5 | timeout=6000 6 | connection_pool_maxsize=55 7 | auth_basic=false 8 | profilers=query, operator 9 | 10 | [tags] 11 | id = 132-987-655 12 | customer = California Miner 13 | data_center = ${env.data_center} -------------------------------------------------------------------------------- /tests/server.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIDsDCCApigAwIBAgIJAJUMvag5FRxuMA0GCSqGSIb3DQEBCwUAMHUxCzAJBgNV 3 | BAYTAlVTMRAwDgYDVQQIDAdOZXdZb3JrMRAwDgYDVQQHDAdOZXdZb3JrMR0wGwYD 4 | VQQKDBRJbmZsdXhEQlB5dGhvbkNsaWVudDEPMA0GA1UECwwGQ2xpZW50MRIwEAYD 5 | VQQDDAlsb2NhbGhvc3QwHhcNMjMwNjIwMDYzOTAzWhcNMzMwNjE3MDYzOTAzWjB1 6 | MQswCQYDVQQGEwJVUzEQMA4GA1UECAwHTmV3WW9yazEQMA4GA1UEBwwHTmV3WW9y 7 | azEdMBsGA1UECgwUSW5mbHV4REJQeXRob25DbGllbnQxDzANBgNVBAsMBkNsaWVu 8 | dDESMBAGA1UEAwwJbG9jYWxob3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB 9 | 
CgKCAQEAp4ajJzE8RsHF9M7xO0CtuUD2Wm+h3jYpybfjgLpg0ZfEiAQxXul293f7 10 | 2EUuxUHYgvTNfxpLK9OIqhUzmhdGaDsnldC2p90CJ71iDnmd9W3eGkT6WZ+zDMva 11 | Cm93s73P4AAjbahD6509kh6DKx5L/iAve0giqYmCA00lJRX+y3QgkwK7mQ+0WGX8 12 | guJqec+o/UV05HAyCRBQrHOzYwTvLUo8KHnXQ2fHyKe0qzzcFDbBJ9rLC8EJZbjg 13 | fv4EoaUd1HaVAO49NoHZyretcWQh3Ec2rIAp946aE8+ihlvE/iyotSNnnil7o+Ki 14 | NM9zytpWJHwy+zcm/G/0Zl1bssl9vQIDAQABo0MwQTAOBgNVHQ8BAf8EBAMCA4gw 15 | EwYDVR0lBAwwCgYIKwYBBQUHAwEwGgYDVR0RBBMwEYIJbG9jYWxob3N0hwR/AAAB 16 | MA0GCSqGSIb3DQEBCwUAA4IBAQA5V+maIkFDI8IRELl8v0TxiGyYLopNVHabbI/+ 17 | o52dB8jt08b3T0T0RpPdoIM3B0bKq8nELUyFdH3sJwaOWx3N39iSfU+LRitUq+Co 18 | /Ii02kewgoWhh4ZD6E7Esme+GXeJYK2j40d+zs1r40o02LW04YP6tUucW27JaYTX 19 | CAQRPwQJGi+b7S1T/BG2chuFwixKnjfuQWXJQXxma24UxLCTzD7PkEsGppUIrqh6 20 | e/H6vazFAshdcOwB+hkmmVdTS4swJ5PKhlqIzHZll3Pe61uog2amA2rTvQ8bjXne 21 | 0iuWMZMOWX4h88y/4sf3fsvjUJiJn1DL3CmIYE2S+cquy2Su 22 | -----END CERTIFICATE----- 23 | -----BEGIN PRIVATE KEY----- 24 | MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCnhqMnMTxGwcX0 25 | zvE7QK25QPZab6HeNinJt+OAumDRl8SIBDFe6Xb3d/vYRS7FQdiC9M1/Gksr04iq 26 | FTOaF0ZoOyeV0Lan3QInvWIOeZ31bd4aRPpZn7MMy9oKb3ezvc/gACNtqEPrnT2S 27 | HoMrHkv+IC97SCKpiYIDTSUlFf7LdCCTAruZD7RYZfyC4mp5z6j9RXTkcDIJEFCs 28 | c7NjBO8tSjwoeddDZ8fIp7SrPNwUNsEn2ssLwQlluOB+/gShpR3UdpUA7j02gdnK 29 | t61xZCHcRzasgCn3jpoTz6KGW8T+LKi1I2eeKXuj4qI0z3PK2lYkfDL7Nyb8b/Rm 30 | XVuyyX29AgMBAAECggEAdisOYtLkBbUhgXGHz5fr8sTUAS7FKw1hpJxFqcPOD4nS 31 | Y28xtBVSiXTk/MSwHQw5QQaU/Zb6d0ubFv4ljAfm7hbr8U1ryjSnFIkN5c/7j+eT 32 | x0vom9d+YQRaJQpDkVHOk98FHpCWeFhN2ggT1hJk4DirxXY4nQYV42ZDUUmWlMOM 33 | ZT96mRpN6iTYbNt97DjVQHiY6Yg2Mgb3Pp0GpryLtxcp7ant/xCERkqxchJnJZrU 34 | DUWybVpoZA6qnviegc5CNej0Usp5FfvjDRdZcxsL83cinp8gTCBM6yDwxo5yp07h 35 | 1ELBjqDlL/4cmzl9Cr5dGrQC0ogumQFed07He9hggQKBgQDeMA/TQ5d96WNn64cp 36 | x/keWYARdmPkuy8mLeEBCCldUqeyEKH+MEKebrkJp++e2UZ9yHHULU0wZsjup09E 37 | iprV2GwC5sJFDFJC9sN7UhRGTENSG3ROmxxsRQ85AuxQ8r2Kkcus/Gln7NkQIew9 38 | PTYChszRYhgmvd8dbQnWJR5nyQKBgQDBBRLC++K4b5Ia7ZdPoEd1p8rUdHzgEJpu 39 | OULEw4amrZX5YCUii14G6bh86Yp1QBEPS4I33Y/WQfMTSBTWI5h4B39+bvkFyQD0 40 | lomt6T0I01Czg6325ifsOLCedRBJGyzKhcG2PP5d6orioT2tNx+CdabjkQqdnwiu 41 | FEyjr7zIVQKBgEIJ+PorEeDruI8PsznY5RN/OJB4JtK6TBZF9qWO3nSNd6h2Rx+9 42 | Cl/6uubC0nbdT5VdOW/+Ev463qgIdIgDxmefDuOB4OYS9nRO0eVajQoj6rKB1Hz0 43 | n2sSz6Z7Vu/sZifEk8wTreSkwhOIcaX2BpKTrFbjmBgTwTgGy7CpkbL5AoGAJ/dm 44 | 9Ry2yz2+4PiFNiWX5lLB+s/L85E8iMouexOJVAXpYeN4jPiSlZfbbXHmC3Uikyot 45 | YmfsGo85R6rLNbPVJDVZPtK3mW0PV5W8GFiNHMC0mxcO9GqfpZfvakAPCTQpSB3+ 46 | CEAE42lnLJn6bV1WlurJjaFAs3Z0k6b8X4BJ9KkCgYEA0cKrgKB3GidtKBeyuGKg 47 | Wdemq9T4c7R5swn/87Ud6UE0zALA8wBx7ZacQm9IOHiCSVdBGAEAzNd14Kri8DaK 48 | /5EHjZ2yI5cO108IbkTZ2TXevgjAHCvrhEtcgG0SrNNFJ6JoGiSqJ8Ue5xbj3P2y 49 | KYSFK5vRSIQ5GpR4zTXN7S4= 50 | -----END PRIVATE KEY----- 51 | -------------------------------------------------------------------------------- /tests/test_Dashboards.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | from influxdb_client import DashboardsService, CreateDashboardRequest, CellsService, \ 4 | CreateCell 5 | from tests.base_test import BaseTest 6 | 7 | 8 | class DashboardsClientTest(BaseTest): 9 | 10 | def setUp(self) -> None: 11 | super(DashboardsClientTest, self).setUp() 12 | 13 | self.dashboards_service = DashboardsService(self.client.api_client) 14 | dashboards = self.dashboards_service.get_dashboards() 15 | 16 | for dashboard in dashboards.dashboards: 17 | if dashboard.name.endswith("_IT"): 18 | print("Delete dashboard: ", dashboard.name) 19 | 
self.dashboards_service.delete_dashboards_id(dashboard.id) 20 | 21 | def test_create_dashboard_with_cell(self): 22 | unique_id = str(datetime.datetime.now().timestamp()) 23 | 24 | dashboard = self.dashboards_service.post_dashboards( 25 | create_dashboard_request=CreateDashboardRequest(org_id=self.find_my_org().id, name=f"Dashboard_{unique_id}_IT")) 26 | self.assertEqual(dashboard.name, f"Dashboard_{unique_id}_IT") 27 | 28 | cells_service = CellsService(self.client.api_client) 29 | cell = cells_service.post_dashboards_id_cells( 30 | dashboard_id=dashboard.id, create_cell=CreateCell(name=f"Cell_{unique_id}_IT", h=3, w=12)) 31 | self.assertIsNotNone(cell.id) 32 | view = cells_service.get_dashboards_id_cells_id_view(dashboard_id=dashboard.id, cell_id=cell.id) 33 | self.assertEqual(view.name, f"Cell_{unique_id}_IT") 34 | 35 | def test_get_dashboard_with_cell_with_properties(self): 36 | unique_id = str(datetime.datetime.now().timestamp()) 37 | 38 | dashboard = self.dashboards_service.post_dashboards( 39 | create_dashboard_request=CreateDashboardRequest(org_id=self.find_my_org().id, 40 | name=f"Dashboard_{unique_id}_IT")) 41 | 42 | # create cell 43 | CellsService(self.client.api_client).post_dashboards_id_cells( 44 | dashboard_id=dashboard.id, create_cell=CreateCell(name=f"Cell_{unique_id}_IT", h=3, w=12)) 45 | 46 | # retrieve dashboard 47 | dashboard = self.dashboards_service.get_dashboards_id(dashboard.id) 48 | 49 | from influxdb_client import DashboardWithViewProperties, CellWithViewProperties 50 | self.assertEqual(DashboardWithViewProperties, type(dashboard)) 51 | self.assertEqual(1, len(dashboard.cells)) 52 | self.assertEqual(CellWithViewProperties, type(dashboard.cells[0])) 53 | -------------------------------------------------------------------------------- /tests/test_DateHelper.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import unittest 4 | from datetime import datetime, timezone 5 | 6 | from dateutil import tz 7 | 8 | from influxdb_client.client.util.date_utils import DateHelper 9 | 10 | 11 | class DateHelperTest(unittest.TestCase): 12 | 13 | def test_to_utc(self): 14 | date = DateHelper().to_utc(datetime(2021, 4, 29, 20, 30, 10, 0)) 15 | self.assertEqual(datetime(2021, 4, 29, 20, 30, 10, 0, timezone.utc), date) 16 | 17 | def test_to_utc_different_timezone(self): 18 | date = DateHelper(timezone=tz.gettz('ETC/GMT+2')).to_utc(datetime(2021, 4, 29, 20, 30, 10, 0)) 19 | self.assertEqual(datetime(2021, 4, 29, 22, 30, 10, 0, timezone.utc), date) 20 | 21 | 22 | if __name__ == '__main__': 23 | unittest.main() 24 | -------------------------------------------------------------------------------- /tests/test_Helpers.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from influxdb_client import InfluxDBClient, Organization, PermissionResource, Permission 4 | # noinspection PyProtectedMember 5 | from influxdb_client.client.exceptions import InfluxDBError 6 | from influxdb_client.client.util.helpers import get_org_query_param, _is_id 7 | from tests.base_test import BaseTest 8 | 9 | 10 | class HelpersTest(BaseTest): 11 | 12 | def test_is_id(self): 13 | self.assertTrue(_is_id("ffffffffffffffff")) 14 | self.assertTrue(_is_id("020f755c3c082000")) 15 | self.assertTrue(_is_id("ca55e77eca55e77e")) 16 | self.assertTrue(_is_id("02def021097c6000")) 17 | self.assertFalse(_is_id("gggggggggggggggg")) 18 | self.assertFalse(_is_id("abc")) 19 | 
self.assertFalse(_is_id("abcdabcdabcdabcd0")) 20 | self.assertFalse(_is_id("020f75")) 21 | self.assertFalse(_is_id("020f755c3c082000aaa")) 22 | self.assertFalse(_is_id(None)) 23 | 24 | def test_organization_as_query_param(self): 25 | organization = Organization(id="org-id", name="org-name") 26 | org = get_org_query_param(organization, self.client) 27 | self.assertEqual("org-id", org) 28 | 29 | def test_required_id(self): 30 | org = get_org_query_param(None, self.client, required_id=True) 31 | self.assertEqual(self.my_organization.id, org) 32 | 33 | def test_required_id_not_exist(self): 34 | with pytest.raises(InfluxDBError) as e: 35 | get_org_query_param("not_exist_name", self.client, required_id=True) 36 | assert "The client cannot find organization with name: 'not_exist_name' to determine their ID." in f"{e.value} " 37 | 38 | def test_both_none(self): 39 | self.client.close() 40 | self.client = InfluxDBClient(url=self.client.url, token="my-token") 41 | org = get_org_query_param(None, self.client) 42 | self.assertIsNone(org) 43 | 44 | def test_not_permission_to_read_org(self): 45 | # Create Token without permission to read Organizations 46 | resource = PermissionResource(type="buckets", org_id=self.find_my_org().id) 47 | authorization = self.client \ 48 | .authorizations_api() \ 49 | .create_authorization(org_id=self.find_my_org().id, 50 | permissions=[Permission(resource=resource, action="read"), 51 | Permission(resource=resource, action="write")]) 52 | self.client.close() 53 | 54 | # Initialize client without permission to read Organizations 55 | self.client = InfluxDBClient(url=self.client.url, token=authorization.token) 56 | 57 | with pytest.raises(InfluxDBError) as e: 58 | get_org_query_param("my-org", self.client, required_id=True) 59 | assert "The client cannot find organization with name: 'my-org' to determine their ID. Are you using token " \ 60 | "with sufficient permission?" 
in f"{e.value} " 61 | -------------------------------------------------------------------------------- /tests/test_InfluxDBClientAuthorization.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | import httpretty 4 | 5 | from influxdb_client import InfluxDBClient 6 | 7 | 8 | class InfluxDBClientAuthorization(unittest.TestCase): 9 | 10 | def setUp(self) -> None: 11 | httpretty.enable() 12 | httpretty.reset() 13 | 14 | def tearDown(self) -> None: 15 | if self.influxdb_client: 16 | self.influxdb_client.close() 17 | httpretty.disable() 18 | 19 | def test_session_request(self): 20 | httpretty.reset() 21 | self.influxdb_client = InfluxDBClient(url="http://localhost", token="my-token", 22 | username="my-username", 23 | password="my-password") 24 | 25 | # create user session 26 | httpretty.register_uri(httpretty.POST, uri="http://localhost/api/v2/signin", 27 | adding_headers={'Set-Cookie': 'session=xyz'}) 28 | # authorized request 29 | httpretty.register_uri(httpretty.GET, uri="http://localhost/ping") 30 | # expires current session 31 | httpretty.register_uri(httpretty.POST, uri="http://localhost/api/v2/signout") 32 | 33 | ping = self.influxdb_client.ping() 34 | self.assertTrue(ping) 35 | 36 | self.assertEqual(2, len(httpretty.httpretty.latest_requests)) 37 | # basic auth header 38 | self.assertEqual('Basic bXktdXNlcm5hbWU6bXktcGFzc3dvcmQ=', httpretty.httpretty.latest_requests[0].headers['Authorization']) 39 | # cookie header 40 | self.assertEqual('session=xyz', httpretty.httpretty.latest_requests[1].headers['Cookie']) 41 | self.assertIsNotNone(self.influxdb_client.api_client.cookie) 42 | 43 | # signout 44 | self.influxdb_client.close() 45 | 46 | self.assertEqual(3, len(httpretty.httpretty.latest_requests)) 47 | -------------------------------------------------------------------------------- /tests/test_InfluxDBError.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from urllib3 import HTTPResponse 4 | 5 | from influxdb_client.client.exceptions import InfluxDBError 6 | 7 | 8 | class TestInfluxDBError(unittest.TestCase): 9 | def test_response(self): 10 | response = HTTPResponse() 11 | self.assertEqual(response, InfluxDBError(response=response).response) 12 | 13 | def test_message(self): 14 | 15 | response = HTTPResponse() 16 | response.headers.add('X-Platform-Error-Code', 'too many requests 1') 17 | self.assertEqual("too many requests 1", str(InfluxDBError(response=response))) 18 | 19 | response = HTTPResponse() 20 | response.headers.add('X-Influx-Error', 'too many requests 2') 21 | self.assertEqual("too many requests 2", str(InfluxDBError(response=response))) 22 | 23 | response = HTTPResponse() 24 | response.headers.add('X-InfluxDb-Error', 'too many requests 3') 25 | self.assertEqual("too many requests 3", str(InfluxDBError(response=response))) 26 | 27 | response = HTTPResponse(body='{"code":"too many requests","message":"org 04014de4ed590000 has exceeded limited_write plan limit"}') 28 | response.headers.add('X-InfluxDb-Error', 'error 3') 29 | self.assertEqual("org 04014de4ed590000 has exceeded limited_write plan limit", str(InfluxDBError(response=response))) 30 | 31 | response = HTTPResponse(body='org 04014de4ed590000 has exceeded limited_write plan limit') 32 | response.headers.add('X-InfluxDb-Error', 'error 3') 33 | self.assertEqual("org 04014de4ed590000 has exceeded limited_write plan limit", str(InfluxDBError(response=response))) 34 | 35 | response = 
HTTPResponse(reason='too many requests 4') 36 | self.assertEqual("too many requests 4", str(InfluxDBError(response=response))) 37 | 38 | def test_message_get_retry_after(self): 39 | response = HTTPResponse(reason="too many requests") 40 | response.headers.add('Retry-After', '63') 41 | 42 | influx_db_error = InfluxDBError(response=response) 43 | self.assertEqual("too many requests", str(influx_db_error)) 44 | self.assertEqual("63", influx_db_error.retry_after) 45 | 46 | influx_db_error = InfluxDBError(response=HTTPResponse(reason="too many requests")) 47 | self.assertEqual("too many requests", str(influx_db_error)) 48 | self.assertEqual(None, influx_db_error.retry_after) 49 | 50 | def test_no_response(self): 51 | influx_db_error = InfluxDBError(response=None) 52 | self.assertEqual("no response", str(influx_db_error)) 53 | self.assertIsNone(influx_db_error.response) 54 | self.assertIsNone(influx_db_error.retry_after) 55 | -------------------------------------------------------------------------------- /tests/test_OrganizationsApi.py: -------------------------------------------------------------------------------- 1 | from tests.base_test import BaseTest, generate_name 2 | 3 | 4 | class OrganizationsApiTests(BaseTest): 5 | 6 | def setUp(self) -> None: 7 | super(OrganizationsApiTests, self).setUp() 8 | organizations_api = self.client.organizations_api() 9 | organizations = organizations_api.find_organizations() 10 | 11 | for organization in organizations: 12 | if organization.name.endswith("_IT"): 13 | print("Delete organization: ", organization.name) 14 | organizations_api.delete_organization(org_id=organization.id) 15 | 16 | def test_update_organization(self): 17 | organizations_api = self.client.organizations_api() 18 | 19 | organization = organizations_api.create_organization(name=generate_name(key='org')) 20 | self.assertEqual("", organization.description) 21 | 22 | organization.description = "updated description" 23 | organization = organizations_api.update_organization(organization=organization) 24 | self.assertEqual("updated description", organization.description) 25 | -------------------------------------------------------------------------------- /tests/test_PandasDateTimeHelper.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from datetime import datetime, timedelta, timezone 3 | 4 | from influxdb_client.client.util.date_utils_pandas import PandasDateTimeHelper 5 | 6 | 7 | class PandasDateTimeHelperTest(unittest.TestCase): 8 | 9 | def setUp(self) -> None: 10 | self.helper = PandasDateTimeHelper() 11 | 12 | def test_parse_date(self): 13 | date = self.helper.parse_date('2020-08-07T06:21:57.331249158Z') 14 | 15 | self.assertEqual(date.year, 2020) 16 | self.assertEqual(date.month, 8) 17 | self.assertEqual(date.day, 7) 18 | self.assertEqual(date.hour, 6) 19 | self.assertEqual(date.minute, 21) 20 | self.assertEqual(date.second, 57) 21 | self.assertEqual(date.microsecond, 331249) 22 | self.assertEqual(date.nanosecond, 158) 23 | 24 | def test_to_nanoseconds(self): 25 | date = self.helper.parse_date('2020-08-07T06:21:57.331249158Z').replace(tzinfo=timezone.utc) 26 | nanoseconds = self.helper.to_nanoseconds(date - datetime.fromtimestamp(0, tz=timezone.utc)) 27 | 28 | self.assertEqual(nanoseconds, 1596781317331249158) 29 | 30 | def test_to_nanoseconds_buildin_timedelta(self): 31 | nanoseconds = self.helper.to_nanoseconds(timedelta(days=1)) 32 | 33 | self.assertEqual(nanoseconds, 86400000000000) 34 | 
-------------------------------------------------------------------------------- /tests/test_QueryApiStream.py: -------------------------------------------------------------------------------- 1 | import itertools 2 | import time 3 | import types 4 | 5 | from influxdb_client import WritePrecision 6 | from influxdb_client.client.write_api import SYNCHRONOUS 7 | from tests.base_test import BaseTest 8 | 9 | 10 | class QueryStreamApi(BaseTest): 11 | 12 | def setUp(self) -> None: 13 | super().setUp() 14 | self.write_client = self.client.write_api(write_options=SYNCHRONOUS) 15 | self.bucket = self.create_test_bucket() 16 | 17 | def tearDown(self) -> None: 18 | self.write_client.close() 19 | super().tearDown() 20 | 21 | def test_block(self): 22 | self._prepareData() 23 | 24 | _result = self.query_api.query( 25 | f'from(bucket:"{self.bucket.name}") |> range(start: 1970-01-01T00:00:00.000000001Z)', self.org) 26 | 27 | self.assertEqual(len(_result), 1) 28 | self.assertEqual(len(_result[0].records), 100) 29 | 30 | def test_stream(self): 31 | self._prepareData() 32 | 33 | _result = self.query_api.query_stream( 34 | f'from(bucket:"{self.bucket.name}") |> range(start: 1970-01-01T00:00:00.000000001Z)', self.org) 35 | 36 | self.assertTrue(isinstance(_result, types.GeneratorType)) 37 | _result_list = list(_result) 38 | 39 | self.assertEqual(len(_result_list), 100) 40 | 41 | def test_stream_break(self): 42 | self._prepareData() 43 | 44 | _result = self.query_api.query_stream( 45 | f'from(bucket:"{self.bucket.name}") |> range(start: 1970-01-01T00:00:00.000000001Z)', self.org) 46 | 47 | _result_list = list(itertools.islice(_result, 10)) 48 | _result.close() 49 | 50 | self.assertEqual(len(_result_list), 10) 51 | 52 | def _prepareData(self): 53 | _list = [f'h2o_feet,location=coyote_creek water_level={x} {x}' for x in range(1, 101)] 54 | self.write_client.write(self.bucket.name, self.org, _list, write_precision=WritePrecision.S) 55 | time.sleep(1) 56 | -------------------------------------------------------------------------------- /tests/test_Thresholds.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | import httpretty 4 | 5 | from influxdb_client import InfluxDBClient, ChecksService 6 | from tests.base_test import BaseTest 7 | 8 | 9 | class ThresholdsClientTest(BaseTest): 10 | 11 | def setUp(self) -> None: 12 | super(ThresholdsClientTest, self).setUp() 13 | 14 | httpretty.enable() 15 | httpretty.reset() 16 | 17 | def tearDown(self) -> None: 18 | self.client.close() 19 | httpretty.disable() 20 | 21 | def test_threshold(self): 22 | dictionary = { 23 | "id": "01", 24 | "orgID": "org_id", 25 | "name": "name", 26 | "type": "threshold", 27 | "query": "query", 28 | "thresholds": [{ 29 | "allValues": False, 30 | "level": "CRIT", 31 | "value": 10.5, 32 | "type": "greater" 33 | }], 34 | } 35 | httpretty.register_uri(httpretty.GET, uri="http://localhost/api/v2/checks/01", status=200, 36 | body=json.dumps(dictionary, indent=2), 37 | adding_headers={'Content-Type': 'application/json'}) 38 | self.client = InfluxDBClient("http://localhost", "my-token", org="my-org", debug=True) 39 | checks_service = ChecksService(api_client=self.client.api_client) 40 | check = checks_service.get_checks_id(check_id="01") 41 | self.assertEqual(1, len(check.thresholds)) 42 | self.assertEqual(False, check.thresholds[0].all_values) 43 | self.assertEqual(10.5, check.thresholds[0].value) 44 | self.assertEqual("CRIT", check.thresholds[0].level) 45 | self.assertEqual("greater", 
check.thresholds[0].type) 46 | -------------------------------------------------------------------------------- /tests/test_UsersApi.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from influxdb_client import UserResponse 4 | from influxdb_client.rest import ApiException 5 | from tests.base_test import BaseTest, generate_name 6 | 7 | 8 | class UsersApiTests(BaseTest): 9 | 10 | def setUp(self) -> None: 11 | super(UsersApiTests, self).setUp() 12 | users_api = self.client.users_api() 13 | users = users_api.find_users() 14 | 15 | for user in users.users: 16 | if user.name.endswith("_IT"): 17 | print("Delete user: ", user.name) 18 | users_api.delete_user(user=user) 19 | 20 | def test_delete_user(self): 21 | users_api = self.client.users_api() 22 | 23 | user = users_api.create_user(name=generate_name(key='user')) 24 | users = users_api.find_users(id=user.id) 25 | self.assertEqual(1, len(users.users)) 26 | self.assertEqual(user, users.users[0]) 27 | 28 | users_api.delete_user(user) 29 | 30 | with pytest.raises(ApiException) as e: 31 | assert users_api.find_users(id=user.id) 32 | assert "user not found" in e.value.body 33 | 34 | def test_update_user(self): 35 | users_api = self.client.users_api() 36 | 37 | name = generate_name(key='user') 38 | user = users_api.create_user(name=name) 39 | self.assertEqual(name, user.name) 40 | 41 | user.name = "updated_" + name 42 | user = users_api.update_user(user=user) 43 | self.assertIsInstance(user, UserResponse) 44 | user = users_api.find_users(id=user.id).users[0] 45 | self.assertEqual("updated_" + name, user.name) 46 | 47 | def test_update_password(self): 48 | users_api = self.client.users_api() 49 | 50 | user = users_api.create_user(name=generate_name(key='user')) 51 | users_api.update_password(user, "my-password-2") 52 | users_api.update_password(user, "my-password-3") 53 | 54 | -------------------------------------------------------------------------------- /tests/test_Warnings.py: -------------------------------------------------------------------------------- 1 | import json 2 | import unittest 3 | 4 | import httpretty 5 | import pytest 6 | 7 | from influxdb_client import InfluxDBClient, BucketSchemasService 8 | from influxdb_client.client.warnings import CloudOnlyWarning 9 | 10 | 11 | class Warnings(unittest.TestCase): 12 | 13 | def setUp(self) -> None: 14 | httpretty.enable() 15 | httpretty.reset() 16 | 17 | def tearDown(self) -> None: 18 | httpretty.disable() 19 | 20 | def test_cloud_only_warning(self): 21 | httpretty.register_uri(httpretty.GET, uri="http://localhost/ping", 22 | status=200, body="{}", adding_headers={'X-Influxdb-Build': 'OSS'}) 23 | httpretty.register_uri(httpretty.GET, uri="http://localhost/api/v2/buckets/01010101/schema/measurements", 24 | status=200, body=json.dumps({'measurementSchemas': []})) 25 | 26 | with pytest.warns(CloudOnlyWarning) as warnings: 27 | with InfluxDBClient(url="http://localhost", token="my-token", org="my-org") as client: 28 | service = BucketSchemasService(api_client=client.api_client) 29 | service.get_measurement_schemas(bucket_id="01010101") 30 | warnings = [w for w in warnings if w.category == CloudOnlyWarning] 31 | self.assertEqual(1, len(warnings)) 32 | -------------------------------------------------------------------------------- /tests/test_WriteApiPickle.py: -------------------------------------------------------------------------------- 1 | import pickle 2 | import sys 3 | 4 | import pytest 5 | 6 | from influxdb_client import 
InfluxDBClient, WriteOptions 7 | from influxdb_client.client.write_api import WriteType 8 | from tests.base_test import current_milli_time, BaseTest 9 | 10 | 11 | class InfluxDBWriterToPickle: 12 | 13 | def __init__(self): 14 | self.client = InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org", debug=False) 15 | self.write_api = self.client.write_api( 16 | write_options=WriteOptions(write_type=WriteType.batching, batch_size=50_000, flush_interval=10_000)) 17 | 18 | def write(self, record): 19 | self.write_api.write(bucket="my-bucket", record=record) 20 | 21 | def terminate(self) -> None: 22 | self.write_api.close() 23 | self.client.close() 24 | 25 | 26 | class WriteApiPickle(BaseTest): 27 | 28 | def setUp(self) -> None: 29 | super().setUp() 30 | 31 | def tearDown(self) -> None: 32 | super().tearDown() 33 | 34 | @pytest.mark.skipif(sys.version_info < (3, 7), reason="requires python3.7 or higher") 35 | def test_write_line_protocol(self): 36 | writer = InfluxDBWriterToPickle() 37 | 38 | pickle_out = open("writer.pickle", "wb") 39 | pickle.dump(writer, pickle_out) 40 | pickle_out.close() 41 | 42 | writer = pickle.load(open("writer.pickle", "rb")) 43 | 44 | measurement = "h2o_feet_" + str(current_milli_time()) 45 | writer.write(record=f"{measurement},location=coyote_creek water_level=1.0") 46 | writer.terminate() 47 | 48 | tables = self.query_api.query( 49 | f'from(bucket: "my-bucket") |> range(start: 0) |> filter(fn: (r) => r._measurement == "{measurement}")') 50 | 51 | self.assertEqual(len(tables), 1) 52 | self.assertEqual(len(tables[0].records), 1) 53 | self.assertEqual(tables[0].records[0].get_measurement(), measurement) 54 | self.assertEqual(tables[0].records[0].get_value(), 1.0) 55 | self.assertEqual(tables[0].records[0].get_field(), "water_level") 56 | -------------------------------------------------------------------------------- /tests/test_WriteOptions.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from influxdb_client.client.write_api import WriteOptions 4 | 5 | 6 | class TestWriteOptions(unittest.TestCase): 7 | def test_default(self): 8 | retry = WriteOptions().to_retry_strategy() 9 | 10 | self.assertEqual(retry.total, 5) 11 | self.assertEqual(retry.retry_interval, 5) 12 | self.assertEqual(retry.max_retry_time, 180) 13 | self.assertEqual(retry.max_retry_delay, 125) 14 | self.assertEqual(retry.exponential_base, 2) 15 | self.assertEqual(retry.allowed_methods, ["POST"]) 16 | 17 | def test_custom(self): 18 | retry = WriteOptions(max_retries=5, max_retry_delay=7500, 19 | retry_interval=500, jitter_interval=2000, 20 | exponential_base=2)\ 21 | .to_retry_strategy() 22 | 23 | self.assertEqual(retry.total, 5) 24 | self.assertEqual(retry.retry_interval, 0.5) 25 | self.assertEqual(retry.max_retry_delay, 7.5) 26 | self.assertEqual(retry.exponential_base, 2) 27 | self.assertEqual(retry.allowed_methods, ["POST"]) 28 | -------------------------------------------------------------------------------- /tests/test_health.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import unittest 3 | 4 | import httpretty 5 | from urllib3 import Retry 6 | 7 | from influxdb_client import InfluxDBClient 8 | from tests.base_test import BaseTest 9 | 10 | 11 | class TestHealth(BaseTest): 12 | 13 | def setUp(self) -> None: 14 | super(TestHealth, self).setUp() 15 | self.client.api_client.configuration.debug = True 16 | 17 | def test_health(self): 18 | health = 
self.client.health() 19 | self.assertEqual(health.message, 'ready for queries and writes') 20 | self.assertEqual(health.status, "pass") 21 | self.assertEqual(health.name, "influxdb") 22 | 23 | def test_health_not_running_instance(self): 24 | client_not_running = InfluxDBClient("http://localhost:8099", token="my-token", debug=True) 25 | check = client_not_running.health() 26 | self.assertTrue("Connection refused" in check.message) 27 | self.assertEqual(check.status, "fail") 28 | self.assertEqual(check.name, "influxdb") 29 | 30 | def test_ready(self): 31 | ready = self.client.ready() 32 | self.assertEqual(ready.status, "ready") 33 | self.assertIsNotNone(ready.started) 34 | self.assertTrue(datetime.datetime.now(tz=ready.started.tzinfo) > ready.started) 35 | self.assertIsNotNone(ready.up) 36 | 37 | 38 | class TestHealthMock(unittest.TestCase): 39 | 40 | def setUp(self) -> None: 41 | httpretty.enable() 42 | httpretty.reset() 43 | 44 | self.influxdb_client = InfluxDBClient(url="http://localhost", token="my-token") 45 | 46 | def tearDown(self) -> None: 47 | self.influxdb_client.close() 48 | httpretty.disable() 49 | 50 | def test_without_retry(self): 51 | httpretty.register_uri(httpretty.GET, uri="http://localhost/health", status=429, 52 | adding_headers={'Retry-After': '5', 'Content-Type': 'application/json'}, 53 | body="{\"message\":\"Health is not working\"}") 54 | 55 | check = self.influxdb_client.health() 56 | self.assertTrue("Health is not working" in check.message, msg=check.message) 57 | self.assertEqual(check.status, "fail") 58 | self.assertEqual(check.name, "influxdb") 59 | 60 | self.assertEqual(1, len(httpretty.httpretty.latest_requests)) 61 | 62 | def test_with_retry(self): 63 | 64 | self.influxdb_client.close() 65 | self.influxdb_client = InfluxDBClient(url="http://localhost", token="my-token", retries=Retry()) 66 | 67 | httpretty.register_uri(httpretty.GET, uri="http://localhost/health", status=200, 68 | adding_headers={'Content-Type': 'application/json'}, 69 | body="{\"message\":\"ready for queries and writes\", \"name\":\"influxdb\", \"status\":\"pass\"}") 70 | httpretty.register_uri(httpretty.GET, uri="http://localhost/health", status=429, 71 | adding_headers={'Retry-After': '1', 'Content-Type': 'application/json'}, 72 | body="{\"message\":\"Health is not working\"}") 73 | 74 | health = self.influxdb_client.health() 75 | self.assertEqual(health.message, 'ready for queries and writes') 76 | self.assertEqual(health.status, "pass") 77 | self.assertEqual(health.name, "influxdb") 78 | 79 | self.assertEqual(2, len(httpretty.httpretty.latest_requests)) 80 | --------------------------------------------------------------------------------