From f53aa37a34dc37026d430e71b5e0d1b871bc5ac1 Mon Sep 17 00:00:00 2001 From: wyattscarpenter Date: Tue, 2 Jul 2024 07:09:12 -0400 Subject: [PATCH 1/4] move py.typed to correct places (#403) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * move py.typed to correct places https://peps.python.org/pep-0561/ says 'For namespace packages (see PEP 420), the py.typed file should be in the submodules of the namespace, to avoid conflicts and for clarity.'. Previously, when I added the py.typed file to this project, https://github.com/databricks/databricks-sql-python/pull/382 , I was unaware this was a namespace package (although, curiously, it seems I had done it right initially and then changed to the wrong way). As PEP 561 warns us, this does create conflicts; other libraries in the databricks namespace package (such as, in my case, databricks-vectorsearch) are then treated as though they are typed, which they are not. This commit moves the py.typed file to the correct places, the submodule folders, fixing that problem. Signed-off-by: wyattscarpenter * change target of mypy to src/databricks instead of src. I think this might fix the CI code-quality checks failure, but unfortunately I can't replicate that failure locally and the error message is unhelpful Signed-off-by: wyattscarpenter * Possible workaround for bad error message 'error: --install-types failed (no mypy cache directory)'; see https://github.com/python/mypy/issues/10768#issuecomment-2178450153 Signed-off-by: wyattscarpenter * fix invalid yaml syntax Signed-off-by: wyattscarpenter * Best fix (#3) Fixes the problem by cding and supplying a flag to mypy (that mypy needs this flag is seemingly fixed/changed in later versions of mypy; but that's another pr altogether...). Also fixes a type error that was somehow in the arguments of the program (?!) (I guess this is because you guys are still using implicit optional) --------- Signed-off-by: wyattscarpenter * return the old result_links default (#5) Return the old result_links default, make the type optional, & I'm pretty sure the original problem is that add_file_links can't take a None, so these statements should be in the body of the if-statement that ensures it is not None Signed-off-by: wyattscarpenter * Update src/databricks/sql/utils.py "self.download_manager is unconditionally used later, so must be created. Looks this part of code is totally not covered with tests 🤔" Co-authored-by: Levko Kravets Signed-off-by: wyattscarpenter --------- Signed-off-by: wyattscarpenter Co-authored-by: Levko Kravets --- .github/workflows/code-quality-checks.yml | 5 ++++- src/databricks/{ => sql}/py.typed | 0 src/databricks/sql/utils.py | 7 +++---- src/databricks/sqlalchemy/py.typed | 0 4 files changed, 7 insertions(+), 5 deletions(-) rename src/databricks/{ => sql}/py.typed (100%) create mode 100755 src/databricks/sqlalchemy/py.typed diff --git a/.github/workflows/code-quality-checks.yml b/.github/workflows/code-quality-checks.yml index 03c3991d..2dd55b77 100644 --- a/.github/workflows/code-quality-checks.yml +++ b/.github/workflows/code-quality-checks.yml @@ -160,4 +160,7 @@ jobs: # mypy the code #---------------------------------------------- - name: Mypy - run: poetry run mypy --install-types --non-interactive src + run: | + cd src # Need to be in the actual databricks/ folder or mypy does the wrong thing. 
 + mkdir .mypy_cache # Workaround for bad error message "error: --install-types failed (no mypy cache directory)"; see https://github.com/python/mypy/issues/10768#issuecomment-2178450153 + poetry run mypy --config-file ../pyproject.toml --install-types --non-interactive --namespace-packages databricks diff --git a/src/databricks/py.typed b/src/databricks/sql/py.typed similarity index 100% rename from src/databricks/py.typed rename to src/databricks/sql/py.typed diff --git a/src/databricks/sql/utils.py b/src/databricks/sql/utils.py index 9f21a8eb..6063eca1 100644 --- a/src/databricks/sql/utils.py +++ b/src/databricks/sql/utils.py @@ -7,7 +7,7 @@ from collections.abc import Iterable from decimal import Decimal from enum import Enum -from typing import Any, Dict, List, Union +from typing import Any, Dict, List, Optional, Union import re import lz4.frame @@ -134,7 +134,7 @@ def __init__( schema_bytes, max_download_threads: int, start_row_offset: int = 0, - result_links: List[TSparkArrowResultLink] = None, + result_links: Optional[List[TSparkArrowResultLink]] = None, lz4_compressed: bool = True, description: List[List[Any]] = None, ): @@ -168,11 +168,10 @@ def __init__( result_link.startRowOffset, result_link.rowCount ) ) - self.download_manager = ResultFileDownloadManager( self.max_download_threads, self.lz4_compressed ) - self.download_manager.add_file_links(result_links) + self.download_manager.add_file_links(result_links or []) self.table = self._create_next_table() self.table_row_index = 0 diff --git a/src/databricks/sqlalchemy/py.typed b/src/databricks/sqlalchemy/py.typed new file mode 100755 index 00000000..e69de29b From 93e207e206e51b6ae9d879be76c586061065dc58 Mon Sep 17 00:00:00 2001 From: wyattscarpenter Date: Wed, 3 Jul 2024 04:22:12 -0500 Subject: [PATCH 2/4] Upgrade mypy (#406) * Upgrade mypy This commit removes the flag (and cd step) from https://github.com/databricks/databricks-sql-python/commit/f53aa37a34dc37026d430e71b5e0d1b871bc5ac1 which we added to get mypy to treat namespaces correctly. This was apparently a bug in mypy, or behavior they decided to change. To get the new behavior, we must upgrade mypy. (This also allows us to remove a couple `# type: ignore` comments that are no longer needed.) This commit changes the version of mypy and runs `poetry lock`. It also conforms the whitespace of files in this project to the expectations of various tools and standards (namely: removing trailing whitespace as expected by git and enforcing the existence of one and only one newline at the end of a file as expected by unix and github.) It also uses https://github.com/hauntsaninja/no_implicit_optional to automatically upgrade the codebase due to a change in mypy behavior. For a similar reason, it also fixes a few new type (or otherwise) errors: * "Return type 'Retry' of 'new' incompatible with return type 'DatabricksRetryPolicy' in supertype 'Retry'" * databricks/sql/auth/retry.py:225: error: object has no attribute update [attr-defined] * /test_param_escaper.py:31: DeprecationWarning: invalid escape sequence \) [as it happens, I think it was also wrong for the string not to be raw, because I'm pretty sure it wants all of its backslashed single-quotes to appear literally with the backslashes, which wasn't happening until now] * ValueError: numpy.dtype size changed, may indicate binary incompatibility.
Expected 96 from C header, got 88 from PyObject [this is like a numpy version thing, which I fixed by being stricter about numpy version] --------- Signed-off-by: wyattscarpenter * Incorporate suggestion. I decided the most expedient way of dealing with this type error was just adding the type ignore comment back in, but with a `[attr-defined]` specifier this time. I mean, otherwise I would have to restructure the code or figure out the proper types for a TypedDict for the dict and I don't think that's worth it at the moment. Signed-off-by: wyattscarpenter --------- Signed-off-by: wyattscarpenter --- .github/.github/pull_request_template.md | 6 +- .github/workflows/code-quality-checks.yml | 3 +- .github/workflows/publish.yml | 2 +- .gitignore | 2 +- CONTRIBUTING.md | 5 +- docs/parameters.md | 12 +- examples/README.md | 6 +- examples/insert_data.py | 2 +- examples/persistent_oauth.py | 4 +- examples/query_cancel.py | 10 +- examples/query_execute.py | 2 +- examples/sqlalchemy.py | 24 +- examples/staging_ingestion.py | 2 +- examples/v3_retries_query_execute.py | 4 +- poetry.lock | 340 +++++++++--------- pyproject.toml | 6 +- src/databricks/sql/auth/auth.py | 20 +- src/databricks/sql/auth/retry.py | 16 +- src/databricks/sql/client.py | 20 +- src/databricks/sql/thrift_backend.py | 4 +- src/databricks/sql/utils.py | 4 +- .../sqlalchemy/README.sqlalchemy.md | 3 +- src/databricks/sqlalchemy/README.tests.md | 6 +- src/databricks/sqlalchemy/requirements.py | 2 +- .../overrides/_componentreflectiontest.py | 2 +- .../sqlalchemy/test_local/__init__.py | 2 +- test.env.example | 4 +- tests/e2e/common/large_queries_mixin.py | 2 +- tests/e2e/common/predicates.py | 4 +- tests/e2e/common/retry_test_mixins.py | 6 +- tests/unit/test_client.py | 10 +- tests/unit/test_param_escaper.py | 4 +- 32 files changed, 272 insertions(+), 267 deletions(-) mode change 100644 => 100755 poetry.lock mode change 100644 => 100755 src/databricks/sql/auth/auth.py mode change 100644 => 100755 src/databricks/sql/auth/retry.py mode change 100644 => 100755 src/databricks/sql/client.py mode change 100644 => 100755 tests/e2e/common/retry_test_mixins.py diff --git a/.github/.github/pull_request_template.md b/.github/.github/pull_request_template.md index 8ce224e8..62c0286c 100644 --- a/.github/.github/pull_request_template.md +++ b/.github/.github/pull_request_template.md @@ -1,7 +1,7 @@ -## What type of PR is this? +## What type of PR is this? - [ ] Refactor @@ -13,8 +13,8 @@ ## How is this tested? -- [ ] Unit tests -- [ ] E2E Tests +- [ ] Unit tests +- [ ] E2E Tests - [ ] Manually - [ ] N/A diff --git a/.github/workflows/code-quality-checks.yml b/.github/workflows/code-quality-checks.yml index 2dd55b77..80ac94a7 100644 --- a/.github/workflows/code-quality-checks.yml +++ b/.github/workflows/code-quality-checks.yml @@ -161,6 +161,5 @@ jobs: #---------------------------------------------- - name: Mypy run: | - cd src # Need to be in the actual databricks/ folder or mypy does the wrong thing. 
mkdir .mypy_cache # Workaround for bad error message "error: --install-types failed (no mypy cache directory)"; see https://github.com/python/mypy/issues/10768#issuecomment-2178450153 - poetry run mypy --config-file ../pyproject.toml --install-types --non-interactive --namespace-packages databricks + poetry run mypy --install-types --non-interactive src diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 9ea751d0..324575ff 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -61,4 +61,4 @@ jobs: - name: Build and publish to pypi uses: JRubics/poetry-publish@v1.10 with: - pypi_token: ${{ secrets.PROD_PYPI_TOKEN }} \ No newline at end of file + pypi_token: ${{ secrets.PROD_PYPI_TOKEN }} diff --git a/.gitignore b/.gitignore index d89a4116..a1fe5bbd 100644 --- a/.gitignore +++ b/.gitignore @@ -207,4 +207,4 @@ build/ .vscode # don't commit authentication info to source control -test.env \ No newline at end of file +test.env diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 1d5c2e97..ce0968d4 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -74,7 +74,7 @@ If you set your `user.name` and `user.email` git configs, you can sign your comm This project uses [Poetry](https://python-poetry.org/) for dependency management, tests, and linting. 1. Clone this respository -2. Run `poetry install` +2. Run `poetry install` ### Run tests @@ -167,5 +167,4 @@ Modify the dependency specification (syntax can be found [here](https://python-p - `poetry update` - `rm poetry.lock && poetry install` -Sometimes `poetry update` can freeze or run forever. Deleting the `poetry.lock` file and calling `poetry install` is guaranteed to update everything but is usually _slower_ than `poetry update` **if `poetry update` works at all**. - +Sometimes `poetry update` can freeze or run forever. Deleting the `poetry.lock` file and calling `poetry install` is guaranteed to update everything but is usually _slower_ than `poetry update` **if `poetry update` works at all**. diff --git a/docs/parameters.md b/docs/parameters.md index 05529d42..a538af1a 100644 --- a/docs/parameters.md +++ b/docs/parameters.md @@ -43,7 +43,7 @@ SELECT * FROM table WHERE field = %(value)s ## Python Syntax -This connector follows the [PEP-249 interface](https://peps.python.org/pep-0249/#id20). The expected structure of the parameter collection follows the paramstyle of the variables in your query. +This connector follows the [PEP-249 interface](https://peps.python.org/pep-0249/#id20). The expected structure of the parameter collection follows the paramstyle of the variables in your query. ### `named` paramstyle Usage Example @@ -85,7 +85,7 @@ The result of the above two examples is identical. Databricks Runtime expects variable markers to use either `named` or `qmark` paramstyles. Historically, this connector used `pyformat` which Databricks Runtime does not support. So to assist assist customers transitioning their codebases from `pyformat` → `named`, we can dynamically rewrite the variable markers before sending the query to Databricks. This happens only when `use_inline_params=False`. - This dynamic rewrite will be deprecated in a future release. New queries should be written using the `named` paramstyle instead. And users should update their client code to replace `pyformat` markers with `named` markers. + This dynamic rewrite will be deprecated in a future release. New queries should be written using the `named` paramstyle instead. 
And users should update their client code to replace `pyformat` markers with `named` markers. For example: @@ -106,7 +106,7 @@ SELECT field1, field2, :param1 FROM table WHERE field4 = :param2 Under the covers, parameter values are annotated with a valid Databricks SQL type. As shown in the examples above, this connector accepts primitive Python types like `int`, `str`, and `Decimal`. When this happens, the connector infers the corresponding Databricks SQL type (e.g. `INT`, `STRING`, `DECIMAL`) automatically. This means that the parameters passed to `cursor.execute()` are always wrapped in a `TDbsqlParameter` subtype prior to execution. -Automatic inferrence is sufficient for most usages. But you can bypass the inference by explicitly setting the Databricks SQL type in your client code. All supported Databricks SQL types have `TDbsqlParameter` implementations which you can import from `databricks.sql.parameters`. +Automatic inferrence is sufficient for most usages. But you can bypass the inference by explicitly setting the Databricks SQL type in your client code. All supported Databricks SQL types have `TDbsqlParameter` implementations which you can import from `databricks.sql.parameters`. `TDbsqlParameter` objects must always be passed within a list. Either paramstyle (`:named` or `?`) may be used. However, if your query uses the `named` paramstyle, all `TDbsqlParameter` objects must be provided a `name` when they are constructed. @@ -158,7 +158,7 @@ Rendering parameters inline is supported on all versions of DBR since these quer ## SQL Syntax -Variables in your SQL query can look like `%(param)s` or like `%s`. +Variables in your SQL query can look like `%(param)s` or like `%s`. #### Example @@ -172,7 +172,7 @@ SELECT * FROM table WHERE field = %s ## Python Syntax -This connector follows the [PEP-249 interface](https://peps.python.org/pep-0249/#id20). The expected structure of the parameter collection follows the paramstyle of the variables in your query. +This connector follows the [PEP-249 interface](https://peps.python.org/pep-0249/#id20). The expected structure of the parameter collection follows the paramstyle of the variables in your query. ### `pyformat` paramstyle Usage Example @@ -210,7 +210,7 @@ with sql.connect(..., use_inline_params=True) as conn: The result of the above two examples is identical. -**Note**: `%s` is not compliant with PEP-249 and only works due to the specific implementation of our inline renderer. +**Note**: `%s` is not compliant with PEP-249 and only works due to the specific implementation of our inline renderer. **Note:** This `%s` syntax overlaps with valid SQL syntax around the usage of `LIKE` DML. For example if your query includes a clause like `WHERE field LIKE '%sequence'`, the parameter inlining function will raise an exception because this string appears to include an inline marker but none is provided. This means that connector versions below 3.0.0 it has been impossible to execute a query that included both parameters and LIKE wildcards. When `use_inline_params=False`, we will pass `%s` occurrences along to the database, allowing it to be used as expected in `LIKE` statements. 
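A minimal usage sketch (not part of this patch) may help tie the docs/parameters.md hunks above together: it contrasts the legacy `pyformat` marker with the recommended `named` paramstyle, using the `databricks.sql.connect()` / `cursor.execute()` interface those docs describe. The hostname, HTTP path, access token, and table/column names below are placeholders, not values taken from this repository.

from databricks import sql

# Placeholder connection details; substitute real workspace values.
with sql.connect(server_hostname="<host>", http_path="<http-path>", access_token="<token>") as connection:
    with connection.cursor() as cursor:
        # Legacy pyformat marker, rendered client-side only when use_inline_params=True:
        #   cursor.execute("SELECT * FROM table WHERE field = %(value)s", {"value": 42})
        # Recommended named paramstyle, sent to Databricks as a native parameter:
        cursor.execute("SELECT * FROM table WHERE field = :value", {"value": 42})
        print(cursor.fetchall())

Both forms return the same rows; only the way the parameter value reaches Databricks differs.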
diff --git a/examples/README.md b/examples/README.md index ad0fb07e..43d248da 100644 --- a/examples/README.md +++ b/examples/README.md @@ -7,7 +7,7 @@ We provide example scripts so you can see the connector in action for basic usag - DATABRICKS_TOKEN Follow the quick start in our [README](../README.md) to install `databricks-sql-connector` and see -how to find the hostname, http path, and access token. Note that for the OAuth examples below a +how to find the hostname, http path, and access token. Note that for the OAuth examples below a personal access token is not needed. @@ -38,7 +38,7 @@ To run all of these examples you can clone the entire repository to your disk. O - **`set_user_agent.py`** shows how to customize the user agent header used for Thrift commands. In this example the string `ExamplePartnerTag` will be added to the the user agent on every request. - **`staging_ingestion.py`** shows how the connector handles Databricks' experimental staging ingestion commands `GET`, `PUT`, and `REMOVE`. -- **`sqlalchemy.py`** shows a basic example of connecting to Databricks with [SQLAlchemy 2.0](https://www.sqlalchemy.org/). +- **`sqlalchemy.py`** shows a basic example of connecting to Databricks with [SQLAlchemy 2.0](https://www.sqlalchemy.org/). - **`custom_cred_provider.py`** shows how to pass a custom credential provider to bypass connector authentication. Please install databricks-sdk prior to running this example. - **`v3_retries_query_execute.py`** shows how to enable v3 retries in connector version 2.9.x including how to enable retries for non-default retry cases. -- **`parameters.py`** shows how to use parameters in native and inline modes. \ No newline at end of file +- **`parameters.py`** shows how to use parameters in native and inline modes. diff --git a/examples/insert_data.py b/examples/insert_data.py index 511986aa..b304a0e9 100644 --- a/examples/insert_data.py +++ b/examples/insert_data.py @@ -18,4 +18,4 @@ result = cursor.fetchall() for row in result: - print(row) \ No newline at end of file + print(row) diff --git a/examples/persistent_oauth.py b/examples/persistent_oauth.py index 7ea83a2f..0f2ba077 100644 --- a/examples/persistent_oauth.py +++ b/examples/persistent_oauth.py @@ -23,10 +23,10 @@ class SampleOAuthPersistence(OAuthPersistence): def persist(self, hostname: str, oauth_token: OAuthToken): """To be implemented by the end user to persist in the preferred storage medium. - + OAuthToken has two properties: 1. OAuthToken.access_token - 2. OAuthToken.refresh_token + 2. OAuthToken.refresh_token Both should be persisted. """ diff --git a/examples/query_cancel.py b/examples/query_cancel.py index 59202088..4e0b74a5 100644 --- a/examples/query_cancel.py +++ b/examples/query_cancel.py @@ -19,13 +19,13 @@ def execute_really_long_query(): print("It looks like this query was cancelled.") exec_thread = threading.Thread(target=execute_really_long_query) - + print("\n Beginning to execute long query") exec_thread.start() - + # Make sure the query has started before cancelling print("\n Waiting 15 seconds before canceling", end="", flush=True) - + seconds_waited = 0 while seconds_waited < 15: seconds_waited += 1 @@ -34,7 +34,7 @@ def execute_really_long_query(): print("\n Cancelling the cursor's operation. 
This can take a few seconds.") cursor.cancel() - + print("\n Now checking the cursor status:") exec_thread.join(5) @@ -42,7 +42,7 @@ def execute_really_long_query(): print("\n The previous command was successfully canceled") print("\n Now reusing the cursor to run a separate query.") - + # We can still execute a new command on the cursor cursor.execute("SELECT * FROM range(3)") diff --git a/examples/query_execute.py b/examples/query_execute.py index ec79fd0e..a851ab50 100644 --- a/examples/query_execute.py +++ b/examples/query_execute.py @@ -10,4 +10,4 @@ result = cursor.fetchall() for row in result: - print(row) \ No newline at end of file + print(row) diff --git a/examples/sqlalchemy.py b/examples/sqlalchemy.py index 15608d03..7492dc5a 100644 --- a/examples/sqlalchemy.py +++ b/examples/sqlalchemy.py @@ -8,7 +8,7 @@ Our dialect implements the majority of SQLAlchemy 2.0's API. Because of the extent of SQLAlchemy's capabilities it isn't feasible to provide examples of every usage in a single script, so we only -provide a basic one here. Learn more about usage in README.sqlalchemy.md in this repo. +provide a basic one here. Learn more about usage in README.sqlalchemy.md in this repo. """ # fmt: off @@ -89,17 +89,17 @@ class SampleObject(Base): # Output SQL is: # CREATE TABLE pysql_sqlalchemy_example_table ( -# bigint_col BIGINT NOT NULL, -# string_col STRING, -# tinyint_col SMALLINT, -# int_col INT, -# numeric_col DECIMAL(10, 2), -# boolean_col BOOLEAN, -# date_col DATE, -# datetime_col TIMESTAMP, -# datetime_col_ntz TIMESTAMP_NTZ, -# time_col STRING, -# uuid_col STRING, +# bigint_col BIGINT NOT NULL, +# string_col STRING, +# tinyint_col SMALLINT, +# int_col INT, +# numeric_col DECIMAL(10, 2), +# boolean_col BOOLEAN, +# date_col DATE, +# datetime_col TIMESTAMP, +# datetime_col_ntz TIMESTAMP_NTZ, +# time_col STRING, +# uuid_col STRING, # PRIMARY KEY (bigint_col) # ) USING DELTA diff --git a/examples/staging_ingestion.py b/examples/staging_ingestion.py index 2980506d..a55be477 100644 --- a/examples/staging_ingestion.py +++ b/examples/staging_ingestion.py @@ -24,7 +24,7 @@ Additionally, the connection can only manipulate files within the cloud storage location of the authenticated user. -To run this script: +To run this script: 1. Set the INGESTION_USER constant to the account email address of the authenticated user 2. Set the FILEPATH constant to the path of a file that will be uploaded (this example assumes its a CSV file) diff --git a/examples/v3_retries_query_execute.py b/examples/v3_retries_query_execute.py index bf8fce69..4b6772fe 100644 --- a/examples/v3_retries_query_execute.py +++ b/examples/v3_retries_query_execute.py @@ -5,7 +5,7 @@ # This flag will be deprecated in databricks-sql-connector~=3.0.0 as it will become the default. # # The new retry behaviour is defined in src/databricks/sql/auth/retry.py -# +# # The new retry behaviour allows users to force the connector to automatically retry requests that fail with codes # that are not retried by default (in most cases only codes 429 and 503 are retried by default). Additional HTTP # codes to retry are specified as a list passed to `_retry_dangerous_codes`. @@ -16,7 +16,7 @@ # the SQL gateway / load balancer. So there is no risk that retrying the request would result in a doubled # (or tripled etc) command execution. These codes are always accompanied by a Retry-After header, which we honour. 
# -# However, if your use-case emits idempotent queries such as SELECT statements, it can be helpful to retry +# However, if your use-case emits idempotent queries such as SELECT statements, it can be helpful to retry # for 502 (Bad Gateway) codes etc. In these cases, there is a possibility that the initial command _did_ reach # Databricks compute and retrying it could result in additional executions. Retrying under these conditions uses # an exponential back-off since a Retry-After header is not present. diff --git a/poetry.lock b/poetry.lock old mode 100644 new mode 100755 index 86c05bba..1e351e4f --- a/poetry.lock +++ b/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "alembic" -version = "1.13.1" +version = "1.13.2" description = "A database migration tool for SQLAlchemy." optional = true python-versions = ">=3.8" files = [ - {file = "alembic-1.13.1-py3-none-any.whl", hash = "sha256:2edcc97bed0bd3272611ce3a98d98279e9c209e7186e43e75bbb1b2bdfdbcc43"}, - {file = "alembic-1.13.1.tar.gz", hash = "sha256:4932c8558bf68f2ee92b9bbcb8218671c627064d5b08939437af6d77dc05e595"}, + {file = "alembic-1.13.2-py3-none-any.whl", hash = "sha256:6b8733129a6224a9a711e17c99b08462dbf7cc9670ba8f2e2ae9af860ceb1953"}, + {file = "alembic-1.13.2.tar.gz", hash = "sha256:1ff0ae32975f4fd96028c39ed9bb3c867fe3af956bd7bb37343b54c9fe7445ef"}, ] [package.dependencies] @@ -23,13 +23,13 @@ tz = ["backports.zoneinfo"] [[package]] name = "astroid" -version = "3.1.0" +version = "3.2.2" description = "An abstract syntax tree for Python with inference support." optional = false python-versions = ">=3.8.0" files = [ - {file = "astroid-3.1.0-py3-none-any.whl", hash = "sha256:951798f922990137ac090c53af473db7ab4e70c770e6d7fae0cec59f74411819"}, - {file = "astroid-3.1.0.tar.gz", hash = "sha256:ac248253bfa4bd924a0de213707e7ebeeb3138abeb48d798784ead1e56d419d4"}, + {file = "astroid-3.2.2-py3-none-any.whl", hash = "sha256:e8a0083b4bb28fcffb6207a3bfc9e5d0a68be951dd7e336d5dcf639c682388c0"}, + {file = "astroid-3.2.2.tar.gz", hash = "sha256:8ead48e31b92b2e217b6c9733a21afafe479d52d6e164dd25fb1a770c7c3cf94"}, ] [package.dependencies] @@ -72,13 +72,13 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = "2024.2.2" +version = "2024.6.2" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, + {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, + {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, ] [[package]] @@ -329,22 +329,22 @@ files = [ [[package]] name = "importlib-metadata" -version = "7.1.0" +version = "8.0.0" description = "Read metadata from Python packages" optional = true python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, - {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, + {file = "importlib_metadata-8.0.0-py3-none-any.whl", hash = "sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"}, + {file = "importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "importlib-resources" @@ -441,13 +441,13 @@ tests = ["psutil", "pytest (!=3.3.0)", "pytest-cov"] [[package]] name = "mako" -version = "1.3.3" +version = "1.3.5" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." 
optional = true python-versions = ">=3.8" files = [ - {file = "Mako-1.3.3-py3-none-any.whl", hash = "sha256:5324b88089a8978bf76d1629774fcc2f1c07b82acdf00f4c5dd8ceadfffc4b40"}, - {file = "Mako-1.3.3.tar.gz", hash = "sha256:e16c01d9ab9c11f7290eef1cfefc093fb5a45ee4a3da09e2fec2e4d1bae54e73"}, + {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, + {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, ] [package.dependencies] @@ -540,45 +540,49 @@ files = [ [[package]] name = "mypy" -version = "0.981" +version = "1.10.1" description = "Optional static typing for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "mypy-0.981-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4bc460e43b7785f78862dab78674e62ec3cd523485baecfdf81a555ed29ecfa0"}, - {file = "mypy-0.981-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:756fad8b263b3ba39e4e204ee53042671b660c36c9017412b43af210ddee7b08"}, - {file = "mypy-0.981-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a16a0145d6d7d00fbede2da3a3096dcc9ecea091adfa8da48fa6a7b75d35562d"}, - {file = "mypy-0.981-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce65f70b14a21fdac84c294cde75e6dbdabbcff22975335e20827b3b94bdbf49"}, - {file = "mypy-0.981-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6e35d764784b42c3e256848fb8ed1d4292c9fc0098413adb28d84974c095b279"}, - {file = "mypy-0.981-cp310-cp310-win_amd64.whl", hash = "sha256:e53773073c864d5f5cec7f3fc72fbbcef65410cde8cc18d4f7242dea60dac52e"}, - {file = "mypy-0.981-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6ee196b1d10b8b215e835f438e06965d7a480f6fe016eddbc285f13955cca659"}, - {file = "mypy-0.981-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ad21d4c9d3673726cf986ea1d0c9fb66905258709550ddf7944c8f885f208be"}, - {file = "mypy-0.981-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d1debb09043e1f5ee845fa1e96d180e89115b30e47c5d3ce53bc967bab53f62d"}, - {file = "mypy-0.981-cp37-cp37m-win_amd64.whl", hash = "sha256:9f362470a3480165c4c6151786b5379351b790d56952005be18bdbdd4c7ce0ae"}, - {file = "mypy-0.981-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c9e0efb95ed6ca1654951bd5ec2f3fa91b295d78bf6527e026529d4aaa1e0c30"}, - {file = "mypy-0.981-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e178eaffc3c5cd211a87965c8c0df6da91ed7d258b5fc72b8e047c3771317ddb"}, - {file = "mypy-0.981-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:06e1eac8d99bd404ed8dd34ca29673c4346e76dd8e612ea507763dccd7e13c7a"}, - {file = "mypy-0.981-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa38f82f53e1e7beb45557ff167c177802ba7b387ad017eab1663d567017c8ee"}, - {file = "mypy-0.981-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:64e1f6af81c003f85f0dfed52db632817dabb51b65c0318ffbf5ff51995bbb08"}, - {file = "mypy-0.981-cp38-cp38-win_amd64.whl", hash = "sha256:e1acf62a8c4f7c092462c738aa2c2489e275ed386320c10b2e9bff31f6f7e8d6"}, - {file = "mypy-0.981-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b6ede64e52257931315826fdbfc6ea878d89a965580d1a65638ef77cb551f56d"}, - {file = "mypy-0.981-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eb3978b191b9fa0488524bb4ffedf2c573340e8c2b4206fc191d44c7093abfb7"}, - {file = "mypy-0.981-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77f8fcf7b4b3cc0c74fb33ae54a4cd00bb854d65645c48beccf65fa10b17882c"}, - {file = 
"mypy-0.981-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f64d2ce043a209a297df322eb4054dfbaa9de9e8738291706eaafda81ab2b362"}, - {file = "mypy-0.981-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2ee3dbc53d4df7e6e3b1c68ac6a971d3a4fb2852bf10a05fda228721dd44fae1"}, - {file = "mypy-0.981-cp39-cp39-win_amd64.whl", hash = "sha256:8e8e49aa9cc23aa4c926dc200ce32959d3501c4905147a66ce032f05cb5ecb92"}, - {file = "mypy-0.981-py3-none-any.whl", hash = "sha256:794f385653e2b749387a42afb1e14c2135e18daeb027e0d97162e4b7031210f8"}, - {file = "mypy-0.981.tar.gz", hash = "sha256:ad77c13037d3402fbeffda07d51e3f228ba078d1c7096a73759c9419ea031bf4"}, + {file = "mypy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e36f229acfe250dc660790840916eb49726c928e8ce10fbdf90715090fe4ae02"}, + {file = "mypy-1.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:51a46974340baaa4145363b9e051812a2446cf583dfaeba124af966fa44593f7"}, + {file = "mypy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:901c89c2d67bba57aaaca91ccdb659aa3a312de67f23b9dfb059727cce2e2e0a"}, + {file = "mypy-1.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0cd62192a4a32b77ceb31272d9e74d23cd88c8060c34d1d3622db3267679a5d9"}, + {file = "mypy-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:a2cbc68cb9e943ac0814c13e2452d2046c2f2b23ff0278e26599224cf164e78d"}, + {file = "mypy-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bd6f629b67bb43dc0d9211ee98b96d8dabc97b1ad38b9b25f5e4c4d7569a0c6a"}, + {file = "mypy-1.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a1bbb3a6f5ff319d2b9d40b4080d46cd639abe3516d5a62c070cf0114a457d84"}, + {file = "mypy-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8edd4e9bbbc9d7b79502eb9592cab808585516ae1bcc1446eb9122656c6066f"}, + {file = "mypy-1.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6166a88b15f1759f94a46fa474c7b1b05d134b1b61fca627dd7335454cc9aa6b"}, + {file = "mypy-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bb9cd11c01c8606a9d0b83ffa91d0b236a0e91bc4126d9ba9ce62906ada868e"}, + {file = "mypy-1.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d8681909f7b44d0b7b86e653ca152d6dff0eb5eb41694e163c6092124f8246d7"}, + {file = "mypy-1.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:378c03f53f10bbdd55ca94e46ec3ba255279706a6aacaecac52ad248f98205d3"}, + {file = "mypy-1.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bacf8f3a3d7d849f40ca6caea5c055122efe70e81480c8328ad29c55c69e93e"}, + {file = "mypy-1.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:701b5f71413f1e9855566a34d6e9d12624e9e0a8818a5704d74d6b0402e66c04"}, + {file = "mypy-1.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:3c4c2992f6ea46ff7fce0072642cfb62af7a2484efe69017ed8b095f7b39ef31"}, + {file = "mypy-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:604282c886497645ffb87b8f35a57ec773a4a2721161e709a4422c1636ddde5c"}, + {file = "mypy-1.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37fd87cab83f09842653f08de066ee68f1182b9b5282e4634cdb4b407266bade"}, + {file = "mypy-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8addf6313777dbb92e9564c5d32ec122bf2c6c39d683ea64de6a1fd98b90fe37"}, + {file = "mypy-1.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cc3ca0a244eb9a5249c7c583ad9a7e881aa5d7b73c35652296ddcdb33b2b9c7"}, + {file = "mypy-1.10.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:1b3a2ffce52cc4dbaeee4df762f20a2905aa171ef157b82192f2e2f368eec05d"}, + {file = "mypy-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe85ed6836165d52ae8b88f99527d3d1b2362e0cb90b005409b8bed90e9059b3"}, + {file = "mypy-1.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2ae450d60d7d020d67ab440c6e3fae375809988119817214440033f26ddf7bf"}, + {file = "mypy-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6be84c06e6abd72f960ba9a71561c14137a583093ffcf9bbfaf5e613d63fa531"}, + {file = "mypy-1.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2189ff1e39db399f08205e22a797383613ce1cb0cb3b13d8bcf0170e45b96cc3"}, + {file = "mypy-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:97a131ee36ac37ce9581f4220311247ab6cba896b4395b9c87af0675a13a755f"}, + {file = "mypy-1.10.1-py3-none-any.whl", hash = "sha256:71d8ac0b906354ebda8ef1673e5fde785936ac1f29ff6987c7483cfbd5a4235a"}, + {file = "mypy-1.10.1.tar.gz", hash = "sha256:1f8f492d7db9e3593ef42d4f115f04e556130f2819ad33ab84551403e97dd4c0"}, ] [package.dependencies] -mypy-extensions = ">=0.4.3" +mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=3.10" +typing-extensions = ">=4.1.0" [package.extras] dmypy = ["psutil (>=4.0)"] -python2 = ["typed-ast (>=1.4.0,<2)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] reports = ["lxml"] [[package]] @@ -692,13 +696,13 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "openpyxl" -version = "3.1.2" +version = "3.1.5" description = "A Python library to read/write Excel 2010 xlsx/xlsm files" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "openpyxl-3.1.2-py2.py3-none-any.whl", hash = "sha256:f91456ead12ab3c6c2e9491cf33ba6d08357d802192379bb482f1033ade496f5"}, - {file = "openpyxl-3.1.2.tar.gz", hash = "sha256:a6f5977418eff3b2d5500d54d9db50c8277a368436f4e4f8ddb1be3422870184"}, + {file = "openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2"}, + {file = "openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050"}, ] [package.dependencies] @@ -706,13 +710,13 @@ et-xmlfile = "*" [[package]] name = "packaging" -version = "24.0" +version = "24.1" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] [[package]] @@ -795,13 +799,13 @@ files = [ [[package]] name = "platformdirs" -version = "4.2.1" +version = "4.2.2" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.1-py3-none-any.whl", hash = "sha256:17d5a1161b3fd67b390023cb2d3b026bbd40abde6fdb052dfbd3a29c3ba22ee1"}, - {file = "platformdirs-4.2.1.tar.gz", hash = "sha256:031cd18d4ec63ec53e82dceaac0417d218a6863f7745dfcc9efe7793b7039bdf"}, + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, ] [package.extras] @@ -826,47 +830,47 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pyarrow" -version = "16.0.0" +version = "16.1.0" description = "Python library for Apache Arrow" optional = false python-versions = ">=3.8" files = [ - {file = "pyarrow-16.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:22a1fdb1254e5095d629e29cd1ea98ed04b4bbfd8e42cc670a6b639ccc208b60"}, - {file = "pyarrow-16.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:574a00260a4ed9d118a14770edbd440b848fcae5a3024128be9d0274dbcaf858"}, - {file = "pyarrow-16.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0815d0ddb733b8c1b53a05827a91f1b8bde6240f3b20bf9ba5d650eb9b89cdf"}, - {file = "pyarrow-16.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df0080339387b5d30de31e0a149c0c11a827a10c82f0c67d9afae3981d1aabb7"}, - {file = "pyarrow-16.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:edf38cce0bf0dcf726e074159c60516447e4474904c0033f018c1f33d7dac6c5"}, - {file = "pyarrow-16.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:91d28f9a40f1264eab2af7905a4d95320ac2f287891e9c8b0035f264fe3c3a4b"}, - {file = "pyarrow-16.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:99af421ee451a78884d7faea23816c429e263bd3618b22d38e7992c9ce2a7ad9"}, - {file = "pyarrow-16.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:d22d0941e6c7bafddf5f4c0662e46f2075850f1c044bf1a03150dd9e189427ce"}, - {file = "pyarrow-16.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:266ddb7e823f03733c15adc8b5078db2df6980f9aa93d6bb57ece615df4e0ba7"}, - {file = "pyarrow-16.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cc23090224b6594f5a92d26ad47465af47c1d9c079dd4a0061ae39551889efe"}, - {file = "pyarrow-16.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56850a0afe9ef37249d5387355449c0f94d12ff7994af88f16803a26d38f2016"}, - {file = "pyarrow-16.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:705db70d3e2293c2f6f8e84874b5b775f690465798f66e94bb2c07bab0a6bb55"}, - {file = "pyarrow-16.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:5448564754c154997bc09e95a44b81b9e31ae918a86c0fcb35c4aa4922756f55"}, - {file = "pyarrow-16.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:729f7b262aa620c9df8b9967db96c1575e4cfc8c25d078a06968e527b8d6ec05"}, - {file = "pyarrow-16.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:fb8065dbc0d051bf2ae2453af0484d99a43135cadabacf0af588a3be81fbbb9b"}, - {file = "pyarrow-16.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:20ce707d9aa390593ea93218b19d0eadab56390311cb87aad32c9a869b0e958c"}, - {file = "pyarrow-16.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5823275c8addbbb50cd4e6a6839952682a33255b447277e37a6f518d6972f4e1"}, - {file = "pyarrow-16.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1ab8b9050752b16a8b53fcd9853bf07d8daf19093533e990085168f40c64d978"}, - {file = "pyarrow-16.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:42e56557bc7c5c10d3e42c3b32f6cff649a29d637e8f4e8b311d334cc4326730"}, - {file = "pyarrow-16.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:2a7abdee4a4a7cfa239e2e8d721224c4b34ffe69a0ca7981354fe03c1328789b"}, - {file = "pyarrow-16.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:ef2f309b68396bcc5a354106741d333494d6a0d3e1951271849787109f0229a6"}, - {file = "pyarrow-16.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:ed66e5217b4526fa3585b5e39b0b82f501b88a10d36bd0d2a4d8aa7b5a48e2df"}, - {file = "pyarrow-16.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc8814310486f2a73c661ba8354540f17eef51e1b6dd090b93e3419d3a097b3a"}, - {file = "pyarrow-16.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c2f5e239db7ed43e0ad2baf46a6465f89c824cc703f38ef0fde927d8e0955f7"}, - {file = "pyarrow-16.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f293e92d1db251447cb028ae12f7bc47526e4649c3a9924c8376cab4ad6b98bd"}, - {file = "pyarrow-16.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:dd9334a07b6dc21afe0857aa31842365a62eca664e415a3f9536e3a8bb832c07"}, - {file = "pyarrow-16.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:d91073d1e2fef2c121154680e2ba7e35ecf8d4969cc0af1fa6f14a8675858159"}, - {file = "pyarrow-16.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:71d52561cd7aefd22cf52538f262850b0cc9e4ec50af2aaa601da3a16ef48877"}, - {file = "pyarrow-16.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:b93c9a50b965ee0bf4fef65e53b758a7e8dcc0c2d86cebcc037aaaf1b306ecc0"}, - {file = "pyarrow-16.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d831690844706e374c455fba2fb8cfcb7b797bfe53ceda4b54334316e1ac4fa4"}, - {file = "pyarrow-16.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35692ce8ad0b8c666aa60f83950957096d92f2a9d8d7deda93fb835e6053307e"}, - {file = "pyarrow-16.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9dd3151d098e56f16a8389c1247137f9e4c22720b01c6f3aa6dec29a99b74d80"}, - {file = "pyarrow-16.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:bd40467bdb3cbaf2044ed7a6f7f251c8f941c8b31275aaaf88e746c4f3ca4a7a"}, - {file = "pyarrow-16.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:00a1dcb22ad4ceb8af87f7bd30cc3354788776c417f493089e0a0af981bc8d80"}, - {file = "pyarrow-16.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:fda9a7cebd1b1d46c97b511f60f73a5b766a6de4c5236f144f41a5d5afec1f35"}, - {file = "pyarrow-16.0.0.tar.gz", hash = "sha256:59bb1f1edbbf4114c72415f039f1359f1a57d166a331c3229788ccbfbb31689a"}, + {file = "pyarrow-16.1.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:17e23b9a65a70cc733d8b738baa6ad3722298fa0c81d88f63ff94bf25eaa77b9"}, + {file = "pyarrow-16.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4740cc41e2ba5d641071d0ab5e9ef9b5e6e8c7611351a5cb7c1d175eaf43674a"}, + {file = "pyarrow-16.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98100e0268d04e0eec47b73f20b39c45b4006f3c4233719c3848aa27a03c1aef"}, + {file = "pyarrow-16.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f68f409e7b283c085f2da014f9ef81e885d90dcd733bd648cfba3ef265961848"}, + {file = "pyarrow-16.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:a8914cd176f448e09746037b0c6b3a9d7688cef451ec5735094055116857580c"}, + {file = 
"pyarrow-16.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:48be160782c0556156d91adbdd5a4a7e719f8d407cb46ae3bb4eaee09b3111bd"}, + {file = "pyarrow-16.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9cf389d444b0f41d9fe1444b70650fea31e9d52cfcb5f818b7888b91b586efff"}, + {file = "pyarrow-16.1.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:d0ebea336b535b37eee9eee31761813086d33ed06de9ab6fc6aaa0bace7b250c"}, + {file = "pyarrow-16.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e73cfc4a99e796727919c5541c65bb88b973377501e39b9842ea71401ca6c1c"}, + {file = "pyarrow-16.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf9251264247ecfe93e5f5a0cd43b8ae834f1e61d1abca22da55b20c788417f6"}, + {file = "pyarrow-16.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddf5aace92d520d3d2a20031d8b0ec27b4395cab9f74e07cc95edf42a5cc0147"}, + {file = "pyarrow-16.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:25233642583bf658f629eb230b9bb79d9af4d9f9229890b3c878699c82f7d11e"}, + {file = "pyarrow-16.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a33a64576fddfbec0a44112eaf844c20853647ca833e9a647bfae0582b2ff94b"}, + {file = "pyarrow-16.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:185d121b50836379fe012753cf15c4ba9638bda9645183ab36246923875f8d1b"}, + {file = "pyarrow-16.1.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:2e51ca1d6ed7f2e9d5c3c83decf27b0d17bb207a7dea986e8dc3e24f80ff7d6f"}, + {file = "pyarrow-16.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06ebccb6f8cb7357de85f60d5da50e83507954af617d7b05f48af1621d331c9a"}, + {file = "pyarrow-16.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b04707f1979815f5e49824ce52d1dceb46e2f12909a48a6a753fe7cafbc44a0c"}, + {file = "pyarrow-16.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d32000693deff8dc5df444b032b5985a48592c0697cb6e3071a5d59888714e2"}, + {file = "pyarrow-16.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:8785bb10d5d6fd5e15d718ee1d1f914fe768bf8b4d1e5e9bf253de8a26cb1628"}, + {file = "pyarrow-16.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:e1369af39587b794873b8a307cc6623a3b1194e69399af0efd05bb202195a5a7"}, + {file = "pyarrow-16.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:febde33305f1498f6df85e8020bca496d0e9ebf2093bab9e0f65e2b4ae2b3444"}, + {file = "pyarrow-16.1.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b5f5705ab977947a43ac83b52ade3b881eb6e95fcc02d76f501d549a210ba77f"}, + {file = "pyarrow-16.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0d27bf89dfc2576f6206e9cd6cf7a107c9c06dc13d53bbc25b0bd4556f19cf5f"}, + {file = "pyarrow-16.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d07de3ee730647a600037bc1d7b7994067ed64d0eba797ac74b2bc77384f4c2"}, + {file = "pyarrow-16.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbef391b63f708e103df99fbaa3acf9f671d77a183a07546ba2f2c297b361e83"}, + {file = "pyarrow-16.1.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:19741c4dbbbc986d38856ee7ddfdd6a00fc3b0fc2d928795b95410d38bb97d15"}, + {file = "pyarrow-16.1.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:f2c5fb249caa17b94e2b9278b36a05ce03d3180e6da0c4c3b3ce5b2788f30eed"}, + {file = "pyarrow-16.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:e6b6d3cd35fbb93b70ade1336022cc1147b95ec6af7d36906ca7fe432eb09710"}, + {file = "pyarrow-16.1.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = 
"sha256:18da9b76a36a954665ccca8aa6bd9f46c1145f79c0bb8f4f244f5f8e799bca55"}, + {file = "pyarrow-16.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:99f7549779b6e434467d2aa43ab2b7224dd9e41bdde486020bae198978c9e05e"}, + {file = "pyarrow-16.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f07fdffe4fd5b15f5ec15c8b64584868d063bc22b86b46c9695624ca3505b7b4"}, + {file = "pyarrow-16.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddfe389a08ea374972bd4065d5f25d14e36b43ebc22fc75f7b951f24378bf0b5"}, + {file = "pyarrow-16.1.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:3b20bd67c94b3a2ea0a749d2a5712fc845a69cb5d52e78e6449bbd295611f3aa"}, + {file = "pyarrow-16.1.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:ba8ac20693c0bb0bf4b238751d4409e62852004a8cf031c73b0e0962b03e45e3"}, + {file = "pyarrow-16.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:31a1851751433d89a986616015841977e0a188662fcffd1a5677453f1df2de0a"}, + {file = "pyarrow-16.1.0.tar.gz", hash = "sha256:15fbb22ea96d11f0b5768504a3f961edab25eaf4197c341720c4a387f6c60315"}, ] [package.dependencies] @@ -874,17 +878,17 @@ numpy = ">=1.16.6" [[package]] name = "pylint" -version = "3.1.0" +version = "3.2.5" description = "python code static checker" optional = false python-versions = ">=3.8.0" files = [ - {file = "pylint-3.1.0-py3-none-any.whl", hash = "sha256:507a5b60953874766d8a366e8e8c7af63e058b26345cfcb5f91f89d987fd6b74"}, - {file = "pylint-3.1.0.tar.gz", hash = "sha256:6a69beb4a6f63debebaab0a3477ecd0f559aa726af4954fc948c51f7a2549e23"}, + {file = "pylint-3.2.5-py3-none-any.whl", hash = "sha256:32cd6c042b5004b8e857d727708720c54a676d1e22917cf1a2df9b4d4868abd6"}, + {file = "pylint-3.2.5.tar.gz", hash = "sha256:e9b7171e242dcc6ebd0aaa7540481d1a72860748a0a7816b8fe6cf6c80a6fe7e"}, ] [package.dependencies] -astroid = ">=3.1.0,<=3.2.0-dev0" +astroid = ">=3.2.2,<=3.3.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, @@ -980,13 +984,13 @@ files = [ [[package]] name = "requests" -version = "2.31.0" +version = "2.32.3" description = "Python HTTP for Humans." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -1012,64 +1016,64 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.29" +version = "2.0.31" description = "Database Abstraction Library" optional = true python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c142852ae192e9fe5aad5c350ea6befe9db14370b34047e1f0f7cf99e63c63b"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:99a1e69d4e26f71e750e9ad6fdc8614fbddb67cfe2173a3628a2566034e223c7"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef3fbccb4058355053c51b82fd3501a6e13dd808c8d8cd2561e610c5456013c"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d6753305936eddc8ed190e006b7bb33a8f50b9854823485eed3a886857ab8d1"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0f3ca96af060a5250a8ad5a63699180bc780c2edf8abf96c58af175921df847a"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c4520047006b1d3f0d89e0532978c0688219857eb2fee7c48052560ae76aca1e"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-win32.whl", hash = "sha256:b2a0e3cf0caac2085ff172c3faacd1e00c376e6884b5bc4dd5b6b84623e29e4f"}, - {file = "SQLAlchemy-2.0.29-cp310-cp310-win_amd64.whl", hash = "sha256:01d10638a37460616708062a40c7b55f73e4d35eaa146781c683e0fa7f6c43fb"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:308ef9cb41d099099fffc9d35781638986870b29f744382904bf9c7dadd08513"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:296195df68326a48385e7a96e877bc19aa210e485fa381c5246bc0234c36c78e"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a13b917b4ffe5a0a31b83d051d60477819ddf18276852ea68037a144a506efb9"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f6d971255d9ddbd3189e2e79d743ff4845c07f0633adfd1de3f63d930dbe673"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:61405ea2d563407d316c63a7b5271ae5d274a2a9fbcd01b0aa5503635699fa1e"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:de7202ffe4d4a8c1e3cde1c03e01c1a3772c92858837e8f3879b497158e4cb44"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-win32.whl", hash = "sha256:b5d7ed79df55a731749ce65ec20d666d82b185fa4898430b17cb90c892741520"}, - {file = "SQLAlchemy-2.0.29-cp311-cp311-win_amd64.whl", hash = "sha256:205f5a2b39d7c380cbc3b5dcc8f2762fb5bcb716838e2d26ccbc54330775b003"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d96710d834a6fb31e21381c6d7b76ec729bd08c75a25a5184b1089141356171f"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:52de4736404e53c5c6a91ef2698c01e52333988ebdc218f14c833237a0804f1b"}, - 
{file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c7b02525ede2a164c5fa5014915ba3591730f2cc831f5be9ff3b7fd3e30958e"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dfefdb3e54cd15f5d56fd5ae32f1da2d95d78319c1f6dfb9bcd0eb15d603d5d"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a88913000da9205b13f6f195f0813b6ffd8a0c0c2bd58d499e00a30eb508870c"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fecd5089c4be1bcc37c35e9aa678938d2888845a134dd016de457b942cf5a758"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-win32.whl", hash = "sha256:8197d6f7a3d2b468861ebb4c9f998b9df9e358d6e1cf9c2a01061cb9b6cf4e41"}, - {file = "SQLAlchemy-2.0.29-cp312-cp312-win_amd64.whl", hash = "sha256:9b19836ccca0d321e237560e475fd99c3d8655d03da80c845c4da20dda31b6e1"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:87a1d53a5382cdbbf4b7619f107cc862c1b0a4feb29000922db72e5a66a5ffc0"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a0732dffe32333211801b28339d2a0babc1971bc90a983e3035e7b0d6f06b93"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90453597a753322d6aa770c5935887ab1fc49cc4c4fdd436901308383d698b4b"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ea311d4ee9a8fa67f139c088ae9f905fcf0277d6cd75c310a21a88bf85e130f5"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5f20cb0a63a3e0ec4e169aa8890e32b949c8145983afa13a708bc4b0a1f30e03"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-win32.whl", hash = "sha256:e5bbe55e8552019c6463709b39634a5fc55e080d0827e2a3a11e18eb73f5cdbd"}, - {file = "SQLAlchemy-2.0.29-cp37-cp37m-win_amd64.whl", hash = "sha256:c2f9c762a2735600654c654bf48dad388b888f8ce387b095806480e6e4ff6907"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e614d7a25a43a9f54fcce4675c12761b248547f3d41b195e8010ca7297c369c"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:471fcb39c6adf37f820350c28aac4a7df9d3940c6548b624a642852e727ea586"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:988569c8732f54ad3234cf9c561364221a9e943b78dc7a4aaf35ccc2265f1930"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dddaae9b81c88083e6437de95c41e86823d150f4ee94bf24e158a4526cbead01"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:334184d1ab8f4c87f9652b048af3f7abea1c809dfe526fb0435348a6fef3d380"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:38b624e5cf02a69b113c8047cf7f66b5dfe4a2ca07ff8b8716da4f1b3ae81567"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-win32.whl", hash = "sha256:bab41acf151cd68bc2b466deae5deeb9e8ae9c50ad113444151ad965d5bf685b"}, - {file = "SQLAlchemy-2.0.29-cp38-cp38-win_amd64.whl", hash = "sha256:52c8011088305476691b8750c60e03b87910a123cfd9ad48576d6414b6ec2a1d"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3071ad498896907a5ef756206b9dc750f8e57352113c19272bdfdc429c7bd7de"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dba622396a3170974f81bad49aacebd243455ec3cc70615aeaef9e9613b5bca5"}, - {file = 
"SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b184e3de58009cc0bf32e20f137f1ec75a32470f5fede06c58f6c355ed42a72"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c37f1050feb91f3d6c32f864d8e114ff5545a4a7afe56778d76a9aec62638ba"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bda7ce59b06d0f09afe22c56714c65c957b1068dee3d5e74d743edec7daba552"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:25664e18bef6dc45015b08f99c63952a53a0a61f61f2e48a9e70cec27e55f699"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-win32.whl", hash = "sha256:77d29cb6c34b14af8a484e831ab530c0f7188f8efed1c6a833a2c674bf3c26ec"}, - {file = "SQLAlchemy-2.0.29-cp39-cp39-win_amd64.whl", hash = "sha256:04c487305ab035a9548f573763915189fc0fe0824d9ba28433196f8436f1449c"}, - {file = "SQLAlchemy-2.0.29-py3-none-any.whl", hash = "sha256:dc4ee2d4ee43251905f88637d5281a8d52e916a021384ec10758826f5cbae305"}, - {file = "SQLAlchemy-2.0.29.tar.gz", hash = "sha256:bd9566b8e58cabd700bc367b60e90d9349cd16f0984973f98a9a09f9c64e86f0"}, + {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f2a213c1b699d3f5768a7272de720387ae0122f1becf0901ed6eaa1abd1baf6c"}, + {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9fea3d0884e82d1e33226935dac990b967bef21315cbcc894605db3441347443"}, + {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ad7f221d8a69d32d197e5968d798217a4feebe30144986af71ada8c548e9fa"}, + {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2bee229715b6366f86a95d497c347c22ddffa2c7c96143b59a2aa5cc9eebbc"}, + {file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cd5b94d4819c0c89280b7c6109c7b788a576084bf0a480ae17c227b0bc41e109"}, + {file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:750900a471d39a7eeba57580b11983030517a1f512c2cb287d5ad0fcf3aebd58"}, + {file = "SQLAlchemy-2.0.31-cp310-cp310-win32.whl", hash = "sha256:7bd112be780928c7f493c1a192cd8c5fc2a2a7b52b790bc5a84203fb4381c6be"}, + {file = "SQLAlchemy-2.0.31-cp310-cp310-win_amd64.whl", hash = "sha256:5a48ac4d359f058474fadc2115f78a5cdac9988d4f99eae44917f36aa1476327"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f68470edd70c3ac3b6cd5c2a22a8daf18415203ca1b036aaeb9b0fb6f54e8298"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e2c38c2a4c5c634fe6c3c58a789712719fa1bf9b9d6ff5ebfce9a9e5b89c1ca"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd15026f77420eb2b324dcb93551ad9c5f22fab2c150c286ef1dc1160f110203"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2196208432deebdfe3b22185d46b08f00ac9d7b01284e168c212919891289396"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:352b2770097f41bff6029b280c0e03b217c2dcaddc40726f8f53ed58d8a85da4"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:56d51ae825d20d604583f82c9527d285e9e6d14f9a5516463d9705dab20c3740"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-win32.whl", hash = "sha256:6e2622844551945db81c26a02f27d94145b561f9d4b0c39ce7bfd2fda5776dac"}, + {file = "SQLAlchemy-2.0.31-cp311-cp311-win_amd64.whl", hash = 
"sha256:ccaf1b0c90435b6e430f5dd30a5aede4764942a695552eb3a4ab74ed63c5b8d3"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3b74570d99126992d4b0f91fb87c586a574a5872651185de8297c6f90055ae42"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f77c4f042ad493cb8595e2f503c7a4fe44cd7bd59c7582fd6d78d7e7b8ec52c"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd1591329333daf94467e699e11015d9c944f44c94d2091f4ac493ced0119449"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74afabeeff415e35525bf7a4ecdab015f00e06456166a2eba7590e49f8db940e"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b9c01990d9015df2c6f818aa8f4297d42ee71c9502026bb074e713d496e26b67"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:66f63278db425838b3c2b1c596654b31939427016ba030e951b292e32b99553e"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-win32.whl", hash = "sha256:0b0f658414ee4e4b8cbcd4a9bb0fd743c5eeb81fc858ca517217a8013d282c96"}, + {file = "SQLAlchemy-2.0.31-cp312-cp312-win_amd64.whl", hash = "sha256:fa4b1af3e619b5b0b435e333f3967612db06351217c58bfb50cee5f003db2a5a"}, + {file = "SQLAlchemy-2.0.31-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f43e93057cf52a227eda401251c72b6fbe4756f35fa6bfebb5d73b86881e59b0"}, + {file = "SQLAlchemy-2.0.31-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d337bf94052856d1b330d5fcad44582a30c532a2463776e1651bd3294ee7e58b"}, + {file = "SQLAlchemy-2.0.31-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c06fb43a51ccdff3b4006aafee9fcf15f63f23c580675f7734245ceb6b6a9e05"}, + {file = "SQLAlchemy-2.0.31-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:b6e22630e89f0e8c12332b2b4c282cb01cf4da0d26795b7eae16702a608e7ca1"}, + {file = "SQLAlchemy-2.0.31-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:79a40771363c5e9f3a77f0e28b3302801db08040928146e6808b5b7a40749c88"}, + {file = "SQLAlchemy-2.0.31-cp37-cp37m-win32.whl", hash = "sha256:501ff052229cb79dd4c49c402f6cb03b5a40ae4771efc8bb2bfac9f6c3d3508f"}, + {file = "SQLAlchemy-2.0.31-cp37-cp37m-win_amd64.whl", hash = "sha256:597fec37c382a5442ffd471f66ce12d07d91b281fd474289356b1a0041bdf31d"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dc6d69f8829712a4fd799d2ac8d79bdeff651c2301b081fd5d3fe697bd5b4ab9"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:23b9fbb2f5dd9e630db70fbe47d963c7779e9c81830869bd7d137c2dc1ad05fb"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a21c97efcbb9f255d5c12a96ae14da873233597dfd00a3a0c4ce5b3e5e79704"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26a6a9837589c42b16693cf7bf836f5d42218f44d198f9343dd71d3164ceeeac"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc251477eae03c20fae8db9c1c23ea2ebc47331bcd73927cdcaecd02af98d3c3"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2fd17e3bb8058359fa61248c52c7b09a97cf3c820e54207a50af529876451808"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-win32.whl", hash = "sha256:c76c81c52e1e08f12f4b6a07af2b96b9b15ea67ccdd40ae17019f1c373faa227"}, + {file = "SQLAlchemy-2.0.31-cp38-cp38-win_amd64.whl", hash = 
"sha256:4b600e9a212ed59355813becbcf282cfda5c93678e15c25a0ef896b354423238"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b6cf796d9fcc9b37011d3f9936189b3c8074a02a4ed0c0fbbc126772c31a6d4"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:78fe11dbe37d92667c2c6e74379f75746dc947ee505555a0197cfba9a6d4f1a4"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fc47dc6185a83c8100b37acda27658fe4dbd33b7d5e7324111f6521008ab4fe"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a41514c1a779e2aa9a19f67aaadeb5cbddf0b2b508843fcd7bafdf4c6864005"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:afb6dde6c11ea4525318e279cd93c8734b795ac8bb5dda0eedd9ebaca7fa23f1"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3f9faef422cfbb8fd53716cd14ba95e2ef655400235c3dfad1b5f467ba179c8c"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-win32.whl", hash = "sha256:fc6b14e8602f59c6ba893980bea96571dd0ed83d8ebb9c4479d9ed5425d562e9"}, + {file = "SQLAlchemy-2.0.31-cp39-cp39-win_amd64.whl", hash = "sha256:3cb8a66b167b033ec72c3812ffc8441d4e9f5f78f5e31e54dcd4c90a4ca5bebc"}, + {file = "SQLAlchemy-2.0.31-py3-none-any.whl", hash = "sha256:69f3e3c08867a8e4856e92d7afb618b95cdee18e0bc1647b77599722c9a28911"}, + {file = "SQLAlchemy-2.0.31.tar.gz", hash = "sha256:b607489dd4a54de56984a0c7656247504bd5523d9d0ba799aef59d4add009484"}, ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +greenlet = {version = "!=0.4.17", markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} typing-extensions = ">=4.6.0" [package.extras] @@ -1128,24 +1132,24 @@ files = [ [[package]] name = "tomlkit" -version = "0.12.4" +version = "0.12.5" description = "Style preserving TOML library" optional = false python-versions = ">=3.7" files = [ - {file = "tomlkit-0.12.4-py3-none-any.whl", hash = "sha256:5cd82d48a3dd89dee1f9d64420aa20ae65cfbd00668d6f094d7578a78efbb77b"}, - {file = "tomlkit-0.12.4.tar.gz", hash = "sha256:7ca1cfc12232806517a8515047ba66a19369e71edf2439d0f5824f91032b6cc3"}, + {file = "tomlkit-0.12.5-py3-none-any.whl", hash = "sha256:af914f5a9c59ed9d0762c7b64d3b5d5df007448eb9cd2edc8a46b1eafead172f"}, + {file = "tomlkit-0.12.5.tar.gz", hash = "sha256:eef34fba39834d4d6b73c9ba7f3e4d1c417a4e56f89a7e96e090dd0d24b8fb3c"}, ] [[package]] name = "typing-extensions" -version = "4.11.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, - {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash 
= "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] @@ -1161,13 +1165,13 @@ files = [ [[package]] name = "urllib3" -version = "2.2.1" +version = "2.2.2" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, - {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, ] [package.extras] @@ -1178,18 +1182,18 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "zipp" -version = "3.18.1" +version = "3.19.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = true python-versions = ">=3.8" files = [ - {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, - {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, + {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, + {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [extras] alembic = ["alembic", "sqlalchemy"] @@ -1198,4 +1202,4 @@ sqlalchemy = ["sqlalchemy"] [metadata] lock-version = "2.0" python-versions = "^3.8.0" -content-hash = "d9fa00df9e0a90e8d44ee6bb88d291acd2f48c9c96d2a979fb06ad6e6de66529" +content-hash = "f99292898ce01a04566c5deaea9b46646881bf816d8d755e89446c2e8b8a616e" diff --git a/pyproject.toml b/pyproject.toml index 4320d4eb..e1476c3f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,8 +20,8 @@ lz4 = "^4.0.2" requests = "^2.18.1" oauthlib = "^3.1.0" numpy = [ - { version = ">=1.16.6", python = ">=3.8,<3.11" }, - { version = ">=1.23.4", python = ">=3.11" }, + { version = "^1.16.6", python = ">=3.8,<3.11" }, + { version = "^1.23.4", python = ">=3.11" }, ] sqlalchemy = { version = ">=2.0.21", optional = true } openpyxl = "^3.0.10" @@ -34,7 +34,7 @@ alembic = ["sqlalchemy", "alembic"] [tool.poetry.dev-dependencies] pytest = "^7.1.2" -mypy = "^0.981" +mypy = "^1.10.1" pylint = ">=2.12.0" black = "^22.3.0" pytest-dotenv = "^0.5.2" diff --git a/src/databricks/sql/auth/auth.py b/src/databricks/sql/auth/auth.py old mode 100644 new mode 100755 index fcb2219e..40376b90 --- a/src/databricks/sql/auth/auth.py +++ b/src/databricks/sql/auth/auth.py @@ -1,5 
+1,5 @@ from enum import Enum -from typing import List +from typing import Optional, List from databricks.sql.auth.authenticators import ( AuthProvider, @@ -21,15 +21,15 @@ class ClientContext: def __init__( self, hostname: str, - username: str = None, - password: str = None, - access_token: str = None, - auth_type: str = None, - oauth_scopes: List[str] = None, - oauth_client_id: str = None, - oauth_redirect_port_range: List[int] = None, - use_cert_as_auth: str = None, - tls_client_cert_file: str = None, + username: Optional[str] = None, + password: Optional[str] = None, + access_token: Optional[str] = None, + auth_type: Optional[str] = None, + oauth_scopes: Optional[List[str]] = None, + oauth_client_id: Optional[str] = None, + oauth_redirect_port_range: Optional[List[int]] = None, + use_cert_as_auth: Optional[str] = None, + tls_client_cert_file: Optional[str] = None, oauth_persistence=None, credentials_provider=None, ): diff --git a/src/databricks/sql/auth/retry.py b/src/databricks/sql/auth/retry.py old mode 100644 new mode 100755 index eee6f839..d2f774ed --- a/src/databricks/sql/auth/retry.py +++ b/src/databricks/sql/auth/retry.py @@ -7,7 +7,7 @@ # We only use this import for type hinting try: # If urllib3~=2.0 is installed - from urllib3 import BaseHTTPResponse # type: ignore + from urllib3 import BaseHTTPResponse except ImportError: # If urllib3~=1.0 is installed from urllib3 import HTTPResponse as BaseHTTPResponse @@ -129,7 +129,7 @@ def __init__( urllib3_kwargs.update(**_urllib_kwargs_we_care_about) super().__init__( - **urllib3_kwargs, # type: ignore + **urllib3_kwargs, ) @classmethod @@ -162,7 +162,9 @@ def __private_init__( new_object.command_type = command_type return new_object - def new(self, **urllib3_incremented_counters: typing.Any) -> Retry: + def new( + self, **urllib3_incremented_counters: typing.Any + ) -> "DatabricksRetryPolicy": """This method is responsible for passing the entire Retry state to its next iteration. 
urllib3 calls Retry.new() between successive requests as part of its `.increment()` method @@ -210,7 +212,7 @@ def new(self, **urllib3_incremented_counters: typing.Any) -> Retry: other=self.other, allowed_methods=self.allowed_methods, status_forcelist=self.status_forcelist, - backoff_factor=self.backoff_factor, # type: ignore + backoff_factor=self.backoff_factor, raise_on_redirect=self.raise_on_redirect, raise_on_status=self.raise_on_status, history=self.history, @@ -222,7 +224,7 @@ def new(self, **urllib3_incremented_counters: typing.Any) -> Retry: urllib3_init_params.update(**urllib3_incremented_counters) # Include urllib3's current state in our __init__ params - databricks_init_params["urllib3_kwargs"].update(**urllib3_init_params) # type: ignore + databricks_init_params["urllib3_kwargs"].update(**urllib3_init_params) # type: ignore[attr-defined] return type(self).__private_init__( retry_start_time=self._retry_start_time, @@ -274,7 +276,7 @@ def check_proposed_wait(self, proposed_wait: Union[int, float]) -> None: f"Retry request would exceed Retry policy max retry duration of {self.stop_after_attempts_duration} seconds" ) - def sleep_for_retry(self, response: BaseHTTPResponse) -> bool: # type: ignore + def sleep_for_retry(self, response: BaseHTTPResponse) -> bool: """Sleeps for the duration specified in the response Retry-After header, if present A MaxRetryDurationError will be raised if doing so would exceed self.max_attempts_duration @@ -349,7 +351,7 @@ def should_retry(self, method: str, status_code: int) -> Tuple[bool, str]: raise NonRecoverableNetworkError("Received code 501 from server.") # Request failed and this method is not retryable. We only retry POST requests. - if not self._is_method_retryable(method): # type: ignore + if not self._is_method_retryable(method): return False, "Only POST requests are retried" # Request failed with 404 and was a GetOperationStatus. This is not recoverable. Don't retry. 
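For context on the `new()` signature change just above: annotating the override with the subclass type is the standard covariant-return pattern, which newer mypy accepts without the previous `# type: ignore`. A minimal, self-contained sketch of that pattern (the class bodies here are placeholders, not the connector's real retry logic):

```python
from typing import Any


class Retry:
    """Stand-in for urllib3.util.retry.Retry, reduced to the relevant method."""

    def new(self, **kwargs: Any) -> "Retry":
        return type(self)()


class DatabricksRetryPolicy(Retry):
    """Stand-in for the connector's retry policy subclass."""

    # Narrowing the return annotation from Retry to the subclass is a
    # covariant-return override, which newer mypy accepts; it also lets
    # callers keep the precise type without a cast or "# type: ignore".
    def new(self, **kwargs: Any) -> "DatabricksRetryPolicy":
        return DatabricksRetryPolicy()


policy: DatabricksRetryPolicy = DatabricksRetryPolicy().new()
```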
diff --git a/src/databricks/sql/client.py b/src/databricks/sql/client.py old mode 100644 new mode 100755 index e9e2978f..c5962147 --- a/src/databricks/sql/client.py +++ b/src/databricks/sql/client.py @@ -59,7 +59,7 @@ def __init__( http_path: str, access_token: Optional[str] = None, http_headers: Optional[List[Tuple[str, str]]] = None, - session_configuration: Dict[str, Any] = None, + session_configuration: Optional[Dict[str, Any]] = None, catalog: Optional[str] = None, schema: Optional[str] = None, _use_arrow_native_complex_types: Optional[bool] = True, @@ -460,9 +460,9 @@ def _normalize_tparametersequence( output: List[TDbsqlParameter] = [] for p in params: if isinstance(p, DbsqlParameterBase): - output.append(p) # type: ignore + output.append(p) else: - output.append(dbsql_parameter_from_primitive(value=p)) # type: ignore + output.append(dbsql_parameter_from_primitive(value=p)) return output @@ -640,7 +640,7 @@ def _handle_staging_operation( ) def _handle_staging_put( - self, presigned_url: str, local_file: str, headers: dict = None + self, presigned_url: str, local_file: str, headers: Optional[dict] = None ): """Make an HTTP PUT request @@ -655,7 +655,7 @@ def _handle_staging_put( # fmt: off # Design borrowed from: https://stackoverflow.com/a/2342589/5093960 - + OK = requests.codes.ok # 200 CREATED = requests.codes.created # 201 ACCEPTED = requests.codes.accepted # 202 @@ -675,7 +675,7 @@ def _handle_staging_put( ) def _handle_staging_get( - self, local_file: str, presigned_url: str, headers: dict = None + self, local_file: str, presigned_url: str, headers: Optional[dict] = None ): """Make an HTTP GET request, create a local file with the received data @@ -697,7 +697,9 @@ def _handle_staging_get( with open(local_file, "wb") as fp: fp.write(r.content) - def _handle_staging_remove(self, presigned_url: str, headers: dict = None): + def _handle_staging_remove( + self, presigned_url: str, headers: Optional[dict] = None + ): """Make an HTTP DELETE request to the presigned_url""" r = requests.delete(url=presigned_url, headers=headers) @@ -757,7 +759,7 @@ def execute( normalized_parameters = self._normalize_tparametercollection(parameters) param_structure = self._determine_parameter_structure(normalized_parameters) transformed_operation = transform_paramstyle( - operation, normalized_parameters, param_structure # type: ignore + operation, normalized_parameters, param_structure ) prepared_operation, prepared_params = self._prepare_native_parameters( transformed_operation, normalized_parameters, param_structure @@ -861,7 +863,7 @@ def tables( catalog_name: Optional[str] = None, schema_name: Optional[str] = None, table_name: Optional[str] = None, - table_types: List[str] = None, + table_types: Optional[List[str]] = None, ) -> "Cursor": """ Get tables corresponding to the catalog_name, schema_name and table_name. diff --git a/src/databricks/sql/thrift_backend.py b/src/databricks/sql/thrift_backend.py index e7b6dfd1..79293e85 100644 --- a/src/databricks/sql/thrift_backend.py +++ b/src/databricks/sql/thrift_backend.py @@ -437,11 +437,11 @@ def attempt_request(attempt): # log.info for errors we believe are not unusual or unexpected. log.warn for # for others like EEXIST, EBADF, ERANGE which are not expected in this context. # - # I manually tested this retry behaviour using mitmweb and confirmed that + # I manually tested this retry behaviour using mitmweb and confirmed that # GetOperationStatus requests are retried when I forced network connection # interruptions / timeouts / reconnects. 
See #24 for more info. # | Debian | Darwin | - info_errs = [ # |--------|--------| + info_errs = [ # |--------|--------| errno.ESHUTDOWN, # | 32 | 32 | errno.EAFNOSUPPORT, # | 97 | 47 | errno.ECONNRESET, # | 104 | 54 | diff --git a/src/databricks/sql/utils.py b/src/databricks/sql/utils.py index 6063eca1..950b2898 100644 --- a/src/databricks/sql/utils.py +++ b/src/databricks/sql/utils.py @@ -48,7 +48,7 @@ def build_queue( arrow_schema_bytes: bytes, max_download_threads: int, lz4_compressed: bool = True, - description: List[List[Any]] = None, + description: Optional[List[List[Any]]] = None, ) -> ResultSetQueue: """ Factory method to build a result set queue. @@ -136,7 +136,7 @@ def __init__( start_row_offset: int = 0, result_links: Optional[List[TSparkArrowResultLink]] = None, lz4_compressed: bool = True, - description: List[List[Any]] = None, + description: Optional[List[List[Any]]] = None, ): """ A queue-like wrapper over CloudFetch arrow batches. diff --git a/src/databricks/sqlalchemy/README.sqlalchemy.md b/src/databricks/sqlalchemy/README.sqlalchemy.md index 976c3455..8aa51973 100644 --- a/src/databricks/sqlalchemy/README.sqlalchemy.md +++ b/src/databricks/sqlalchemy/README.sqlalchemy.md @@ -181,7 +181,7 @@ with engine.connect() as conn: df.to_sql('squares',conn) ``` -## [`PrimaryKey()`](https://docs.sqlalchemy.org/en/20/core/constraints.html#sqlalchemy.schema.PrimaryKeyConstraint) and [`ForeignKey()`](https://docs.sqlalchemy.org/en/20/core/constraints.html#defining-foreign-keys) +## [`PrimaryKey()`](https://docs.sqlalchemy.org/en/20/core/constraints.html#sqlalchemy.schema.PrimaryKeyConstraint) and [`ForeignKey()`](https://docs.sqlalchemy.org/en/20/core/constraints.html#defining-foreign-keys) Unity Catalog workspaces in Databricks support PRIMARY KEY and FOREIGN KEY constraints. _Note that Databricks Runtime does not enforce the integrity of FOREIGN KEY constraints_. You can establish a primary key by setting `primary_key=True` when defining a column. @@ -201,4 +201,3 @@ users = Table( Column("manager_id", ForeignKey("users.id", name="fk_users_manager_id_x_users_id", use_alter=True)) ) ``` - diff --git a/src/databricks/sqlalchemy/README.tests.md b/src/databricks/sqlalchemy/README.tests.md index d005daa3..3ed92aba 100644 --- a/src/databricks/sqlalchemy/README.tests.md +++ b/src/databricks/sqlalchemy/README.tests.md @@ -5,7 +5,7 @@ The contents of the `test/` directory follow the SQLAlchemy developers' [guidanc - Any test can be skipped by subclassing its parent class, re-declaring the test-case and adding a `pytest.mark.skip` directive. - Any test that is decorated with a `@requires` decorator can be skipped by marking the indicated requirement as `.closed()` in `requirements.py` -We prefer to skip test cases directly with the first method wherever possible. We only mark requirements as `closed()` if there is no easier option to avoid a test failure. This principally occurs in test cases where the same test in the suite is parametrized, and some parameter combinations are conditionally skipped depending on `requirements.py`. If we skip the entire test method, then we skip _all_ permutations, not just the combinations we don't support. +We prefer to skip test cases directly with the first method wherever possible. We only mark requirements as `closed()` if there is no easier option to avoid a test failure. This principally occurs in test cases where the same test in the suite is parametrized, and some parameter combinations are conditionally skipped depending on `requirements.py`. 
If we skip the entire test method, then we skip _all_ permutations, not just the combinations we don't support. ## Regression, Unsupported, and Future test cases @@ -25,7 +25,7 @@ We maintain `_extra.py` with test cases that depend on SQLAlchemy's reusable dia poetry shell cd src/databricks/sqlalchemy/test python -m pytest test_suite.py --dburi \ - "databricks://token:$access_token@$host?http_path=$http_path&catalog=$catalog&schema=$schema" + "databricks://token:$access_token@$host?http_path=$http_path&catalog=$catalog&schema=$schema" ``` Whatever schema you pass in the `dburi` argument should be empty. Some tests also require the presence of an empty schema named `test_schema`. Note that we plan to implement our own `provision.py` which SQLAlchemy can automatically use to create an empty schema for testing. But for now this is a manual process. @@ -41,4 +41,4 @@ Note that because these tests depend on SQLAlchemy's custom pytest plugin, they Apart from the SQLAlchemy reusable suite, we maintain our own unit and e2e tests under the `test_local/` directory. These can be invoked from a VSCode or Pycharm since they don't depend on a custom pytest plugin. Due to pytest's lookup order, the `pytest.ini` which is required for running the reusable dialect tests, also conflicts with VSCode and Pycharm's default pytest implementation and overrides the settings in `pyproject.toml`. So to run these tests, you can delete or rename `pytest.ini`. -[guidance]: "https://github.com/sqlalchemy/sqlalchemy/blob/rel_2_0_22/README.dialects.rst" \ No newline at end of file +[guidance]: "https://github.com/sqlalchemy/sqlalchemy/blob/rel_2_0_22/README.dialects.rst" diff --git a/src/databricks/sqlalchemy/requirements.py b/src/databricks/sqlalchemy/requirements.py index 75227efb..5c70c029 100644 --- a/src/databricks/sqlalchemy/requirements.py +++ b/src/databricks/sqlalchemy/requirements.py @@ -4,7 +4,7 @@ https://github.com/sqlalchemy/sqlalchemy/blob/main/lib/sqlalchemy/testing/requirements.py When SQLAlchemy skips a test because a requirement is closed() it gives a generic skip message. -To make these failures more actionable, we only define requirements in this file that we wish to +To make these failures more actionable, we only define requirements in this file that we wish to force to be open(). If a test should be skipped on Databricks, it will be specifically marked skip in test_suite.py with a Databricks-specific reason. diff --git a/src/databricks/sqlalchemy/test/overrides/_componentreflectiontest.py b/src/databricks/sqlalchemy/test/overrides/_componentreflectiontest.py index e06af2d0..a1f58fa6 100644 --- a/src/databricks/sqlalchemy/test/overrides/_componentreflectiontest.py +++ b/src/databricks/sqlalchemy/test/overrides/_componentreflectiontest.py @@ -1,7 +1,7 @@ """The default test setup uses self-referential foreign keys and indexes for a test table. We override to remove these assumptions. -Note that test_multi_foreign_keys currently does not pass for all combinations due to +Note that test_multi_foreign_keys currently does not pass for all combinations due to an ordering issue. The dialect returns the expected information. But this test makes assertions on the order of the returned results. We can't guarantee that order at the moment. 
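As a hedged illustration of the README guidance above on skipping reusable-suite tests by re-declaring them (the suite class and test name below are examples, not necessarily cases the dialect actually skips):

```python
import pytest
from sqlalchemy.testing.suite import DateTimeTest as _DateTimeTest


class DateTimeTest(_DateTimeTest):
    """Re-declare the reusable suite class so a single case can be skipped."""

    @pytest.mark.skip(reason="Not supported by the Databricks dialect")
    def test_round_trip(self):
        pass
```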
diff --git a/src/databricks/sqlalchemy/test_local/__init__.py b/src/databricks/sqlalchemy/test_local/__init__.py index d6cebbfe..eca1cf55 100644 --- a/src/databricks/sqlalchemy/test_local/__init__.py +++ b/src/databricks/sqlalchemy/test_local/__init__.py @@ -1,5 +1,5 @@ """ -This module contains tests entirely maintained by Databricks. +This module contains tests entirely maintained by Databricks. These tests do not rely on SQLAlchemy's custom test runner. """ diff --git a/test.env.example b/test.env.example index 3109f601..f99abc9d 100644 --- a/test.env.example +++ b/test.env.example @@ -1,4 +1,4 @@ -# Authentication details for running e2e tests +# Authentication details for running e2e tests DATABRICKS_SERVER_HOSTNAME= DATABRICKS_HTTP_PATH= DATABRICKS_TOKEN= @@ -8,4 +8,4 @@ DATABRICKS_USER= # Only required to run SQLAlchemy tests DATABRICKS_CATALOG= -DATABRICKS_SCHEMA= \ No newline at end of file +DATABRICKS_SCHEMA= diff --git a/tests/e2e/common/large_queries_mixin.py b/tests/e2e/common/large_queries_mixin.py index 8ec32fd4..9ebc3f01 100644 --- a/tests/e2e/common/large_queries_mixin.py +++ b/tests/e2e/common/large_queries_mixin.py @@ -95,7 +95,7 @@ def test_long_running_query(self): """SELECT count(*) FROM RANGE({scale}) x JOIN RANGE({scale0}) y - ON from_unixtime(x.id * y.id, "yyyy-MM-dd") LIKE "%not%a%date%" + ON from_unixtime(x.id * y.id, "yyyy-MM-dd") LIKE "%not%a%date%" """.format( scale=scale_factor * scale0, scale0=scale0 ) diff --git a/tests/e2e/common/predicates.py b/tests/e2e/common/predicates.py index 3450087f..88b14961 100644 --- a/tests/e2e/common/predicates.py +++ b/tests/e2e/common/predicates.py @@ -29,10 +29,10 @@ def test_some_pyhive_v1_stuff(): def is_endpoint_test(cli_args=None): - + # Currently only supporting tests against DBSQL Endpoints # So we don't read `is_endpoint_test` from the CLI args - return True + return True def compare_dbr_versions(cli_args, compare, major_version, minor_version): diff --git a/tests/e2e/common/retry_test_mixins.py b/tests/e2e/common/retry_test_mixins.py old mode 100644 new mode 100755 index 9a49870e..3efb83d2 --- a/tests/e2e/common/retry_test_mixins.py +++ b/tests/e2e/common/retry_test_mixins.py @@ -1,6 +1,6 @@ from contextlib import contextmanager import time -from typing import List +from typing import Optional, List from unittest.mock import MagicMock, PropertyMock, patch import pytest @@ -59,7 +59,7 @@ def _test_retry_disabled_with_message(self, error_msg_substring, exception_type) @contextmanager -def mocked_server_response(status: int = 200, headers: dict = {}, redirect_location: str = None): +def mocked_server_response(status: int = 200, headers: dict = {}, redirect_location: Optional[str] = None): """Context manager for patching urllib3 responses""" # When mocking mocking a BaseHTTPResponse for urllib3 the mock must include @@ -420,4 +420,4 @@ def test_403_not_retried(self): with pytest.raises(RequestError) as cm: with self.connection(extra_params=self._retry_policy) as conn: pass - assert isinstance(cm.value.args[1], NonRecoverableNetworkError) \ No newline at end of file + assert isinstance(cm.value.args[1], NonRecoverableNetworkError) diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 72f8e7d5..43574160 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -67,7 +67,7 @@ def apply_property_to_mock(self, mock_obj, **kwargs): prop = PropertyMock(**kwargs) setattr(type(mock_obj), key, prop) - + @@ -235,7 +235,7 @@ def test_closing_result_set_hard_closes_commands(self): 
@patch("%s.client.ResultSet" % PACKAGE_NAME) def test_executing_multiple_commands_uses_the_most_recent_command(self, mock_result_set_class): - + mock_result_sets = [Mock(), Mock()] mock_result_set_class.side_effect = mock_result_sets @@ -565,7 +565,7 @@ def test_column_name_api(self): @patch("%s.client.ThriftBackend" % PACKAGE_NAME) def test_finalizer_closes_abandoned_connection(self, mock_client_class): instance = mock_client_class.return_value - + mock_open_session_resp = MagicMock(spec=TOpenSessionResp)() mock_open_session_resp.sessionHandle.sessionId = b'\x22' instance.open_session.return_value = mock_open_session_resp @@ -602,11 +602,11 @@ def test_cursor_keeps_connection_alive(self, mock_client_class): def test_staging_operation_response_is_handled(self, mock_client_class, mock_handle_staging_operation, mock_execute_response): # If server sets ExecuteResponse.is_staging_operation True then _handle_staging_operation should be called - + ThriftBackendMockFactory.apply_property_to_mock(mock_execute_response, is_staging_operation=True) mock_client_class.execute_command.return_value = mock_execute_response mock_client_class.return_value = mock_client_class - + connection = databricks.sql.connect(**self.DUMMY_CONNECTION_ARGS) cursor = connection.cursor() cursor.execute("Text of some staging operation command;") diff --git a/tests/unit/test_param_escaper.py b/tests/unit/test_param_escaper.py index 5e695ae9..472a0843 100644 --- a/tests/unit/test_param_escaper.py +++ b/tests/unit/test_param_escaper.py @@ -28,7 +28,7 @@ def test_escape_string_normal(self): assert pe.escape_string("golly bob howdy") == "'golly bob howdy'" def test_escape_string_that_includes_special_characters(self): - """Tests for how special characters are treated. + r"""Tests for how special characters are treated. 
When passed a string, the `escape_string` method wraps it in single quotes and escapes any special characters with a back stroke (\) @@ -219,7 +219,7 @@ class TestInlineToNativeTransformer(object): def test_transformer( self, label: str, query: str, params: Dict[str, Any], expected: str ): - + _params = [dbsql_parameter_from_primitive(value=value, name=name) for name, value in params.items()] output = transform_paramstyle(query, _params, param_structure=ParameterStructure.NAMED) assert output == expected From 5d869d01f920e657cee034706ad60def1b97ad8e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tor=20H=C3=B8dneb=C3=B8?= Date: Wed, 3 Jul 2024 12:09:58 +0200 Subject: [PATCH 3/4] Do not retry failing requests with status code 401 (#408) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Raises NonRecoverableNetworkError when request results in 401 status code Signed-off-by: Tor Hødnebø Signed-off-by: Tor Hødnebø --- CHANGELOG.md | 4 ++++ src/databricks/sql/auth/retry.py | 8 +++++++- tests/e2e/common/retry_test_mixins.py | 13 +++++++++++++ 3 files changed, 24 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 487f0c32..03e8b836 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,9 @@ # Release History +# x.x.x (TBD) + +- Don't retry requests that fail with code 401 + # 3.2.0 (2024-06-06) - Update proxy authentication (databricks/databricks-sql-python#354 by @amir-haroun) diff --git a/src/databricks/sql/auth/retry.py b/src/databricks/sql/auth/retry.py index d2f774ed..0c6547cb 100755 --- a/src/databricks/sql/auth/retry.py +++ b/src/databricks/sql/auth/retry.py @@ -327,7 +327,8 @@ def should_retry(self, method: str, status_code: int) -> Tuple[bool, str]: default, this means ExecuteStatement is only retried for codes 429 and 503. This limit prevents automatically retrying non-idempotent commands that could be destructive. - 5. The request received a 403 response, because this can never succeed. + 5. The request received a 401 response, because this can never succeed. + 6. The request received a 403 response, because this can never succeed. Q: What about OSErrors and Redirects? @@ -341,6 +342,11 @@ def should_retry(self, method: str, status_code: int) -> Tuple[bool, str]: if status_code == 200: return False, "200 codes are not retried" + if status_code == 401: + raise NonRecoverableNetworkError( + "Received 401 - UNAUTHORIZED. Confirm your authentication credentials." + ) + if status_code == 403: raise NonRecoverableNetworkError( "Received 403 - FORBIDDEN. Confirm your authentication credentials." 
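A hedged sketch of the caller-facing effect of the 401 change above (hostname, HTTP path, and token are placeholders): a 401 response now surfaces immediately as a non-retryable failure instead of consuming the retry budget.

```python
from databricks import sql
from databricks.sql.exc import RequestError

try:
    connection = sql.connect(
        server_hostname="example.cloud.databricks.com",  # placeholder host
        http_path="/sql/1.0/warehouses/abc123",          # placeholder path
        access_token="an-expired-or-invalid-token",      # placeholder token
    )
except RequestError as err:
    # With this patch, a 401 raises NonRecoverableNetworkError inside the
    # retry policy, so the request is not retried and the failure is wrapped
    # in the RequestError raised to the caller.
    print("Authentication failed and was not retried:", err)
```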
diff --git a/tests/e2e/common/retry_test_mixins.py b/tests/e2e/common/retry_test_mixins.py index 3efb83d2..106a8fb5 100755 --- a/tests/e2e/common/retry_test_mixins.py +++ b/tests/e2e/common/retry_test_mixins.py @@ -421,3 +421,16 @@ def test_403_not_retried(self): with self.connection(extra_params=self._retry_policy) as conn: pass assert isinstance(cm.value.args[1], NonRecoverableNetworkError) + + def test_401_not_retried(self): + """GIVEN the server returns a code 401 + WHEN the connector receives this response + THEN nothing is retried and an exception is raised + """ + + # Code 401 is an Unauthorized error + with mocked_server_response(status=401): + with pytest.raises(RequestError) as cm: + with self.connection(extra_params=self._retry_policy): + pass + assert isinstance(cm.value.args[1], NonRecoverableNetworkError) From 9f9e96d352d95c5586c8358de3835ec37f002e91 Mon Sep 17 00:00:00 2001 From: Jacky Hu Date: Thu, 4 Jul 2024 16:21:23 -0700 Subject: [PATCH 4/4] [PECO-1715] Remove username/password (BasicAuth) auth option (#409) Signed-off-by: Jacky Hu --- src/databricks/sql/auth/auth.py | 17 ++++------ src/databricks/sql/auth/authenticators.py | 15 --------- src/databricks/sql/client.py | 4 +-- tests/unit/test_auth.py | 40 +++++------------------ tests/unit/test_client.py | 8 ----- tests/unit/test_oauth_persistence.py | 2 -- 6 files changed, 17 insertions(+), 69 deletions(-) diff --git a/src/databricks/sql/auth/auth.py b/src/databricks/sql/auth/auth.py index 40376b90..2a6c9d4d 100755 --- a/src/databricks/sql/auth/auth.py +++ b/src/databricks/sql/auth/auth.py @@ -4,7 +4,6 @@ from databricks.sql.auth.authenticators import ( AuthProvider, AccessTokenAuthProvider, - BasicAuthProvider, ExternalAuthProvider, DatabricksOAuthProvider, ) @@ -13,7 +12,7 @@ class AuthType(Enum): DATABRICKS_OAUTH = "databricks-oauth" AZURE_OAUTH = "azure-oauth" - # other supported types (access_token, user/pass) can be inferred + # other supported types (access_token) can be inferred # we can add more types as needed later @@ -21,8 +20,6 @@ class ClientContext: def __init__( self, hostname: str, - username: Optional[str] = None, - password: Optional[str] = None, access_token: Optional[str] = None, auth_type: Optional[str] = None, oauth_scopes: Optional[List[str]] = None, @@ -34,8 +31,6 @@ def __init__( credentials_provider=None, ): self.hostname = hostname - self.username = username - self.password = password self.access_token = access_token self.auth_type = auth_type self.oauth_scopes = oauth_scopes @@ -65,8 +60,6 @@ def get_auth_provider(cfg: ClientContext): ) elif cfg.access_token is not None: return AccessTokenAuthProvider(cfg.access_token) - elif cfg.username is not None and cfg.password is not None: - return BasicAuthProvider(cfg.username, cfg.password) elif cfg.use_cert_as_auth and cfg.tls_client_cert_file: # no op authenticator. authentication is performed using ssl certificate outside of headers return AuthProvider() @@ -100,12 +93,16 @@ def get_python_sql_connector_auth_provider(hostname: str, **kwargs): (client_id, redirect_port_range) = get_client_id_and_redirect_port( auth_type == AuthType.AZURE_OAUTH.value ) + if kwargs.get("username") or kwargs.get("password"): + raise ValueError( + "Username/password authentication is no longer supported. " + "Please use OAuth or access token instead." 
+ ) + cfg = ClientContext( hostname=normalize_host_name(hostname), auth_type=auth_type, access_token=kwargs.get("access_token"), - username=kwargs.get("_username"), - password=kwargs.get("_password"), use_cert_as_auth=kwargs.get("_use_cert_as_auth"), tls_client_cert_file=kwargs.get("_tls_client_cert_file"), oauth_scopes=PYSQL_OAUTH_SCOPES, diff --git a/src/databricks/sql/auth/authenticators.py b/src/databricks/sql/auth/authenticators.py index e89c2bd5..64eb91bb 100644 --- a/src/databricks/sql/auth/authenticators.py +++ b/src/databricks/sql/auth/authenticators.py @@ -43,21 +43,6 @@ def add_headers(self, request_headers: Dict[str, str]): request_headers["Authorization"] = self.__authorization_header_value -# Private API: this is an evolving interface and it will change in the future. -# Please must not depend on it in your applications. -class BasicAuthProvider(AuthProvider): - def __init__(self, username: str, password: str): - auth_credentials = f"{username}:{password}".encode("UTF-8") - auth_credentials_base64 = base64.standard_b64encode(auth_credentials).decode( - "UTF-8" - ) - - self.__authorization_header_value = f"Basic {auth_credentials_base64}" - - def add_headers(self, request_headers: Dict[str, str]): - request_headers["Authorization"] = self.__authorization_header_value - - # Private API: this is an evolving interface and it will change in the future. # Please must not depend on it in your applications. class DatabricksOAuthProvider(AuthProvider): diff --git a/src/databricks/sql/client.py b/src/databricks/sql/client.py index c5962147..e56d22f6 100755 --- a/src/databricks/sql/client.py +++ b/src/databricks/sql/client.py @@ -163,10 +163,8 @@ def read(self) -> Optional[OAuthToken]: # Internal arguments in **kwargs: # _user_agent_entry # Tag to add to User-Agent header. For use by partners. - # _username, _password - # Username and password Basic authentication (no official support) # _use_cert_as_auth - # Use a TLS cert instead of a token or username / password (internal use only) + # Use a TLS cert instead of a token # _enable_ssl # Connect over HTTP instead of HTTPS # _port diff --git a/tests/unit/test_auth.py b/tests/unit/test_auth.py index 5b81f2b7..a1b36bf7 100644 --- a/tests/unit/test_auth.py +++ b/tests/unit/test_auth.py @@ -5,7 +5,6 @@ from databricks.sql.auth.auth import ( AccessTokenAuthProvider, - BasicAuthProvider, AuthProvider, ExternalAuthProvider, AuthType, @@ -33,21 +32,6 @@ def test_access_token_provider(self): self.assertEqual(len(http_request.keys()), 2) self.assertEqual(http_request["myKey"], "myVal") - def test_basic_auth_provider(self): - username = "moderakh" - password = "Elevate Databricks 123!!!" - auth = BasicAuthProvider(username=username, password=password) - - http_request = {"myKey": "myVal"} - auth.add_headers(http_request) - - self.assertEqual( - http_request["Authorization"], - "Basic bW9kZXJha2g6RWxldmF0ZSBEYXRhYnJpY2tzIDEyMyEhIQ==", - ) - self.assertEqual(len(http_request.keys()), 2) - self.assertEqual(http_request["myKey"], "myVal") - def test_noop_auth_provider(self): auth = AuthProvider() @@ -175,21 +159,6 @@ def __call__(self, *args, **kwargs) -> HeaderFactory: auth_provider.add_headers(headers) self.assertEqual(headers["foo"], "bar") - def test_get_python_sql_connector_auth_provider_username_password(self): - username = "moderakh" - password = "Elevate Databricks 123!!!" 
- hostname = "moderakh-test.cloud.databricks.com" - kwargs = {"_username": username, "_password": password} - auth_provider = get_python_sql_connector_auth_provider(hostname, **kwargs) - self.assertTrue(type(auth_provider).__name__, "BasicAuthProvider") - - headers = {} - auth_provider.add_headers(headers) - self.assertEqual( - headers["Authorization"], - "Basic bW9kZXJha2g6RWxldmF0ZSBEYXRhYnJpY2tzIDEyMyEhIQ==", - ) - def test_get_python_sql_connector_auth_provider_noop(self): tls_client_cert_file = "fake.cert" use_cert_as_auth = "abc" @@ -200,3 +169,12 @@ def test_get_python_sql_connector_auth_provider_noop(self): } auth_provider = get_python_sql_connector_auth_provider(hostname, **kwargs) self.assertTrue(type(auth_provider).__name__, "CredentialProvider") + + def test_get_python_sql_connector_basic_auth(self): + kwargs = { + "username": "username", + "password": "password", + } + with self.assertRaises(ValueError) as e: + get_python_sql_connector_auth_provider("foo.cloud.databricks.com", **kwargs) + self.assertIn("Username/password authentication is no longer supported", str(e.exception)) diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 43574160..68e8b830 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -103,7 +103,6 @@ def test_close_uses_the_correct_session_id(self, mock_client_class): def test_auth_args(self, mock_client_class): # Test that the following auth args work: # token = foo, - # token = None, _username = foo, _password = bar # token = None, _tls_client_cert_file = something, _use_cert_as_auth = True connection_args = [ { @@ -111,13 +110,6 @@ def test_auth_args(self, mock_client_class): "http_path": None, "access_token": "tok", }, - { - "server_hostname": "foo", - "http_path": None, - "_username": "foo", - "_password": "bar", - "access_token": None, - }, { "server_hostname": "foo", "http_path": None, diff --git a/tests/unit/test_oauth_persistence.py b/tests/unit/test_oauth_persistence.py index 10677c16..28b3cab3 100644 --- a/tests/unit/test_oauth_persistence.py +++ b/tests/unit/test_oauth_persistence.py @@ -1,8 +1,6 @@ import unittest -from databricks.sql.auth.auth import AccessTokenAuthProvider, BasicAuthProvider, AuthProvider -from databricks.sql.auth.auth import get_python_sql_connector_auth_provider from databricks.sql.experimental.oauth_persistence import DevOnlyFilePersistence, OAuthToken import tempfile import os
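Finally, a hedged migration sketch for the BasicAuth removal in this last patch (hostname, HTTP path, and token values are placeholders): code that previously passed `_username`/`_password` moves to a personal access token, or to interactive OAuth via `auth_type`, both of which remain supported after this change.

```python
from databricks import sql

# Option 1: personal access token (the `access_token` path retained above).
conn = sql.connect(
    server_hostname="example.cloud.databricks.com",  # placeholder
    http_path="/sql/1.0/warehouses/abc123",          # placeholder
    access_token="dapi-example-token",               # placeholder
)

# Option 2: interactive OAuth (U2M), using the auth type defined in auth.py.
conn = sql.connect(
    server_hostname="example.cloud.databricks.com",  # placeholder
    http_path="/sql/1.0/warehouses/abc123",          # placeholder
    auth_type="databricks-oauth",
)
```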