
Commit

migrate two tests
benc-db committed Oct 27, 2023
1 parent b199f38 commit fd2f18c
Showing 15 changed files with 187 additions and 164 deletions.
2 changes: 1 addition & 1 deletion tests/conftest.py
@@ -7,7 +7,7 @@


def pytest_addoption(parser):
-    parser.addoption("--profile", action="store", default="databricks_uc_sql_endpoint", type=str)
+    parser.addoption("--profile", action="store", default="databricks_cluster", type=str)


# Using @pytest.mark.skip_profile('databricks_cluster') uses the 'skip_by_adapter_type'
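The skip_profile comment above is cut off where the diff is collapsed. As a rough sketch only (the fixture name and details here are illustrative assumptions, not the repository's actual conftest code), a marker like @pytest.mark.skip_profile is commonly wired to the --profile option with an autouse fixture along these lines:

import pytest


@pytest.fixture(autouse=True)
def skip_by_profile_type(request):
    # Hypothetical sketch: skip any test marked @pytest.mark.skip_profile("<profile>")
    # when that profile is the one selected via --profile.
    profile = request.config.getoption("--profile")
    marker = request.node.get_closest_marker("skip_profile")
    if marker and profile in marker.args:
        pytest.skip(f"skipped for profile: {profile}")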
17 changes: 17 additions & 0 deletions tests/functional/adapter/basic/fixtures.py
@@ -0,0 +1,17 @@
basic_seed_csv = """
id,msg
1,hello
2,goodbye
2,yo
3,anyway
"""

basic_model_sql = """
{{ config(
    materialized = 'table'
)}}
select cast(1 as bigint) as id, 'hello' as msg
union all
select cast(2 as bigint) as id, 'goodbye' as msg
"""
27 changes: 27 additions & 0 deletions tests/functional/adapter/basic/test_ensure_no_describe_extended.py
@@ -0,0 +1,27 @@
import pytest
from dbt.tests import util

from tests.functional.adapter.basic import fixtures


class TestEnsureNoDescribeExtended:
    """Tests in this class exist to ensure we don't call describe extended unnecessarily.
    This became a problem due to needing to discern tables from streaming tables, which is not
    relevant on hive, but users on hive were having all of their tables describe extended-ed.
    We only need to call describe extended if we are using a UC catalog and we can't determine the
    type of the materialization."""

    @pytest.fixture(scope="class")
    def seeds(self):
        return {"my_seed.csv": fixtures.basic_seed_csv}

    @pytest.fixture(scope="class")
    def models(self):
        return {"my_model.sql": fixtures.basic_model_sql}

    def test_ensure_no_describe_extended(self, project):
        # Add some existing data to ensure we don't try to 'describe extended' it.
        util.run_dbt(["seed"])

        _, log_output = util.run_dbt_and_capture(["run"])
        assert "describe extended" not in log_output
59 changes: 59 additions & 0 deletions tests/functional/adapter/copy_into/fixtures.py
@@ -0,0 +1,59 @@
expected_target_with_expression_list = """id,name,date
0,Zero,2022-01-01
1,Alice,null
2,Bob,null
"""

expected_target = """id,name,date
0,Zero,2022-01-01
1,Alice,2022-01-01
2,Bob,2022-01-02
"""

source = """id,name,date
1,Alice,2022-01-01
2,Bob,2022-01-02
"""

target = """
{{config(materialized='table')}}
select * from values
(0, 'Zero', '2022-01-01') as t(id, name, date)
"""

seed_schema = """
version: 2
seeds:
  - name: source
    config:
      file_format: parquet
      column_types:
        id: int
        name: string
        date: string
  - name: expected_target
    config:
      column_types:
        id: int
        name: string
        date: string
  - name: expected_target_with_expression_list
    config:
      column_types:
        id: int
        name: string
        date: string
"""

model_schema = """
version: 2
models:
  - name: target
    columns:
      - name: id
      - name: name
      - name: date
"""
83 changes: 83 additions & 0 deletions tests/functional/adapter/copy_into/test_copy_into.py
@@ -0,0 +1,83 @@
import pytest
from dbt.tests import util

from tests.functional.adapter.copy_into import fixtures


class BaseCopyInto:
    args_formatter = ""

    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "source.csv": fixtures.source,
            "expected_target.csv": fixtures.expected_target,
            "expected_target_with_expression_list.csv": fixtures.expected_target_with_expression_list,
            "seed_schema.yml": fixtures.seed_schema,
        }

    @pytest.fixture(scope="class")
    def models(self):
        return {"target.sql": fixtures.target, "schema.yml": fixtures.model_schema}

    @pytest.fixture(scope="class")
    def path(self, project):
        util.run_dbt(["seed"])

        # Get the location of the source table.
        rows = util.run_sql_with_adapter(
            project.adapter, "describe table extended {schema}.source", fetch="all"
        )
        path = None
        for row in rows:
            if row.col_name == "Location":
                path = row.data_type
        if path is None:
            raise Exception("No location found for the source table")
        return path

    def copy_into(self, path, args_formatter):
        util.run_dbt(["run"])
        util.run_dbt(
            [
                "run-operation",
                "databricks_copy_into",
                "--args",
                args_formatter.format(source_path=path),
            ]
        )


class TestCopyInto(BaseCopyInto):
    args_formatter = """
target_table: target
source: {source_path}
file_format: parquet
format_options:
  mergeSchema: 'true'
copy_options:
  mergeSchema: 'true'
"""

    def test_copy_into(self, project, path):
        self.copy_into(path, self.args_formatter)
        util.check_relations_equal(project.adapter, ["target", "expected_target"])


class TestCopyIntoWithExpressionList(BaseCopyInto):
    args_formatter = """
target_table: target
source: {source_path}
expression_list: 'id, name'
file_format: parquet
format_options:
  mergeSchema: 'true'
copy_options:
  mergeSchema: 'true'
"""

    def test_copy_into_with_expression_list(self, project, path):
        self.copy_into(path, self.args_formatter)
        util.check_relations_equal(
            project.adapter, ["target", "expected_target_with_expression_list"]
        )
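For concreteness, the rendered --args payload handed to the databricks_copy_into run-operation looks roughly like the snippet below once the source location is substituted; the path used here is a made-up example, whereas the test derives the real one from the path fixture.

args_formatter = """
target_table: target
source: {source_path}
file_format: parquet
format_options:
  mergeSchema: 'true'
copy_options:
  mergeSchema: 'true'
"""

# Hypothetical location for illustration only; the fixture reads the real one
# from `describe table extended`.
print(args_formatter.format(source_path="s3://example-bucket/warehouse/my_schema/source"))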

The remaining ten changed files are deletions, among them the previous tests/integration/copy_into versions of the migrated tests:

7 changes: 0 additions & 7 deletions tests/integration/copy_into/models/expected_target.sql
This file was deleted.

15 changes: 0 additions & 15 deletions tests/integration/copy_into/models/schema.yml
This file was deleted.

6 changes: 0 additions & 6 deletions tests/integration/copy_into/models/source.sql
This file was deleted.

4 changes: 0 additions & 4 deletions tests/integration/copy_into/models/target.sql
This file was deleted.

75 changes: 0 additions & 75 deletions tests/integration/copy_into/test_copy_into.py
This file was deleted.
