Skip to content

Commit 5d44dc6

Browse files
authored
Port testing framework changes. (#70)
### Description

Port testing framework changes from dbt-labs/dbt-spark#299 and dbt-labs/dbt-spark#314.
1 parent 266b602 commit 5d44dc6

File tree

12 files changed

+172
-158
lines changed

12 files changed

+172
-158
lines changed

CHANGELOG.md

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,8 @@
11
## dbt-databricks 1.1.0 (Release TBD)
22

3+
### Under the hood
4+
- Port testing framework changes from [dbt-labs/dbt-spark#299](https://github.com/dbt-labs/dbt-spark/pull/299) and [dbt-labs/dbt-spark#314](https://github.com/dbt-labs/dbt-spark/pull/314) ([#70](https://github.com/databricks/dbt-databricks/pull/70))
5+
36
## dbt-databricks 1.0.2 (March 31, 2022)
47

58
### Features

dev_requirements.txt

Lines changed: 10 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,24 +1,23 @@
11
# install latest changes in dbt-core
22
# TODO: how to automate switching from develop to version branches?
3-
git+https://github.com/dbt-labs/dbt.git#egg=dbt-core&subdirectory=core
3+
git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-core&subdirectory=core
4+
git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-tests-adapter&subdirectory=tests/adapter
45

56
# install latest changes in dbt-spark
67
# TODO: how to automate switching from develop to version branches?
78
git+https://github.com/dbt-labs/dbt-spark.git#egg=dbt-spark
89

910
freezegun==0.3.9
10-
pytest==6.0.2
11+
pytest>=6.0.2
1112
mock>=1.3.0
12-
flake8>=3.5.0
13-
pytz==2017.2
14-
bumpversion==0.5.3
15-
tox==3.2.0
13+
flake8
14+
pytz
15+
tox>=3.2.0
1616
ipdb
17-
pytest-xdist>=2.1.0,<3
18-
flaky>=3.5.3,<4
17+
pytest-xdist
18+
pytest-dotenv
1919
pytest-csv
20+
flaky
21+
2022
mypy==0.920
2123
black==22.3.0
22-
23-
# Test requirements
24-
pytest-dbt-adapter==0.6.0

pytest.ini

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,10 @@
1+
[pytest]
2+
filterwarnings =
3+
ignore:.*'soft_unicode' has been renamed to 'soft_str'*:DeprecationWarning
4+
ignore:unclosed file .*:ResourceWarning
5+
env_files =
6+
test.env
7+
testpaths =
8+
tests/unit
9+
tests/integration
10+
tests/functional

tests/conftest.py

Lines changed: 85 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,85 @@
1+
import pytest
2+
import os
3+
4+
pytest_plugins = ["dbt.tests.fixtures.project"]
5+
6+
7+
def pytest_addoption(parser):
8+
parser.addoption("--profile", action="store", default="databricks_cluster", type=str)
9+
10+
11+
# Using @pytest.mark.skip_profile('databricks_cluster') uses the 'skip_by_adapter_type'
12+
# autouse fixture below
13+
def pytest_configure(config):
14+
config.addinivalue_line(
15+
"markers",
16+
"skip_profile(profile): skip test for the given profile",
17+
)
18+
19+
20+
@pytest.fixture(scope="session")
21+
def dbt_profile_target(request):
22+
profile_type = request.config.getoption("--profile")
23+
if profile_type == "databricks_cluster":
24+
target = databricks_cluster_target()
25+
elif profile_type == "databricks_sql_endpoint":
26+
target = databricks_sql_endpoint_target()
27+
elif profile_type == "databricks_uc_cluster":
28+
target = databricks_uc_cluster_target()
29+
elif profile_type == "databricks_uc_sql_endpoint":
30+
target = databricks_uc_sql_endpoint_target()
31+
else:
32+
raise ValueError(f"Invalid profile type '{profile_type}'")
33+
return target
34+
35+
36+
def databricks_cluster_target():
37+
return {
38+
"type": "databricks",
39+
"host": os.getenv("DBT_DATABRICKS_HOST_NAME"),
40+
"http_path": os.getenv(
41+
"DBT_DATABRICKS_CLUSTER_HTTP_PATH", os.getenv("DBT_DATABRICKS_HTTP_PATH")
42+
),
43+
"token": os.getenv("DBT_DATABRICKS_TOKEN"),
44+
}
45+
46+
47+
def databricks_sql_endpoint_target():
48+
return {
49+
"type": "databricks",
50+
"host": os.getenv("DBT_DATABRICKS_HOST_NAME"),
51+
"http_path": os.getenv(
52+
"DBT_DATABRICKS_ENDPOINT_HTTP_PATH", os.getenv("DBT_DATABRICKS_HTTP_PATH")
53+
),
54+
"token": os.getenv("DBT_DATABRICKS_TOKEN"),
55+
}
56+
57+
58+
def databricks_uc_cluster_target():
59+
return {
60+
"type": "databricks",
61+
"host": os.getenv("DBT_DATABRICKS_HOST_NAME"),
62+
"http_path": os.getenv(
63+
"DBT_DATABRICKS_UC_CLUSTER_HTTP_PATH", os.getenv("DBT_DATABRICKS_HTTP_PATH")
64+
),
65+
"token": os.getenv("DBT_DATABRICKS_TOKEN"),
66+
}
67+
68+
69+
def databricks_uc_sql_endpoint_target():
70+
return {
71+
"type": "databricks",
72+
"host": os.getenv("DBT_DATABRICKS_HOST_NAME"),
73+
"http_path": os.getenv(
74+
"DBT_DATABRICKS_UC_ENDPOINT_HTTP_PATH", os.getenv("DBT_DATABRICKS_HTTP_PATH")
75+
),
76+
"token": os.getenv("DBT_DATABRICKS_TOKEN"),
77+
}
78+
79+
80+
@pytest.fixture(autouse=True)
81+
def skip_by_profile_type(request):
82+
profile_type = request.config.getoption("--profile")
83+
if request.node.get_closest_marker("skip_profile"):
84+
if request.node.get_closest_marker("skip_profile").args[0] == profile_type:
85+
pytest.skip("skipped on '{profile_type}' profile")
tests/functional/adapter/test_basic.py (filename missing in extraction; inferred from the dbt basic-adapter test content below)

Lines changed: 47 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,47 @@
1+
from dbt.tests.adapter.basic.test_base import BaseSimpleMaterializations
2+
from dbt.tests.adapter.basic.test_singular_tests import BaseSingularTests
3+
from dbt.tests.adapter.basic.test_singular_tests_ephemeral import (
4+
BaseSingularTestsEphemeral,
5+
)
6+
from dbt.tests.adapter.basic.test_empty import BaseEmpty
7+
from dbt.tests.adapter.basic.test_ephemeral import BaseEphemeral
8+
from dbt.tests.adapter.basic.test_incremental import BaseIncremental
9+
from dbt.tests.adapter.basic.test_generic_tests import BaseGenericTests
10+
from dbt.tests.adapter.basic.test_snapshot_check_cols import BaseSnapshotCheckCols
11+
from dbt.tests.adapter.basic.test_snapshot_timestamp import BaseSnapshotTimestamp
12+
13+
14+
class TestSimpleMaterializationsDatabricks(BaseSimpleMaterializations):
15+
pass
16+
17+
18+
class TestSingularTestsDatabricks(BaseSingularTests):
19+
pass
20+
21+
22+
class TestSingularTestsEphemeralDatabricks(BaseSingularTestsEphemeral):
23+
pass
24+
25+
26+
class TestEmptyDatabricks(BaseEmpty):
27+
pass
28+
29+
30+
class TestEphemeralDatabricks(BaseEphemeral):
31+
pass
32+
33+
34+
class TestIncrementalDatabricks(BaseIncremental):
35+
pass
36+
37+
38+
class TestGenericTestsDatabricks(BaseGenericTests):
39+
pass
40+
41+
42+
class TestSnapshotCheckColsDatabricks(BaseSnapshotCheckCols):
43+
pass
44+
45+
46+
class TestSnapshotTimestampDatabricks(BaseSnapshotTimestamp):
47+
pass

tests/integration/base.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -77,6 +77,8 @@ def __init__(self):
7777

7878

7979
class TestArgs:
80+
__test__ = False
81+
8082
def __init__(self, kwargs):
8183
self.which = "run"
8284
self.single_threaded = False

tests/integration/incremental_unique_id_test/test_incremental_unique_id.py

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -6,8 +6,8 @@
66
from tests.integration.base import DBTIntegrationTest, use_profile
77

88

9-
TestResults = namedtuple(
10-
"TestResults",
9+
ResultHolder = namedtuple(
10+
"ResultHolder",
1111
[
1212
"seed_count",
1313
"model_count",
@@ -76,7 +76,7 @@ def test_scenario_correctness(self, expected_fields, test_case_fields):
7676
self.assertTablesEqual(expected_fields.relation, test_case_fields.relation)
7777

7878
def stub_expected_fields(self, relation, seed_rows, opt_model_count=None):
79-
return TestResults(
79+
return ResultHolder(
8080
seed_count=1,
8181
model_count=1,
8282
seed_rows=seed_rows,
@@ -110,7 +110,7 @@ def test_no_unique_keys(self):
110110
update_sql_file = "add_new_rows"
111111

112112
expected_fields = self.stub_expected_fields(relation=seed, seed_rows=seed_rows)
113-
test_case_fields = TestResults(
113+
test_case_fields = ResultHolder(
114114
*self.setup_test(seed, incremental_model, update_sql_file),
115115
opt_model_count=None,
116116
relation=incremental_model,
@@ -144,7 +144,7 @@ def test_empty_str_unique_key(self):
144144
update_sql_file = "add_new_rows"
145145

146146
expected_fields = self.stub_expected_fields(relation=seed, seed_rows=seed_rows)
147-
test_case_fields = TestResults(
147+
test_case_fields = ResultHolder(
148148
*self.setup_test(seed, incremental_model, update_sql_file),
149149
opt_model_count=None,
150150
relation=incremental_model,
@@ -179,7 +179,7 @@ def test_one_unique_key(self):
179179
expected_fields = self.stub_expected_fields(
180180
relation=expected_model, seed_rows=seed_rows, opt_model_count=1
181181
)
182-
test_case_fields = TestResults(
182+
test_case_fields = ResultHolder(
183183
*self.setup_test(seed, incremental_model, update_sql_file),
184184
opt_model_count=self.update_incremental_model(expected_model),
185185
relation=incremental_model,
@@ -239,7 +239,7 @@ def test_empty_unique_key_list(self):
239239
update_sql_file = "add_new_rows"
240240

241241
expected_fields = self.stub_expected_fields(relation=seed, seed_rows=seed_rows)
242-
test_case_fields = TestResults(
242+
test_case_fields = ResultHolder(
243243
*self.setup_test(seed, incremental_model, update_sql_file),
244244
opt_model_count=None,
245245
relation=incremental_model,
@@ -274,7 +274,7 @@ def test_unary_unique_key_list(self):
274274
expected_fields = self.stub_expected_fields(
275275
relation=expected_model, seed_rows=seed_rows, opt_model_count=1
276276
)
277-
test_case_fields = TestResults(
277+
test_case_fields = ResultHolder(
278278
*self.setup_test(seed, incremental_model, update_sql_file),
279279
opt_model_count=self.update_incremental_model(expected_model),
280280
relation=incremental_model,
@@ -309,7 +309,7 @@ def test_duplicated_unary_unique_key_list(self):
309309
expected_fields = self.stub_expected_fields(
310310
relation=expected_model, seed_rows=seed_rows, opt_model_count=1
311311
)
312-
test_case_fields = TestResults(
312+
test_case_fields = ResultHolder(
313313
*self.setup_test(seed, incremental_model, update_sql_file),
314314
opt_model_count=self.update_incremental_model(expected_model),
315315
relation=incremental_model,
@@ -344,7 +344,7 @@ def test_trinary_unique_key_list(self):
344344
expected_fields = self.stub_expected_fields(
345345
relation=expected_model, seed_rows=seed_rows, opt_model_count=1
346346
)
347-
test_case_fields = TestResults(
347+
test_case_fields = ResultHolder(
348348
*self.setup_test(seed, incremental_model, update_sql_file),
349349
opt_model_count=self.update_incremental_model(expected_model),
350350
relation=incremental_model,
@@ -377,7 +377,7 @@ def test_trinary_unique_key_list_no_update(self):
377377
update_sql_file = "add_new_rows"
378378

379379
expected_fields = self.stub_expected_fields(relation=seed, seed_rows=seed_rows)
380-
test_case_fields = TestResults(
380+
test_case_fields = ResultHolder(
381381
*self.setup_test(seed, incremental_model, update_sql_file),
382382
opt_model_count=None,
383383
relation=incremental_model,

tests/specs/databricks-cluster.dbtspec

Lines changed: 0 additions & 30 deletions
This file was deleted.

tests/specs/databricks-sql-endpoint.dbtspec

Lines changed: 0 additions & 31 deletions
This file was deleted.

tests/specs/databricks-uc-cluster.dbtspec

Lines changed: 0 additions & 35 deletions
This file was deleted.

0 commit comments

Comments (0)