Skip to content

Commit 745c383

Browse files
authored
Chore: improve test stability and address some warnings (#5293)
1 parent 8732257 commit 745c383

File tree

10 files changed

+49
-103
lines changed

10 files changed

+49
-103
lines changed

.circleci/install-prerequisites.sh

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -34,4 +34,9 @@ echo "Installing OS-level dependencies: $ALL_DEPENDENCIES"
3434

3535
sudo apt-get clean && sudo apt-get -y update && sudo ACCEPT_EULA='Y' apt-get -y install $ALL_DEPENDENCIES
3636

37+
if [ "$ENGINE" == "spark" ]; then
38+
echo "Using Java version for spark:"
39+
java -version
40+
fi
41+
3742
echo "All done"

.circleci/manage-test-db.sh

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,9 @@ databricks_init() {
5151

5252
# Note: the cluster doesn't need to be running to create / drop catalogs, but it does need to be running to run the integration tests
5353
echo "Ensuring cluster is running"
54-
databricks clusters start $CLUSTER_ID
54+
# the || true is to prevent the following error from causing an abort:
55+
# > Error: is in unexpected state Running.
56+
databricks clusters start $CLUSTER_ID || true
5557
}
5658

5759
databricks_up() {

Makefile

Lines changed: 17 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -138,7 +138,7 @@ dbt-test:
138138
pytest -n auto -m "dbt and not cicdonly"
139139

140140
dbt-fast-test:
141-
pytest -n auto -m "dbt and fast" --retries 3
141+
pytest -n auto -m "dbt and fast" --reruns 3
142142

143143
github-test:
144144
pytest -n auto -m "github"
@@ -173,58 +173,58 @@ engine-%-down:
173173
##################
174174

175175
clickhouse-test: engine-clickhouse-up
176-
pytest -n auto -m "clickhouse" --retries 3 --junitxml=test-results/junit-clickhouse.xml
176+
pytest -n auto -m "clickhouse" --reruns 3 --junitxml=test-results/junit-clickhouse.xml
177177

178178
duckdb-test: engine-duckdb-install
179-
pytest -n auto -m "duckdb" --retries 3 --junitxml=test-results/junit-duckdb.xml
179+
pytest -n auto -m "duckdb" --reruns 3 --junitxml=test-results/junit-duckdb.xml
180180

181181
mssql-test: engine-mssql-up
182-
pytest -n auto -m "mssql" --retries 3 --junitxml=test-results/junit-mssql.xml
182+
pytest -n auto -m "mssql" --reruns 3 --junitxml=test-results/junit-mssql.xml
183183

184184
mysql-test: engine-mysql-up
185-
pytest -n auto -m "mysql" --retries 3 --junitxml=test-results/junit-mysql.xml
185+
pytest -n auto -m "mysql" --reruns 3 --junitxml=test-results/junit-mysql.xml
186186

187187
postgres-test: engine-postgres-up
188-
pytest -n auto -m "postgres" --retries 3 --junitxml=test-results/junit-postgres.xml
188+
pytest -n auto -m "postgres" --reruns 3 --junitxml=test-results/junit-postgres.xml
189189

190190
spark-test: engine-spark-up
191-
pytest -n auto -m "spark" --retries 3 --junitxml=test-results/junit-spark.xml
191+
pytest -n auto -m "spark" --reruns 3 --junitxml=test-results/junit-spark.xml && pytest -n auto -m "pyspark" --reruns 3 --junitxml=test-results/junit-pyspark.xml
192192

193193
trino-test: engine-trino-up
194-
pytest -n auto -m "trino" --retries 3 --junitxml=test-results/junit-trino.xml
194+
pytest -n auto -m "trino" --reruns 3 --junitxml=test-results/junit-trino.xml
195195

196196
risingwave-test: engine-risingwave-up
197-
pytest -n auto -m "risingwave" --retries 3 --junitxml=test-results/junit-risingwave.xml
197+
pytest -n auto -m "risingwave" --reruns 3 --junitxml=test-results/junit-risingwave.xml
198198

199199
#################
200200
# Cloud Engines #
201201
#################
202202

203203
snowflake-test: guard-SNOWFLAKE_ACCOUNT guard-SNOWFLAKE_WAREHOUSE guard-SNOWFLAKE_DATABASE guard-SNOWFLAKE_USER guard-SNOWFLAKE_PASSWORD engine-snowflake-install
204-
pytest -n auto -m "snowflake" --retries 3 --junitxml=test-results/junit-snowflake.xml
204+
pytest -n auto -m "snowflake" --reruns 3 --junitxml=test-results/junit-snowflake.xml
205205

206206
bigquery-test: guard-BIGQUERY_KEYFILE engine-bigquery-install
207207
$(PIP) install -e ".[bigframes]"
208-
pytest -n auto -m "bigquery" --retries 3 --junitxml=test-results/junit-bigquery.xml
208+
pytest -n auto -m "bigquery" --reruns 3 --junitxml=test-results/junit-bigquery.xml
209209

210210
databricks-test: guard-DATABRICKS_CATALOG guard-DATABRICKS_SERVER_HOSTNAME guard-DATABRICKS_HTTP_PATH guard-DATABRICKS_ACCESS_TOKEN guard-DATABRICKS_CONNECT_VERSION engine-databricks-install
211211
$(PIP) install 'databricks-connect==${DATABRICKS_CONNECT_VERSION}'
212-
pytest -n auto -m "databricks" --retries 3 --junitxml=test-results/junit-databricks.xml
212+
pytest -n auto -m "databricks" --reruns 3 --junitxml=test-results/junit-databricks.xml
213213

214214
redshift-test: guard-REDSHIFT_HOST guard-REDSHIFT_USER guard-REDSHIFT_PASSWORD guard-REDSHIFT_DATABASE engine-redshift-install
215-
pytest -n auto -m "redshift" --retries 3 --junitxml=test-results/junit-redshift.xml
215+
pytest -n auto -m "redshift" --reruns 3 --junitxml=test-results/junit-redshift.xml
216216

217217
clickhouse-cloud-test: guard-CLICKHOUSE_CLOUD_HOST guard-CLICKHOUSE_CLOUD_USERNAME guard-CLICKHOUSE_CLOUD_PASSWORD engine-clickhouse-install
218-
pytest -n 1 -m "clickhouse_cloud" --retries 3 --junitxml=test-results/junit-clickhouse-cloud.xml
218+
pytest -n 1 -m "clickhouse_cloud" --reruns 3 --junitxml=test-results/junit-clickhouse-cloud.xml
219219

220220
athena-test: guard-AWS_ACCESS_KEY_ID guard-AWS_SECRET_ACCESS_KEY guard-ATHENA_S3_WAREHOUSE_LOCATION engine-athena-install
221-
pytest -n auto -m "athena" --retries 3 --junitxml=test-results/junit-athena.xml
221+
pytest -n auto -m "athena" --reruns 3 --junitxml=test-results/junit-athena.xml
222222

223223
fabric-test: guard-FABRIC_HOST guard-FABRIC_CLIENT_ID guard-FABRIC_CLIENT_SECRET guard-FABRIC_DATABASE engine-fabric-install
224-
pytest -n auto -m "fabric" --retries 3 --junitxml=test-results/junit-fabric.xml
224+
pytest -n auto -m "fabric" --reruns 3 --junitxml=test-results/junit-fabric.xml
225225

226226
gcp-postgres-test: guard-GCP_POSTGRES_INSTANCE_CONNECTION_STRING guard-GCP_POSTGRES_USER guard-GCP_POSTGRES_PASSWORD guard-GCP_POSTGRES_KEYFILE_JSON engine-gcppostgres-install
227-
pytest -n auto -m "gcp_postgres" --retries 3 --junitxml=test-results/junit-gcp-postgres.xml
227+
pytest -n auto -m "gcp_postgres" --reruns 3 --junitxml=test-results/junit-gcp-postgres.xml
228228

229229
vscode_settings:
230230
mkdir -p .vscode

pyproject.toml

Lines changed: 8 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -85,7 +85,7 @@ dev = [
8585
"pytest",
8686
"pytest-asyncio",
8787
"pytest-mock",
88-
"pytest-retry",
88+
"pytest-rerunfailures",
8989
"pytest-xdist",
9090
"pytz",
9191
"redshift_connector",
@@ -264,8 +264,13 @@ markers = [
264264
"redshift: test for Redshift",
265265
"snowflake: test for Snowflake",
266266
"spark: test for Spark",
267+
"pyspark: test for PySpark that need to run separately from the other spark tests",
267268
"trino: test for Trino (all connectors)",
268-
"risingwave: test for Risingwave"
269+
"risingwave: test for Risingwave",
270+
271+
# Other
272+
"set_default_connection",
273+
"registry_isolation"
269274
]
270275
addopts = "-n 0 --dist=loadgroup"
271276
asyncio_default_fixture_loop_scope = "session"
@@ -275,7 +280,7 @@ log_cli_level = "INFO"
275280
filterwarnings = [
276281
"ignore:The localize method is no longer necessary, as this time zone supports the fold attribute"
277282
]
278-
retry_delay = 10
283+
reruns_delay = 10
279284

280285
[tool.ruff]
281286
line-length = 100

sqlmesh/core/test/context.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,8 @@ class TestExecutionContext(ExecutionContext):
1818
models: All upstream models to use for expansion and mapping of physical locations.
1919
"""
2020

21+
__test__ = False # prevent pytest trying to collect this as a test class
22+
2123
def __init__(
2224
self,
2325
engine_adapter: EngineAdapter,

sqlmesh/dbt/test.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -60,6 +60,10 @@ class TestConfig(GeneralConfig):
6060
error_if: Conditional expression (default "!=0") to detect if error condition met (Not supported).
6161
"""
6262

63+
__test__ = (
64+
False # prevent pytest trying to collect this as a test class when it's imported in a test
65+
)
66+
6367
# SQLMesh fields
6468
path: Path = Path()
6569
name: str

tests/conftest.py

Lines changed: 0 additions & 78 deletions
Original file line numberDiff line numberDiff line change
@@ -212,84 +212,6 @@ def pytest_collection_modifyitems(items, *args, **kwargs):
212212
item.add_marker("fast")
213213

214214

215-
@pytest.hookimpl(hookwrapper=True, tryfirst=True)
216-
def pytest_runtest_makereport(item: pytest.Item, call: pytest.CallInfo):
217-
# The tmp_path fixture frequently throws errors like:
218-
# - KeyError: <_pytest.stash.StashKey object at 0x79ba385fe1a0>
219-
# in its teardown. This causes pytest to mark the test as failed even though we have zero control over this behaviour.
220-
# So we log/swallow that particular error here rather than raising it
221-
222-
# note: the hook always has to yield
223-
outcome = yield
224-
225-
# we only care about tests that used the tmp_path fixture
226-
if "tmp_path" not in getattr(item, "fixturenames", []):
227-
return
228-
229-
result: pytest.TestReport = outcome.get_result()
230-
231-
if result.when != "teardown":
232-
return
233-
234-
# If we specifically failed with a StashKey error in teardown, mark the test as passed
235-
if result.failed:
236-
exception = call.excinfo
237-
if (
238-
exception
239-
and isinstance(exception.value, KeyError)
240-
and "_pytest.stash.StashKey" in repr(exception)
241-
):
242-
result.outcome = "passed"
243-
item.add_report_section(
244-
"teardown", "stderr", f"Ignored tmp_path teardown error: {exception}"
245-
)
246-
247-
248-
def pytest_configure(config: pytest.Config):
249-
# we need to adjust the hook order if pytest-retry is present because it:
250-
# - also declares a `pytest_runtest_makereport` with `hookwrapper=True, tryfirst=True`
251-
# - this supersedes our one because pytest always loads plugins first and they take precedence over user code
252-
#
253-
# but, we need our one to run first because it's capturing and ignoring certain errors that cause pytest-retry to fail
254-
# and not retry. so we need to adjust the order the hooks are called which pytest does NOT make easy.
255-
#
256-
# we can't just unload the pytest-retry plugin, load our hook and reload the pytest-retry plugin either.
257-
# this causes an error:
258-
# > Hook 'pytest_set_excluded_exceptions' is already registered within namespace
259-
# because unregister() apparently doesn't unregister plugins cleanly in such a way they can be re-registered
260-
#
261-
# so what we end up doing below is a small monkey-patch to adjust the call order of the hooks
262-
pm = config.pluginmanager
263-
264-
from pluggy._hooks import HookCaller
265-
266-
hook_caller: HookCaller = pm.hook.pytest_runtest_makereport
267-
hook_impls = hook_caller.get_hookimpls()
268-
269-
# find the index of our one
270-
our_makereport_idx = next(
271-
(i for i, v in enumerate(hook_impls) if v.plugin_name.endswith("tests/conftest.py")), None
272-
)
273-
274-
# find the index of the pytest-retry one
275-
pytest_retry_makereport_idx = next(
276-
(i for i, v in enumerate(hook_impls) if v.plugin_name == "pytest-retry"), None
277-
)
278-
279-
if (
280-
pytest_retry_makereport_idx is not None
281-
and our_makereport_idx is not None
282-
and our_makereport_idx > pytest_retry_makereport_idx
283-
):
284-
our_makereport_hook = hook_impls.pop(our_makereport_idx)
285-
286-
# inject our one to run before the pytest-retry one
287-
hook_impls.insert(pytest_retry_makereport_idx, our_makereport_hook)
288-
289-
# HookCaller doesn't have a setter method for this.
290-
hook_caller._hookimpls = hook_impls # type: ignore
291-
292-
293215
# Ignore all local config files
294216
@pytest.fixture(scope="session", autouse=True)
295217
def ignore_local_config_files():

tests/dbt/test_integration.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,8 @@
2727

2828

2929
class TestType(str, Enum):
30+
__test__ = False # prevent pytest trying to collect this as a test class
31+
3032
DBT_RUNTIME = "dbt_runtime"
3133
DBT_ADAPTER = "dbt_adapter"
3234
SQLMESH = "sqlmesh"
@@ -53,6 +55,8 @@ def is_sqlmesh_runtime(self) -> bool:
5355

5456

5557
class TestStrategy(str, Enum):
58+
__test__ = False # prevent pytest trying to collect this as a test class
59+
5660
CHECK = "check"
5761
TIMESTAMP = "timestamp"
5862

tests/engines/spark/test_db_api.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -4,10 +4,10 @@
44
from sqlmesh.engines.spark.db_api import errors
55
from sqlmesh.engines.spark.db_api import spark_session as spark_session_db
66

7-
pytestmark = [
8-
pytest.mark.slow,
9-
pytest.mark.spark_pyspark,
10-
]
7+
# note: this is deliberately not marked with 'spark' so that it
8+
# can run separately from the spark integration tests.
9+
# running them at the same time mutates some global state in the SparkSession which breaks these tests
10+
pytestmark = [pytest.mark.slow, pytest.mark.pyspark]
1111

1212

1313
def test_spark_session_cursor(spark_session: SparkSession):

tests/integrations/github/cicd/test_github_commands.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,8 @@
55
from unittest import TestCase, mock
66
from unittest.result import TestResult
77

8+
TestResult.__test__ = False # prevent pytest trying to collect this as a test class
9+
810
import pytest
911
from pytest_mock.plugin import MockerFixture
1012

0 commit comments

Comments (0)