Skip to content

Commit e59a990

Browse files
committed
revert ruff
1 parent f75b2de commit e59a990

10 files changed

Lines changed: 120 additions & 36 deletions

File tree

openml/_api_calls.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -362,7 +362,7 @@ def _send_request( # noqa: C901, PLR0912
362362
files: FILE_ELEMENTS_TYPE | None = None,
363363
md5_checksum: str | None = None,
364364
) -> requests.Response:
365-
n_retries = 1
365+
n_retries = max(1, config.connection_n_retries)
366366

367367
response: requests.Response | None = None
368368
delay_method = _human_delay if config.retry_policy == "human" else _robot_delay

openml/config.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -157,7 +157,7 @@ def _resolve_default_cache_dir() -> Path:
157157
"cachedir": _resolve_default_cache_dir(),
158158
"avoid_duplicate_runs": False,
159159
"retry_policy": "human",
160-
"connection_n_retries": 1,
160+
"connection_n_retries": 5,
161161
"show_progress": False,
162162
}
163163

tests/conftest.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -297,8 +297,8 @@ def with_test_cache(test_files_directory, request):
297297
openml.config.set_root_cache_directory(_root_cache_directory)
298298
if tmp_cache.exists():
299299
shutil.rmtree(tmp_cache)
300+
300301

301-
302302
@pytest.fixture
303303
def static_cache_dir():
304304
return Path(__file__).parent / "files"
@@ -308,4 +308,4 @@ def workdir(tmp_path):
308308
original_cwd = Path.cwd()
309309
os.chdir(tmp_path)
310310
yield tmp_path
311-
os.chdir(original_cwd)
311+
os.chdir(original_cwd)

tests/test_datasets/test_dataset_functions.py

Lines changed: 36 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -406,7 +406,6 @@ def test__download_minio_file_works_with_bucket_subdirectory(self):
406406
file_destination
407407
), "_download_minio_file can download from subdirectories"
408408

409-
410409
@mock.patch("openml._api_calls._download_minio_file")
411410
@pytest.mark.test_server()
412411
def test__get_dataset_parquet_is_cached(self, patch):
@@ -530,7 +529,10 @@ def test_deletion_of_cache_dir_faulty_download(self, patch):
530529
datasets_cache_dir = os.path.join(openml.config.get_cache_directory(), "datasets")
531530
assert len(os.listdir(datasets_cache_dir)) == 0
532531

533-
@pytest.mark.skip(reason="Pending resolution of #1657")
532+
@pytest.mark.skipif(
533+
os.getenv("OPENML_USE_LOCAL_SERVICES") == "true",
534+
reason="Pending resolution of #1657",
535+
)
534536
@pytest.mark.test_server()
535537
def test_publish_dataset(self):
536538
arff_file_path = self.static_cache_dir / "org" / "openml" / "test" / "datasets" / "2" / "dataset.arff"
@@ -690,7 +692,10 @@ def test_attributes_arff_from_df_unknown_dtype(self):
690692
with pytest.raises(ValueError, match=err_msg):
691693
attributes_arff_from_df(df)
692694

693-
@pytest.mark.skip(reason="Pending resolution of #1657")
695+
@pytest.mark.skipif(
696+
os.getenv("OPENML_USE_LOCAL_SERVICES") == "true",
697+
reason="Pending resolution of #1657",
698+
)
694699
@pytest.mark.test_server()
695700
def test_create_dataset_numpy(self):
696701
data = np.array([[1, 2, 3], [1.2, 2.5, 3.8], [2, 5, 8], [0, 1, 0]]).T
@@ -725,7 +730,10 @@ def test_create_dataset_numpy(self):
725730
), "Uploaded arff does not match original one"
726731
assert _get_online_dataset_format(dataset.id) == "arff", "Wrong format for dataset"
727732

728-
@pytest.mark.skip(reason="Pending resolution of #1657")
733+
@pytest.mark.skipif(
734+
os.getenv("OPENML_USE_LOCAL_SERVICES") == "true",
735+
reason="Pending resolution of #1657",
736+
)
729737
@pytest.mark.test_server()
730738
def test_create_dataset_list(self):
731739
data = [
@@ -781,7 +789,10 @@ def test_create_dataset_list(self):
781789
), "Uploaded ARFF does not match original one"
782790
assert _get_online_dataset_format(dataset.id) == "arff", "Wrong format for dataset"
783791

784-
@pytest.mark.skip(reason="Pending resolution of #1657")
792+
@pytest.mark.skipif(
793+
os.getenv("OPENML_USE_LOCAL_SERVICES") == "true",
794+
reason="Pending resolution of #1657",
795+
)
785796
@pytest.mark.test_server()
786797
def test_create_dataset_sparse(self):
787798
# test the scipy.sparse.coo_matrix
@@ -930,7 +941,10 @@ def test_get_online_dataset_format(self):
930941
dataset_id
931942
), "The format of the ARFF files is different"
932943

933-
@pytest.mark.skip(reason="Pending resolution of #1657")
944+
@pytest.mark.skipif(
945+
os.getenv("OPENML_USE_LOCAL_SERVICES") == "true",
946+
reason="Pending resolution of #1657",
947+
)
934948
@pytest.mark.test_server()
935949
def test_create_dataset_pandas(self):
936950
data = [
@@ -1156,7 +1170,10 @@ def test_ignore_attributes_dataset(self):
11561170
paper_url=paper_url,
11571171
)
11581172

1159-
@pytest.mark.skip(reason="Pending resolution of #1657")
1173+
@pytest.mark.skipif(
1174+
os.getenv("OPENML_USE_LOCAL_SERVICES") == "true",
1175+
reason="Pending resolution of #1657",
1176+
)
11601177
@pytest.mark.test_server()
11611178
def test_publish_fetch_ignore_attribute(self):
11621179
"""Test to upload and retrieve dataset and check ignore_attributes"""
@@ -1276,7 +1293,10 @@ def test_create_dataset_row_id_attribute_error(self):
12761293
paper_url=paper_url,
12771294
)
12781295

1279-
@pytest.mark.skip(reason="Pending resolution of #1657")
1296+
@pytest.mark.skipif(
1297+
os.getenv("OPENML_USE_LOCAL_SERVICES") == "true",
1298+
reason="Pending resolution of #1657",
1299+
)
12801300
@pytest.mark.test_server()
12811301
def test_create_dataset_row_id_attribute_inference(self):
12821302
# meta-information
@@ -1445,7 +1465,10 @@ def test_data_edit_non_critical_field(self):
14451465
edited_dataset = openml.datasets.get_dataset(did)
14461466
assert edited_dataset.description == desc
14471467

1448-
@pytest.mark.skip(reason="Pending resolution of #1657")
1468+
@pytest.mark.skipif(
1469+
os.getenv("OPENML_USE_LOCAL_SERVICES") == "true",
1470+
reason="Pending resolution of #1657",
1471+
)
14491472
@pytest.mark.test_server()
14501473
def test_data_edit_critical_field(self):
14511474
# Case 2
@@ -1498,7 +1521,10 @@ def test_data_edit_requires_valid_dataset(self):
14981521
description="xor operation dataset",
14991522
)
15001523

1501-
@pytest.mark.skip(reason="Pending resolution of #1657")
1524+
@pytest.mark.skipif(
1525+
os.getenv("OPENML_USE_LOCAL_SERVICES") == "true",
1526+
reason="Pending resolution of #1657",
1527+
)
15021528
@pytest.mark.test_server()
15031529
def test_data_edit_cannot_edit_critical_field_if_dataset_has_task(self):
15041530
# Need to own a dataset to be able to edit meta-data
@@ -1551,7 +1577,6 @@ def test_data_fork(self):
15511577
data_id=999999,
15521578
)
15531579

1554-
15551580
@pytest.mark.production_server()
15561581
def test_list_datasets_with_high_size_parameter(self):
15571582
# Testing on prod since concurrent deletion of uploded datasets make the test fail

tests/test_flows/test_flow.py

Lines changed: 21 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@
55
import copy
66
import hashlib
77
import re
8+
import os
89
import time
910
from packaging.version import Version
1011
from unittest import mock
@@ -33,7 +34,6 @@
3334
from openml.testing import SimpleImputer, TestBase
3435

3536

36-
3737
class TestFlow(TestBase):
3838
_multiprocess_can_split_ = True
3939

@@ -180,7 +180,10 @@ def test_to_xml_from_xml(self):
180180
openml.flows.functions.assert_flows_equal(new_flow, flow)
181181
assert new_flow is not flow
182182

183-
@pytest.mark.skip(reason="Pending resolution of #1657")
183+
@pytest.mark.skipif(
184+
os.getenv("OPENML_USE_LOCAL_SERVICES") == "true",
185+
reason="Pending resolution of #1657",
186+
)
184187
@pytest.mark.sklearn()
185188
@pytest.mark.test_server()
186189
def test_publish_flow(self):
@@ -223,7 +226,10 @@ def test_publish_existing_flow(self, flow_exists_mock):
223226
f"collected from {__file__.split('/')[-1]}: {flow.flow_id}",
224227
)
225228

226-
@pytest.mark.skip(reason="Pending resolution of #1657")
229+
@pytest.mark.skipif(
230+
os.getenv("OPENML_USE_LOCAL_SERVICES") == "true",
231+
reason="Pending resolution of #1657",
232+
)
227233
@pytest.mark.sklearn()
228234
@pytest.mark.test_server()
229235
def test_publish_flow_with_similar_components(self):
@@ -275,7 +281,10 @@ def test_publish_flow_with_similar_components(self):
275281
TestBase._mark_entity_for_removal("flow", flow3.flow_id, flow3.name)
276282
TestBase.logger.info(f"collected from {__file__.split('/')[-1]}: {flow3.flow_id}")
277283

278-
@pytest.mark.skip(reason="Pending resolution of #1657")
284+
@pytest.mark.skipif(
285+
os.getenv("OPENML_USE_LOCAL_SERVICES") == "true",
286+
reason="Pending resolution of #1657",
287+
)
279288
@pytest.mark.sklearn()
280289
@pytest.mark.test_server()
281290
def test_semi_legal_flow(self):
@@ -386,7 +395,10 @@ def get_sentinel():
386395
flow_id = openml.flows.flow_exists(name, version)
387396
assert not flow_id
388397

389-
@pytest.mark.skip(reason="Pending resolution of #1657")
398+
@pytest.mark.skipif(
399+
os.getenv("OPENML_USE_LOCAL_SERVICES") == "true",
400+
reason="Pending resolution of #1657",
401+
)
390402
@pytest.mark.sklearn()
391403
@pytest.mark.test_server()
392404
def test_existing_flow_exists(self):
@@ -428,7 +440,10 @@ def test_existing_flow_exists(self):
428440
)
429441
assert downloaded_flow_id == flow.flow_id
430442

431-
@pytest.mark.skip(reason="Pending resolution of #1657")
443+
@pytest.mark.skipif(
444+
os.getenv("OPENML_USE_LOCAL_SERVICES") == "true",
445+
reason="Pending resolution of #1657",
446+
)
432447
@pytest.mark.sklearn()
433448
@pytest.mark.test_server()
434449
def test_sklearn_to_upload_to_flow(self):

tests/test_flows/test_flow_functions.py

Lines changed: 9 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@
1212
from unittest import mock
1313
from unittest.mock import patch
1414

15+
import os
1516
import pandas as pd
1617
import pytest
1718
import requests
@@ -309,7 +310,10 @@ def test_get_flow1(self):
309310
flow = openml.flows.get_flow(1)
310311
assert flow.external_version is None
311312

312-
@pytest.mark.skip(reason="Pending resolution of #1657")
313+
@pytest.mark.skipif(
314+
os.getenv("OPENML_USE_LOCAL_SERVICES") == "true",
315+
reason="Pending resolution of #1657",
316+
)
313317
@pytest.mark.sklearn()
314318
@pytest.mark.test_server()
315319
def test_get_flow_reinstantiate_model(self):
@@ -393,7 +397,10 @@ def test_get_flow_reinstantiate_flow_not_strict_pre_023(self):
393397
assert flow.flow_id is None
394398
assert "sklearn==0.19.1" not in flow.dependencies
395399

396-
@pytest.mark.skip(reason="Pending resolution of #1657")
400+
@pytest.mark.skipif(
401+
os.getenv("OPENML_USE_LOCAL_SERVICES") == "true",
402+
reason="Pending resolution of #1657",
403+
)
397404
@pytest.mark.sklearn()
398405
@pytest.mark.test_server()
399406
def test_get_flow_id(self):

tests/test_openml/test_api_calls.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77

88
import minio
99
import pytest
10+
import os
1011

1112
import openml
1213
from openml.config import ConfigurationForExamples
@@ -20,7 +21,10 @@ def test_too_long_uri(self):
2021
with pytest.raises(openml.exceptions.OpenMLServerError, match="URI too long!"):
2122
openml.datasets.list_datasets(data_id=list(range(10000)))
2223

23-
@pytest.mark.skip(reason="Pending resolution of #1657")
24+
@pytest.mark.skipif(
25+
os.getenv("OPENML_USE_LOCAL_SERVICES") == "true",
26+
reason="Pending resolution of #1657",
27+
)
2428
@unittest.mock.patch("time.sleep")
2529
@unittest.mock.patch("requests.Session")
2630
@pytest.mark.test_server()

tests/test_runs/test_run.py

Lines changed: 20 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -118,7 +118,10 @@ def _check_array(array, type_):
118118
else:
119119
assert run_prime_trace_content is None
120120

121-
@pytest.mark.skip(reason="Pending resolution of #1657")
121+
@pytest.mark.skipif(
122+
os.getenv("OPENML_USE_LOCAL_SERVICES") == "true",
123+
reason="Pending resolution of #1657",
124+
)
122125
@pytest.mark.sklearn()
123126
@pytest.mark.test_server()
124127
def test_to_from_filesystem_vanilla(self):
@@ -154,7 +157,10 @@ def test_to_from_filesystem_vanilla(self):
154157
f"collected from {__file__.split('/')[-1]}: {run_prime.run_id}",
155158
)
156159

157-
@pytest.mark.skip(reason="Pending resolution of #1657")
160+
@pytest.mark.skipif(
161+
os.getenv("OPENML_USE_LOCAL_SERVICES") == "true",
162+
reason="Pending resolution of #1657",
163+
)
158164
@pytest.mark.sklearn()
159165
@pytest.mark.flaky()
160166
@pytest.mark.test_server()
@@ -191,7 +197,10 @@ def test_to_from_filesystem_search(self):
191197
f"collected from {__file__.split('/')[-1]}: {run_prime.run_id}",
192198
)
193199

194-
@pytest.mark.skip(reason="Pending resolution of #1657")
200+
@pytest.mark.skipif(
201+
os.getenv("OPENML_USE_LOCAL_SERVICES") == "true",
202+
reason="Pending resolution of #1657",
203+
)
195204
@pytest.mark.sklearn()
196205
@pytest.mark.test_server()
197206
def test_to_from_filesystem_no_model(self):
@@ -298,7 +307,10 @@ def assert_run_prediction_data(task, run, model):
298307
assert_method(y_pred, saved_y_pred)
299308
assert_method(y_test, saved_y_test)
300309

301-
@pytest.mark.skip(reason="Pending resolution of #1657")
310+
@pytest.mark.skipif(
311+
os.getenv("OPENML_USE_LOCAL_SERVICES") == "true",
312+
reason="Pending resolution of #1657",
313+
)
302314
@pytest.mark.sklearn()
303315
@pytest.mark.test_server()
304316
def test_publish_with_local_loaded_flow(self):
@@ -343,7 +355,10 @@ def test_publish_with_local_loaded_flow(self):
343355
assert openml.flows.flow_exists(flow.name, flow.external_version)
344356
openml.runs.get_run(loaded_run.run_id)
345357

346-
@pytest.mark.skip(reason="Pending resolution of #1657")
358+
@pytest.mark.skipif(
359+
os.getenv("OPENML_USE_LOCAL_SERVICES") == "true",
360+
reason="Pending resolution of #1657",
361+
)
347362
@pytest.mark.sklearn()
348363
@pytest.mark.test_server()
349364
def test_offline_and_online_run_identical(self):

0 commit comments

Comments (0)