forked from openml/openml-python
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path__init__.py
More file actions
125 lines (109 loc) · 2.82 KB
/
__init__.py
File metadata and controls
125 lines (109 loc) · 2.82 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
"""
The OpenML module implements a python interface to
`OpenML <https://www.openml.org>`_, a collaborative platform for machine
learning. OpenML can be used to
* store, download and analyze datasets
* make experiments and their results (e.g. models, predictions)
accesible and reproducible for everybody
* analyze experiments (uploaded by you and other collaborators) and conduct
meta studies
In particular, this module implements a python interface for the
`OpenML REST API <https://www.openml.org/guide#!rest_services>`_
(`REST on wikipedia
<https://en.wikipedia.org/wiki/Representational_state_transfer>`_).
"""
# License: BSD 3-Clause
from __future__ import annotations
from . import (
_api_calls,
config,
datasets,
evaluations,
exceptions,
extensions,
flows,
runs,
setups,
study,
tasks,
utils,
)
from .__version__ import __version__
from ._api import _backend
from .datasets import OpenMLDataFeature, OpenMLDataset
from .evaluations import OpenMLEvaluation
from .flows import OpenMLFlow
from .runs import OpenMLRun
from .setups import OpenMLParameter, OpenMLSetup
from .study import OpenMLBenchmarkSuite, OpenMLStudy
from .tasks import (
OpenMLClassificationTask,
OpenMLClusteringTask,
OpenMLLearningCurveTask,
OpenMLRegressionTask,
OpenMLSplit,
OpenMLSupervisedTask,
OpenMLTask,
)
def populate_cache(
    task_ids: list[int] | None = None,
    dataset_ids: list[int | str] | None = None,
    flow_ids: list[int] | None = None,
    run_ids: list[int] | None = None,
) -> None:
    """
    Populate a cache for offline and parallel usage of the OpenML connector.

    Each requested entity is fetched once, so later lookups can be served
    from the local cache. Any argument left as ``None`` is simply skipped.

    Parameters
    ----------
    task_ids : iterable
        Task ids to fetch into the cache, or ``None``.
    dataset_ids : iterable
        Dataset ids (or names) to fetch into the cache, or ``None``.
    flow_ids : iterable
        Flow ids to fetch into the cache, or ``None``.
    run_ids : iterable
        Run ids to fetch into the cache, or ``None``.

    Returns
    -------
    None
    """
    # ``x or []`` makes ``None`` behave like "nothing requested", so each
    # loop below runs zero times for an absent or empty argument.
    for tid in task_ids or []:
        tasks.functions.get_task(tid)
    for did in dataset_ids or []:
        datasets.functions.get_dataset(did)
    for fid in flow_ids or []:
        flows.functions.get_flow(fid)
    for rid in run_ids or []:
        runs.functions.get_run(rid)
__all__ = [
"OpenMLBenchmarkSuite",
"OpenMLClassificationTask",
"OpenMLClusteringTask",
"OpenMLDataFeature",
"OpenMLDataset",
"OpenMLEvaluation",
"OpenMLFlow",
"OpenMLLearningCurveTask",
"OpenMLParameter",
"OpenMLRegressionTask",
"OpenMLRun",
"OpenMLSetup",
"OpenMLSplit",
"OpenMLStudy",
"OpenMLSupervisedTask",
"OpenMLTask",
"__version__",
"_api_calls",
"_backend",
"config",
"datasets",
"evaluations",
"exceptions",
"extensions",
"flows",
"runs",
"setups",
"study",
"tasks",
"utils",
]