@@ -119,7 +119,6 @@ def _wait_for_processed_run(self, run_id, max_waiting_time_seconds):
         # time.time() works in seconds
         start_time = time.time()
         while time.time() - start_time < max_waiting_time_seconds:
-
             try:
                 openml.runs.get_run_trace(run_id)
             except openml.exceptions.OpenMLServerException:
@@ -131,7 +130,9 @@ def _wait_for_processed_run(self, run_id, max_waiting_time_seconds):
                 time.sleep(10)
                 continue

-            assert len(run.evaluations) > 0, "Expect not-None evaluations to always contain elements."
+            assert (
+                len(run.evaluations) > 0
+            ), "Expect not-None evaluations to always contain elements."
             return

     raise RuntimeError(
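The two hunks above tidy the polling loop in `_wait_for_processed_run`: the test probes the server until the run trace and evaluations become available, sleeps ten seconds between attempts, and gives up once `max_waiting_time_seconds` has elapsed. A minimal standalone sketch of the same poll-until-ready pattern, with `is_ready` as a hypothetical stand-in for the OpenML calls:

import time


def wait_until(is_ready, max_waiting_time_seconds, poll_interval_seconds=10):
    """Poll is_ready() until it returns True or the timeout elapses."""
    start_time = time.time()  # time.time() works in seconds
    while time.time() - start_time < max_waiting_time_seconds:
        if is_ready():
            return
        time.sleep(poll_interval_seconds)  # back off before the next probe
    raise RuntimeError(f"Not ready after {max_waiting_time_seconds} seconds.")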
@@ -557,7 +558,7 @@ def determine_grid_size(param_grid):
             fold_evaluations=run.fold_evaluations,
             num_repeats=1,
             num_folds=num_folds,
-            task_type=task_type
+            task_type=task_type,
         )

         # Check if run string and print representation do not run into an error
@@ -796,7 +797,9 @@ def test_run_and_upload_knn_pipeline(self, warnings_mock):

     @pytest.mark.sklearn()
     def test_run_and_upload_gridsearch(self):
-        estimator_name = "base_estimator" if Version(sklearn.__version__) < Version("1.4") else "estimator"
+        estimator_name = (
+            "base_estimator" if Version(sklearn.__version__) < Version("1.4") else "estimator"
+        )
         gridsearch = GridSearchCV(
             BaggingClassifier(**{estimator_name: SVC()}),
             {f"{estimator_name}__C": [0.01, 0.1, 10], f"{estimator_name}__gamma": [0.01, 0.1, 10]},
@@ -1826,7 +1829,9 @@ def test_joblib_backends(self, parallel_mock):
         num_instances = x.shape[0]
         line_length = 6 + len(task.class_labels)

-        backend_choice = "loky" if Version(joblib.__version__) > Version("0.11") else "multiprocessing"
+        backend_choice = (
+            "loky" if Version(joblib.__version__) > Version("0.11") else "multiprocessing"
+        )
         for n_jobs, backend, call_count in [
             (1, backend_choice, 10),
             (2, backend_choice, 10),
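The expected backend is version-gated because joblib switched its default process-based backend from `multiprocessing` to `loky` after 0.11. A minimal sketch of selecting and requesting the backend explicitly, assuming joblib and `packaging` are installed:

import joblib
from joblib import Parallel, delayed
from packaging.version import Version

backend = "loky" if Version(joblib.__version__) > Version("0.11") else "multiprocessing"
# Run ten small tasks on two workers with the selected backend.
squares = Parallel(n_jobs=2, backend=backend)(delayed(pow)(i, 2) for i in range(10))
print(squares)  # [0, 1, 4, 9, 16, 25, 36, 49, 64, 81]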
@@ -1877,14 +1882,23 @@ def test_joblib_backends(self, parallel_mock):
         reason="SimpleImputer doesn't handle mixed type DataFrame as input",
     )
     def test_delete_run(self):
-        rs = 1
+        rs = np.random.randint(1, 2**32 - 1)
         clf = sklearn.pipeline.Pipeline(
-            steps=[("imputer", SimpleImputer()), ("estimator", DecisionTreeClassifier())],
+            steps=[
+                (f"test_server_imputer_{rs}", SimpleImputer()),
+                ("estimator", DecisionTreeClassifier()),
+            ],
         )
         task = openml.tasks.get_task(32)  # diabetes; crossvalidation

-        run = openml.runs.run_model_on_task(model=clf, task=task, seed=rs)
+        run = openml.runs.run_model_on_task(
+            model=clf, task=task, seed=rs, avoid_duplicate_runs=False
+        )
         run.publish()
+
+        with pytest.raises(openml.exceptions.OpenMLRunsExistError):
+            openml.runs.run_model_on_task(model=clf, task=task, seed=rs, avoid_duplicate_runs=True)
+
         TestBase._mark_entity_for_removal("run", run.run_id)
         TestBase.logger.info(f"collected from test_run_functions: {run.run_id}")
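The reworked test makes the duplicate-run check deterministic: a random suffix in the imputer's step name gives the flow a fresh server-side identity, the first upload is forced through with `avoid_duplicate_runs=False`, and an identical second submission is then expected to be rejected with `OpenMLRunsExistError`. A minimal self-contained sketch of that expect-raise pattern, where `submit` and `DuplicateRunError` are hypothetical stand-ins for the OpenML call and exception:

import pytest


class DuplicateRunError(Exception):
    """Hypothetical stand-in for openml.exceptions.OpenMLRunsExistError."""


_published = set()


def submit(signature, avoid_duplicates=True):
    """Hypothetical stand-in: refuse a signature that was already published."""
    if avoid_duplicates and signature in _published:
        raise DuplicateRunError(signature)
    _published.add(signature)


def test_duplicate_submission_rejected():
    submit("run-signature", avoid_duplicates=False)  # first upload always goes through
    with pytest.raises(DuplicateRunError):
        submit("run-signature")  # identical resubmission must be refused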