@@ -176,7 +176,7 @@ def test_cleanup(executor: SlurmExecutor) -> None:
 def test_task_get_before_finalize(executor: SlurmExecutor) -> None:
     """Test that _get before finalize returns None."""
     task = executor.submit(example_func, 1.0)
-    with pytest.raises(AssertionError, match="RunManager not initialized"):
+    with pytest.raises(RuntimeError, match="Task mapping not found; finalize()"):
         task._get()


@@ -272,3 +272,108 @@ async def simulate_result() -> None:
     asyncio.create_task(simulate_result())  # noqa: RUF006
     result = await task
     assert result == 42
+
+
+@pytest.mark.usefixtures("_mock_slurm_partitions")
+@pytest.mark.usefixtures("_mock_slurm_queue")
+def test_to_learners_mapping_single_function(tmp_path: Path) -> None:
+    """Test that _to_learners creates the correct mapping for a single function."""
+    executor = SlurmExecutor(folder=tmp_path, size_per_learner=2)
+    # Submit 5 tasks to example_func so that they are split into chunks of 2.
+    for i in range(5):
+        executor.submit(example_func, i)
+    learners, fnames, mapping = executor._to_learners()
+
+    # We expect ceil(5/2) = 3 learners.
+    assert len(learners) == 3
+
+    func_id = executor._sequence_mapping[example_func]
+    expected_mapping = {
+        (func_id, 0): (0, 0),  # first learner, first task
+        (func_id, 1): (0, 1),  # first learner, second task
+        (func_id, 2): (1, 0),  # second learner, first task
+        (func_id, 3): (1, 1),  # second learner, second task
+        (func_id, 4): (2, 0),  # third learner, first task (only one task in this chunk)
+    }
+    assert mapping == expected_mapping
+
+
+@pytest.mark.usefixtures("_mock_slurm_partitions")
+@pytest.mark.usefixtures("_mock_slurm_queue")
+def test_finalize_mapping_and_learners(tmp_path: Path) -> None:
+    """Test that finalize() sets the task mapping correctly and creates the right number of learners."""
+    executor = SlurmExecutor(folder=tmp_path, size_per_learner=2)
+    # Submit 3 tasks to example_func.
+    for i in range(3):
+        executor.submit(example_func, i)
+
+    rm = executor.finalize(start=False)
+    # For 3 tasks with chunk size 2:
+    # - The first chunk (learner 0) has tasks 0 and 1.
+    # - The second chunk (learner 1) has task 2.
+    func_id = executor._sequence_mapping[example_func]
+    expected_mapping = {
+        (func_id, 0): (0, 0),
+        (func_id, 1): (0, 1),
+        (func_id, 2): (1, 0),
+    }
+    assert executor._task_mapping == expected_mapping
+    # Also, the run manager should have 2 learners.
+    assert isinstance(rm, RunManager)
+    assert len(rm.learners) == 2
+
+
+@pytest.mark.usefixtures("_mock_slurm_partitions")
+@pytest.mark.usefixtures("_mock_slurm_queue")
+def test_task_get_with_chunking(tmp_path: Path) -> None:
+    """Test that tasks in different learners retrieve the correct result when using size_per_learner."""
+    executor = SlurmExecutor(folder=tmp_path, size_per_learner=2, save_interval=1)
+    # Submit three tasks; with size_per_learner=2, this will produce 2 learners.
+    task1 = executor.submit(example_func, 42)
+    task2 = executor.submit(example_func, 43)
+    task3 = executor.submit(example_func, 44)
+    rm = executor.finalize(start=False)
+
+    # For learner 0 (tasks 0 and 1)
+    assert isinstance(rm, RunManager)
+    learner0 = rm.learners[0]
+    fname0 = rm.fnames[0]
+    learner0.data[0] = 42
+    learner0.data[1] = 43
+    learner0.save(fname0)
+    # For learner 1 (task 2)
+    learner1 = rm.learners[1]
+    fname1 = rm.fnames[1]
+    learner1.data[0] = 44
+    learner1.save(fname1)
+
+    # _get() should now retrieve the correct values based on the mapping.
+    assert task1._get() == 42
+    assert task2._get() == 43
+    assert task3._get() == 44
+
+
+@pytest.mark.usefixtures("_mock_slurm_partitions")
+@pytest.mark.usefixtures("_mock_slurm_queue")
+def test_mapping_multiple_functions(tmp_path: Path) -> None:
+    """Test that the mapping is correct when tasks are submitted for multiple functions."""
+    executor = SlurmExecutor(folder=tmp_path, size_per_learner=2)
+    # Submit two tasks for example_func and two for another_func.
+    executor.submit(example_func, 10)
+    executor.submit(example_func, 20)
+    executor.submit(another_func, 5)
+    executor.submit(another_func, 6)
+
+    # Directly call _to_learners to examine the mapping.
+    learners, fnames, mapping = executor._to_learners()
+
+    expected_mapping = {
+        # For example_func: two tasks in one learner (since 2 tasks fit in one chunk).
+        (executor._sequence_mapping[example_func], 0): (0, 0),
+        (executor._sequence_mapping[example_func], 1): (0, 1),
+        # For another_func: two tasks in one learner.
+        (executor._sequence_mapping[another_func], 0): (1, 0),
+        (executor._sequence_mapping[another_func], 1): (1, 1),
+    }
+    assert mapping == expected_mapping
+    assert len(learners) == 2
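For reference, the mapping convention these tests assert can be summarized independently of the executor internals: per function, submitted tasks are split into chunks of size_per_learner, and each (func_id, task_index) key maps to (learner_index, index_within_learner), with learners numbered globally across functions. The helper below is a minimal illustrative sketch of that convention only; the name chunk_task_mapping and its inputs are hypothetical and are not part of SlurmExecutor.

from itertools import count


def chunk_task_mapping(
    tasks_per_func: dict[int, int],  # hypothetical input: func_id -> number of submitted tasks
    size_per_learner: int,
) -> dict[tuple[int, int], tuple[int, int]]:
    """Sketch: map (func_id, task_index) -> (learner_index, index_within_learner)."""
    mapping: dict[tuple[int, int], tuple[int, int]] = {}
    learner_ids = count()  # learners are numbered globally, across functions
    for func_id, n_tasks in tasks_per_func.items():
        # Split this function's tasks into chunks of size_per_learner.
        for start in range(0, n_tasks, size_per_learner):
            learner = next(learner_ids)
            chunk = range(start, min(start + size_per_learner, n_tasks))
            for local, task in enumerate(chunk):
                mapping[(func_id, task)] = (learner, local)
    return mapping


# Matches the expectation in test_to_learners_mapping_single_function:
# chunk_task_mapping({0: 5}, 2) == {
#     (0, 0): (0, 0), (0, 1): (0, 1), (0, 2): (1, 0), (0, 3): (1, 1), (0, 4): (2, 0),
# }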