Launching parallel tasks#

Execution pool#

Execute a function on SLURM with an API similar to Python's concurrent.futures

from pyslurmutils.concurrent.futures import SlurmRestExecutor

# Submit a Python function to SLURM via the REST API using the familiar
# concurrent.futures executor interface.
with SlurmRestExecutor(
    url,
    user_name,
    token,
    log_directory="/path/to/log",  # for log files
    data_directory="/path/to/data",  # TCP when not provided
    pre_script="module load ewoks",  # load environment
    parameters={"time_limit": 120},  # SLURM job parameters
    python_cmd="python",  # python command (python3 by default)
) as pool:

    future = pool.submit(sum, [1, 1])
    assert future.result() == 2

The Python environment can be selected with the pre_script argument. Note that pyslurmutils relies only on Python builtins on the SLURM side.

Slurm python client#

A lower-level API exists to execute Python functions on SLURM

from pyslurmutils.client import SlurmPythonJobRestClient

# Lower-level client: spawn a Python function on SLURM and manage the job
# artifacts (logs, cleanup) explicitly.
with SlurmPythonJobRestClient(
    url,
    user_name,
    token,
    log_directory="/path/to/log",  # for log files
    data_directory="/path/to/data",  # TCP when not provided
    pre_script="module load ewoks",  # load environment
    parameters={"time_limit": 120},  # SLURM job parameters
    python_cmd="python",  # python command (python3 by default)
) as client:
    future = client.spawn(sum, args=([1, 1],))
    try:
        assert future.result() == 2
    finally:
        # Always surface the job logs and clean up, even when the job failed.
        try:
            client.print_stdout_stderr(future.job_id)
        finally:
            future.cleanup_job()

Slurm script client#

To execute a script on Slurm (bash script by default):

from pyslurmutils.client import SlurmScriptRestClient

# Script client: submit a shell script, wait for completion, print its
# output, and always remove the job artifacts.
with SlurmScriptRestClient(
    url,
    user_name,
    token,
    log_directory="/path/to/log",  # for log files
    parameters={"time_limit": 120},  # SLURM job parameters
) as client:
    job_id = client.submit_script("echo Hello World")
    try:
        client.wait_done(job_id)
        client.print_stdout_stderr(job_id)
    finally:
        client.clean_job_artifacts(job_id)