Changes from 18 commits
Commits (19)
87e226c
Add backend parameter to inference methods for inpainting and segment…
SimoneBendazzoli93 Sep 25, 2025
e59321d
Add Backends enum for algorithm execution environments
SimoneBendazzoli93 Sep 25, 2025
b4c4903
Add backend support for inference methods in BraTSAlgorithm class
SimoneBendazzoli93 Sep 25, 2025
9139798
Add Singularity support with image management and container execution
SimoneBendazzoli93 Sep 25, 2025
82b3123
Add spython dependency to pyproject.toml
SimoneBendazzoli93 Sep 25, 2025
4632521
revert poetry.lock file
SimoneBendazzoli93 Sep 25, 2025
3941650
Refactor backend parameter handling in inference methods to support e…
SimoneBendazzoli93 Sep 25, 2025
c4cf081
Add Singularity support to README with installation instructions and …
SimoneBendazzoli93 Sep 25, 2025
939b1be
Add spython package to poetry.lock for Singularity support
SimoneBendazzoli93 Sep 25, 2025
1092233
Autoformat with black
brainless-bot[bot] Sep 25, 2025
189a6eb
Update brats/core/singularity.py
SimoneBendazzoli93 Sep 25, 2025
4df7322
Remove unnecessary options for container execution in run_container f…
SimoneBendazzoli93 Sep 25, 2025
3d3d8e2
Remove unused import of _ensure_image in test_singularity.py
SimoneBendazzoli93 Sep 25, 2025
7eb59eb
Update volume mapping function references to use _get_volume_mappings…
SimoneBendazzoli93 Sep 25, 2025
2ec7c27
Rename _build_args to _build_command_args in singularity.py and test_…
SimoneBendazzoli93 Sep 25, 2025
11ab654
Update test assertions to use mock_build_command_args in TestSingular…
SimoneBendazzoli93 Sep 25, 2025
5984364
Rename run_container to run_docker_container in test_brats_algorithm.py
SimoneBendazzoli93 Sep 25, 2025
87f030e
Rename run_container to run_docker_container in test_segmentation_alg…
SimoneBendazzoli93 Sep 25, 2025
5657c26
Merge branch 'main' into 114-singularity-integration
neuronflow Sep 26, 2025
3 changes: 3 additions & 0 deletions README.md
@@ -53,6 +53,9 @@ pip install brats
- **Docker**: Installation instructions on the official [website](https://docs.docker.com/get-docker/)
- **NVIDIA Container Toolkit**: Refer to the [NVIDIA install guide](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html) and the official [GitHub page](https://github.com/NVIDIA/nvidia-container-toolkit)

## Singularity Support
BraTS orchestrator also supports Singularity as an alternative to Docker.
To enable Singularity, install it following the [official guide](https://docs.sylabs.io/guides/3.0/user-guide/installation.html) and set the environment variable `BRATS_ORCHESTRATOR_BACKEND=singularity`.
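For reference, the backend can also be selected per call through the new `backend` parameter added in this PR. A minimal sketch, assuming the `Inpainter` import path shown below and that its default constructor is sufficient; all file names are placeholders:

```python
import os

from brats.core.inpainting_algorithms import Inpainter  # import path assumed from this PR's module layout

# Option 1: choose the backend globally via the environment variable
os.environ["BRATS_ORCHESTRATOR_BACKEND"] = "singularity"

# Option 2: choose the backend per call via the new `backend` parameter
inpainter = Inpainter()  # assumes the default constructor is sufficient
inpainter.infer_single(
    t1n="subject_t1n_voided.nii.gz",   # placeholder input paths
    mask="subject_mask.nii.gz",
    output_file="subject_inpainted.nii.gz",
    backend="singularity",             # an explicit argument overrides the environment variable
)
```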

## Available Algorithms and Usage

10 changes: 10 additions & 0 deletions brats/constants.py
@@ -4,6 +4,16 @@
# TASK ENUM


class Backends(str, Enum):
"""Available backends for running the algorithms."""

DOCKER = "docker"
"""Run the algorithms using Docker containers."""

SINGULARITY = "singularity"
"""Run the algorithms using Singularity containers."""


class Task(str, Enum):
"""Available tasks."""

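Because `Backends` mixes in `str`, its members compare and hash like their plain string values; this is what lets the dispatch tables added in `brats_algorithm.py` below be keyed by enum members while the `backend` argument travels around as an ordinary string. A small illustrative check (the dispatch values here are just stand-in strings):

```python
from enum import Enum


class Backends(str, Enum):
    """Available backends for running the algorithms."""

    DOCKER = "docker"
    SINGULARITY = "singularity"


dispatch = {Backends.DOCKER: "run_docker", Backends.SINGULARITY: "run_singularity"}

# str/Enum mixin: members are equal to, and hash like, their string values,
# so a plain string key resolves to the matching enum entry.
assert Backends.DOCKER == "docker"
assert dispatch.get("singularity") == "run_singularity"
assert dispatch.get("podman") is None  # unsupported backends fall through to None
```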
33 changes: 29 additions & 4 deletions brats/core/brats_algorithm.py
@@ -7,9 +7,10 @@

from loguru import logger

from brats.core.docker import run_container
from brats.core.docker import run_container as run_docker_container
from brats.core.singularity import run_container as run_singularity_container
from brats.utils.algorithm_config import load_algorithms
from brats.constants import OUTPUT_NAME_SCHEMA, Algorithms, Task
from brats.constants import OUTPUT_NAME_SCHEMA, Algorithms, Task, Backends
from brats.utils.data_handling import InferenceSetup
from brats.utils.exceptions import AlgorithmConfigException

@@ -163,6 +164,7 @@ def _infer_single(
inputs: dict[str, Path | str],
output_file: Path | str,
log_file: Optional[Path | str] = None,
backend: str = "docker",
Collaborator: Why not use the Backend enum as type and default value? Would be cleaner IMO
) -> None:
"""
Perform a single inference run with the provided inputs and save the output in the specified file.
@@ -171,6 +173,7 @@
inputs (dict[str, Path | str]): Input Images for the task
output_file (Path | str): File to save the output
log_file (Optional[Path | str], optional): Log file with extra information. Defaults to None.
backend (str, optional): Backend to use for inference. Defaults to "docker".
"""
with InferenceSetup(log_file=log_file) as (tmp_data_folder, tmp_output_folder):
logger.info(f"Performing single inference")
@@ -184,14 +187,25 @@
inputs=inputs,
subject_modality_separator=self.algorithm.run_args.subject_modality_separator,
)
backend_dispatch = {
Backends.DOCKER: run_docker_container,
Backends.SINGULARITY: run_singularity_container,
}

run_container(
# Get the function for the selected backend
runner = backend_dispatch.get(backend)

if runner is None:
raise ValueError(f"Unsupported backend: {backend}")

runner(
algorithm=self.algorithm,
data_path=tmp_data_folder,
output_path=tmp_output_folder,
cuda_devices=self.cuda_devices,
force_cpu=self.force_cpu,
)

Copilot AI (Sep 25, 2025): [nitpick] This extra blank line is unnecessary and reduces code readability. Remove the empty line after the runner() function call.
self._process_single_output(
tmp_output_folder=tmp_output_folder,
subject_id=subject_id,
@@ -204,13 +218,15 @@ def _infer_batch(
data_folder: Path | str,
output_folder: Path | str,
log_file: Optional[Path | str] = None,
backend: str = "docker",
Collaborator: same here
):
"""Perform a batch inference run with the provided inputs and save the outputs in the specified folder.

Args:
data_folder (Path | str): Folder with the input data
output_folder (Path | str): Folder to save the outputs
log_file (Optional[Path | str], optional): Log file with extra information. Defaults to None.
backend (str, optional): Backend to use for inference. Defaults to "docker".
"""
with InferenceSetup(log_file=log_file) as (tmp_data_folder, tmp_output_folder):

@@ -226,9 +242,18 @@
input_name_schema=self.algorithm.run_args.input_name_schema,
)
logger.info(f"Standardized input names to match algorithm requirements.")
backend_dispatch = {
Backends.DOCKER: run_docker_container,
Backends.SINGULARITY: run_singularity_container,
}

# Get the function for the selected backend
runner = backend_dispatch.get(backend)

if runner is None:
raise ValueError(f"Unsupported backend: {backend}")
# run inference in container
run_container(
runner(
algorithm=self.algorithm,
data_path=tmp_data_folder,
output_path=tmp_output_folder,
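The collaborator's suggestion above (type the parameter with the enum and give it an enum default) could be paired with coercion through the enum constructor, which would also subsume the manual `if runner is None` check. A hypothetical sketch, not the code in this PR; `resolve_runner` is an invented name:

```python
from brats.constants import Backends
from brats.core.docker import run_container as run_docker_container
from brats.core.singularity import run_container as run_singularity_container


def resolve_runner(backend: Backends = Backends.DOCKER):
    """Hypothetical helper mapping a backend choice to its container runner."""
    backend_dispatch = {
        Backends.DOCKER: run_docker_container,
        Backends.SINGULARITY: run_singularity_container,
    }
    # Backends(...) accepts both enum members and plain strings, and raises
    # ValueError for unsupported values such as "podman".
    return backend_dispatch[Backends(backend)]
```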
25 changes: 24 additions & 1 deletion brats/core/inpainting_algorithms.py
@@ -10,6 +10,7 @@
from brats.core.brats_algorithm import BraTSAlgorithm
from brats.constants import INPAINTING_ALGORITHMS, InpaintingAlgorithms, Task
from brats.utils.data_handling import input_sanity_check
import os


class Inpainter(BraTSAlgorithm):
@@ -102,6 +103,7 @@ def infer_single(
mask: Path | str,
output_file: Path | str,
log_file: Optional[Path | str] = None,
backend: Optional[str] = None,
Collaborator: same for all algorithm classes: use the enum
) -> None:
"""Perform inpainting task on a single subject with the provided images and save the result to the output file.

@@ -110,18 +112,28 @@
mask (Path | str): Path to the mask image
output_file (Path | str): Path to save the segmentation
log_file (Path | str, optional): Save logs to this file
backend (str, optional): Backend to use for inference. Defaults to "docker".
"""
if backend is None:
backend_env = os.environ.get("BRATS_ORCHESTRATOR_BACKEND")
Collaborator: no difference but prettier: `backend = os.environ.get("BRATS_ORCHESTRATOR_BACKEND", "docker")`
if backend_env:
backend = backend_env
else:
backend = "docker"

self._infer_single(
inputs={"t1n": t1n, "mask": mask},
output_file=output_file,
log_file=log_file,
backend=backend,
)

def infer_batch(
self,
data_folder: Path | str,
output_folder: Path | str,
log_file: Path | str | None = None,
backend: Optional[str] = None,
) -> None:
"""Perform inpainting on a batch of subjects with the provided images and save the results to the output folder. \n
Requires the following structure:\n
@@ -139,7 +151,18 @@ def infer_batch(
data_folder (Path | str): Folder containing the subjects with required structure
output_folder (Path | str): Output folder to save the segmentations
log_file (Path | str, optional): Save logs to this file
backend (str, optional): Backend to use for inference. Defaults to "docker".
"""
if backend is None:
backend_env = os.environ.get("BRATS_ORCHESTRATOR_BACKEND")
if backend_env:
backend = backend_env
else:
backend = "docker"

return self._infer_batch(
data_folder=data_folder, output_folder=output_folder, log_file=log_file
data_folder=data_folder,
output_folder=output_folder,
log_file=log_file,
backend=backend,
)
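The same env-var fallback block appears in `infer_single` and `infer_batch` here and again in the other algorithm classes below. It could be collapsed to the collaborator's one-liner, or factored into a small shared helper; a hypothetical sketch (the helper name `_resolve_backend` is not part of this PR):

```python
import os
from typing import Optional


def _resolve_backend(backend: Optional[str] = None) -> str:
    """Hypothetical helper: an explicit argument wins, then the
    BRATS_ORCHESTRATOR_BACKEND environment variable, then the docker default."""
    if backend is not None:
        return backend
    return os.environ.get("BRATS_ORCHESTRATOR_BACKEND", "docker")


# Inside infer_single / infer_batch the four-line block would then reduce to:
#     backend = _resolve_backend(backend)
```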
27 changes: 25 additions & 2 deletions brats/core/missing_mri_algorithms.py
@@ -10,6 +10,7 @@
from brats.constants import MISSING_MRI_ALGORITHMS, MissingMRIAlgorithms, Task
from brats.core.brats_algorithm import BraTSAlgorithm
from brats.utils.data_handling import input_sanity_check
import os


class MissingMRI(BraTSAlgorithm):
@@ -116,6 +117,7 @@ def infer_single(
t2f: Optional[Union[Path, str]] = None,
t2w: Optional[Union[Path, str]] = None,
log_file: Optional[Path | str] = None,
backend: Optional[str] = None,
) -> None:
"""
Perform synthesis of the missing modality for a single subject with the provided images and save the result to the output file.
@@ -129,7 +131,8 @@
t1n (Optional[Union[Path, str]], optional): Path to the T1n image. Defaults to None.
t2f (Optional[Union[Path, str]], optional): Path to the T2f image. Defaults to None.
t2w (Optional[Union[Path, str]], optional): Path to the T2w image. Defaults to None.
log_file (Optional[Path | str], optional): Save logs to this file. Defaults to None
log_file (Optional[Path | str], optional): Save logs to this file. Defaults to None.
backend (str, optional): Backend to use for inference. Defaults to "docker".
Copilot AI (Sep 25, 2025), on lines +134 to +135: [nitpick] The docstring formatting is inconsistent. Line 134 has a period at the end while line 135 doesn't. For consistency, both lines should either have periods or not have them.
"""

inputs = {"t1c": t1c, "t1n": t1n, "t2f": t2f, "t2w": t2w}
@@ -141,17 +144,26 @@
len(inputs) == 3
), "Exactly 3 inputs are required to perform synthesis of the missing modality"

if backend is None:
backend_env = os.environ.get("BRATS_ORCHESTRATOR_BACKEND")
if backend_env:
backend = backend_env
else:
backend = "docker"

self._infer_single(
inputs=inputs,
output_file=output_file,
log_file=log_file,
backend=backend,
)

def infer_batch(
self,
data_folder: Path | str,
output_folder: Path | str,
log_file: Path | str | None = None,
backend: Optional[str] = None,
) -> None:
"""Perform synthesis on a batch of subjects with the provided images and save the results to the output folder. \n

Expand All @@ -170,7 +182,18 @@ def infer_batch(
data_folder (Path | str): Folder containing the subjects with required structure
output_folder (Path | str): Output folder to save the segmentation
log_file (Path | str, optional): Save logs to this file
backend (str, optional): Backend to use for inference. Defaults to "docker".
"""
if backend is None:
backend_env = os.environ.get("BRATS_ORCHESTRATOR_BACKEND")
if backend_env:
backend = backend_env
else:
backend = "docker"

return self._infer_batch(
data_folder=data_folder, output_folder=output_folder, log_file=log_file
data_folder=data_folder,
output_folder=output_folder,
log_file=log_file,
backend=backend,
)
25 changes: 24 additions & 1 deletion brats/core/segmentation_algorithms.py
@@ -29,6 +29,7 @@
)
from brats.core.brats_algorithm import BraTSAlgorithm
from brats.utils.data_handling import input_sanity_check
import os


class SegmentationAlgorithm(BraTSAlgorithm):
@@ -165,6 +166,7 @@ def infer_single(
t2w: Path | str,
output_file: Path | str,
log_file: Optional[Path | str] = None,
backend: Optional[str] = None,
) -> None:
"""Perform segmentation on a single subject with the provided images and save the result to the output file.

@@ -175,18 +177,28 @@
t2w (Path | str): Path to the T2w image
output_file (Path | str): Path to save the segmentation
log_file (Path | str, optional): Save logs to this file
backend (str, optional): Backend to use for inference. Defaults to "docker".
"""
if backend is None:
backend_env = os.environ.get("BRATS_ORCHESTRATOR_BACKEND")
if backend_env:
backend = backend_env
else:
backend = "docker"

self._infer_single(
inputs={"t1c": t1c, "t1n": t1n, "t2f": t2f, "t2w": t2w},
output_file=output_file,
log_file=log_file,
backend=backend,
)

def infer_batch(
self,
data_folder: Path | str,
output_folder: Path | str,
log_file: Path | str | None = None,
backend: Optional[str] = None,
) -> None:
"""Perform segmentation on a batch of subjects with the provided images and save the results to the output folder. \n
Requires the following structure:\n
@@ -205,9 +217,20 @@
data_folder (Path | str): Folder containing the subjects with required structure
output_folder (Path | str): Output folder to save the segmentations
log_file (Path | str, optional): Save logs to this file
backend (str, optional): Backend to use for inference. Defaults to "docker".
"""
if backend is None:
backend_env = os.environ.get("BRATS_ORCHESTRATOR_BACKEND")
if backend_env:
backend = backend_env
else:
backend = "docker"

return self._infer_batch(
data_folder=data_folder, output_folder=output_folder, log_file=log_file
data_folder=data_folder,
output_folder=output_folder,
log_file=log_file,
backend=backend,
)

