-
Notifications
You must be signed in to change notification settings - Fork 8
114 singularity integration #118
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from 18 commits
87e226c
e59321d
b4c4903
9139798
82b3123
4632521
3941650
c4cf081
939b1be
1092233
189a6eb
4df7322
3d3d8e2
7eb59eb
2ec7c27
11ab654
5984364
87f030e
5657c26
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change | ||
---|---|---|---|---|
|
@@ -7,9 +7,10 @@ | |||
|
||||
from loguru import logger | ||||
|
||||
from brats.core.docker import run_container | ||||
from brats.core.docker import run_container as run_docker_container | ||||
from brats.core.singularity import run_container as run_singularity_container | ||||
from brats.utils.algorithm_config import load_algorithms | ||||
from brats.constants import OUTPUT_NAME_SCHEMA, Algorithms, Task | ||||
from brats.constants import OUTPUT_NAME_SCHEMA, Algorithms, Task, Backends | ||||
from brats.utils.data_handling import InferenceSetup | ||||
from brats.utils.exceptions import AlgorithmConfigException | ||||
|
||||
|
@@ -163,6 +164,7 @@ def _infer_single( | |||
inputs: dict[str, Path | str], | ||||
output_file: Path | str, | ||||
log_file: Optional[Path | str] = None, | ||||
backend: str = "docker", | ||||
) -> None: | ||||
""" | ||||
Perform a single inference run with the provided inputs and save the output in the specified file. | ||||
|
@@ -171,6 +173,7 @@ def _infer_single( | |||
inputs (dict[str, Path | str]): Input Images for the task | ||||
output_file (Path | str): File to save the output | ||||
log_file (Optional[Path | str], optional): Log file with extra information. Defaults to None. | ||||
backend (str, optional): Backend to use for inference. Defaults to "docker". | ||||
""" | ||||
with InferenceSetup(log_file=log_file) as (tmp_data_folder, tmp_output_folder): | ||||
logger.info(f"Performing single inference") | ||||
|
@@ -184,14 +187,25 @@ def _infer_single( | |||
inputs=inputs, | ||||
subject_modality_separator=self.algorithm.run_args.subject_modality_separator, | ||||
) | ||||
backend_dispatch = { | ||||
Backends.DOCKER: run_docker_container, | ||||
Backends.SINGULARITY: run_singularity_container, | ||||
} | ||||
|
||||
run_container( | ||||
# Get the function for the selected backend | ||||
runner = backend_dispatch.get(backend) | ||||
|
||||
if runner is None: | ||||
raise ValueError(f"Unsupported backend: {backend}") | ||||
|
||||
runner( | ||||
algorithm=self.algorithm, | ||||
data_path=tmp_data_folder, | ||||
output_path=tmp_output_folder, | ||||
cuda_devices=self.cuda_devices, | ||||
force_cpu=self.force_cpu, | ||||
) | ||||
|
||||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. [nitpick] This extra blank line is unnecessary and reduces code readability. Remove the empty line after the
Suggested change
Copilot uses AI. Check for mistakes. Positive Feedback / Negative Feedback |
||||
self._process_single_output( | ||||
tmp_output_folder=tmp_output_folder, | ||||
subject_id=subject_id, | ||||
|
@@ -204,13 +218,15 @@ def _infer_batch( | |||
data_folder: Path | str, | ||||
output_folder: Path | str, | ||||
log_file: Optional[Path | str] = None, | ||||
backend: str = "docker", | ||||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. same here |
||||
): | ||||
"""Perform a batch inference run with the provided inputs and save the outputs in the specified folder. | ||||
|
||||
Args: | ||||
data_folder (Path | str): Folder with the input data | ||||
output_folder (Path | str): Folder to save the outputs | ||||
log_file (Optional[Path | str], optional): Log file with extra information. Defaults to None. | ||||
backend (str, optional): Backend to use for inference. Defaults to "docker". | ||||
""" | ||||
with InferenceSetup(log_file=log_file) as (tmp_data_folder, tmp_output_folder): | ||||
|
||||
|
@@ -226,9 +242,18 @@ def _infer_batch( | |||
input_name_schema=self.algorithm.run_args.input_name_schema, | ||||
) | ||||
logger.info(f"Standardized input names to match algorithm requirements.") | ||||
backend_dispatch = { | ||||
Backends.DOCKER: run_docker_container, | ||||
Backends.SINGULARITY: run_singularity_container, | ||||
} | ||||
|
||||
# Get the function for the selected backend | ||||
runner = backend_dispatch.get(backend) | ||||
|
||||
if runner is None: | ||||
raise ValueError(f"Unsupported backend: {backend}") | ||||
# run inference in container | ||||
run_container( | ||||
runner( | ||||
algorithm=self.algorithm, | ||||
data_path=tmp_data_folder, | ||||
output_path=tmp_output_folder, | ||||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -10,6 +10,7 @@ | |
from brats.core.brats_algorithm import BraTSAlgorithm | ||
from brats.constants import INPAINTING_ALGORITHMS, InpaintingAlgorithms, Task | ||
from brats.utils.data_handling import input_sanity_check | ||
import os | ||
|
||
|
||
class Inpainter(BraTSAlgorithm): | ||
|
@@ -102,6 +103,7 @@ def infer_single( | |
mask: Path | str, | ||
output_file: Path | str, | ||
log_file: Optional[Path | str] = None, | ||
backend: Optional[str] = None, | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. same for all algorithm classes: use the enum |
||
) -> None: | ||
"""Perform inpainting task on a single subject with the provided images and save the result to the output file. | ||
|
||
|
@@ -110,18 +112,28 @@ def infer_single( | |
mask (Path | str): Path to the mask image | ||
output_file (Path | str): Path to save the segmentation | ||
log_file (Path | str, optional): Save logs to this file | ||
backend (str, optional): Backend to use for inference. Defaults to "docker". | ||
""" | ||
if backend is None: | ||
backend_env = os.environ.get("BRATS_ORCHESTRATOR_BACKEND") | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. no difference but prettier: |
||
if backend_env: | ||
backend = backend_env | ||
else: | ||
backend = "docker" | ||
|
||
self._infer_single( | ||
inputs={"t1n": t1n, "mask": mask}, | ||
output_file=output_file, | ||
log_file=log_file, | ||
backend=backend, | ||
) | ||
|
||
def infer_batch( | ||
self, | ||
data_folder: Path | str, | ||
output_folder: Path | str, | ||
log_file: Path | str | None = None, | ||
backend: Optional[str] = None, | ||
) -> None: | ||
"""Perform inpainting on a batch of subjects with the provided images and save the results to the output folder. \n | ||
Requires the following structure:\n | ||
|
@@ -139,7 +151,18 @@ def infer_batch( | |
data_folder (Path | str): Folder containing the subjects with required structure | ||
output_folder (Path | str): Output folder to save the segmentations | ||
log_file (Path | str, optional): Save logs to this file | ||
backend (str, optional): Backend to use for inference. Defaults to "docker". | ||
""" | ||
if backend is None: | ||
backend_env = os.environ.get("BRATS_ORCHESTRATOR_BACKEND") | ||
if backend_env: | ||
backend = backend_env | ||
else: | ||
backend = "docker" | ||
|
||
return self._infer_batch( | ||
data_folder=data_folder, output_folder=output_folder, log_file=log_file | ||
data_folder=data_folder, | ||
output_folder=output_folder, | ||
log_file=log_file, | ||
backend=backend, | ||
) |
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -10,6 +10,7 @@ | |
from brats.constants import MISSING_MRI_ALGORITHMS, MissingMRIAlgorithms, Task | ||
from brats.core.brats_algorithm import BraTSAlgorithm | ||
from brats.utils.data_handling import input_sanity_check | ||
import os | ||
|
||
|
||
class MissingMRI(BraTSAlgorithm): | ||
|
@@ -116,6 +117,7 @@ def infer_single( | |
t2f: Optional[Union[Path, str]] = None, | ||
t2w: Optional[Union[Path, str]] = None, | ||
log_file: Optional[Path | str] = None, | ||
backend: Optional[str] = None, | ||
) -> None: | ||
""" | ||
Perform synthesis of the missing modality for a single subject with the provided images and save the result to the output file. | ||
|
@@ -129,7 +131,8 @@ def infer_single( | |
t1n (Optional[Union[Path, str]], optional): Path to the T1n image. Defaults to None. | ||
t2f (Optional[Union[Path, str]], optional): Path to the T2f image. Defaults to None. | ||
t2w (Optional[Union[Path, str]], optional): Path to the T2w image. Defaults to None. | ||
log_file (Optional[Path | str], optional): Save logs to this file. Defaults to None | ||
log_file (Optional[Path | str], optional): Save logs to this file. Defaults to None. | ||
backend (str, optional): Backend to use for inference. Defaults to "docker". | ||
Comment on lines
+134
to
+135
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. [nitpick] The docstring formatting is inconsistent. Line 134 has a period at the end while line 135 doesn't. For consistency, both lines should either have periods or not have them. Copilot uses AI. Check for mistakes. Positive Feedback / Negative Feedback |
||
""" | ||
|
||
inputs = {"t1c": t1c, "t1n": t1n, "t2f": t2f, "t2w": t2w} | ||
|
@@ -141,17 +144,26 @@ def infer_single( | |
len(inputs) == 3 | ||
), "Exactly 3 inputs are required to perform synthesis of the missing modality" | ||
|
||
if backend is None: | ||
backend_env = os.environ.get("BRATS_ORCHESTRATOR_BACKEND") | ||
if backend_env: | ||
backend = backend_env | ||
else: | ||
backend = "docker" | ||
|
||
self._infer_single( | ||
inputs=inputs, | ||
output_file=output_file, | ||
log_file=log_file, | ||
backend=backend, | ||
) | ||
|
||
def infer_batch( | ||
self, | ||
data_folder: Path | str, | ||
output_folder: Path | str, | ||
log_file: Path | str | None = None, | ||
backend: Optional[str] = None, | ||
) -> None: | ||
"""Perform synthesis on a batch of subjects with the provided images and save the results to the output folder. \n | ||
|
||
|
@@ -170,7 +182,18 @@ def infer_batch( | |
data_folder (Path | str): Folder containing the subjects with required structure | ||
output_folder (Path | str): Output folder to save the segmentation | ||
log_file (Path | str, optional): Save logs to this file | ||
backend (str, optional): Backend to use for inference. Defaults to "docker". | ||
""" | ||
if backend is None: | ||
backend_env = os.environ.get("BRATS_ORCHESTRATOR_BACKEND") | ||
if backend_env: | ||
backend = backend_env | ||
else: | ||
backend = "docker" | ||
|
||
return self._infer_batch( | ||
data_folder=data_folder, output_folder=output_folder, log_file=log_file | ||
data_folder=data_folder, | ||
output_folder=output_folder, | ||
log_file=log_file, | ||
backend=backend, | ||
) |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Why not use the Backend enum as type and default value? Would be cleaner IMO