
Commit 5bc6725

Merge branch 'Project-MONAI:dev' into dev
2 parents: 63715b7 + 0968da2


107 files changed: +621 −712 lines


.github/workflows/pythonapp-min.yml

Lines changed: 1 addition & 1 deletion
@@ -124,7 +124,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        pytorch-version: ['2.4.1', '2.5.1', '2.6.0', '2.7.1']
+        pytorch-version: ['2.5.1', '2.6.0', '2.7.1', '2.8.0']
     timeout-minutes: 40
     steps:
     - uses: actions/checkout@v4

.github/workflows/pythonapp.yml

Lines changed: 3 additions & 3 deletions
@@ -94,7 +94,7 @@ jobs:
     - if: runner.os == 'windows'
      name: Install torch cpu from pytorch.org (Windows only)
      run: |
-        python -m pip install torch==2.4.1 torchvision==0.19.1+cpu --index-url https://download.pytorch.org/whl/cpu
+        python -m pip install torch==2.5.1 torchvision==0.20.1+cpu --index-url https://download.pytorch.org/whl/cpu
     - if: runner.os == 'Linux'
      name: Install itk pre-release (Linux only)
      run: |
@@ -103,7 +103,7 @@ jobs:
     - name: Install the dependencies
      run: |
        python -m pip install --user --upgrade pip wheel
-        python -m pip install torch==2.4.1 torchvision==0.19.1
+        python -m pip install torch==2.5.1 torchvision==0.20.1
        cat "requirements-dev.txt"
        python -m pip install -r requirements-dev.txt
        python -m pip list
@@ -155,7 +155,7 @@ jobs:
        # install the latest pytorch for testing
        # however, "pip install monai*.tar.gz" will build cpp/cuda with an isolated
        # fresh torch installation according to pyproject.toml
-        python -m pip install torch>=2.4.1 torchvision
+        python -m pip install torch>=2.5.1 torchvision
     - name: Check packages
      run: |
        pip uninstall monai

docs/requirements.txt

Lines changed: 1 addition & 1 deletion
@@ -40,6 +40,6 @@ onnx>=1.13.0
 onnxruntime; python_version <= '3.10'
 zarr
 huggingface_hub
-pyamg>=5.0.0
+pyamg>=5.0.0, <5.3.0
 packaging
 polygraphy

monai/apps/auto3dseg/auto_runner.py

Lines changed: 9 additions & 0 deletions
@@ -194,6 +194,15 @@ class AutoRunner:
            ├── segresnet2d_0 # network scripts/configs/checkpoints and pickle object of the algo
            └── swinunetr_0 # network scripts/configs/checkpoints and pickle object of the algo

+
+    The input config requires at least the following keys:
+        - ``modality``: the modality of the data, e.g. "ct", "mri", etc.
+        - ``datalist``: the path to the datalist file in JSON format.
+        - ``dataroot``: the root directory of the data files.
+
+    For the datalist file format, see the description under :py:func:`monai.data.load_decathlon_datalist`.
+    Note that the AutoRunner will use the "validation" key in the datalist file if it exists, otherwise
+    it will do cross-validation, by default with five folds (this is hardcoded).
     """

     analyze_params: dict | None
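
For reference, the three required keys documented above can be assembled into an input config along these lines; this is a minimal, illustrative sketch (paths, modality value, and work_dir are hypothetical):

# Minimal sketch of an AutoRunner input config; paths and modality are hypothetical.
from monai.apps.auto3dseg import AutoRunner

input_cfg = {
    "modality": "ct",                    # imaging modality of the dataset
    "datalist": "./task_datalist.json",  # Decathlon-style datalist in JSON format
    "dataroot": "./data",                # root directory containing the image/label files
}

runner = AutoRunner(work_dir="./auto3dseg_work_dir", input=input_cfg)
# runner.run()  # launches analysis, algorithm generation, training and ensembling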

monai/apps/detection/metrics/coco.py

Lines changed: 2 additions & 3 deletions
@@ -457,7 +457,7 @@ def _compute_statistics(self, results_list: list[dict[int, dict[str, np.ndarray]
                dt_ignores = np.concatenate([r["dtIgnore"][:, 0:max_det] for r in results], axis=1)[:, inds]
                self.check_number_of_iou(dt_matches, dt_ignores)
                gt_ignore = np.concatenate([r["gtIgnore"] for r in results])
-               num_gt = np.count_nonzero(gt_ignore == 0)  # number of ground truth boxes (non ignored)
+               num_gt = int(np.count_nonzero(gt_ignore == 0))  # number of ground truth boxes (non ignored)
                if num_gt == 0:
                    logger.warning(f"WARNING, no gt found for coco metric for class {cls_i}")
                    continue
@@ -523,13 +523,12 @@ def _compute_stats_single_threshold(
        recall = 0

    # array where precision values nearest to given recall th are saved
-   precision = np.zeros((num_recall_th,))
+   precision = [0.0] * num_recall_th
    # save scores for corresponding recall value in here
    th_scores = np.zeros((num_recall_th,))
    # numpy is slow without cython optimization for accessing elements
    # use python array gets significant speed improvement
    pr = pr.tolist()
-   precision = precision.tolist()

    # smooth precision curve (create box shape)
    for i in range(len(tp) - 1, 0, -1):
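
The comments kept in this hunk explain why the precision buffer is now created directly as a plain Python list instead of a numpy array: per-element reads and writes inside a pure-Python loop are considerably faster on a list. A small, self-contained timing sketch of that effect (sizes are arbitrary, not MONAI code):

# Compare per-element access cost: numpy array vs. plain Python list in a pure-Python loop.
import timeit
import numpy as np

n = 10_000

def fill(buf):
    # element-wise updates, as done in the smoothed precision-curve loop
    for i in range(len(buf)):
        buf[i] = buf[i] + 1.0

print("numpy array:", timeit.timeit(lambda: fill(np.zeros(n)), number=100))
print("python list:", timeit.timeit(lambda: fill([0.0] * n), number=100))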

monai/apps/detection/utils/box_coder.py

Lines changed: 4 additions & 1 deletion
@@ -210,7 +210,10 @@ def decode_single(self, rel_codes: Tensor, reference_boxes: Tensor) -> Tensor:
        offset = reference_boxes.shape[-1]

        pred_boxes = []
-       boxes_cccwhd = convert_box_mode(reference_boxes, src_mode=StandardMode, dst_mode=CenterSizeMode)
+       boxes_cccwhd: torch.Tensor = convert_box_mode(
+           reference_boxes, src_mode=StandardMode, dst_mode=CenterSizeMode
+       )  # type: ignore[assignment]
+
        for axis in range(self.spatial_dims):
            whd_axis = boxes_cccwhd[:, axis + self.spatial_dims]
            ctr_xyz_axis = boxes_cccwhd[:, axis]

monai/apps/generation/maisi/networks/diffusion_model_unet_maisi.py

Lines changed: 3 additions & 2 deletions
@@ -358,8 +358,9 @@ def _apply_down_blocks(self, h, emb, context, down_block_additional_residuals):

    def _apply_up_blocks(self, h, emb, context, down_block_res_samples):
        for upsample_block in self.up_blocks:
-           res_samples = down_block_res_samples[-len(upsample_block.resnets) :]
-           down_block_res_samples = down_block_res_samples[: -len(upsample_block.resnets)]
+           idx: int = -len(upsample_block.resnets)  # type: ignore
+           res_samples = down_block_res_samples[idx:]
+           down_block_res_samples = down_block_res_samples[:idx]
            h = upsample_block(hidden_states=h, res_hidden_states_list=res_samples, temb=emb, context=context)

        return h

monai/data/box_utils.py

Lines changed: 3 additions & 3 deletions
@@ -811,9 +811,9 @@ def _box_inter_union(

    # compute size for the intersection region for the NxM combinations
    wh = (rb - lt + TO_REMOVE).clamp(min=0)  # (N,M,spatial_dims)
-   inter = torch.prod(wh, dim=-1, keepdim=False)  # (N,M)
+   inter: torch.Tensor = torch.prod(wh, dim=-1, keepdim=False)  # (N,M)

-   union = area1[:, None] + area2 - inter
+   union: torch.Tensor = area1[:, None] + area2 - inter  # type: ignore
    return inter, union


@@ -981,7 +981,7 @@ def box_pair_giou(boxes1: NdarrayOrTensor, boxes2: NdarrayOrTensor) -> NdarrayOr
    wh = (rb - lt + TO_REMOVE).clamp(min=0)  # (N,spatial_dims)
    enclosure = torch.prod(wh, dim=-1, keepdim=False)  # (N,)

-   giou_t = iou - (enclosure - union) / (enclosure + torch.finfo(COMPUTE_DTYPE).eps)
+   giou_t: torch.Tensor = iou - (enclosure - union) / (enclosure + torch.finfo(COMPUTE_DTYPE).eps)  # type: ignore
    giou_t = giou_t.to(dtype=box_dtype)  # (N,spatial_dims)
    if torch.isnan(giou_t).any() or torch.isinf(giou_t).any():
        raise ValueError("Box GIoU is NaN or Inf.")

monai/data/dataset.py

Lines changed: 1 addition & 1 deletion
@@ -1353,7 +1353,7 @@ def __len__(self) -> int:
        return len(self.dataset)

    def randomize(self, data: Any | None = None) -> None:
-       self._seed = self.R.randint(MAX_SEED, dtype="uint32")
+       self._seed = int(self.R.randint(MAX_SEED, dtype="uint32"))

    def __getitem__(self, index: int):
        self.randomize()

monai/data/decathlon_datalist.py

Lines changed: 41 additions & 6 deletions
@@ -92,8 +92,42 @@ def load_decathlon_datalist(
 ) -> list[dict]:
    """Load image/label paths of decathlon challenge from JSON file

-   Json file is similar to what you get from http://medicaldecathlon.com/
-   Those dataset.json files
+   JSON file should follow the format of the Medical Segmentation Decathlon
+   datalist.json files, see http://medicaldecathlon.com.
+   The files are structured as follows:
+
+   .. code-block:: python
+
+       {
+           "metadata_key_0": "metadata_value_0",
+           "metadata_key_1": "metadata_value_1",
+           ...,
+           "training": [
+               {"image": "path/to/image_1.nii.gz", "label": "path/to/label_1.nii.gz"},
+               {"image": "path/to/image_2.nii.gz", "label": "path/to/label_2.nii.gz"},
+               ...
+           ],
+           "test": [
+               "path/to/image_3.nii.gz",
+               "path/to/image_4.nii.gz",
+               ...
+           ]
+       }
+
+
+   The metadata keys are optional for loading the datalist, but include:
+       - some string items: ``name``, ``description``, ``reference``, ``licence``, ``release``, ``tensorImageSize``
+       - two dict items: ``modality`` (keyed by channel index), and ``labels`` (keyed by label index)
+       - and two integer items: ``numTraining`` and ``numTest``, with the number of items.
+
+   The ``training`` key contains a list of dictionaries, each of which has at least
+   the ``image`` and ``label`` keys.
+   The image and label are loaded by :py:func:`monai.transforms.LoadImaged`, so both can be either
+   a single file path or a list of file paths, in which case they are loaded as multi-channel images.
+   Each item can also include a ``fold`` key for cross-validation purposes.
+   The "test" key contains a list of image paths, without labels, MONAI also supports a "validation" list
+   with the same format as the "training" list.
+

    Args:
        data_list_file_path: the path to the json file of datalist.
@@ -107,11 +141,11 @@ def load_decathlon_datalist(

    Returns a list of data items, each of which is a dict keyed by element names, for example:

-   .. code-block::
+   .. code-block:: python

        [
-           {'image': '/workspace/data/chest_19.nii.gz', 'label': 0},
-           {'image': '/workspace/data/chest_31.nii.gz', 'label': 1}
+           {'image': '/workspace/data/chest_19.nii.gz', 'label': '/workspace/labels/chest_19.nii.gz'},
+           {'image': '/workspace/data/chest_31.nii.gz', 'label': '/workspace/labels/chest_31.nii.gz'},
        ]

    """
@@ -134,7 +168,8 @@ def load_decathlon_datalist(


 def load_decathlon_properties(data_property_file_path: PathLike, property_keys: Sequence[str] | str) -> dict:
-   """Load the properties from the JSON file contains data property with specified `property_keys`.
+   """Extract the properties with the specified keys from the Decathlon JSON file.
+   See under `load_decathlon_datalist` for the expected keys in the Decathlon challenge.

    Args:
        data_property_file_path: the path to the JSON file of data properties.
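
A short usage sketch of the two functions documented above, assuming a hypothetical ./Task09_Spleen/dataset.json that follows the structure shown in the docstring:

from monai.data import load_decathlon_datalist, load_decathlon_properties

# each item is a dict such as {"image": ".../img.nii.gz", "label": ".../lbl.nii.gz"}
train_items = load_decathlon_datalist(
    data_list_file_path="./Task09_Spleen/dataset.json",
    is_segmentation=True,        # labels are files rather than class indices
    data_list_key="training",    # "test" or "validation" select the other splits
    base_dir="./Task09_Spleen",  # relative paths in the JSON are resolved against this root
)

# optional metadata keys can be read separately
props = load_decathlon_properties("./Task09_Spleen/dataset.json", property_keys=["modality", "labels"])
print(len(train_items), props)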
