Skip to content

Commit 9f4bac0

Browse files
authored
Merge branch 'dev' into torchscript_deprecation
2 parents d5cca7a + daaedaa commit 9f4bac0

14 files changed

Lines changed: 255 additions & 48 deletions

File tree

.dockerignore

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,11 +3,15 @@
33
__pycache__/
44
docs/
55

6+
.vscode
7+
.git
8+
.mypy_cache
9+
.ruff_cache
10+
.pytype
611
.coverage
712
.coverage.*
813
.coverage/
914
coverage.xml
1015
.readthedocs.yml
11-
*.toml
1216

1317
!README.md

CONTRIBUTING.md

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -380,7 +380,8 @@ All code review comments should be specific, constructive, and actionable.
380380

381381
### Release a new version
382382

383-
The `dev` branch's `HEAD` always corresponds to MONAI docker image's latest tag: `projectmonai/monai:latest`.
383+
The `dev` branch's `HEAD` always corresponds to MONAI Docker image's latest tag: `projectmonai/monai:latest`. (No
384+
release is currently done for the slim MONAI image; it is built locally by users.)
384385
The `main` branch's `HEAD` always corresponds to the latest MONAI milestone release.
385386

386387
When major features are ready for a milestone, to prepare for a new release:

Dockerfile.slim

Lines changed: 93 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,93 @@
1+
# Copyright (c) MONAI Consortium
2+
# Licensed under the Apache License, Version 2.0 (the "License");
3+
# you may not use this file except in compliance with the License.
4+
# You may obtain a copy of the License at
5+
# http://www.apache.org/licenses/LICENSE-2.0
6+
# Unless required by applicable law or agreed to in writing, software
7+
# distributed under the License is distributed on an "AS IS" BASIS,
8+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
9+
# See the License for the specific language governing permissions and
10+
# limitations under the License.
11+
12+
# This is a slimmed-down version of the MONAI Docker image using a smaller base image and multi-stage building. Not all
13+
# NVIDIA tools will be present but all libraries and compiled code are included. This image isn't provided through
14+
# Docker Hub, so users must build locally: `docker build -t monai_slim -f Dockerfile.slim .`
15+
# Containers may require more shared memory, e.g.: `docker run -ti --rm --gpus all --shm-size=10gb monai_slim /bin/bash`
16+
17+
ARG IMAGE=debian:12-slim
18+
19+
FROM ${IMAGE} AS build
20+
21+
ARG TORCH_CUDA_ARCH_LIST="7.5 8.0 8.6 8.9 9.0+PTX"
22+
23+
ENV DEBIAN_FRONTEND=noninteractive
24+
ENV APT_INSTALL="apt install -y --no-install-recommends"
25+
26+
RUN apt update && apt upgrade -y && \
27+
${APT_INSTALL} ca-certificates python3-pip python-is-python3 git wget libopenslide0 unzip python3-dev && \
28+
wget https://developer.download.nvidia.com/compute/cuda/repos/debian12/x86_64/cuda-keyring_1.1-1_all.deb && \
29+
dpkg -i cuda-keyring_1.1-1_all.deb && \
30+
apt update && \
31+
${APT_INSTALL} cuda-toolkit-12 && \
32+
rm -rf /usr/lib/python*/EXTERNALLY-MANAGED /var/lib/apt/lists/* && \
33+
python -m pip install --upgrade --no-cache-dir --no-build-isolation pip
34+
35+
# TODO: remark for issue [revise the dockerfile](https://github.com/zarr-developers/numcodecs/issues/431)
36+
RUN if [[ $(uname -m) =~ "aarch64" ]]; then \
37+
CFLAGS="-O3" DISABLE_NUMCODECS_SSE2=true DISABLE_NUMCODECS_AVX2=true python -m pip install numcodecs; \
38+
fi
39+
40+
# NGC Client
41+
WORKDIR /opt/tools
42+
ARG NGC_CLI_URI="https://ngc.nvidia.com/downloads/ngccli_linux.zip"
43+
RUN wget -q ${NGC_CLI_URI} && unzip ngccli_linux.zip && chmod u+x ngc-cli/ngc && \
44+
find ngc-cli/ -type f -exec md5sum {} + | LC_ALL=C sort | md5sum -c ngc-cli.md5 && \
45+
rm -rf ngccli_linux.zip ngc-cli.md5
46+
47+
WORKDIR /opt/monai
48+
49+
# copy relevant parts of repo
50+
COPY requirements.txt requirements-min.txt requirements-dev.txt versioneer.py setup.py setup.cfg pyproject.toml ./
51+
COPY LICENSE CHANGELOG.md CODE_OF_CONDUCT.md CONTRIBUTING.md README.md MANIFEST.in runtests.sh ./
52+
COPY tests ./tests
53+
COPY monai ./monai
54+
55+
# install full deps
56+
RUN python -m pip install --no-cache-dir --no-build-isolation -r requirements-dev.txt
57+
58+
# compile ext
59+
RUN CUDA_HOME=/usr/local/cuda FORCE_CUDA=1 USE_COMPILED=1 BUILD_MONAI=1 python setup.py develop
60+
61+
# recreate the image without the installed CUDA packages then copy the installed MONAI and Python directories
62+
FROM ${IMAGE} AS build2
63+
64+
ENV DEBIAN_FRONTEND=noninteractive
65+
ENV APT_INSTALL="apt install -y --no-install-recommends"
66+
67+
RUN apt update && apt upgrade -y && \
68+
${APT_INSTALL} ca-certificates python3-pip python-is-python3 git libopenslide0 && \
69+
apt clean && \
70+
rm -rf /usr/lib/python*/EXTERNALLY-MANAGED /var/lib/apt/lists/* && \
71+
python -m pip install --upgrade --no-cache-dir --no-build-isolation pip
72+
73+
COPY --from=build /opt/monai /opt/monai
74+
COPY --from=build /opt/tools /opt/tools
75+
ARG PYTHON_VERSION=3.11
76+
COPY --from=build /usr/local/lib/python${PYTHON_VERSION}/dist-packages /usr/local/lib/python${PYTHON_VERSION}/dist-packages
77+
COPY --from=build /usr/local/bin /usr/local/bin
78+
79+
RUN rm -rf /opt/monai/build /opt/monai/monai.egg-info && \
80+
find /opt /usr/local/lib -type d -name __pycache__ -exec rm -rf {} +
81+
82+
# flatten all layers down to one
83+
FROM ${IMAGE}
84+
LABEL maintainer="monai.contact@gmail.com"
85+
86+
COPY --from=build2 / /
87+
88+
WORKDIR /opt/monai
89+
90+
ENV PATH=${PATH}:/opt/tools:/opt/tools/ngc-cli
91+
ENV POLYGRAPHY_AUTOINSTALL_DEPS=1
92+
ENV CUDA_HOME=/usr/local/cuda
93+
ENV BUILD_MONAI=1

README.md

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -61,6 +61,14 @@ Examples and notebook tutorials are located at [Project-MONAI/tutorials](https:/
6161

6262
Technical documentation is available at [docs.monai.io](https://docs.monai.io).
6363

64+
## Docker
65+
66+
The MONAI Docker image is available from [Docker Hub](https://hub.docker.com/r/projectmonai/monai),
67+
tagged as `latest` for the latest state of `dev` or with a release version. A slimmed-down image can also be built
68+
locally using `Dockerfile.slim`; see that file for instructions.
69+
70+
To get started with the latest MONAI, use `docker run -ti --rm --gpus all projectmonai/monai:latest /bin/bash`.
71+
6472
## Citation
6573

6674
If you have used MONAI in your research, please cite us! The citation can be exported from: <https://arxiv.org/abs/2211.02701>.

monai/apps/vista3d/inferer.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -86,13 +86,13 @@ def point_based_window_inferer(
8686
for j in range(len(ly_)):
8787
for k in range(len(lz_)):
8888
lx, rx, ly, ry, lz, rz = (lx_[i], rx_[i], ly_[j], ry_[j], lz_[k], rz_[k])
89-
unravel_slice = [
89+
unravel_slice = (
9090
slice(None),
9191
slice(None),
9292
slice(int(lx), int(rx)),
9393
slice(int(ly), int(ry)),
9494
slice(int(lz), int(rz)),
95-
]
95+
)
9696
batch_image = image[unravel_slice]
9797
output = predictor(
9898
batch_image,

monai/inferers/utils.py

Lines changed: 14 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -243,14 +243,19 @@ def sliding_window_inference(
243243
for idx in slice_range
244244
]
245245
if sw_batch_size > 1:
246-
win_data = torch.cat([inputs[win_slice] for win_slice in unravel_slice]).to(sw_device)
246+
win_data = torch.cat([inputs[ensure_tuple(win_slice)] for win_slice in unravel_slice]).to(sw_device)
247247
if condition is not None:
248-
win_condition = torch.cat([condition[win_slice] for win_slice in unravel_slice]).to(sw_device)
248+
win_condition = torch.cat([condition[ensure_tuple(win_slice)] for win_slice in unravel_slice]).to(
249+
sw_device
250+
)
249251
kwargs["condition"] = win_condition
250252
else:
251-
win_data = inputs[unravel_slice[0]].to(sw_device)
253+
s0 = unravel_slice[0]
254+
s0_idx = ensure_tuple(s0)
255+
256+
win_data = inputs[s0_idx].to(sw_device)
252257
if condition is not None:
253-
win_condition = condition[unravel_slice[0]].to(sw_device)
258+
win_condition = condition[s0_idx].to(sw_device)
254259
kwargs["condition"] = win_condition
255260

256261
if with_coord:
@@ -277,7 +282,7 @@ def sliding_window_inference(
277282
offset = s[buffer_dim + 2].start - c_start
278283
s[buffer_dim + 2] = slice(offset, offset + roi_size[buffer_dim])
279284
s[0] = slice(0, 1)
280-
sw_device_buffer[0][s] += p * w_t
285+
sw_device_buffer[0][ensure_tuple(s)] += p * w_t
281286
b_i += len(unravel_slice)
282287
if b_i < b_slices[b_s][0]:
283288
continue
@@ -308,10 +313,11 @@ def sliding_window_inference(
308313
o_slice[buffer_dim + 2] = slice(c_start, c_end)
309314
img_b = b_s // n_per_batch # image batch index
310315
o_slice[0] = slice(img_b, img_b + 1)
316+
o_slice_idx = ensure_tuple(o_slice)
311317
if non_blocking:
312-
output_image_list[0][o_slice].copy_(sw_device_buffer[0], non_blocking=non_blocking)
318+
output_image_list[0][o_slice_idx].copy_(sw_device_buffer[0], non_blocking=non_blocking)
313319
else:
314-
output_image_list[0][o_slice] += sw_device_buffer[0].to(device=device)
320+
output_image_list[0][o_slice_idx] += sw_device_buffer[0].to(device=device)
315321
else:
316322
sw_device_buffer[ss] *= w_t
317323
sw_device_buffer[ss] = sw_device_buffer[ss].to(device)
@@ -387,7 +393,7 @@ def _compute_coords(coords, z_scale, out, patch):
387393
idx_zm[axis] = slice(
388394
int(original_idx[axis].start * z_scale[axis - 2]), int(original_idx[axis].stop * z_scale[axis - 2])
389395
)
390-
out[idx_zm] += p
396+
out[ensure_tuple(idx_zm)] += p
391397

392398

393399
def _get_scan_interval(

monai/networks/nets/vista3d.py

Lines changed: 4 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -244,14 +244,10 @@ def connected_components_combine(
244244
_logits = logits[mapping_index]
245245
inside = []
246246
for i in range(_logits.shape[0]):
247-
inside.append(
248-
np.any(
249-
[
250-
_logits[i, 0, p[0], p[1], p[2]].item() > 0
251-
for p in point_coords[i].cpu().numpy().round().astype(int)
252-
]
253-
)
254-
)
247+
p_coord = point_coords[i].cpu().numpy().round().astype(int)
248+
inside_p = [_logits[i, 0, p[0], p[1], p[2]].item() > 0 for p in p_coord]
249+
inside.append(int(np.any(inside_p))) # convert to int to avoid typing problems with Numpy
250+
255251
inside_tensor = torch.tensor(inside).to(logits.device)
256252
nan_mask = torch.isnan(_logits)
257253
# _logits are converted to binary [B1, 1, H, W, D]

monai/networks/utils.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -715,7 +715,7 @@ def convert_to_onnx(
715715
torch_versioned_kwargs = {}
716716
if use_trace:
717717
# let torch.onnx.export to trace the model.
718-
mode_to_export = model
718+
model_to_export = model
719719
torch_versioned_kwargs = kwargs
720720
if "dynamo" in kwargs and kwargs["dynamo"] and verify:
721721
torch_versioned_kwargs["verify"] = verify
@@ -728,9 +728,9 @@ def convert_to_onnx(
728728
# pass the raw nn.Module directly—the exporter handles it via torch.export.
729729
_pt_major_minor = tuple(int(x) for x in torch.__version__.split("+")[0].split(".")[:2])
730730
if _pt_major_minor >= (2, 9):
731-
mode_to_export = model
731+
model_to_export = model
732732
else:
733-
mode_to_export = torch.jit.script(model, **kwargs)
733+
model_to_export = torch.jit.script(model, **kwargs)
734734

735735
if torch.is_tensor(inputs) or isinstance(inputs, dict):
736736
onnx_inputs = (inputs,)
@@ -743,7 +743,7 @@ def convert_to_onnx(
743743
else:
744744
f = filename
745745
torch.onnx.export(
746-
mode_to_export,
746+
model_to_export,
747747
onnx_inputs,
748748
f=f,
749749
input_names=input_names,

requirements-dev.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
# Full requirements for development
22
-r requirements-min.txt
3-
pytorch-ignite==0.4.11
3+
pytorch-ignite
44
gdown>=4.7.3
55
scipy>=1.12.0; python_version >= '3.9'
66
itk>=5.2

tests/bundle/test_bundle_download.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
import os
1616
import tempfile
1717
import unittest
18-
from unittest.case import skipUnless
18+
from unittest.case import skipIf, skipUnless
1919
from unittest.mock import patch
2020

2121
import numpy as np
@@ -219,6 +219,7 @@ def test_monaihosting_url_download_bundle(self, bundle_files, bundle_name, url):
219219

220220
@parameterized.expand([TEST_CASE_5])
221221
@skip_if_quick
222+
@skipIf(os.getenv("NGC_API_KEY", None) is None, "NGC API key required for this test")
222223
def test_ngc_private_source_download_bundle(self, bundle_files, bundle_name, _url):
223224
with skip_if_downloading_fails():
224225
# download a single file from url, also use `args_file`

0 commit comments

Comments
 (0)