Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
.vscode/
build/
build-Debug/
protobuf*

# Ignore Python compiled files
*.py[co]
Expand All @@ -23,4 +24,4 @@ groundtruth/
checkpoints/
data/
serverlocal/
densepose/
densepose/
4 changes: 2 additions & 2 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ project(MachineLearningPlugins VERSION 1.0.0)

#===------------------------------------------------------------------------===
# Global settings, some based on the external configuration settings
set( CMAKE_CXX_STANDARD 11 )
set( CMAKE_CXX_STANDARD 17 )
set( CMAKE_CXX_EXTENSIONS OFF )
set( CMAKE_CXX_VISIBILITY_PRESET hidden )
set( CMAKE_POSITION_INDEPENDENT_CODE True )
Expand All @@ -24,4 +24,4 @@ set( BUILDDATE_FULL "${BUILDDATE_FULL_INTERNAL}" CACHE STRING "Exact time of the

#===------------------------------------------------------------------------===
# Compile CMakeLists found in subdirectories
add_subdirectory(Plugins/Client)
add_subdirectory(Plugins/Client)
5 changes: 4 additions & 1 deletion INSTALL.md
Original file line number Diff line number Diff line change
Expand Up @@ -46,11 +46,14 @@ If not already cloned, fetch the `nuke-ML-server` repository:
```
git clone https://github.com/TheFoundryVisionmongers/nuke-ML-server
```

Install gcc-11 before compiling the plugin.

Execute the commands below to compile the client MLClient.so plugin, setting the NUKE_INSTALL_PATH to point to the folder of the desired Nuke version:
```
cd nuke-ML-server/
mkdir build && cd build
cmake -DNUKE_INSTALL_PATH=/path/to/Nuke11.3v1/ ..
cmake -DNUKE_INSTALL_PATH=/opt/foundry/Nuke16.0v7/ ..
make
```
The MLClient.so plugin will now be in the `build/Plugins/Client` folder. Before it can be used, Nuke needs to know where it lives. One way to do this is to update the NUKE_PATH environment variable to point to the MLClient.so plugin (This can be skipped if it was moved to the root of your ~/.nuke folder, or the path was added in Nuke through Python):
Expand Down
42 changes: 24 additions & 18 deletions Plugins/Server/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# NVIDIA NGC TensorFlow 23.02 container (TF1 branch, Python 3)
# NOTE(review): previous description (Ubuntu 18.04, CUDA 10.0, CuDNN 7.6, TF 1.15, PyTorch 1.4) no longer matches the base image below
FROM nvidia/cuda:10.0-cudnn7-devel-ubuntu18.04
FROM nvcr.io/nvidia/tensorflow:23.02-tf1-py3

ARG DEBIAN_FRONTEND=noninteractive
RUN apt-get update && apt-get install -y --no-install-recommends \
Expand All @@ -19,14 +19,16 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
rm -rf /var/lib/apt/lists/*

# Python 3 is provided by the base image; the system-package install below is kept commented out for reference
RUN apt-get update && apt-get install -y --no-install-recommends \
python3-opencv \
python3-pip \
python3.6-dev && \
rm -rf /var/lib/apt/lists/*
#RUN apt-get update && apt-get install -y --no-install-recommends \
# python3-opencv \
# python3-pip \
# python3.9-dev python3.9-venv && \
# rm -rf /var/lib/apt/lists/*
# Have aliases python3->python and pip3->pip
RUN ln -s /usr/bin/python3 /usr/bin/python && \
ln -s /usr/bin/pip3 /usr/bin/pip
#RUN unlink /usr/bin/python && \
# unlink /usr/bin/pip && \
# ln -s /usr/bin/python3.9 /usr/bin/python && \
# ln -s /usr/bin/pip3 /usr/bin/pip
RUN python -m pip install --upgrade pip

RUN pip install --no-cache-dir setuptools wheel && \
Expand All @@ -39,26 +41,30 @@ RUN pip install --no-cache-dir setuptools wheel && \
typing \
imageio \
OpenEXR

# Legacy TF 1.15.0 GPU pip install kept for reference; nvidia-tensorflow (installed below) is used instead
RUN pip install --no-cache-dir \
tensorflow-gpu==1.15.0 \
tensorflow-determinism
#RUN pip install --no-cache-dir \
# tensorflow-gpu==1.15.0 \
# tensorflow-determinism

RUN python -m pip install nvidia-pyindex --extra-index-url=https://pypi.ngc.nvidia.com --trusted-host pypi.ngc.nvidia.com
RUN python -m pip install nvidia-tensorflow[horovod] --extra-index-url=https://pypi.ngc.nvidia.com --trusted-host pypi.ngc.nvidia.com

# Install PyTorch (with torchvision/torchaudio) for CUDA 12.1
RUN pip install --no-cache-dir torch==1.4.0+cu100 torchvision==0.5.0+cu100 -f https://download.pytorch.org/whl/torch_stable.html
RUN pip install --no-cache-dir cupy-cuda100
RUN pip install torch==2.1.2 torchvision==0.16.2 torchaudio==2.1.2 --index-url https://download.pytorch.org/whl/cu121
# NOTE(review): comment said "CPU only", but cupy-cuda12x is the CUDA 12.x GPU wheel — confirm intent
RUN pip install --no-cache-dir cupy-cuda12x
RUN pip install --no-cache-dir cython

WORKDIR /workspace
# Install the COCO API
RUN pip install 'git+https://github.com/cocodataset/cocoapi.git#subdirectory=PythonAPI'

# Install Detectron2 for Mask R-CNN
RUN git clone https://github.com/facebookresearch/detectron
RUN sed -i 's/cythonize(ext_modules)/cythonize(ext_modules, language_level="3")/g' detectron/setup.py
RUN cd detectron && pip install -r requirements.txt && make
#RUN pip install 'git+https://github.com/johnnynunez/detectron2.git'
RUN git clone https://github.com/facebookresearch/detectron2.git /detectron2
WORKDIR /detectron2
RUN pip install -e .

WORKDIR /workspace/ml-server
# Copy your current folder to the docker image /workspace/ml-server/ folder
COPY . .
COPY . .