diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..e5efed0
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,28 @@
+# syntax=docker/dockerfile:1
+FROM python:3.11-slim
+
+ENV PYTHONUNBUFFERED=1
+WORKDIR /app
+
+# Install minimal build dependencies for some Python packages
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends gcc git && \
+    rm -rf /var/lib/apt/lists/*
+
+# Copy dependency files first to leverage Docker layer caching
+COPY requirements.txt requirements.in pyproject.toml /app/
+
+# --no-cache-dir keeps pip's download cache out of the image layers (hadolint DL3042)
+RUN pip install --no-cache-dir --upgrade pip setuptools wheel && \
+    if [ -f requirements.txt ]; then pip install --no-cache-dir -r requirements.txt; fi
+
+# Copy project
+COPY . /app
+
+# Run as an unprivileged user; root is only needed for the install steps above.
+# A stable numeric UID lets runtimes (e.g. Kubernetes runAsNonRoot) verify it.
+RUN useradd --system --uid 10001 --create-home app && \
+    chown -R app /app
+USER app
+
+# Default command prints the CLI help. Users can override with other commands.
+CMD ["python", "-m", "sharp.cli.predict", "--help"]
diff --git a/README.md b/README.md
index 5955753..e44789b 100644
--- a/README.md
+++ b/README.md
@@ -93,3 +93,35 @@ Our codebase is built using multiple opensource contributions, please see [ACKNO
 
 Please check out the repository [LICENSE](LICENSE) before using the provided code and [LICENSE_MODEL](LICENSE_MODEL) for the released models.
 
+
+## Docker
+
+Build the image locally (run from the repository root):
+
+```bash
+docker build -t ml-sharp .
+```
+
+Run the CLI inside the container. Mount the repository and the `data/` directory so outputs persist locally:
+
+```bash
+docker run --rm -it \
+  -v "$PWD":/app \
+  -v "$PWD/data":/app/data \
+  ml-sharp \
+  python -m sharp.cli.predict --help
+```
+
+Example: predict using the mounted `data/` input and write outputs to `data/output`:
+
+```bash
+docker run --rm -it \
+  -v "$PWD":/app \
+  -v "$PWD/data":/app/data \
+  ml-sharp \
+  python -m sharp.cli.predict -i data/input_images -o data/output
+```
+
+Notes:
+- If you need GPU support, use a GPU-enabled base image and the appropriate CUDA toolchain, or prefer running natively with CUDA and PyTorch.
+- The default image is a minimal CPU image intended for convenience and reproduction on typical development machines.