-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathDockerfile
More file actions
134 lines (109 loc) · 4.51 KB
/
Dockerfile
File metadata and controls
134 lines (109 loc) · 4.51 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
# -------------------------
# Base Terra Jupyter image
# -------------------------
# NOTE(review): `latest` is a moving target. Override BASE_TAG (ideally with a
# digest-pinned tag) for reproducible builds; default preserves prior behavior.
ARG BASE_TAG=latest
FROM us.gcr.io/broad-dsp-gcr-public/terra-jupyter-base:${BASE_TAG}

# -------------------------
# System setup (as root)
# -------------------------
USER root

# Toolchain + build deps (llama.cpp, general native builds, repo tooling).
# DEBIAN_FRONTEND is set inline, not via ENV, so it does not leak into runtime.
RUN DEBIAN_FRONTEND=noninteractive apt-get update && \
    DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
        build-essential \
        ca-certificates \
        ccache \
        cmake \
        curl \
        git \
        gnupg \
        jq \
        libcurl4-openssl-dev \
        lsb-release \
        ninja-build \
        pkg-config \
        software-properties-common \
        wget \
    && rm -rf /var/lib/apt/lists/*
# -------------------------
# CUDA Toolkit (for GPU builds)
# -------------------------
# Install NVIDIA's apt keyring, then the CUDA 12.4 toolkit. The keyring .deb
# is removed in the same layer so it never persists in the image.
RUN wget -q https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2204/x86_64/cuda-keyring_1.1-1_all.deb && \
    dpkg -i cuda-keyring_1.1-1_all.deb && \
    rm -f cuda-keyring_1.1-1_all.deb && \
    apt-get update && \
    apt-get install -y --no-install-recommends nvidia-utils-525 cuda-toolkit-12-4 && \
    rm -rf /var/lib/apt/lists/*

ENV CUDA_HOME=/usr/local/cuda
ENV PATH=$CUDA_HOME/bin:$PATH
# Append the previous LD_LIBRARY_PATH only when it is non-empty. The original
# unconditional ":$LD_LIBRARY_PATH" produced a trailing colon when the base
# image left the variable unset — an empty path element that the dynamic
# loader treats as the current directory.
ENV LD_LIBRARY_PATH=$CUDA_HOME/lib64${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}
# -------------------------
# Build llama.cpp
# -------------------------
ENV LLAMA_PREFIX=/opt/llama.cpp

# NOTE(review): cloning the branch head is not reproducible; pass a tag or
# commit via LLAMA_REF to pin. Default ("master") keeps prior behavior.
ARG LLAMA_REF=master
RUN git clone --depth 1 --branch "${LLAMA_REF}" \
        https://github.com/ggml-org/llama.cpp ${LLAMA_PREFIX}

# CPU-only build
RUN cmake -S ${LLAMA_PREFIX} -B ${LLAMA_PREFIX}/build-cpu \
    -DCMAKE_BUILD_TYPE=Release \
    -DBUILD_SHARED_LIBS=OFF \
    -DLLAMA_CURL=ON -DGGML_CUDA=OFF && \
    cmake --build ${LLAMA_PREFIX}/build-cpu -j

# CUDA build (Ampere+Ada archs as example; adjust if needed).
# ARG rather than ENV: this is a build-time knob with no runtime consumer,
# so it should not persist into the container environment.
ARG CMAKE_CUDA_ARCHITECTURES="86;89"
RUN cmake -S ${LLAMA_PREFIX} -B ${LLAMA_PREFIX}/build-cuda \
    -DCMAKE_BUILD_TYPE=Release \
    -DBUILD_SHARED_LIBS=OFF \
    -DLLAMA_CURL=ON -DGGML_CUDA=ON \
    -DCMAKE_CUDA_ARCHITECTURES="${CMAKE_CUDA_ARCHITECTURES}" && \
    cmake --build ${LLAMA_PREFIX}/build-cuda -j

# Make binaries easy to find. In the original, "|| true" guarded only the
# second ln, silently. Make the intent explicit: the CPU link must succeed;
# the CUDA link is best-effort and failure is logged rather than hidden.
RUN ln -sf ${LLAMA_PREFIX}/build-cpu/bin /usr/local/llama-cpu && \
    { ln -sf ${LLAMA_PREFIX}/build-cuda/bin /usr/local/llama-cuda || \
      echo "WARNING: llama-cuda symlink not created; continuing"; }
# -------------------------
# Ollama (optional local models)
# -------------------------
# Best-effort install (the image still builds without Ollama), but emit a
# visible warning instead of silently swallowing every failure with "|| true".
# NOTE(review): piping a remote script to sh is unverified; consider pinning
# a release and checking its checksum.
RUN (curl -fsSL https://ollama.com/install.sh | sh) || \
    echo "WARNING: Ollama install failed; continuing without it"
# -------------------------
# Node.js (for Claude Code CLI)
# -------------------------
# Download the NodeSource setup script to a file instead of "curl | bash":
# under the default /bin/sh (dash, no pipefail) a failed curl in the pipe
# would be masked and the build would continue with a broken node install.
RUN curl -fsSL -o /tmp/nodesource_setup.sh https://deb.nodesource.com/setup_20.x && \
    bash /tmp/nodesource_setup.sh && \
    apt-get install -y --no-install-recommends nodejs && \
    rm -f /tmp/nodesource_setup.sh && \
    rm -rf /var/lib/apt/lists/*

# Install Claude CLI globally
RUN npm install -g @anthropic-ai/claude-code

ENV PATH=$PATH:/usr/local/bin
# -------------------------
# Conda/Jupyter setup
# -------------------------
# Create every jupyter-owned directory in a single root step: the conda
# package/env dirs plus the conda notices cache (conda needs write access
# there — this folds in the earlier "FIX" patch). Doing it once here removes
# the root -> jupyter -> root -> jupyter user churn; the end state is the same.
RUN mkdir -p /home/jupyter/.conda /home/jupyter/.cache/conda/notices && \
    chown -R jupyter:jupyter /home/jupyter/.conda /home/jupyter/.cache

USER jupyter
WORKDIR /home/jupyter

# Keep packages/envs under the jupyter home; don't auto-activate base.
RUN conda config --add pkgs_dirs /home/jupyter/.conda/pkgs && \
    conda config --add envs_dirs /home/jupyter/.conda/envs && \
    conda config --set auto_activate_base false

# Your env file
COPY --chown=jupyter:jupyter environment.yaml /home/jupyter/environment.yaml

# Create env
RUN conda env create -f environment.yaml
# Torch GPU wheels (cu118)
RUN conda run -n metadisco pip install \
    torch==2.2.0+cu118 torchvision==0.17.0+cu118 torchaudio==2.2.0+cu118 \
    --index-url https://download.pytorch.org/whl/cu118

# Pin numpy below 2.0. The torch 2.2 wheels above were built against the
# numpy 1.x ABI; the original unpinned install could pull numpy 2.x and
# cause exactly the "ABI surprises" the comment meant to prevent.
RUN conda run -n metadisco conda install -y "numpy<2"

# Jupyter kernel
RUN conda run -n metadisco python -m ipykernel install --user \
    --name metadisco --display-name "meta-disco"
# -------------------------
# Claude SDKs (Python) + optional Bedrock
# -------------------------
# Official Anthropic Python client for use inside notebooks.
# BUG FIX: the version spec must be quoted. Unquoted, the shell parsed
# `anthropic>=0.34` as `pip install ... anthropic` with stdout redirected to
# a file named `=0.34` — silently dropping the version constraint and leaving
# a junk file in the layer.
RUN conda run -n metadisco pip install --no-cache-dir "anthropic>=0.34"

# OPTIONAL: AWS Bedrock client (if you prefer Claude via Bedrock IAM)
# Comment out if not needed
RUN conda run -n metadisco pip install --no-cache-dir boto3 botocore

# Default model hint (override at runtime as needed)
ENV CLAUDE_MODEL="claude-3-5-sonnet-latest"

# NOTE: Do NOT bake secrets into the image.
# Expect ANTHROPIC_API_KEY (and AWS creds/role if using Bedrock) to be set at runtime:
#   - ANTHROPIC_API_KEY
#   - AWS_REGION (e.g., us-east-1)
#   - BEDROCK_CLAUDE_MODEL (e.g., anthropic.claude-3-5-sonnet-20240620-v1:0)

# -------------------------
# Final working dir
# -------------------------
WORKDIR /home/jupyter