parent 1d8b48e6bc
commit 34d43290e0

@@ -1,6 +1,3 @@
-# Base image selection is handled by the Makefile or build script
-# For CUDA support: FROM nvidia/cuda:12.8.1-devel-ubuntu22.04
-# For CPU-only: FROM python:3.12
 FROM python:3.12

 # Set working directory
@@ -27,14 +24,19 @@ COPY docker/app/check_gpu_support.sh /app/
 COPY docker/app/check_torch_cuda.py /app/
 RUN chmod +x /app/check_gpu_support.sh

-# Unpack prebuilt llama.cpp CPU binary (no build or GPU detection here)
+# Build llama.cpp
 RUN LLAMA_LOCAL_ZIP="dependencies/llama.cpp.zip" \
 && echo "Using local llama.cpp archive..." \
 && unzip -q "$LLAMA_LOCAL_ZIP" \
 && cd llama.cpp \
-&& mkdir -p build \
-&& cp -r bin build/ 2>/dev/null || true \
-&& chmod +x /app/llama.cpp/build/bin/llama-server /app/llama.cpp/build/bin/llama-cli 2>/dev/null || true
+&& mkdir -p build && cd build \
+&& cmake .. \
+&& cmake --build . --config Release \
+&& if [ ! -f "bin/llama-server" ]; then \
+echo "Build failed: llama-server executable not found" && exit 1; \
+else \
+echo "Successfully built llama-server"; \
+fi

 # Mark as CPU-only build for runtime reference
 RUN mkdir -p /app/data && \
@@ -47,6 +49,7 @@ COPY pyproject.toml README.md /app/
 RUN poetry install --no-interaction --no-root
+RUN pip install --force-reinstall dependencies/graphrag-1.2.1.dev27.tar.gz


 # Copy source code - Files that frequently change
 COPY docker/ /app/docker/
 COPY lpm_kernel/ /app/lpm_kernel/
@@ -67,5 +70,5 @@ ENV PYTHONUNBUFFERED=1 \
 # Expose ports
 EXPOSE 8002 8080

-# Set the startup command - CUDA check/rebuild removed since it's now handled at build time
-CMD ["bash", "-c", "echo 'Checking SQLite database...' && if [ ! -s /app/data/sqlite/lpm.db ]; then echo 'SQLite database not found or empty, initializing...' && mkdir -p /app/data/sqlite && sqlite3 /app/data/sqlite/lpm.db '.read /app/docker/sqlite/init.sql' && echo 'SQLite database initialized successfully' && echo 'Tables created:' && sqlite3 /app/data/sqlite/lpm.db '.tables'; else echo 'SQLite database already exists, skipping initialization'; fi && echo 'Checking ChromaDB...' && if [ ! -d /app/data/chroma_db/documents ] || [ ! -d /app/data/chroma_db/document_chunks ]; then echo 'ChromaDB collections not found, initializing...' && python /app/docker/app/init_chroma.py && echo 'ChromaDB initialized successfully'; else echo 'ChromaDB already exists, skipping initialization'; fi && echo 'Starting application at ' $(date) >> /app/logs/backend.log && cd /app && python -m flask run --host=0.0.0.0 --port=${LOCAL_APP_PORT:-8002} >> /app/logs/backend.log 2>&1"]
+# Set the startup command
+CMD ["bash", "-c", "echo \"Checking SQLite database...\" && if [ ! -s /app/data/sqlite/lpm.db ]; then echo \"SQLite database not found or empty, initializing...\" && mkdir -p /app/data/sqlite && sqlite3 /app/data/sqlite/lpm.db \".read /app/docker/sqlite/init.sql\" && echo \"SQLite database initialized successfully\" && echo \"Tables created:\" && sqlite3 /app/data/sqlite/lpm.db \".tables\"; else echo \"SQLite database already exists, skipping initialization\"; fi && echo \"Checking ChromaDB...\" && if [ ! -d /app/data/chroma_db/documents ] || [ ! -d /app/data/chroma_db/document_chunks ]; then echo \"ChromaDB collections not found, initializing...\" && python /app/docker/app/init_chroma.py && echo \"ChromaDB initialized successfully\"; else echo \"ChromaDB already exists, skipping initialization\"; fi && echo \"Starting application at $(date)\" >> /app/logs/backend.log && cd /app && python -m flask run --host=0.0.0.0 --port=${LOCAL_APP_PORT:-8002} >> /app/logs/backend.log 2>&1"]
Makefile

@@ -213,7 +213,7 @@ ifeq ($(WINDOWS),1)
@echo "Prompting for CUDA preference..."
|
||||
@scripts\prompt_cuda.bat
|
||||
@echo "Checking CUDA preference..."
|
||||
@cmd /c "if exist .gpu_selected ( echo CUDA support detected, using GPU configuration... & docker compose -f docker-compose-gpu.yml build --no-cache & docker compose -f docker-compose-gpu.yml up -d ) else ( echo No CUDA support selected, using CPU-only configuration... & docker compose -f docker-compose.yml build --no-cache & docker compose -f docker-compose.yml up -d )"
|
||||
@cmd /c "if exist .gpu_selected ( echo CUDA support detected, using GPU configuration... & docker compose -f docker-compose-gpu.yml build & docker compose -f docker-compose-gpu.yml up -d ) else ( echo No CUDA support selected, using CPU-only configuration... & docker compose -f docker-compose.yml build & docker compose -f docker-compose.yml up -d )"
|
||||
else
|
||||
@echo "Prompting for CUDA preference..."
|
||||
@chmod +x ./scripts/prompt_cuda.sh
|
||||
|