diff --git a/backend/Deb_Dockerfile b/backend/Deb_Dockerfile
new file mode 100644
index 0000000..ef2fa40
--- /dev/null
+++ b/backend/Deb_Dockerfile
@@ -0,0 +1,89 @@
+#FROM python:3.12-bookworm
+#FROM python:3.12-bullseye
+FROM python:3.10-bullseye
+ARG DEBIAN_FRONTEND=noninteractive
+
+# https://blog.moubou.com/2023/11/install-amd-rocm-on-debian-bookworm/
+# https://blog.moubou.com/2024/05/install-amd-rocm-6-on-debian-stable-bookworm/
+
+RUN apt-get update
+
+#on debian:
+RUN apt-get install -y firefox-esr
+RUN apt-get install -y ffmpeg espeak flite
+
+
+#RUN apt-get install -y libmpdec3
+RUN apt-get install -y libmpdec-dev
+
+
+
+#ARG MIRROR_URL="https://mirror.5i.fi/debian2/pool/main/p/python3.10/"
+
+#ROCm only supports Python 3.10, unfortunately this is no longer available in Debian Bookworm. However python3.10 can be installed from Debian Sid.
+
+#RUN wget ${MIRROR_URL}/libpython3.10-minimal_3.10.13-1_amd64.deb
+#RUN dpkg -i libpython3.10-minimal_3.10.13-1_amd64.deb
+
+#RUN wget ${MIRROR_URL}/libpython3.10-stdlib_3.10.13-1_amd64.deb
+#RUN dpkg -i libpython3.10-stdlib_3.10.13-1_amd64.deb
+
+#RUN wget ${MIRROR_URL}/python3.10_3.10.13-1_amd64.deb
+#RUN dpkg -i python3.10_3.10.13-1_amd64.deb
+
+
+
+
+RUN wget https://repo.radeon.com/amdgpu-install/6.1.1/ubuntu/jammy/amdgpu-install_6.1.60101-1_all.deb
+RUN apt -y install ./amdgpu-install_6.1.60101-1_all.deb
+RUN amdgpu-install --usecase=rocm
+
+
+
+# Debian 12 "Bookworm"
+#RUN echo "deb http://deb.debian.org/debian/ bookworm main contrib non-free-firmware" > /etc/apt/sources.list
+
+#RUN apt-add-repository contrib
+#RUN apt-add-repository non-free
+#RUN apt update -y
+#RUN apt-get -y install firmware-amd-graphics libgl1-mesa-dri libglx-mesa0 mesa-vulkan-drivers xserver-xorg-video-all
+
+
+#install "apt-add-repository" command:
+#RUN apt-get -y install software-properties-common dirmngr apt-transport-https lsb-release ca-certificates
+#RUN apt-add-repository -r ppa:graphics-drivers/ppa
+#RUN add-apt-repository ppa:oibaf/graphics-drivers
+
+#RUN apt install nvidia-driver -y
+#RUN apt-get install firmware-amd-graphics libgl1-mesa-dri libglx-mesa0 mesa-vulkan-drivers xserver-xorg-video-all -y
+
+#RUN apt-get update -y --allow-unauthenticated
+#RUN apt-get upgrade -y --allow-unauthenticated
+RUN apt-get autoremove -y
+RUN apt-get autoclean -y
+
+#RUN curl https://ollama.ai/install.sh | sh
+#RUN ollama run llama2
+WORKDIR /code
+
+COPY requirements.txt /code/requirements.txt
+#RUN pip3 install --no-cache-dir --upgrade -r requirements.txt
+
+RUN pip3 install --no-cache-dir -r requirements.txt --break-system-packages
+RUN pip3 freeze > current_requirements.txt
+
+COPY . .
+
+ENTRYPOINT ["python3", "/code/app.py"]
+
+#gunicorn -w 4 -b 0.0.0.0 'hello:create_app()'
+#ENTRYPOINT ["gunicorn", "-w", "1", "-b", "0.0.0.0", "app:create_app()"]
+#ENTRYPOINT ["gunicorn", "-w", "1", "-b", "0.0.0.0:5000", "app:create_app()"]
+#gunicorn app:app --worker-class eventlet -w 1 --bind 0.0.0.0:5000 --reload
+
+
+#ENTRYPOINT ["fastapi", "run", "main.py", "--port", "8000"]
+
+#ENTRYPOINT ["uvicorn", "main:app", "--port", "8000", "--host", "0.0.0.0"]
+
+
diff --git a/backend/Dockerfile b/backend/Dockerfile
index 25081a5..33ddde3 100644
--- a/backend/Dockerfile
+++ b/backend/Dockerfile
@@ -1,88 +1,41 @@
-#FROM python:3.12
-FROM ubuntu
-
-RUN apt-get update
-RUN apt-get install -y python3
-RUN apt-get install -y python3-pip
-
-
-#on debian:
-#RUN apt-get install -y firefox-esr
-RUN apt-get install -y firefox
-
-RUN apt-get install -y ffmpeg
-RUN apt-get install -y espeak
-RUN apt-get install -y flite
-
-#COPY "amdgpu-install_6.1.60103-1_all.deb" "amdgpu-install_6.1.60103-1_all.deb"
-#RUN dpkg -i "amdgpu-install_6.1.60103-1_all.deb"
-#RUN amdgpu-install -y
-#RUN apt-get -y install rocm-device-libs
-
-
-#install "apt-add-repository" command:
-RUN apt-get -y install software-properties-common dirmngr apt-transport-https lsb-release ca-certificates
-RUN apt-add-repository -r ppa:graphics-drivers/ppa
-RUN add-apt-repository ppa:oibaf/graphics-drivers
-
-
-#RUN apt install hipsolver rocm-gdb -y
-
-#RUN apt-get -y install wget
-#RUN apt-get update -y --allow-unauthenticated
-#RUN wget https://repo.radeon.com/amdgpu-install/6.1.1/ubuntu/jammy/amdgpu-install_6.1.60101-1_all.deb
-#RUN apt-get install ./amdgpu-install_6.1.60101-1_all.deb -y
-#RUN amdgpu-install --usecase=rocm
-
-
-#RUN wget https://repo.radeon.com/amdgpu-install/6.1.1/ubuntu/jammy/amdgpu-install_6.1.60101-1_all.deb
-#RUN apt install ./amdgpu-install_6.1.60101-1_all.deb -y
-#RUN amdgpu-install --usecase=graphics,rocm -y
-#RUN usermod -a -G render,video $LOGNAME
-
-
-#RUN wget https://repo.radeon.com/amdgpu-install/6.1.1/ubuntu/jammy/amdgpu-install_6.1.60101-1_all.deb
-#RUN apt install ./amdgpu-install_6.1.60101-1_all.deb -y
-#RUN amdgpu-install --usecase=graphics,rocm -y
-
-
-#RUN echo "deb http://deb.debian.org/debian/ bookworm main contrib non-free-firmware" > /etc/apt/sources.list
-#RUN echo "deb http://deb.debian.org/debian/ bookworm main contrib non-free" > /etc/apt/sources.list
-
-#RUN apt-add-repository contrib
-#RUN apt-add-repository non-free
-#RUN apt update -y
-
-#RUN apt install nvidia-driver -y
-#RUN apt-get install firmware-amd-graphics libgl1-mesa-dri libglx-mesa0 mesa-vulkan-drivers xserver-xorg-video-all -y
-
-RUN apt-get update -y --allow-unauthenticated
-RUN apt-get upgrade -y --allow-unauthenticated
-RUN apt-get autoremove -y
-RUN apt-get autoclean -y
-
-#RUN curl https://ollama.ai/install.sh | sh
-#RUN ollama run llama2
-WORKDIR /code
-
-COPY requirements.txt /code/requirements.txt
-#RUN pip3 install --no-cache-dir --upgrade -r requirements.txt
-
-RUN pip3 install --no-cache-dir -r requirements.txt --break-system-packages
-RUN pip3 freeze > current_requirements.txt
-
-COPY . .
-
-ENTRYPOINT ["python3", "/code/app.py"]
-
-#gunicorn -w 4 -b 0.0.0.0 'hello:create_app()'
-#ENTRYPOINT ["gunicorn", "-w", "1", "-b", "0.0.0.0", "app:create_app()"]
-#ENTRYPOINT ["gunicorn", "-w", "1", "-b", "0.0.0.0:5000", "app:create_app()"]
-#gunicorn app:app --worker-class eventlet -w 1 --bind 0.0.0.0:5000 --reload
-
-
-#ENTRYPOINT ["fastapi", "run", "main.py", "--port", "8000"]
-
-#ENTRYPOINT ["uvicorn", "main:app", "--port", "8000", "--host", "0.0.0.0"]
-
-
+
+FROM ubuntu
+ARG DEBIAN_FRONTEND=noninteractive
+
+RUN apt-get update -y
+RUN apt-get install -y python3 python3-pip
+RUN apt-get install -y firefox curl ffmpeg espeak flite
+
+
+#install "apt-add-repository" command:
+RUN apt-get -y install software-properties-common dirmngr apt-transport-https lsb-release ca-certificates
+RUN add-apt-repository ppa:graphics-drivers/ppa -y
+
+#amd?
+RUN add-apt-repository ppa:oibaf/graphics-drivers
+RUN apt install -y ubuntu-drivers-common
+RUN ubuntu-drivers autoinstall
+
+RUN apt-get update -y --allow-unauthenticated
+RUN apt-get upgrade -y --allow-unauthenticated
+RUN apt-get autoremove -y
+RUN apt-get autoclean -y
+
+WORKDIR /code
+COPY requirements.txt /code/requirements.txt
+RUN pip3 install --no-cache-dir -r requirements.txt --break-system-packages
+RUN pip3 freeze > current_requirements.txt
+
+COPY . .
+
+ENTRYPOINT ["python3", "/code/app.py"]
+
+#gunicorn -w 4 -b 0.0.0.0 'hello:create_app()'
+#ENTRYPOINT ["gunicorn", "-w", "1", "-b", "0.0.0.0", "app:create_app()"]
+#ENTRYPOINT ["gunicorn", "-w", "1", "-b", "0.0.0.0:5000", "app:create_app()"]
+#gunicorn app:app --worker-class eventlet -w 1 --bind 0.0.0.0:5000 --reload
+
+
+#ENTRYPOINT ["fastapi", "run", "main.py", "--port", "8000"]
+
+#ENTRYPOINT ["uvicorn", "main:app", "--port", "8000", "--host", "0.0.0.0"]
diff --git a/backend/Focal_Dockerfile b/backend/Focal_Dockerfile
new file mode 100644
index 0000000..1a23e5e
--- /dev/null
+++ b/backend/Focal_Dockerfile
@@ -0,0 +1,66 @@
+#FROM python:3.12
+#FROM ubuntu
+FROM ubuntu:focal
+
+ARG DEBIAN_FRONTEND=noninteractive
+
+RUN apt-get update
+
+#for ubuntu base image:
+RUN apt-get install -y python3 python3-pip
+RUN apt-get install -y firefox ffmpeg espeak flite
+
+#COPY "amdgpu-install_6.1.60103-1_all.deb" "amdgpu-install_6.1.60103-1_all.deb"
+#RUN dpkg -i "amdgpu-install_6.1.60103-1_all.deb"
+#RUN amdgpu-install -y
+#RUN apt-get -y install rocm-device-libs
+
+#install "apt-add-repository" command:
+#RUN apt-get -y install software-properties-common dirmngr apt-transport-https lsb-release ca-certificates
+#RUN apt-add-repository -r ppa:graphics-drivers/ppa
+#RUN add-apt-repository ppa:oibaf/graphics-drivers
+
+#RUN apt install hipsolver rocm-gdb -y
+
+
+RUN apt install -y linux-image-5.4.0-54-generic linux-headers-5.4.0-54-generic linux-modules-extra-5.4.0-54-generic
+
+
+RUN apt-get -y install wget
+RUN apt-get update -y --allow-unauthenticated
+#RUN wget https://repo.radeon.com/amdgpu-install/6.1.1/ubuntu/jammy/amdgpu-install_6.1.60101-1_all.deb
+#RUN apt-get install ./amdgpu-install_6.1.60101-1_all.deb -y
+
+RUN wget https://repo.radeon.com/amdgpu-install/5.7.1/ubuntu/focal/amdgpu-install_5.7.50701-1_all.deb
+RUN apt-get install -y ./amdgpu-install_5.7.50701-1_all.deb
+
+RUN amdgpu-install -y --usecase=graphics,rocm
+RUN rocminfo
+
+
+
+#RUN curl https://ollama.ai/install.sh | sh
+#RUN ollama run llama2
+WORKDIR /code
+
+COPY requirements.txt /code/requirements.txt
+#RUN pip3 install --no-cache-dir --upgrade -r requirements.txt
+
+RUN pip3 install --no-cache-dir -r requirements.txt --break-system-packages
+RUN pip3 freeze > current_requirements.txt
+
+COPY . .
+
+ENTRYPOINT ["python3", "/code/app.py"]
+
+#gunicorn -w 4 -b 0.0.0.0 'hello:create_app()'
+#ENTRYPOINT ["gunicorn", "-w", "1", "-b", "0.0.0.0", "app:create_app()"]
+#ENTRYPOINT ["gunicorn", "-w", "1", "-b", "0.0.0.0:5000", "app:create_app()"]
+#gunicorn app:app --worker-class eventlet -w 1 --bind 0.0.0.0:5000 --reload
+
+
+#ENTRYPOINT ["fastapi", "run", "main.py", "--port", "8000"]
+
+#ENTRYPOINT ["uvicorn", "main:app", "--port", "8000", "--host", "0.0.0.0"]
+
+
diff --git a/deployment/docker-compose.yml b/deployment/docker-compose.yml
index 09a14e9..90dfb31 100644
--- a/deployment/docker-compose.yml
+++ b/deployment/docker-compose.yml
@@ -69,7 +69,6 @@ services:
       - ../ollama/ollama:/root/.ollama
     networks:
       - llm_network
-    #command: "ollama pull llama2"
     devices:
       #- /dev/dri/renderD128:/dev/dri/renderD128
       #- /dev/fdk
@@ -116,6 +115,10 @@ services:
       - OLLAMA_URI=http://ollama:11434
 
     build: ../backend
+    #build:
+    #  context: ../backend
+    #  dockerfile: ../backend/Deb_Dockerfile
+
     networks:
       - llm_network
     depends_on:
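
A minimal sketch of how the commented-out override added to deployment/docker-compose.yml could be enabled to build the backend from one of the new Dockerfiles instead of the default backend/Dockerfile. The service name "backend" and the surrounding keys are assumed from context, and the dockerfile path is written relative to the build context as Compose resolves it:

  # hypothetical service entry in deployment/docker-compose.yml
  backend:
    build:
      context: ../backend
      dockerfile: Deb_Dockerfile   # or Focal_Dockerfile for the ubuntu:focal / ROCm 5.7 variant
    environment:
      - OLLAMA_URI=http://ollama:11434
    networks:
      - llm_network

With a long build stage like amdgpu-install, rebuilding only this service via "docker compose build backend" avoids re-running the other services' builds.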