From 5b826c84afab65f7f51f8aeee30c66e08d29630a Mon Sep 17 00:00:00 2001
From: SarveshAtawane
Date: Fri, 2 Aug 2024 01:36:58 +0530
Subject: [PATCH] added multistage docker file

Signed-off-by: SarveshAtawane
---
 src/DockerFile           | 46 ++++++++++++++++++++++++++++++++++++++++
 src/Readme.md            |  7 ++++++
 src/core/conversation.py | 23 ++++++++++----------
 src/entrypoint.sh        |  8 +++++++
 4 files changed, 73 insertions(+), 11 deletions(-)
 create mode 100644 src/DockerFile
 create mode 100644 src/Readme.md
 create mode 100644 src/entrypoint.sh

diff --git a/src/DockerFile b/src/DockerFile
new file mode 100644
index 0000000..7642486
--- /dev/null
+++ b/src/DockerFile
@@ -0,0 +1,46 @@
+FROM node:20-alpine AS frontend-build
+WORKDIR /app/frontend
+COPY frontend .
+RUN yarn install && yarn build
+
+
+FROM nvidia/cuda:11.8.0-cudnn8-devel-ubuntu22.04 AS backend-build
+ENV DEBIAN_FRONTEND=noninteractive
+ENV PYTHONUNBUFFERED=1
+ENV PATH="/usr/local/bin:${PATH}"
+RUN apt-get update && \
+    apt-get install -y python3-pip python3-dev && \
+    apt-get clean && \
+    rm -rf /var/lib/apt/lists/*
+WORKDIR /app/core
+COPY core/requirements.txt .
+RUN pip3 install --no-cache-dir -r requirements.txt --extra-index-url https://download.pytorch.org/whl/cu118
+
+
+FROM nvidia/cuda:11.8.0-cudnn8-runtime-ubuntu22.04
+ENV DEBIAN_FRONTEND=noninteractive
+ENV PYTHONUNBUFFERED=1
+ENV PATH="/usr/local/bin:${PATH}"
+ENV NODE_VERSION=20.x
+RUN apt-get update && apt-get install -y \
+    python3-pip \
+    wget \
+    curl \
+    && curl -fsSL https://deb.nodesource.com/setup_${NODE_VERSION} | bash - \
+    && apt-get install -y nodejs \
+    && npm install -g yarn \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/*
+
+WORKDIR /app
+COPY --from=backend-build /usr/local/lib/python3.10/dist-packages /usr/local/lib/python3.10/dist-packages
+COPY core ./core
+COPY --from=frontend-build /app/frontend ./frontend
+COPY frontend/package.json frontend/yarn.lock ./frontend/
+WORKDIR /app/frontend
+RUN yarn install --production
+WORKDIR /app
+COPY entrypoint.sh .
+RUN chmod +x entrypoint.sh
+EXPOSE 3000 8080
+ENTRYPOINT ["./entrypoint.sh"]
\ No newline at end of file
diff --git a/src/Readme.md b/src/Readme.md
new file mode 100644
index 0000000..26a4a3a
--- /dev/null
+++ b/src/Readme.md
@@ -0,0 +1,7 @@
+```bash
+docker build -f DockerFile -t aifaq .
+```
+Now run the image with this command:
+```bash
+docker run --gpus all -p 3000:3000 -p 8080:8080 aifaq
+```
\ No newline at end of file
diff --git a/src/core/conversation.py b/src/core/conversation.py
index 8448392..75cfaea 100644
--- a/src/core/conversation.py
+++ b/src/core/conversation.py
@@ -32,17 +32,18 @@ def get_conversation():
 
     # build huggingface pipeline for using zephyr-7b-beta
     llm_pipeline = pipeline(
-            "text-generation",
-            model=model,
-            tokenizer=tokenizer,
-            use_cache=True,
-            device_map="auto",
-            max_length=4096, # 4096
-            do_sample=True,
-            top_k=5,
-            num_return_sequences=1,
-            eos_token_id=tokenizer.eos_token_id,
-            pad_token_id=tokenizer.eos_token_id,
+        "text-generation",
+        model=model,
+        tokenizer=tokenizer,
+        use_cache=True,
+        device_map="auto",
+        max_length=4096, # 4096
+        truncation=True,
+        do_sample=True,
+        top_k=5,
+        num_return_sequences=1,
+        eos_token_id=tokenizer.eos_token_id,
+        pad_token_id=tokenizer.eos_token_id,
     )
 
     # specify the llm
diff --git a/src/entrypoint.sh b/src/entrypoint.sh
new file mode 100644
index 0000000..f74bb3b
--- /dev/null
+++ b/src/entrypoint.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+cd /app/core
+python3 fetch_and_organize_data.py
+python3 api.py &
+
+cd /app/frontend
+yarn start
\ No newline at end of file