zylj committed
Commit 255ac49
1 Parent(s): c30d5c3

add model 7b

Files changed (2)
  1. Dockerfile +1 -1
  2. Dockerfile-7b +1 -1
Dockerfile CHANGED
@@ -17,7 +17,7 @@ RUN pip install -r requriments.txt \
   && git lfs install \
   && git clone https://huggingface.co/lmsys/${vicuna_diff} \
   && git clone https://huggingface.co/decapoda-research/${llama_version} \
-  && pip install git+https://github.com/lm-sys/FastChat.git@v0.1.10 \
+  && pip install git+https://github.com/lm-sys/FastChat.git@v0.2.0 \
   && python -m fastchat.model.apply_delta --base ${dir}/${llama_version}/ --target ${dir}/vicuna_out --delta ${dir}/${vicuna_diff}/ \
   && sed -i -e '16c\ \ llama_model: "/usr/local/src/MiniGPT-4/vicuna_out"' ${dir}/minigpt4/configs/models/minigpt4.yaml
 
Dockerfile-7b CHANGED
@@ -17,7 +17,7 @@ RUN pip install -r requriments.txt \
   && git lfs install \
   && git clone https://huggingface.co/lmsys/${vicuna_diff} \
   && git clone https://huggingface.co/decapoda-research/${llama_version} \
-  && pip install git+https://github.com/lm-sys/FastChat.git@v0.1.10 \
+  && pip install git+https://github.com/lm-sys/FastChat.git@v0.2.0 \
   && python -m fastchat.model.apply_delta --base ${dir}/${llama_version}/ --target ${dir}/vicuna_out --delta ${dir}/${vicuna_diff}/ \
   && sed -i -e '16c\ \ llama_model: "/usr/local/src/MiniGPT-4/vicuna_out"' ${dir}/minigpt4/configs/models/minigpt4.yaml
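
Note on the apply_delta step in both files: FastChat's fastchat.model.apply_delta reconstructs the full Vicuna checkpoint by adding the lmsys delta weights to the base LLaMA weights, and the merged model written to ${dir}/vicuna_out is what the sed line points MiniGPT-4's llama_model at. The sketch below only illustrates that idea; it is not FastChat's code, and the three paths are hypothetical stand-ins for ${dir}/${llama_version}, ${dir}/${vicuna_diff} and ${dir}/vicuna_out.

import torch
from transformers import AutoModelForCausalLM

# Hypothetical paths standing in for the Dockerfile variables.
base_path = "/usr/local/src/MiniGPT-4/llama-7b-hf"
delta_path = "/usr/local/src/MiniGPT-4/vicuna-7b-delta-v0"
target_path = "/usr/local/src/MiniGPT-4/vicuna_out"

# Load the base LLaMA weights and the Vicuna delta checkpoint in fp16.
base = AutoModelForCausalLM.from_pretrained(base_path, torch_dtype=torch.float16)
delta = AutoModelForCausalLM.from_pretrained(delta_path, torch_dtype=torch.float16)

# Merge: add the matching delta parameter to every base parameter in place,
# so `base` now holds the full Vicuna weights.
delta_state = delta.state_dict()
for name, param in base.state_dict().items():
    param.data += delta_state[name]

# Write the merged model where MiniGPT-4 expects its llama_model.
base.save_pretrained(target_path)

The actual FastChat tool additionally takes care of the tokenizer and of memory-efficient loading, which this sketch omits.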