cyfyifanchen committed on
Commit
12bbaf2
·
verified ·
1 Parent(s): cfc0dcd

Adding Shisa

Browse files
Files changed (1) hide show
  1. entrypoint.sh +16 -19
entrypoint.sh CHANGED
@@ -2,30 +2,27 @@
2
 
3
  mkdir -p data
4
 
5
- #HF_MODEL_PATH=Qwen/Qwen2.5-1.5B-Instruct
6
- #HF_MODEL_PATH=deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B
7
- #HF_MODEL_PATH=Qwen/Qwen2.5-VL-3B-Instruct
8
- HF_MODEL_PATH=TEN-framework/TEN_Turn_Detection
9
- HF_MODEL_NAME=$(basename ${HF_MODEL_PATH})
10
- LOCAL_MODEL_PATH=./data/${HF_MODEL_NAME}
11
 
12
- # TODO: use your own key and put into secret
13
- VLLM_SERVE_API_KEY=TEN_Turn_Detection
14
 
15
- # download model
16
- HF_HUB_ENABLE_HF_TRANSFER=1 huggingface-cli download ${HF_MODEL_PATH} --local-dir ${LOCAL_MODEL_PATH}
 
 
 
17
 
18
- # start vllm server
19
- vllm serve ${LOCAL_MODEL_PATH} --served-model-name ${HF_MODEL_NAME} --api-key ${VLLM_SERVE_API_KEY} &
20
 
21
- # start frontend
22
  export NEXT_PUBLIC_EDIT_GRAPH_MODE=false
23
- #cd /app/playground && npm run dev &
24
  cd /app/demo && npm run dev &
25
 
26
- # start backend
27
- #export OPENAI_API_BASE=http://127.0.0.1:8000/v1
28
- #export OPENAI_PROXY_URL=
29
- #export OPENAI_MODEL=${HF_MODEL_NAME}
30
- #export OPENAI_API_KEY=${VLLM_SERVE_API_KEY}
31
  cd /app && task run
 
# entrypoint.sh — container entry point:
#   1. download two HF models, 2. serve each with vLLM (background),
#   3. start the Next.js demo frontend (background), 4. run the backend
#      in the foreground so the container stays alive.

# Local storage for downloaded model weights.
mkdir -p data

# --- Model 1: TEN Turn Detection ---
HF_MODEL_PATH_1=TEN-framework/TEN_Turn_Detection
HF_MODEL_NAME_1=$(basename "${HF_MODEL_PATH_1}")
LOCAL_MODEL_PATH_1=./data/${HF_MODEL_NAME_1}
# TODO: move API keys into a secret store — hardcoded keys leak via image layers.
VLLM_SERVE_API_KEY_1=TEN_Turn_Detection

# HF_HUB_ENABLE_HF_TRANSFER=1 enables the faster Rust-based downloader.
HF_HUB_ENABLE_HF_TRANSFER=1 huggingface-cli download "${HF_MODEL_PATH_1}" --local-dir "${LOCAL_MODEL_PATH_1}"
# Explicit --port: two vllm instances cannot both bind the default port 8000.
vllm serve "${LOCAL_MODEL_PATH_1}" --served-model-name "${HF_MODEL_NAME_1}" --api-key "${VLLM_SERVE_API_KEY_1}" --port 8000 &

# --- Model 2: Shisa 7b V1 ---
HF_MODEL_PATH_2=TEN-framework/Shisa-7b-V1
HF_MODEL_NAME_2=$(basename "${HF_MODEL_PATH_2}")
LOCAL_MODEL_PATH_2=./data/${HF_MODEL_NAME_2}
VLLM_SERVE_API_KEY_2=Shisa_7b_V1

HF_HUB_ENABLE_HF_TRANSFER=1 huggingface-cli download "${HF_MODEL_PATH_2}" --local-dir "${LOCAL_MODEL_PATH_2}"
# Second vLLM server on its own port (see note above re: default 8000 clash).
vllm serve "${LOCAL_MODEL_PATH_2}" --served-model-name "${HF_MODEL_NAME_2}" --api-key "${VLLM_SERVE_API_KEY_2}" --port 8001 &

# Start frontend (backgrounded; the `cd && npm` AND-list runs in the
# backgrounded subshell, so the parent's cwd is unaffected).
export NEXT_PUBLIC_EDIT_GRAPH_MODE=false
cd /app/demo && npm run dev &

# Start backend in the foreground — this is the container's main process.
cd /app && task run