ChatGLM2-6B / ptuning / web_demo.sh
whyazalea's picture
Upload folder using huggingface_hub
b7b138d
raw
history blame contribute delete
227 Bytes
#!/usr/bin/env bash
# Launch the ChatGLM2-6B web demo on GPU 0, loading a P-Tuning v2
# (soft-prompt) checkpoint on top of the base model.
#
# Requires: web_demo.py in the current directory, the base model at
# /mnt/workspace/chatglm-6b, and the trained checkpoint under output/.
set -euo pipefail

# Soft-prompt length; must match the value used when the checkpoint
# was trained (the "pt-128" in the checkpoint directory name).
PRE_SEQ_LEN=128

CUDA_VISIBLE_DEVICES=0 python3 web_demo.py \
    --model_name_or_path /mnt/workspace/chatglm-6b \
    --ptuning_checkpoint output/adgen-chatglm2-6b-pt-128-2e-2/checkpoint-3000 \
    --pre_seq_len "$PRE_SEQ_LEN"