#!/usr/bin/env bash
# Fine-tune NLI models (BERT-base / RoBERTa-base) on the QA2NLI task via ../run_nli.py.
# Requires: ../run_nli.py and task data under ../DATA/QA2NLI.
# NOTE(review): original file carried " | |" table-extraction residue on every
# line (a shell syntax error) and a duplicated --eval_all_checkpoints flag;
# both removed here.
set -euo pipefail

export DATA_DIR=../DATA
export TASK_NAME=QA2NLI

# BERT-base: train, then evaluate every saved checkpoint.
python ../run_nli.py \
  --model_type bert \
  --model_name_or_path bert-base-uncased \
  --task_name "$TASK_NAME" \
  --do_train \
  --do_eval \
  --eval_all_checkpoints \
  --do_lower_case \
  --data_dir "$DATA_DIR/$TASK_NAME" \
  --max_seq_length 128 \
  --per_gpu_eval_batch_size=16 \
  --per_gpu_train_batch_size=8 \
  --gradient_accumulation_steps 3 \
  --logging_steps 5000 \
  --save_steps 5000 \
  --learning_rate 2e-5 \
  --num_train_epochs 2.0 \
  --output_dir "./tmp/$TASK_NAME/bert-base/"

# Alternative configurations kept for reference (disabled):
# BERT-large with warmup and no gradient clipping.
#python ../run_nli.py --model_type bert --model_name_or_path bert-large-uncased --task_name "$TASK_NAME" --do_train --evaluate_during_training --do_eval --do_lower_case --data_dir "$DATA_DIR/$TASK_NAME" --max_seq_length 128 --per_gpu_eval_batch_size=16 --per_gpu_train_batch_size=8 --gradient_accumulation_steps 1 --learning_rate 2e-5 --save_steps 200 --adam_epsilon 1e-6 --no_clip_grad_norm --warmup_proportion 0.1 --num_train_epochs 5.0 --output_dir "./tmp/$TASK_NAME/bertlarge/"

# RoBERTa-base: train, then evaluate every saved checkpoint.
python ../run_nli.py \
  --model_type roberta \
  --model_name_or_path roberta-base \
  --task_name "$TASK_NAME" \
  --do_train \
  --do_eval \
  --eval_all_checkpoints \
  --do_lower_case \
  --data_dir "$DATA_DIR/$TASK_NAME" \
  --max_seq_length 256 \
  --per_gpu_eval_batch_size=16 \
  --per_gpu_train_batch_size=8 \
  --gradient_accumulation_steps 3 \
  --logging_steps 5000 \
  --save_steps 5000 \
  --learning_rate 1e-5 \
  --num_train_epochs 2.0 \
  --output_dir "./tmp/$TASK_NAME/roberta/"

# RoBERTa continued from a previously fine-tuned QNLI checkpoint.
#python ../run_nli.py --model_type roberta --model_name_or_path /home/bimu/PycharmProjects/liu_nli/tmp-1/QNLI/roberta/ --task_name "$TASK_NAME" --do_train --do_eval --do_lower_case --data_dir "$DATA_DIR/$TASK_NAME" --max_seq_length 128 --per_gpu_eval_batch_size=16 --per_gpu_train_batch_size=16 --gradient_accumulation_steps 2 --learning_rate 2e-5 --num_train_epochs 5.0 --output_dir "./tmp/$TASK_NAME/roberta/"
# XLNet-base variant.
#python ../run_nli.py --model_type xlnet --model_name_or_path xlnet-base-cased --task_name "$TASK_NAME" --do_train --do_eval --eval_all_checkpoints --do_lower_case --data_dir "$DATA_DIR/$TASK_NAME" --max_seq_length 128 --per_gpu_eval_batch_size=16 --per_gpu_train_batch_size=8 --gradient_accumulation_steps 3 --logging_steps 500 --save_steps 500 --learning_rate 2e-5 --adam_epsilon 1e-6 --num_train_epochs 5.0 --output_dir "./tmp/$TASK_NAME/xlnet/"
# BERT-base with larger per-GPU batch and 5 epochs.
#python ../run_nli.py --model_type bert --model_name_or_path bert-base-uncased --task_name "$TASK_NAME" --do_train --do_eval --do_lower_case --data_dir "$DATA_DIR/$TASK_NAME" --max_seq_length 128 --per_gpu_eval_batch_size=16 --per_gpu_train_batch_size=16 --gradient_accumulation_steps 2 --logging_steps 500 --save_steps 500 --eval_all_checkpoints --learning_rate 2e-5 --num_train_epochs 5.0 --output_dir "./tmp/$TASK_NAME/bert-base/"