-
Notifications
You must be signed in to change notification settings - Fork 6
Expand file tree
/
Copy pathexp_dist_shift.sh
More file actions
executable file
·50 lines (43 loc) · 1.62 KB
/
exp_dist_shift.sh
File metadata and controls
executable file
·50 lines (43 loc) · 1.62 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
#!/usr/bin/env bash
# Copyright (c) Jin Zhu.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
#
# Distribution-shift experiment driver: builds training datasets for each
# (dataset, source model, n_prompts) triple, then evaluates detectors on
# each dataset while training on the other two.

set -euo pipefail  # exit on error, on unset variables, and on pipeline failure

echo "$(date), Setup the environment ..."

# prepare folders
data_path=exp_main/data            # pre-built evaluation data (produced by exp_main)
exp_path=exp_dist_shift
training_data_path=$exp_path/data  # generated training datasets
res_path=$exp_path/results         # detector outputs
mkdir -p -- "$exp_path" "$training_data_path" "$res_path"

# Space-separated lists: iterated with intentional word-splitting below.
datasets="xsum squad writing"
source_models="gpt2-xl opt-2.7b gpt-neo-2.7B"
n_prompts_list=( 50 70 90 110 130 150 )
# preparing datasets: build one training dataset per
# (dataset, source model, n_prompts) triple.
for D in $datasets; do            # word-splitting on $datasets is intentional
  for M in $source_models; do     # word-splitting on $source_models is intentional
    for N in "${n_prompts_list[@]}"; do
      echo "$(date), Preparing dataset ${D}_${M}_${N} ..."
      python scripts/data_builder.py \
        --dataset "$D" \
        --n_samples 200 \
        --base_model_name "$M" \
        --output_file "$training_data_path/${D}_${M}_${N}" \
        --n_prompts "$N"
    done
  done
done
# evaluate Fast-DetectGPT and fast baselines: for each target dataset,
# train on the other two datasets (their paths joined by '&').
for N in "${n_prompts_list[@]}"; do
  for D in $datasets; do          # word-splitting on $datasets is intentional
    # collect the two held-out datasets (everything except $D)
    train_parts=()
    for d in $datasets; do
      # quote both operands so ${D} is compared literally, not as a glob pattern
      if [[ "$d" != "$D" ]]; then
        train_parts+=("$d")
      fi
    done
    for M in $source_models; do   # word-splitting on $source_models is intentional
      # the '&' separator is consumed by detect_gpt_ada.py, not the shell
      train_dataset="${training_data_path}/${train_parts[0]}_${M}_${N}&${training_data_path}/${train_parts[1]}_${M}_${N}"
      echo "$(date), Evaluating StatsDetectGPT/FastDetectGPT on ${D}_${M}_${N} ..."
      python scripts/detect_gpt_ada.py \
        --sampling_model_name "$M" \
        --scoring_model_name "$M" \
        --dataset "$D" \
        --train_dataset "$train_dataset" \
        --dataset_file "$data_path/${D}_${M}" \
        --output_file "$res_path/${D}_${M}_${N}"
    done
  done
done