pretrain_model.sh
#!/bin/bash
set -u
unset NCCL_DEBUG
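# Megatron-LM expects a single CUDA device connection so that communication
# kernels are launched in issue order, which is needed for correct overlap of
# tensor-parallel communication with computation.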
export CUDA_DEVICE_MAX_CONNECTIONS=1
######## GPT or Retro? ########
# 0 : GPT.
# 1 : Retro.
ADD_RETRIEVER=1
######## Megatron, Retro dirs. ########
REPO_DIR="<path/to/megatron/repo>"
RETRO_WORKDIR="<path/to/retro/data/directory>"
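# RETRO_WORKDIR should point at the directory produced by the Retro
# preprocessing pipeline (retrieval database, search index, and pretraining
# neighbors); pretrain_retro.py reads its retrieval data from there.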
######## Data. ########
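# DATA_BLEND follows Megatron's --data-path format: whitespace-separated
# <weight> <dataset-prefix> pairs, e.g. (hypothetical prefixes):
#   DATA_BLEND="0.5 /data/wiki_text_document 0.5 /data/books_text_document"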
DATA_BLEND="<see --data-path in arguments.py>"
######## Args. ########
ARGS=" \
--log-interval 1 \
--use-flash-attn \
--apply-layernorm-1p \
--untie-embeddings-and-output-weights \
--disable-bias-linear \
--no-position-embedding \
--use-rotary-position-embeddings \
--rotary-percent 0.5 \
--swiglu \
--attention-dropout 0.0 \
--hidden-dropout 0.0 \
--exit-duration-in-mins 220 \
--tensor-model-parallel-size 1 \
--pipeline-model-parallel-size 1 \
--num-layers 24 \
--hidden-size 1024 \
--num-attention-heads 16 \
--seq-length 512 \
--max-position-embeddings 512 \
--micro-batch-size 16 \
--global-batch-size 256 \
--train-samples 200000 \
--lr-decay-samples 175000 \
--lr-warmup-samples 10000 \
--lr 2.5e-5 \
--min-lr 2.5e-6 \
--lr-decay-style cosine \
--eval-iters 50 \
--eval-interval 2000 \
--tokenizer-type GPTSentencePieceTokenizer \
--tokenizer-model <path/to/gpt/tokenizer/model> \
--data-path ${DATA_BLEND} \
--split 98,2,0 \
--clip-grad 1.0 \
--weight-decay 0.1 \
--adam-beta1 0.9 \
--adam-beta2 0.95 \
--init-method-std 0.007 \
--log-params-norm \
--log-num-zeros-in-grad \
--bf16 \
"
######## Retro. ########
if [ "$ADD_RETRIEVER" = "0" ]; then
SCRIPT=pretrain_gpt.py
else
ARGS="${ARGS} \
--retro-workdir ${RETRO_WORKDIR} \
--retro-add-retriever \
"
SCRIPT=pretrain_retro.py
fi
######## Command. ########
NPROCS=8
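# NODE_RANK and MASTER_ADDR are expected to be provided by the launcher
# environment (e.g. the job scheduler); with `set -u` the script aborts if
# they are unset. For a single node, NODE_RANK=0 and MASTER_ADDR=localhost
# would be typical.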
CMD="\
pwd && cd ${REPO_DIR} && pwd && \
export PYTHONPATH=$PYTHONPATH:${REPO_DIR} && \
python -m torch.distributed.run \
--nproc_per_node ${NPROCS} \
--nnodes 1 \
--node_rank ${NODE_RANK} \
--master_addr ${MASTER_ADDR} \
--master_port 6000 \
${SCRIPT} ${ARGS} \
"
echo "~~~~~~~~~~~~~~~~~~~~~~~~~~"
echo "CMD = '$CMD'."
echo "~~~~~~~~~~~~~~~~~~~~~~~~~~"
eval $CMD