train_IQA.yml
name: LPIPS-Alex_TonPIPAL
use_tb_logger: true
gpu_ids: [2]

#### datasets
datasets:
  train:
    # Select PIPAL or BAPPS as the training data
    choice: PIPAL
    # Settings for PIPAL
    # Replace 'mos_root', 'ref_root' and 'dist_root' with your own paths
    train_PIPAL:
      phase: train
      name: PIPAL
      mos_root: /home/hmcai/home/PIPAL_SWD_Remake/Remake_Dataset/PIPAL/Full_EloMOSV2
      ref_root: /home/hmcai/home/PIPAL_SWD_Remake/Remake_Dataset/PIPAL/Remake_Train_Ref
      dist_root: /home/hmcai/home/PIPAL_SWD_Remake/Remake_Dataset/PIPAL/Remake_Train_Dis
      n_workers: 8  # per GPU
      batch_size: 16
      crop_flag: true
      crop_size: 248
      norm_flag: true
    # Settings for BAPPS
    # Replace 'train_root' and 'valid_root' with your own paths
    # Choose 'both', 'train' or 'valid' for 'train_valid'
    train_BAPPS:
      phase: train
      name: BAPPS
      train_valid: both  # both, train or valid | decides which part of the BAPPS dataset is used
      train_root: /home/hmcai/home/Dataset/BAPPS/train
      valid_root: /home/hmcai/home/Dataset/BAPPS/val
      n_workers: 8  # per GPU
      batch_size: 16
      crop_flag: true
      crop_size: 248
      norm_flag: true
  val:
    # Replace the validation sets below and adjust data_util.py to your needs
    test_PIPAL_Full:
      name: PIPAL
      mos_root: /home/hmcai/home/Dataset/PIPAL/PIPAL_Partition/FullDataset/Full_EloMOSV2
      ref_root: /home/hmcai/home/PIPAL_SWD_Remake/Remake_Dataset/PIPAL/Remake_Full_Ref
      dist_root: /home/hmcai/home/PIPAL_SWD_Remake/Remake_Dataset/PIPAL/Remake_Full_Dis
    test_PIPAL_Valid:
      name: PIPAL
      mos_root: /home/hmcai/home/Dataset/PIPAL/PIPAL_Partition/MOS/Valid_EloMOSV2
      ref_root: /home/hmcai/home/PIPAL_SWD_Remake/Remake_Dataset/PIPAL/Remake_Valid_Ref
      dist_root: /home/hmcai/home/PIPAL_SWD_Remake/Remake_Dataset/PIPAL/Remake_Valid_Dis

#### network structures
network_G:
  FENet: Alex        # Alex or VGG16
  tune_flag: false   # fixed (false) or tunable (true) feature extractor
  bias_flag: true    # enable the bias of the conv2d in the regression layer
  lpips_flag: false  # use LPIPS's regression layer
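  # Note (added for orientation; based only on the option names and comments above):
  # 'FENet' appears to choose the feature-extraction backbone (AlexNet or VGG16),
  # and 'lpips_flag' appears to swap in the regression layer used by LPIPS instead
  # of this repo's own head. The exact behaviour is defined by the model code, so
  # treat this as an assumption rather than documentation.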

#### path
path:
  pretrain_model_G:
  pretrain_model_R:
  strict_load: true
  resume_state:

#### training settings: learning rate scheme, loss
train:
  lr_IQA: !!float 1e-4
  weight_decay_IQA: 0
  niter: 400000
  warmup_iter: -1  # no warm up
  loss_weight: !!float 1.0
  manual_seed: 10
  val_freq: !!float 2e3

#### logger
logger:
  print_freq: 50
  save_checkpoint_freq: !!float 2e3
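
# Usage sketch (the entry point and option path below are assumptions, not taken
# from this file; check the repository's README for the actual command):
#   python train.py -opt train_IQA.yml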