# config.ini
[Test]
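# Experiment metadata used to build the test name, model snapshot, and results directory.
# OBJ_LIST holds one class name per NUM_CLASSES entry, with a matching color in COLORS;
# both are Python-style lists, presumably parsed downstream (e.g. with ast.literal_eval).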
EXPNO=01
REVNO=0
EXPNAME=Multi-2X
EXPDATE=10-15-2023
EXPTAG=(DGH.WH)
NUM_QUERIES=7
NUM_CLASSES=6
OBJ_LIST=["dog house center", "dog house", "house", "whole ship", "super structure", "ship stern"]
COLORS=["red", "darksalmon", "orange", "gold", "greenyellow", "limegreen"]
# TESTNAME=E${EXPNO}rev${REVNO}_${EXPDATE}_${EXPNAME}${EXPTAG}__DETR-${Training:DETR_VERSION}-NQ${NUM_QUERIES}_NC${NUM_CLASSES}__DT${Training:DATASET_SIZE}
TESTNAME=TNN_MO_6-Object_model
SNAPSHOT=${TESTNAME}.pth
RESULTS=${Dir:CKPT}${TESTNAME}/
[Dir]
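# Dataset, checkpoint, and example paths. COCO_PATH is built from the checkpoint
# directory and the test name via ${Section:Key} interpolation.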
DATA_DIR=/synthetic_dataset/sample_dataset/
CKPT=checkpoints/
COCO_PATH=${CKPT}${Test:TESTNAME}/
EXAMPLES=/TNN-MO/examples/
COCOSUBFOLDER=
# ${Test:TESTNAME}
[Training]
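# Training run settings. The ON/OFF flags below control resuming from a snapshot and
# distributed training.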
DETR_VERSION = 1.1
# Resume training [ON/OFF]
RESUME = OFF
# Distributed training [ON/OFF]
DIST = OFF
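# Device index to train on (presumably a CUDA/GPU device ID)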
DEVICE = 0
# For the sample dataset, DATASET_SIZE = 10 images
DATASET_SIZE = 10
EPOCHS = 300
BATCH_SIZE = 2
[DETR]
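# DETR transformer architecture hyperparameters.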
# Number of keypoints
NUM_KEYS = 32
# Number of attention heads inside the transformer's attentions (Default = 8)
HEADS = 8
# Number of encoding layers in the transformer (Default = 6)
ELAYERS = 6
# Number of decoding layers in the transformer (Default = 6)
DLAYERS = 6
# Intermediate size of the feedforward layers in the transformer blocks (Default = 2048)
DIM_FEEDFORWARD = 2048
# Size of the embeddings (dimension of the transformer) (Default = 256)
HIDDEN_DIM = 256
# Auxiliary loss (in DETR, auxiliary decoding losses applied after each decoder layer)
AUXLOSS = True
[Experiment]
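# Bookkeeping flag, presumably flipped to True once the experiment has been run.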
Experimented=False
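
# Note: the ${Section:Key} references above follow the syntax of Python configparser's
# ExtendedInterpolation. A minimal loading sketch (an assumption for illustration, not
# necessarily the project's actual loader):
#   from configparser import ConfigParser, ExtendedInterpolation
#   cfg = ConfigParser(interpolation=ExtendedInterpolation())
#   cfg.read("config.ini")
#   cfg["Test"]["RESULTS"]   # -> "checkpoints/TNN_MO_6-Object_model/"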