Commit c2afac5: "update results"

Parent: fdab190
File tree: 8 files changed, 3663 additions (+), 5175 deletions (-)

=6.2.2

Lines changed: 2 additions & 0 deletions
@@ -0,0 +1,2 @@
+ Defaulting to user installation because normal site-packages is not writeable
+ Requirement already satisfied: Pillow in /opt/conda/lib/python3.7/site-packages (7.0.0)

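Note: the file name `=6.2.2` and its contents (two lines of pip console output) suggest this file was committed by accident. Running something like `pip install Pillow>=6.2.2` without quotes makes the shell treat `>=6.2.2` as an output redirection, so pip's stdout is written to a file named `=6.2.2`. Quoting the requirement, e.g. `pip install "Pillow>=6.2.2"`, avoids creating such a file. This reading is an inference from the file contents, not something stated in the commit.
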
MODEL_ZOO.md

Lines changed: 2 additions & 1 deletion
@@ -17,7 +17,7 @@ ISA | ResNet-101 | Train | Val | 40000 | 8 | No | No | No | 79.55 | 80.62 | [Log
  OCR | ResNet-101 | Train | Val | 40000 | 8 | No | No | No | 79.63 | 80.68 | [Log](https://github.com/hsfzxjy/models.storage/releases/download/openseg.pytorch.cityscapes/spatial_ocrnet_deepbase_resnet101_dilated8_1.log) / [Model](https://github.com/hsfzxjy/models.storage/releases/download/openseg.pytorch.cityscapes/spatial_ocrnet_deepbase_resnet101_dilated8_1_latest.pth) | scripts/cityscapes/ocrnet/run_r_101_d_8_ocrnet_train.sh |
  ASP-OCR | ResNet-101 | Train | Val | 40000 | 8 | No | No | No | 79.89 | 80.69 | [Log](https://github.com/hsfzxjy/models.storage/releases/download/openseg.pytorch.cityscapes/spatial_asp_ocrnet_deepbase_resnet101_dilated8_1.log) / [Model](https://github.com/hsfzxjy/models.storage/releases/download/openseg.pytorch.cityscapes/spatial_asp_ocrnet_deepbase_resnet101_dilated8_1_latest.pth) | scripts/cityscapes/ocrnet/run_r_101_d_8_asp_ocrnet_train.sh |
  OCR | HRNet-W48 | Train | Val | 80000 | 8 | No | No | No | 81.09 | 81.73 | [Log](https://github.com/hsfzxjy/models.storage/releases/download/openseg.pytorch.cityscapes/hrnet_w48_ocr_1.log) / [Model](https://github.com/hsfzxjy/models.storage/releases/download/openseg.pytorch.cityscapes/hrnet_w48_ocr_1_latest.pth) | scripts/cityscapes/hrnet/run_h_48_d_4_ocr.sh |
- OCR | HRNet-W48 (Paddle) | Train | Val | 40000 | 16 | No | No | No | 81.53 | ---- | [Log]() / [Model]() | scripts/cityscapes/hrnet/run_h_48_d_4_ocr_paddle.sh |
+ OCR | HRNet-W48 (Paddle) | Train | Val | 40000 | 16 | No | No | No | 81.53 | 82.78 | [Log]() / [Model]() | scripts/cityscapes/hrnet/run_h_48_d_4_ocr_paddle.sh |

  ### How to reproduce the HRNet + OCR with Mapillary pretraining

@@ -46,6 +46,7 @@ Methods | Backbone | Train Set | Test Set | Iterations | Batch Size | OHEM | Mul
  | :---- | :----: | :--: | :--: | :--: | :--: | :--: | :--: | :--: | :--: | :--: | :--: |
  OCR | HRNet-W48 | Train | Val | 60000 | 16 | No | No | No | 55.11 | [Log](https://github.com/hsfzxjy/models.storage/releases/download/openseg.pytorch.pascal_context/hrnet_w48_ocr_hrnet48_2.log) / [Model](https://github.com/hsfzxjy/models.storage/releases/download/openseg.pytorch.pascal_context/hrnet_w48_ocr_hrnet48_2_latest.pth) | scripts/pascal_context/run_h_48_d_4_ocr_train.sh |
  OCR | HRNet-W48 (Paddle) | Train | Val | 60000 | 16 | No | No | No | 57.82 | [Log]() / [Model]() | scripts/pascal_context/run_h_48_d_4_ocr_train_paddle.sh |
+ OCR | HRNet-W48 (Paddle) | Train | Val | 60000 | 16 | No | Yes | Yes | 59.13 | [Log]() / [Model]() | scripts/pascal_context/run_h_48_d_4_ocr_train_paddle.sh |

  ## LIP

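The "Multi-scale" and "Flip" columns in these tables refer to test-time augmentation: the image is evaluated at several scales (and optionally mirrored), the logits are resized back to the original resolution, and the predictions are averaged. The evaluation code itself is not part of this diff; the sketch below is only an illustration of the idea. The `model` here is assumed to take a normalized NCHW tensor and return a single logits tensor, and the scale list mirrors the `scale_search` values that appear in the configuration file later in this commit.

```python
import torch
import torch.nn.functional as F

@torch.no_grad()
def ms_flip_inference(model, image,
                      scales=(0.5, 0.75, 1.0, 1.25, 1.5, 1.75, 2.0),
                      flip=True):
    """Average segmentation logits over several scales and a horizontal flip.

    image: float tensor of shape (N, C, H, W), already normalized.
    Returns averaged logits of shape (N, num_classes, H, W).
    """
    _, _, h, w = image.shape
    total = None
    for s in scales:
        scaled = F.interpolate(image, scale_factor=s, mode="bilinear", align_corners=False)
        logits = F.interpolate(model(scaled), size=(h, w), mode="bilinear", align_corners=False)
        if flip:
            # mirror the input, predict, then mirror the prediction back
            flipped = model(torch.flip(scaled, dims=[3]))
            flipped = F.interpolate(flipped, size=(h, w), mode="bilinear", align_corners=False)
            logits = (logits + torch.flip(flipped, dims=[3])) / 2
        total = logits if total is None else total + logits
    return total / len(scales)
```
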
README.md

Lines changed: 2 additions & 2 deletions
@@ -1,4 +1,4 @@
- # openseg.pytorch
+ # openseg.pytorch [pytorch-1.7]

  [![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/object-contextual-representations-for/semantic-segmentation-on-coco-stuff-test)](https://paperswithcode.com/sota/semantic-segmentation-on-coco-stuff-test?p=object-contextual-representations-for)

@@ -14,7 +14,7 @@
  ## News

- - 2021/02/14 Support **pytorch-1.7, mixed-precision, and distributed training**. We also will add more segmentation models that achieve better performance based on the paddlepaddle image pretrained weights soon. Please check [Model Zoo](https://github.com/openseg-group/openseg.pytorch/blob/pytorch-1.7/MODEL_ZOO.md) for more details. For example, we gain **2.61%** on PASCAL-Context and **2.89%** on COCO-Stuff with single-scale test.
+ - 2021/02/14 Support **pytorch-1.7, mixed-precision, and distributed training**. Based on the [PaddleClas](https://github.com/PaddlePaddle/PaddleClas) ImageNet pretrained weights, "HRNet + OCR" achieves **82.78%** on Cityscapes val, **59.13%** on PASCAL-Context val and **43.26%** on COCO-Stuff val. Check [Model Zoo](https://github.com/openseg-group/openseg.pytorch/blob/pytorch-1.7/MODEL_ZOO.md) for more details.

  - 2020/08/16 [MMSegmentation](https://github.com/open-mmlab/mmsegmentation) has supported our HRNet + OCR.

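The news entry above announces mixed-precision support on the pytorch-1.7 branch, but the training code itself is not shown in this diff. As a hedged sketch of what a mixed-precision loop looks like with the PyTorch 1.6+/1.7 `torch.cuda.amp` API, with the model, loader, and criterion left as placeholders rather than the repository's own objects:

```python
import torch
from torch.cuda.amp import GradScaler, autocast

def train_amp(model, loader, optimizer, criterion, device="cuda"):
    """Minimal mixed-precision training loop (illustrative, not the repo's trainer)."""
    scaler = GradScaler()
    model.train()
    for images, targets in loader:
        images, targets = images.to(device), targets.to(device)
        optimizer.zero_grad()
        with autocast():                   # run the forward pass in mixed precision
            outputs = model(images)
            loss = criterion(outputs, targets)
        scaler.scale(loss).backward()      # scale the loss to avoid fp16 gradient underflow
        scaler.step(optimizer)
        scaler.update()
```
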
Lines changed: 137 additions & 0 deletions
@@ -0,0 +1,137 @@
{
  "dataset": "cityscapes",
  "method": "fcn_segmentor",
  "data": {
    "image_tool": "cv2",
    "input_mode": "BGR",
    "num_classes": 19,
    "label_list": [7, 8, 11, 12, 13, 17, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 31, 32, 33],
    "data_dir": "/msravcshare/dataset/cityscapes",
    "workers": 8
  },
  "train": {
    "batch_size": 16,
    "data_transformer": {
      "size_mode": "fix_size",
      "input_size": [1024, 512],
      "align_method": "only_pad",
      "pad_mode": "random"
    }
  },
  "val": {
    "batch_size": 8,
    "scale_search": [0.5, 0.75, 1.0, 1.25, 1.5, 1.75, 2.0],
    "mode": "ms_test",
    "data_transformer": {
      "size_mode": "fix_size",
      "input_size": [2048, 1024],
      "align_method": "only_pad"
    }
  },
  "test": {
    "batch_size": 8,
    "scale_search": [0.5, 0.75, 1.0, 1.25, 1.5, 1.75, 2.0],
    "mode": "ms_test",
    "data_transformer": {
      "size_mode": "fix_size",
      "input_size": [2048, 1024],
      "align_method": "only_pad"
    }
  },
  "train_trans": {
    "trans_seq": ["random_resize", "random_crop", "random_hflip", "random_brightness"],
    "random_brightness": {
      "ratio": 1.0,
      "shift_value": 10
    },
    "random_hflip": {
      "ratio": 0.5,
      "swap_pair": []
    },
    "random_resize": {
      "ratio": 1.0,
      "method": "random",
      "scale_range": [0.5, 2.0],
      "aspect_range": [0.9, 1.1]
    },
    "random_crop": {
      "ratio": 1.0,
      "crop_size": [1024, 512],
      "method": "random",
      "allow_outside_center": false
    }
  },
  "val_trans": {
    "trans_seq": []
  },
  "normalize": {
    "div_value": 255.0,
    "mean_value": [0.485, 0.456, 0.406],
    "mean": [0.485, 0.456, 0.406],
    "std": [0.229, 0.224, 0.225]
  },
  "checkpoints": {
    "checkpoints_name": "fs_baseocnet_cityscapes_seg",
    "checkpoints_dir": "./checkpoints/cityscapes",
    "save_iters": 1000
  },
  "network": {
    "backbone": "deepbase_resnet101_dilated8",
    "multi_grid": [1, 1, 1],
    "model_name": "base_ocnet",
    "bn_type": "torchsyncbn",
    "stride": 8,
    "factors": [[8, 8]],
    "loss_weights": {
      "corr_loss": 0.01,
      "aux_loss": 0.4,
      "seg_loss": 1.0
    }
  },
  "logging": {
    "logfile_level": "info",
    "stdout_level": "info",
    "log_file": "./log/cityscapes/fs_baseocnet_cityscapes_seg.log",
    "log_format": "%(asctime)s %(levelname)-7s %(message)s",
    "rewrite": true
  },
  "lr": {
    "base_lr": 0.01,
    "metric": "iters",
    "lr_policy": "lambda_poly",
    "step": {
      "gamma": 0.5,
      "step_size": 100
    }
  },
  "solver": {
    "display_iter": 10,
    "test_interval": 1000,
    "max_iters": 40000
  },
  "optim": {
    "optim_method": "sgd",
    "adam": {
      "betas": [0.9, 0.999],
      "eps": 1e-08,
      "weight_decay": 0.0001
    },
    "sgd": {
      "weight_decay": 0.0005,
      "momentum": 0.9,
      "nesterov": false
    }
  },
  "loss": {
    "loss_type": "fs_auxce_loss",
    "params": {
      "ce_weight": [0.8373, 0.9180, 0.8660, 1.0345, 1.0166, 0.9969, 0.9754,
                    1.0489, 0.8786, 1.0023, 0.9539, 0.9843, 1.1116, 0.9037,
                    1.0865, 1.0955, 1.0865, 1.1529, 1.0507],
      "ce_reduction": "elementwise_mean",
      "ce_ignore_index": -1,
      "ohem_minkeep": 100000,
      "ohem_thresh": 0.9
    }
  }
}
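
The configuration above drives training through generic keys such as `optim.sgd`, `lr.base_lr`, `lr.lr_policy = "lambda_poly"`, and `solver.max_iters`. The scheduler implementation is not part of this diff; the following is a minimal sketch of how such a config could be consumed and how a `lambda_poly` policy is commonly wired up with `torch.optim.lr_scheduler.LambdaLR`. The file path, the stand-in model, and the 0.9 poly power are assumptions for illustration, not values taken from the repository.

```python
import json
import torch

# Load the JSON configuration shown above (the path is illustrative).
with open("config.json") as f:
    cfg = json.load(f)

base_lr = cfg["lr"]["base_lr"]          # 0.01
max_iters = cfg["solver"]["max_iters"]  # 40000
sgd_cfg = cfg["optim"]["sgd"]

# Stand-in model; the real network is built from cfg["network"] elsewhere.
model = torch.nn.Conv2d(3, cfg["data"]["num_classes"], kernel_size=1)

optimizer = torch.optim.SGD(
    model.parameters(),
    lr=base_lr,
    momentum=sgd_cfg["momentum"],
    weight_decay=sgd_cfg["weight_decay"],
    nesterov=sgd_cfg["nesterov"],
)

# "lambda_poly": decay the base LR polynomially towards zero over max_iters.
# The 0.9 exponent is a conventional poly-schedule choice (assumption).
poly = lambda it: (1.0 - it / max_iters) ** 0.9
scheduler = torch.optim.lr_scheduler.LambdaLR(optimizer, lr_lambda=poly)

# In a training loop, scheduler.step() would be called once per iteration
# after optimizer.step().
```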
