Commit
Adapt to 0-dimensional tensor (PaddlePaddle#8165)
WenmuZhou authored Oct 31, 2022
1 parent 484e481 commit 1af190e
Showing 3 changed files with 17 additions and 15 deletions.
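
Background on the change: in newer PaddlePaddle releases, reductions such as sum() and single-value slices yield true 0-dimensional tensors, so tensor.numpy() returns a 0-D NumPy array that can no longer be indexed with [0]; float(tensor) reads the scalar regardless of dimensionality. A minimal NumPy-only sketch of the failure mode (the exact Paddle version cutoff is an assumption inferred from the commit title):

    import numpy as np

    scalar = np.array(0.95)   # what .numpy() returns for a 0-D tensor
    # scalar[0]               # would raise IndexError on a 0-D array
    print(float(scalar))      # 0.95 -- works for 0-D and shape-[1] alike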
2 changes: 1 addition & 1 deletion ppocr/postprocess/rec_postprocess.py
@@ -891,7 +891,7 @@ def __call__(self, preds, label=None, length=None, *args, **kwargs):
) + length[i])].topk(1)[0][:, 0]
preds_prob = paddle.exp(
paddle.log(preds_prob).sum() / (preds_prob.shape[0] + 1e-6))
- text.append((preds_text, preds_prob.numpy()[0]))
+ text.append((preds_text, float(preds_prob)))
if label is None:
return text
label = self.decode(label)
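
The confidence above comes from preds_prob.sum() inside a log/exp average, exactly the kind of reduction that now yields a 0-D tensor, so float() replaces the [0] indexing. A minimal sketch with made-up probabilities (assumes a Paddle build with 0-D tensor semantics):

    import paddle

    probs = paddle.to_tensor([0.9, 0.8, 0.95])
    conf = paddle.exp(paddle.log(probs).sum() / (probs.shape[0] + 1e-6))
    print(float(conf))  # scalar confidence, no .numpy()[0] indexing needed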
4 changes: 3 additions & 1 deletion test_tipc/prepare.sh
@@ -150,7 +150,9 @@ if [ ${MODE} = "lite_train_lite_infer" ];then
# pretrain lite train data
wget -nc -P ./pretrain_models/ https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/MobileNetV3_large_x0_5_pretrained.pdparams --no-check-certificate
wget -nc -P ./pretrain_models/ https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/det_mv3_db_v2.0_train.tar --no-check-certificate
- cd ./pretrain_models/ && tar xf det_mv3_db_v2.0_train.tar && cd ../
+ cd ./pretrain_models/
+ tar xf det_mv3_db_v2.0_train.tar
+ cd ../
if [[ ${model_name} =~ "ch_PP-OCRv2_det" ]];then
wget -nc -P ./pretrain_models/ https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_det_distill_train.tar --no-check-certificate
cd ./pretrain_models/ && tar xf ch_PP-OCRv2_det_distill_train.tar && cd ../
26 changes: 13 additions & 13 deletions test_tipc/supplementary/train.py
@@ -168,22 +168,22 @@ def train(config, scaler=None):
if idx % 10 == 0:
et = time.time()
strs = f"epoch: [{epoch}/{EPOCH}], iter: [{idx}/{data_num}], "
- strs += f"loss: {avg_loss.numpy()[0]}"
- strs += f", acc_topk1: {acc['top1'].numpy()[0]}, acc_top5: {acc['top5'].numpy()[0]}"
+ strs += f"loss: {float(avg_loss)}"
+ strs += f", acc_topk1: {float(acc['top1'])}, acc_top5: {float(acc['top5'])}"
strs += f", batch_time: {round(et-st, 4)} s"
logger.info(strs)
st = time.time()

if epoch % 10 == 0:
acc = eval(config, model)
- if len(best_acc) < 1 or acc['top5'].numpy()[0] > best_acc['top5']:
+ if len(best_acc) < 1 or float(acc['top5']) > best_acc['top5']:
best_acc = acc
best_acc['epoch'] = epoch
is_best = True
else:
is_best = False
logger.info(
- f"The best acc: acc_topk1: {best_acc['top1'].numpy()[0]}, acc_top5: {best_acc['top5'].numpy()[0]}, best_epoch: {best_acc['epoch']}"
+ f"The best acc: acc_topk1: {float(best_acc['top1'])}, acc_top5: {float(best_acc['top5'])}, best_epoch: {best_acc['epoch']}"
)
save_model(
model,
@@ -276,22 +276,22 @@ def train_distill(config, scaler=None):
if idx % 10 == 0:
et = time.time()
strs = f"epoch: [{epoch}/{EPOCH}], iter: [{idx}/{data_num}], "
- strs += f"loss: {avg_loss.numpy()[0]}"
- strs += f", acc_topk1: {acc['top1'].numpy()[0]}, acc_top5: {acc['top5'].numpy()[0]}"
+ strs += f"loss: {float(avg_loss)}"
+ strs += f", acc_topk1: {float(acc['top1'])}, acc_top5: {float(acc['top5'])}"
strs += f", batch_time: {round(et-st, 4)} s"
logger.info(strs)
st = time.time()

if epoch % 10 == 0:
acc = eval(config, model._layers.student)
- if len(best_acc) < 1 or acc['top5'].numpy()[0] > best_acc['top5']:
+ if len(best_acc) < 1 or float(acc['top5']) > best_acc['top5']:
best_acc = acc
best_acc['epoch'] = epoch
is_best = True
else:
is_best = False
logger.info(
- f"The best acc: acc_topk1: {best_acc['top1'].numpy()[0]}, acc_top5: {best_acc['top5'].numpy()[0]}, best_epoch: {best_acc['epoch']}"
+ f"The best acc: acc_topk1: {float(best_acc['top1'])}, acc_top5: {float(best_acc['top5'])}, best_epoch: {best_acc['epoch']}"
)

save_model(
@@ -401,22 +401,22 @@ def train_distill_multiopt(config, scaler=None):
if idx % 10 == 0:
et = time.time()
strs = f"epoch: [{epoch}/{EPOCH}], iter: [{idx}/{data_num}], "
- strs += f"loss: {avg_loss.numpy()[0]}, loss1: {avg_loss1.numpy()[0]}"
- strs += f", acc_topk1: {acc['top1'].numpy()[0]}, acc_top5: {acc['top5'].numpy()[0]}"
+ strs += f"loss: {float(avg_loss)}, loss1: {float(avg_loss1)}"
+ strs += f", acc_topk1: {float(acc['top1'])}, acc_top5: {float(acc['top5'])}"
strs += f", batch_time: {round(et-st, 4)} s"
logger.info(strs)
st = time.time()

if epoch % 10 == 0:
acc = eval(config, model._layers.student)
- if len(best_acc) < 1 or acc['top5'].numpy()[0] > best_acc['top5']:
+ if len(best_acc) < 1 or float(acc['top5']) > best_acc['top5']:
best_acc = acc
best_acc['epoch'] = epoch
is_best = True
else:
is_best = False
logger.info(
- f"The best acc: acc_topk1: {best_acc['top1'].numpy()[0]}, acc_top5: {best_acc['top5'].numpy()[0]}, best_epoch: {best_acc['epoch']}"
+ f"The best acc: acc_topk1: {float(best_acc['top1'])}, acc_top5: {float(best_acc['top5'])}, best_epoch: {best_acc['epoch']}"
)
save_model(
model, [optimizer, optimizer1],
@@ -450,7 +450,7 @@ def eval(config, model):
labels = paddle.concat(labels, axis=0)
acc = metric_func(outs, labels)

- strs = f"The metric are as follows: acc_topk1: {acc['top1'].numpy()[0]}, acc_top5: {acc['top5'].numpy()[0]}"
+ strs = f"The metric are as follows: acc_topk1: {float(acc['top1'])}, acc_top5: {float(acc['top5'])}"
logger.info(strs)
return acc
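
The same .numpy()[0]-to-float() substitution recurs in train, train_distill, train_distill_multiopt, and eval above. A hypothetical helper (not part of this commit) could centralize the repeated metric formatting:

    def fmt_acc(acc):
        # hypothetical helper, not in this commit: format top-1/top-5
        # metrics whether they are 0-D tensors, shape-[1] tensors, or floats
        return f"acc_topk1: {float(acc['top1'])}, acc_top5: {float(acc['top5'])}"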

