Skip to content
This repository has been archived by the owner on Jul 2, 2021. It is now read-only.

Unify eval detection #786

Merged
merged 11 commits into from
Feb 15, 2019
Merged
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
add eval_detection_multi
  • Loading branch information
Hakuyume committed Feb 14, 2019
commit 617360ec2ecadcf07b4f4aa81b3e9a9baf72e141
50 changes: 50 additions & 0 deletions examples/detection/eval_detection_multi.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
import argparse

import chainer
from chainer import iterators

import chainermn

from chainercv.utils import apply_to_iterator
from chainercv.utils import ProgressHook

from eval_detection import models
from eval_detection import setup


def main():
    """Evaluate a detection model across multiple processes with ChainerMN.

    Command-line flags pick the dataset, model, pretrained weights and batch
    size; the shared ``setup`` helper from ``eval_detection`` resolves them.
    The root rank drives the iterator and runs the evaluation; every other
    rank only serves predictions.
    """
    parser = argparse.ArgumentParser()
    # BUG FIX: the allowed-value collections were passed positionally, which
    # argparse treats as extra option strings and rejects at runtime; they
    # must be supplied via the `choices` keyword.
    parser.add_argument('--dataset', choices=('voc', 'coco'))
    parser.add_argument('--model', choices=sorted(models.keys()))
    parser.add_argument('--pretrained-model')
    parser.add_argument('--batchsize', type=int)
    args = parser.parse_args()

    comm = chainermn.create_communicator()
    # Use the intra-node rank as the GPU id so each process gets its own GPU.
    device = comm.intra_rank

    # label_names is unused here but setup() returns it as part of its
    # fixed tuple; keep the unpacking shape intact.
    dataset, label_names, eval_, model, batchsize = setup(
        args.dataset, args.model, args.pretrained_model, args.batchsize)

    chainer.cuda.get_device_from_id(device).use()
    model.to_gpu()

    model.use_preset('evaluate')

    if comm.rank != 0:
        # Non-root ranks pass no iterator; they only answer prediction
        # requests coordinated through the communicator.
        # NOTE(review): assumes apply_to_iterator accepts a `comm` keyword
        # (added in this PR) — confirm against chainercv.utils.
        apply_to_iterator(model.predict, None, comm=comm)
        return

    iterator = iterators.MultithreadIterator(
        dataset, batchsize, repeat=False, shuffle=False)

    in_values, out_values, rest_values = apply_to_iterator(
        model.predict, iterator, hook=ProgressHook(len(dataset)))
    # delete unused iterators explicitly
    del in_values

    eval_(out_values, rest_values)


# Standard script entry point: run the evaluation only when executed
# directly, not when imported as a module.
if __name__ == '__main__':
    main()