Commit 3700c44: a bunch of fixes
fxia22 committed Apr 27, 2018
1 parent f5cabcc commit 3700c44
Showing 3 changed files with 12 additions and 10 deletions.
datasets.py: 2 changes (1 addition, 1 deletion)
@@ -56,7 +56,7 @@ def __init__(self, root, npoints = 2500, classification = False, class_choice =
self.datapath.append((item, fn[0], fn[1]))


- self.classes = dict(zip(self.cat, range(len(self.cat))))
+ self.classes = dict(zip(sorted(self.cat), range(len(self.cat))))
print(self.classes)
self.num_seg_classes = 0
if not self.classification:
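The datasets.py change makes the class-to-index mapping deterministic: zipping self.cat directly assigns indices in whatever order the category dictionary happens to be iterated, so a model trained under one ordering could be evaluated against another. A minimal sketch of the difference, with plain dicts standing in for self.cat (not code from this commit):

# Minimal sketch, not from this commit: cat_a and cat_b stand in for
# self.cat built with two different insertion orders.
cat_a = {'Chair': '03001627', 'Airplane': '02691156'}
cat_b = {'Airplane': '02691156', 'Chair': '03001627'}

# Unsorted: the index each class receives depends on iteration order,
# e.g. {'Chair': 0, 'Airplane': 1} vs {'Airplane': 0, 'Chair': 1}.
print(dict(zip(cat_a, range(len(cat_a)))))
print(dict(zip(cat_b, range(len(cat_b)))))

# Sorted, as in the new line: both orders give {'Airplane': 0, 'Chair': 1}.
print(dict(zip(sorted(cat_a), range(len(cat_a)))))
print(dict(zip(sorted(cat_b), range(len(cat_b)))))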
show_cls.py: 14 changes (8 additions, 6 deletions)
@@ -24,29 +24,31 @@
parser = argparse.ArgumentParser()

parser.add_argument('--model', type=str, default = '', help='model path')
+ parser.add_argument('--num_points', type=int, default=2500, help='input batch size')


opt = parser.parse_args()
print (opt)

- test_dataset = PartDataset(root = 'shapenetcore_partanno_segmentation_benchmark_v0' , train = False, classification = True)
+ test_dataset = PartDataset(root = 'shapenetcore_partanno_segmentation_benchmark_v0' , train = False, classification = True, npoints = opt.num_points)

- testdataloader = torch.utils.data.DataLoader(test_dataset, batch_size=32, shuffle = False)
+ testdataloader = torch.utils.data.DataLoader(test_dataset, batch_size=32, shuffle = True)


- classifier = PointNetCls(k = len(test_dataset.classes))
+ classifier = PointNetCls(k = len(test_dataset.classes), num_points = opt.num_points)
classifier.cuda()
classifier.load_state_dict(torch.load(opt.model))
classifier.eval()


for i, data in enumerate(testdataloader, 0):
points, target = data
- points, target = Variable(points), Variable(target[:,0])
- points = points.transpose(2,1)
+ points, target = Variable(points), Variable(target[:, 0])
+ points = points.transpose(2, 1)
points, target = points.cuda(), target.cuda()
pred, _ = classifier(points)
loss = F.nll_loss(pred, target)
from IPython import embed; embed()

pred_choice = pred.data.max(1)[1]
correct = pred_choice.eq(target.data).cpu().sum()
print('i:%d loss: %f accuracy: %f' %(i, loss.data[0], correct/float(32)))
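The show_cls.py changes thread a new --num_points option through both the dataset and the classifier, so the number of points sampled per shape at evaluation time matches what the network is built for. A minimal sketch of the flag handling (the checkpoint path cls_model.pth is hypothetical, and explicit arguments are passed to parse_args only so the snippet runs standalone):

# Minimal sketch, assuming this commit's layout: the same --num_points
# value is meant to reach both PartDataset and PointNetCls.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--model', type=str, default='', help='model path')
parser.add_argument('--num_points', type=int, default=2500, help='points sampled per shape')

# show_cls.py calls parser.parse_args(); explicit arguments here only keep
# the sketch self-contained.
opt = parser.parse_args(['--model', 'cls_model.pth', '--num_points', '1024'])

# opt.num_points is then passed as npoints=... to PartDataset and as
# num_points=... to PointNetCls, as the diff above shows, so the evaluated
# point clouds are the size the classifier expects.
print(opt.model, opt.num_points)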
show_seg.py: 6 changes (3 additions, 3 deletions)
@@ -57,12 +57,12 @@

point = Variable(point.view(1, point.size()[0], point.size()[1]))
pred, _ = classifier(point)

pred_choice = pred.data.max(2)[1]
print(pred_choice)

#print(pred_choice.size())
- pred_color = cmap[pred_choice.numpy(), :]
+ pred_color = cmap[pred_choice.numpy()[0], :]

#print(pred_color.shape)

showpoints(point_np, gt, pred_color)
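The show_seg.py fix drops the leading batch axis before looking up colors: the shape is fed to the network with a batch dimension of 1, so pred_choice comes back as a (1, N) array of per-point labels, and indexing cmap with it yields a (1, N, 3) array rather than one RGB row per point. A small NumPy sketch of the shape difference, with random labels standing in for the network output (not code from this commit):

# Minimal NumPy sketch, not from this commit: why the [0] index matters.
import numpy as np

cmap = np.random.uniform(0, 1, (10, 3))             # one RGB color per part label
pred_choice = np.random.randint(0, 10, (1, 2500))   # per-point labels with a batch axis of 1

print(cmap[pred_choice, :].shape)      # (1, 2500, 3): the spurious batch axis is kept
print(cmap[pred_choice[0], :].shape)   # (2500, 3): one color per point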
