Skip to content

Commit

Permalink
add metrics tests for apache#9640 (apache#9759)
Browse files Browse the repository at this point in the history
* add metrics tests

* added myself to contributors list
  • Loading branch information
TheTweak authored and szha committed Feb 11, 2018
1 parent 91d6ed2 commit 05ec81b
Show file tree
Hide file tree
Showing 2 changed files with 43 additions and 0 deletions.
1 change: 1 addition & 0 deletions CONTRIBUTORS.md
Original file line number Diff line number Diff line change
Expand Up @@ -155,3 +155,4 @@ List of Contributors
* [Julian Salazar](https://github.com/JulianSlzr)
* [Meghna Baijal](https://github.com/mbaijal)
* [Tao Hu](https://github.com/dongzhuoyao)
* [Sorokin Evgeniy](https://github.com/TheTweak)
42 changes: 42 additions & 0 deletions tests/python/unittest/test_metric.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,48 @@ def test_nll_loss():
expected_loss = -(np.log(pred[0][2].asscalar()) + np.log(pred[1][1].asscalar())) / 2
assert loss == expected_loss

def test_acc():
    """The 'acc' metric must agree with a manual accuracy computation."""
    predictions = mx.nd.array([[0.3, 0.7], [0, 1.], [0.4, 0.6]])
    labels = mx.nd.array([0, 1, 1])
    metric = mx.metric.create('acc')
    metric.update([labels], [predictions])
    _, measured_acc = metric.get()
    # Accuracy = fraction of rows whose argmax equals the label.
    correct = (np.argmax(predictions, axis=1) == labels).sum().asscalar()
    assert measured_acc == correct / labels.size

def test_f1():
    """The 'f1' metric must agree with a manually computed F1 score.

    Note: the original reference computation counted *all* correct
    predictions (``argmax(pred) == label``) as "true positives", which
    conflates true positives with true negatives.  It only gave the right
    answer here because this fixture happens to contain no true negatives.
    This version computes true positives properly (predicted 1 AND labeled 1).
    """
    pred = mx.nd.array([[0.3, 0.7], [1., 0], [0.4, 0.6], [0.6, 0.4], [0.9, 0.1]])
    label = mx.nd.array([0, 1, 1, 1, 1])

    pred_labels = np.argmax(pred.asnumpy(), axis=1)
    label_np = label.asnumpy()
    # True positives: predicted positive (1) and actually positive.
    true_positives = float(np.logical_and(pred_labels == 1, label_np == 1).sum())
    # Predicted positives (denominator of precision).
    positives = float((pred_labels == 1).sum())
    precision = true_positives / positives
    # Actual positives (denominator of recall).
    overall_positives = float((label_np == 1).sum())
    recall = true_positives / overall_positives
    f1_expected = 2 * (precision * recall) / (precision + recall)

    metric = mx.metric.create('f1')
    metric.update([label], [pred])
    _, f1 = metric.get()
    assert f1 == f1_expected

def test_perplexity():
    """The 'perplexity' metric must equal exp of the mean negative log-likelihood."""
    probs = mx.nd.array([[0.8, 0.2], [0.2, 0.8], [0, 1.]])
    labels = mx.nd.array([0, 1, 1])
    # Probability assigned to the correct class for each sample.
    label_idx = labels.asnumpy().astype('int32')
    picked = probs.asnumpy()[np.arange(labels.size), label_idx]
    expected = np.exp(-np.log(picked).sum() / labels.size)
    metric = mx.metric.create('perplexity', -1)
    metric.update([labels], [probs])
    _, measured = metric.get()
    assert measured == expected

def test_pearsonr():
    """The 'pearsonr' metric must agree with numpy's correlation coefficient."""
    predictions = mx.nd.array([[0.7, 0.3], [0.1, 0.9], [1., 0]])
    labels = mx.nd.array([[0, 1], [1, 0], [1, 0]])
    # np.corrcoef returns the 2x2 correlation matrix; the off-diagonal
    # entry is Pearson's r between the two flattened vectors.
    expected = np.corrcoef(predictions.asnumpy().ravel(),
                           labels.asnumpy().ravel())[0, 1]
    metric = mx.metric.create('pearsonr')
    metric.update([labels], [predictions])
    _, measured = metric.get()
    assert measured == expected

if __name__ == '__main__':
    # Run all tests in this module via nose when executed directly.
    import nose
    nose.runmodule()

0 comments on commit 05ec81b

Please sign in to comment.