Commit fa2c5d6

Circle CI authored and committed
CircleCI update of dev docs (2955).
1 parent e164e0e commit fa2c5d6

273 files changed: 733626 additions, 731354 deletions

Lines changed: 61 additions & 0 deletions
@@ -0,0 +1,61 @@
{
  "cells": [
    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "\n# Different gradient computations for regularized optimal transport\n\nThis example illustrates the differences in terms of computation time between the gradient options for the Sinkhorn solver.\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "collapsed": false
      },
      "outputs": [],
      "source": [
        "# Author: Sonia Mazelet <sonia.mazelet@polytechnique.edu>\n#\n# License: MIT License\n\n# sphinx_gallery_thumbnail_number = 1\n\nimport matplotlib.pylab as pl\nimport ot\nfrom ot.backend import torch"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "## Time comparison of the Sinkhorn solver for different gradient options\n\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "collapsed": false
      },
      "outputs": [],
      "source": [
        "n_trials = 10\ntimes_autodiff = torch.zeros(n_trials)\ntimes_envelope = torch.zeros(n_trials)\ntimes_last_step = torch.zeros(n_trials)\n\nn_samples_s = 300\nn_samples_t = 300\nn_features = 5\nreg = 0.03\n\n# Time required for the Sinkhorn solver and gradient computations, for different gradient options over multiple Gaussian distributions\nfor i in range(n_trials):\n x = torch.rand((n_samples_s, n_features))\n y = torch.rand((n_samples_t, n_features))\n a = ot.utils.unif(n_samples_s)\n b = ot.utils.unif(n_samples_t)\n M = ot.dist(x, y)\n\n a = torch.tensor(a, requires_grad=True)\n b = torch.tensor(b, requires_grad=True)\n M = M.clone().detach().requires_grad_(True)\n\n # autodiff provides the gradient for all the outputs (plan, value, value_linear)\n ot.tic()\n res_autodiff = ot.solve(M, a, b, reg=reg, grad=\"autodiff\")\n res_autodiff.value.backward()\n times_autodiff[i] = ot.toq()\n\n a = a.clone().detach().requires_grad_(True)\n b = b.clone().detach().requires_grad_(True)\n M = M.clone().detach().requires_grad_(True)\n\n # envelope provides the gradient for value\n ot.tic()\n res_envelope = ot.solve(M, a, b, reg=reg, grad=\"envelope\")\n res_envelope.value.backward()\n times_envelope[i] = ot.toq()\n\n a = a.clone().detach().requires_grad_(True)\n b = b.clone().detach().requires_grad_(True)\n M = M.clone().detach().requires_grad_(True)\n\n # last_step provides the gradient for all the outputs, but only for the last iteration of the Sinkhorn algorithm\n ot.tic()\n res_last_step = ot.solve(M, a, b, reg=reg, grad=\"last_step\")\n res_last_step.value.backward()\n times_last_step[i] = ot.toq()\n\npl.figure(1, figsize=(5, 3))\npl.ticklabel_format(axis=\"y\", style=\"sci\", scilimits=(0, 0))\npl.boxplot(\n ([times_autodiff, times_envelope, times_last_step]),\n tick_labels=[\"autodiff\", \"envelope\", \"last_step\"],\n showfliers=False,\n)\npl.ylabel(\"Time (s)\")\npl.show()"
      ]
    }
  ],
  "metadata": {
    "kernelspec": {
      "display_name": "Python 3",
      "language": "python",
      "name": "python3"
    },
    "language_info": {
      "codemirror_mode": {
        "name": "ipython",
        "version": 3
      },
      "file_extension": ".py",
      "mimetype": "text/x-python",
      "name": "python",
      "nbconvert_exporter": "python",
      "pygments_lexer": "ipython3",
      "version": "3.10.15"
    }
  },
  "nbformat": 4,
  "nbformat_minor": 0
}
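The notebook added above benchmarks three gradient modes of the Sinkhorn solver; per the comments in its code cell, "autodiff" exposes gradients for all outputs (plan, value, value_linear), "envelope" only for the value, and "last_step" for all outputs but only through the final Sinkhorn iteration. For readers skimming this dev-docs commit, the core pattern reduces to the minimal sketch below, assuming PyTorch is installed and a POT version that provides ot.solve with the grad option used in the notebook; the sample sizes and regularization value here are illustrative only.

# Minimal sketch, assuming PyTorch and a POT release exposing ot.solve(..., grad=...).
# Sizes, reg value, and the choice of "envelope" mirror the notebook above but are illustrative.
import torch
import ot

n_s, n_t, n_features = 50, 60, 3
x = torch.rand((n_s, n_features))
y = torch.rand((n_t, n_features))

# Uniform marginals and squared-Euclidean cost, made differentiable
a = torch.tensor(ot.utils.unif(n_s), requires_grad=True)
b = torch.tensor(ot.utils.unif(n_t), requires_grad=True)
M = ot.dist(x, y).clone().detach().requires_grad_(True)

# "envelope" differentiates only the value output (see the notebook cell comments)
res = ot.solve(M, a, b, reg=0.03, grad="envelope")
res.value.backward()
print(M.grad.shape)  # gradient of the regularized OT value w.r.t. the cost matrix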

0 commit comments
