
Simplified renaming of various core functions and classes #124


Merged: 14 commits, merged on Feb 26, 2023
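A minimal before/after sketch of the renames this PR introduces, collected from the changed lines below. It is only a summary for reviewers, under the assumption that the new names behave exactly like the old ones:

```python
import torchhd

d = 10000

# New names introduced by this PR; the pre-PR equivalents are noted in the comments.
keys = torchhd.random(3, d)                      # was torchhd.random_hv(3, d)
levels = torchhd.level(10, d)                    # was torchhd.level_hv(10, d)
ring = torchhd.circular(4, d)                    # was torchhd.circular_hv(4, d)
sims = torchhd.cosine_similarity(keys[0], keys)  # was torchhd.cos_similarity(...)

# Class and utility renames (not exercised here): VSA_Model -> VSATensor,
# BSC/MAP/HRR/FHRR -> BSCTensor/MAPTensor/HRRTensor/FHRRTensor,
# as_vsa_model -> ensure_vsa_tensor, memory.SDM -> memory.SparseDistributed.
```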
18 changes: 9 additions & 9 deletions README.md
@@ -56,12 +56,12 @@ import torch, torchhd
d = 10000 # number of dimensions

# create the hypervectors for each symbol
- keys = torchhd.random_hv(3, d)
+ keys = torchhd.random(3, d)
country, capital, currency = keys

- usa, mex = torchhd.random_hv(2, d) # United States and Mexico
- wdc, mxc = torchhd.random_hv(2, d) # Washington D.C. and Mexico City
- usd, mxn = torchhd.random_hv(2, d) # US Dollar and Mexican Peso
+ usa, mex = torchhd.random(2, d) # United States and Mexico
+ wdc, mxc = torchhd.random(2, d) # Washington D.C. and Mexico City
+ usd, mxn = torchhd.random(2, d) # US Dollar and Mexican Peso

# create country representations
us_values = torch.stack([usa, wdc, usd])
@@ -77,7 +77,7 @@ mx_us = torchhd.bind(torchhd.inverse(us), mx)
usd_of_mex = torchhd.bind(mx_us, usd)

memory = torch.cat([keys, us_values, mx_values], dim=0)
- torchhd.cos_similarity(usd_of_mex, memory)
+ torchhd.cosine_similarity(usd_of_mex, memory)
# tensor([-0.0062, 0.0123, -0.0057, -0.0019, -0.0084, -0.0078, 0.0102, 0.0057, 0.3292])
# The hypervector for the Mexican Peso is the most similar.
```
@@ -87,10 +87,10 @@ This example is from the paper [What We Mean When We Say "What's the Dollar of M
## Supported HDC/VSA models
Currently, the library supports the following HDC/VSA models:

- - [Multiply-Add-Permute (MAP)](https://torchhd.readthedocs.io/en/stable/generated/torchhd.MAP.html)
- - [Binary Spatter Codes (BSC)](https://torchhd.readthedocs.io/en/stable/generated/torchhd.BSC.html)
- - [Holographic Reduced Representations (HRR)](https://torchhd.readthedocs.io/en/stable/generated/torchhd.HRR.html)
- - [Fourier Holographic Reduced Representations (FHRR)](https://torchhd.readthedocs.io/en/stable/generated/torchhd.FHRR.html)
+ - [Multiply-Add-Permute (MAP)](https://torchhd.readthedocs.io/en/stable/generated/torchhd.MAPTensor.html)
+ - [Binary Spatter Codes (BSC)](https://torchhd.readthedocs.io/en/stable/generated/torchhd.BSCTensor.html)
+ - [Holographic Reduced Representations (HRR)](https://torchhd.readthedocs.io/en/stable/generated/torchhd.HRRTensor.html)
+ - [Fourier Holographic Reduced Representations (FHRR)](https://torchhd.readthedocs.io/en/stable/generated/torchhd.FHRRTensor.html)

We welcome anyone to help with contributing more models to the library!

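As a quick check of the renamed similarity call above, the README's retrieval step can be run end to end. This is a sketch under the assumption that the country records are encoded with torchhd.hash_table, as in the getting-started docs; only the lines shown in the hunks are taken verbatim:

```python
import torch, torchhd

d = 10000
keys = torchhd.random(3, d)                      # country, capital, currency
usa, mex = torchhd.random(2, d)
wdc, mxc = torchhd.random(2, d)
usd, mxn = torchhd.random(2, d)

us_values = torch.stack([usa, wdc, usd])
mx_values = torch.stack([mex, mxc, mxn])
us = torchhd.hash_table(keys, us_values)         # assumed record encoding
mx = torchhd.hash_table(keys, mx_values)

mx_us = torchhd.bind(torchhd.inverse(us), mx)    # analogy mapping US -> Mexico
usd_of_mex = torchhd.bind(mx_us, usd)            # "what's the dollar of Mexico?"

memory = torch.cat([keys, us_values, mx_values], dim=0)
winner = torchhd.cosine_similarity(usd_of_mex, memory).argmax()
# winner should be 8, the row holding mxn (the Mexican Peso), matching the README output.
```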
22 changes: 11 additions & 11 deletions docs/getting_started.rst
@@ -33,13 +33,13 @@ The first step to encode these records is to define the basis-hypervectors for e

.. code-block:: python

- from torchhd import functional
+ import torchhd

d = 10000 # dimensions
- fruits = functional.random_hv(3, d)
- weights = functional.level_hv(10, d)
- seasons = functional.circular_hv(4, d)
- var = functional.random_hv(3, d)
+ fruits = torchhd.random(3, d)
+ weights = torchhd.level(10, d)
+ seasons = torchhd.circular(4, d)
+ var = torchhd.random(3, d)

which creates hypervectors for the 3 fruit types, 10 weight levels, 4 seasons and the 3 variables. The figure below illustrates the distance between the pairs of hypervectors in each set:

@@ -55,7 +55,7 @@ Similar behavior can be achieved using the classes in the :ref:`embeddings` modu

weight = torch.tensor([149.0])
# explicit mapping of the fruit weight to an index
- w_i = functional.value_to_index(weight, 0, 200, 10)
+ w_i = torchhd.functional.value_to_index(weight, 0, 200, 10)
weights[w_i] # select representation of 149

whereas the :ref:`embeddings<embeddings>` have this common behavior built-in:
@@ -75,10 +75,10 @@ Once the basis-hypervectors are defined, we can use the MAP operations from :ref

.. code-block:: python

- f = functional.bind(var[0], fruits[0]) # fruit = apple
- w = functional.bind(var[1], weights[w_i]) # weight = 149
- s = functional.bind(var[2], seasons[3]) # season = fall
- r1 = functional.bundle(functional.bundle(f, w), s)
+ f = torchhd.bind(var[0], fruits[0]) # fruit = apple
+ w = torchhd.bind(var[1], weights[w_i]) # weight = 149
+ s = torchhd.bind(var[2], seasons[3]) # season = fall
+ r1 = torchhd.bundle(torchhd.bundle(f, w), s)

which is equivalent to using the following shortened syntax:

@@ -95,7 +95,7 @@ Alternatively, we can use one of the commonly used encodings provided in the :re

# combine values in one tensor of shape (3, d)
values = torch.stack([fruits[0], weights[w_i], seasons[3]])
- r1 = functional.hash_table(var, values)
+ r1 = torchhd.hash_table(var, values)

The :ref:`structures` module contains the same encoding patterns in addition to binary trees and finite state automata, but provides them as data structures. This module provides class-based implementations of HDC data structures. Using the hash table class, record :math:`r_1` can be represented as follows:

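Pulling the renamed getting-started snippets together, here is a sketch of the full record encoding under the new names. The value_to_index arguments and the hash_table shortcut come from the hunks above; the int() conversion is an assumption added so that indexing returns a single hypervector:

```python
import torch, torchhd

d = 10000
fruits = torchhd.random(3, d)
weights = torchhd.level(10, d)
seasons = torchhd.circular(4, d)
var = torchhd.random(3, d)                       # fruit, weight, season role vectors

weight = torch.tensor([149.0])
w_i = int(torchhd.functional.value_to_index(weight, 0, 200, 10))

# explicit bind/bundle encoding ...
f = torchhd.bind(var[0], fruits[0])              # fruit = apple
w = torchhd.bind(var[1], weights[w_i])           # weight = 149
s = torchhd.bind(var[2], seasons[3])             # season = fall
r1 = torchhd.bundle(torchhd.bundle(f, w), s)

# ... or the equivalent hash_table shortcut
values = torch.stack([fruits[0], weights[w_i], seasons[3]])
r1_alt = torchhd.hash_table(var, values)         # should match r1 for the default MAP model
```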
2 changes: 1 addition & 1 deletion docs/memory.rst
@@ -9,4 +9,4 @@ torchhd.memory
:toctree: generated/
:template: class.rst

- SDM
+ SparseDistributed
26 changes: 13 additions & 13 deletions docs/torchhd.rst
@@ -14,12 +14,12 @@ Basis-hypervector sets
:toctree: generated/
:template: function.rst

- empty_hv
- identity_hv
- random_hv
- level_hv
- thermometer_hv
- circular_hv
+ empty
+ identity
+ random
+ level
+ thermometer
+ circular


Operations
@@ -50,7 +50,7 @@ Similarities
:toctree: generated/
:template: function.rst

- cos_similarity
+ cosine_similarity
dot_similarity
hamming_similarity

@@ -79,11 +79,11 @@ VSA Models
:toctree: generated/
:template: class.rst

- VSA_Model
- BSC
- MAP
- HRR
- FHRR
+ VSATensor
+ BSCTensor
+ MAPTensor
+ HRRTensor
+ FHRRTensor


Utilities
@@ -93,7 +93,7 @@ Utilities
:toctree: generated/
:template: function.rst

- as_vsa_model
+ ensure_vsa_tensor
map_range
value_to_index
index_to_value
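A small sketch exercising the renamed constructors and similarity functions listed above, assuming the default MAP model with bipolar elements (the commented values describe expected behavior, not captured output):

```python
import torchhd

d = 10000
x, y = torchhd.random(2, d)

print(torchhd.dot_similarity(x, y))      # near zero for unrelated random hypervectors
print(torchhd.cosine_similarity(x, x))   # 1.0: a hypervector is maximally similar to itself
print(torchhd.hamming_similarity(x, y))  # about d / 2 matching elements for a random pair

e = torchhd.identity(1, d)[0]
print(torchhd.cosine_similarity(torchhd.bind(x, e), x))  # 1.0: identity is neutral under bind for MAP
```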
2 changes: 1 addition & 1 deletion examples/hd_hashing.py
@@ -14,7 +14,7 @@ def __init__(self, levels: int, dimensions: int, device=None):
self.dimensions = dimensions
self.device = device

- self.hvs = torchhd.circular_hv(levels, dimensions, device=device)
+ self.hvs = torchhd.circular(levels, dimensions, device=device)
self.servers = []
self.server_hvs = []
self.weight_by_server = {}
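The consistent-hashing example relies on the circular structure of the hypervectors it builds; a quick sketch of that property with the renamed constructor (similarity values are illustrative):

```python
import torchhd

levels = torchhd.circular(8, 10000)
sims = torchhd.cosine_similarity(levels[0], levels)
print(sims)
# Similarity to levels[0] decreases toward the "opposite" level (index 4) and
# increases again toward index 7, the wrap-around structure the hashing example relies on.
```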
56 changes: 30 additions & 26 deletions torchhd/__init__.py
@@ -6,20 +6,20 @@
import torchhd.datasets as datasets
import torchhd.utils as utils

- from torchhd.base import VSA_Model
- from torchhd.bsc import BSC
- from torchhd.map import MAP
- from torchhd.hrr import HRR
- from torchhd.fhrr import FHRR
+ from torchhd.tensors.base import VSATensor
+ from torchhd.tensors.bsc import BSCTensor
+ from torchhd.tensors.map import MAPTensor
+ from torchhd.tensors.hrr import HRRTensor
+ from torchhd.tensors.fhrr import FHRRTensor

from torchhd.functional import (
- as_vsa_model,
- empty_hv,
- identity_hv,
- random_hv,
- level_hv,
- thermometer_hv,
- circular_hv,
+ ensure_vsa_tensor,
+ empty,
+ identity,
+ random,
+ level,
+ thermometer,
+ circular,
bind,
bundle,
permute,
@@ -31,8 +31,10 @@
multirandsel,
soft_quantize,
hard_quantize,
- cos_similarity,
+ cosine_similarity,
+ cos,
dot_similarity,
+ dot,
hamming_similarity,
multiset,
multibundle,
@@ -53,25 +55,25 @@

__all__ = [
"__version__",
"VSA_Model",
"BSC",
"MAP",
"HRR",
"FHRR",
"VSATensor",
"BSCTensor",
"MAPTensor",
"HRRTensor",
"FHRRTensor",
"functional",
"embeddings",
"structures",
"models",
"memory",
"datasets",
"utils",
"as_vsa_model",
"empty_hv",
"identity_hv",
"random_hv",
"level_hv",
"thermometer_hv",
"circular_hv",
"ensure_vsa_tensor",
"empty",
"identity",
"random",
"level",
"thermometer",
"circular",
"bind",
"bundle",
"permute",
@@ -83,8 +85,10 @@
"multirandsel",
"soft_quantize",
"hard_quantize",
"cos_similarity",
"cosine_similarity",
"cos",
"dot_similarity",
"dot",
"hamming_similarity",
"multiset",
"multibundle",
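Besides the renames, these hunks export the short forms cos and dot next to cosine_similarity and dot_similarity. A hedged sketch assuming they are plain aliases (the diff only shows the exports, not their definitions):

```python
import torch, torchhd

a, b = torchhd.random(2, 10000)

# If cos and dot are aliases (assumption), these pairs should match exactly.
print(torch.equal(torchhd.cos(a, b), torchhd.cosine_similarity(a, b)))
print(torch.equal(torchhd.dot(a, b), torchhd.dot_similarity(a, b)))
```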