Skip to content

Commit bb72ce4

Browse files
authored
Rename sequence (#68)
* Rename functions and classes * Rename in tests * Rename in docs * Add multibundle alias
1 parent ca01742 commit bb72ce4

File tree

7 files changed

+94
-91
lines changed

7 files changed

+94
-91
lines changed

docs/functional.rst

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -57,8 +57,8 @@ Encodings
5757

5858
multiset
5959
multibind
60-
sequence
61-
distinct_sequence
60+
bundle_sequence
61+
bind_sequence
6262
hash_table
6363
cross_product
6464
ngrams

docs/structures.rst

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -14,8 +14,8 @@ This module provides class-based implementations of HDC data structures.
1414
Memory
1515
Multiset
1616
HashTable
17-
Sequence
18-
DistinctSequence
17+
BundleSequence
18+
BindSequence
1919
Graph
2020
Tree
2121
FiniteStateAutomata

torchhd/functional.py

Lines changed: 13 additions & 10 deletions
Original file line number | Diff line number | Diff line change
@@ -23,8 +23,8 @@
2323
"multiset",
2424
"multibind",
2525
"cross_product",
26-
"sequence",
27-
"distinct_sequence",
26+
"bundle_sequence",
27+
"bind_sequence",
2828
"ngrams",
2929
"hash_table",
3030
"map_range",
@@ -621,13 +621,13 @@ def hamming_similarity(input: Tensor, others: Tensor) -> LongTensor:
621621
return torch.sum(input == others, dim=-1, dtype=torch.long)
622622

623623

624-
def multiset(
625-
input: Tensor,
626-
) -> Tensor:
624+
def multiset(input: Tensor) -> Tensor:
627625
r"""Multiset of input hypervectors.
628626
629627
Bundles all the input hypervectors together.
630628
629+
Aliased as ``torchhd.functional.multibundle``.
630+
631631
.. math::
632632
633633
\bigoplus_{i=0}^{n-1} V_i
@@ -662,6 +662,9 @@ def multiset(
662662
return torch.sum(input, dim=-2, dtype=input.dtype)
663663

664664

665+
multibundle = multiset
666+
667+
665668
def multibind(input: Tensor) -> Tensor:
666669
r"""Binding of multiple hypervectors.
667670
@@ -747,7 +750,7 @@ def ngrams(input: Tensor, n: int = 3) -> Tensor:
747750
\bigoplus_{i=0}^{m - n} \bigotimes_{j = 0}^{n - 1} \Pi^{n - j - 1}(V_{i + j})
748751
749752
.. note::
750-
For :math:`n=1` use :func:`~torchhd.functional.multiset` instead and for :math:`n=m` use :func:`~torchhd.functional.distinct_sequence` instead.
753+
For :math:`n=1` use :func:`~torchhd.functional.multiset` instead and for :math:`n=m` use :func:`~torchhd.functional.bind_sequence` instead.
751754
752755
Args:
753756
input (Tensor): The value hypervectors.
@@ -812,7 +815,7 @@ def hash_table(keys: Tensor, values: Tensor) -> Tensor:
812815
return multiset(bind(keys, values))
813816

814817

815-
def sequence(input: Tensor) -> Tensor:
818+
def bundle_sequence(input: Tensor) -> Tensor:
816819
r"""Bundling-based sequence.
817820
818821
The first value is permuted :math:`n-1` times, the last value is not permuted.
@@ -837,7 +840,7 @@ def sequence(input: Tensor) -> Tensor:
837840
[ 1., 1., 1.],
838841
[-1., -1., -1.],
839842
[ 1., 1., 1.]])
840-
>>> functional.sequence(x)
843+
>>> functional.bundle_sequence(x)
841844
tensor([-1., 3., 1.])
842845
843846
"""
@@ -851,7 +854,7 @@ def sequence(input: Tensor) -> Tensor:
851854
return multiset(permuted)
852855

853856

854-
def distinct_sequence(input: Tensor) -> Tensor:
857+
def bind_sequence(input: Tensor) -> Tensor:
855858
r"""Binding-based sequence.
856859
857860
The first value is permuted :math:`n-1` times, the last value is not permuted.
@@ -876,7 +879,7 @@ def distinct_sequence(input: Tensor) -> Tensor:
876879
[ 1., -1., -1.],
877880
[ 1., -1., -1.],
878881
[-1., -1., -1.]])
879-
>>> functional.distinct_sequence(x)
882+
>>> functional.bind_sequence(x)
880883
tensor([-1., 1., 1.])
881884
882885
"""

torchhd/structures.py

Lines changed: 26 additions & 26 deletions
Original file line number | Diff line number | Diff line change
@@ -8,8 +8,8 @@
88
"Memory",
99
"Multiset",
1010
"HashTable",
11-
"Sequence",
12-
"DistinctSequence",
11+
"BundleSequence",
12+
"BindSequence",
1313
"Graph",
1414
"Tree",
1515
"FiniteStateAutomata",
@@ -134,7 +134,7 @@ def __delitem__(self, key: Tensor) -> None:
134134

135135

136136
class Multiset:
137-
"""Hypervector-based multiset data structure.
137+
"""Hypervector multiset data structure.
138138
139139
Creates an empty multiset of dim dimensions or from an input tensor.
140140
@@ -278,7 +278,7 @@ def from_tensor(cls, input: Tensor):
278278

279279

280280
class HashTable:
281-
"""Hypervector-based hash table data structure.
281+
"""Hypervector hash table data structure.
282282
283283
Creates an empty hash table of dim dimensions or a hash table from an input tensor.
284284
@@ -437,8 +437,8 @@ def from_tensors(cls, keys: Tensor, values: Tensor):
437437
return cls(value, size=keys.size(-2))
438438

439439

440-
class Sequence:
441-
"""Hypervector-based (bundling-based) sequence data structure
440+
class BundleSequence:
441+
"""Hypervector bundling-based sequence data structure
442442
443443
Creates an empty sequence of dim dimensions or from an input tensor.
444444
@@ -453,11 +453,11 @@ class Sequence:
453453
454454
Examples::
455455
456-
>>> S = structures.Sequence(10000)
456+
>>> S = structures.BundleSequence(10000)
457457
458458
>>> letters = list(string.ascii_lowercase)
459459
>>> letters_hv = functional.random_hv(len(letters), 10000)
460-
>>> S = structures.Sequence(letters_hv[0], size=1)
460+
>>> S = structures.BundleSequence(letters_hv[0], size=1)
461461
462462
"""
463463

@@ -559,21 +559,21 @@ def replace(self, index: int, old: Tensor, new: Tensor) -> None:
559559
rotated_new = functional.permute(new, shifts=self.size - index - 1)
560560
self.value = functional.bundle(self.value, rotated_new)
561561

562-
def concat(self, seq: "Sequence") -> "Sequence":
562+
def concat(self, seq: "BundleSequence") -> "BundleSequence":
563563
"""Concatenates the current sequence with the given one.
564564
565565
Args:
566566
seq (Sequence): Sequence to be concatenated with the current one.
567567
568568
Examples::
569569
570-
>>> S1 = structures.Sequence(dimensions=10000)
570+
>>> S1 = structures.BundleSequence(dimensions=10000)
571571
>>> S2 = S.concat(S1)
572572
573573
"""
574574
value = functional.permute(self.value, shifts=len(seq))
575575
value = functional.bundle(value, seq.value)
576-
return Sequence(value, size=len(self) + len(seq))
576+
return BundleSequence(value, size=len(self) + len(seq))
577577

578578
def __getitem__(self, index: int) -> Tensor:
579579
"""Gets the approximate value from given index.
@@ -615,37 +615,37 @@ def clear(self) -> None:
615615
def from_tensor(cls, input: Tensor):
616616
"""Creates a sequence from hypervectors.
617617
618-
See: :func:`~torchhd.functional.sequence`.
618+
See: :func:`~torchhd.functional.bundle_sequence`.
619619
620620
Args:
621-
input (Tensor): Tensor representing a sequence.
621+
input (Tensor): Tensor containing hypervectors that form the sequence.
622622
623623
Examples::
624624
>>> letters_hv = functional.random_hv(len(letters), 10000)
625-
>>> S = structures.Sequence.from_tensor(letters_hv)
625+
>>> S = structures.BundleSequence.from_tensor(letters_hv)
626626
627627
"""
628-
value = functional.sequence(input)
628+
value = functional.bundle_sequence(input)
629629
return cls(value, size=input.size(-2))
630630

631631

632-
class DistinctSequence:
633-
"""Hypervector-based distinct (binding-based) sequence data structure.
632+
class BindSequence:
633+
"""Hypervector binding-based sequence data structure.
634634
635635
Creates an empty sequence of dim dimensions or from an input tensor.
636636
637637
Args:
638-
dimensions (int): number of dimensions of the distinct sequence.
638+
dimensions (int): number of dimensions of the sequence.
639639
dtype (``torch.dtype``, optional): the desired data type of returned tensor. Default: if ``None``, uses a global default (see ``torch.set_default_tensor_type()``).
640640
device (``torch.device``, optional): the desired device of returned tensor. Default: if ``None``, uses the current device for the default tensor type (see torch.set_default_tensor_type()). ``device`` will be the CPU for CPU tensor types and the current CUDA device for CUDA tensor types.
641641
642642
Args:
643-
input (Tensor): tensor representing a distinct sequence.
644-
size (int, optional): the length of the distinct sequence provided as input. Default: ``0``.
643+
input (Tensor): tensor representing a binding-based sequence.
644+
size (int, optional): the length of the sequence provided as input. Default: ``0``.
645645
646646
Examples::
647647
648-
>>> DS = structures.DistinctSequence(10000)
648+
>>> DS = structures.BindSequence(10000)
649649
"""
650650

651651
@overload
@@ -739,7 +739,7 @@ def replace(self, index: int, old: Tensor, new: Tensor) -> None:
739739
740740
Examples::
741741
742-
>>> DS1 = structures.DistinctSequence(dimensions=10000)
742+
>>> DS1 = structures.BindSequence(dimensions=10000)
743743
>>> DS.concat(DS1)
744744
745745
"""
@@ -775,17 +775,17 @@ def clear(self) -> None:
775775
def from_tensor(cls, input: Tensor):
776776
"""Creates a sequence from tensor.
777777
778-
See: :func:`~torchhd.functional.distinct_sequence`.
778+
See: :func:`~torchhd.functional.bind_sequence`.
779779
780780
Args:
781-
input (Tensor): Tensor representing a sequence.
781+
input (Tensor): Tensor containing hypervectors that form the sequence.
782782
783783
Examples::
784784
>>> letters_hv = functional.random_hv(len(letters), 10000)
785-
>>> DS = structures.DistinctSequence.from_tensor(letters_hv)
785+
>>> DS = structures.BindSequence.from_tensor(letters_hv)
786786
787787
"""
788-
value = functional.distinct_sequence(input)
788+
value = functional.bind_sequence(input)
789789
return cls(value, size=input.size(-2))
790790

791791

torchhd/tests/structures/test_distinct_sequence.py

Lines changed: 10 additions & 10 deletions
Original file line number | Diff line number | Diff line change
@@ -8,17 +8,17 @@
88
letters = list(string.ascii_lowercase)
99

1010

11-
class TestDistinctSequence:
11+
class TestBindSequence:
1212
def test_creation_dim(self):
13-
S = structures.DistinctSequence(10000)
13+
S = structures.BindSequence(10000)
1414
assert torch.equal(S.value, torch.ones(10000))
1515

1616
def test_creation_tensor(self):
1717
generator = torch.Generator()
1818
generator.manual_seed(seed)
1919
hv = functional.random_hv(len(letters), 10000, generator=generator)
2020

21-
S = structures.DistinctSequence(hv[0])
21+
S = structures.BindSequence(hv[0])
2222
assert torch.equal(S.value, hv[0])
2323

2424
def test_generator(self):
@@ -36,23 +36,23 @@ def test_append(self):
3636
generator = torch.Generator()
3737
generator.manual_seed(seed)
3838
hv = functional.random_hv(len(letters), 10000, generator=generator)
39-
S = structures.DistinctSequence(10000)
39+
S = structures.BindSequence(10000)
4040
S.append(hv[0])
4141
assert functional.cosine_similarity(S.value, hv)[0] > 0.5
4242

4343
def test_appendleft(self):
4444
generator = torch.Generator()
4545
generator.manual_seed(seed)
4646
hv = functional.random_hv(len(letters), 10000, generator=generator)
47-
S = structures.DistinctSequence(10000)
47+
S = structures.BindSequence(10000)
4848
S.appendleft(hv[0])
4949
assert functional.cosine_similarity(S.value, hv)[0] > 0.5
5050

5151
def test_pop(self):
5252
generator = torch.Generator()
5353
generator.manual_seed(seed)
5454
hv = functional.random_hv(len(letters), 10000, generator=generator)
55-
S = structures.DistinctSequence(10000)
55+
S = structures.BindSequence(10000)
5656
S.append(hv[0])
5757
S.append(hv[1])
5858
S.pop(hv[1])
@@ -68,7 +68,7 @@ def test_popleft(self):
6868
generator = torch.Generator()
6969
generator.manual_seed(seed)
7070
hv = functional.random_hv(len(letters), 10000, generator=generator)
71-
S = structures.DistinctSequence(10000)
71+
S = structures.BindSequence(10000)
7272
S.appendleft(hv[0])
7373
S.appendleft(hv[1])
7474
S.popleft(hv[1])
@@ -84,7 +84,7 @@ def test_replace(self):
8484
generator = torch.Generator()
8585
generator.manual_seed(seed)
8686
hv = functional.random_hv(len(letters), 10000, generator=generator)
87-
S = structures.DistinctSequence(10000)
87+
S = structures.BindSequence(10000)
8888
S.append(hv[0])
8989
assert functional.cosine_similarity(S.value, hv)[0] > 0.5
9090
S.replace(0, hv[0], hv[1])
@@ -94,7 +94,7 @@ def test_length(self):
9494
generator = torch.Generator()
9595
generator.manual_seed(seed)
9696
hv = functional.random_hv(len(letters), 10000, generator=generator)
97-
S = structures.DistinctSequence(10000)
97+
S = structures.BindSequence(10000)
9898
S.append(hv[0])
9999
S.append(hv[0])
100100
S.append(hv[0])
@@ -113,7 +113,7 @@ def test_clear(self):
113113
generator = torch.Generator()
114114
generator.manual_seed(seed)
115115
hv = functional.random_hv(len(letters), 10000, generator=generator)
116-
S = structures.DistinctSequence(10000)
116+
S = structures.BindSequence(10000)
117117
S.append(hv[0])
118118
S.append(hv[0])
119119
S.append(hv[0])

0 commit comments

Comments (0)