|
| 1 | +from typing import Any, List, Optional, Tuple |
| 2 | +import torch |
| 3 | + |
| 4 | +import torchhd.functional as functional |
| 5 | + |
| 6 | + |
class Memory:
    """Hetero-associative memory of (key, value) pairs.

    Keys are hypervectors (torch tensors); values may be any Python object.
    Lookup is approximate: a query key matches the stored key with the
    highest cosine similarity, provided that similarity reaches ``threshold``.
    """

    def __init__(self, threshold=0.5):
        # Minimum cosine similarity for a query to count as a match.
        self.threshold = threshold
        self.keys: List[torch.Tensor] = []
        self.values: List[Any] = []

    def __len__(self) -> int:
        """Returns the number of items in memory."""
        return len(self.values)

    def add(self, key: torch.Tensor, value: Any) -> None:
        """Adds one (key, value) pair to memory."""
        self.keys.append(key)
        self.values.append(value)

    def _get_index(self, key: torch.Tensor) -> int:
        """Returns the index of the stored key most similar to ``key``.

        Raises:
            IndexError: if the memory is empty or no stored key is
                similar enough to the query.
        """
        if not self.keys:
            # torch.stack on an empty list raises RuntimeError; an empty
            # memory is simply a lookup miss, so raise IndexError instead.
            raise IndexError("memory is empty")

        key_stack = torch.stack(self.keys, dim=0)
        sim = functional.cosine_similarity(key, key_stack)
        value, index = torch.max(sim, 0)

        if value.item() < self.threshold:
            raise IndexError("no stored key is similar enough to the query")

        # Convert the 0-dim index tensor to a plain int to match the
        # declared return type.
        return int(index.item())

    def __getitem__(self, key: torch.Tensor) -> Tuple[torch.Tensor, Any]:
        """Get the (key, value) pair with an approximate key"""
        index = self._get_index(key)
        return self.keys[index], self.values[index]

    def __setitem__(self, key: torch.Tensor, value: Any) -> None:
        """Set the value of an (key, value) pair with an approximate key"""
        index = self._get_index(key)
        self.values[index] = value

    def __delitem__(self, key: torch.Tensor) -> None:
        """Delete the (key, value) pair with an approximate key"""
        index = self._get_index(key)
        del self.keys[index]
        del self.values[index]
| 49 | + |
| 50 | + |
class Multiset:
    """Multiset (bag) of hypervectors stored as one bundled vector.

    ``cardinality`` tracks how many elements the bundle currently holds;
    membership is tested by cosine similarity against ``threshold``.
    """

    def __init__(self, dimensions, threshold=0.5, device=None, dtype=None):
        self.threshold = threshold
        self.cardinality = 0
        dtype = dtype if dtype is not None else torch.get_default_dtype()
        self.value = torch.zeros(dimensions, dtype=dtype, device=device)

    def add(self, input: torch.Tensor) -> None:
        """Bundles ``input`` into the multiset."""
        self.value = functional.bundle(self.value, input)
        self.cardinality += 1

    def remove(self, input: torch.Tensor) -> None:
        """Removes ``input`` from the multiset if it appears to be present."""
        if input not in self:
            return
        self.value = functional.bundle(self.value, -input)
        self.cardinality -= 1

    def __contains__(self, input: torch.Tensor):
        # Fix: the accumulator attribute is ``value`` — the original read
        # ``self.values``, which does not exist and raised AttributeError.
        sim = functional.cosine_similarity(input, self.value.unsqueeze(0))
        return sim.item() > self.threshold

    def __len__(self) -> int:
        return self.cardinality

    @classmethod
    def from_ngrams(cls, input: torch.Tensor, n=3, threshold=0.5):
        """Builds a multiset of the n-grams of a (num, dimensions) tensor."""
        instance = cls(input.size(-1), threshold, input.device, input.dtype)
        instance.value = functional.ngrams(input, n)
        # A sequence of num items yields num - n + 1 n-grams; keep len()
        # consistent with the bundled content (was left at 0 before).
        instance.cardinality = max(input.size(-2) - n + 1, 0)
        return instance

    @classmethod
    def from_tensors(cls, input: torch.Tensor, dim=-2, threshold=0.5):
        """Builds a multiset from the tensors stacked along ``dim``."""
        instance = cls(input.size(-1), threshold, input.device, input.dtype)
        instance.value = functional.multiset(input=input, dim=dim)
        # One element per slice along dim (was left at 0 before).
        instance.cardinality = input.size(dim)
        return instance
| 86 | + |
| 87 | + |
class Sequence:
    """Ordered sequence of hypervectors encoded by permute-and-bundle.

    Position is encoded by how many times an element has been permuted:
    the most recently appended element is unpermuted, older elements are
    rotated once more for each later append.
    """

    def __init__(self, dimensions, threshold=0.5, device=None, dtype=None):
        self.length = 0
        self.threshold = threshold
        dtype = dtype if dtype is not None else torch.get_default_dtype()
        self.value = torch.zeros(dimensions, dtype=dtype, device=device)

    def append(self, input: torch.Tensor) -> None:
        """Appends ``input`` to the right end of the sequence."""
        rotated_value = functional.permute(self.value, shifts=1)
        self.value = functional.bundle(input, rotated_value)
        # Fix: length was never updated, which also broke the shift
        # amounts used by appendleft/popleft (they depend on len(self)).
        self.length += 1

    def appendleft(self, input: torch.Tensor) -> None:
        """Appends ``input`` to the left end of the sequence."""
        rotated_input = functional.permute(input, shifts=len(self))
        self.value = functional.bundle(self.value, rotated_input)
        self.length += 1  # fix: keep len() in sync (was missing)

    def pop(self, input: torch.Tensor) -> None:
        """Removes ``input`` from the right end; caller supplies the element."""
        # Fix annotation: nothing is returned (was Optional[torch.Tensor]).
        self.value = functional.bundle(self.value, -input)
        self.value = functional.permute(self.value, shifts=-1)
        self.length -= 1

    def popleft(self, input: torch.Tensor) -> None:
        """Removes ``input`` from the left end; caller supplies the element."""
        rotated_input = functional.permute(input, shifts=len(self) + 1)
        self.value = functional.bundle(self.value, -rotated_input)
        self.length -= 1

    def __getitem__(self, index: int) -> torch.Tensor:
        # Rotating back by index exposes that position's element (plus noise).
        rotated_value = functional.permute(self.value, shifts=-index)
        return rotated_value

    def __len__(self) -> int:
        return self.length
| 119 | + |
| 120 | + |
class Graph:
    """Graph encoded as a bundle of bound node-pair hypervectors."""

    def __init__(
        self, dimensions, threshold=0.5, directed=False, device=None, dtype=None
    ):
        self.length = 0
        self.threshold = threshold
        self.dtype = dtype if dtype is not None else torch.get_default_dtype()
        # Use the resolved dtype so self.dtype and self.value always agree
        # (the original passed the raw, possibly-None parameter).
        self.value = torch.zeros(dimensions, dtype=self.dtype, device=device)
        self.directed = directed

    def _encode_edge(self, node1: torch.Tensor, node2: torch.Tensor) -> torch.Tensor:
        """Encodes an edge; shared by add_edge and edge_exists."""
        if self.directed:
            # Fix: bind is commutative, so a directed edge must permute one
            # endpoint to preserve direction. The original had the branches
            # swapped, losing direction for directed graphs and making
            # undirected edges asymmetric.
            return functional.bind(node1, functional.permute(node2))
        return functional.bind(node1, node2)

    def add_edge(self, node1: torch.Tensor, node2: torch.Tensor):
        """Bundles the edge (node1, node2) into the graph."""
        self.value = functional.bundle(self.value, self._encode_edge(node1, node2))

    def edge_exists(self, node1: torch.Tensor, node2: torch.Tensor):
        """Tests whether the edge (node1, node2) is in the graph."""
        return self._encode_edge(node1, node2) in self

    def node_neighbours(self, input: torch.Tensor):
        """Returns the (noisy) superposition of neighbours of ``input``."""
        if self.directed:
            # Directed edges store permute(target); undo the permutation
            # after unbinding so the result is in node space.
            return functional.permute(functional.bind(self.value, input), shifts=-1)
        return functional.bind(self.value, input)

    def __contains__(self, input: torch.Tensor):
        sim = functional.cosine_similarity(input, self.value.unsqueeze(0))
        return sim.item() > self.threshold
| 151 | + |
| 152 | + |
class Tree:
    """Binary tree encoded as a bundle of path-bound leaf hypervectors."""

    def __init__(self, dimensions, device=None, dtype=None):
        self.dtype = dtype if dtype is not None else torch.get_default_dtype()
        # Use the resolved dtype so self.dtype and self.value always agree.
        self.value = torch.zeros(dimensions, dtype=self.dtype, device=device)
        # Fix: the left/right role vectors must live on the same device and
        # dtype as the accumulator, otherwise bind/bundle fail or upcast
        # when a device or dtype is specified.
        self.l_r = functional.random_hv(2, dimensions, dtype=self.dtype, device=device)

    def add_leaf(self, value, path):
        """Binds ``value`` with the left/right roles along ``path``
        (an iterable of "l"/"r") and bundles it into the tree."""
        for turn in path:
            role = self.left if turn == "l" else self.right
            value = functional.bind(value, role)
        self.value = functional.bundle(self.value, value)

    @property
    def left(self):
        # Role hypervector marking a left branch.
        return self.l_r[0]

    @property
    def right(self):
        # Role hypervector marking a right branch.
        return self.l_r[1]
| 174 | + |
| 175 | + |
class FiniteStateAutomata:
    """Finite state automaton encoded as a bundle of transition hypervectors.

    Each transition binds a token with a directed (initial, final) state
    edge; ``change_state`` retrieves the next state by unbinding.
    """

    def __init__(self, dimensions, device=None, dtype=None):
        self.dtype = dtype if dtype is not None else torch.get_default_dtype()
        # Use the resolved dtype so self.dtype and self.value always agree
        # (the original passed the raw, possibly-None parameter).
        self.value = torch.zeros(dimensions, dtype=self.dtype, device=device)

    def add_transition(
        self,
        token: torch.Tensor,
        initial_state: torch.Tensor,
        final_state: torch.Tensor,
    ):
        """Adds the transition initial_state --token--> final_state."""
        # Permuting the final state makes the edge directional
        # (bind alone is commutative).
        transition_edge = functional.bind(
            initial_state, functional.permute(final_state)
        )
        transition = functional.bind(token, transition_edge)
        self.value = functional.bundle(self.value, transition)

    def change_state(self, token: torch.Tensor, current_state: torch.Tensor):
        """Returns the next state (plus bundle noise) for ``token``."""
        # Unbind the current state and token, then undo the permutation
        # applied in add_transition to recover the final state.
        next_state = functional.bind(self.value, current_state)
        next_state = functional.bind(next_state, token)
        return functional.permute(next_state, shifts=-1)
0 commit comments