66 | 66 | #define LIST_DIRTY 1 |
67 | 67 | #define LIST_SIZE 2 |
68 | 68 |
| 69 | +/*--------------------------------------------------------------*/ |
| 70 | + |
| 71 | +/* |
| 72 | + * Rather than use an LRU list, we use a clock algorithm where entries |
| 73 | + * are held in a circular list. When an entry is 'hit' a reference bit |
| 74 | + * is set. The least recently used entry is approximated by running a |
| 75 | + * cursor around the list selecting unreferenced entries. Referenced |
| 76 | + * entries have their reference bit cleared as the cursor passes them. |
| 77 | + */ |
| 78 | +struct lru_entry { |
| 79 | + struct list_head list; |
| 80 | + atomic_t referenced; |
| 81 | +}; |
| 82 | + |
| 83 | +struct lru_iter { |
| 84 | + struct lru *lru; |
| 85 | + struct list_head list; |
| 86 | + struct lru_entry *stop; |
| 87 | + struct lru_entry *e; |
| 88 | +}; |
| 89 | + |
| 90 | +struct lru { |
| 91 | + struct list_head *cursor; |
| 92 | + unsigned long count; |
| 93 | + |
| 94 | + struct list_head iterators; |
| 95 | +}; |
| 96 | + |
| 97 | +/*--------------*/ |
| 98 | + |
| 99 | +static void lru_init(struct lru *lru) |
| 100 | +{ |
| 101 | + lru->cursor = NULL; |
| 102 | + lru->count = 0; |
| 103 | + INIT_LIST_HEAD(&lru->iterators); |
| 104 | +} |
| 105 | + |
| 106 | +static void lru_destroy(struct lru *lru) |
| 107 | +{ |
| 108 | + WARN_ON_ONCE(lru->cursor); |
| 109 | + WARN_ON_ONCE(!list_empty(&lru->iterators)); |
| 110 | +} |
| 111 | + |
| 112 | +/* |
| 113 | + * Insert a new entry into the lru. |
| 114 | + */ |
| 115 | +static void lru_insert(struct lru *lru, struct lru_entry *le) |
| 116 | +{ |
| 117 | + /* |
| 118 | + * Don't be tempted to set this to 1; it makes the lru |
| 119 | + * aspect perform poorly. |
| 120 | + */ |
| 121 | + atomic_set(&le->referenced, 0); |
| 122 | + |
| 123 | + if (lru->cursor) { |
| 124 | + list_add_tail(&le->list, lru->cursor); |
| 125 | + } else { |
| 126 | + INIT_LIST_HEAD(&le->list); |
| 127 | + lru->cursor = &le->list; |
| 128 | + } |
| 129 | + lru->count++; |
| 130 | +} |
| 131 | + |
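A note on the insertion order above: list_add_tail() links the new entry immediately before lru->cursor in list order, i.e. at the position the clock hand will reach last. A minimal sketch of how the ring builds up (the entries and the example function are hypothetical; only lru_init() and lru_insert() come from this patch):

```c
/* Hypothetical entries, purely for illustration. */
static struct lru_entry a, b, c;

static void example_build_ring(struct lru *lru)
{
	lru_init(lru);
	lru_insert(lru, &a);	/* ring: a, cursor = a */
	lru_insert(lru, &b);	/* ring: a -> b -> a, cursor still a */
	lru_insert(lru, &c);	/* ring: a -> b -> c -> a, cursor still a */
}
```

Because newcomers sit just behind the cursor, the hand sweeps the older entries first, which is what gives the clock its approximate-LRU behaviour.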
| 132 | +/*--------------*/ |
| 133 | + |
| 134 | +/* |
| 135 | + * Convert a list_head pointer to an lru_entry pointer. |
| 136 | + */ |
| 137 | +static inline struct lru_entry *to_le(struct list_head *l) |
| 138 | +{ |
| 139 | + return container_of(l, struct lru_entry, list); |
| 140 | +} |
| 141 | + |
| 142 | +/* |
| 143 | + * Initialize an lru_iter and add it to the lru's list of iterators. |
| 144 | + */ |
| 145 | +static void lru_iter_begin(struct lru *lru, struct lru_iter *it) |
| 146 | +{ |
| 147 | + it->lru = lru; |
| 148 | + it->stop = lru->cursor ? to_le(lru->cursor->prev) : NULL; |
| 149 | + it->e = lru->cursor ? to_le(lru->cursor) : NULL; |
| 150 | + list_add(&it->list, &lru->iterators); |
| 151 | +} |
| 152 | + |
| 153 | +/* |
| 154 | + * Remove an lru_iter from the lru's list of iterators. |
| 155 | + */ |
| 156 | +static inline void lru_iter_end(struct lru_iter *it) |
| 157 | +{ |
| 158 | + list_del(&it->list); |
| 159 | +} |
| 160 | + |
| 161 | +/* Predicate function type to be used with lru_iter_next */ |
| 162 | +typedef bool (*iter_predicate)(struct lru_entry *le, void *context); |
| 163 | + |
| 164 | +/* |
| 165 | + * Advance the cursor to the next entry that passes the |
| 166 | + * predicate, and return that entry. Returns NULL if the |
| 167 | + * iteration is complete. |
| 168 | + */ |
| 169 | +static struct lru_entry *lru_iter_next(struct lru_iter *it, |
| 170 | + iter_predicate pred, void *context) |
| 171 | +{ |
| 172 | + struct lru_entry *e; |
| 173 | + |
| 174 | + while (it->e) { |
| 175 | + e = it->e; |
| 176 | + |
| 177 | + /* advance the cursor */ |
| 178 | + if (it->e == it->stop) |
| 179 | + it->e = NULL; |
| 180 | + else |
| 181 | + it->e = to_le(it->e->list.next); |
| 182 | + |
| 183 | + if (pred(e, context)) |
| 184 | + return e; |
| 185 | + } |
| 186 | + |
| 187 | + return NULL; |
| 188 | +} |
| 189 | + |
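For context, a hedged sketch of how this iterator is meant to be driven; match_all() and example_walk() are hypothetical, only lru_iter_begin(), lru_iter_next() and lru_iter_end() come from this patch:

```c
/* Hypothetical predicate: accept every entry. */
static bool match_all(struct lru_entry *le, void *context)
{
	return true;
}

/* Hypothetical walker: visit each entry currently in the lru once. */
static void example_walk(struct lru *lru)
{
	struct lru_iter it;
	struct lru_entry *le;

	lru_iter_begin(lru, &it);
	while ((le = lru_iter_next(&it, match_all, NULL))) {
		/* ... inspect le ... */
	}
	lru_iter_end(&it);
}
```

Registering the iterator on lru->iterators in lru_iter_begin() is what allows lru_iter_invalidate(), below, to step an in-flight walk past an entry that is removed before the walk finishes.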
| 190 | +/* |
| 191 | + * Invalidate a specific lru_entry and update all cursors in |
| 192 | + * the lru accordingly. |
| 193 | + */ |
| 194 | +static void lru_iter_invalidate(struct lru *lru, struct lru_entry *e) |
| 195 | +{ |
| 196 | + struct lru_iter *it; |
| 197 | + |
| 198 | + list_for_each_entry(it, &lru->iterators, list) { |
| 199 | + /* Move it->e forwards if necessary. */ |
| 200 | + if (it->e == e) { |
| 201 | + it->e = to_le(it->e->list.next); |
| 202 | + if (it->e == e) |
| 203 | + it->e = NULL; |
| 204 | + } |
| 205 | + |
| 206 | + /* Move it->stop backwards if necessary. */ |
| 207 | + if (it->stop == e) { |
| 208 | + it->stop = to_le(it->stop->list.prev); |
| 209 | + if (it->stop == e) |
| 210 | + it->stop = NULL; |
| 211 | + } |
| 212 | + } |
| 213 | +} |
| 214 | + |
| 215 | +/*--------------*/ |
| 216 | + |
| 217 | +/* |
| 218 | + * Remove a specific entry from the lru. |
| 219 | + */ |
| 220 | +static void lru_remove(struct lru *lru, struct lru_entry *le) |
| 221 | +{ |
| 222 | + lru_iter_invalidate(lru, le); |
| 223 | + if (lru->count == 1) { |
| 224 | + lru->cursor = NULL; |
| 225 | + } else { |
| 226 | + if (lru->cursor == &le->list) |
| 227 | + lru->cursor = lru->cursor->next; |
| 228 | + list_del(&le->list); |
| 229 | + } |
| 230 | + lru->count--; |
| 231 | +} |
| 232 | + |
| 233 | +/* |
| 234 | + * Mark as referenced. |
| 235 | + */ |
| 236 | +static inline void lru_reference(struct lru_entry *le) |
| 237 | +{ |
| 238 | + atomic_set(&le->referenced, 1); |
| 239 | +} |
| 240 | + |
| 241 | +/*--------------*/ |
| 242 | + |
| 243 | +/* |
| 244 | + * Remove the (approximately) least recently used entry that passes the |
| 245 | + * predicate. Returns NULL if nothing is evicted. |
| 246 | + */ |
| 247 | +enum evict_result { |
| 248 | + ER_EVICT, |
| 249 | + ER_DONT_EVICT, |
| 250 | + ER_STOP, /* stop looking for something to evict */ |
| 251 | +}; |
| 252 | + |
| 253 | +typedef enum evict_result (*le_predicate)(struct lru_entry *le, void *context); |
| 254 | + |
| 255 | +static struct lru_entry *lru_evict(struct lru *lru, le_predicate pred, void *context) |
| 256 | +{ |
| 257 | + unsigned long tested = 0; |
| 258 | + struct list_head *h = lru->cursor; |
| 259 | + struct lru_entry *le; |
| 260 | + |
| 261 | + if (!h) |
| 262 | + return NULL; |
| 263 | + /* |
| 264 | + * In the worst case we have to loop around twice. Once to clear |
| 265 | + * the reference flags, and then again to discover the predicate |
| 266 | + * fails for all entries. |
| 267 | + */ |
| 268 | + while (tested < lru->count) { |
| 269 | + le = container_of(h, struct lru_entry, list); |
| 270 | + |
| 271 | + if (atomic_read(&le->referenced)) { |
| 272 | + atomic_set(&le->referenced, 0); |
| 273 | + } else { |
| 274 | + tested++; |
| 275 | + switch (pred(le, context)) { |
| 276 | + case ER_EVICT: |
| 277 | + /* |
| 278 | + * Adjust the cursor, so we start the next |
| 279 | + * search from here. |
| 280 | + */ |
| 281 | + lru->cursor = le->list.next; |
| 282 | + lru_remove(lru, le); |
| 283 | + return le; |
| 284 | + |
| 285 | + case ER_DONT_EVICT: |
| 286 | + break; |
| 287 | + |
| 288 | + case ER_STOP: |
| 289 | + lru->cursor = le->list.next; |
| 290 | + return NULL; |
| 291 | + } |
| 292 | + } |
| 293 | + |
| 294 | + h = h->next; |
| 295 | + |
| 296 | + cond_resched(); |
| 297 | + } |
| 298 | + |
| 299 | + return NULL; |
| 300 | +} |
| 301 | + |
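To make the second-chance behaviour concrete, here is a hypothetical walk-through (the entries, evict_any() and example_second_chance() are illustrative only): with three entries in the ring and only b referenced, the scan evicts a first, clears b's reference bit and moves on to c, and only comes back for b on the final call.

```c
/* Hypothetical predicate: evict whatever the cursor offers. */
static enum evict_result evict_any(struct lru_entry *le, void *context)
{
	return ER_EVICT;
}

static void example_second_chance(struct lru *lru, struct lru_entry *a,
				  struct lru_entry *b, struct lru_entry *c)
{
	lru_insert(lru, a);
	lru_insert(lru, b);
	lru_insert(lru, c);

	lru_reference(b);	/* b earns a second chance */

	/* The cursor starts at a, which is unreferenced: evicted first. */
	WARN_ON(lru_evict(lru, evict_any, NULL) != a);

	/* The hand clears b's reference bit, skips it, and takes c. */
	WARN_ON(lru_evict(lru, evict_any, NULL) != c);

	/* Only now is b evicted. */
	WARN_ON(lru_evict(lru, evict_any, NULL) != b);
}
```

In the worst case, as the comment in lru_evict() notes, a scan makes one trip around the ring clearing reference bits and a second offering every unreferenced entry to the predicate.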
| 302 | +/*--------------------------------------------------------------*/ |
| 303 | + |
69 | 304 | /* |
70 | 305 | * Linking of buffers: |
71 | 306 | * All buffers are linked to buffer_tree with their node field. |