
Commit a54b1ae

Author: desiena (committed)
+ Fixed _result_cache handling in QuerySet
+ Fixed __getitem__() to return a coherent response on slices and to use _result_cache when possible
+ Reimplemented CacheIterator.__iter__() to support multiple owners in a single thread
1 parent 333fd9a commit a54b1ae
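
A minimal sketch of the "multiple owners in a single thread" scenario the reworked CacheIterator.__iter__() is meant to support. The import path matches fmdata/cache_iterator.py below; the data and the exact assertions are illustrative, not taken from the commit:

from fmdata.cache_iterator import CacheIterator

cache = CacheIterator(iter([1, 2, 3, 4]))

# Two independent consumers ("owners") of the same instance, in one thread.
first = iter(cache)
second = iter(cache)

assert [next(first), next(first)] == [1, 2]  # first runs ahead; values are cached as they pass through
assert list(second) == [1, 2, 3, 4]          # second replays the cache, then continues from the source
assert list(first) == [3, 4]                 # first resumes and reads the newly cached tail

With the previous itertools.chain-based __iter__(), a consumer that had already moved past the cache onto the shared source iterator could miss values pulled (and cached) by another consumer in the meantime; the new index-based implementation always replays the cache first and only falls back to the source when the cache runs out.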

4 files changed (+47, -59 lines)

fmdata/cache_iterator.py

Lines changed: 17 additions & 15 deletions
@@ -1,4 +1,3 @@
-import itertools
 from typing import Iterator, List, TypeVar, Optional

 from typing_extensions import Generic
@@ -15,14 +14,25 @@ def __init__(self, iterator: Iterator[T]) -> None:
         self.cache_complete: bool = False

     def __iter__(self) -> Iterator[T]:
-        if self.cache_complete:
-            # all values have been cached
-            return iter(self.cached_values)
-
-        return itertools.chain(self.cached_values, self._iter)
+        idx = 0
+        # keep pulling from cache first, then from the generator
+        while True:
+            if idx < len(self.cached_values):
+                yield self.cached_values[idx]
+                idx += 1
+            elif not self.cache_complete:
+                val = next(self._iter, None)
+
+                if val is None:
+                    return
+
+                yield val
+                idx += 1
+            else:
+                return

     def __len__(self):
-        return len(self.list)
+        return sum(1 for _ in self.__iter__())

     def __getitem__(self, k) -> T:
         def read_until(index: Optional[int]):
@@ -52,7 +62,6 @@ def __repr__(self) -> str:
             len(self.cached_values), self.cache_complete
         )

-    @property
     def empty(self):
         # If cache is not empty there is for sure at least one element
         if not len(self.cached_values) == 0:
@@ -68,13 +77,6 @@ def empty(self):
         # If cached values changes, there is at least one element so is not empty
         return len(self.cached_values) == 0

-    @property
-    def list(self):
-        while not self.cache_complete:
-            next(self._iter, None)
-
-        return self.cached_values
-
     def _cache_generator(self, iterator: Iterator) -> Iterator:
         for val in iterator:
             self.cached_values.append(val)
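
Besides the new __iter__(), the diff above drops the @property decorator from empty and removes the list property entirely. A rough usage sketch under those assumptions, with made-up string data (plain list(cache) stands in for the removed .list):

from fmdata.cache_iterator import CacheIterator

cache = CacheIterator(iter(["a", "b", "c"]))

assert not cache.empty()               # empty is now called as a method
assert cache[1] == "b"                 # indexing still reads through the cache
assert list(cache) == ["a", "b", "c"]  # replaces the removed .list property
assert len(cache) == 3                 # __len__ now counts by iterating (and caching) the values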

fmdata/orm.py

Lines changed: 23 additions & 20 deletions
@@ -1,6 +1,7 @@
 from __future__ import annotations

 import dataclasses
+import itertools
 from datetime import date, datetime
 from functools import cached_property
 from typing import Type, Optional, List, Any, Iterator, Iterable, Set, Dict, Union, Tuple
@@ -145,9 +146,9 @@ def __init__(self):
         self._chunk_size = None
         self._slice_start: int = 0
         self._slice_stop: Optional[int] = None
-        self._result_cache: Optional[CacheIterator[PortalModel]] = None
         self._avoid_prefetch_cache = False
         self._only_prefetched = False
+        self._result_cache: Optional[CacheIterator[PortalModel]] = None

     def _set_model(self, model: Model, meta_portal: ModelMetaPortalField):
         self._model = model
@@ -160,7 +161,6 @@ def _clone(self):
         qs._chunk_size = self._chunk_size
         qs._slice_start = self._slice_start
         qs._slice_stop = self._slice_stop
-        qs._result_cache = self._result_cache
         qs._avoid_prefetch_cache = self._avoid_prefetch_cache
         qs._only_prefetched = self._only_prefetched

@@ -180,14 +180,14 @@ def _fetch_all(self):
             return

         # Try to use cache if the request is inside the prefetch data slice
-        if prefetch_data is not None:
+        if not self._avoid_prefetch_cache and prefetch_data is not None:
             prefetch_data_slice_start = prefetch_data.offset - 1
             prefetch_data_slice_stop = prefetch_data_slice_start + prefetch_data.limit

             search_slice_is_inside_prefetch_slice = self._slice_stop is not None and (
                     self._slice_start >= prefetch_data_slice_start and self._slice_stop <= prefetch_data_slice_stop)

-            if not self._avoid_prefetch_cache and search_slice_is_inside_prefetch_slice:
+            if search_slice_is_inside_prefetch_slice:
                 slice_relative_start = self._slice_start - prefetch_data_slice_start
                 slice_relative_stop = self._slice_stop - prefetch_data_slice_start
                 self._result_cache = prefetch_data.cache[slice_relative_start:slice_relative_stop]
@@ -239,24 +239,27 @@ def __getitem__(self, k):
             if k.stop is not None and k.stop <= (k.start or 0):
                 raise ValueError("Stop index must be greater than start index.")

-            if self._result_cache is not None:
-                return self._result_cache[k]
-
             new_qs = self._clone()
             new_qs._set_new_slice(k.start, k.stop)

+            if self._result_cache is not None:
+                new_qs._result_cache = CacheIterator(itertools.islice(self._result_cache.__iter__(), k.start, k.stop))
+
             # In case step is present, the list() force the execution of the query then use the list step to provide the result
             return list(new_qs)[::k.step] if k.step else new_qs

         elif isinstance(k, int):
             if k < 0:
                 raise ValueError("Negative indexing is not supported.")

+            if self._result_cache is not None:
+                return self._result_cache[k]
+
             new_qs = self._clone()
             new_qs._set_new_slice(k, k + 1)
             new_qs._fetch_all()

-            return self._result_cache[0]
+            return new_qs._result_cache[0]

         else:
             raise TypeError(
@@ -344,8 +347,6 @@ def _execute_query(self):
                 page_iterator=paged_result
             ))

-        return self._result_cache
-
     def portals_record_from_portal_page_iterator(self,
                                                  model: Model,
                                                  portal_fm_name: str,
@@ -368,7 +369,7 @@ def portals_record_from_portal_page_iterator(self,


 class PortalModel(metaclass=PortalMetaclass):
-    #Example of Meta
+    # Example of Meta
     #
     # class Meta:
     #     base_schema: FileMakerSchema = None
@@ -651,11 +652,10 @@ def _clone(self):
         qs._sort = self._sort[:]
         qs._scripts = self._scripts.copy()
         qs._chunk_size = self._chunk_size
-        qs._portals = self._portals
+        qs._portals = self._portals.copy()
         qs._slice_start = self._slice_start
         qs._slice_stop = self._slice_stop
         qs._response_layout = self._response_layout
-        qs._result_cache = self._result_cache

         return qs

@@ -831,24 +831,27 @@ def __getitem__(self, k):
             if k.stop is not None and k.stop <= (k.start or 0):
                 raise ValueError("Stop index must be greater than start index.")

-            if self._result_cache is not None:
-                return self._result_cache[k]
-
             new_qs = self._clone()
             new_qs._set_new_slice(k.start, k.stop)

+            if self._result_cache is not None:
+                new_qs._result_cache = CacheIterator(itertools.islice(self._result_cache.__iter__(), k.start, k.stop))
+
             # In case step is present, the list() force the execution of the query then use the list step to provide the result
             return list(new_qs)[::k.step] if k.step else new_qs

         elif isinstance(k, int):
             if k < 0:
                 raise ValueError("Negative indexing is not supported.")

+            if self._result_cache is not None:
+                return self._result_cache[k]
+
             new_qs = self._clone()
             new_qs._set_new_slice(k, k + 1)
             new_qs._fetch_all()

-            return self._result_cache[0]
+            return new_qs._result_cache[0]

         else:
             raise TypeError(
@@ -944,7 +947,6 @@ def _execute_query(self):
         self._result_cache = CacheIterator(
             self.records_iterator_from_page_iterator(page_iterator=paged_result.pages.__iter__(),
                                                      portals_input=self._portals))
-        return self._result_cache

     def records_iterator_from_page_iterator(self,
                                             page_iterator: PageIterator,
@@ -1150,7 +1152,8 @@ def __new__(mcls, name, bases, attrs):
         client: FMClient = get_meta_attribute(cls=cls, attrs_meta=attrs_meta, attribute_name="client")
         layout: str = get_meta_attribute(cls=cls, attrs_meta=attrs_meta, attribute_name="layout")

-        base_manager: Type[ModelManager] = get_meta_attribute(cls=cls, attrs_meta=attrs_meta, attribute_name="base_manager") or ModelManager
+        base_manager: Type[ModelManager] = get_meta_attribute(cls=cls, attrs_meta=attrs_meta,
+                                                              attribute_name="base_manager") or ModelManager

         cls._meta = ModelMeta(
             client=client,
@@ -1183,7 +1186,7 @@ class Model(metaclass=ModelMetaclass):
    #     base_schema: FileMakerSchema = None
    #     schema_config: dict = None

-    #TODO not used. Only for type hint
+    # TODO not used. Only for type hint
     objects: ModelManager = ModelManager()

     def __init__(self, **kwargs):
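
In the QuerySet slicing paths above, _clone() no longer shares _result_cache; instead __getitem__ hands the clone a lazily sliced view of the parent's cache. Stripped of the queryset plumbing, the mechanism is just CacheIterator plus itertools.islice; the integer data below is made up for illustration:

import itertools
from fmdata.cache_iterator import CacheIterator

# Stand-in for an already populated _result_cache on the parent queryset.
parent_cache = CacheIterator(iter(range(10)))

# What the new __getitem__ builds for qs[2:5]: the clone gets its own
# CacheIterator over a lazy slice of the parent's iterator, so the parent
# cache can still serve its own reads in full afterwards.
start, stop = 2, 5
child_cache = CacheIterator(itertools.islice(parent_cache.__iter__(), start, stop))

assert list(child_cache) == [2, 3, 4]
assert list(parent_cache) == list(range(10))  # the parent still yields every record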

tests/integration/test_server.py

Lines changed: 2 additions & 2 deletions
@@ -247,7 +247,7 @@ def test_create_get_delete_record(self) -> None:

         found_set = result.found_set

-        if not found_set.empty:
+        if not found_set.empty():
             found_set[0].delete_record()

         result = student_layout.create_record(
@@ -267,4 +267,4 @@ def test_create_get_delete_record(self) -> None:
         result.raise_exception_if_has_message(exclude_codes=[FMErrorEnum.NO_RECORDS_MATCH_REQUEST])

         found_set = result.found_set
-        self.assertTrue(found_set.empty)
+        self.assertTrue(found_set.empty())

tests/test_cache_iterator.py

Lines changed: 5 additions & 22 deletions
@@ -138,14 +138,14 @@ def test_empty_property_with_data(self):
         data = [1, 2, 3]
         cache_iter = CacheIterator(iter(data))

-        self.assertFalse(cache_iter.empty)
+        self.assertFalse(cache_iter.empty())
         self.assertEqual(cache_iter.cached_values, [1])  # Should consume first element

     def test_empty_property_empty_iterator(self):
         """Test empty property with empty iterator."""
         cache_iter = CacheIterator(iter([]))

-        self.assertTrue(cache_iter.empty)
+        self.assertTrue(cache_iter.empty())
         self.assertEqual(cache_iter.cached_values, [])
         self.assertTrue(cache_iter.cache_complete)

@@ -157,25 +157,10 @@ def test_empty_property_after_partial_consumption(self):
         # Consume some elements
         next(iter(cache_iter))

-        self.assertFalse(cache_iter.empty)
+        self.assertFalse(cache_iter.empty())
         # After consuming one element and checking empty, only 1 element should be cached
         self.assertEqual(len(cache_iter.cached_values), 1)

-    def test_list_property(self):
-        """Test list property."""
-        data = [1, 2, 3, 4, 5]
-        cache_iter = CacheIterator(iter(data))
-
-        # list property should force full consumption
-        result = cache_iter.list
-        self.assertEqual(result, data)
-        self.assertEqual(cache_iter.cached_values, data)
-        self.assertTrue(cache_iter.cache_complete)
-
-        # Subsequent calls should return the same cached list
-        result2 = cache_iter.list
-        self.assertEqual(result2, data)
-        self.assertIs(result, result2)  # Should be the same object

     def test_repr(self):
         """Test __repr__ method."""
@@ -204,17 +189,15 @@ def test_empty_iterator(self):
         self.assertEqual(len(cache_iter), 0)
         self.assertTrue(cache_iter.empty)
         self.assertTrue(cache_iter.cache_complete)
-        self.assertEqual(cache_iter.list, [])

     def test_single_element_iterator(self):
         """Test CacheIterator with single element."""
         cache_iter = CacheIterator(iter([42]))

         self.assertEqual(list(cache_iter), [42])
         self.assertEqual(len(cache_iter), 1)
-        self.assertFalse(cache_iter.empty)
+        self.assertFalse(cache_iter.empty())
         self.assertEqual(cache_iter[0], 42)
-        self.assertEqual(cache_iter.list, [42])

     def test_string_iterator(self):
         """Test CacheIterator with string iterator."""
@@ -279,7 +262,7 @@ def test_mixed_operations(self):

         # Mix of different operations
         self.assertEqual(cache_iter[3], 3)
-        self.assertFalse(cache_iter.empty)
+        self.assertFalse(cache_iter.empty())

         partial_list = cache_iter[1:5]
         self.assertEqual(partial_list, [1, 2, 3, 4])
