
Commit ddd1496

Adding support for redisearch (#1640)
1 parent 3946da2 commit ddd1496

20 files changed: +8,494 -30 lines

redis/commands/redismodules.py

+9 lines

@@ -15,3 +15,12 @@ def json(self, encoder=JSONEncoder(), decoder=JSONDecoder()):
        from .json import JSON
        jj = JSON(client=self, encoder=encoder, decoder=decoder)
        return jj

    def ft(self, index_name="idx"):
        """Access the search namespace, providing support for redis search."""
        if 'search' not in self.loaded_modules:
            raise ModuleError("search is not loaded in the redis instance.")

        from .search import Search
        s = Search(client=self, index_name=index_name)
        return s
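A minimal usage sketch of the new accessor (not part of the commit), assuming a local Redis server with the RediSearch module loaded; host, port, and the index name are placeholders:

import redis

# Assumes a local Redis instance with the 'search' module loaded;
# host, port, and index_name are placeholder values.
r = redis.Redis(host="localhost", port=6379)

# ft() returns a Search client bound to an index name (default "idx") and
# raises ModuleError if the 'search' module is not available on the server.
search = r.ft(index_name="idx")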

redis/commands/search/__init__.py

+96 lines

@@ -0,0 +1,96 @@
from .commands import SearchCommands


class Search(SearchCommands):
    """
    Create a client for talking to search.
    It abstracts the API of the module and lets you just use the engine.
    """

    class BatchIndexer(object):
        """
        A batch indexer allows you to automatically batch
        document indexing in pipelines, flushing it every N documents.
        """

        def __init__(self, client, chunk_size=1000):
            self.client = client
            self.execute_command = client.execute_command
            self.pipeline = client.pipeline(transaction=False, shard_hint=None)
            self.total = 0
            self.chunk_size = chunk_size
            self.current_chunk = 0

        def __del__(self):
            if self.current_chunk:
                self.commit()

        def add_document(
            self,
            doc_id,
            nosave=False,
            score=1.0,
            payload=None,
            replace=False,
            partial=False,
            no_create=False,
            **fields
        ):
            """
            Add a document to the batch query
            """
            self.client._add_document(
                doc_id,
                conn=self.pipeline,
                nosave=nosave,
                score=score,
                payload=payload,
                replace=replace,
                partial=partial,
                no_create=no_create,
                **fields
            )
            self.current_chunk += 1
            self.total += 1
            if self.current_chunk >= self.chunk_size:
                self.commit()

        def add_document_hash(
            self,
            doc_id,
            score=1.0,
            replace=False,
        ):
            """
            Add a hash to the batch query
            """
            self.client._add_document_hash(
                doc_id,
                conn=self.pipeline,
                score=score,
                replace=replace,
            )
            self.current_chunk += 1
            self.total += 1
            if self.current_chunk >= self.chunk_size:
                self.commit()

        def commit(self):
            """
            Manually commit and flush the batch indexing query
            """
            self.pipeline.execute()
            self.current_chunk = 0

    def __init__(self, client, index_name="idx"):
        """
        Create a new Client for the given index_name.
        The default name is `idx`.

        The given `client` (an existing redis connection) is used for all
        underlying commands.
        """
        self.client = client
        self.index_name = index_name
        self.execute_command = client.execute_command
        self.pipeline = client.pipeline
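A short sketch of how the BatchIndexer above might be used (not part of the commit). It assumes an index named "idx" already exists with a TEXT field called "body", and that the private _add_document helper is provided by SearchCommands in the accompanying commands module:

from redis.commands.search import Search

# Assumes: `r` is a redis.Redis client with the search module loaded, and the
# "idx" index already exists with a TEXT field named "body" (hypothetical).
search = r.ft(index_name="idx")
indexer = Search.BatchIndexer(search, chunk_size=500)

for i in range(1200):
    # Each call queues the document on the internal pipeline; the pipeline is
    # flushed automatically every chunk_size documents.
    indexer.add_document("doc:%d" % i, body="document number %d" % i)

# Flush whatever remains in the last, partially filled chunk.
indexer.commit()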

redis/commands/search/_util.py

+10 lines

@@ -0,0 +1,10 @@
import six


def to_string(s):
    if isinstance(s, six.string_types):
        return s
    elif isinstance(s, six.binary_type):
        return s.decode("utf-8", "ignore")
    else:
        return s  # Not a string we care about
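A quick illustration (not part of the commit) of the helper's behavior, assuming the module path introduced here:

from redis.commands.search._util import to_string

assert to_string(b"hello") == "hello"  # bytes are decoded as UTF-8
assert to_string("hello") == "hello"   # str values pass through unchanged
assert to_string(42) == 42             # non-strings are returned as-is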
