Skip to content

Commit b243a54

Browse files
committed
Change default buffer size to 64MB
1 parent 32a29be commit b243a54

File tree

2 files changed

+3
-3
lines changed

2 files changed

+3
-3
lines changed

redisgraph_bulk_loader/bulk_insert.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -66,8 +66,8 @@ def process_entities(entities):
66 66
@click.option('--escapechar', '-x', default='\\', help='the escape char used for the CSV reader (default \\). Use "none" for None.')
67 67
# Buffer size restrictions
68 68
@click.option('--max-token-count', '-c', default=1024, help='max number of processed CSVs to send per query (default 1024)')
69-
@click.option('--max-buffer-size', '-b', default=128, help='max buffer size in megabytes (default 128, max 1024)')
70-
@click.option('--max-token-size', '-t', default=128, help='max size of each token in megabytes (default 128, max 512)')
69+
@click.option('--max-buffer-size', '-b', default=64, help='max buffer size in megabytes (default 64, max 1024)')
70+
@click.option('--max-token-size', '-t', default=64, help='max size of each token in megabytes (default 64, max 512)')
71 71
@click.option('--index', '-i', multiple=True, help='Label:Propery on which to create an index')
72 72
@click.option('--full-text-index', '-f', multiple=True, help='Label:Propery on which to create an full text search index')
73 73
def bulk_insert(graph, host, port, password, user, unix_socket_path, nodes, nodes_with_label, relations, relations_with_type, separator, enforce_schema, skip_invalid_nodes, skip_invalid_edges, escapechar, quote, max_token_count, max_buffer_size, max_token_size, index, full_text_index):

redisgraph_bulk_loader/config.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
1 1
class Config:
2-
def __init__(self, max_token_count=1024 * 1023, max_buffer_size=128, max_token_size=128, enforce_schema=False, skip_invalid_nodes=False, skip_invalid_edges=False, separator=',', quoting=3, store_node_identifiers=False, escapechar='\\'):
2+
def __init__(self, max_token_count=1024 * 1023, max_buffer_size=64, max_token_size=64, enforce_schema=False, skip_invalid_nodes=False, skip_invalid_edges=False, separator=',', quoting=3, store_node_identifiers=False, escapechar='\\'):
3 3
"""Settings for this run of the bulk loader"""
4 4
# Maximum number of tokens per query
5 5
# 1024 * 1024 is the hard-coded Redis maximum. We'll set a slightly lower limit so

0 commit comments

Comments (0)