
Commit

fix-embeddings-loading (#551)
granawkins authored Mar 30, 2024
1 parent 182a4b8 commit ba714d5
Showing 2 changed files with 8 additions and 3 deletions.
9 changes: 7 additions & 2 deletions mentat/embeddings.py
@@ -87,7 +87,12 @@ async def get_feature_similarity_scores(

     max_model_tokens = model_context_size(config.embedding_model)
     if max_model_tokens is None:
-        raise MentatError(f"Missing model context size for {embedding_model}.")
+        stream.send(
+            f"Warning: Could not determine context size for model {config.embedding_model}."
+            " Using default value of 8192.",
+            style="warning",
+        )
+        max_model_tokens = 8192

     # Initialize DB
     collection = Collection(embedding_model)
@@ -139,7 +144,7 @@ async def get_feature_similarity_scores(
     )

     # Get similarity scores
-    stream.send(None, channel="loading")
+    stream.send(None, channel="loading", terminate=True)
     _checksums = list(set(checksums))
     scores = collection.query(prompt, _checksums)
     return [scores.get(f.get_checksum(), 0) for f in features]
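
The first hunk replaces a hard failure (raising MentatError when the embedding model's context size is unknown) with a warning and a fallback of 8192 tokens. As a standalone illustration of that fallback pattern, here is a minimal sketch; the helper name resolve_context_size and its callback parameters are hypothetical and not part of Mentat's API, while the 8192 default and the warning text come from the diff above.

    from typing import Callable, Optional

    DEFAULT_CONTEXT_SIZE = 8192  # fallback value introduced by this commit

    def resolve_context_size(
        model: str,
        lookup: Callable[[str], Optional[int]],
        warn: Callable[[str], None],
    ) -> int:
        """Return the model's context size, or a warned-about default."""
        size = lookup(model)
        if size is None:
            # Warn instead of raising, then degrade gracefully to the default.
            warn(
                f"Warning: Could not determine context size for model {model}."
                f" Using default value of {DEFAULT_CONTEXT_SIZE}."
            )
            return DEFAULT_CONTEXT_SIZE
        return size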
2 changes: 1 addition & 1 deletion mentat/feature_filters/llm_feature_filter.py
@@ -105,7 +105,7 @@ async def filter(
             model,
             default_timer() - start_time,
         )
-        stream.send(None, channel="loading")
+        stream.send(None, channel="loading", terminate=True)

         # Parse response into features
         try: