
Commit f79f399 (parent 62369e3)

fix(AbstractGraph): manually select model tokens

closes #768

File tree: 1 file changed (+9, −6)

scrapegraphai/graphs/abstract_graph.py

Lines changed: 9 additions & 6 deletions
@@ -152,12 +152,15 @@ def _create_llm(self, llm_config: dict) -> object:
             raise ValueError(f"""Provider {llm_params['model_provider']} is not supported.
                              If possible, try to use a model instance instead.""")
 
-        try:
-            self.model_token = models_tokens[llm_params["model_provider"]][llm_params["model"]]
-        except KeyError:
-            print(f"""Model {llm_params['model_provider']}/{llm_params['model']} not found,
-                  using default token size (8192)""")
-            self.model_token = 8192
+        if "model_tokens" not in llm_params:
+            try:
+                self.model_token = models_tokens[llm_params["model_provider"]][llm_params["model"]]
+            except KeyError:
+                print(f"""Model {llm_params['model_provider']}/{llm_params['model']} not found,
+                      using default token size (8192)""")
+                self.model_token = 8192
+        else:
+            self.model_token = llm_params["model_tokens"]
 
         try:
             if llm_params["model_provider"] not in \
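With this change, a caller can set the model's context-window size explicitly through a "model_tokens" key in the llm parameters, and _create_llm uses that value instead of consulting the models_tokens lookup table (or falling back to 8192 when the model is not listed). A minimal usage sketch, assuming the project's usual graph_config shape; the model name and token value below are illustrative placeholders, not taken from this commit:

    # Sketch only: keys other than "model_tokens" follow the existing llm-config
    # convention; the model name and limit are illustrative.
    graph_config = {
        "llm": {
            "model": "ollama/llama3",   # hypothetical model choice
            "model_tokens": 4096,       # manually selected token limit; with this fix,
                                        # _create_llm assigns it to self.model_token directly
        },
    }

Supplying "model_tokens" skips the models_tokens lookup entirely, which is useful for models that are not yet present in that table.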
