 import llama_cpp.llama_types as llama_types
 import llama_cpp.llama_grammar as llama_grammar

+from ._ggml import GGMLLogLevel
 from ._logger import logger
 from ._utils import suppress_stdout_stderr, Singleton

@@ -2776,11 +2777,13 @@ class Llava15ChatHandler:
         "{% endif %}"
     )

-    def __init__(self, clip_model_path: str, verbose: bool = True):
+    def __init__(self, clip_model_path: str, use_gpu: bool = True, verbosity: GGMLLogLevel = GGMLLogLevel.GGML_LOG_LEVEL_DEBUG):
         import llama_cpp.llava_cpp as llava_cpp

         self.clip_model_path = clip_model_path
-        self.verbose = verbose
+        self.ctx_clip_params = llava_cpp.clip_context_params()
+        self.ctx_clip_params.use_gpu = use_gpu
+        self.ctx_clip_params.ggml_log_level = verbosity

         self._llava_cpp = llava_cpp  # TODO: Fix
         self._exit_stack = ExitStack()
@@ -2792,25 +2795,22 @@ def __init__(self, clip_model_path: str, verbose: bool = True):
         if not os.path.exists(clip_model_path):
             raise ValueError(f"Clip model path does not exist: {clip_model_path}")

-        with suppress_stdout_stderr(disable=self.verbose):
-            clip_ctx = self._llava_cpp.clip_model_load(self.clip_model_path.encode(), 0)
+        clip_ctx = self._llava_cpp.clip_init(self.clip_model_path.encode(), self.ctx_clip_params)

-            if clip_ctx is None:
-                raise ValueError(f"Failed to load clip model: {clip_model_path}")
+        if clip_ctx is None:
+            raise ValueError(f"Failed to load clip model: {clip_model_path}")

-            self.clip_ctx = clip_ctx
+        self.clip_ctx = clip_ctx

-            def clip_free():
-                with suppress_stdout_stderr(disable=self.verbose):
-                    self._llava_cpp.clip_free(self.clip_ctx)
+        def clip_free():
+            self._llava_cpp.clip_free(self.clip_ctx)

-            self._exit_stack.callback(clip_free)
+        self._exit_stack.callback(clip_free)

         def last_image_embed_free():
-            with suppress_stdout_stderr(disable=self.verbose):
-                if self._last_image_embed is not None:
-                    self._llava_cpp.llava_image_embed_free(self._last_image_embed)
-                    self._last_image_embed = None
+            if self._last_image_embed is not None:
+                self._llava_cpp.llava_image_embed_free(self._last_image_embed)
+                self._last_image_embed = None


         self._exit_stack.callback(last_image_embed_free)
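For context, a minimal usage sketch of the revised constructor: GPU offload and CLIP log verbosity are now chosen at construction time instead of through the old verbose flag. The .gguf file names are placeholders, and the llama_cpp._ggml import path for GGMLLogLevel is inferred from the relative import added above; Llama and its chat_handler argument are the library's existing public API.

# Usage sketch (not part of the diff); paths below are placeholders.
from llama_cpp import Llama
from llama_cpp.llama_chat_format import Llava15ChatHandler
from llama_cpp._ggml import GGMLLogLevel  # assumed import path, mirrors the diff's relative import

chat_handler = Llava15ChatHandler(
    clip_model_path="mmproj-model-f16.gguf",        # CLIP/projector weights (placeholder)
    use_gpu=True,                                   # offload the CLIP encoder to the GPU
    verbosity=GGMLLogLevel.GGML_LOG_LEVEL_DEBUG,    # forwarded to clip_init via clip_context_params
)

llm = Llama(
    model_path="llava-v1.5-7b.Q4_K_M.gguf",         # LLaVA language model (placeholder)
    chat_handler=chat_handler,
    n_ctx=2048,
)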