@@ -75,21 +75,48 @@ struct ContentView: View {
             VStack {
                 DownloadButton(
                     llamaState: llamaState,
-                    modelName: "TinyLlama-1.1B (Q4_0)",
+                    modelName: "TinyLlama-1.1B (Q4_0, 0.6 GiB)",
                     modelUrl: "https://huggingface.co/TheBloke/TinyLlama-1.1B-1T-OpenOrca-GGUF/resolve/main/tinyllama-1.1b-1t-openorca.Q4_0.gguf?download=true",
                     filename: "tinyllama-1.1b-1t-openorca.Q4_0.gguf"
                 )
                 .font(.system(size: 12))
                 .padding(.top, 4)
+                .frame(maxWidth: .infinity, alignment: .leading)

                 DownloadButton(
                     llamaState: llamaState,
-                    modelName: "TinyLlama-1.1B (Q8_0)",
+                    modelName: "TinyLlama-1.1B (Q8_0, 1.1 GiB)",
                     modelUrl: "https://huggingface.co/TheBloke/TinyLlama-1.1B-1T-OpenOrca-GGUF/resolve/main/tinyllama-1.1b-1t-openorca.Q8_0.gguf?download=true",
                     filename: "tinyllama-1.1b-1t-openorca.Q8_0.gguf"
                 )
                 .font(.system(size: 12))

+                DownloadButton(
+                    llamaState: llamaState,
+                    modelName: "Phi-2.7B (Q4_0, 1.6 GiB)",
+                    modelUrl: "https://huggingface.co/ggml-org/models/resolve/main/phi-2/ggml-model-q4_0.gguf?download=true",
+                    filename: "phi-2-q4_0.gguf"
+                )
+                .font(.system(size: 12))
+                .frame(maxWidth: .infinity, alignment: .leading)
+
+                DownloadButton(
+                    llamaState: llamaState,
+                    modelName: "Phi-2.7B (Q8_0, 2.8 GiB)",
+                    modelUrl: "https://huggingface.co/ggml-org/models/resolve/main/phi-2/ggml-model-q8_0.gguf?download=true",
+                    filename: "phi-2-q8_0.gguf"
+                )
+                .font(.system(size: 12))
+
+                DownloadButton(
+                    llamaState: llamaState,
+                    modelName: "Mistral-7B-v0.1 (Q4_0, 3.8 GiB)",
+                    modelUrl: "https://huggingface.co/TheBloke/Mistral-7B-v0.1-GGUF/resolve/main/mistral-7b-v0.1.Q4_0.gguf?download=true",
+                    filename: "mistral-7b-v0.1.Q4_0.gguf"
+                )
+                .font(.system(size: 12))
+                .frame(maxWidth: .infinity, alignment: .leading)
+
                 Button("Clear downloaded models") {
                     ContentView.cleanupModelCaches()
                     llamaState.cacheCleared = true
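For reference, the DownloadButton(llamaState:modelName:modelUrl:filename:) initializer used above is defined in the example app's own DownloadButton view. Below is a minimal sketch of a view with that shape; the LlamaState class, its loadModel(modelUrl:) method, and the one-shot URLSession download are assumptions standing in for the real implementation, which also reports download progress.

import SwiftUI

// Sketch only: a stand-in for the example app's LlamaState.
// The real class wraps llama.cpp; loadModel(modelUrl:) is an assumed name here.
class LlamaState: ObservableObject {
    @Published var cacheCleared = false

    func loadModel(modelUrl: URL) throws {
        print("would load GGUF model at \(modelUrl.path)")
    }
}

struct DownloadButton: View {
    @ObservedObject var llamaState: LlamaState
    let modelName: String
    let modelUrl: String
    let filename: String

    @State private var downloaded = false

    // Models are stored in the app's Documents directory under `filename`.
    private var destination: URL {
        FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
            .appendingPathComponent(filename)
    }

    var body: some View {
        Button(downloaded ? "Load \(modelName)" : "Download \(modelName)") {
            // If the file is already on disk, load it instead of downloading again.
            if FileManager.default.fileExists(atPath: destination.path) {
                try? llamaState.loadModel(modelUrl: destination)
                downloaded = true
                return
            }
            guard let url = URL(string: modelUrl) else { return }
            // One-shot download; the real button tracks progress with a URLSession delegate.
            URLSession.shared.downloadTask(with: url) { tempFile, _, error in
                guard let tempFile, error == nil else { return }
                try? FileManager.default.moveItem(at: tempFile, to: destination)
                DispatchQueue.main.async { downloaded = true }
            }.resume()
        }
    }
}

With an initializer of that shape, the call sites in the diff compile as written, and adding another model is just one more DownloadButton entry with its Hugging Face URL and a local filename.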