Skip to content

Commit eb43942

Browse files
authored
Merge pull request #2 from neocortex-link/feat/run-check
Check if Ollama is running
2 parents 0d200de + 3962483 commit eb43942

File tree

4 files changed

+35
-4
lines changed

4 files changed

+35
-4
lines changed

CHANGELOG.md

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,9 @@ All notable changes to this project will be documented in this file.
44
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
55
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
66

7+
## [0.1.2] - 7 July 2025
8+
- Check if Ollama is running on start
9+
710
## [0.1.1] - 16 March 2025
811
- Gemma 3 and Qwen 2.5 models are added to model list
912
- Editor window signup button

Editor/OllamaSupportWindow.cs

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,8 +29,14 @@ private void OnEnable()
2929
ollama = new OllamaSupport();
3030
logo = Resources.Load<Texture2D>("Visuals/ollama_x_neocortex");
3131
ollama.CheckOllamaInstallation();
32+
ollama.CheckOllamaRunning();
3233
ollama.CheckInstalledModels();
3334
ollama.SetPlatformDependedStrings();
35+
36+
if (!ollama.IsOllamaRunning)
37+
{
38+
EditorUtility.DisplayDialog("Ollama Not Running", "Ollama is not running. Please start Ollama and try again.", "OK");
39+
}
3440
}
3541

3642
private void OnGUI()

Runtime/OllamaSupport.cs

Lines changed: 25 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -3,13 +3,16 @@
33
using Neocortex.Data;
44
using System.Diagnostics;
55
using System.Collections.Generic;
6+
using System.Net.Sockets;
7+
using System.Runtime.InteropServices;
68
using SystemInfo = UnityEngine.Device.SystemInfo;
79

810
namespace Neocortex
911
{
1012
public class OllamaSupport
1113
{
1214
public bool IsOllamaInstalled { get; private set; }
15+
public bool IsOllamaRunning { get; private set; }
1316

1417
public List<ModelInfo> Models = new()
1518
{
@@ -69,22 +72,41 @@ private Process CreateProcess(string fileName, string args)
6972

7073
return new Process { StartInfo = psi };
7174
}
75+
76+
private bool IsWindows()
77+
{
78+
return RuntimeInformation.IsOSPlatform(OSPlatform.Windows);
79+
}
7280

7381
public void CheckOllamaInstallation()
7482
{
75-
using (Process process = CreateProcess("cmd.exe", "where ollama"))
83+
using (Process process = CreateProcess(IsWindows() ? "where" : "which", "ollama"))
7684
{
7785
process.Start();
7886
string output = process.StandardOutput.ReadToEnd();
7987
process.WaitForExit();
88+
IsOllamaInstalled = !string.IsNullOrWhiteSpace(output);
89+
}
90+
}
8091

81-
IsOllamaInstalled = !string.IsNullOrEmpty(output);
92+
public void CheckOllamaRunning()
93+
{
94+
try
95+
{
96+
using (var client = new TcpClient("localhost", 11434))
97+
{
98+
IsOllamaRunning = true;
99+
}
100+
}
101+
catch
102+
{
103+
IsOllamaRunning = false;
82104
}
83105
}
84106

85107
public async void CheckInstalledModels()
86108
{
87-
if (IsOllamaInstalled)
109+
if (IsOllamaInstalled && IsOllamaRunning)
88110
{
89111
var tags = await request.GetTags();
90112

package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "link.neocortex.ollama",
3-
"version": "0.1.1",
3+
"version": "0.1.2",
44
"displayName": "Neocortex Ollama Support",
55
"description": "Ollama Support for Neocortex Unity SDK. This package helps you download and run LLMs on your local environment with the help of Ollama.",
66
"unity": "2021.3",

0 commit comments

Comments
 (0)