Add LLM Toolkit #1259

Merged 18 commits on May 12, 2024
Changes from 1 commit
fix using ollama vs openai
yogeshojha committed May 12, 2024
commit 75cadb9ad709f19ce42a40454cc68cb982fbaeaf
9 changes: 8 additions & 1 deletion web/api/views.py
@@ -88,12 +88,19 @@ def delete(self, request):
     def put(self, request):
         req = self.request
         model_name = req.query_params.get('model')
+        # check if model_name is in DEFAULT_GPT_MODELS
         response = {
             'status': False
         }
+        use_ollama = True
+        if any(model['name'] == model_name for model in DEFAULT_GPT_MODELS):
+            use_ollama = False
         try:
             OllamaSettings.objects.update_or_create(
-                defaults={'selected_model': model_name},
+                defaults={
+                    'selected_model': model_name,
+                    'use_ollama': use_ollama
+                },
                 id=1
             )
             response['status'] = True
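For reference, the flag persisted by this view is derived purely from membership in DEFAULT_GPT_MODELS: anything not in that list is assumed to be served by the local Ollama instance. A minimal, self-contained sketch of that decision (the model list here is a placeholder; the real DEFAULT_GPT_MODELS used by reNgine may have different entries and keys):

# Placeholder stand-in for reNgine's DEFAULT_GPT_MODELS definition.
DEFAULT_GPT_MODELS = [
    {'name': 'gpt-3.5-turbo', 'selected': True},
    {'name': 'gpt-4', 'selected': False},
]

def resolve_use_ollama(model_name):
    # True when the chosen model is not an OpenAI default,
    # i.e. it has to be served by the local Ollama instance.
    return not any(model['name'] == model_name for model in DEFAULT_GPT_MODELS)

print(resolve_use_ollama('gpt-4'))              # False -> route to OpenAI
print(resolve_use_ollama('llama2-uncensored'))  # True  -> route to Ollama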
18 changes: 18 additions & 0 deletions web/dashboard/migrations/0011_ollamasettings_is_ollama.py
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.4 on 2024-04-21 05:06
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('dashboard', '0010_ollamasettings'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='ollamasettings',
+            name='is_ollama',
+            field=models.BooleanField(default=False),
+        ),
+    ]
18 changes: 18 additions & 0 deletions web/dashboard/migrations/0012_rename_is_ollama_ollamasettings_is_openai.py
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.4 on 2024-04-21 05:06
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('dashboard', '0011_ollamasettings_is_ollama'),
+    ]
+
+    operations = [
+        migrations.RenameField(
+            model_name='ollamasettings',
+            old_name='is_ollama',
+            new_name='is_openai',
+        ),
+    ]
22 changes: 22 additions & 0 deletions web/dashboard/migrations/0013_auto_20240421_0507.py
@@ -0,0 +1,22 @@
+# Generated by Django 3.2.4 on 2024-04-21 05:07
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('dashboard', '0012_rename_is_ollama_ollamasettings_is_openai'),
+    ]
+
+    operations = [
+        migrations.RemoveField(
+            model_name='ollamasettings',
+            name='is_openai',
+        ),
+        migrations.AddField(
+            model_name='ollamasettings',
+            name='is_ollama',
+            field=models.BooleanField(default=True),
+        ),
+    ]
18 changes: 18 additions & 0 deletions
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.4 on 2024-04-21 05:08
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('dashboard', '0013_auto_20240421_0507'),
+    ]
+
+    operations = [
+        migrations.RenameField(
+            model_name='ollamasettings',
+            old_name='is_ollama',
+            new_name='use_ollama',
+        ),
+    ]
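Taken together, the four migrations net out to a single schema change on OllamaSettings: a use_ollama boolean that defaults to True. A hypothetical hand-written migration with the same net effect (shown only to summarize the chain; it is not part of the commit) would be:

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('dashboard', '0010_ollamasettings'),
    ]

    operations = [
        migrations.AddField(
            model_name='ollamasettings',
            name='use_ollama',
            field=models.BooleanField(default=True),
        ),
    ]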
1 change: 1 addition & 0 deletions web/dashboard/models.py
@@ -29,6 +29,7 @@ def __str__(self):
 class OllamaSettings(models.Model):
     id = models.AutoField(primary_key=True)
     selected_model = models.CharField(max_length=500)
+    use_ollama = models.BooleanField(default=True)
 
     def __str__(self):
         return self.selected_model
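OllamaSettings is effectively a single-row settings table: the API view above writes it with update_or_create(id=1), and GPTVulnerabilityReportGenerator in web/reNgine/gpt.py (below) reads it back with objects.first(). A minimal usage sketch (assumes a configured Django environment; the values are illustrative):

from dashboard.models import OllamaSettings

# Persist the user's choice, mirroring the PUT handler in web/api/views.py.
OllamaSettings.objects.update_or_create(
    id=1,
    defaults={'selected_model': 'llama2-uncensored', 'use_ollama': True},
)

# Read it back the way GPTVulnerabilityReportGenerator does.
settings = OllamaSettings.objects.first()
if settings and settings.use_ollama:
    print(f'Routing prompts to local Ollama model: {settings.selected_model}')
else:
    print('Routing prompts to OpenAI')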
32 changes: 23 additions & 9 deletions web/reNgine/gpt.py
@@ -1,16 +1,19 @@
 import openai
 import re
 from reNgine.common_func import get_open_ai_key, extract_between
-from reNgine.definitions import VULNERABILITY_DESCRIPTION_SYSTEM_MESSAGE, ATTACK_SUGGESTION_GPT_SYSTEM_PROMPT
-from langchain.llms import Ollama
+from reNgine.definitions import VULNERABILITY_DESCRIPTION_SYSTEM_MESSAGE, ATTACK_SUGGESTION_GPT_SYSTEM_PROMPT, OLLAMA_INSTANCE
+from langchain_community.llms import Ollama
 
+from dashboard.models import OllamaSettings
+
 class GPTVulnerabilityReportGenerator:
 
     def __init__(self):
-        self.api_key = get_open_ai_key()
-        self.model_name = 'gpt-3.5-turbo'
-        if not self.api_key:
-            self.ollama = Ollama(base_url='http://ollama:11434', model="llama2-uncensored")
+        selected_model = OllamaSettings.objects.first()
+        self.model_name = selected_model.selected_model if selected_model else 'gpt-3.5-turbo'
+        self.use_ollama = selected_model.use_ollama if selected_model else False
+        self.openai_api_key = None
+        self.ollama = None
 
     def get_vulnerability_description(self, description):
         """Generate Vulnerability Description using GPT.
@@ -26,12 +29,23 @@ def get_vulnerability_description(self, description):
             'references': (list) of urls
         }
         """
-        if not self.api_key:
-            prompt = ATTACK_SUGGESTION_GPT_SYSTEM_PROMPT + "\nUser: " + input
+        print(f"Generating Vulnerability Description for: {description}")
+        if self.use_ollama:
+            prompt = VULNERABILITY_DESCRIPTION_SYSTEM_MESSAGE + "\nUser: " + description
+            self.ollama = Ollama(
+                base_url=OLLAMA_INSTANCE,
+                model=self.model_name
+            )
             response_content = self.ollama(prompt)
         else:
-            openai.api_key = self.api_key
+            openai_api_key = get_open_ai_key()
+            if not openai_api_key:
+                return {
+                    'status': False,
+                    'error': 'OpenAI API Key not set'
+                }
             try:
+                openai.api_key = openai_api_key
                 gpt_response = openai.ChatCompletion.create(
                     model=self.model_name,
                     messages=[
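The generator now decides per request whether to call the local Ollama instance or the OpenAI API. A stripped-down sketch of that dispatch outside Django (the OLLAMA_INSTANCE value is taken from the removed hard-coded default above and is imported from reNgine.definitions in the real code; system prompts, error handling, and response parsing from the real method are omitted; the legacy pre-1.0 openai client is assumed, matching the ChatCompletion call above):

import openai
from langchain_community.llms import Ollama

OLLAMA_INSTANCE = 'http://ollama:11434'  # placeholder; reNgine imports this constant

def generate(prompt, model_name, use_ollama, openai_api_key=None):
    if use_ollama:
        # Local model served by Ollama; constructed per call, as in the patched class.
        llm = Ollama(base_url=OLLAMA_INSTANCE, model=model_name)
        return llm(prompt)
    if not openai_api_key:
        raise ValueError('OpenAI API Key not set')
    openai.api_key = openai_api_key
    response = openai.ChatCompletion.create(
        model=model_name,
        messages=[{'role': 'user', 'content': prompt}],
    )
    return response['choices'][0]['message']['content']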
9 changes: 5 additions & 4 deletions web/scanEngine/views.py
@@ -463,6 +463,7 @@ def llm_toolkit_section(request, slug):
     list_all_models_url = f'{OLLAMA_INSTANCE}/api/tags'
     response = requests.get(list_all_models_url)
     all_models = []
+    selected_model = None
     all_models = DEFAULT_GPT_MODELS.copy()
     if response.status_code == 200:
         models = response.json()
@@ -471,7 +472,7 @@ def llm_toolkit_section(request, slug):
         for model in ollama_models:
             all_models.append({**model,
                 'modified_at': datetime.strptime(model['modified_at'].split('.')[0], date_format),
-                'is_local': True
+                'is_local': True,
             })
     # find selected model name from db
     selected_model = OllamaSettings.objects.first()
@@ -480,9 +481,9 @@ def llm_toolkit_section(request, slug):
     else:
         # use gpt3.5-turbo as default
         selected_model = {'selected_model': 'gpt-3.5-turbo'}
-    for models in all_models:
-        if models['name'] == selected_model['selected_model']:
-            models['selected'] = True
+    for model in all_models:
+        if model['name'] == selected_model['selected_model']:
+            model['selected'] = True
     context['installed_models'] = all_models
     return render(request, 'scanEngine/settings/llm_toolkit.html', context)

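The view merges the models reported by Ollama's /api/tags endpoint with the OpenAI defaults and flags whichever entry matches the stored selection. A condensed, framework-free sketch of that merge (the model data is illustrative; the real view also parses modified_at timestamps and reads the selection from OllamaSettings):

def merge_and_mark(ollama_models, default_gpt_models, selected_name):
    # Combine local Ollama models with the OpenAI defaults, tagging local ones.
    all_models = default_gpt_models.copy()
    for model in ollama_models:
        all_models.append({**model, 'is_local': True})
    # Mark the currently selected model for the template.
    for model in all_models:
        if model['name'] == selected_name:
            model['selected'] = True
    return all_models

installed = merge_and_mark(
    ollama_models=[{'name': 'llama2-uncensored'}],
    default_gpt_models=[{'name': 'gpt-3.5-turbo'}, {'name': 'gpt-4'}],
    selected_name='llama2-uncensored',
)
print(installed)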