Merged
42 commits
6f9648d
fix the lsql call
robertwhiffin Sep 10, 2024
e6c4ec3
update UI to a control panel for transforming code files in a Volume.…
robertwhiffin Sep 17, 2024
99e5034
working towards an automated file processing
robertwhiffin Sep 19, 2024
b67885a
adding job creation template in
robertwhiffin Sep 19, 2024
46e17a7
update volume creation code
robertwhiffin Sep 19, 2024
ee7ff98
update volume creation code
robertwhiffin Sep 23, 2024
a3b411b
updating file install location
robertwhiffin Sep 23, 2024
a5a2c3d
fix broken path
robertwhiffin Sep 23, 2024
d1a4527
update to singlue user cluster
robertwhiffin Sep 23, 2024
dc14fc4
add job set up, update ui, update serving cluster to UC enabled
robertwhiffin Sep 23, 2024
b5f4b6f
parameterise job id
robertwhiffin Sep 23, 2024
d9dce75
update jobs infra
robertwhiffin Sep 23, 2024
bdeb33d
update jobs infra to return job id
robertwhiffin Sep 23, 2024
06f38d1
update gradio app
robertwhiffin Sep 23, 2024
1ebb7de
update gradio app
robertwhiffin Sep 23, 2024
a188e44
update gradio app
robertwhiffin Sep 23, 2024
bdf0d49
update job to remove cache for running on serverless
robertwhiffin Sep 23, 2024
1b965d2
update job to remove cache for running on serverless
robertwhiffin Sep 24, 2024
c8d659a
black formatting
robertwhiffin Sep 24, 2024
9dc5ba3
remove reset commands from job notebooks
robertwhiffin Sep 24, 2024
96c4b55
update to write to workspace
robertwhiffin Sep 25, 2024
cd75031
update to write to workspace
robertwhiffin Sep 25, 2024
2851b5f
update to write to workspace
robertwhiffin Sep 25, 2024
9973fe9
update to write to workspace
robertwhiffin Sep 25, 2024
0e38c2e
update to write to workspace
robertwhiffin Sep 25, 2024
95fa3c6
update to write to workspace
robertwhiffin Sep 25, 2024
3d03a0b
update to write to workspace
robertwhiffin Sep 25, 2024
9057036
updating chat infra to let customer chose existing endpoint
robertwhiffin Sep 25, 2024
57a1458
update jobs
robertwhiffin Sep 25, 2024
5f38005
update question
robertwhiffin Sep 25, 2024
206c7df
update silver to gold to write notebooks
robertwhiffin Sep 26, 2024
1b63594
update silver to gold to write notebooks
robertwhiffin Sep 26, 2024
8292fe4
think it works now
robertwhiffin Sep 26, 2024
466d6fd
think it works now
robertwhiffin Sep 26, 2024
93379f5
finishing
robertwhiffin Sep 27, 2024
e213178
adding interactive mode back in
robertwhiffin Oct 1, 2024
a6e8305
get interactive notebook output working
robertwhiffin Oct 1, 2024
70a654e
get interactive notebook output working
robertwhiffin Oct 2, 2024
1c1a93e
added -q to pip installs
robertwhiffin Oct 2, 2024
ffe6cd0
updated notebook path output to be a url
robertwhiffin Oct 2, 2024
c8c7e09
remove mlflow
robertwhiffin Oct 2, 2024
948d5db
blk formatting
robertwhiffin Oct 2, 2024
1 change: 1 addition & 0 deletions sql_migration_assistant/__init__.py
@@ -9,6 +9,7 @@ def hello():
     w = WorkspaceClient(product="sql_migration_assistant", product_version="0.0.1")
     p = Prompts()
     setter_upper = SetUpMigrationAssistant()
+    setter_upper.check_cloud(w)
     final_config = setter_upper.setup_migration_assistant(w, p)
     current_path = Path(__file__).parent.resolve()
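The implementation of check_cloud is not part of this diff; it is only called here before setup runs. A minimal sketch of what such a cloud check might look like, assuming it only inspects the workspace host URL exposed by the SDK config (the actual method in this PR may work differently):

from databricks.sdk import WorkspaceClient


def check_cloud_sketch(w: WorkspaceClient) -> str:
    # Illustrative guess, not the PR's implementation: infer the cloud
    # provider from the workspace host URL on the SDK config.
    host = w.config.host or ""
    if "azuredatabricks.net" in host:
        return "azure"
    if "gcp.databricks.com" in host:
        return "gcp"
    if "cloud.databricks.com" in host:
        return "aws"
    raise ValueError(f"Could not determine cloud provider from host '{host}'")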
38 changes: 21 additions & 17 deletions sql_migration_assistant/app/llm.py
@@ -1,24 +1,15 @@
 import logging
+import gradio as gr
 
 from databricks.sdk import WorkspaceClient
 from databricks.sdk.service.serving import ChatMessage, ChatMessageRole
 
-w = WorkspaceClient()
-foundation_llm_name = "databricks-meta-llama-3-1-405b-instruct"
-max_token = 4096
-messages = [
-    ChatMessage(role=ChatMessageRole.SYSTEM, content="You are an unhelpful assistant"),
-    ChatMessage(role=ChatMessageRole.USER, content="What is RAG?"),
-]
 
 
 class LLMCalls:
-    def __init__(self, foundation_llm_name, max_tokens):
+    def __init__(self, foundation_llm_name):
         self.w = WorkspaceClient()
         self.foundation_llm_name = foundation_llm_name
-        self.max_tokens = int(max_tokens)
 
-    def call_llm(self, messages):
+    def call_llm(self, messages, max_tokens, temperature):
         """
         Function to call the LLM model and return the response.
         :param messages: list of messages like
@@ -29,8 +20,17 @@ def call_llm(self, messages):
         ]
         :return: the response from the model
         """
+
+        max_tokens = int(max_tokens)
+        temperature = float(temperature)
+        # check to make sure temperature is between 0.0 and 1.0
+        if temperature < 0.0 or temperature > 1.0:
+            raise gr.Error("Temperature must be between 0.0 and 1.0")
         response = self.w.serving_endpoints.query(
-            name=foundation_llm_name, max_tokens=max_token, messages=messages
+            name=self.foundation_llm_name,
+            max_tokens=max_tokens,
+            messages=messages,
+            temperature=temperature,
         )
         message = response.choices[0].message.content
         return message
@@ -53,14 +53,16 @@ def convert_chat_to_llm_input(self, system_prompt, chat):
 
     # this is called to actually send a request and receive response from the llm endpoint.
 
-    def llm_translate(self, system_prompt, input_code):
+    def llm_translate(self, system_prompt, input_code, max_tokens, temperature):
         messages = [
             ChatMessage(role=ChatMessageRole.SYSTEM, content=system_prompt),
             ChatMessage(role=ChatMessageRole.USER, content=input_code),
         ]
 
         # call the LLM end point.
-        llm_answer = self.call_llm(messages=messages)
+        llm_answer = self.call_llm(
+            messages=messages, max_tokens=max_tokens, temperature=temperature
+        )
         # Extract the code from in between the triple backticks (```), since LLM often prints the code like this.
         # Also removes the 'sql' prefix always added by the LLM.
         translation = llm_answer # .split("Final answer:\n")[1].replace(">>", "").replace("<<", "")
@@ -73,12 +75,14 @@ def llm_chat(self, system_prompt, query, chat_history):
         llm_answer = self.call_llm(messages=messages)
         return llm_answer
 
-    def llm_intent(self, system_prompt, input_code):
+    def llm_intent(self, system_prompt, input_code, max_tokens, temperature):
         messages = [
             ChatMessage(role=ChatMessageRole.SYSTEM, content=system_prompt),
             ChatMessage(role=ChatMessageRole.USER, content=input_code),
        ]
 
         # call the LLM end point.
-        llm_answer = self.call_llm(messages=messages)
+        llm_answer = self.call_llm(
+            messages=messages, max_tokens=max_tokens, temperature=temperature
+        )
         return llm_answer
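Taken together, these changes move max_tokens and temperature from object construction to call time, so the UI can pass per-request values. A minimal usage sketch under that reading; the import path follows the file location above, and the endpoint name, prompts, and parameter values are only placeholders:

from sql_migration_assistant.app.llm import LLMCalls

# The endpoint name below appears in the removed module-level constant; any
# chat-capable serving endpoint the assistant is configured with would do.
translator = LLMCalls(foundation_llm_name="databricks-meta-llama-3-1-405b-instruct")

translated_sql = translator.llm_translate(
    system_prompt="Translate this T-SQL to Databricks SQL.",
    input_code="SELECT TOP 10 * FROM sales;",
    max_tokens=2048,  # cast to int inside call_llm
    temperature=0.0,  # values outside 0.0-1.0 raise gr.Error
)
print(translated_sql)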