Skip to content

Version 1.2.8736.39976 with DataExtractorTool enhancements #5

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 3 commits into from
Dec 2, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Binary file modified build/GenerativeAI.zip
Binary file not shown.
2 changes: 1 addition & 1 deletion nuget/GenAI.NET.nuspec
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
<package xmlns="http://schemas.microsoft.com/packaging/2012/06/nuspec.xsd">
<metadata>
<id>GenAI.NET</id>
<version>1.2.8734.98</version>
<version>1.2.8736.39976</version>
<authors>Automation Agent</authors>
<owners>Automation Agent</owners>
<projectUrl>https://github.com/automaze1/GenAI.NET</projectUrl>
Expand Down
42 changes: 42 additions & 0 deletions src/GenAIFramework.Test/ToolsTest.cs
Original file line number Diff line number Diff line change
Expand Up @@ -474,5 +474,47 @@ public async Task SemanticSearchWithContext()
Assert.AreEqual(3, searchresults.Length);
Assert.IsTrue(searchresults.Any(r => r.content.Contains("three quarter")));
}

[TestMethod]
public async Task DataExtractorPipeline()
{
    Logger.WriteLog(LogLevel.Info, LogOps.Test, "DataExtractorPipeline");

    // Text extractor pulls raw text out of the input document.
    var textExtractor = TextExtractorTool.Create();
    Assert.IsNotNull(textExtractor);

    // Parameter name -> natural-language question that DataExtractorTool
    // should answer from the extracted text.
    var parameters = new Dictionary<string, string>()
    {
        { "Revenue Growth", "What is the revenue growth in dollars in Q1 2023?" },
        { "Revenue Growth Percent", "What is the revenue growth in percentage in Q1 2023?" },
        { "Revenue Forecast", "What is the revenue forecast for Q2 2023?" },
        { "Net Income", "What is the net income in dollars in Q1 2023?" },
        { "Stock Price Change", "What is the change in stock price after the quarterly results of Q1?" },
        { "Market Sentiment", "Based on market response, what is the market sentiment (positive or negative) after the results?" },
        { "Challenges", "What are the key challenges mentioned in this report?" },
        { "Initiatives", "What key initiatives to mitigate the challenges are mentioned in this report?" },
    };

    // Uses the default language model from configuration; a specific model
    // could be injected with WithLanguageModel(...) if needed.
    var dataExtractor = DataExtractorTool.Create()
        .WithParameters(parameters);

    Assert.IsNotNull(dataExtractor);

    // Pipeline: extract text from the file, then extract the parameters from that text.
    var pipeline = Pipeline.WithTools(new IFunctionTool[] { textExtractor, dataExtractor });
    Assert.IsNotNull(pipeline);

    var file = Path.Combine(RootPath, @"..\..\..\..\..\tests\input\article.txt");
    var context = new ExecutionContext();
    context["input"] = file;

    var result = await pipeline.ExecuteAsync(context);
    Assert.IsFalse(string.IsNullOrEmpty(result));

    // Expect one extracted value per requested parameter.
    var results = FunctionTool.Deserialize<Dictionary<string, string>>(result);
    Assert.AreEqual(parameters.Count, results.Count);
}
}
}
89 changes: 86 additions & 3 deletions src/GenerativeAI/Configuration.cs
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,36 @@

namespace Automation.GenerativeAI
{
/// <summary>
/// Implements the configuration for OpenAI/AzureOpenAI
/// </summary>
internal class OpenAIConfig
{
/// <summary>
/// Endpoint URL which contains resource name for azure
/// </summary>
public string EndPointUrl { get; set; }

/// <summary>
/// Deployment ID for GPT Model in Azure
/// </summary>
public string GPTDeployment { get; set; }

/// <summary>
/// Deployment ID for Text Embedding model in Azure
/// </summary>
public string EmbeddingDeployment { get; set; }

/// <summary>
/// ApiVersion, applicable to Azure
/// </summary>
public string ApiVersion { get; set; }
private string apiKey;

private string apiKey; //stores api key

/// <summary>
/// API Key
/// </summary>
public string ApiKey
{
get
Expand All @@ -34,7 +57,20 @@ public string ApiKey
apiKey = value;
}
}
public string Model { get; set; }

/// <summary>
/// GPT Model Name
/// </summary>
public string Model { get; set; } = "gpt-3.5-turbo"; //default model for OpenAI

/// <summary>
/// Provides token limit for the given model
/// </summary>
public int TokenLimit { get; set; } = 4000; //Default is 4K model

/// <summary>
/// Gets full URL for the Text Embedding API
/// </summary>
public string EmbeddingUrl
{
get
Expand All @@ -49,6 +85,9 @@ public string EmbeddingUrl
}
}

/// <summary>
/// Gets full URL for the chat completion API
/// </summary>
public string CompletionsUrl
{
get
Expand All @@ -63,6 +102,9 @@ public string CompletionsUrl
}
}

/// <summary>
/// Checks if it is Azure config
/// </summary>
public bool AzureConfig
{
get
Expand All @@ -76,11 +118,21 @@ public bool AzureConfig
}
}

/// <summary>
/// Implements config for Bing Search API
/// </summary>
internal class BingAPIConfig
{
/// <summary>
/// Bing API endpoint url
/// </summary>
public string EndPointUrl { get; set; } = "https://api.bing.microsoft.com/v7.0/";

private string apiKey;
private string apiKey; //stores api key

/// <summary>
/// Api Key
/// </summary>
public string ApiKey
{
get
Expand All @@ -98,11 +150,20 @@ public string ApiKey
}
}

/// <summary>
/// Implements a configuration
/// </summary>
internal class Configuration
{
/// <summary>
/// Default constructor
/// </summary>
public Configuration() { }
private static Configuration instance = null;

/// <summary>
/// Global instance of the configuration
/// </summary>
public static Configuration Instance
{
get
Expand All @@ -117,12 +178,25 @@ public static Configuration Instance
}
}

/// <summary>
/// Configuration for OpenAI or AzureOpenAI APIs
/// </summary>
public OpenAIConfig OpenAIConfig { get; set; } = new OpenAIConfig();

/// <summary>
/// Configuration for Bing API
/// </summary>
public BingAPIConfig BingAPIConfig { get; set; } = new BingAPIConfig();

/// <summary>
/// Log file path
/// </summary>
public string LogFile { get; set; }

/// <summary>
/// Gets executing DLL Path
/// </summary>
/// <returns></returns>
internal static string GetDLLPath()
{
var asm = Assembly.GetExecutingAssembly();
Expand All @@ -133,6 +207,10 @@ internal static string GetDLLPath()
return path;
}

/// <summary>
/// Saves the config as json to a given file
/// </summary>
/// <param name="filePath">Full file path to save the config as json</param>
public void Save(string filePath)
{
var jsonfile = Path.ChangeExtension(filePath, "json");
Expand All @@ -141,6 +219,11 @@ public void Save(string filePath)
File.WriteAllText(jsonfile, jsontxt);
}

/// <summary>
/// Loads the configuration from the given json file
/// </summary>
/// <param name="filePath">Full path of config json file</param>
/// <returns>Configuration</returns>
public static Configuration Load(string filePath)
{
if(!File.Exists(filePath)) return new Configuration();
Expand Down
6 changes: 5 additions & 1 deletion src/GenerativeAI/GenerativeAI.csproj
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,11 @@
<None Include="ClassDiagram.cd" />
<None Include="packages.config" />
</ItemGroup>
<ItemGroup />
<ItemGroup>
<EmbeddedResource Include="Prompts\DataExtractorPrompt.txt">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</EmbeddedResource>
</ItemGroup>
<ItemGroup>
<EmbeddedResource Include="Prompts\MRKLPrompt.txt">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
Expand Down
15 changes: 15 additions & 0 deletions src/GenerativeAI/Interfaces/Interfaces.cs
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,21 @@ public interface ILanguageModel
/// Higher temperature will lead to more randomness. Lower temperature will be more deterministic.</param>
/// <returns>An LLMResponse response object</returns>
Task<LLMResponse> GetResponseAsync(IEnumerable<ChatMessage> messages, IEnumerable<FunctionDescriptor> functions, double temperature);

/// <summary>
/// Returns number of Prompt tokens used so far.
/// </summary>
int PromptTokensUsed { get; }

/// <summary>
/// Returns number of Completion tokens used so far.
/// </summary>
int CompletionTokensUsed { get; }

/// <summary>
/// Returns max token limit of the LLM for prompt and completion.
/// </summary>
int MaxTokenLimit { get; }
}

/// <summary>
Expand Down
6 changes: 6 additions & 0 deletions src/GenerativeAI/LLM/AzureOpenAILanguageModel.cs
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,12 @@ public AzureOpenAILanguageModel(OpenAIConfig config)

public IVectorTransformer VectorTransformer => openAIClient.VectorTransformer;

/// <summary>Number of prompt tokens used so far; delegates to the underlying OpenAI client.</summary>
public int PromptTokensUsed => openAIClient.PromptTokensUsed;

/// <summary>Number of completion tokens used so far; delegates to the underlying OpenAI client.</summary>
public int CompletionTokensUsed => openAIClient.CompletionTokensUsed;

/// <summary>Maximum token limit of the configured model; delegates to the underlying OpenAI client.</summary>
public int MaxTokenLimit => openAIClient.MaxTokenLimit;

public Task<LLMResponse> GetResponseAsync(IEnumerable<ChatMessage> messages, double temperature)
{
return openAIClient.GetResponseAsync(messages, temperature);
Expand Down
6 changes: 6 additions & 0 deletions src/GenerativeAI/LLM/MockLanguageModel.cs
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,12 @@ public MockLanguageModel(string model, Dictionary<string, string> responses)

public IVectorTransformer VectorTransformer => client.VectorTransformer;

/// <summary>Token usage is not tracked by the mock; always throws <see cref="NotImplementedException"/>.</summary>
public int PromptTokensUsed => throw new NotImplementedException();

/// <summary>Token usage is not tracked by the mock; always throws <see cref="NotImplementedException"/>.</summary>
public int CompletionTokensUsed => throw new NotImplementedException();

/// <summary>Fixed token limit reported by the mock.</summary>
public int MaxTokenLimit => 4000; //default is 4K

/// <summary>
/// Gets the response for given list of chat messages
/// </summary>
Expand Down
20 changes: 18 additions & 2 deletions src/GenerativeAI/LLM/OpenAIClient.cs
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using System.Web.Script.Serialization;

Expand All @@ -17,10 +18,16 @@ internal class OpenAIClient : ILanguageModel
/// </summary>
public string ModelName => config.Model;

public int VectorLength => throw new NotImplementedException();

public IVectorTransformer VectorTransformer => transformer;

private int promptTokens = 0;
public int PromptTokensUsed => promptTokens;

private int completionTokens = 0;
public int CompletionTokensUsed => completionTokens;

public int MaxTokenLimit => config.TokenLimit;

private readonly IVectorTransformer transformer;
private readonly OpenAIConfig config;
private readonly HttpTool httpTool;
Expand Down Expand Up @@ -126,8 +133,17 @@ public async Task<LLMResponse> GetResponseAsync(IEnumerable<ChatMessage> message
string json = await httpTool.PostAsync(config.CompletionsUrl, jsonPayload);

var response = serializer.Deserialize<ChatResponse>(json);

//update usage tokens
Interlocked.Add(ref promptTokens, response.usage.prompt_tokens);
Interlocked.Add(ref completionTokens, response.usage.completion_tokens);

var llmResponse = ToLLMResponse(response);

//Log info
Logger.WriteLog(LogLevel.Info, LogOps.Response, llmResponse.Response);
Logger.WriteLog(LogLevel.Info, LogOps.Response, $"Prompts Tokens: {response.usage.prompt_tokens}, Completion Tokens: {response.usage.completion_tokens}");

return llmResponse;
}
catch (Exception ex)
Expand Down
22 changes: 16 additions & 6 deletions src/GenerativeAI/LLM/OpenAILanguageModel.cs
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,14 @@ internal class ChatResponse
public string id { get; set; }
public int created { get; set; }
public Choice[] choices { get; set; }
public Usage usage { get; set; }
}

/// <summary>
/// Token usage statistics returned by the chat completion API response.
/// Property names intentionally match the JSON field names of the API
/// payload so the serializer can bind them directly — do not rename.
/// </summary>
internal class Usage
{
/// <summary>Number of tokens consumed by the prompt.</summary>
public int prompt_tokens { get; set; }
/// <summary>Number of tokens generated in the completion.</summary>
public int completion_tokens { get; set; }
/// <summary>Total tokens for the request (prompt + completion).</summary>
public int total_tokens { get; set; }
}

/// <summary>
Expand All @@ -47,18 +55,20 @@ public class OpenAILanguageModel : ILanguageModel
/// to get the API key using OPENAI_API_KEY environment variable.</param>
public OpenAILanguageModel(string model, string apikey = "")
{
if(string.IsNullOrEmpty(apikey))
{
apikey = Configuration.Instance.OpenAIConfig.ApiKey;
}

openAIClient = new OpenAIClient(new OpenAIConfig() { ApiKey = apikey, Model = model });
var config = new OpenAIConfig() { ApiKey = apikey, Model = model };
openAIClient = new OpenAIClient(config);
}

public string ModelName => openAIClient.ModelName;

public IVectorTransformer VectorTransformer => openAIClient.VectorTransformer;

public int PromptTokensUsed => openAIClient.PromptTokensUsed;

public int CompletionTokensUsed => openAIClient.CompletionTokensUsed;

public int MaxTokenLimit => openAIClient.MaxTokenLimit;

public Task<LLMResponse> GetResponseAsync(IEnumerable<ChatMessage> messages, double temperature)
{
return openAIClient.GetResponseAsync(messages, temperature);
Expand Down
7 changes: 7 additions & 0 deletions src/GenerativeAI/Prompts/DataExtractorPrompt.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
Extract arguments and values from the CONTEXT below based only on the function specification provided;
do not include extra parameters, and always return your answers in a valid JSON format.

CONTEXT:
Today's Date: {{$Today}}

{{$text}}
2 changes: 1 addition & 1 deletion src/GenerativeAI/Stores/MemoryStore.cs
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ private static int GetMessageLength(ChatMessage message)
{
if(message == null) return 0;
if(message.content !=null) return message.content.Length;
return 0;
return 100; //assume 100 characters for function call messages
}

/// <summary>
Expand Down
Loading