@@ -99,6 +99,24 @@
</BitStack>
}

@if (followUpSuggestions.Any())
{
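@* Quick-reply buttons for the follow-up suggestions streamed back after each assistant response; clicking one sends it as the next prompt. *@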
<BitStack Alignment="BitAlignment.Center" FitHeight FillContent Class="default-prompt-container">

@foreach (var suggestion in followUpSuggestions)
{
<BitButton FixedColor
Variant="BitVariant.Outline"
Class="default-prompt-button"
Color="BitColor.SecondaryBackground"
OnClick="() => SendPromptMessage(suggestion)">
@suggestion
</BitButton>
}

</BitStack>
}

<BitStack FitHeight Style="position:relative">
<BitTextField Rows="1"
Immediate
@@ -24,6 +24,7 @@ public partial class AppAiChatPanel
private Channel<string>? channel;
private AiChatMessage? lastAssistantMessage;
private List<AiChatMessage> chatMessages = []; // TODO: Persist these values in client-side storage to retain them across app restarts.
private List<string> followUpSuggestions = [];
//#if(module == "Sales")
private Action unsubSearchProducts = default!;
//#endif
@@ -92,7 +93,9 @@ private async Task HubConnection_Reconnected(string? _)

private async Task SendPromptMessage(string message)
{
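// Clear the current suggestions as soon as one is chosen so they do not linger while the next response streams in.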
followUpSuggestions = [];
userInput = message;
StateHasChanged();
await SendMessage();
}

@@ -128,6 +131,7 @@ private void SetDefaultValues()
{
isLoading = false;
responseCounter = 0;
followUpSuggestions = [];
lastAssistantMessage = new() { Role = AiChatMessageRole.Assistant };
chatMessages = [
new()
@@ -167,25 +171,32 @@ private async Task StartChannel()
{
int expectedResponsesCount = chatMessages.Count(c => c.Role is AiChatMessageRole.User);

- if (response is SharedChatProcessMessages.MESSAGE_RPOCESS_SUCESS)
+ if (response.Contains(nameof(AiChatFollowUpList.FollowUpSuggestions)))
{
- responseCounter++;
- isLoading = false;
+ followUpSuggestions = JsonSerializer.Deserialize<AiChatFollowUpList>(response)?.FollowUpSuggestions ?? [];
}
- else if (response is SharedChatProcessMessages.MESSAGE_RPOCESS_ERROR)
+ else
{
- responseCounter++;
- if (responseCounter == expectedResponsesCount)
+ if (response is SharedChatProcessMessages.MESSAGE_RPOCESS_SUCESS)
{
- isLoading = false; // Hide loading only if this is an error for the last user's message.
+ responseCounter++;
+ isLoading = false;
}
- chatMessages[responseCounter * 2].Successful = false;
- }
- else
- {
- if ((responseCounter + 1) == expectedResponsesCount)
+ else if (response is SharedChatProcessMessages.MESSAGE_RPOCESS_ERROR)
+ {
+ responseCounter++;
+ if (responseCounter == expectedResponsesCount)
+ {
+ isLoading = false; // Hide loading only if this is an error for the last user's message.
+ }
+ chatMessages[responseCounter * 2].Successful = false;
+ }
+ else
{
- lastAssistantMessage!.Content += response;
+ if ((responseCounter + 1) == expectedResponsesCount)
+ {
+ lastAssistantMessage!.Content += response;
}
+ }
}
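The channel that streams the assistant's reply now carries three kinds of string payloads: raw response chunks, the MESSAGE_RPOCESS_SUCESS/MESSAGE_RPOCESS_ERROR sentinels, and a JSON-serialized AiChatFollowUpList. The client tells the follow-up payload apart by checking for the FollowUpSuggestions property name before deserializing it. Below is a minimal, self-contained sketch of that round trip; only AiChatFollowUpList and its property name come from this change, and the sample suggestion strings are made up.

using System;
using System.Collections.Generic;
using System.Text.Json;

// Producer side: serialize the follow-up list, much as the server-side handler does before writing it to the channel.
var payload = JsonSerializer.Serialize(new AiChatFollowUpList
{
    FollowUpSuggestions = ["Sample follow-up question A", "Sample follow-up question B"]
});

// Consumer side: detect the follow-up payload by its property name, then deserialize and render the suggestions.
if (payload.Contains(nameof(AiChatFollowUpList.FollowUpSuggestions)))
{
    var suggestions = JsonSerializer.Deserialize<AiChatFollowUpList>(payload)?.FollowUpSuggestions ?? [];
    Console.WriteLine(string.Join(Environment.NewLine, suggestions));
}

public class AiChatFollowUpList
{
    public List<string> FollowUpSuggestions { get; set; } = [];
}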

@@ -27,6 +27,7 @@ protected override async Task<HttpResponseMessage> SendAsync(HttpRequestMessage
if (response.Headers.TryGetValues("Request-Id", out var requestId))
{
requestIdValue = requestId.First();
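// Surface the server-reported Request-Id in the client-side log scope so client and server log entries can be correlated.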
logScopeData["RequestId"] = requestIdValue;
}

serverCommunicationSuccess = true;
@@ -134,7 +134,7 @@ These are the primary functional areas of the application beyond account managem
* **How to Use:**
- Navigate to the [Upgrade account page](/settings/upgradeaccount).
" +
- //#endif
+ //#endif
@"## 4. Informational Pages

### 4.1. About Page
@@ -197,13 +197,13 @@ These are the primary functional areas of the application beyond account managem
//#endif
//#if (ads == true)
@"### Handling advertisement trouble requests:
- **[[[ADS_TROUBLE_RULES_BEGIN]]]**""
+ **[[[ADS_TROUBLE_RULES_BEGIN]]]""
* **If a user asks about having trouble watching an ad (e.g., ""ad not showing"", ""ad is blocked"", ""upgrade is not happening""):**
1. *Act as technical support.*
2. **Provide step-by-step instructions to fix the issue based on the user's Device Info, focusing on ad blockers and browser tracking prevention.**
**[[[ADS_TROUBLE_RULES_END]]]**
" +
- //#endif
+ //#endif
@"- ### User Feedback and Suggestions:
- If a user provides feedback or suggests a feature, respond: ""Thank you for your feedback! It's valuable to us, and I'll pass it on to the product team."" If the feedback is unclear, ask for clarification: ""Could you please provide more details about your suggestion?""

@@ -88,7 +88,7 @@ async Task HandleIncomingMessage(string incomingMessage, CancellationToken messa
ChatOptions chatOptions = new()
{
Tools = [
- AIFunctionFactory.Create(async (string emailAddress, string conversationHistory) =>
+ AIFunctionFactory.Create(async ([Required] string emailAddress, string conversationHistory) =>
{
if (messageSpecificCancellationToken.IsCancellationRequested)
return;
@@ -102,7 +102,7 @@ async Task HandleIncomingMessage(string incomingMessage, CancellationToken messa

}, name: "SaveUserEmailAndConversationHistory", description: "Saves the user's email address and the conversation history for future reference. Use this tool when the user provides their email address during the conversation. Parameters: emailAddress (string), conversationHistory (string)"),
//#if (module == "Sales")
- AIFunctionFactory.Create(async ([Description("Concise summary of these user requirements")] string userNeeds,
+ AIFunctionFactory.Create(async ([Required, Description("Concise summary of these user requirements")] string userNeeds,
[Description("Car manufacturer's name (Optional)")] string? manufacturer,
[Description("Car price below this value (Optional)")] decimal? maxPrice,
[Description("Car price above this value (Optional)")] decimal? minPrice) =>
@@ -154,6 +154,21 @@ async Task HandleIncomingMessage(string incomingMessage, CancellationToken messa
}

await channel.Writer.WriteAsync(SharedChatProcessMessages.MESSAGE_RPOCESS_SUCESS, cancellationToken);

// Generate a list of follow-up questions/suggestions to keep the conversation going.
// This list could instead be produced in the previous chat-completion call:
// 1: Using the model's "tools"/"functions" feature, which would not take the latest assistant response into account.
// 2: Returning a JSON object containing both the response and the follow-up suggestions, which would lose the IAsyncEnumerable streaming capability.
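// Request a JSON object response so the structured GetResponseAsync<AiChatFollowUpList> call below can bind the model's reply directly to the follow-up list type.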
chatOptions.ResponseFormat = ChatResponseFormat.Json;
chatOptions.AdditionalProperties = new() { ["response_format"] = new { type = "json_object" } };
var followUpItems = await chatClient.GetResponseAsync<AiChatFollowUpList>([
new(ChatRole.System, supportSystemPrompt),
new(ChatRole.User, incomingMessage),
new(ChatRole.Assistant, assistantResponse.ToString()),
new(ChatRole.User, @"Return up to 3 relevant follow-up suggestions that help users discover related topics and continue the conversation naturally based on user's query in JSON object containing string[] named FollowUpSuggestions."),],
chatOptions, cancellationToken: cancellationToken);

await channel.Writer.WriteAsync(JsonSerializer.Serialize(followUpItems.Result), cancellationToken);
}
catch (Exception exp)
{
@@ -30,3 +30,8 @@ public class AiChatMessage
[JsonIgnore]
public bool Successful { get; set; } = true;
}

public class AiChatFollowUpList
{
public List<string> FollowUpSuggestions { get; set; } = [];
}