// constructor syntax.

using System;
using System.Text.Json.Serialization;

namespace Microsoft.Extensions.AI.Evaluation.Reporting;

/// <summary>
/// A class that records details related to a particular LLM chat conversation turn involved in the execution of a
/// particular <see cref="ScenarioRun"/>.
/// </summary>
public sealed class ChatTurnDetails
{
    /// <summary>
    /// Gets or sets the duration between the time when the request was sent to the LLM and the time when the response
    /// was received for the chat conversation turn.
    /// </summary>
    public TimeSpan Latency { get; set; }

    /// <summary>
    /// Gets or sets the model that was used in the creation of the response for the chat conversation turn.
    /// </summary>
    /// <remarks>
    /// Returns <see langword="null"/> if this information was not available via <see cref="ChatResponse.ModelId"/>.
    /// </remarks>
    public string? Model { get; set; }

    /// <summary>
    /// Gets or sets the name of the provider for the model identified by <see cref="Model"/>.
    /// </summary>
    /// <remarks>
    /// Returns <see langword="null"/> if this information was not available via the
    /// <see cref="ChatClientMetadata.ProviderName"/> property for the <see cref="IChatClient"/>.
    /// </remarks>
    public string? ModelProvider { get; set; }

    /// <summary>
    /// Gets or sets usage details for the chat conversation turn (including input and output token counts).
    /// </summary>
    /// <remarks>
    /// Returns <see langword="null"/> if usage details were not available via <see cref="ChatResponse.Usage"/>.
    /// </remarks>
    public UsageDetails? Usage { get; set; }

    /// <summary>
    /// Gets or sets the cache key for the cached model response for the chat conversation turn.
    /// </summary>
    /// <remarks>
    /// Returns <see langword="null"/> if response caching was disabled.
    /// </remarks>
    public string? CacheKey { get; set; }

    /// <summary>
    /// Gets or sets a value indicating whether the model response was retrieved from the cache.
    /// </summary>
    /// <remarks>
    /// Returns <see langword="null"/> if response caching was disabled.
    /// </remarks>
    public bool? CacheHit { get; set; }

    /// <summary>
    /// Initializes a new instance of the <see cref="ChatTurnDetails"/> class.
    /// </summary>
    /// <param name="latency">
    /// The duration between the time when the request was sent to the LLM and the time when the response was received
    /// for the chat conversation turn.
    /// </param>
    /// <param name="model">
    /// The model that was used in the creation of the response for the chat conversation turn. Can be
    /// <see langword="null"/> if this information was not available via <see cref="ChatResponse.ModelId"/>.
    /// </param>
    /// <param name="usage">
    /// Usage details for the chat conversation turn (including input and output token counts). Can be
    /// <see langword="null"/> if usage details were not available via <see cref="ChatResponse.Usage"/>.
    /// </param>
    /// <param name="cacheKey">
    /// The cache key for the cached model response for the chat conversation turn if response caching was enabled;
    /// <see langword="null"/> otherwise.
    /// </param>
    /// <param name="cacheHit">
    /// <see langword="true"/> if response caching was enabled and the model response for the chat conversation turn
    /// was retrieved from the cache; <see langword="false"/> if response caching was enabled and the model response
    /// was not retrieved from the cache; <see langword="null"/> if response caching was disabled.
    /// </param>
    public ChatTurnDetails(
        TimeSpan latency,
        string? model = null,
        UsageDetails? usage = null,
        string? cacheKey = null,
        bool? cacheHit = null)
        : this(latency, model, modelProvider: null, usage, cacheKey, cacheHit)
    {
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="ChatTurnDetails"/> class.
    /// </summary>
    /// <param name="latency">
    /// The duration between the time when the request was sent to the LLM and the time when the response was received
    /// for the chat conversation turn.
    /// </param>
    /// <param name="model">
    /// The model that was used in the creation of the response for the chat conversation turn. Can be
    /// <see langword="null"/> if this information was not available via <see cref="ChatResponse.ModelId"/>.
    /// </param>
    /// <param name="modelProvider">
    /// The name of the provider for the model identified by <paramref name="model"/>. Can be
    /// <see langword="null"/> if this information was not available via the
    /// <see cref="ChatClientMetadata.ProviderName"/> property for the <see cref="IChatClient"/>.
    /// </param>
    /// <param name="usage">
    /// Usage details for the chat conversation turn (including input and output token counts). Can be
    /// <see langword="null"/> if usage details were not available via <see cref="ChatResponse.Usage"/>.
    /// </param>
    /// <param name="cacheKey">
    /// The cache key for the cached model response for the chat conversation turn if response caching was enabled;
    /// <see langword="null"/> otherwise.
    /// </param>
    /// <param name="cacheHit">
    /// <see langword="true"/> if response caching was enabled and the model response for the chat conversation turn
    /// was retrieved from the cache; <see langword="false"/> if response caching was enabled and the model response
    /// was not retrieved from the cache; <see langword="null"/> if response caching was disabled.
    /// </param>
    [JsonConstructor]
    public ChatTurnDetails(
        TimeSpan latency,
        string? model,
        string? modelProvider,
        UsageDetails? usage = null,
        string? cacheKey = null,
        bool? cacheHit = null)
    {
        Latency = latency;
        Model = model;
        ModelProvider = modelProvider;
        Usage = usage;
        CacheKey = cacheKey;
        CacheHit = cacheHit;
    }
}