From 755b1741907b687d3fbce28af82855abe318bc5a Mon Sep 17 00:00:00 2001 From: SDKAuto Date: Fri, 23 Aug 2024 04:45:27 +0000 Subject: [PATCH] CodeGen from PR 3495 in test-repo-billy/azure-rest-api-specs Merge 1fb36b1fc6fe19369e265c77a5ef2d9cb280287e into c221294c903b4bcbe7b1ee4bce901322a7056943 --- .../Azure.AI.ContentSafety.netstandard2.0.cs | 91 ++++- .../src/Generated/AnalyzeImageOptions.cs | 6 +- .../src/Generated/AnalyzeTextOptions.cs | 6 +- .../src/Generated/ContentSafetyClient.cs | 310 ++++++++++++++-- .../Generated/ContentSafetyClientOptions.cs | 5 +- .../Generated/ContentSafetyModelFactory.cs | 92 +++-- ...tProtectedMaterialOptions.Serialization.cs | 135 +++++++ .../DetectTextProtectedMaterialOptions.cs | 75 ++++ ...xtProtectedMaterialResult.Serialization.cs | 135 +++++++ .../DetectTextProtectedMaterialResult.cs | 75 ++++ .../src/Generated/Docs/BlocklistClient.xml | 10 + .../Generated/Docs/ContentSafetyClient.xml | 340 ++++++++++++++--- ...ntInjectionAnalysisResult.Serialization.cs | 135 +++++++ .../DocumentInjectionAnalysisResult.cs | 72 ++++ .../src/Generated/ImageCategory.cs | 10 +- .../ShieldPromptOptions.Serialization.cs | 163 ++++++++ .../src/Generated/ShieldPromptOptions.cs | 70 ++++ .../ShieldPromptResult.Serialization.cs | 167 +++++++++ .../src/Generated/ShieldPromptResult.cs | 70 ++++ .../TextBlocklistItem.Serialization.cs | 17 +- .../src/Generated/TextBlocklistItem.cs | 12 +- .../src/Generated/TextCategory.cs | 10 +- ...tedMaterialAnalysisResult.Serialization.cs | 135 +++++++ .../TextProtectedMaterialAnalysisResult.cs | 72 ++++ ...ptInjectionAnalysisResult.Serialization.cs | 135 +++++++ .../UserPromptInjectionAnalysisResult.cs | 72 ++++ .../Samples/Samples_BlocklistClient.cs | 10 + .../Samples/Samples_ContentSafetyClient.cs | 348 +++++++++++++++--- .../Azure.AI.ContentSafety/tsp-location.yaml | 7 +- 29 files changed, 2601 insertions(+), 184 deletions(-) create mode 100644 sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/DetectTextProtectedMaterialOptions.Serialization.cs create mode 100644 sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/DetectTextProtectedMaterialOptions.cs create mode 100644 sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/DetectTextProtectedMaterialResult.Serialization.cs create mode 100644 sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/DetectTextProtectedMaterialResult.cs create mode 100644 sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/DocumentInjectionAnalysisResult.Serialization.cs create mode 100644 sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/DocumentInjectionAnalysisResult.cs create mode 100644 sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ShieldPromptOptions.Serialization.cs create mode 100644 sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ShieldPromptOptions.cs create mode 100644 sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ShieldPromptResult.Serialization.cs create mode 100644 sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ShieldPromptResult.cs create mode 100644 sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/TextProtectedMaterialAnalysisResult.Serialization.cs create mode 100644 sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/TextProtectedMaterialAnalysisResult.cs create mode 100644 sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/UserPromptInjectionAnalysisResult.Serialization.cs create mode 100644 sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/UserPromptInjectionAnalysisResult.cs diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/api/Azure.AI.ContentSafety.netstandard2.0.cs b/sdk/contentsafety/Azure.AI.ContentSafety/api/Azure.AI.ContentSafety.netstandard2.0.cs index 66828782b620..7445292add84 100644 --- a/sdk/contentsafety/Azure.AI.ContentSafety/api/Azure.AI.ContentSafety.netstandard2.0.cs +++ b/sdk/contentsafety/Azure.AI.ContentSafety/api/Azure.AI.ContentSafety.netstandard2.0.cs @@ -161,13 +161,22 @@ public ContentSafetyClient(System.Uri endpoint, Azure.Core.TokenCredential crede public virtual System.Threading.Tasks.Task> AnalyzeTextAsync(Azure.AI.ContentSafety.AnalyzeTextOptions options, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } public virtual System.Threading.Tasks.Task AnalyzeTextAsync(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; } public virtual System.Threading.Tasks.Task> AnalyzeTextAsync(string text, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response DetectTextProtectedMaterial(Azure.AI.ContentSafety.DetectTextProtectedMaterialOptions options, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response DetectTextProtectedMaterial(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; } + public virtual System.Threading.Tasks.Task> DetectTextProtectedMaterialAsync(Azure.AI.ContentSafety.DetectTextProtectedMaterialOptions options, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task DetectTextProtectedMaterialAsync(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; } + public virtual Azure.Response ShieldPrompt(Azure.AI.ContentSafety.ShieldPromptOptions options, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response ShieldPrompt(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; } + public virtual System.Threading.Tasks.Task> ShieldPromptAsync(Azure.AI.ContentSafety.ShieldPromptOptions options, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task ShieldPromptAsync(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; } } public partial class ContentSafetyClientOptions : Azure.Core.ClientOptions { - public ContentSafetyClientOptions(Azure.AI.ContentSafety.ContentSafetyClientOptions.ServiceVersion version = Azure.AI.ContentSafety.ContentSafetyClientOptions.ServiceVersion.V2023_10_01) { } + public ContentSafetyClientOptions(Azure.AI.ContentSafety.ContentSafetyClientOptions.ServiceVersion version = Azure.AI.ContentSafety.ContentSafetyClientOptions.ServiceVersion.V2024_09_01) { } public enum ServiceVersion { V2023_10_01 = 1, + V2024_09_01 = 2, } } public partial class ContentSafetyImageData : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel @@ -188,11 +197,46 @@ public static partial class ContentSafetyModelFactory public static Azure.AI.ContentSafety.AnalyzeImageResult AnalyzeImageResult(System.Collections.Generic.IEnumerable categoriesAnalysis = null) { throw null; } public static Azure.AI.ContentSafety.AnalyzeTextOptions AnalyzeTextOptions(string text = null, System.Collections.Generic.IEnumerable categories = null, System.Collections.Generic.IEnumerable blocklistNames = null, bool? haltOnBlocklistHit = default(bool?), Azure.AI.ContentSafety.AnalyzeTextOutputType? outputType = default(Azure.AI.ContentSafety.AnalyzeTextOutputType?)) { throw null; } public static Azure.AI.ContentSafety.AnalyzeTextResult AnalyzeTextResult(System.Collections.Generic.IEnumerable blocklistsMatch = null, System.Collections.Generic.IEnumerable categoriesAnalysis = null) { throw null; } + public static Azure.AI.ContentSafety.DetectTextProtectedMaterialResult DetectTextProtectedMaterialResult(Azure.AI.ContentSafety.TextProtectedMaterialAnalysisResult protectedMaterialAnalysis = null) { throw null; } + public static Azure.AI.ContentSafety.DocumentInjectionAnalysisResult DocumentInjectionAnalysisResult(bool attackDetected = false) { throw null; } public static Azure.AI.ContentSafety.ImageCategoriesAnalysis ImageCategoriesAnalysis(Azure.AI.ContentSafety.ImageCategory category = default(Azure.AI.ContentSafety.ImageCategory), int? severity = default(int?)) { throw null; } + public static Azure.AI.ContentSafety.ShieldPromptResult ShieldPromptResult(Azure.AI.ContentSafety.UserPromptInjectionAnalysisResult userPromptAnalysis = null, System.Collections.Generic.IEnumerable documentsAnalysis = null) { throw null; } public static Azure.AI.ContentSafety.TextBlocklist TextBlocklist(string name = null, string description = null) { throw null; } - public static Azure.AI.ContentSafety.TextBlocklistItem TextBlocklistItem(string blocklistItemId = null, string description = null, string text = null) { throw null; } + public static Azure.AI.ContentSafety.TextBlocklistItem TextBlocklistItem(string blocklistItemId = null, string description = null, string text = null, bool? isRegex = default(bool?)) { throw null; } public static Azure.AI.ContentSafety.TextBlocklistMatch TextBlocklistMatch(string blocklistName = null, string blocklistItemId = null, string blocklistItemText = null) { throw null; } public static Azure.AI.ContentSafety.TextCategoriesAnalysis TextCategoriesAnalysis(Azure.AI.ContentSafety.TextCategory category = default(Azure.AI.ContentSafety.TextCategory), int? severity = default(int?)) { throw null; } + public static Azure.AI.ContentSafety.TextProtectedMaterialAnalysisResult TextProtectedMaterialAnalysisResult(bool detected = false) { throw null; } + public static Azure.AI.ContentSafety.UserPromptInjectionAnalysisResult UserPromptInjectionAnalysisResult(bool attackDetected = false) { throw null; } + } + public partial class DetectTextProtectedMaterialOptions : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public DetectTextProtectedMaterialOptions(string text) { } + public string Text { get { throw null; } } + Azure.AI.ContentSafety.DetectTextProtectedMaterialOptions System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentSafety.DetectTextProtectedMaterialOptions System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DetectTextProtectedMaterialResult : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DetectTextProtectedMaterialResult() { } + public Azure.AI.ContentSafety.TextProtectedMaterialAnalysisResult ProtectedMaterialAnalysis { get { throw null; } } + Azure.AI.ContentSafety.DetectTextProtectedMaterialResult System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentSafety.DetectTextProtectedMaterialResult System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class DocumentInjectionAnalysisResult : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal DocumentInjectionAnalysisResult() { } + public bool AttackDetected { get { throw null; } } + Azure.AI.ContentSafety.DocumentInjectionAnalysisResult System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentSafety.DocumentInjectionAnalysisResult System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } public partial class ImageCategoriesAnalysis : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { @@ -235,6 +279,28 @@ public RemoveTextBlocklistItemsOptions(System.Collections.Generic.IEnumerable.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } } + public partial class ShieldPromptOptions : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + public ShieldPromptOptions() { } + public System.Collections.Generic.IList Documents { get { throw null; } } + public string UserPrompt { get { throw null; } set { } } + Azure.AI.ContentSafety.ShieldPromptOptions System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentSafety.ShieldPromptOptions System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class ShieldPromptResult : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal ShieldPromptResult() { } + public System.Collections.Generic.IReadOnlyList DocumentsAnalysis { get { throw null; } } + public Azure.AI.ContentSafety.UserPromptInjectionAnalysisResult UserPromptAnalysis { get { throw null; } } + Azure.AI.ContentSafety.ShieldPromptResult System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentSafety.ShieldPromptResult System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } public partial class TextBlocklist : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { internal TextBlocklist() { } @@ -251,6 +317,7 @@ public partial class TextBlocklistItem : System.ClientModel.Primitives.IJsonMode public TextBlocklistItem(string text) { } public string BlocklistItemId { get { throw null; } } public string Description { get { throw null; } set { } } + public bool? IsRegex { get { throw null; } set { } } public string Text { get { throw null; } set { } } Azure.AI.ContentSafety.TextBlocklistItem System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } @@ -301,6 +368,26 @@ internal TextCategoriesAnalysis() { } public static bool operator !=(Azure.AI.ContentSafety.TextCategory left, Azure.AI.ContentSafety.TextCategory right) { throw null; } public override string ToString() { throw null; } } + public partial class TextProtectedMaterialAnalysisResult : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal TextProtectedMaterialAnalysisResult() { } + public bool Detected { get { throw null; } } + Azure.AI.ContentSafety.TextProtectedMaterialAnalysisResult System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentSafety.TextProtectedMaterialAnalysisResult System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class UserPromptInjectionAnalysisResult : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal UserPromptInjectionAnalysisResult() { } + public bool AttackDetected { get { throw null; } } + Azure.AI.ContentSafety.UserPromptInjectionAnalysisResult System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.ContentSafety.UserPromptInjectionAnalysisResult System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } } namespace Microsoft.Extensions.Azure { diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/AnalyzeImageOptions.cs b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/AnalyzeImageOptions.cs index bf507f5616b3..f5e0a4a860eb 100644 --- a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/AnalyzeImageOptions.cs +++ b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/AnalyzeImageOptions.cs @@ -46,7 +46,7 @@ public partial class AnalyzeImageOptions private IDictionary _serializedAdditionalRawData; /// Initializes a new instance of . - /// The image needs to be analyzed. + /// The image to be analyzed. /// is null. public AnalyzeImageOptions(ContentSafetyImageData image) { @@ -57,7 +57,7 @@ public AnalyzeImageOptions(ContentSafetyImageData image) } /// Initializes a new instance of . - /// The image needs to be analyzed. + /// The image to be analyzed. /// The categories will be analyzed. If they are not assigned, a default set of analysis results for the categories will be returned. /// This refers to the type of image analysis output. If no value is assigned, the default value will be "FourSeverityLevels". /// Keeps track of any properties unknown to the library. @@ -74,7 +74,7 @@ internal AnalyzeImageOptions() { } - /// The image needs to be analyzed. + /// The image to be analyzed. public ContentSafetyImageData Image { get; } /// The categories will be analyzed. If they are not assigned, a default set of analysis results for the categories will be returned. public IList Categories { get; } diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/AnalyzeTextOptions.cs b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/AnalyzeTextOptions.cs index 2be40626cdae..b0d19ae58434 100644 --- a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/AnalyzeTextOptions.cs +++ b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/AnalyzeTextOptions.cs @@ -46,7 +46,7 @@ public partial class AnalyzeTextOptions private IDictionary _serializedAdditionalRawData; /// Initializes a new instance of . - /// The text needs to be analyzed. We support a maximum of 10k Unicode characters (Unicode code points) in the text of one request. + /// The text to be analyzed. We support a maximum of 10k Unicode characters (Unicode code points) in the text of one request. /// is null. public AnalyzeTextOptions(string text) { @@ -58,7 +58,7 @@ public AnalyzeTextOptions(string text) } /// Initializes a new instance of . - /// The text needs to be analyzed. We support a maximum of 10k Unicode characters (Unicode code points) in the text of one request. + /// The text to be analyzed. We support a maximum of 10k Unicode characters (Unicode code points) in the text of one request. /// The categories will be analyzed. If they are not assigned, a default set of analysis results for the categories will be returned. /// The names of blocklists. /// When set to true, further analyses of harmful content will not be performed in cases where blocklists are hit. When set to false, all analyses of harmful content will be performed, whether or not blocklists are hit. @@ -79,7 +79,7 @@ internal AnalyzeTextOptions() { } - /// The text needs to be analyzed. We support a maximum of 10k Unicode characters (Unicode code points) in the text of one request. + /// The text to be analyzed. We support a maximum of 10k Unicode characters (Unicode code points) in the text of one request. public string Text { get; } /// The categories will be analyzed. If they are not assigned, a default set of analysis results for the categories will be returned. public IList Categories { get; } diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ContentSafetyClient.cs b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ContentSafetyClient.cs index abe04eda4f70..aca8c22253e0 100644 --- a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ContentSafetyClient.cs +++ b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ContentSafetyClient.cs @@ -100,6 +100,116 @@ public ContentSafetyClient(Uri endpoint, TokenCredential credential, ContentSafe _apiVersion = options.Version; } + /// Analyze Image. + /// The image analysis request. + /// The cancellation token to use. + /// is null. + /// A synchronous API for the analysis of potentially harmful image content. Currently, it supports four categories: Hate, SelfHarm, Sexual, and Violence. + /// + public virtual async Task> AnalyzeImageAsync(AnalyzeImageOptions options, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(options, nameof(options)); + + using RequestContent content = options.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await AnalyzeImageAsync(content, context).ConfigureAwait(false); + return Response.FromValue(AnalyzeImageResult.FromResponse(response), response); + } + + /// Analyze Image. + /// The image analysis request. + /// The cancellation token to use. + /// is null. + /// A synchronous API for the analysis of potentially harmful image content. Currently, it supports four categories: Hate, SelfHarm, Sexual, and Violence. + /// + public virtual Response AnalyzeImage(AnalyzeImageOptions options, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(options, nameof(options)); + + using RequestContent content = options.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = AnalyzeImage(content, context); + return Response.FromValue(AnalyzeImageResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Analyze Image + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task AnalyzeImageAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("ContentSafetyClient.AnalyzeImage"); + scope.Start(); + try + { + using HttpMessage message = CreateAnalyzeImageRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Analyze Image + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response AnalyzeImage(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("ContentSafetyClient.AnalyzeImage"); + scope.Start(); + try + { + using HttpMessage message = CreateAnalyzeImageRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + /// Analyze Text. /// The text analysis request. /// The cancellation token to use. @@ -210,40 +320,40 @@ public virtual Response AnalyzeText(RequestContent content, RequestContext conte } } - /// Analyze Image. - /// The image analysis request. + /// Detect Protected Material for Text. + /// The request body to be detected, which may contain protected material. /// The cancellation token to use. /// is null. - /// A synchronous API for the analysis of potentially harmful image content. Currently, it supports four categories: Hate, SelfHarm, Sexual, and Violence. - /// - public virtual async Task> AnalyzeImageAsync(AnalyzeImageOptions options, CancellationToken cancellationToken = default) + /// A synchronous API for detecting protected material in the given text. + /// + public virtual async Task> DetectTextProtectedMaterialAsync(DetectTextProtectedMaterialOptions options, CancellationToken cancellationToken = default) { Argument.AssertNotNull(options, nameof(options)); using RequestContent content = options.ToRequestContent(); RequestContext context = FromCancellationToken(cancellationToken); - Response response = await AnalyzeImageAsync(content, context).ConfigureAwait(false); - return Response.FromValue(AnalyzeImageResult.FromResponse(response), response); + Response response = await DetectTextProtectedMaterialAsync(content, context).ConfigureAwait(false); + return Response.FromValue(DetectTextProtectedMaterialResult.FromResponse(response), response); } - /// Analyze Image. - /// The image analysis request. + /// Detect Protected Material for Text. + /// The request body to be detected, which may contain protected material. /// The cancellation token to use. /// is null. - /// A synchronous API for the analysis of potentially harmful image content. Currently, it supports four categories: Hate, SelfHarm, Sexual, and Violence. - /// - public virtual Response AnalyzeImage(AnalyzeImageOptions options, CancellationToken cancellationToken = default) + /// A synchronous API for detecting protected material in the given text. + /// + public virtual Response DetectTextProtectedMaterial(DetectTextProtectedMaterialOptions options, CancellationToken cancellationToken = default) { Argument.AssertNotNull(options, nameof(options)); using RequestContent content = options.ToRequestContent(); RequestContext context = FromCancellationToken(cancellationToken); - Response response = AnalyzeImage(content, context); - return Response.FromValue(AnalyzeImageResult.FromResponse(response), response); + Response response = DetectTextProtectedMaterial(content, context); + return Response.FromValue(DetectTextProtectedMaterialResult.FromResponse(response), response); } /// - /// [Protocol Method] Analyze Image + /// [Protocol Method] Detect Protected Material for Text /// /// /// @@ -252,7 +362,7 @@ public virtual Response AnalyzeImage(AnalyzeImageOptions opt /// /// /// - /// Please try the simpler convenience overload with strongly typed models first. + /// Please try the simpler convenience overload with strongly typed models first. /// /// /// @@ -262,16 +372,16 @@ public virtual Response AnalyzeImage(AnalyzeImageOptions opt /// is null. /// Service returned a non-success status code. /// The response returned from the service. - /// - public virtual async Task AnalyzeImageAsync(RequestContent content, RequestContext context = null) + /// + public virtual async Task DetectTextProtectedMaterialAsync(RequestContent content, RequestContext context = null) { Argument.AssertNotNull(content, nameof(content)); - using var scope = ClientDiagnostics.CreateScope("ContentSafetyClient.AnalyzeImage"); + using var scope = ClientDiagnostics.CreateScope("ContentSafetyClient.DetectTextProtectedMaterial"); scope.Start(); try { - using HttpMessage message = CreateAnalyzeImageRequest(content, context); + using HttpMessage message = CreateDetectTextProtectedMaterialRequest(content, context); return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); } catch (Exception e) @@ -282,7 +392,7 @@ public virtual async Task AnalyzeImageAsync(RequestContent content, Re } /// - /// [Protocol Method] Analyze Image + /// [Protocol Method] Detect Protected Material for Text /// /// /// @@ -291,7 +401,7 @@ public virtual async Task AnalyzeImageAsync(RequestContent content, Re /// /// /// - /// Please try the simpler convenience overload with strongly typed models first. + /// Please try the simpler convenience overload with strongly typed models first. /// /// /// @@ -301,16 +411,16 @@ public virtual async Task AnalyzeImageAsync(RequestContent content, Re /// is null. /// Service returned a non-success status code. /// The response returned from the service. - /// - public virtual Response AnalyzeImage(RequestContent content, RequestContext context = null) + /// + public virtual Response DetectTextProtectedMaterial(RequestContent content, RequestContext context = null) { Argument.AssertNotNull(content, nameof(content)); - using var scope = ClientDiagnostics.CreateScope("ContentSafetyClient.AnalyzeImage"); + using var scope = ClientDiagnostics.CreateScope("ContentSafetyClient.DetectTextProtectedMaterial"); scope.Start(); try { - using HttpMessage message = CreateAnalyzeImageRequest(content, context); + using HttpMessage message = CreateDetectTextProtectedMaterialRequest(content, context); return _pipeline.ProcessMessage(message, context); } catch (Exception e) @@ -320,6 +430,133 @@ public virtual Response AnalyzeImage(RequestContent content, RequestContext cont } } + /// Shield Prompt. + /// The request body to be detected, which may contain direct or indirect injection attacks. + /// The cancellation token to use. + /// is null. + /// A synchronous API for shielding prompt from direct and indirect injection attacks. + /// + public virtual async Task> ShieldPromptAsync(ShieldPromptOptions options, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(options, nameof(options)); + + using RequestContent content = options.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await ShieldPromptAsync(content, context).ConfigureAwait(false); + return Response.FromValue(ShieldPromptResult.FromResponse(response), response); + } + + /// Shield Prompt. + /// The request body to be detected, which may contain direct or indirect injection attacks. + /// The cancellation token to use. + /// is null. + /// A synchronous API for shielding prompt from direct and indirect injection attacks. + /// + public virtual Response ShieldPrompt(ShieldPromptOptions options, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(options, nameof(options)); + + using RequestContent content = options.ToRequestContent(); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = ShieldPrompt(content, context); + return Response.FromValue(ShieldPromptResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Shield Prompt + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task ShieldPromptAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("ContentSafetyClient.ShieldPrompt"); + scope.Start(); + try + { + using HttpMessage message = CreateShieldPromptRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Shield Prompt + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response ShieldPrompt(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("ContentSafetyClient.ShieldPrompt"); + scope.Start(); + try + { + using HttpMessage message = CreateShieldPromptRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + internal HttpMessage CreateAnalyzeImageRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/contentsafety", false); + uri.AppendPath("/image:analyze", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + internal HttpMessage CreateAnalyzeTextRequest(RequestContent content, RequestContext context) { var message = _pipeline.CreateMessage(context, ResponseClassifier200); @@ -337,7 +574,7 @@ internal HttpMessage CreateAnalyzeTextRequest(RequestContent content, RequestCon return message; } - internal HttpMessage CreateAnalyzeImageRequest(RequestContent content, RequestContext context) + internal HttpMessage CreateDetectTextProtectedMaterialRequest(RequestContent content, RequestContext context) { var message = _pipeline.CreateMessage(context, ResponseClassifier200); var request = message.Request; @@ -345,7 +582,24 @@ internal HttpMessage CreateAnalyzeImageRequest(RequestContent content, RequestCo var uri = new RawRequestUriBuilder(); uri.Reset(_endpoint); uri.AppendRaw("/contentsafety", false); - uri.AppendPath("/image:analyze", false); + uri.AppendPath("/text:detectProtectedMaterial", false); + uri.AppendQuery("api-version", _apiVersion, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateShieldPromptRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/contentsafety", false); + uri.AppendPath("/text:shieldPrompt", false); uri.AppendQuery("api-version", _apiVersion, true); request.Uri = uri; request.Headers.Add("Accept", "application/json"); diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ContentSafetyClientOptions.cs b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ContentSafetyClientOptions.cs index a827f8bbab37..e81580a74055 100644 --- a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ContentSafetyClientOptions.cs +++ b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ContentSafetyClientOptions.cs @@ -13,13 +13,15 @@ namespace Azure.AI.ContentSafety /// Client options for Azure.AI.ContentSafety library clients. public partial class ContentSafetyClientOptions : ClientOptions { - private const ServiceVersion LatestVersion = ServiceVersion.V2023_10_01; + private const ServiceVersion LatestVersion = ServiceVersion.V2024_09_01; /// The version of the service to use. public enum ServiceVersion { /// Service version "2023-10-01". V2023_10_01 = 1, + /// Service version "2024-09-01". + V2024_09_01 = 2, } internal string Version { get; } @@ -30,6 +32,7 @@ public ContentSafetyClientOptions(ServiceVersion version = LatestVersion) Version = version switch { ServiceVersion.V2023_10_01 => "2023-10-01", + ServiceVersion.V2024_09_01 => "2024-09-01", _ => throw new NotSupportedException() }; } diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ContentSafetyModelFactory.cs b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ContentSafetyModelFactory.cs index 140aed1f91c0..6af24f1d13fa 100644 --- a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ContentSafetyModelFactory.cs +++ b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ContentSafetyModelFactory.cs @@ -14,8 +14,39 @@ namespace Azure.AI.ContentSafety /// Model factory for models. public static partial class ContentSafetyModelFactory { + /// Initializes a new instance of . + /// The image to be analyzed. + /// The categories will be analyzed. If they are not assigned, a default set of analysis results for the categories will be returned. + /// This refers to the type of image analysis output. If no value is assigned, the default value will be "FourSeverityLevels". + /// A new instance for mocking. + public static AnalyzeImageOptions AnalyzeImageOptions(ContentSafetyImageData image = null, IEnumerable categories = null, AnalyzeImageOutputType? outputType = null) + { + categories ??= new List(); + + return new AnalyzeImageOptions(image, categories?.ToList(), outputType, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// Analysis result for categories. + /// A new instance for mocking. + public static AnalyzeImageResult AnalyzeImageResult(IEnumerable categoriesAnalysis = null) + { + categoriesAnalysis ??= new List(); + + return new AnalyzeImageResult(categoriesAnalysis?.ToList(), serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// The image analysis category. + /// The value increases with the severity of the input content. The value of this field is determined by the output type specified in the request. The output type could be ‘FourSeverityLevels’, and the output value can be 0, 2, 4, 6. + /// A new instance for mocking. + public static ImageCategoriesAnalysis ImageCategoriesAnalysis(ImageCategory category = default, int? severity = null) + { + return new ImageCategoriesAnalysis(category, severity, serializedAdditionalRawData: null); + } + /// Initializes a new instance of . - /// The text needs to be analyzed. We support a maximum of 10k Unicode characters (Unicode code points) in the text of one request. + /// The text to be analyzed. We support a maximum of 10k Unicode characters (Unicode code points) in the text of one request. /// The categories will be analyzed. If they are not assigned, a default set of analysis results for the categories will be returned. /// The names of blocklists. /// When set to true, further analyses of harmful content will not be performed in cases where blocklists are hit. When set to false, all analyses of harmful content will be performed, whether or not blocklists are hit. @@ -66,35 +97,47 @@ public static TextCategoriesAnalysis TextCategoriesAnalysis(TextCategory categor return new TextCategoriesAnalysis(category, severity, serializedAdditionalRawData: null); } - /// Initializes a new instance of . - /// The image needs to be analyzed. - /// The categories will be analyzed. If they are not assigned, a default set of analysis results for the categories will be returned. - /// This refers to the type of image analysis output. If no value is assigned, the default value will be "FourSeverityLevels". - /// A new instance for mocking. - public static AnalyzeImageOptions AnalyzeImageOptions(ContentSafetyImageData image = null, IEnumerable categories = null, AnalyzeImageOutputType? outputType = null) + /// Initializes a new instance of . + /// Analysis result for the given text. + /// A new instance for mocking. + public static DetectTextProtectedMaterialResult DetectTextProtectedMaterialResult(TextProtectedMaterialAnalysisResult protectedMaterialAnalysis = null) { - categories ??= new List(); + return new DetectTextProtectedMaterialResult(protectedMaterialAnalysis, serializedAdditionalRawData: null); + } - return new AnalyzeImageOptions(image, categories?.ToList(), outputType, serializedAdditionalRawData: null); + /// Initializes a new instance of . + /// Whether potential protected material is detected or not. + /// A new instance for mocking. + public static TextProtectedMaterialAnalysisResult TextProtectedMaterialAnalysisResult(bool detected = default) + { + return new TextProtectedMaterialAnalysisResult(detected, serializedAdditionalRawData: null); } - /// Initializes a new instance of . - /// Analysis result for categories. - /// A new instance for mocking. - public static AnalyzeImageResult AnalyzeImageResult(IEnumerable categoriesAnalysis = null) + /// Initializes a new instance of . + /// Direct injection attacks analysis result for the given user prompt. + /// Direct and indirect injection attacks analysis result for the given documents. + /// A new instance for mocking. + public static ShieldPromptResult ShieldPromptResult(UserPromptInjectionAnalysisResult userPromptAnalysis = null, IEnumerable documentsAnalysis = null) { - categoriesAnalysis ??= new List(); + documentsAnalysis ??= new List(); - return new AnalyzeImageResult(categoriesAnalysis?.ToList(), serializedAdditionalRawData: null); + return new ShieldPromptResult(userPromptAnalysis, documentsAnalysis?.ToList(), serializedAdditionalRawData: null); } - /// Initializes a new instance of . - /// The image analysis category. - /// The value increases with the severity of the input content. The value of this field is determined by the output type specified in the request. The output type could be ‘FourSeverityLevels’, and the output value can be 0, 2, 4, 6. - /// A new instance for mocking. - public static ImageCategoriesAnalysis ImageCategoriesAnalysis(ImageCategory category = default, int? severity = null) + /// Initializes a new instance of . + /// Whether a potential injection attack is detected or not. + /// A new instance for mocking. + public static UserPromptInjectionAnalysisResult UserPromptInjectionAnalysisResult(bool attackDetected = default) { - return new ImageCategoriesAnalysis(category, severity, serializedAdditionalRawData: null); + return new UserPromptInjectionAnalysisResult(attackDetected, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// Whether a potential injection attack is detected or not. + /// A new instance for mocking. + public static DocumentInjectionAnalysisResult DocumentInjectionAnalysisResult(bool attackDetected = default) + { + return new DocumentInjectionAnalysisResult(attackDetected, serializedAdditionalRawData: null); } /// Initializes a new instance of . @@ -109,11 +152,12 @@ public static TextBlocklist TextBlocklist(string name = null, string description /// Initializes a new instance of . /// The service will generate a BlocklistItemId, which will be a UUID. /// BlocklistItem description. - /// BlocklistItem content. + /// BlocklistItem content. The length is counted using Unicode code point. + /// An optional properties indicating whether this item is to be matched as a regular expression. /// A new instance for mocking. - public static TextBlocklistItem TextBlocklistItem(string blocklistItemId = null, string description = null, string text = null) + public static TextBlocklistItem TextBlocklistItem(string blocklistItemId = null, string description = null, string text = null, bool? isRegex = null) { - return new TextBlocklistItem(blocklistItemId, description, text, serializedAdditionalRawData: null); + return new TextBlocklistItem(blocklistItemId, description, text, isRegex, serializedAdditionalRawData: null); } /// Initializes a new instance of . diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/DetectTextProtectedMaterialOptions.Serialization.cs b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/DetectTextProtectedMaterialOptions.Serialization.cs new file mode 100644 index 000000000000..7e3516362a50 --- /dev/null +++ b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/DetectTextProtectedMaterialOptions.Serialization.cs @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.ContentSafety +{ + public partial class DetectTextProtectedMaterialOptions : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DetectTextProtectedMaterialOptions)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("text"u8); + writer.WriteStringValue(Text); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + DetectTextProtectedMaterialOptions IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DetectTextProtectedMaterialOptions)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDetectTextProtectedMaterialOptions(document.RootElement, options); + } + + internal static DetectTextProtectedMaterialOptions DeserializeDetectTextProtectedMaterialOptions(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string text = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("text"u8)) + { + text = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new DetectTextProtectedMaterialOptions(text, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(DetectTextProtectedMaterialOptions)} does not support writing '{options.Format}' format."); + } + } + + DetectTextProtectedMaterialOptions IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeDetectTextProtectedMaterialOptions(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DetectTextProtectedMaterialOptions)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static DetectTextProtectedMaterialOptions FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeDetectTextProtectedMaterialOptions(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/DetectTextProtectedMaterialOptions.cs b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/DetectTextProtectedMaterialOptions.cs new file mode 100644 index 000000000000..fee6cd9bc51b --- /dev/null +++ b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/DetectTextProtectedMaterialOptions.cs @@ -0,0 +1,75 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentSafety +{ + /// The request of detecting potential protected material present in the given text. + public partial class DetectTextProtectedMaterialOptions + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The text to be analyzed, which may contain protected material. The characters will be counted in Unicode code points. + /// is null. + public DetectTextProtectedMaterialOptions(string text) + { + Argument.AssertNotNull(text, nameof(text)); + + Text = text; + } + + /// Initializes a new instance of . + /// The text to be analyzed, which may contain protected material. The characters will be counted in Unicode code points. + /// Keeps track of any properties unknown to the library. + internal DetectTextProtectedMaterialOptions(string text, IDictionary serializedAdditionalRawData) + { + Text = text; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal DetectTextProtectedMaterialOptions() + { + } + + /// The text to be analyzed, which may contain protected material. The characters will be counted in Unicode code points. + public string Text { get; } + } +} diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/DetectTextProtectedMaterialResult.Serialization.cs b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/DetectTextProtectedMaterialResult.Serialization.cs new file mode 100644 index 000000000000..09591800e1a5 --- /dev/null +++ b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/DetectTextProtectedMaterialResult.Serialization.cs @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.ContentSafety +{ + public partial class DetectTextProtectedMaterialResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DetectTextProtectedMaterialResult)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("protectedMaterialAnalysis"u8); + writer.WriteObjectValue(ProtectedMaterialAnalysis, options); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + DetectTextProtectedMaterialResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DetectTextProtectedMaterialResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDetectTextProtectedMaterialResult(document.RootElement, options); + } + + internal static DetectTextProtectedMaterialResult DeserializeDetectTextProtectedMaterialResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + TextProtectedMaterialAnalysisResult protectedMaterialAnalysis = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("protectedMaterialAnalysis"u8)) + { + protectedMaterialAnalysis = TextProtectedMaterialAnalysisResult.DeserializeTextProtectedMaterialAnalysisResult(property.Value, options); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new DetectTextProtectedMaterialResult(protectedMaterialAnalysis, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(DetectTextProtectedMaterialResult)} does not support writing '{options.Format}' format."); + } + } + + DetectTextProtectedMaterialResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeDetectTextProtectedMaterialResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DetectTextProtectedMaterialResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static DetectTextProtectedMaterialResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeDetectTextProtectedMaterialResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/DetectTextProtectedMaterialResult.cs b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/DetectTextProtectedMaterialResult.cs new file mode 100644 index 000000000000..0d1c5aec2272 --- /dev/null +++ b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/DetectTextProtectedMaterialResult.cs @@ -0,0 +1,75 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentSafety +{ + /// The combined detection results of potential protected material. + public partial class DetectTextProtectedMaterialResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Analysis result for the given text. + /// is null. + internal DetectTextProtectedMaterialResult(TextProtectedMaterialAnalysisResult protectedMaterialAnalysis) + { + Argument.AssertNotNull(protectedMaterialAnalysis, nameof(protectedMaterialAnalysis)); + + ProtectedMaterialAnalysis = protectedMaterialAnalysis; + } + + /// Initializes a new instance of . + /// Analysis result for the given text. + /// Keeps track of any properties unknown to the library. + internal DetectTextProtectedMaterialResult(TextProtectedMaterialAnalysisResult protectedMaterialAnalysis, IDictionary serializedAdditionalRawData) + { + ProtectedMaterialAnalysis = protectedMaterialAnalysis; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal DetectTextProtectedMaterialResult() + { + } + + /// Analysis result for the given text. + public TextProtectedMaterialAnalysisResult ProtectedMaterialAnalysis { get; } + } +} diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/Docs/BlocklistClient.xml b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/Docs/BlocklistClient.xml index 2867b96ccc3b..640baebcc0bd 100644 --- a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/Docs/BlocklistClient.xml +++ b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/Docs/BlocklistClient.xml @@ -26,6 +26,7 @@ AddOrUpdateTextBlocklistItemsOptions options = new AddOrUpdateTextBlocklistItems new TextBlocklistItem("") { Description = "", + IsRegex = true, } }); Response response = await client.AddOrUpdateBlocklistItemsAsync("", options); @@ -56,6 +57,7 @@ AddOrUpdateTextBlocklistItemsOptions options = new AddOrUpdateTextBlocklistItems new TextBlocklistItem("") { Description = "", + IsRegex = true, } }); Response response = client.AddOrUpdateBlocklistItems("", options); @@ -99,6 +101,7 @@ using RequestContent content = RequestContent.Create(new { description = "", text = "", + isRegex = true, } }, }); @@ -108,6 +111,7 @@ JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; Console.WriteLine(result.GetProperty("blocklistItems")[0].GetProperty("blocklistItemId").ToString()); Console.WriteLine(result.GetProperty("blocklistItems")[0].GetProperty("description").ToString()); Console.WriteLine(result.GetProperty("blocklistItems")[0].GetProperty("text").ToString()); +Console.WriteLine(result.GetProperty("blocklistItems")[0].GetProperty("isRegex").ToString()); ]]> @@ -148,6 +152,7 @@ using RequestContent content = RequestContent.Create(new { description = "", text = "", + isRegex = true, } }, }); @@ -157,6 +162,7 @@ JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; Console.WriteLine(result.GetProperty("blocklistItems")[0].GetProperty("blocklistItemId").ToString()); Console.WriteLine(result.GetProperty("blocklistItems")[0].GetProperty("description").ToString()); Console.WriteLine(result.GetProperty("blocklistItems")[0].GetProperty("text").ToString()); +Console.WriteLine(result.GetProperty("blocklistItems")[0].GetProperty("isRegex").ToString()); ]]> @@ -429,6 +435,7 @@ JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; Console.WriteLine(result.GetProperty("blocklistItemId").ToString()); Console.WriteLine(result.GetProperty("description").ToString()); Console.WriteLine(result.GetProperty("text").ToString()); +Console.WriteLine(result.GetProperty("isRegex").ToString()); ]]> @@ -457,6 +464,7 @@ JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; Console.WriteLine(result.GetProperty("blocklistItemId").ToString()); Console.WriteLine(result.GetProperty("description").ToString()); Console.WriteLine(result.GetProperty("text").ToString()); +Console.WriteLine(result.GetProperty("isRegex").ToString()); ]]> @@ -648,6 +656,7 @@ await foreach (BinaryData item in client.GetTextBlocklistItemsAsync("", 12 Console.WriteLine(result.GetProperty("blocklistItemId").ToString()); Console.WriteLine(result.GetProperty("description").ToString()); Console.WriteLine(result.GetProperty("text").ToString()); + Console.WriteLine(result.GetProperty("isRegex").ToString()); } ]]> @@ -678,6 +687,7 @@ foreach (BinaryData item in client.GetTextBlocklistItems("", 1234, 1234, 1 Console.WriteLine(result.GetProperty("blocklistItemId").ToString()); Console.WriteLine(result.GetProperty("description").ToString()); Console.WriteLine(result.GetProperty("text").ToString()); + Console.WriteLine(result.GetProperty("isRegex").ToString()); } ]]> diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/Docs/ContentSafetyClient.xml b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/Docs/ContentSafetyClient.xml index f5ffab408d37..74841eddaeec 100644 --- a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/Docs/ContentSafetyClient.xml +++ b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/Docs/ContentSafetyClient.xml @@ -1,6 +1,142 @@ + + +This sample shows how to call AnalyzeImageAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + +AnalyzeImageOptions options = new AnalyzeImageOptions(new ContentSafetyImageData()); +Response response = await client.AnalyzeImageAsync(options); +]]> +This sample shows how to call AnalyzeImageAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + +AnalyzeImageOptions options = new AnalyzeImageOptions(new ContentSafetyImageData()) +{ + Categories = { ImageCategory.Hate }, + OutputType = AnalyzeImageOutputType.FourSeverityLevels, +}; +Response response = await client.AnalyzeImageAsync(options); +]]> + + + +This sample shows how to call AnalyzeImage. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + +AnalyzeImageOptions options = new AnalyzeImageOptions(new ContentSafetyImageData()); +Response response = client.AnalyzeImage(options); +]]> +This sample shows how to call AnalyzeImage with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + +AnalyzeImageOptions options = new AnalyzeImageOptions(new ContentSafetyImageData()) +{ + Categories = { ImageCategory.Hate }, + OutputType = AnalyzeImageOutputType.FourSeverityLevels, +}; +Response response = client.AnalyzeImage(options); +]]> + + + +This sample shows how to call AnalyzeImageAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + image = new object(), +}); +Response response = await client.AnalyzeImageAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("categoriesAnalysis")[0].GetProperty("category").ToString()); +]]> +This sample shows how to call AnalyzeImageAsync with all request content and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + image = new + { + content = new object(), + blobUrl = "http://localhost:3000", + }, + categories = new object[] + { + "Hate" + }, + outputType = "FourSeverityLevels", +}); +Response response = await client.AnalyzeImageAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("categoriesAnalysis")[0].GetProperty("category").ToString()); +Console.WriteLine(result.GetProperty("categoriesAnalysis")[0].GetProperty("severity").ToString()); +]]> + + + +This sample shows how to call AnalyzeImage and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + image = new object(), +}); +Response response = client.AnalyzeImage(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("categoriesAnalysis")[0].GetProperty("category").ToString()); +]]> +This sample shows how to call AnalyzeImage with all request content and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + image = new + { + content = new object(), + blobUrl = "http://localhost:3000", + }, + categories = new object[] + { + "Hate" + }, + outputType = "FourSeverityLevels", +}); +Response response = client.AnalyzeImage(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("categoriesAnalysis")[0].GetProperty("category").ToString()); +Console.WriteLine(result.GetProperty("categoriesAnalysis")[0].GetProperty("severity").ToString()); +]]> + This sample shows how to call AnalyzeTextAsync. @@ -149,59 +285,51 @@ Console.WriteLine(result.GetProperty("categoriesAnalysis")[0].GetProperty("categ Console.WriteLine(result.GetProperty("categoriesAnalysis")[0].GetProperty("severity").ToString()); ]]> - + -This sample shows how to call AnalyzeImageAsync. +This sample shows how to call DetectTextProtectedMaterialAsync. "); AzureKeyCredential credential = new AzureKeyCredential(""); ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); -AnalyzeImageOptions options = new AnalyzeImageOptions(new ContentSafetyImageData()); -Response response = await client.AnalyzeImageAsync(options); +DetectTextProtectedMaterialOptions options = new DetectTextProtectedMaterialOptions(""); +Response response = await client.DetectTextProtectedMaterialAsync(options); ]]> -This sample shows how to call AnalyzeImageAsync with all parameters. +This sample shows how to call DetectTextProtectedMaterialAsync with all parameters. "); AzureKeyCredential credential = new AzureKeyCredential(""); ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); -AnalyzeImageOptions options = new AnalyzeImageOptions(new ContentSafetyImageData()) -{ - Categories = { ImageCategory.Hate }, - OutputType = AnalyzeImageOutputType.FourSeverityLevels, -}; -Response response = await client.AnalyzeImageAsync(options); +DetectTextProtectedMaterialOptions options = new DetectTextProtectedMaterialOptions(""); +Response response = await client.DetectTextProtectedMaterialAsync(options); ]]> - + -This sample shows how to call AnalyzeImage. +This sample shows how to call DetectTextProtectedMaterial. "); AzureKeyCredential credential = new AzureKeyCredential(""); ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); -AnalyzeImageOptions options = new AnalyzeImageOptions(new ContentSafetyImageData()); -Response response = client.AnalyzeImage(options); +DetectTextProtectedMaterialOptions options = new DetectTextProtectedMaterialOptions(""); +Response response = client.DetectTextProtectedMaterial(options); ]]> -This sample shows how to call AnalyzeImage with all parameters. +This sample shows how to call DetectTextProtectedMaterial with all parameters. "); AzureKeyCredential credential = new AzureKeyCredential(""); ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); -AnalyzeImageOptions options = new AnalyzeImageOptions(new ContentSafetyImageData()) -{ - Categories = { ImageCategory.Hate }, - OutputType = AnalyzeImageOutputType.FourSeverityLevels, -}; -Response response = client.AnalyzeImage(options); +DetectTextProtectedMaterialOptions options = new DetectTextProtectedMaterialOptions(""); +Response response = client.DetectTextProtectedMaterial(options); ]]> - + -This sample shows how to call AnalyzeImageAsync and parse the result. +This sample shows how to call DetectTextProtectedMaterialAsync and parse the result. "); AzureKeyCredential credential = new AzureKeyCredential(""); @@ -209,14 +337,14 @@ ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); using RequestContent content = RequestContent.Create(new { - image = new object(), + text = "", }); -Response response = await client.AnalyzeImageAsync(content); +Response response = await client.DetectTextProtectedMaterialAsync(content); JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; -Console.WriteLine(result.GetProperty("categoriesAnalysis")[0].GetProperty("category").ToString()); +Console.WriteLine(result.GetProperty("protectedMaterialAnalysis").GetProperty("detected").ToString()); ]]> -This sample shows how to call AnalyzeImageAsync with all request content and parse the result. +This sample shows how to call DetectTextProtectedMaterialAsync with all request content and parse the result. "); AzureKeyCredential credential = new AzureKeyCredential(""); @@ -224,27 +352,17 @@ ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); using RequestContent content = RequestContent.Create(new { - image = new - { - content = new object(), - blobUrl = "http://localhost:3000", - }, - categories = new object[] - { - "Hate" - }, - outputType = "FourSeverityLevels", + text = "", }); -Response response = await client.AnalyzeImageAsync(content); +Response response = await client.DetectTextProtectedMaterialAsync(content); JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; -Console.WriteLine(result.GetProperty("categoriesAnalysis")[0].GetProperty("category").ToString()); -Console.WriteLine(result.GetProperty("categoriesAnalysis")[0].GetProperty("severity").ToString()); +Console.WriteLine(result.GetProperty("protectedMaterialAnalysis").GetProperty("detected").ToString()); ]]> - + -This sample shows how to call AnalyzeImage and parse the result. +This sample shows how to call DetectTextProtectedMaterial and parse the result. "); AzureKeyCredential credential = new AzureKeyCredential(""); @@ -252,14 +370,14 @@ ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); using RequestContent content = RequestContent.Create(new { - image = new object(), + text = "", }); -Response response = client.AnalyzeImage(content); +Response response = client.DetectTextProtectedMaterial(content); JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; -Console.WriteLine(result.GetProperty("categoriesAnalysis")[0].GetProperty("category").ToString()); +Console.WriteLine(result.GetProperty("protectedMaterialAnalysis").GetProperty("detected").ToString()); ]]> -This sample shows how to call AnalyzeImage with all request content and parse the result. +This sample shows how to call DetectTextProtectedMaterial with all request content and parse the result. "); AzureKeyCredential credential = new AzureKeyCredential(""); @@ -267,22 +385,132 @@ ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); using RequestContent content = RequestContent.Create(new { - image = new + text = "", +}); +Response response = client.DetectTextProtectedMaterial(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("protectedMaterialAnalysis").GetProperty("detected").ToString()); +]]> + + + +This sample shows how to call ShieldPromptAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + +ShieldPromptOptions options = new ShieldPromptOptions(); +Response response = await client.ShieldPromptAsync(options); +]]> +This sample shows how to call ShieldPromptAsync with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + +ShieldPromptOptions options = new ShieldPromptOptions +{ + UserPrompt = "", + Documents = { "" }, +}; +Response response = await client.ShieldPromptAsync(options); +]]> + + + +This sample shows how to call ShieldPrompt. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + +ShieldPromptOptions options = new ShieldPromptOptions(); +Response response = client.ShieldPrompt(options); +]]> +This sample shows how to call ShieldPrompt with all parameters. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + +ShieldPromptOptions options = new ShieldPromptOptions +{ + UserPrompt = "", + Documents = { "" }, +}; +Response response = client.ShieldPrompt(options); +]]> + + + +This sample shows how to call ShieldPromptAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = await client.ShieldPromptAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.ToString()); +]]> +This sample shows how to call ShieldPromptAsync with all request content and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + userPrompt = "", + documents = new object[] { - content = new object(), - blobUrl = "http://localhost:3000", + "" }, - categories = new object[] +}); +Response response = await client.ShieldPromptAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("userPromptAnalysis").GetProperty("attackDetected").ToString()); +Console.WriteLine(result.GetProperty("documentsAnalysis")[0].GetProperty("attackDetected").ToString()); +]]> + + + +This sample shows how to call ShieldPrompt and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new object()); +Response response = client.ShieldPrompt(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.ToString()); +]]> +This sample shows how to call ShieldPrompt with all request content and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + userPrompt = "", + documents = new object[] { - "Hate" + "" }, - outputType = "FourSeverityLevels", }); -Response response = client.AnalyzeImage(content); +Response response = client.ShieldPrompt(content); JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; -Console.WriteLine(result.GetProperty("categoriesAnalysis")[0].GetProperty("category").ToString()); -Console.WriteLine(result.GetProperty("categoriesAnalysis")[0].GetProperty("severity").ToString()); +Console.WriteLine(result.GetProperty("userPromptAnalysis").GetProperty("attackDetected").ToString()); +Console.WriteLine(result.GetProperty("documentsAnalysis")[0].GetProperty("attackDetected").ToString()); ]]> diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/DocumentInjectionAnalysisResult.Serialization.cs b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/DocumentInjectionAnalysisResult.Serialization.cs new file mode 100644 index 000000000000..86e8a42a302a --- /dev/null +++ b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/DocumentInjectionAnalysisResult.Serialization.cs @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.ContentSafety +{ + public partial class DocumentInjectionAnalysisResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentInjectionAnalysisResult)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("attackDetected"u8); + writer.WriteBooleanValue(AttackDetected); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + DocumentInjectionAnalysisResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DocumentInjectionAnalysisResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDocumentInjectionAnalysisResult(document.RootElement, options); + } + + internal static DocumentInjectionAnalysisResult DeserializeDocumentInjectionAnalysisResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + bool attackDetected = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("attackDetected"u8)) + { + attackDetected = property.Value.GetBoolean(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new DocumentInjectionAnalysisResult(attackDetected, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(DocumentInjectionAnalysisResult)} does not support writing '{options.Format}' format."); + } + } + + DocumentInjectionAnalysisResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeDocumentInjectionAnalysisResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DocumentInjectionAnalysisResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static DocumentInjectionAnalysisResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeDocumentInjectionAnalysisResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/DocumentInjectionAnalysisResult.cs b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/DocumentInjectionAnalysisResult.cs new file mode 100644 index 000000000000..7435c92c4f22 --- /dev/null +++ b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/DocumentInjectionAnalysisResult.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentSafety +{ + /// The individual analysis result of potential injection attacks in the given documents. + public partial class DocumentInjectionAnalysisResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Whether a potential injection attack is detected or not. + internal DocumentInjectionAnalysisResult(bool attackDetected) + { + AttackDetected = attackDetected; + } + + /// Initializes a new instance of . + /// Whether a potential injection attack is detected or not. + /// Keeps track of any properties unknown to the library. + internal DocumentInjectionAnalysisResult(bool attackDetected, IDictionary serializedAdditionalRawData) + { + AttackDetected = attackDetected; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal DocumentInjectionAnalysisResult() + { + } + + /// Whether a potential injection attack is detected or not. + public bool AttackDetected { get; } + } +} diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ImageCategory.cs b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ImageCategory.cs index c7954e183d2c..cf950bc683bc 100644 --- a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ImageCategory.cs +++ b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ImageCategory.cs @@ -10,7 +10,7 @@ namespace Azure.AI.ContentSafety { - /// Image analyze category. + /// The harm category supported in Image content analysis. public readonly partial struct ImageCategory : IEquatable { private readonly string _value; @@ -27,13 +27,13 @@ public ImageCategory(string value) private const string SexualValue = "Sexual"; private const string ViolenceValue = "Violence"; - /// Hate. + /// The harm category for Image - Hate. public static ImageCategory Hate { get; } = new ImageCategory(HateValue); - /// SelfHarm. + /// The harm category for Image - SelfHarm. public static ImageCategory SelfHarm { get; } = new ImageCategory(SelfHarmValue); - /// Sexual. + /// The harm category for Image - Sexual. public static ImageCategory Sexual { get; } = new ImageCategory(SexualValue); - /// Violence. + /// The harm category for Image - Violence. public static ImageCategory Violence { get; } = new ImageCategory(ViolenceValue); /// Determines if two values are the same. public static bool operator ==(ImageCategory left, ImageCategory right) => left.Equals(right); diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ShieldPromptOptions.Serialization.cs b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ShieldPromptOptions.Serialization.cs new file mode 100644 index 000000000000..82ed5e156299 --- /dev/null +++ b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ShieldPromptOptions.Serialization.cs @@ -0,0 +1,163 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.ContentSafety +{ + public partial class ShieldPromptOptions : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ShieldPromptOptions)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + if (Optional.IsDefined(UserPrompt)) + { + writer.WritePropertyName("userPrompt"u8); + writer.WriteStringValue(UserPrompt); + } + if (Optional.IsCollectionDefined(Documents)) + { + writer.WritePropertyName("documents"u8); + writer.WriteStartArray(); + foreach (var item in Documents) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + ShieldPromptOptions IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ShieldPromptOptions)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeShieldPromptOptions(document.RootElement, options); + } + + internal static ShieldPromptOptions DeserializeShieldPromptOptions(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string userPrompt = default; + IList documents = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("userPrompt"u8)) + { + userPrompt = property.Value.GetString(); + continue; + } + if (property.NameEquals("documents"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + documents = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new ShieldPromptOptions(userPrompt, documents ?? new ChangeTrackingList(), serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ShieldPromptOptions)} does not support writing '{options.Format}' format."); + } + } + + ShieldPromptOptions IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeShieldPromptOptions(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ShieldPromptOptions)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static ShieldPromptOptions FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeShieldPromptOptions(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ShieldPromptOptions.cs b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ShieldPromptOptions.cs new file mode 100644 index 000000000000..354fcf81aa2f --- /dev/null +++ b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ShieldPromptOptions.cs @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentSafety +{ + /// The request of analyzing potential direct or indirect injection attacks. + public partial class ShieldPromptOptions + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + public ShieldPromptOptions() + { + Documents = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// The user prompt to be analyzed, which may contain direct injection attacks. + /// The documents to be analyzed, which may contain direct or indirect injection attacks. + /// Keeps track of any properties unknown to the library. + internal ShieldPromptOptions(string userPrompt, IList documents, IDictionary serializedAdditionalRawData) + { + UserPrompt = userPrompt; + Documents = documents; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// The user prompt to be analyzed, which may contain direct injection attacks. + public string UserPrompt { get; set; } + /// The documents to be analyzed, which may contain direct or indirect injection attacks. + public IList Documents { get; } + } +} diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ShieldPromptResult.Serialization.cs b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ShieldPromptResult.Serialization.cs new file mode 100644 index 000000000000..75a4085defcf --- /dev/null +++ b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ShieldPromptResult.Serialization.cs @@ -0,0 +1,167 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.ContentSafety +{ + public partial class ShieldPromptResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ShieldPromptResult)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + if (Optional.IsDefined(UserPromptAnalysis)) + { + writer.WritePropertyName("userPromptAnalysis"u8); + writer.WriteObjectValue(UserPromptAnalysis, options); + } + if (Optional.IsCollectionDefined(DocumentsAnalysis)) + { + writer.WritePropertyName("documentsAnalysis"u8); + writer.WriteStartArray(); + foreach (var item in DocumentsAnalysis) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + ShieldPromptResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(ShieldPromptResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeShieldPromptResult(document.RootElement, options); + } + + internal static ShieldPromptResult DeserializeShieldPromptResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + UserPromptInjectionAnalysisResult userPromptAnalysis = default; + IReadOnlyList documentsAnalysis = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("userPromptAnalysis"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + userPromptAnalysis = UserPromptInjectionAnalysisResult.DeserializeUserPromptInjectionAnalysisResult(property.Value, options); + continue; + } + if (property.NameEquals("documentsAnalysis"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(DocumentInjectionAnalysisResult.DeserializeDocumentInjectionAnalysisResult(item, options)); + } + documentsAnalysis = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new ShieldPromptResult(userPromptAnalysis, documentsAnalysis ?? new ChangeTrackingList(), serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(ShieldPromptResult)} does not support writing '{options.Format}' format."); + } + } + + ShieldPromptResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeShieldPromptResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(ShieldPromptResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static ShieldPromptResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeShieldPromptResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ShieldPromptResult.cs b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ShieldPromptResult.cs new file mode 100644 index 000000000000..2ba3a3a8b654 --- /dev/null +++ b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/ShieldPromptResult.cs @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentSafety +{ + /// The combined analysis results of potential direct or indirect injection attacks. + public partial class ShieldPromptResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal ShieldPromptResult() + { + DocumentsAnalysis = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// Direct injection attacks analysis result for the given user prompt. + /// Direct and indirect injection attacks analysis result for the given documents. + /// Keeps track of any properties unknown to the library. + internal ShieldPromptResult(UserPromptInjectionAnalysisResult userPromptAnalysis, IReadOnlyList documentsAnalysis, IDictionary serializedAdditionalRawData) + { + UserPromptAnalysis = userPromptAnalysis; + DocumentsAnalysis = documentsAnalysis; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Direct injection attacks analysis result for the given user prompt. + public UserPromptInjectionAnalysisResult UserPromptAnalysis { get; } + /// Direct and indirect injection attacks analysis result for the given documents. + public IReadOnlyList DocumentsAnalysis { get; } + } +} diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/TextBlocklistItem.Serialization.cs b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/TextBlocklistItem.Serialization.cs index cdff1d157978..b3ca0c3ff94a 100644 --- a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/TextBlocklistItem.Serialization.cs +++ b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/TextBlocklistItem.Serialization.cs @@ -38,6 +38,11 @@ void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWrite } writer.WritePropertyName("text"u8); writer.WriteStringValue(Text); + if (Optional.IsDefined(IsRegex)) + { + writer.WritePropertyName("isRegex"u8); + writer.WriteBooleanValue(IsRegex.Value); + } if (options.Format != "W" && _serializedAdditionalRawData != null) { foreach (var item in _serializedAdditionalRawData) @@ -79,6 +84,7 @@ internal static TextBlocklistItem DeserializeTextBlocklistItem(JsonElement eleme string blocklistItemId = default; string description = default; string text = default; + bool? isRegex = default; IDictionary serializedAdditionalRawData = default; Dictionary rawDataDictionary = new Dictionary(); foreach (var property in element.EnumerateObject()) @@ -98,13 +104,22 @@ internal static TextBlocklistItem DeserializeTextBlocklistItem(JsonElement eleme text = property.Value.GetString(); continue; } + if (property.NameEquals("isRegex"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + isRegex = property.Value.GetBoolean(); + continue; + } if (options.Format != "W") { rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); } } serializedAdditionalRawData = rawDataDictionary; - return new TextBlocklistItem(blocklistItemId, description, text, serializedAdditionalRawData); + return new TextBlocklistItem(blocklistItemId, description, text, isRegex, serializedAdditionalRawData); } BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/TextBlocklistItem.cs b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/TextBlocklistItem.cs index 6a8c855e7ec1..c28fea37e4ac 100644 --- a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/TextBlocklistItem.cs +++ b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/TextBlocklistItem.cs @@ -46,7 +46,7 @@ public partial class TextBlocklistItem private IDictionary _serializedAdditionalRawData; /// Initializes a new instance of . - /// BlocklistItem content. + /// BlocklistItem content. The length is counted using Unicode code point. /// is null. public TextBlocklistItem(string text) { @@ -58,13 +58,15 @@ public TextBlocklistItem(string text) /// Initializes a new instance of . /// The service will generate a BlocklistItemId, which will be a UUID. /// BlocklistItem description. - /// BlocklistItem content. + /// BlocklistItem content. The length is counted using Unicode code point. + /// An optional properties indicating whether this item is to be matched as a regular expression. /// Keeps track of any properties unknown to the library. - internal TextBlocklistItem(string blocklistItemId, string description, string text, IDictionary serializedAdditionalRawData) + internal TextBlocklistItem(string blocklistItemId, string description, string text, bool? isRegex, IDictionary serializedAdditionalRawData) { BlocklistItemId = blocklistItemId; Description = description; Text = text; + IsRegex = isRegex; _serializedAdditionalRawData = serializedAdditionalRawData; } @@ -77,7 +79,9 @@ internal TextBlocklistItem() public string BlocklistItemId { get; } /// BlocklistItem description. public string Description { get; set; } - /// BlocklistItem content. + /// BlocklistItem content. The length is counted using Unicode code point. public string Text { get; set; } + /// An optional properties indicating whether this item is to be matched as a regular expression. + public bool? IsRegex { get; set; } } } diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/TextCategory.cs b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/TextCategory.cs index 4666aebe2bc0..f7c0346aa87a 100644 --- a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/TextCategory.cs +++ b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/TextCategory.cs @@ -10,7 +10,7 @@ namespace Azure.AI.ContentSafety { - /// Text analyze category. + /// The harm category supported in Text content analysis. public readonly partial struct TextCategory : IEquatable { private readonly string _value; @@ -27,13 +27,13 @@ public TextCategory(string value) private const string SexualValue = "Sexual"; private const string ViolenceValue = "Violence"; - /// Hate. + /// The harm category for Text - Hate. public static TextCategory Hate { get; } = new TextCategory(HateValue); - /// SelfHarm. + /// The harm category for Text - SelfHarm. public static TextCategory SelfHarm { get; } = new TextCategory(SelfHarmValue); - /// Sexual. + /// The harm category for Text - Sexual. public static TextCategory Sexual { get; } = new TextCategory(SexualValue); - /// Violence. + /// The harm category for Text - Violence. public static TextCategory Violence { get; } = new TextCategory(ViolenceValue); /// Determines if two values are the same. public static bool operator ==(TextCategory left, TextCategory right) => left.Equals(right); diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/TextProtectedMaterialAnalysisResult.Serialization.cs b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/TextProtectedMaterialAnalysisResult.Serialization.cs new file mode 100644 index 000000000000..72aa39351ce7 --- /dev/null +++ b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/TextProtectedMaterialAnalysisResult.Serialization.cs @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.ContentSafety +{ + public partial class TextProtectedMaterialAnalysisResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(TextProtectedMaterialAnalysisResult)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("detected"u8); + writer.WriteBooleanValue(Detected); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + TextProtectedMaterialAnalysisResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(TextProtectedMaterialAnalysisResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeTextProtectedMaterialAnalysisResult(document.RootElement, options); + } + + internal static TextProtectedMaterialAnalysisResult DeserializeTextProtectedMaterialAnalysisResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + bool detected = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("detected"u8)) + { + detected = property.Value.GetBoolean(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new TextProtectedMaterialAnalysisResult(detected, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(TextProtectedMaterialAnalysisResult)} does not support writing '{options.Format}' format."); + } + } + + TextProtectedMaterialAnalysisResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeTextProtectedMaterialAnalysisResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(TextProtectedMaterialAnalysisResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static TextProtectedMaterialAnalysisResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeTextProtectedMaterialAnalysisResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/TextProtectedMaterialAnalysisResult.cs b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/TextProtectedMaterialAnalysisResult.cs new file mode 100644 index 000000000000..bf29b3fa4907 --- /dev/null +++ b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/TextProtectedMaterialAnalysisResult.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentSafety +{ + /// The individual detection result of potential protected material. + public partial class TextProtectedMaterialAnalysisResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Whether potential protected material is detected or not. + internal TextProtectedMaterialAnalysisResult(bool detected) + { + Detected = detected; + } + + /// Initializes a new instance of . + /// Whether potential protected material is detected or not. + /// Keeps track of any properties unknown to the library. + internal TextProtectedMaterialAnalysisResult(bool detected, IDictionary serializedAdditionalRawData) + { + Detected = detected; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal TextProtectedMaterialAnalysisResult() + { + } + + /// Whether potential protected material is detected or not. + public bool Detected { get; } + } +} diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/UserPromptInjectionAnalysisResult.Serialization.cs b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/UserPromptInjectionAnalysisResult.Serialization.cs new file mode 100644 index 000000000000..46fe8806954d --- /dev/null +++ b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/UserPromptInjectionAnalysisResult.Serialization.cs @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.ContentSafety +{ + public partial class UserPromptInjectionAnalysisResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(UserPromptInjectionAnalysisResult)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("attackDetected"u8); + writer.WriteBooleanValue(AttackDetected); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + UserPromptInjectionAnalysisResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(UserPromptInjectionAnalysisResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeUserPromptInjectionAnalysisResult(document.RootElement, options); + } + + internal static UserPromptInjectionAnalysisResult DeserializeUserPromptInjectionAnalysisResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + bool attackDetected = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("attackDetected"u8)) + { + attackDetected = property.Value.GetBoolean(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new UserPromptInjectionAnalysisResult(attackDetected, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(UserPromptInjectionAnalysisResult)} does not support writing '{options.Format}' format."); + } + } + + UserPromptInjectionAnalysisResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeUserPromptInjectionAnalysisResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(UserPromptInjectionAnalysisResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static UserPromptInjectionAnalysisResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeUserPromptInjectionAnalysisResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/UserPromptInjectionAnalysisResult.cs b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/UserPromptInjectionAnalysisResult.cs new file mode 100644 index 000000000000..66b1188d1ede --- /dev/null +++ b/sdk/contentsafety/Azure.AI.ContentSafety/src/Generated/UserPromptInjectionAnalysisResult.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.ContentSafety +{ + /// The individual analysis result of potential injection attacks in the given user prompt. + public partial class UserPromptInjectionAnalysisResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Whether a potential injection attack is detected or not. + internal UserPromptInjectionAnalysisResult(bool attackDetected) + { + AttackDetected = attackDetected; + } + + /// Initializes a new instance of . + /// Whether a potential injection attack is detected or not. + /// Keeps track of any properties unknown to the library. + internal UserPromptInjectionAnalysisResult(bool attackDetected, IDictionary serializedAdditionalRawData) + { + AttackDetected = attackDetected; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal UserPromptInjectionAnalysisResult() + { + } + + /// Whether a potential injection attack is detected or not. + public bool AttackDetected { get; } + } +} diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/tests/Generated/Samples/Samples_BlocklistClient.cs b/sdk/contentsafety/Azure.AI.ContentSafety/tests/Generated/Samples/Samples_BlocklistClient.cs index d3db9d9fdab9..4899d7aae93a 100644 --- a/sdk/contentsafety/Azure.AI.ContentSafety/tests/Generated/Samples/Samples_BlocklistClient.cs +++ b/sdk/contentsafety/Azure.AI.ContentSafety/tests/Generated/Samples/Samples_BlocklistClient.cs @@ -112,6 +112,7 @@ public void Example_BlocklistClient_AddOrUpdateBlocklistItems_AllParameters() { description = "", text = "", +isRegex = true, } }, }); @@ -121,6 +122,7 @@ public void Example_BlocklistClient_AddOrUpdateBlocklistItems_AllParameters() Console.WriteLine(result.GetProperty("blocklistItems")[0].GetProperty("blocklistItemId").ToString()); Console.WriteLine(result.GetProperty("blocklistItems")[0].GetProperty("description").ToString()); Console.WriteLine(result.GetProperty("blocklistItems")[0].GetProperty("text").ToString()); + Console.WriteLine(result.GetProperty("blocklistItems")[0].GetProperty("isRegex").ToString()); } [Test] @@ -139,6 +141,7 @@ public async Task Example_BlocklistClient_AddOrUpdateBlocklistItems_AllParameter { description = "", text = "", +isRegex = true, } }, }); @@ -148,6 +151,7 @@ public async Task Example_BlocklistClient_AddOrUpdateBlocklistItems_AllParameter Console.WriteLine(result.GetProperty("blocklistItems")[0].GetProperty("blocklistItemId").ToString()); Console.WriteLine(result.GetProperty("blocklistItems")[0].GetProperty("description").ToString()); Console.WriteLine(result.GetProperty("blocklistItems")[0].GetProperty("text").ToString()); + Console.WriteLine(result.GetProperty("blocklistItems")[0].GetProperty("isRegex").ToString()); } [Test] @@ -163,6 +167,7 @@ public void Example_BlocklistClient_AddOrUpdateBlocklistItems_AllParameters_Conv new TextBlocklistItem("") { Description = "", +IsRegex = true, } }); Response response = client.AddOrUpdateBlocklistItems("", options); @@ -181,6 +186,7 @@ public async Task Example_BlocklistClient_AddOrUpdateBlocklistItems_AllParameter new TextBlocklistItem("") { Description = "", +IsRegex = true, } }); Response response = await client.AddOrUpdateBlocklistItemsAsync("", options); @@ -482,6 +488,7 @@ public void Example_TextBlocklistItem_GetTextBlocklistItem_AllParameters() Console.WriteLine(result.GetProperty("blocklistItemId").ToString()); Console.WriteLine(result.GetProperty("description").ToString()); Console.WriteLine(result.GetProperty("text").ToString()); + Console.WriteLine(result.GetProperty("isRegex").ToString()); } [Test] @@ -498,6 +505,7 @@ public async Task Example_TextBlocklistItem_GetTextBlocklistItem_AllParameters_A Console.WriteLine(result.GetProperty("blocklistItemId").ToString()); Console.WriteLine(result.GetProperty("description").ToString()); Console.WriteLine(result.GetProperty("text").ToString()); + Console.WriteLine(result.GetProperty("isRegex").ToString()); } [Test] @@ -722,6 +730,7 @@ public void Example_TextBlocklistItem_GetTextBlocklistItems_AllParameters() Console.WriteLine(result.GetProperty("blocklistItemId").ToString()); Console.WriteLine(result.GetProperty("description").ToString()); Console.WriteLine(result.GetProperty("text").ToString()); + Console.WriteLine(result.GetProperty("isRegex").ToString()); } } @@ -739,6 +748,7 @@ public async Task Example_TextBlocklistItem_GetTextBlocklistItems_AllParameters_ Console.WriteLine(result.GetProperty("blocklistItemId").ToString()); Console.WriteLine(result.GetProperty("description").ToString()); Console.WriteLine(result.GetProperty("text").ToString()); + Console.WriteLine(result.GetProperty("isRegex").ToString()); } } diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/tests/Generated/Samples/Samples_ContentSafetyClient.cs b/sdk/contentsafety/Azure.AI.ContentSafety/tests/Generated/Samples/Samples_ContentSafetyClient.cs index 1c44aae47ff7..c33e15fd28c3 100644 --- a/sdk/contentsafety/Azure.AI.ContentSafety/tests/Generated/Samples/Samples_ContentSafetyClient.cs +++ b/sdk/contentsafety/Azure.AI.ContentSafety/tests/Generated/Samples/Samples_ContentSafetyClient.cs @@ -16,6 +16,154 @@ namespace Azure.AI.ContentSafety.Samples { public partial class Samples_ContentSafetyClient { + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_ContentSafetyClient_AnalyzeImage_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + image = new object(), + }); + Response response = client.AnalyzeImage(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("categoriesAnalysis")[0].GetProperty("category").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_ContentSafetyClient_AnalyzeImage_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + image = new object(), + }); + Response response = await client.AnalyzeImageAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("categoriesAnalysis")[0].GetProperty("category").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_ContentSafetyClient_AnalyzeImage_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + + AnalyzeImageOptions options = new AnalyzeImageOptions(new ContentSafetyImageData()); + Response response = client.AnalyzeImage(options); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_ContentSafetyClient_AnalyzeImage_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + + AnalyzeImageOptions options = new AnalyzeImageOptions(new ContentSafetyImageData()); + Response response = await client.AnalyzeImageAsync(options); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_ContentSafetyClient_AnalyzeImage_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + image = new + { + content = new object(), + blobUrl = "http://localhost:3000", + }, + categories = new object[] + { +"Hate" + }, + outputType = "FourSeverityLevels", + }); + Response response = client.AnalyzeImage(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("categoriesAnalysis")[0].GetProperty("category").ToString()); + Console.WriteLine(result.GetProperty("categoriesAnalysis")[0].GetProperty("severity").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_ContentSafetyClient_AnalyzeImage_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + image = new + { + content = new object(), + blobUrl = "http://localhost:3000", + }, + categories = new object[] + { +"Hate" + }, + outputType = "FourSeverityLevels", + }); + Response response = await client.AnalyzeImageAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("categoriesAnalysis")[0].GetProperty("category").ToString()); + Console.WriteLine(result.GetProperty("categoriesAnalysis")[0].GetProperty("severity").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_ContentSafetyClient_AnalyzeImage_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + + AnalyzeImageOptions options = new AnalyzeImageOptions(new ContentSafetyImageData()) + { + Categories = { ImageCategory.Hate }, + OutputType = AnalyzeImageOutputType.FourSeverityLevels, + }; + Response response = client.AnalyzeImage(options); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_ContentSafetyClient_AnalyzeImage_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + + AnalyzeImageOptions options = new AnalyzeImageOptions(new ContentSafetyImageData()) + { + Categories = { ImageCategory.Hate }, + OutputType = AnalyzeImageOutputType.FourSeverityLevels, + }; + Response response = await client.AnalyzeImageAsync(options); + } + [Test] [Ignore("Only validating compilation of examples")] public void Example_ContentSafetyClient_AnalyzeText_ShortVersion() @@ -178,7 +326,7 @@ public async Task Example_ContentSafetyClient_AnalyzeText_AllParameters_Convenie [Test] [Ignore("Only validating compilation of examples")] - public void Example_ContentSafetyClient_AnalyzeImage_ShortVersion() + public void Example_ContentSafetyClient_DetectTextProtectedMaterial_ShortVersion() { Uri endpoint = new Uri(""); AzureKeyCredential credential = new AzureKeyCredential(""); @@ -186,17 +334,17 @@ public void Example_ContentSafetyClient_AnalyzeImage_ShortVersion() using RequestContent content = RequestContent.Create(new { - image = new object(), + text = "", }); - Response response = client.AnalyzeImage(content); + Response response = client.DetectTextProtectedMaterial(content); JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; - Console.WriteLine(result.GetProperty("categoriesAnalysis")[0].GetProperty("category").ToString()); + Console.WriteLine(result.GetProperty("protectedMaterialAnalysis").GetProperty("detected").ToString()); } [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_ContentSafetyClient_AnalyzeImage_ShortVersion_Async() + public async Task Example_ContentSafetyClient_DetectTextProtectedMaterial_ShortVersion_Async() { Uri endpoint = new Uri(""); AzureKeyCredential credential = new AzureKeyCredential(""); @@ -204,41 +352,41 @@ public async Task Example_ContentSafetyClient_AnalyzeImage_ShortVersion_Async() using RequestContent content = RequestContent.Create(new { - image = new object(), + text = "", }); - Response response = await client.AnalyzeImageAsync(content); + Response response = await client.DetectTextProtectedMaterialAsync(content); JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; - Console.WriteLine(result.GetProperty("categoriesAnalysis")[0].GetProperty("category").ToString()); + Console.WriteLine(result.GetProperty("protectedMaterialAnalysis").GetProperty("detected").ToString()); } [Test] [Ignore("Only validating compilation of examples")] - public void Example_ContentSafetyClient_AnalyzeImage_ShortVersion_Convenience() + public void Example_ContentSafetyClient_DetectTextProtectedMaterial_ShortVersion_Convenience() { Uri endpoint = new Uri(""); AzureKeyCredential credential = new AzureKeyCredential(""); ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); - AnalyzeImageOptions options = new AnalyzeImageOptions(new ContentSafetyImageData()); - Response response = client.AnalyzeImage(options); + DetectTextProtectedMaterialOptions options = new DetectTextProtectedMaterialOptions(""); + Response response = client.DetectTextProtectedMaterial(options); } [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_ContentSafetyClient_AnalyzeImage_ShortVersion_Convenience_Async() + public async Task Example_ContentSafetyClient_DetectTextProtectedMaterial_ShortVersion_Convenience_Async() { Uri endpoint = new Uri(""); AzureKeyCredential credential = new AzureKeyCredential(""); ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); - AnalyzeImageOptions options = new AnalyzeImageOptions(new ContentSafetyImageData()); - Response response = await client.AnalyzeImageAsync(options); + DetectTextProtectedMaterialOptions options = new DetectTextProtectedMaterialOptions(""); + Response response = await client.DetectTextProtectedMaterialAsync(options); } [Test] [Ignore("Only validating compilation of examples")] - public void Example_ContentSafetyClient_AnalyzeImage_AllParameters() + public void Example_ContentSafetyClient_DetectTextProtectedMaterial_AllParameters() { Uri endpoint = new Uri(""); AzureKeyCredential credential = new AzureKeyCredential(""); @@ -246,27 +394,136 @@ public void Example_ContentSafetyClient_AnalyzeImage_AllParameters() using RequestContent content = RequestContent.Create(new { - image = new - { - content = new object(), - blobUrl = "http://localhost:3000", - }, - categories = new object[] + text = "", + }); + Response response = client.DetectTextProtectedMaterial(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("protectedMaterialAnalysis").GetProperty("detected").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_ContentSafetyClient_DetectTextProtectedMaterial_AllParameters_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new { -"Hate" + text = "", + }); + Response response = await client.DetectTextProtectedMaterialAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("protectedMaterialAnalysis").GetProperty("detected").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_ContentSafetyClient_DetectTextProtectedMaterial_AllParameters_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + + DetectTextProtectedMaterialOptions options = new DetectTextProtectedMaterialOptions(""); + Response response = client.DetectTextProtectedMaterial(options); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_ContentSafetyClient_DetectTextProtectedMaterial_AllParameters_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + + DetectTextProtectedMaterialOptions options = new DetectTextProtectedMaterialOptions(""); + Response response = await client.DetectTextProtectedMaterialAsync(options); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_ContentSafetyClient_ShieldPrompt_ShortVersion() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = client.ShieldPrompt(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_ContentSafetyClient_ShieldPrompt_ShortVersion_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new object()); + Response response = await client.ShieldPromptAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_ContentSafetyClient_ShieldPrompt_ShortVersion_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + + ShieldPromptOptions options = new ShieldPromptOptions(); + Response response = client.ShieldPrompt(options); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_ContentSafetyClient_ShieldPrompt_ShortVersion_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + + ShieldPromptOptions options = new ShieldPromptOptions(); + Response response = await client.ShieldPromptAsync(options); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_ContentSafetyClient_ShieldPrompt_AllParameters() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + userPrompt = "", + documents = new object[] + { +"" }, - outputType = "FourSeverityLevels", }); - Response response = client.AnalyzeImage(content); + Response response = client.ShieldPrompt(content); JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; - Console.WriteLine(result.GetProperty("categoriesAnalysis")[0].GetProperty("category").ToString()); - Console.WriteLine(result.GetProperty("categoriesAnalysis")[0].GetProperty("severity").ToString()); + Console.WriteLine(result.GetProperty("userPromptAnalysis").GetProperty("attackDetected").ToString()); + Console.WriteLine(result.GetProperty("documentsAnalysis")[0].GetProperty("attackDetected").ToString()); } [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_ContentSafetyClient_AnalyzeImage_AllParameters_Async() + public async Task Example_ContentSafetyClient_ShieldPrompt_AllParameters_Async() { Uri endpoint = new Uri(""); AzureKeyCredential credential = new AzureKeyCredential(""); @@ -274,54 +531,49 @@ public async Task Example_ContentSafetyClient_AnalyzeImage_AllParameters_Async() using RequestContent content = RequestContent.Create(new { - image = new - { - content = new object(), - blobUrl = "http://localhost:3000", - }, - categories = new object[] + userPrompt = "", + documents = new object[] { -"Hate" +"" }, - outputType = "FourSeverityLevels", }); - Response response = await client.AnalyzeImageAsync(content); + Response response = await client.ShieldPromptAsync(content); JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; - Console.WriteLine(result.GetProperty("categoriesAnalysis")[0].GetProperty("category").ToString()); - Console.WriteLine(result.GetProperty("categoriesAnalysis")[0].GetProperty("severity").ToString()); + Console.WriteLine(result.GetProperty("userPromptAnalysis").GetProperty("attackDetected").ToString()); + Console.WriteLine(result.GetProperty("documentsAnalysis")[0].GetProperty("attackDetected").ToString()); } [Test] [Ignore("Only validating compilation of examples")] - public void Example_ContentSafetyClient_AnalyzeImage_AllParameters_Convenience() + public void Example_ContentSafetyClient_ShieldPrompt_AllParameters_Convenience() { Uri endpoint = new Uri(""); AzureKeyCredential credential = new AzureKeyCredential(""); ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); - AnalyzeImageOptions options = new AnalyzeImageOptions(new ContentSafetyImageData()) + ShieldPromptOptions options = new ShieldPromptOptions { - Categories = { ImageCategory.Hate }, - OutputType = AnalyzeImageOutputType.FourSeverityLevels, + UserPrompt = "", + Documents = { "" }, }; - Response response = client.AnalyzeImage(options); + Response response = client.ShieldPrompt(options); } [Test] [Ignore("Only validating compilation of examples")] - public async Task Example_ContentSafetyClient_AnalyzeImage_AllParameters_Convenience_Async() + public async Task Example_ContentSafetyClient_ShieldPrompt_AllParameters_Convenience_Async() { Uri endpoint = new Uri(""); AzureKeyCredential credential = new AzureKeyCredential(""); ContentSafetyClient client = new ContentSafetyClient(endpoint, credential); - AnalyzeImageOptions options = new AnalyzeImageOptions(new ContentSafetyImageData()) + ShieldPromptOptions options = new ShieldPromptOptions { - Categories = { ImageCategory.Hate }, - OutputType = AnalyzeImageOutputType.FourSeverityLevels, + UserPrompt = "", + Documents = { "" }, }; - Response response = await client.AnalyzeImageAsync(options); + Response response = await client.ShieldPromptAsync(options); } } } diff --git a/sdk/contentsafety/Azure.AI.ContentSafety/tsp-location.yaml b/sdk/contentsafety/Azure.AI.ContentSafety/tsp-location.yaml index 149ba8c7bad1..809fe3785dc2 100644 --- a/sdk/contentsafety/Azure.AI.ContentSafety/tsp-location.yaml +++ b/sdk/contentsafety/Azure.AI.ContentSafety/tsp-location.yaml @@ -1,5 +1,4 @@ -commit: 4429b7e6c91faeb560cfd9cdc57091565493ff7a -additionalDirectories: [] -repo: Azure/azure-rest-api-specs directory: specification/cognitiveservices/ContentSafety - +commit: de8f71108806f0a7dd366f102484b78976a4bdbb +repo: test-repo-billy/azure-rest-api-specs +additionalDirectories: