diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/CHANGELOG.md b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/CHANGELOG.md index 9ddf68cfdb8d..d00e0831ddd8 100644 --- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/CHANGELOG.md +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/CHANGELOG.md @@ -1,7 +1,10 @@ # Release History -## 1.0.0-beta.5 (Unreleased) +## 1.1.0-beta.1 (2021-10-27) +- Added device discovery and device detail request for ONVIF-enabled devices. +- Added Remote Device Adapter configuration for ingesting video in a private network. +- Added retention policy to VideoSink. ## 1.0.0-beta.4 (2021-05-24) diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/README.md b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/README.md index b668dce18e45..37fa27c74e33 100644 --- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/README.md +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/README.md @@ -1,6 +1,6 @@ # Azure Video Analyzer Edge client library for .NET -Azure Video Analyzer provides a platform to build intelligent video applications that span the edge and the cloud. The platform offers the capability to capture, record, and analyze live video along with publishing the results, video and video analytics, to Azure services in the cloud or the edge. It is designed to be an extensible platform, enabling you to connect different video analysis edge modules such as Cognitive services containers, custom edge modules built by you with open source machine learning models or custom models trained with your own data. You can then use them to analyze live video without worrying about the complexity of building and running a live video pipeline. +Azure Video Analyzer is an [Azure Applied AI Service][applied-ai-service] that provides a platform for you to build intelligent video applications that can span both edge and cloud infrastructures. The platform offers the capability to capture, record, and analyze live video along with publishing the results, video and video analytics, to Azure services at the edge or in the cloud. It is designed to be an extensible platform, enabling you to connect different video inferencing edge modules such as Cognitive Services modules or custom inferencing modules that have been trained with your own data using either open-source machine learning or [Azure Machine Learning][machine-learning]. Use the client library for Video Analyzer Edge to: @@ -18,8 +18,8 @@ This is a models-only SDK. All client operations are done using the [Microsoft A The client comes from the Azure IoT SDK. You will need to obtain an IoT device connection string in order to authenticate the Azure IoT SDK. For more information, please visit: https://github.com/Azure/azure-iot-sdk-csharp. 
```C# Snippet:Azure_VideoAnalyzerSamples_ConnectionString -String connectionString = "connectionString"; -ServiceClient serviceClient = ServiceClient.CreateFromConnectionString(connectionString); +string connectionString = System.Environment.GetEnvironmentVariable("iothub_connectionstring", EnvironmentVariableTarget.User); +var serviceClient = ServiceClient.CreateFromConnectionString(connectionString); ``` ### Install the package @@ -44,6 +44,7 @@ dotnet add package Microsoft.Azure.Devices | SDK | Video Analyzer edge module | | ------------ | -------------------------- | | 1.0.0-beta.x | 1.0 | + | 1.1.0-beta.x | 1.1 | ### Creating a pipeline topology and making requests @@ -104,10 +105,6 @@ pipelineTopologyProperties.Parameters.Add(new ParameterDeclaration("rtspUrl", Pa { Description = "rtsp Url" }); -pipelineTopologyProperties.Parameters.Add(new ParameterDeclaration("hubSinkOutputName", ParameterType.String) -{ - Description = "hub sink output" -}); ``` ### Define a Source @@ -127,7 +124,7 @@ var nodeInput = new List { new NodeInput("rtspSource") }; -pipelineTopologyProps.Sinks.Add(new IotHubMessageSink("msgSink", nodeInput, "${hubSinkOutputName}")); +pipelineTopologyProps.Sinks.Add(new VideoSink("videoSink", nodeInput, "video", "/var/lib/videoanalyzer/tmp/", "1024")); ``` ### Set the topology properties and create a topology @@ -175,7 +172,7 @@ var setPipelineTopRequest = new PipelineTopologySetRequest(pipelineTopology); var directMethod = new CloudToDeviceMethod(setPipelineTopRequest.MethodName); directMethod.SetPayloadJson(setPipelineTopRequest.GetPayloadAsJson()); -var setPipelineTopResponse = await serviceClient.InvokeDeviceMethodAsync(deviceId, moduleId, directMethod); +var setPipelineTopResponse = await _serviceClient.InvokeDeviceMethodAsync(_deviceId, _moduleId, directMethod); ``` To try different pipeline topologies with the SDK, please see the official [Samples][samples]. @@ -226,5 +223,7 @@ additional questions or comments. 
[iot-device-sdk]: https://www.nuget.org/packages/Microsoft.Azure.Devices.Client/ [iot-hub-sdk]: https://www.nuget.org/packages/Microsoft.Azure.Devices/ [github-page-issues]: https://github.com/Azure/azure-sdk-for-net/issues +[applied-ai-service]: https://azure.microsoft.com/product-categories/applied-ai-services/#services +[machine-learning]: https://azure.microsoft.com/services/machine-learning ![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-net%2Fsdk%2Fvideoanalyzer%2Fazure-media-videoanalyzer-edge%2FREADME.png) diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/api/Azure.Media.VideoAnalyzer.Edge.netstandard2.0.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/api/Azure.Media.VideoAnalyzer.Edge.netstandard2.0.cs index cbe27c9544cd..c13a7ef333c6 100644 --- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/api/Azure.Media.VideoAnalyzer.Edge.netstandard2.0.cs +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/api/Azure.Media.VideoAnalyzer.Edge.netstandard2.0.cs @@ -16,6 +16,20 @@ public partial class CredentialsBase { public CredentialsBase() { } } + public partial class DiscoveredOnvifDevice + { + public DiscoveredOnvifDevice() { } + public System.Collections.Generic.IList Endpoints { get { throw null; } } + public string RemoteIPAddress { get { throw null; } set { } } + public System.Collections.Generic.IList Scopes { get { throw null; } } + public string ServiceIdentifier { get { throw null; } set { } } + } + public partial class DiscoveredOnvifDeviceCollection + { + public DiscoveredOnvifDeviceCollection() { } + public System.Collections.Generic.IList Value { get { throw null; } } + public static Azure.Media.VideoAnalyzer.Edge.Models.DiscoveredOnvifDeviceCollection Deserialize(string json) { throw null; } + } public partial class EndpointBase { public EndpointBase(string url) { } @@ -66,6 +80,32 @@ public GrpcExtensionDataTransfer(Azure.Media.VideoAnalyzer.Edge.Models.GrpcExten public static bool operator !=(Azure.Media.VideoAnalyzer.Edge.Models.GrpcExtensionDataTransferMode left, Azure.Media.VideoAnalyzer.Edge.Models.GrpcExtensionDataTransferMode right) { throw null; } public override string ToString() { throw null; } } + public partial class H264Configuration + { + public H264Configuration() { } + public float? GovLength { get { throw null; } set { } } + public Azure.Media.VideoAnalyzer.Edge.Models.H264Profile? 
Profile { get { throw null; } set { } } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct H264Profile : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public H264Profile(string value) { throw null; } + public static Azure.Media.VideoAnalyzer.Edge.Models.H264Profile Baseline { get { throw null; } } + public static Azure.Media.VideoAnalyzer.Edge.Models.H264Profile Extended { get { throw null; } } + public static Azure.Media.VideoAnalyzer.Edge.Models.H264Profile High { get { throw null; } } + public static Azure.Media.VideoAnalyzer.Edge.Models.H264Profile Main { get { throw null; } } + public bool Equals(Azure.Media.VideoAnalyzer.Edge.Models.H264Profile other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.Media.VideoAnalyzer.Edge.Models.H264Profile left, Azure.Media.VideoAnalyzer.Edge.Models.H264Profile right) { throw null; } + public static implicit operator Azure.Media.VideoAnalyzer.Edge.Models.H264Profile (string value) { throw null; } + public static bool operator !=(Azure.Media.VideoAnalyzer.Edge.Models.H264Profile left, Azure.Media.VideoAnalyzer.Edge.Models.H264Profile right) { throw null; } + public override string ToString() { throw null; } + } public partial class HttpExtension : Azure.Media.VideoAnalyzer.Edge.Models.ExtensionProcessorBase { public HttpExtension(string name, System.Collections.Generic.IEnumerable inputs, Azure.Media.VideoAnalyzer.Edge.Models.EndpointBase endpoint, Azure.Media.VideoAnalyzer.Edge.Models.ImageProperties image) : base (default(string), default(System.Collections.Generic.IEnumerable), default(Azure.Media.VideoAnalyzer.Edge.Models.EndpointBase), default(Azure.Media.VideoAnalyzer.Edge.Models.ImageProperties)) { } @@ -157,6 +197,12 @@ public ImageScale() { } public static bool operator !=(Azure.Media.VideoAnalyzer.Edge.Models.ImageScaleMode left, Azure.Media.VideoAnalyzer.Edge.Models.ImageScaleMode right) { throw null; } public override string ToString() { throw null; } } + public partial class IotHubDeviceConnection + { + public IotHubDeviceConnection(string deviceId) { } + public Azure.Media.VideoAnalyzer.Edge.Models.CredentialsBase Credentials { get { throw null; } set { } } + public string DeviceId { get { throw null; } set { } } + } public partial class IotHubMessageSink : Azure.Media.VideoAnalyzer.Edge.Models.SinkNodeBase { public IotHubMessageSink(string name, System.Collections.Generic.IEnumerable inputs, string hubOutputName) : base (default(string), default(System.Collections.Generic.IEnumerable)) { } @@ -241,6 +287,18 @@ public LivePipelineSetRequest(Azure.Media.VideoAnalyzer.Edge.Models.LivePipeline public static bool operator !=(Azure.Media.VideoAnalyzer.Edge.Models.LivePipelineState left, Azure.Media.VideoAnalyzer.Edge.Models.LivePipelineState right) { throw null; } public override string ToString() { throw null; } } + public partial class MediaProfile + { + public MediaProfile() { } + public object MediaUri { get { throw null; } set { } } + public string Name { get { throw null; } set { } } + public Azure.Media.VideoAnalyzer.Edge.Models.VideoEncoderConfiguration 
VideoEncoderConfiguration { get { throw null; } set { } } + } + public partial class MediaUri + { + public MediaUri() { } + public string Uri { get { throw null; } set { } } + } public partial class MethodRequest { public MethodRequest() { } @@ -279,6 +337,30 @@ public partial class MotionDetectionProcessor : Azure.Media.VideoAnalyzer.Edge.M public static bool operator !=(Azure.Media.VideoAnalyzer.Edge.Models.MotionDetectionSensitivity left, Azure.Media.VideoAnalyzer.Edge.Models.MotionDetectionSensitivity right) { throw null; } public override string ToString() { throw null; } } + public partial class Mpeg4Configuration + { + public Mpeg4Configuration() { } + public float? GovLength { get { throw null; } set { } } + public Azure.Media.VideoAnalyzer.Edge.Models.Mpeg4Profile? Profile { get { throw null; } set { } } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct Mpeg4Profile : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public Mpeg4Profile(string value) { throw null; } + public static Azure.Media.VideoAnalyzer.Edge.Models.Mpeg4Profile ASP { get { throw null; } } + public static Azure.Media.VideoAnalyzer.Edge.Models.Mpeg4Profile SP { get { throw null; } } + public bool Equals(Azure.Media.VideoAnalyzer.Edge.Models.Mpeg4Profile other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.Media.VideoAnalyzer.Edge.Models.Mpeg4Profile left, Azure.Media.VideoAnalyzer.Edge.Models.Mpeg4Profile right) { throw null; } + public static implicit operator Azure.Media.VideoAnalyzer.Edge.Models.Mpeg4Profile (string value) { throw null; } + public static bool operator !=(Azure.Media.VideoAnalyzer.Edge.Models.Mpeg4Profile left, Azure.Media.VideoAnalyzer.Edge.Models.Mpeg4Profile right) { throw null; } + public override string ToString() { throw null; } + } public partial class NamedLineBase { public NamedLineBase(string name) { } @@ -331,6 +413,63 @@ public partial class ObjectTrackingProcessor : Azure.Media.VideoAnalyzer.Edge.Mo public ObjectTrackingProcessor(string name, System.Collections.Generic.IEnumerable inputs) : base (default(string), default(System.Collections.Generic.IEnumerable)) { } public Azure.Media.VideoAnalyzer.Edge.Models.ObjectTrackingAccuracy? 
Accuracy { get { throw null; } set { } } } + public partial class OnvifDevice + { + public OnvifDevice() { } + public Azure.Media.VideoAnalyzer.Edge.Models.OnvifDns Dns { get { throw null; } set { } } + public Azure.Media.VideoAnalyzer.Edge.Models.OnvifHostName Hostname { get { throw null; } set { } } + public System.Collections.Generic.IList MediaProfiles { get { throw null; } } + public Azure.Media.VideoAnalyzer.Edge.Models.OnvifSystemDateTime SystemDateTime { get { throw null; } set { } } + public static Azure.Media.VideoAnalyzer.Edge.Models.OnvifDevice Deserialize(string json) { throw null; } + } + public partial class OnvifDeviceDiscoverRequest : Azure.Media.VideoAnalyzer.Edge.Models.MethodRequest + { + public OnvifDeviceDiscoverRequest() { } + public string DiscoveryDuration { get { throw null; } set { } } + } + public partial class OnvifDeviceGetRequest : Azure.Media.VideoAnalyzer.Edge.Models.MethodRequest + { + public OnvifDeviceGetRequest(Azure.Media.VideoAnalyzer.Edge.Models.EndpointBase endpoint) { } + public Azure.Media.VideoAnalyzer.Edge.Models.EndpointBase Endpoint { get { throw null; } set { } } + } + public partial class OnvifDns + { + public OnvifDns() { } + public bool? FromDhcp { get { throw null; } set { } } + public System.Collections.Generic.IList Ipv4Address { get { throw null; } } + public System.Collections.Generic.IList Ipv6Address { get { throw null; } } + } + public partial class OnvifHostName + { + public OnvifHostName() { } + public bool? FromDhcp { get { throw null; } set { } } + public string Hostname { get { throw null; } set { } } + } + public partial class OnvifSystemDateTime + { + public OnvifSystemDateTime() { } + public string Time { get { throw null; } set { } } + public string TimeZone { get { throw null; } set { } } + public Azure.Media.VideoAnalyzer.Edge.Models.OnvifSystemDateTimeType? 
Type { get { throw null; } set { } } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct OnvifSystemDateTimeType : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public OnvifSystemDateTimeType(string value) { throw null; } + public static Azure.Media.VideoAnalyzer.Edge.Models.OnvifSystemDateTimeType Manual { get { throw null; } } + public static Azure.Media.VideoAnalyzer.Edge.Models.OnvifSystemDateTimeType Ntp { get { throw null; } } + public bool Equals(Azure.Media.VideoAnalyzer.Edge.Models.OnvifSystemDateTimeType other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.Media.VideoAnalyzer.Edge.Models.OnvifSystemDateTimeType left, Azure.Media.VideoAnalyzer.Edge.Models.OnvifSystemDateTimeType right) { throw null; } + public static implicit operator Azure.Media.VideoAnalyzer.Edge.Models.OnvifSystemDateTimeType (string value) { throw null; } + public static bool operator !=(Azure.Media.VideoAnalyzer.Edge.Models.OnvifSystemDateTimeType left, Azure.Media.VideoAnalyzer.Edge.Models.OnvifSystemDateTimeType right) { throw null; } + public override string ToString() { throw null; } + } public partial class OutputSelector { public OutputSelector() { } @@ -464,6 +603,59 @@ public ProcessorNodeBase(string name, System.Collections.Generic.IEnumerable Inputs { get { throw null; } } public string Name { get { throw null; } set { } } } + public partial class RateControl + { + public RateControl() { } + public float? BitRateLimit { get { throw null; } set { } } + public float? EncodingInterval { get { throw null; } set { } } + public float? FrameRateLimit { get { throw null; } set { } } + public bool? 
GuaranteedFrameRate { get { throw null; } set { } } + } + public partial class RemoteDeviceAdapter + { + public RemoteDeviceAdapter(string name) { } + public string Name { get { throw null; } set { } } + public Azure.Media.VideoAnalyzer.Edge.Models.RemoteDeviceAdapterProperties Properties { get { throw null; } set { } } + public Azure.Media.VideoAnalyzer.Edge.Models.SystemData SystemData { get { throw null; } set { } } + public static Azure.Media.VideoAnalyzer.Edge.Models.RemoteDeviceAdapter Deserialize(string json) { throw null; } + } + public partial class RemoteDeviceAdapterCollection + { + public RemoteDeviceAdapterCollection() { } + public string ContinuationToken { get { throw null; } set { } } + public System.Collections.Generic.IList Value { get { throw null; } } + public static Azure.Media.VideoAnalyzer.Edge.Models.RemoteDeviceAdapterCollection Deserialize(string json) { throw null; } + } + public partial class RemoteDeviceAdapterDeleteRequest : Azure.Media.VideoAnalyzer.Edge.Models.MethodRequestEmptyBodyBase + { + public RemoteDeviceAdapterDeleteRequest(string name) : base (default(string)) { } + } + public partial class RemoteDeviceAdapterGetRequest : Azure.Media.VideoAnalyzer.Edge.Models.MethodRequestEmptyBodyBase + { + public RemoteDeviceAdapterGetRequest(string name) : base (default(string)) { } + } + public partial class RemoteDeviceAdapterListRequest : Azure.Media.VideoAnalyzer.Edge.Models.MethodRequest + { + public RemoteDeviceAdapterListRequest() { } + } + public partial class RemoteDeviceAdapterProperties + { + public RemoteDeviceAdapterProperties(Azure.Media.VideoAnalyzer.Edge.Models.RemoteDeviceAdapterTarget target, Azure.Media.VideoAnalyzer.Edge.Models.IotHubDeviceConnection iotHubDeviceConnection) { } + public string Description { get { throw null; } set { } } + public Azure.Media.VideoAnalyzer.Edge.Models.IotHubDeviceConnection IotHubDeviceConnection { get { throw null; } set { } } + public Azure.Media.VideoAnalyzer.Edge.Models.RemoteDeviceAdapterTarget Target { get { throw null; } set { } } + } + public partial class RemoteDeviceAdapterSetRequest : Azure.Media.VideoAnalyzer.Edge.Models.MethodRequest + { + public RemoteDeviceAdapterSetRequest(Azure.Media.VideoAnalyzer.Edge.Models.RemoteDeviceAdapter remoteDeviceAdapter) { } + public Azure.Media.VideoAnalyzer.Edge.Models.RemoteDeviceAdapter RemoteDeviceAdapter { get { throw null; } set { } } + public override string GetPayloadAsJson() { throw null; } + } + public partial class RemoteDeviceAdapterTarget + { + public RemoteDeviceAdapterTarget(string host) { } + public string Host { get { throw null; } set { } } + } public partial class RtspSource : Azure.Media.VideoAnalyzer.Edge.Models.SourceNodeBase { public RtspSource(string name, Azure.Media.VideoAnalyzer.Edge.Models.EndpointBase endpoint) : base (default(string)) { } @@ -671,10 +863,18 @@ public SpatialAnalysisPersonZoneCrossingZoneEvents(Azure.Media.VideoAnalyzer.Edg public partial class SpatialAnalysisTypedOperationBase : Azure.Media.VideoAnalyzer.Edge.Models.SpatialAnalysisOperationBase { public SpatialAnalysisTypedOperationBase() { } + public string CalibrationConfiguration { get { throw null; } set { } } + public string CameraCalibratorNodeConfiguration { get { throw null; } set { } } public string CameraConfiguration { get { throw null; } set { } } public string Debug { get { throw null; } set { } } public string DetectorNodeConfiguration { get { throw null; } set { } } public string EnableFaceMaskClassifier { get { throw null; } set { } } + public string 
TrackerNodeConfiguration { get { throw null; } set { } } + } + public partial class SymmetricKeyCredentials : Azure.Media.VideoAnalyzer.Edge.Models.CredentialsBase + { + public SymmetricKeyCredentials(string key) { } + public string Key { get { throw null; } set { } } } public partial class SystemData { @@ -708,9 +908,50 @@ public partial class VideoCreationProperties { public VideoCreationProperties() { } public string Description { get { throw null; } set { } } + public string RetentionPeriod { get { throw null; } set { } } public string SegmentLength { get { throw null; } set { } } public string Title { get { throw null; } set { } } } + public partial class VideoEncoderConfiguration + { + public VideoEncoderConfiguration() { } + public Azure.Media.VideoAnalyzer.Edge.Models.VideoEncoding? Encoding { get { throw null; } set { } } + public Azure.Media.VideoAnalyzer.Edge.Models.H264Configuration H264 { get { throw null; } set { } } + public Azure.Media.VideoAnalyzer.Edge.Models.Mpeg4Configuration Mpeg4 { get { throw null; } set { } } + public float? Quality { get { throw null; } set { } } + public Azure.Media.VideoAnalyzer.Edge.Models.RateControl RateControl { get { throw null; } set { } } + public Azure.Media.VideoAnalyzer.Edge.Models.VideoResolution Resolution { get { throw null; } set { } } + } + [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] + public readonly partial struct VideoEncoding : System.IEquatable + { + private readonly object _dummy; + private readonly int _dummyPrimitive; + public VideoEncoding(string value) { throw null; } + public static Azure.Media.VideoAnalyzer.Edge.Models.VideoEncoding H264 { get { throw null; } } + public static Azure.Media.VideoAnalyzer.Edge.Models.VideoEncoding Jpeg { get { throw null; } } + public static Azure.Media.VideoAnalyzer.Edge.Models.VideoEncoding Mpeg4 { get { throw null; } } + public bool Equals(Azure.Media.VideoAnalyzer.Edge.Models.VideoEncoding other) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override bool Equals(object obj) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public override int GetHashCode() { throw null; } + public static bool operator ==(Azure.Media.VideoAnalyzer.Edge.Models.VideoEncoding left, Azure.Media.VideoAnalyzer.Edge.Models.VideoEncoding right) { throw null; } + public static implicit operator Azure.Media.VideoAnalyzer.Edge.Models.VideoEncoding (string value) { throw null; } + public static bool operator !=(Azure.Media.VideoAnalyzer.Edge.Models.VideoEncoding left, Azure.Media.VideoAnalyzer.Edge.Models.VideoEncoding right) { throw null; } + public override string ToString() { throw null; } + } + public partial class VideoPublishingOptions + { + public VideoPublishingOptions() { } + public string EnableVideoPreviewImage { get { throw null; } set { } } + } + public partial class VideoResolution + { + public VideoResolution() { } + public float? Height { get { throw null; } set { } } + public float? 
Width { get { throw null; } set { } } + } public partial class VideoSink : Azure.Media.VideoAnalyzer.Edge.Models.SinkNodeBase { public VideoSink(string name, System.Collections.Generic.IEnumerable inputs, string videoName, string localMediaCachePath, string localMediaCacheMaximumSizeMiB) : base (default(string), default(System.Collections.Generic.IEnumerable)) { } @@ -718,5 +959,6 @@ public partial class VideoSink : Azure.Media.VideoAnalyzer.Edge.Models.SinkNodeB public string LocalMediaCachePath { get { throw null; } set { } } public Azure.Media.VideoAnalyzer.Edge.Models.VideoCreationProperties VideoCreationProperties { get { throw null; } set { } } public string VideoName { get { throw null; } set { } } + public Azure.Media.VideoAnalyzer.Edge.Models.VideoPublishingOptions VideoPublishingOptions { get { throw null; } set { } } } } diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/samples/LiveVideoAnalyzerSample.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/samples/LiveVideoAnalyzerSample.cs index dac516ab2610..b4119902c345 100644 --- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/samples/LiveVideoAnalyzerSample.cs +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/samples/LiveVideoAnalyzerSample.cs @@ -18,25 +18,28 @@ namespace Azure.Media.VideoAnalyzer.Edge.Samples { public class LiveVideoAnalyzerSample { - private ServiceClient serviceClient; - private String deviceId = "lva-sample-device"; - private String moduleId = "mediaEdge"; + private ServiceClient _serviceClient; + private RegistryManager _registryManager; + private string _deviceId = System.Environment.GetEnvironmentVariable("iothub_deviceid", EnvironmentVariableTarget.User); + private string _moduleId = System.Environment.GetEnvironmentVariable("iothub_moduleid", EnvironmentVariableTarget.User); public LiveVideoAnalyzerSample() { #region Snippet:Azure_VideoAnalyzerSamples_ConnectionString - String connectionString = "connectionString"; - ServiceClient serviceClient = ServiceClient.CreateFromConnectionString(connectionString); + string connectionString = System.Environment.GetEnvironmentVariable("iothub_connectionstring", EnvironmentVariableTarget.User); + var serviceClient = ServiceClient.CreateFromConnectionString(connectionString); #endregion Snippet:Azure_VideoAnalyzerSamples_ConnectionString - this.serviceClient = serviceClient; + + _serviceClient = serviceClient; + _registryManager = RegistryManager.CreateFromConnectionString(connectionString); } [Test] - public async Task SendPipelineRequests() + public async Task SendRequests() { try { - // create a pipeline topology and live pipeline + //create a pipeline topology and live pipeline var pipelineTopology = BuildPipelineTopology(); var livePipeline = BuildLivePipeline(pipelineTopology.Name); @@ -47,7 +50,7 @@ public async Task SendPipelineRequests() var directMethod = new CloudToDeviceMethod(setPipelineTopRequest.MethodName); directMethod.SetPayloadJson(setPipelineTopRequest.GetPayloadAsJson()); - var setPipelineTopResponse = await serviceClient.InvokeDeviceMethodAsync(deviceId, moduleId, directMethod); + var setPipelineTopResponse = await _serviceClient.InvokeDeviceMethodAsync(_deviceId, _moduleId, directMethod); #endregion Snippet:Azure_VideoAnalyzerSamples_InvokeDirectMethod // get a topology using helper function @@ -79,6 +82,33 @@ public async Task SendPipelineRequests() //delete live pipeline var deletePipelineTopology = await InvokeDirectMethodHelper(new PipelineTopologyDeleteRequest(pipelineTopology.Name)); + + //get an onvif device + 
var onvifDeviceGetRequest = await InvokeDirectMethodHelper(new OnvifDeviceGetRequest(new UnsecuredEndpoint("rtsp://camerasimulator:8554"))); + var onvifDeviceGetResponse = OnvifDevice.Deserialize(onvifDeviceGetRequest.GetPayloadAsJson()); + + //get all onvif devices on the network + var onvifDiscoverRequest = await InvokeDirectMethodHelper(new OnvifDeviceDiscoverRequest()); + var onvifDiscoverResponse = DiscoveredOnvifDeviceCollection.Deserialize(onvifDiscoverRequest.GetPayloadAsJson()); + + // create a remote device adapter and send a set request for it + var iotDeviceName = "iotDeviceSample"; + var remoteDeviceName = "remoteDeviceSample"; + var iotDevice = await GetOrAddIoTDeviceAsync(iotDeviceName); + var remoteDeviceAdapter = CreateRemoteDeviceAdapter(remoteDeviceName, iotDeviceName, iotDevice.Authentication.SymmetricKey.PrimaryKey); + var remoteDeviceAdapterSetRequest = await InvokeDirectMethodHelper(new RemoteDeviceAdapterSetRequest(remoteDeviceAdapter)); + var remoteDeviceAdapterSetResponse = RemoteDeviceAdapter.Deserialize(remoteDeviceAdapterSetRequest.GetPayloadAsJson()); + + //get a remote device adapter + var remoteDeviceAdapterGetRequest = await InvokeDirectMethodHelper(new RemoteDeviceAdapterGetRequest(remoteDeviceName)); + var remoteDeviceAdapterGetResponse = RemoteDeviceAdapter.Deserialize(remoteDeviceAdapterGetRequest.GetPayloadAsJson()); + + //list all remote device adapters + var remoteDeviceAdapterListRequest = await InvokeDirectMethodHelper(new RemoteDeviceAdapterListRequest()); + var remoteDeviceAdapterListResponse = RemoteDeviceAdapterCollection.Deserialize(remoteDeviceAdapterListRequest.GetPayloadAsJson()); + + //delete a remote device adapter + var remoteDeviceAdapterDeleteRequest = await InvokeDirectMethodHelper(new RemoteDeviceAdapterDeleteRequest(remoteDeviceName)); } catch (Exception ex) { @@ -92,7 +122,36 @@ private async Task InvokeDirectMethodHelper(MethodReq var directMethod = new CloudToDeviceMethod(bc.MethodName); directMethod.SetPayloadJson(bc.GetPayloadAsJson()); - return await serviceClient.InvokeDeviceMethodAsync(deviceId, moduleId, directMethod); + return await _serviceClient.InvokeDeviceMethodAsync(_deviceId, _moduleId, directMethod); + } + + protected async Task<Device> GetOrAddIoTDeviceAsync(string iotDeviceName) + { + var iotDevice = await _registryManager.GetDeviceAsync(iotDeviceName); + if (iotDevice == null) + { + iotDevice = await _registryManager.AddDeviceAsync(new Device(iotDeviceName)); + } + + return iotDevice; + } + + private RemoteDeviceAdapter CreateRemoteDeviceAdapter(string deviceProxyName, string iotDeviceName, string symmetricKey) + { + var targetHost = new Uri("rtsp://camerasimulator:8554").Host; + + return new RemoteDeviceAdapter(deviceProxyName) + { + Properties = new RemoteDeviceAdapterProperties( + new RemoteDeviceAdapterTarget(targetHost), + new IotHubDeviceConnection(iotDeviceName) + { + Credentials = new SymmetricKeyCredentials(symmetricKey), + }) + { + Description = "description", + }, + }; } private LivePipeline BuildLivePipeline(string topologyName) @@ -148,10 +207,6 @@ private void SetParameters(PipelineTopologyProperties pipelineTopologyProperties { Description = "rtsp Url" }); - pipelineTopologyProperties.Parameters.Add(new ParameterDeclaration("hubSinkOutputName", ParameterType.String) - { - Description = "hub sink output" - }); #endregion Snippet:Azure_VideoAnalyzerSamples_SetParameters } @@ -175,7 +230,7 @@ private void SetSinks(PipelineTopologyProperties pipelineTopologyProps) { new NodeInput("rtspSource") }; - 
pipelineTopologyProps.Sinks.Add(new IotHubMessageSink("msgSink", nodeInput, "${hubSinkOutputName}")); + pipelineTopologyProps.Sinks.Add(new VideoSink("videoSink", nodeInput, "video", "/var/lib/videoanalyzer/tmp/", "1024")); #endregion Snippet:Azure_VideoAnalyzerSamples_SetSourcesSinks2 } } diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Azure.Media.VideoAnalyzer.Edge.csproj b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Azure.Media.VideoAnalyzer.Edge.csproj index 0e601a43a33e..cb34ac5cb7b6 100644 --- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Azure.Media.VideoAnalyzer.Edge.csproj +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Azure.Media.VideoAnalyzer.Edge.csproj @@ -10,7 +10,7 @@ Azure Video Analyzer Edge SDK - 1.0.0-beta.5 + 1.1.0-beta.1 Azure Video Analyzer Edge $(RequiredTargetFrameworks) diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/CredentialsBase.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/CredentialsBase.Serialization.cs index 30728f6ea62a..1ab7885df94f 100644 --- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/CredentialsBase.Serialization.cs +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/CredentialsBase.Serialization.cs @@ -27,6 +27,7 @@ internal static CredentialsBase DeserializeCredentialsBase(JsonElement element) switch (discriminator.GetString()) { case "#Microsoft.VideoAnalyzer.HttpHeaderCredentials": return HttpHeaderCredentials.DeserializeHttpHeaderCredentials(element); + case "#Microsoft.VideoAnalyzer.SymmetricKeyCredentials": return SymmetricKeyCredentials.DeserializeSymmetricKeyCredentials(element); case "#Microsoft.VideoAnalyzer.UsernamePasswordCredentials": return UsernamePasswordCredentials.DeserializeUsernamePasswordCredentials(element); } } diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/DiscoveredOnvifDevice.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/DiscoveredOnvifDevice.Serialization.cs new file mode 100644 index 000000000000..5c0ea25b9e7f --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/DiscoveredOnvifDevice.Serialization.cs @@ -0,0 +1,104 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + public partial class DiscoveredOnvifDevice : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + if (Optional.IsDefined(ServiceIdentifier)) + { + writer.WritePropertyName("serviceIdentifier"); + writer.WriteStringValue(ServiceIdentifier); + } + if (Optional.IsDefined(RemoteIPAddress)) + { + writer.WritePropertyName("remoteIPAddress"); + writer.WriteStringValue(RemoteIPAddress); + } + if (Optional.IsCollectionDefined(Scopes)) + { + writer.WritePropertyName("scopes"); + writer.WriteStartArray(); + foreach (var item in Scopes) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(Endpoints)) + { + writer.WritePropertyName("endpoints"); + writer.WriteStartArray(); + foreach (var item in Endpoints) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + writer.WriteEndObject(); + } + + internal static DiscoveredOnvifDevice DeserializeDiscoveredOnvifDevice(JsonElement element) + { + Optional serviceIdentifier = default; + Optional remoteIPAddress = default; + Optional> scopes = default; + Optional> endpoints = default; + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("serviceIdentifier")) + { + serviceIdentifier = property.Value.GetString(); + continue; + } + if (property.NameEquals("remoteIPAddress")) + { + remoteIPAddress = property.Value.GetString(); + continue; + } + if (property.NameEquals("scopes")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + scopes = array; + continue; + } + if (property.NameEquals("endpoints")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + endpoints = array; + continue; + } + } + return new DiscoveredOnvifDevice(serviceIdentifier.Value, remoteIPAddress.Value, Optional.ToList(scopes), Optional.ToList(endpoints)); + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/DiscoveredOnvifDevice.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/DiscoveredOnvifDevice.cs new file mode 100644 index 000000000000..d7060cb154d2 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/DiscoveredOnvifDevice.cs @@ -0,0 +1,45 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.Collections.Generic; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// The discovered properties of the ONVIF device that are returned during the discovery. + public partial class DiscoveredOnvifDevice + { + /// Initializes a new instance of DiscoveredOnvifDevice. + public DiscoveredOnvifDevice() + { + Scopes = new ChangeTrackingList(); + Endpoints = new ChangeTrackingList(); + } + + /// Initializes a new instance of DiscoveredOnvifDevice. + /// The unique identifier of the ONVIF device that was discovered in the same subnet as the IoT Edge device. 
+ /// The IP address of the ONVIF device that was discovered in the same subnet as the IoT Edge device. + /// An array of hostnames for the ONVIF discovered devices that are in the same subnet as the IoT Edge device. + /// An array of media profile endpoints that the ONVIF discovered device supports. + internal DiscoveredOnvifDevice(string serviceIdentifier, string remoteIPAddress, IList scopes, IList endpoints) + { + ServiceIdentifier = serviceIdentifier; + RemoteIPAddress = remoteIPAddress; + Scopes = scopes; + Endpoints = endpoints; + } + + /// The unique identifier of the ONVIF device that was discovered in the same subnet as the IoT Edge device. + public string ServiceIdentifier { get; set; } + /// The IP address of the ONVIF device that was discovered in the same subnet as the IoT Edge device. + public string RemoteIPAddress { get; set; } + /// An array of hostnames for the ONVIF discovered devices that are in the same subnet as the IoT Edge device. + public IList Scopes { get; } + /// An array of media profile endpoints that the ONVIF discovered device supports. + public IList Endpoints { get; } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/DiscoveredOnvifDeviceCollection.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/DiscoveredOnvifDeviceCollection.Serialization.cs new file mode 100644 index 000000000000..02ca352014a4 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/DiscoveredOnvifDeviceCollection.Serialization.cs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + public partial class DiscoveredOnvifDeviceCollection : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + if (Optional.IsCollectionDefined(Value)) + { + writer.WritePropertyName("value"); + writer.WriteStartArray(); + foreach (var item in Value) + { + writer.WriteObjectValue(item); + } + writer.WriteEndArray(); + } + writer.WriteEndObject(); + } + + internal static DiscoveredOnvifDeviceCollection DeserializeDiscoveredOnvifDeviceCollection(JsonElement element) + { + Optional> value = default; + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("value")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(DiscoveredOnvifDevice.DeserializeDiscoveredOnvifDevice(item)); + } + value = array; + continue; + } + } + return new DiscoveredOnvifDeviceCollection(Optional.ToList(value)); + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/DiscoveredOnvifDeviceCollection.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/DiscoveredOnvifDeviceCollection.cs new file mode 100644 index 000000000000..aef2b9fd7f01 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/DiscoveredOnvifDeviceCollection.cs @@ -0,0 +1,32 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System.Collections.Generic; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// A list of ONVIF devices that were discovered in the same subnet as the IoT Edge device. + public partial class DiscoveredOnvifDeviceCollection + { + /// Initializes a new instance of DiscoveredOnvifDeviceCollection. + public DiscoveredOnvifDeviceCollection() + { + Value = new ChangeTrackingList(); + } + + /// Initializes a new instance of DiscoveredOnvifDeviceCollection. + /// An array of ONVIF devices that have been discovered in the same subnet as the IoT Edge device. + internal DiscoveredOnvifDeviceCollection(IList value) + { + Value = value; + } + + /// An array of ONVIF devices that have been discovered in the same subnet as the IoT Edge device. + public IList Value { get; } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/H264Configuration.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/H264Configuration.Serialization.cs new file mode 100644 index 000000000000..8d96caae5ff1 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/H264Configuration.Serialization.cs @@ -0,0 +1,61 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.Text.Json; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + public partial class H264Configuration : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + if (Optional.IsDefined(GovLength)) + { + writer.WritePropertyName("govLength"); + writer.WriteNumberValue(GovLength.Value); + } + if (Optional.IsDefined(Profile)) + { + writer.WritePropertyName("profile"); + writer.WriteStringValue(Profile.Value.ToString()); + } + writer.WriteEndObject(); + } + + internal static H264Configuration DeserializeH264Configuration(JsonElement element) + { + Optional govLength = default; + Optional profile = default; + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("govLength")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + govLength = property.Value.GetSingle(); + continue; + } + if (property.NameEquals("profile")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + profile = new H264Profile(property.Value.GetString()); + continue; + } + } + return new H264Configuration(Optional.ToNullable(govLength), Optional.ToNullable(profile)); + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/H264Configuration.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/H264Configuration.cs new file mode 100644 index 000000000000..8c65d20eaea7 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/H264Configuration.cs @@ -0,0 +1,32 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// Class representing the H264 Configuration. + public partial class H264Configuration + { + /// Initializes a new instance of H264Configuration. + public H264Configuration() + { + } + + /// Initializes a new instance of H264Configuration. + /// Group of Video frames length. 
+ /// The H264 Profile. + internal H264Configuration(float? govLength, H264Profile? profile) + { + GovLength = govLength; + Profile = profile; + } + + /// Group of Video frames length. + public float? GovLength { get; set; } + /// The H264 Profile. + public H264Profile? Profile { get; set; } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/H264Profile.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/H264Profile.cs new file mode 100644 index 000000000000..b5b93130e8f5 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/H264Profile.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// The H264 Profile. + public readonly partial struct H264Profile : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public H264Profile(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string BaselineValue = "Baseline"; + private const string MainValue = "Main"; + private const string ExtendedValue = "Extended"; + private const string HighValue = "High"; + + /// Baseline. + public static H264Profile Baseline { get; } = new H264Profile(BaselineValue); + /// Main. + public static H264Profile Main { get; } = new H264Profile(MainValue); + /// Extended. + public static H264Profile Extended { get; } = new H264Profile(ExtendedValue); + /// High. + public static H264Profile High { get; } = new H264Profile(HighValue); + /// Determines if two values are the same. + public static bool operator ==(H264Profile left, H264Profile right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(H264Profile left, H264Profile right) => !left.Equals(right); + /// Converts a string to a . + public static implicit operator H264Profile(string value) => new H264Profile(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is H264Profile other && Equals(other); + /// + public bool Equals(H264Profile other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value?.GetHashCode() ?? 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/IotHubDeviceConnection.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/IotHubDeviceConnection.Serialization.cs new file mode 100644 index 000000000000..5ce0f0b110f6 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/IotHubDeviceConnection.Serialization.cs @@ -0,0 +1,53 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System.Text.Json; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + public partial class IotHubDeviceConnection : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + writer.WritePropertyName("deviceId"); + writer.WriteStringValue(DeviceId); + if (Optional.IsDefined(Credentials)) + { + writer.WritePropertyName("credentials"); + writer.WriteObjectValue(Credentials); + } + writer.WriteEndObject(); + } + + internal static IotHubDeviceConnection DeserializeIotHubDeviceConnection(JsonElement element) + { + string deviceId = default; + Optional credentials = default; + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("deviceId")) + { + deviceId = property.Value.GetString(); + continue; + } + if (property.NameEquals("credentials")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + credentials = CredentialsBase.DeserializeCredentialsBase(property.Value); + continue; + } + } + return new IotHubDeviceConnection(deviceId, credentials.Value); + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/IotHubDeviceConnection.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/IotHubDeviceConnection.cs new file mode 100644 index 000000000000..cfe3019c245b --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/IotHubDeviceConnection.cs @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// Information that enables communication between the IoT Hub and the IoT device - allowing this edge module to act as a transparent gateway between the two. + public partial class IotHubDeviceConnection + { + /// Initializes a new instance of IotHubDeviceConnection. + /// The name of the IoT device configured and managed in IoT Hub. (case-sensitive). + /// is null. + public IotHubDeviceConnection(string deviceId) + { + if (deviceId == null) + { + throw new ArgumentNullException(nameof(deviceId)); + } + + DeviceId = deviceId; + } + + /// Initializes a new instance of IotHubDeviceConnection. + /// The name of the IoT device configured and managed in IoT Hub. (case-sensitive). + /// IoT device connection credentials. Currently IoT device symmetric key credentials are supported. + internal IotHubDeviceConnection(string deviceId, CredentialsBase credentials) + { + DeviceId = deviceId; + Credentials = credentials; + } + + /// The name of the IoT device configured and managed in IoT Hub. (case-sensitive). + public string DeviceId { get; set; } + /// IoT device connection credentials. Currently IoT device symmetric key credentials are supported. 
+ public CredentialsBase Credentials { get; set; } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/LivePipelineSetRequestBody.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/LivePipelineSetRequestBody.cs index 92d02c71b1fc..2ca98cc2b676 100644 --- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/LivePipelineSetRequestBody.cs +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/LivePipelineSetRequestBody.cs @@ -9,7 +9,7 @@ namespace Azure.Media.VideoAnalyzer.Edge.Models { - /// Live pipeline resource representation. + /// Live Pipeline represents an unique instance of a pipeline topology which is used for real-time content ingestion and analysis. internal partial class LivePipelineSetRequestBody : MethodRequest { /// Initializes a new instance of LivePipelineSetRequestBody. diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/MediaProfile.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/MediaProfile.Serialization.cs new file mode 100644 index 000000000000..89f0e4208ec0 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/MediaProfile.Serialization.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.Text.Json; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + public partial class MediaProfile : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + if (Optional.IsDefined(Name)) + { + writer.WritePropertyName("name"); + writer.WriteStringValue(Name); + } + if (Optional.IsDefined(MediaUri)) + { + writer.WritePropertyName("mediaUri"); + writer.WriteObjectValue(MediaUri); + } + if (Optional.IsDefined(VideoEncoderConfiguration)) + { + writer.WritePropertyName("videoEncoderConfiguration"); + writer.WriteObjectValue(VideoEncoderConfiguration); + } + writer.WriteEndObject(); + } + + internal static MediaProfile DeserializeMediaProfile(JsonElement element) + { + Optional name = default; + Optional mediaUri = default; + Optional videoEncoderConfiguration = default; + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name")) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("mediaUri")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + mediaUri = property.Value.GetObject(); + continue; + } + if (property.NameEquals("videoEncoderConfiguration")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + videoEncoderConfiguration = VideoEncoderConfiguration.DeserializeVideoEncoderConfiguration(property.Value); + continue; + } + } + return new MediaProfile(name.Value, mediaUri.Value, videoEncoderConfiguration.Value); + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/MediaProfile.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/MediaProfile.cs new file mode 100644 index 000000000000..d9e144770e9d --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/MediaProfile.cs @@ -0,0 +1,36 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// Class representing the ONVIF MediaProfiles. + public partial class MediaProfile + { + /// Initializes a new instance of MediaProfile. + public MediaProfile() + { + } + + /// Initializes a new instance of MediaProfile. + /// The name of the Media Profile. + /// Object representing the URI that will be used to request for media streaming. + /// The Video encoder configuration. + internal MediaProfile(string name, object mediaUri, VideoEncoderConfiguration videoEncoderConfiguration) + { + Name = name; + MediaUri = mediaUri; + VideoEncoderConfiguration = videoEncoderConfiguration; + } + + /// The name of the Media Profile. + public string Name { get; set; } + /// Object representing the URI that will be used to request for media streaming. + public object MediaUri { get; set; } + /// The Video encoder configuration. + public VideoEncoderConfiguration VideoEncoderConfiguration { get; set; } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/MediaUri.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/MediaUri.Serialization.cs new file mode 100644 index 000000000000..19845d5d7696 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/MediaUri.Serialization.cs @@ -0,0 +1,40 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.Text.Json; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + public partial class MediaUri : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + if (Optional.IsDefined(Uri)) + { + writer.WritePropertyName("uri"); + writer.WriteStringValue(Uri); + } + writer.WriteEndObject(); + } + + internal static MediaUri DeserializeMediaUri(JsonElement element) + { + Optional uri = default; + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("uri")) + { + uri = property.Value.GetString(); + continue; + } + } + return new MediaUri(uri.Value); + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/MediaUri.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/MediaUri.cs new file mode 100644 index 000000000000..8db64b5dc8a2 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/MediaUri.cs @@ -0,0 +1,28 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// Object representing the URI that will be used to request for media streaming. + public partial class MediaUri + { + /// Initializes a new instance of MediaUri. + public MediaUri() + { + } + + /// Initializes a new instance of MediaUri. + /// URI that can be used for media streaming. + internal MediaUri(string uri) + { + Uri = uri; + } + + /// URI that can be used for media streaming. 
+ public string Uri { get; set; } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/MethodRequest.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/MethodRequest.Serialization.cs index 406e34822604..4ceee02a3f3c 100644 --- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/MethodRequest.Serialization.cs +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/MethodRequest.Serialization.cs @@ -31,16 +31,23 @@ internal static MethodRequest DeserializeMethodRequest(JsonElement element) { case "MethodRequestEmptyBodyBase": return MethodRequestEmptyBodyBase.DeserializeMethodRequestEmptyBodyBase(element); case "PipelineTopologySetRequestBody": return PipelineTopologySetRequestBody.DeserializePipelineTopologySetRequestBody(element); + case "RemoteDeviceAdapterSetRequestBody": return RemoteDeviceAdapterSetRequestBody.DeserializeRemoteDeviceAdapterSetRequestBody(element); case "livePipelineActivate": return LivePipelineActivateRequest.DeserializeLivePipelineActivateRequest(element); case "livePipelineDeactivate": return LivePipelineDeactivateRequest.DeserializeLivePipelineDeactivateRequest(element); case "livePipelineDelete": return LivePipelineDeleteRequest.DeserializeLivePipelineDeleteRequest(element); case "livePipelineGet": return LivePipelineGetRequest.DeserializeLivePipelineGetRequest(element); case "livePipelineList": return LivePipelineListRequest.DeserializeLivePipelineListRequest(element); case "livePipelineSet": return LivePipelineSetRequest.DeserializeLivePipelineSetRequest(element); + case "onvifDeviceDiscover": return OnvifDeviceDiscoverRequest.DeserializeOnvifDeviceDiscoverRequest(element); + case "onvifDeviceGet": return OnvifDeviceGetRequest.DeserializeOnvifDeviceGetRequest(element); case "pipelineTopologyDelete": return PipelineTopologyDeleteRequest.DeserializePipelineTopologyDeleteRequest(element); case "pipelineTopologyGet": return PipelineTopologyGetRequest.DeserializePipelineTopologyGetRequest(element); case "pipelineTopologyList": return PipelineTopologyListRequest.DeserializePipelineTopologyListRequest(element); case "pipelineTopologySet": return PipelineTopologySetRequest.DeserializePipelineTopologySetRequest(element); + case "remoteDeviceAdapterDelete": return RemoteDeviceAdapterDeleteRequest.DeserializeRemoteDeviceAdapterDeleteRequest(element); + case "remoteDeviceAdapterGet": return RemoteDeviceAdapterGetRequest.DeserializeRemoteDeviceAdapterGetRequest(element); + case "remoteDeviceAdapterList": return RemoteDeviceAdapterListRequest.DeserializeRemoteDeviceAdapterListRequest(element); + case "remoteDeviceAdapterSet": return RemoteDeviceAdapterSetRequest.DeserializeRemoteDeviceAdapterSetRequest(element); case "LivePipelineSetRequestBody": return LivePipelineSetRequestBody.DeserializeLivePipelineSetRequestBody(element); } } diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/MethodRequest.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/MethodRequest.cs index 0444b097bbd4..945b4a79cec5 100644 --- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/MethodRequest.cs +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/MethodRequest.cs @@ -13,7 +13,7 @@ public partial class MethodRequest /// Initializes a new instance of MethodRequest. public MethodRequest() { - ApiVersion = "1.0"; + ApiVersion = "1.1"; } /// Initializes a new instance of MethodRequest. 
diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/MethodRequestEmptyBodyBase.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/MethodRequestEmptyBodyBase.Serialization.cs index 9a4968003261..fe837eef50d8 100644 --- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/MethodRequestEmptyBodyBase.Serialization.cs +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/MethodRequestEmptyBodyBase.Serialization.cs @@ -37,6 +37,8 @@ internal static MethodRequestEmptyBodyBase DeserializeMethodRequestEmptyBodyBase case "livePipelineGet": return LivePipelineGetRequest.DeserializeLivePipelineGetRequest(element); case "pipelineTopologyDelete": return PipelineTopologyDeleteRequest.DeserializePipelineTopologyDeleteRequest(element); case "pipelineTopologyGet": return PipelineTopologyGetRequest.DeserializePipelineTopologyGetRequest(element); + case "remoteDeviceAdapterDelete": return RemoteDeviceAdapterDeleteRequest.DeserializeRemoteDeviceAdapterDeleteRequest(element); + case "remoteDeviceAdapterGet": return RemoteDeviceAdapterGetRequest.DeserializeRemoteDeviceAdapterGetRequest(element); } } string name = default; diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/Mpeg4Configuration.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/Mpeg4Configuration.Serialization.cs new file mode 100644 index 000000000000..415e23fff050 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/Mpeg4Configuration.Serialization.cs @@ -0,0 +1,61 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.Text.Json; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + public partial class Mpeg4Configuration : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + if (Optional.IsDefined(GovLength)) + { + writer.WritePropertyName("govLength"); + writer.WriteNumberValue(GovLength.Value); + } + if (Optional.IsDefined(Profile)) + { + writer.WritePropertyName("profile"); + writer.WriteStringValue(Profile.Value.ToString()); + } + writer.WriteEndObject(); + } + + internal static Mpeg4Configuration DeserializeMpeg4Configuration(JsonElement element) + { + Optional govLength = default; + Optional profile = default; + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("govLength")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + govLength = property.Value.GetSingle(); + continue; + } + if (property.NameEquals("profile")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + profile = new Mpeg4Profile(property.Value.GetString()); + continue; + } + } + return new Mpeg4Configuration(Optional.ToNullable(govLength), Optional.ToNullable(profile)); + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/Mpeg4Configuration.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/Mpeg4Configuration.cs new file mode 100644 index 000000000000..74365ee4882a --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/Mpeg4Configuration.cs @@ -0,0 +1,32 @@ +// Copyright (c) Microsoft Corporation. 
All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// Class representing the MPEG4 Configuration. + public partial class Mpeg4Configuration + { + /// Initializes a new instance of Mpeg4Configuration. + public Mpeg4Configuration() + { + } + + /// Initializes a new instance of Mpeg4Configuration. + /// Group of Video frames length. + /// The MPEG4 Profile. + internal Mpeg4Configuration(float? govLength, Mpeg4Profile? profile) + { + GovLength = govLength; + Profile = profile; + } + + /// Group of Video frames length. + public float? GovLength { get; set; } + /// The MPEG4 Profile. + public Mpeg4Profile? Profile { get; set; } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/Mpeg4Profile.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/Mpeg4Profile.cs new file mode 100644 index 000000000000..4b3c525d3fce --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/Mpeg4Profile.cs @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// The MPEG4 Profile. + public readonly partial struct Mpeg4Profile : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public Mpeg4Profile(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string SPValue = "SP"; + private const string ASPValue = "ASP"; + + /// Simple Profile. + public static Mpeg4Profile SP { get; } = new Mpeg4Profile(SPValue); + /// Advanced Simple Profile. + public static Mpeg4Profile ASP { get; } = new Mpeg4Profile(ASPValue); + /// Determines if two values are the same. + public static bool operator ==(Mpeg4Profile left, Mpeg4Profile right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(Mpeg4Profile left, Mpeg4Profile right) => !left.Equals(right); + /// Converts a string to a . + public static implicit operator Mpeg4Profile(string value) => new Mpeg4Profile(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is Mpeg4Profile other && Equals(other); + /// + public bool Equals(Mpeg4Profile other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value?.GetHashCode() ?? 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifDevice.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifDevice.Serialization.cs new file mode 100644 index 000000000000..74f9c9228426 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifDevice.Serialization.cs @@ -0,0 +1,104 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
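For context on the `Mpeg4Configuration` and `Mpeg4Profile` types added above: `Mpeg4Profile` follows the SDK's extensible-enum pattern (a readonly struct wrapping a string), so callers can use the predefined `SP`/`ASP` values or pass any string the service understands. A small illustrative sketch:

```C#
// Illustrative sketch of the extensible-enum behavior defined above.
var mpeg4 = new Mpeg4Configuration
{
    GovLength = 30,                // group-of-video-frames length
    Profile = Mpeg4Profile.ASP     // Advanced Simple Profile
};

Mpeg4Profile fromString = "sp";    // implicit conversion from string
Console.WriteLine(fromString == Mpeg4Profile.SP); // True - equality is case-insensitive
```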
+ +// + +#nullable disable + +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + public partial class OnvifDevice : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + if (Optional.IsDefined(Hostname)) + { + writer.WritePropertyName("hostname"); + writer.WriteObjectValue(Hostname); + } + if (Optional.IsDefined(SystemDateTime)) + { + writer.WritePropertyName("systemDateTime"); + writer.WriteObjectValue(SystemDateTime); + } + if (Optional.IsDefined(Dns)) + { + writer.WritePropertyName("dns"); + writer.WriteObjectValue(Dns); + } + if (Optional.IsCollectionDefined(MediaProfiles)) + { + writer.WritePropertyName("mediaProfiles"); + writer.WriteStartArray(); + foreach (var item in MediaProfiles) + { + writer.WriteObjectValue(item); + } + writer.WriteEndArray(); + } + writer.WriteEndObject(); + } + + internal static OnvifDevice DeserializeOnvifDevice(JsonElement element) + { + Optional hostname = default; + Optional systemDateTime = default; + Optional dns = default; + Optional> mediaProfiles = default; + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("hostname")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + hostname = OnvifHostName.DeserializeOnvifHostName(property.Value); + continue; + } + if (property.NameEquals("systemDateTime")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + systemDateTime = OnvifSystemDateTime.DeserializeOnvifSystemDateTime(property.Value); + continue; + } + if (property.NameEquals("dns")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + dns = OnvifDns.DeserializeOnvifDns(property.Value); + continue; + } + if (property.NameEquals("mediaProfiles")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(MediaProfile.DeserializeMediaProfile(item)); + } + mediaProfiles = array; + continue; + } + } + return new OnvifDevice(hostname.Value, systemDateTime.Value, dns.Value, Optional.ToList(mediaProfiles)); + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifDevice.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifDevice.cs new file mode 100644 index 000000000000..c4e187c8ee69 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifDevice.cs @@ -0,0 +1,44 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.Collections.Generic; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// The ONVIF device properties. + public partial class OnvifDevice + { + /// Initializes a new instance of OnvifDevice. + public OnvifDevice() + { + MediaProfiles = new ChangeTrackingList(); + } + + /// Initializes a new instance of OnvifDevice. + /// The hostname of the ONVIF device. + /// The system date and time of the ONVIF device. + /// The ONVIF device DNS properties. + /// An array of of ONVIF media profiles supported by the ONVIF device. 
+ internal OnvifDevice(OnvifHostName hostname, OnvifSystemDateTime systemDateTime, OnvifDns dns, IList mediaProfiles) + { + Hostname = hostname; + SystemDateTime = systemDateTime; + Dns = dns; + MediaProfiles = mediaProfiles; + } + + /// The hostname of the ONVIF device. + public OnvifHostName Hostname { get; set; } + /// The system date and time of the ONVIF device. + public OnvifSystemDateTime SystemDateTime { get; set; } + /// The ONVIF device DNS properties. + public OnvifDns Dns { get; set; } + /// An array of of ONVIF media profiles supported by the ONVIF device. + public IList MediaProfiles { get; } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifDeviceDiscoverRequest.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifDeviceDiscoverRequest.Serialization.cs new file mode 100644 index 000000000000..fd81591a240f --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifDeviceDiscoverRequest.Serialization.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.Text.Json; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + public partial class OnvifDeviceDiscoverRequest : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + if (Optional.IsDefined(DiscoveryDuration)) + { + writer.WritePropertyName("discoveryDuration"); + writer.WriteStringValue(DiscoveryDuration); + } + if (Optional.IsDefined(ApiVersion)) + { + writer.WritePropertyName("@apiVersion"); + writer.WriteStringValue(ApiVersion); + } + writer.WriteEndObject(); + } + + internal static OnvifDeviceDiscoverRequest DeserializeOnvifDeviceDiscoverRequest(JsonElement element) + { + Optional discoveryDuration = default; + string methodName = default; + Optional apiVersion = default; + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("discoveryDuration")) + { + discoveryDuration = property.Value.GetString(); + continue; + } + if (property.NameEquals("methodName")) + { + methodName = property.Value.GetString(); + continue; + } + if (property.NameEquals("@apiVersion")) + { + apiVersion = property.Value.GetString(); + continue; + } + } + return new OnvifDeviceDiscoverRequest(methodName, apiVersion.Value, discoveryDuration.Value); + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifDeviceDiscoverRequest.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifDeviceDiscoverRequest.cs new file mode 100644 index 000000000000..be4a6e559ab2 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifDeviceDiscoverRequest.cs @@ -0,0 +1,32 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// Lists all the discoverable ONVIF devices on the same subnet as the Edge Module. + public partial class OnvifDeviceDiscoverRequest : MethodRequest + { + /// Initializes a new instance of OnvifDeviceDiscoverRequest. + public OnvifDeviceDiscoverRequest() + { + MethodName = "onvifDeviceDiscover"; + } + + /// Initializes a new instance of OnvifDeviceDiscoverRequest. + /// Direct method method name. + /// Video Analyzer API version. 
+ /// The amount of time that the ONVIF device discovery will wait for supported device responses. + internal OnvifDeviceDiscoverRequest(string methodName, string apiVersion, string discoveryDuration) : base(methodName, apiVersion) + { + DiscoveryDuration = discoveryDuration; + MethodName = methodName ?? "onvifDeviceDiscover"; + } + + /// The amount of time that the ONVIF device discovery will wait for supported device responses. + public string DiscoveryDuration { get; set; } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifDeviceGetRequest.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifDeviceGetRequest.Serialization.cs new file mode 100644 index 000000000000..65ba44275567 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifDeviceGetRequest.Serialization.cs @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.Text.Json; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + public partial class OnvifDeviceGetRequest : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + writer.WritePropertyName("endpoint"); + writer.WriteObjectValue(Endpoint); + if (Optional.IsDefined(ApiVersion)) + { + writer.WritePropertyName("@apiVersion"); + writer.WriteStringValue(ApiVersion); + } + writer.WriteEndObject(); + } + + internal static OnvifDeviceGetRequest DeserializeOnvifDeviceGetRequest(JsonElement element) + { + EndpointBase endpoint = default; + string methodName = default; + Optional apiVersion = default; + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("endpoint")) + { + endpoint = EndpointBase.DeserializeEndpointBase(property.Value); + continue; + } + if (property.NameEquals("methodName")) + { + methodName = property.Value.GetString(); + continue; + } + if (property.NameEquals("@apiVersion")) + { + apiVersion = property.Value.GetString(); + continue; + } + } + return new OnvifDeviceGetRequest(methodName, apiVersion.Value, endpoint); + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifDeviceGetRequest.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifDeviceGetRequest.cs new file mode 100644 index 000000000000..ad98e3138203 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifDeviceGetRequest.cs @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// Retrieves properties and media profiles of an ONVIF device. + public partial class OnvifDeviceGetRequest : MethodRequest + { + /// Initializes a new instance of OnvifDeviceGetRequest. + /// Base class for endpoints. + /// is null. + public OnvifDeviceGetRequest(EndpointBase endpoint) + { + if (endpoint == null) + { + throw new ArgumentNullException(nameof(endpoint)); + } + + Endpoint = endpoint; + MethodName = "onvifDeviceGet"; + } + + /// Initializes a new instance of OnvifDeviceGetRequest. + /// Direct method method name. + /// Video Analyzer API version. + /// Base class for endpoints. 
+ internal OnvifDeviceGetRequest(string methodName, string apiVersion, EndpointBase endpoint) : base(methodName, apiVersion) + { + Endpoint = endpoint; + MethodName = methodName ?? "onvifDeviceGet"; + } + + /// Base class for endpoints. + public EndpointBase Endpoint { get; set; } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifDns.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifDns.Serialization.cs new file mode 100644 index 000000000000..a9dae0a283d5 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifDns.Serialization.cs @@ -0,0 +1,98 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + public partial class OnvifDns : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + if (Optional.IsDefined(FromDhcp)) + { + writer.WritePropertyName("fromDhcp"); + writer.WriteBooleanValue(FromDhcp.Value); + } + if (Optional.IsCollectionDefined(Ipv4Address)) + { + writer.WritePropertyName("ipv4Address"); + writer.WriteStartArray(); + foreach (var item in Ipv4Address) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + if (Optional.IsCollectionDefined(Ipv6Address)) + { + writer.WritePropertyName("ipv6Address"); + writer.WriteStartArray(); + foreach (var item in Ipv6Address) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + } + writer.WriteEndObject(); + } + + internal static OnvifDns DeserializeOnvifDns(JsonElement element) + { + Optional fromDhcp = default; + Optional> ipv4Address = default; + Optional> ipv6Address = default; + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("fromDhcp")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + fromDhcp = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("ipv4Address")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + ipv4Address = array; + continue; + } + if (property.NameEquals("ipv6Address")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetString()); + } + ipv6Address = array; + continue; + } + } + return new OnvifDns(Optional.ToNullable(fromDhcp), Optional.ToList(ipv4Address), Optional.ToList(ipv6Address)); + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifDns.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifDns.cs new file mode 100644 index 000000000000..414e4f30ade7 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifDns.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
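The two request types above (`onvifDeviceDiscover` and `onvifDeviceGet`) are invoked as IoT Hub direct methods, like the other requests in this models-only SDK. A hedged sketch follows, assuming a `ServiceClient` plus `deviceId`/`moduleId` are already configured, and treating the `UnsecuredEndpoint` shape, the camera hostname, and the discovery duration as placeholders:

```C#
// Discover ONVIF cameras on the module's subnet, then query one of them (illustrative only).
var discoverRequest = new OnvifDeviceDiscoverRequest { DiscoveryDuration = "PT8S" }; // ISO 8601 duration, placeholder value
var discoverMethod = new CloudToDeviceMethod(discoverRequest.MethodName);
discoverMethod.SetPayloadJson(discoverRequest.GetPayloadAsJson());
var discoverResponse = await serviceClient.InvokeDeviceMethodAsync(deviceId, moduleId, discoverMethod);
Console.WriteLine(discoverResponse.GetPayloadAsJson());

// Retrieve media profiles and system information for a specific camera.
var getRequest = new OnvifDeviceGetRequest(new UnsecuredEndpoint("rtsp://camera-hostname")); // placeholder endpoint
var getMethod = new CloudToDeviceMethod(getRequest.MethodName);
getMethod.SetPayloadJson(getRequest.GetPayloadAsJson());
var getResponse = await serviceClient.InvokeDeviceMethodAsync(deviceId, moduleId, getMethod);
Console.WriteLine(getResponse.GetPayloadAsJson());
```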
+ +// + +#nullable disable + +using System.Collections.Generic; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// The ONVIF device DNS properties. + public partial class OnvifDns + { + /// Initializes a new instance of OnvifDns. + public OnvifDns() + { + Ipv4Address = new ChangeTrackingList(); + Ipv6Address = new ChangeTrackingList(); + } + + /// Initializes a new instance of OnvifDns. + /// Result value showing if the ONVIF device is configured to use DHCP. + /// An array of IPv4 address for the discovered ONVIF device. + /// An array of IPv6 address for the discovered ONVIF device. + internal OnvifDns(bool? fromDhcp, IList ipv4Address, IList ipv6Address) + { + FromDhcp = fromDhcp; + Ipv4Address = ipv4Address; + Ipv6Address = ipv6Address; + } + + /// Result value showing if the ONVIF device is configured to use DHCP. + public bool? FromDhcp { get; set; } + /// An array of IPv4 address for the discovered ONVIF device. + public IList Ipv4Address { get; } + /// An array of IPv6 address for the discovered ONVIF device. + public IList Ipv6Address { get; } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifHostName.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifHostName.Serialization.cs new file mode 100644 index 000000000000..b36ea014744e --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifHostName.Serialization.cs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.Text.Json; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + public partial class OnvifHostName : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + if (Optional.IsDefined(FromDhcp)) + { + writer.WritePropertyName("fromDhcp"); + writer.WriteBooleanValue(FromDhcp.Value); + } + if (Optional.IsDefined(Hostname)) + { + writer.WritePropertyName("hostname"); + writer.WriteStringValue(Hostname); + } + writer.WriteEndObject(); + } + + internal static OnvifHostName DeserializeOnvifHostName(JsonElement element) + { + Optional fromDhcp = default; + Optional hostname = default; + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("fromDhcp")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + fromDhcp = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("hostname")) + { + hostname = property.Value.GetString(); + continue; + } + } + return new OnvifHostName(Optional.ToNullable(fromDhcp), hostname.Value); + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifHostName.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifHostName.cs new file mode 100644 index 000000000000..a2c719778eb2 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifHostName.cs @@ -0,0 +1,32 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// The ONVIF device DNS properties. + public partial class OnvifHostName + { + /// Initializes a new instance of OnvifHostName. + public OnvifHostName() + { + } + + /// Initializes a new instance of OnvifHostName. 
+ /// Result value showing if the ONVIF device is configured to use DHCP. + /// The hostname of the ONVIF device. + internal OnvifHostName(bool? fromDhcp, string hostname) + { + FromDhcp = fromDhcp; + Hostname = hostname; + } + + /// Result value showing if the ONVIF device is configured to use DHCP. + public bool? FromDhcp { get; set; } + /// The hostname of the ONVIF device. + public string Hostname { get; set; } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifSystemDateTime.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifSystemDateTime.Serialization.cs new file mode 100644 index 000000000000..1e6cd2e24486 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifSystemDateTime.Serialization.cs @@ -0,0 +1,67 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.Text.Json; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + public partial class OnvifSystemDateTime : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + if (Optional.IsDefined(Type)) + { + writer.WritePropertyName("type"); + writer.WriteStringValue(Type.Value.ToString()); + } + if (Optional.IsDefined(Time)) + { + writer.WritePropertyName("time"); + writer.WriteStringValue(Time); + } + if (Optional.IsDefined(TimeZone)) + { + writer.WritePropertyName("timeZone"); + writer.WriteStringValue(TimeZone); + } + writer.WriteEndObject(); + } + + internal static OnvifSystemDateTime DeserializeOnvifSystemDateTime(JsonElement element) + { + Optional type = default; + Optional time = default; + Optional timeZone = default; + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("type")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + type = new OnvifSystemDateTimeType(property.Value.GetString()); + continue; + } + if (property.NameEquals("time")) + { + time = property.Value.GetString(); + continue; + } + if (property.NameEquals("timeZone")) + { + timeZone = property.Value.GetString(); + continue; + } + } + return new OnvifSystemDateTime(Optional.ToNullable(type), time.Value, timeZone.Value); + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifSystemDateTime.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifSystemDateTime.cs new file mode 100644 index 000000000000..0e1da08e0ddf --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifSystemDateTime.cs @@ -0,0 +1,36 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// The ONVIF device DNS properties. + public partial class OnvifSystemDateTime + { + /// Initializes a new instance of OnvifSystemDateTime. + public OnvifSystemDateTime() + { + } + + /// Initializes a new instance of OnvifSystemDateTime. + /// An enum value determining whether the date time was configured using NTP or manual. + /// The device datetime returned when calling the request. + /// The timezone of the ONVIF device datetime. + internal OnvifSystemDateTime(OnvifSystemDateTimeType? 
type, string time, string timeZone) + { + Type = type; + Time = time; + TimeZone = timeZone; + } + + /// An enum value determining whether the date time was configured using NTP or manual. + public OnvifSystemDateTimeType? Type { get; set; } + /// The device datetime returned when calling the request. + public string Time { get; set; } + /// The timezone of the ONVIF device datetime. + public string TimeZone { get; set; } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifSystemDateTimeType.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifSystemDateTimeType.cs new file mode 100644 index 000000000000..804323701245 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/OnvifSystemDateTimeType.cs @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// An enum value determining whether the date time was configured using NTP or manual. + public readonly partial struct OnvifSystemDateTimeType : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public OnvifSystemDateTimeType(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string NtpValue = "Ntp"; + private const string ManualValue = "Manual"; + + /// Ntp. + public static OnvifSystemDateTimeType Ntp { get; } = new OnvifSystemDateTimeType(NtpValue); + /// Manual. + public static OnvifSystemDateTimeType Manual { get; } = new OnvifSystemDateTimeType(ManualValue); + /// Determines if two values are the same. + public static bool operator ==(OnvifSystemDateTimeType left, OnvifSystemDateTimeType right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(OnvifSystemDateTimeType left, OnvifSystemDateTimeType right) => !left.Equals(right); + /// Converts a string to a . + public static implicit operator OnvifSystemDateTimeType(string value) => new OnvifSystemDateTimeType(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is OnvifSystemDateTimeType other && Equals(other); + /// + public bool Equals(OnvifSystemDateTimeType other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value?.GetHashCode() ?? 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/PipelineTopologySetRequestBody.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/PipelineTopologySetRequestBody.cs index 4d165a1dc4d5..f06dc0624d3a 100644 --- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/PipelineTopologySetRequestBody.cs +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/PipelineTopologySetRequestBody.cs @@ -9,7 +9,14 @@ namespace Azure.Media.VideoAnalyzer.Edge.Models { - /// Pipeline topology resource representation. + /// + /// Pipeline topology describes the processing steps to be applied when processing media for a particular outcome. 
The topology should be defined according to the scenario to be achieved and can be reused across many pipeline instances which share the same processing characteristics. For instance, a pipeline topology which acquires data from an RTSP camera, processes it with a specific AI model and stores the data in the cloud can be reused across many different cameras, as long as the same processing is applied across all the cameras. Individual instance properties can be defined through the use of user-defined parameters, which allow a topology to be parameterized, thus allowing individual pipelines to refer to different values, such as individual cameras' RTSP endpoints and credentials. Overall a topology is composed of the following: + /// + /// - Parameters: list of user-defined parameters that can be referenced across the topology nodes. + /// - Sources: list of one or more data source nodes such as an RTSP source which allows for media to be ingested from cameras. + /// - Processors: list of nodes which perform data analysis or transformations. + /// - Sinks: list of one or more data sinks which allow for data to be stored or exported to other destinations. + /// internal partial class PipelineTopologySetRequestBody : MethodRequest { /// Initializes a new instance of PipelineTopologySetRequestBody. diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RateControl.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RateControl.Serialization.cs new file mode 100644 index 000000000000..69812c44f640 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RateControl.Serialization.cs @@ -0,0 +1,93 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License.
+ +// + +#nullable disable + +using System.Text.Json; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + public partial class RateControl : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + if (Optional.IsDefined(BitRateLimit)) + { + writer.WritePropertyName("bitRateLimit"); + writer.WriteNumberValue(BitRateLimit.Value); + } + if (Optional.IsDefined(EncodingInterval)) + { + writer.WritePropertyName("encodingInterval"); + writer.WriteNumberValue(EncodingInterval.Value); + } + if (Optional.IsDefined(FrameRateLimit)) + { + writer.WritePropertyName("frameRateLimit"); + writer.WriteNumberValue(FrameRateLimit.Value); + } + if (Optional.IsDefined(GuaranteedFrameRate)) + { + writer.WritePropertyName("guaranteedFrameRate"); + writer.WriteBooleanValue(GuaranteedFrameRate.Value); + } + writer.WriteEndObject(); + } + + internal static RateControl DeserializeRateControl(JsonElement element) + { + Optional bitRateLimit = default; + Optional encodingInterval = default; + Optional frameRateLimit = default; + Optional guaranteedFrameRate = default; + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("bitRateLimit")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + bitRateLimit = property.Value.GetSingle(); + continue; + } + if (property.NameEquals("encodingInterval")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + encodingInterval = property.Value.GetSingle(); + continue; + } + if (property.NameEquals("frameRateLimit")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + frameRateLimit = property.Value.GetSingle(); + continue; + } + if (property.NameEquals("guaranteedFrameRate")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + guaranteedFrameRate = property.Value.GetBoolean(); + continue; + } + } + return new RateControl(Optional.ToNullable(bitRateLimit), Optional.ToNullable(encodingInterval), Optional.ToNullable(frameRateLimit), Optional.ToNullable(guaranteedFrameRate)); + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RateControl.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RateControl.cs new file mode 100644 index 000000000000..2e5c26b29a2a --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RateControl.cs @@ -0,0 +1,40 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// Class representing the video's rate control. + public partial class RateControl + { + /// Initializes a new instance of RateControl. + public RateControl() + { + } + + /// Initializes a new instance of RateControl. + /// the maximum output bitrate in kbps. + /// Interval at which images are encoded and transmitted. + /// Maximum output framerate in fps. + /// A value of true indicates that frame rate is a fixed value rather than an upper limit, and that the video encoder shall prioritize frame rate over all other adaptable configuration values such as bitrate. + internal RateControl(float? bitRateLimit, float? encodingInterval, float? frameRateLimit, bool? 
guaranteedFrameRate) + { + BitRateLimit = bitRateLimit; + EncodingInterval = encodingInterval; + FrameRateLimit = frameRateLimit; + GuaranteedFrameRate = guaranteedFrameRate; + } + + /// the maximum output bitrate in kbps. + public float? BitRateLimit { get; set; } + /// Interval at which images are encoded and transmitted. + public float? EncodingInterval { get; set; } + /// Maximum output framerate in fps. + public float? FrameRateLimit { get; set; } + /// A value of true indicates that frame rate is a fixed value rather than an upper limit, and that the video encoder shall prioritize frame rate over all other adaptable configuration values such as bitrate. + public bool? GuaranteedFrameRate { get; set; } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapter.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapter.Serialization.cs new file mode 100644 index 000000000000..f29580601642 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapter.Serialization.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.Text.Json; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + public partial class RemoteDeviceAdapter : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + writer.WritePropertyName("name"); + writer.WriteStringValue(Name); + if (Optional.IsDefined(SystemData)) + { + writer.WritePropertyName("systemData"); + writer.WriteObjectValue(SystemData); + } + if (Optional.IsDefined(Properties)) + { + writer.WritePropertyName("properties"); + writer.WriteObjectValue(Properties); + } + writer.WriteEndObject(); + } + + internal static RemoteDeviceAdapter DeserializeRemoteDeviceAdapter(JsonElement element) + { + string name = default; + Optional systemData = default; + Optional properties = default; + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name")) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("systemData")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + systemData = SystemData.DeserializeSystemData(property.Value); + continue; + } + if (property.NameEquals("properties")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + properties = RemoteDeviceAdapterProperties.DeserializeRemoteDeviceAdapterProperties(property.Value); + continue; + } + } + return new RemoteDeviceAdapter(name, systemData.Value, properties.Value); + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapter.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapter.cs new file mode 100644 index 000000000000..2e805276d05b --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapter.cs @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
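For context on `RateControl` above: it surfaces the encoder's bitrate and framerate limits for an ONVIF media profile. A hypothetical inspection is sketched below, assuming the `VideoEncoderConfiguration` model (defined elsewhere in this change) exposes a `RateControl` property and that `onvifDevice` was parsed from an `onvifDeviceGet` response:

```C#
// Hypothetical walk over a parsed OnvifDevice result; the RateControl property
// name on VideoEncoderConfiguration is an assumption for illustration.
foreach (MediaProfile profile in onvifDevice.MediaProfiles)
{
    RateControl rateControl = profile.VideoEncoderConfiguration?.RateControl;
    if (rateControl != null)
    {
        Console.WriteLine($"{profile.Name}: {rateControl.FrameRateLimit} fps max, {rateControl.BitRateLimit} kbps max");
    }
}
```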
+ +// + +#nullable disable + +using System; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// The Video Analyzer edge module can act as a transparent gateway for video, enabling IoT devices to send video to the cloud from behind a firewall. A remote device adapter should be created for each such IoT device. Communication between the cloud and IoT device would then flow via the Video Analyzer edge module. + public partial class RemoteDeviceAdapter + { + /// Initializes a new instance of RemoteDeviceAdapter. + /// The unique identifier for the remote device adapter. + /// is null. + public RemoteDeviceAdapter(string name) + { + if (name == null) + { + throw new ArgumentNullException(nameof(name)); + } + + Name = name; + } + + /// Initializes a new instance of RemoteDeviceAdapter. + /// The unique identifier for the remote device adapter. + /// Read-only system metadata associated with this object. + /// Properties of the remote device adapter. + internal RemoteDeviceAdapter(string name, SystemData systemData, RemoteDeviceAdapterProperties properties) + { + Name = name; + SystemData = systemData; + Properties = properties; + } + + /// The unique identifier for the remote device adapter. + public string Name { get; set; } + /// Read-only system metadata associated with this object. + public SystemData SystemData { get; set; } + /// Properties of the remote device adapter. + public RemoteDeviceAdapterProperties Properties { get; set; } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterCollection.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterCollection.Serialization.cs new file mode 100644 index 000000000000..451ed3535a0d --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterCollection.Serialization.cs @@ -0,0 +1,67 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + public partial class RemoteDeviceAdapterCollection : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + if (Optional.IsCollectionDefined(Value)) + { + writer.WritePropertyName("value"); + writer.WriteStartArray(); + foreach (var item in Value) + { + writer.WriteObjectValue(item); + } + writer.WriteEndArray(); + } + if (Optional.IsDefined(ContinuationToken)) + { + writer.WritePropertyName("@continuationToken"); + writer.WriteStringValue(ContinuationToken); + } + writer.WriteEndObject(); + } + + internal static RemoteDeviceAdapterCollection DeserializeRemoteDeviceAdapterCollection(JsonElement element) + { + Optional> value = default; + Optional continuationToken = default; + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("value")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(RemoteDeviceAdapter.DeserializeRemoteDeviceAdapter(item)); + } + value = array; + continue; + } + if (property.NameEquals("@continuationToken")) + { + continuationToken = property.Value.GetString(); + continue; + } + } + return new RemoteDeviceAdapterCollection(Optional.ToList(value), continuationToken.Value); + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterCollection.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterCollection.cs new file mode 100644 index 000000000000..6990de268852 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterCollection.cs @@ -0,0 +1,36 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.Collections.Generic; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// A list of remote device adapters. + public partial class RemoteDeviceAdapterCollection + { + /// Initializes a new instance of RemoteDeviceAdapterCollection. + public RemoteDeviceAdapterCollection() + { + Value = new ChangeTrackingList(); + } + + /// Initializes a new instance of RemoteDeviceAdapterCollection. + /// An array of remote device adapters. + /// A continuation token to use in subsequent calls to enumerate through the remote device adapter collection. This is used when the collection contains too many results to return in one response. + internal RemoteDeviceAdapterCollection(IList value, string continuationToken) + { + Value = value; + ContinuationToken = continuationToken; + } + + /// An array of remote device adapters. + public IList Value { get; } + /// A continuation token to use in subsequent calls to enumerate through the remote device adapter collection. This is used when the collection contains too many results to return in one response. 
+ public string ContinuationToken { get; set; } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterDeleteRequest.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterDeleteRequest.Serialization.cs new file mode 100644 index 000000000000..e465de3021df --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterDeleteRequest.Serialization.cs @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.Text.Json; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + public partial class RemoteDeviceAdapterDeleteRequest : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + writer.WritePropertyName("name"); + writer.WriteStringValue(Name); + if (Optional.IsDefined(ApiVersion)) + { + writer.WritePropertyName("@apiVersion"); + writer.WriteStringValue(ApiVersion); + } + writer.WriteEndObject(); + } + + internal static RemoteDeviceAdapterDeleteRequest DeserializeRemoteDeviceAdapterDeleteRequest(JsonElement element) + { + string name = default; + string methodName = default; + Optional apiVersion = default; + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name")) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("methodName")) + { + methodName = property.Value.GetString(); + continue; + } + if (property.NameEquals("@apiVersion")) + { + apiVersion = property.Value.GetString(); + continue; + } + } + return new RemoteDeviceAdapterDeleteRequest(methodName, apiVersion.Value, name); + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterDeleteRequest.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterDeleteRequest.cs new file mode 100644 index 000000000000..644a867e708a --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterDeleteRequest.cs @@ -0,0 +1,37 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// Deletes an existing remote device adapter. + public partial class RemoteDeviceAdapterDeleteRequest : MethodRequestEmptyBodyBase + { + /// Initializes a new instance of RemoteDeviceAdapterDeleteRequest. + /// Resource name. + /// is null. + public RemoteDeviceAdapterDeleteRequest(string name) : base(name) + { + if (name == null) + { + throw new ArgumentNullException(nameof(name)); + } + + MethodName = "remoteDeviceAdapterDelete"; + } + + /// Initializes a new instance of RemoteDeviceAdapterDeleteRequest. + /// Direct method method name. + /// Video Analyzer API version. + /// Resource name. + internal RemoteDeviceAdapterDeleteRequest(string methodName, string apiVersion, string name) : base(methodName, apiVersion, name) + { + MethodName = methodName ?? 
"remoteDeviceAdapterDelete"; + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterGetRequest.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterGetRequest.Serialization.cs new file mode 100644 index 000000000000..15d1b4769054 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterGetRequest.Serialization.cs @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.Text.Json; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + public partial class RemoteDeviceAdapterGetRequest : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + writer.WritePropertyName("name"); + writer.WriteStringValue(Name); + if (Optional.IsDefined(ApiVersion)) + { + writer.WritePropertyName("@apiVersion"); + writer.WriteStringValue(ApiVersion); + } + writer.WriteEndObject(); + } + + internal static RemoteDeviceAdapterGetRequest DeserializeRemoteDeviceAdapterGetRequest(JsonElement element) + { + string name = default; + string methodName = default; + Optional apiVersion = default; + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name")) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("methodName")) + { + methodName = property.Value.GetString(); + continue; + } + if (property.NameEquals("@apiVersion")) + { + apiVersion = property.Value.GetString(); + continue; + } + } + return new RemoteDeviceAdapterGetRequest(methodName, apiVersion.Value, name); + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterGetRequest.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterGetRequest.cs new file mode 100644 index 000000000000..cbb424ceb869 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterGetRequest.cs @@ -0,0 +1,37 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// Retrieves an existing remote device adapter. + public partial class RemoteDeviceAdapterGetRequest : MethodRequestEmptyBodyBase + { + /// Initializes a new instance of RemoteDeviceAdapterGetRequest. + /// Resource name. + /// is null. + public RemoteDeviceAdapterGetRequest(string name) : base(name) + { + if (name == null) + { + throw new ArgumentNullException(nameof(name)); + } + + MethodName = "remoteDeviceAdapterGet"; + } + + /// Initializes a new instance of RemoteDeviceAdapterGetRequest. + /// Direct method method name. + /// Video Analyzer API version. + /// Resource name. + internal RemoteDeviceAdapterGetRequest(string methodName, string apiVersion, string name) : base(methodName, apiVersion, name) + { + MethodName = methodName ?? 
"remoteDeviceAdapterGet"; + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterListRequest.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterListRequest.Serialization.cs new file mode 100644 index 000000000000..451d33f1c2f9 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterListRequest.Serialization.cs @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.Text.Json; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + public partial class RemoteDeviceAdapterListRequest : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + if (Optional.IsDefined(ApiVersion)) + { + writer.WritePropertyName("@apiVersion"); + writer.WriteStringValue(ApiVersion); + } + writer.WriteEndObject(); + } + + internal static RemoteDeviceAdapterListRequest DeserializeRemoteDeviceAdapterListRequest(JsonElement element) + { + string methodName = default; + Optional apiVersion = default; + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("methodName")) + { + methodName = property.Value.GetString(); + continue; + } + if (property.NameEquals("@apiVersion")) + { + apiVersion = property.Value.GetString(); + continue; + } + } + return new RemoteDeviceAdapterListRequest(methodName, apiVersion.Value); + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterListRequest.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterListRequest.cs new file mode 100644 index 000000000000..cd22a021c810 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterListRequest.cs @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// List all existing remote device adapters. + public partial class RemoteDeviceAdapterListRequest : MethodRequest + { + /// Initializes a new instance of RemoteDeviceAdapterListRequest. + public RemoteDeviceAdapterListRequest() + { + MethodName = "remoteDeviceAdapterList"; + } + + /// Initializes a new instance of RemoteDeviceAdapterListRequest. + /// Direct method method name. + /// Video Analyzer API version. + internal RemoteDeviceAdapterListRequest(string methodName, string apiVersion) : base(methodName, apiVersion) + { + MethodName = methodName ?? "remoteDeviceAdapterList"; + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterProperties.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterProperties.Serialization.cs new file mode 100644 index 000000000000..6729b8f9d1c5 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterProperties.Serialization.cs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System.Text.Json; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + public partial class RemoteDeviceAdapterProperties : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + if (Optional.IsDefined(Description)) + { + writer.WritePropertyName("description"); + writer.WriteStringValue(Description); + } + writer.WritePropertyName("target"); + writer.WriteObjectValue(Target); + writer.WritePropertyName("iotHubDeviceConnection"); + writer.WriteObjectValue(IotHubDeviceConnection); + writer.WriteEndObject(); + } + + internal static RemoteDeviceAdapterProperties DeserializeRemoteDeviceAdapterProperties(JsonElement element) + { + Optional description = default; + RemoteDeviceAdapterTarget target = default; + IotHubDeviceConnection iotHubDeviceConnection = default; + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("description")) + { + description = property.Value.GetString(); + continue; + } + if (property.NameEquals("target")) + { + target = RemoteDeviceAdapterTarget.DeserializeRemoteDeviceAdapterTarget(property.Value); + continue; + } + if (property.NameEquals("iotHubDeviceConnection")) + { + iotHubDeviceConnection = IotHubDeviceConnection.DeserializeIotHubDeviceConnection(property.Value); + continue; + } + } + return new RemoteDeviceAdapterProperties(description.Value, target, iotHubDeviceConnection); + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterProperties.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterProperties.cs new file mode 100644 index 000000000000..a4219ce81f65 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterProperties.cs @@ -0,0 +1,52 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// Remote device adapter properties. + public partial class RemoteDeviceAdapterProperties + { + /// Initializes a new instance of RemoteDeviceAdapterProperties. + /// The IoT device to which this remote device will connect. + /// Information that enables communication between the IoT Hub and the IoT device - allowing this edge module to act as a transparent gateway between the two. + /// or is null. + public RemoteDeviceAdapterProperties(RemoteDeviceAdapterTarget target, IotHubDeviceConnection iotHubDeviceConnection) + { + if (target == null) + { + throw new ArgumentNullException(nameof(target)); + } + if (iotHubDeviceConnection == null) + { + throw new ArgumentNullException(nameof(iotHubDeviceConnection)); + } + + Target = target; + IotHubDeviceConnection = iotHubDeviceConnection; + } + + /// Initializes a new instance of RemoteDeviceAdapterProperties. + /// An optional description for the remote device adapter. + /// The IoT device to which this remote device will connect. + /// Information that enables communication between the IoT Hub and the IoT device - allowing this edge module to act as a transparent gateway between the two. 
+ internal RemoteDeviceAdapterProperties(string description, RemoteDeviceAdapterTarget target, IotHubDeviceConnection iotHubDeviceConnection) + { + Description = description; + Target = target; + IotHubDeviceConnection = iotHubDeviceConnection; + } + + /// An optional description for the remote device adapter. + public string Description { get; set; } + /// The IoT device to which this remote device will connect. + public RemoteDeviceAdapterTarget Target { get; set; } + /// Information that enables communication between the IoT Hub and the IoT device - allowing this edge module to act as a transparent gateway between the two. + public IotHubDeviceConnection IotHubDeviceConnection { get; set; } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterSetRequest.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterSetRequest.Serialization.cs new file mode 100644 index 000000000000..2996d0a37d0f --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterSetRequest.Serialization.cs @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.Text.Json; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + public partial class RemoteDeviceAdapterSetRequest : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + writer.WritePropertyName("remoteDeviceAdapter"); + writer.WriteObjectValue(RemoteDeviceAdapter); + if (Optional.IsDefined(ApiVersion)) + { + writer.WritePropertyName("@apiVersion"); + writer.WriteStringValue(ApiVersion); + } + writer.WriteEndObject(); + } + + internal static RemoteDeviceAdapterSetRequest DeserializeRemoteDeviceAdapterSetRequest(JsonElement element) + { + RemoteDeviceAdapter remoteDeviceAdapter = default; + string methodName = default; + Optional apiVersion = default; + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("remoteDeviceAdapter")) + { + remoteDeviceAdapter = RemoteDeviceAdapter.DeserializeRemoteDeviceAdapter(property.Value); + continue; + } + if (property.NameEquals("methodName")) + { + methodName = property.Value.GetString(); + continue; + } + if (property.NameEquals("@apiVersion")) + { + apiVersion = property.Value.GetString(); + continue; + } + } + return new RemoteDeviceAdapterSetRequest(methodName, apiVersion.Value, remoteDeviceAdapter); + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterSetRequest.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterSetRequest.cs new file mode 100644 index 000000000000..393f36d87081 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterSetRequest.cs @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// Creates a new remote device adapter or updates an existing one. + public partial class RemoteDeviceAdapterSetRequest : MethodRequest + { + /// Initializes a new instance of RemoteDeviceAdapterSetRequest. 
+ /// The Video Analyzer edge module can act as a transparent gateway for video, enabling IoT devices to send video to the cloud from behind a firewall. A remote device adapter should be created for each such IoT device. Communication between the cloud and IoT device would then flow via the Video Analyzer edge module. + /// is null. + public RemoteDeviceAdapterSetRequest(RemoteDeviceAdapter remoteDeviceAdapter) + { + if (remoteDeviceAdapter == null) + { + throw new ArgumentNullException(nameof(remoteDeviceAdapter)); + } + + RemoteDeviceAdapter = remoteDeviceAdapter; + MethodName = "remoteDeviceAdapterSet"; + } + + /// Initializes a new instance of RemoteDeviceAdapterSetRequest. + /// Direct method method name. + /// Video Analyzer API version. + /// The Video Analyzer edge module can act as a transparent gateway for video, enabling IoT devices to send video to the cloud from behind a firewall. A remote device adapter should be created for each such IoT device. Communication between the cloud and IoT device would then flow via the Video Analyzer edge module. + internal RemoteDeviceAdapterSetRequest(string methodName, string apiVersion, RemoteDeviceAdapter remoteDeviceAdapter) : base(methodName, apiVersion) + { + RemoteDeviceAdapter = remoteDeviceAdapter; + MethodName = methodName ?? "remoteDeviceAdapterSet"; + } + + /// The Video Analyzer edge module can act as a transparent gateway for video, enabling IoT devices to send video to the cloud from behind a firewall. A remote device adapter should be created for each such IoT device. Communication between the cloud and IoT device would then flow via the Video Analyzer edge module. + public RemoteDeviceAdapter RemoteDeviceAdapter { get; set; } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterSetRequestBody.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterSetRequestBody.Serialization.cs new file mode 100644 index 000000000000..d763e3ee897c --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterSetRequestBody.Serialization.cs @@ -0,0 +1,86 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
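Note: with `RemoteDeviceAdapterSetRequest` defined above, registering an adapter follows the same direct-method pattern this package uses for other requests. The sketch below is illustrative only; the `RemoteDeviceAdapter` and `IotHubDeviceConnection` constructor shapes, the `Credentials` property, and the `serviceClient`/`deviceId`/`moduleId` variables are assumptions and are not part of this diff.

```C#
// Sketch under stated assumptions: RemoteDeviceAdapter(name) and
// IotHubDeviceConnection(deviceId) constructors are not shown in this diff.
var adapter = new RemoteDeviceAdapter("remoteDeviceAdapterSample")
{
    Properties = new RemoteDeviceAdapterProperties(
        new RemoteDeviceAdapterTarget("camera-simulator"),             // host reachable by the edge module
        new IotHubDeviceConnection("iotHubDeviceName")
        {
            Credentials = new SymmetricKeyCredentials("<primary-key>") // placeholder IoT Hub device key
        })
};

var setAdapterRequest = new RemoteDeviceAdapterSetRequest(adapter);
var adapterMethod = new CloudToDeviceMethod(setAdapterRequest.MethodName);
adapterMethod.SetPayloadJson(setAdapterRequest.GetPayloadAsJson());
var setAdapterResponse = await serviceClient.InvokeDeviceMethodAsync(deviceId, moduleId, adapterMethod);
```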
+ +// + +#nullable disable + +using System.Text.Json; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + internal partial class RemoteDeviceAdapterSetRequestBody : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + writer.WritePropertyName("name"); + writer.WriteStringValue(Name); + if (Optional.IsDefined(SystemData)) + { + writer.WritePropertyName("systemData"); + writer.WriteObjectValue(SystemData); + } + if (Optional.IsDefined(Properties)) + { + writer.WritePropertyName("properties"); + writer.WriteObjectValue(Properties); + } + if (Optional.IsDefined(ApiVersion)) + { + writer.WritePropertyName("@apiVersion"); + writer.WriteStringValue(ApiVersion); + } + writer.WriteEndObject(); + } + + internal static RemoteDeviceAdapterSetRequestBody DeserializeRemoteDeviceAdapterSetRequestBody(JsonElement element) + { + string name = default; + Optional systemData = default; + Optional properties = default; + string methodName = default; + Optional apiVersion = default; + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name")) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("systemData")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + systemData = SystemData.DeserializeSystemData(property.Value); + continue; + } + if (property.NameEquals("properties")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + properties = RemoteDeviceAdapterProperties.DeserializeRemoteDeviceAdapterProperties(property.Value); + continue; + } + if (property.NameEquals("methodName")) + { + methodName = property.Value.GetString(); + continue; + } + if (property.NameEquals("@apiVersion")) + { + apiVersion = property.Value.GetString(); + continue; + } + } + return new RemoteDeviceAdapterSetRequestBody(methodName, apiVersion.Value, name, systemData.Value, properties.Value); + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterSetRequestBody.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterSetRequestBody.cs new file mode 100644 index 000000000000..f1f0de9c3858 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterSetRequestBody.cs @@ -0,0 +1,50 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// The Video Analyzer edge module can act as a transparent gateway for video, enabling IoT devices to send video to the cloud from behind a firewall. A remote device adapter should be created for each such IoT device. Communication between the cloud and IoT device would then flow via the Video Analyzer edge module. + internal partial class RemoteDeviceAdapterSetRequestBody : MethodRequest + { + /// Initializes a new instance of RemoteDeviceAdapterSetRequestBody. + /// The unique identifier for the remote device adapter. + /// is null. + public RemoteDeviceAdapterSetRequestBody(string name) + { + if (name == null) + { + throw new ArgumentNullException(nameof(name)); + } + + Name = name; + MethodName = "RemoteDeviceAdapterSetRequestBody"; + } + + /// Initializes a new instance of RemoteDeviceAdapterSetRequestBody. 
+ /// Direct method method name. + /// Video Analyzer API version. + /// The unique identifier for the remote device adapter. + /// Read-only system metadata associated with this object. + /// Properties of the remote device adapter. + internal RemoteDeviceAdapterSetRequestBody(string methodName, string apiVersion, string name, SystemData systemData, RemoteDeviceAdapterProperties properties) : base(methodName, apiVersion) + { + Name = name; + SystemData = systemData; + Properties = properties; + MethodName = methodName ?? "RemoteDeviceAdapterSetRequestBody"; + } + + /// The unique identifier for the remote device adapter. + public string Name { get; set; } + /// Read-only system metadata associated with this object. + public SystemData SystemData { get; set; } + /// Properties of the remote device adapter. + public RemoteDeviceAdapterProperties Properties { get; set; } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterTarget.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterTarget.Serialization.cs new file mode 100644 index 000000000000..47b9ec750ef5 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterTarget.Serialization.cs @@ -0,0 +1,37 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.Text.Json; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + public partial class RemoteDeviceAdapterTarget : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + writer.WritePropertyName("host"); + writer.WriteStringValue(Host); + writer.WriteEndObject(); + } + + internal static RemoteDeviceAdapterTarget DeserializeRemoteDeviceAdapterTarget(JsonElement element) + { + string host = default; + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("host")) + { + host = property.Value.GetString(); + continue; + } + } + return new RemoteDeviceAdapterTarget(host); + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterTarget.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterTarget.cs new file mode 100644 index 000000000000..27e23b9fae7a --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/RemoteDeviceAdapterTarget.cs @@ -0,0 +1,31 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// Properties of the remote device adapter target. + public partial class RemoteDeviceAdapterTarget + { + /// Initializes a new instance of RemoteDeviceAdapterTarget. + /// Hostname or IP address of the remote device. + /// is null. + public RemoteDeviceAdapterTarget(string host) + { + if (host == null) + { + throw new ArgumentNullException(nameof(host)); + } + + Host = host; + } + + /// Hostname or IP address of the remote device. 
+ public string Host { get; set; } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonCountOperation.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonCountOperation.Serialization.cs index e8c8f54ac1fe..8276d941090a 100644 --- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonCountOperation.Serialization.cs +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonCountOperation.Serialization.cs @@ -28,16 +28,31 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) writer.WritePropertyName("debug"); writer.WriteStringValue(Debug); } + if (Optional.IsDefined(CalibrationConfiguration)) + { + writer.WritePropertyName("calibrationConfiguration"); + writer.WriteStringValue(CalibrationConfiguration); + } if (Optional.IsDefined(CameraConfiguration)) { writer.WritePropertyName("cameraConfiguration"); writer.WriteStringValue(CameraConfiguration); } + if (Optional.IsDefined(CameraCalibratorNodeConfiguration)) + { + writer.WritePropertyName("cameraCalibratorNodeConfiguration"); + writer.WriteStringValue(CameraCalibratorNodeConfiguration); + } if (Optional.IsDefined(DetectorNodeConfiguration)) { writer.WritePropertyName("detectorNodeConfiguration"); writer.WriteStringValue(DetectorNodeConfiguration); } + if (Optional.IsDefined(TrackerNodeConfiguration)) + { + writer.WritePropertyName("trackerNodeConfiguration"); + writer.WriteStringValue(TrackerNodeConfiguration); + } if (Optional.IsDefined(EnableFaceMaskClassifier)) { writer.WritePropertyName("enableFaceMaskClassifier"); @@ -52,8 +67,11 @@ internal static SpatialAnalysisPersonCountOperation DeserializeSpatialAnalysisPe { IList zones = default; Optional debug = default; + Optional calibrationConfiguration = default; Optional cameraConfiguration = default; + Optional cameraCalibratorNodeConfiguration = default; Optional detectorNodeConfiguration = default; + Optional trackerNodeConfiguration = default; Optional enableFaceMaskClassifier = default; string type = default; foreach (var property in element.EnumerateObject()) @@ -73,16 +91,31 @@ internal static SpatialAnalysisPersonCountOperation DeserializeSpatialAnalysisPe debug = property.Value.GetString(); continue; } + if (property.NameEquals("calibrationConfiguration")) + { + calibrationConfiguration = property.Value.GetString(); + continue; + } if (property.NameEquals("cameraConfiguration")) { cameraConfiguration = property.Value.GetString(); continue; } + if (property.NameEquals("cameraCalibratorNodeConfiguration")) + { + cameraCalibratorNodeConfiguration = property.Value.GetString(); + continue; + } if (property.NameEquals("detectorNodeConfiguration")) { detectorNodeConfiguration = property.Value.GetString(); continue; } + if (property.NameEquals("trackerNodeConfiguration")) + { + trackerNodeConfiguration = property.Value.GetString(); + continue; + } if (property.NameEquals("enableFaceMaskClassifier")) { enableFaceMaskClassifier = property.Value.GetString(); @@ -94,7 +127,7 @@ internal static SpatialAnalysisPersonCountOperation DeserializeSpatialAnalysisPe continue; } } - return new SpatialAnalysisPersonCountOperation(type, debug.Value, cameraConfiguration.Value, detectorNodeConfiguration.Value, enableFaceMaskClassifier.Value, zones); + return new SpatialAnalysisPersonCountOperation(type, debug.Value, calibrationConfiguration.Value, cameraConfiguration.Value, 
cameraCalibratorNodeConfiguration.Value, detectorNodeConfiguration.Value, trackerNodeConfiguration.Value, enableFaceMaskClassifier.Value, zones); } } } diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonCountOperation.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonCountOperation.cs index 59a22e425b82..4f5a4fd347b5 100644 --- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonCountOperation.cs +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonCountOperation.cs @@ -31,11 +31,14 @@ public SpatialAnalysisPersonCountOperation(IEnumerable Initializes a new instance of SpatialAnalysisPersonCountOperation. /// The Type discriminator for the derived types. /// If set to 'true', enables debugging mode for this operation. + /// Advanced calibration configuration. /// Advanced camera configuration. + /// Advanced camera calibrator configuration. /// Advanced detector node configuration. + /// Advanced tracker node configuration. /// If set to 'true', enables face mask detection for this operation. /// The list of zones and optional events. - internal SpatialAnalysisPersonCountOperation(string type, string debug, string cameraConfiguration, string detectorNodeConfiguration, string enableFaceMaskClassifier, IList zones) : base(type, debug, cameraConfiguration, detectorNodeConfiguration, enableFaceMaskClassifier) + internal SpatialAnalysisPersonCountOperation(string type, string debug, string calibrationConfiguration, string cameraConfiguration, string cameraCalibratorNodeConfiguration, string detectorNodeConfiguration, string trackerNodeConfiguration, string enableFaceMaskClassifier, IList zones) : base(type, debug, calibrationConfiguration, cameraConfiguration, cameraCalibratorNodeConfiguration, detectorNodeConfiguration, trackerNodeConfiguration, enableFaceMaskClassifier) { Zones = zones; Type = type ?? 
"#Microsoft.VideoAnalyzer.SpatialAnalysisPersonCountOperation"; diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonDistanceOperation.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonDistanceOperation.Serialization.cs index 4a5cdf68ae47..b6439dd1d89e 100644 --- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonDistanceOperation.Serialization.cs +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonDistanceOperation.Serialization.cs @@ -28,16 +28,31 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) writer.WritePropertyName("debug"); writer.WriteStringValue(Debug); } + if (Optional.IsDefined(CalibrationConfiguration)) + { + writer.WritePropertyName("calibrationConfiguration"); + writer.WriteStringValue(CalibrationConfiguration); + } if (Optional.IsDefined(CameraConfiguration)) { writer.WritePropertyName("cameraConfiguration"); writer.WriteStringValue(CameraConfiguration); } + if (Optional.IsDefined(CameraCalibratorNodeConfiguration)) + { + writer.WritePropertyName("cameraCalibratorNodeConfiguration"); + writer.WriteStringValue(CameraCalibratorNodeConfiguration); + } if (Optional.IsDefined(DetectorNodeConfiguration)) { writer.WritePropertyName("detectorNodeConfiguration"); writer.WriteStringValue(DetectorNodeConfiguration); } + if (Optional.IsDefined(TrackerNodeConfiguration)) + { + writer.WritePropertyName("trackerNodeConfiguration"); + writer.WriteStringValue(TrackerNodeConfiguration); + } if (Optional.IsDefined(EnableFaceMaskClassifier)) { writer.WritePropertyName("enableFaceMaskClassifier"); @@ -52,8 +67,11 @@ internal static SpatialAnalysisPersonDistanceOperation DeserializeSpatialAnalysi { IList zones = default; Optional debug = default; + Optional calibrationConfiguration = default; Optional cameraConfiguration = default; + Optional cameraCalibratorNodeConfiguration = default; Optional detectorNodeConfiguration = default; + Optional trackerNodeConfiguration = default; Optional enableFaceMaskClassifier = default; string type = default; foreach (var property in element.EnumerateObject()) @@ -73,16 +91,31 @@ internal static SpatialAnalysisPersonDistanceOperation DeserializeSpatialAnalysi debug = property.Value.GetString(); continue; } + if (property.NameEquals("calibrationConfiguration")) + { + calibrationConfiguration = property.Value.GetString(); + continue; + } if (property.NameEquals("cameraConfiguration")) { cameraConfiguration = property.Value.GetString(); continue; } + if (property.NameEquals("cameraCalibratorNodeConfiguration")) + { + cameraCalibratorNodeConfiguration = property.Value.GetString(); + continue; + } if (property.NameEquals("detectorNodeConfiguration")) { detectorNodeConfiguration = property.Value.GetString(); continue; } + if (property.NameEquals("trackerNodeConfiguration")) + { + trackerNodeConfiguration = property.Value.GetString(); + continue; + } if (property.NameEquals("enableFaceMaskClassifier")) { enableFaceMaskClassifier = property.Value.GetString(); @@ -94,7 +127,7 @@ internal static SpatialAnalysisPersonDistanceOperation DeserializeSpatialAnalysi continue; } } - return new SpatialAnalysisPersonDistanceOperation(type, debug.Value, cameraConfiguration.Value, detectorNodeConfiguration.Value, enableFaceMaskClassifier.Value, zones); + return new SpatialAnalysisPersonDistanceOperation(type, debug.Value, calibrationConfiguration.Value, 
cameraConfiguration.Value, cameraCalibratorNodeConfiguration.Value, detectorNodeConfiguration.Value, trackerNodeConfiguration.Value, enableFaceMaskClassifier.Value, zones); } } } diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonDistanceOperation.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonDistanceOperation.cs index c9b748f7ec38..467cd6c48f7a 100644 --- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonDistanceOperation.cs +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonDistanceOperation.cs @@ -31,11 +31,14 @@ public SpatialAnalysisPersonDistanceOperation(IEnumerable Initializes a new instance of SpatialAnalysisPersonDistanceOperation. /// The Type discriminator for the derived types. /// If set to 'true', enables debugging mode for this operation. + /// Advanced calibration configuration. /// Advanced camera configuration. + /// Advanced camera calibrator configuration. /// Advanced detector node configuration. + /// Advanced tracker node configuration. /// If set to 'true', enables face mask detection for this operation. /// The list of zones with optional events. - internal SpatialAnalysisPersonDistanceOperation(string type, string debug, string cameraConfiguration, string detectorNodeConfiguration, string enableFaceMaskClassifier, IList zones) : base(type, debug, cameraConfiguration, detectorNodeConfiguration, enableFaceMaskClassifier) + internal SpatialAnalysisPersonDistanceOperation(string type, string debug, string calibrationConfiguration, string cameraConfiguration, string cameraCalibratorNodeConfiguration, string detectorNodeConfiguration, string trackerNodeConfiguration, string enableFaceMaskClassifier, IList zones) : base(type, debug, calibrationConfiguration, cameraConfiguration, cameraCalibratorNodeConfiguration, detectorNodeConfiguration, trackerNodeConfiguration, enableFaceMaskClassifier) { Zones = zones; Type = type ?? 
"#Microsoft.VideoAnalyzer.SpatialAnalysisPersonDistanceOperation"; diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonLineCrossingOperation.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonLineCrossingOperation.Serialization.cs index 77320f39985c..4472034c7b70 100644 --- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonLineCrossingOperation.Serialization.cs +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonLineCrossingOperation.Serialization.cs @@ -28,16 +28,31 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) writer.WritePropertyName("debug"); writer.WriteStringValue(Debug); } + if (Optional.IsDefined(CalibrationConfiguration)) + { + writer.WritePropertyName("calibrationConfiguration"); + writer.WriteStringValue(CalibrationConfiguration); + } if (Optional.IsDefined(CameraConfiguration)) { writer.WritePropertyName("cameraConfiguration"); writer.WriteStringValue(CameraConfiguration); } + if (Optional.IsDefined(CameraCalibratorNodeConfiguration)) + { + writer.WritePropertyName("cameraCalibratorNodeConfiguration"); + writer.WriteStringValue(CameraCalibratorNodeConfiguration); + } if (Optional.IsDefined(DetectorNodeConfiguration)) { writer.WritePropertyName("detectorNodeConfiguration"); writer.WriteStringValue(DetectorNodeConfiguration); } + if (Optional.IsDefined(TrackerNodeConfiguration)) + { + writer.WritePropertyName("trackerNodeConfiguration"); + writer.WriteStringValue(TrackerNodeConfiguration); + } if (Optional.IsDefined(EnableFaceMaskClassifier)) { writer.WritePropertyName("enableFaceMaskClassifier"); @@ -52,8 +67,11 @@ internal static SpatialAnalysisPersonLineCrossingOperation DeserializeSpatialAna { IList lines = default; Optional debug = default; + Optional calibrationConfiguration = default; Optional cameraConfiguration = default; + Optional cameraCalibratorNodeConfiguration = default; Optional detectorNodeConfiguration = default; + Optional trackerNodeConfiguration = default; Optional enableFaceMaskClassifier = default; string type = default; foreach (var property in element.EnumerateObject()) @@ -73,16 +91,31 @@ internal static SpatialAnalysisPersonLineCrossingOperation DeserializeSpatialAna debug = property.Value.GetString(); continue; } + if (property.NameEquals("calibrationConfiguration")) + { + calibrationConfiguration = property.Value.GetString(); + continue; + } if (property.NameEquals("cameraConfiguration")) { cameraConfiguration = property.Value.GetString(); continue; } + if (property.NameEquals("cameraCalibratorNodeConfiguration")) + { + cameraCalibratorNodeConfiguration = property.Value.GetString(); + continue; + } if (property.NameEquals("detectorNodeConfiguration")) { detectorNodeConfiguration = property.Value.GetString(); continue; } + if (property.NameEquals("trackerNodeConfiguration")) + { + trackerNodeConfiguration = property.Value.GetString(); + continue; + } if (property.NameEquals("enableFaceMaskClassifier")) { enableFaceMaskClassifier = property.Value.GetString(); @@ -94,7 +127,7 @@ internal static SpatialAnalysisPersonLineCrossingOperation DeserializeSpatialAna continue; } } - return new SpatialAnalysisPersonLineCrossingOperation(type, debug.Value, cameraConfiguration.Value, detectorNodeConfiguration.Value, enableFaceMaskClassifier.Value, lines); + return new SpatialAnalysisPersonLineCrossingOperation(type, debug.Value, 
calibrationConfiguration.Value, cameraConfiguration.Value, cameraCalibratorNodeConfiguration.Value, detectorNodeConfiguration.Value, trackerNodeConfiguration.Value, enableFaceMaskClassifier.Value, lines); } } } diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonLineCrossingOperation.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonLineCrossingOperation.cs index 9ff5e0c5a4b1..cf8818adfd59 100644 --- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonLineCrossingOperation.cs +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonLineCrossingOperation.cs @@ -31,11 +31,14 @@ public SpatialAnalysisPersonLineCrossingOperation(IEnumerable Initializes a new instance of SpatialAnalysisPersonLineCrossingOperation. /// The Type discriminator for the derived types. /// If set to 'true', enables debugging mode for this operation. + /// Advanced calibration configuration. /// Advanced camera configuration. + /// Advanced camera calibrator configuration. /// Advanced detector node configuration. + /// Advanced tracker node configuration. /// If set to 'true', enables face mask detection for this operation. /// The list of lines with optional events. - internal SpatialAnalysisPersonLineCrossingOperation(string type, string debug, string cameraConfiguration, string detectorNodeConfiguration, string enableFaceMaskClassifier, IList lines) : base(type, debug, cameraConfiguration, detectorNodeConfiguration, enableFaceMaskClassifier) + internal SpatialAnalysisPersonLineCrossingOperation(string type, string debug, string calibrationConfiguration, string cameraConfiguration, string cameraCalibratorNodeConfiguration, string detectorNodeConfiguration, string trackerNodeConfiguration, string enableFaceMaskClassifier, IList lines) : base(type, debug, calibrationConfiguration, cameraConfiguration, cameraCalibratorNodeConfiguration, detectorNodeConfiguration, trackerNodeConfiguration, enableFaceMaskClassifier) { Lines = lines; Type = type ?? 
"#Microsoft.VideoAnalyzer.SpatialAnalysisPersonLineCrossingOperation"; diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonZoneCrossingOperation.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonZoneCrossingOperation.Serialization.cs index 3f71f073e3ad..b31d7432db77 100644 --- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonZoneCrossingOperation.Serialization.cs +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonZoneCrossingOperation.Serialization.cs @@ -28,16 +28,31 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) writer.WritePropertyName("debug"); writer.WriteStringValue(Debug); } + if (Optional.IsDefined(CalibrationConfiguration)) + { + writer.WritePropertyName("calibrationConfiguration"); + writer.WriteStringValue(CalibrationConfiguration); + } if (Optional.IsDefined(CameraConfiguration)) { writer.WritePropertyName("cameraConfiguration"); writer.WriteStringValue(CameraConfiguration); } + if (Optional.IsDefined(CameraCalibratorNodeConfiguration)) + { + writer.WritePropertyName("cameraCalibratorNodeConfiguration"); + writer.WriteStringValue(CameraCalibratorNodeConfiguration); + } if (Optional.IsDefined(DetectorNodeConfiguration)) { writer.WritePropertyName("detectorNodeConfiguration"); writer.WriteStringValue(DetectorNodeConfiguration); } + if (Optional.IsDefined(TrackerNodeConfiguration)) + { + writer.WritePropertyName("trackerNodeConfiguration"); + writer.WriteStringValue(TrackerNodeConfiguration); + } if (Optional.IsDefined(EnableFaceMaskClassifier)) { writer.WritePropertyName("enableFaceMaskClassifier"); @@ -52,8 +67,11 @@ internal static SpatialAnalysisPersonZoneCrossingOperation DeserializeSpatialAna { IList zones = default; Optional debug = default; + Optional calibrationConfiguration = default; Optional cameraConfiguration = default; + Optional cameraCalibratorNodeConfiguration = default; Optional detectorNodeConfiguration = default; + Optional trackerNodeConfiguration = default; Optional enableFaceMaskClassifier = default; string type = default; foreach (var property in element.EnumerateObject()) @@ -73,16 +91,31 @@ internal static SpatialAnalysisPersonZoneCrossingOperation DeserializeSpatialAna debug = property.Value.GetString(); continue; } + if (property.NameEquals("calibrationConfiguration")) + { + calibrationConfiguration = property.Value.GetString(); + continue; + } if (property.NameEquals("cameraConfiguration")) { cameraConfiguration = property.Value.GetString(); continue; } + if (property.NameEquals("cameraCalibratorNodeConfiguration")) + { + cameraCalibratorNodeConfiguration = property.Value.GetString(); + continue; + } if (property.NameEquals("detectorNodeConfiguration")) { detectorNodeConfiguration = property.Value.GetString(); continue; } + if (property.NameEquals("trackerNodeConfiguration")) + { + trackerNodeConfiguration = property.Value.GetString(); + continue; + } if (property.NameEquals("enableFaceMaskClassifier")) { enableFaceMaskClassifier = property.Value.GetString(); @@ -94,7 +127,7 @@ internal static SpatialAnalysisPersonZoneCrossingOperation DeserializeSpatialAna continue; } } - return new SpatialAnalysisPersonZoneCrossingOperation(type, debug.Value, cameraConfiguration.Value, detectorNodeConfiguration.Value, enableFaceMaskClassifier.Value, zones); + return new SpatialAnalysisPersonZoneCrossingOperation(type, debug.Value, 
calibrationConfiguration.Value, cameraConfiguration.Value, cameraCalibratorNodeConfiguration.Value, detectorNodeConfiguration.Value, trackerNodeConfiguration.Value, enableFaceMaskClassifier.Value, zones); } } } diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonZoneCrossingOperation.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonZoneCrossingOperation.cs index 202f8a4b6d34..f43e1a1aabf2 100644 --- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonZoneCrossingOperation.cs +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisPersonZoneCrossingOperation.cs @@ -31,11 +31,14 @@ public SpatialAnalysisPersonZoneCrossingOperation(IEnumerable Initializes a new instance of SpatialAnalysisPersonZoneCrossingOperation. /// The Type discriminator for the derived types. /// If set to 'true', enables debugging mode for this operation. + /// Advanced calibration configuration. /// Advanced camera configuration. + /// Advanced camera calibrator configuration. /// Advanced detector node configuration. + /// Advanced tracker node configuration. /// If set to 'true', enables face mask detection for this operation. /// The list of zones with optional events. - internal SpatialAnalysisPersonZoneCrossingOperation(string type, string debug, string cameraConfiguration, string detectorNodeConfiguration, string enableFaceMaskClassifier, IList zones) : base(type, debug, cameraConfiguration, detectorNodeConfiguration, enableFaceMaskClassifier) + internal SpatialAnalysisPersonZoneCrossingOperation(string type, string debug, string calibrationConfiguration, string cameraConfiguration, string cameraCalibratorNodeConfiguration, string detectorNodeConfiguration, string trackerNodeConfiguration, string enableFaceMaskClassifier, IList zones) : base(type, debug, calibrationConfiguration, cameraConfiguration, cameraCalibratorNodeConfiguration, detectorNodeConfiguration, trackerNodeConfiguration, enableFaceMaskClassifier) { Zones = zones; Type = type ?? 
"#Microsoft.VideoAnalyzer.SpatialAnalysisPersonZoneCrossingOperation"; diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisTypedOperationBase.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisTypedOperationBase.Serialization.cs index 5a53994d06c8..2d72f04fd42c 100644 --- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisTypedOperationBase.Serialization.cs +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisTypedOperationBase.Serialization.cs @@ -20,16 +20,31 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) writer.WritePropertyName("debug"); writer.WriteStringValue(Debug); } + if (Optional.IsDefined(CalibrationConfiguration)) + { + writer.WritePropertyName("calibrationConfiguration"); + writer.WriteStringValue(CalibrationConfiguration); + } if (Optional.IsDefined(CameraConfiguration)) { writer.WritePropertyName("cameraConfiguration"); writer.WriteStringValue(CameraConfiguration); } + if (Optional.IsDefined(CameraCalibratorNodeConfiguration)) + { + writer.WritePropertyName("cameraCalibratorNodeConfiguration"); + writer.WriteStringValue(CameraCalibratorNodeConfiguration); + } if (Optional.IsDefined(DetectorNodeConfiguration)) { writer.WritePropertyName("detectorNodeConfiguration"); writer.WriteStringValue(DetectorNodeConfiguration); } + if (Optional.IsDefined(TrackerNodeConfiguration)) + { + writer.WritePropertyName("trackerNodeConfiguration"); + writer.WriteStringValue(TrackerNodeConfiguration); + } if (Optional.IsDefined(EnableFaceMaskClassifier)) { writer.WritePropertyName("enableFaceMaskClassifier"); @@ -53,8 +68,11 @@ internal static SpatialAnalysisTypedOperationBase DeserializeSpatialAnalysisType } } Optional debug = default; + Optional calibrationConfiguration = default; Optional cameraConfiguration = default; + Optional cameraCalibratorNodeConfiguration = default; Optional detectorNodeConfiguration = default; + Optional trackerNodeConfiguration = default; Optional enableFaceMaskClassifier = default; string type = default; foreach (var property in element.EnumerateObject()) @@ -64,16 +82,31 @@ internal static SpatialAnalysisTypedOperationBase DeserializeSpatialAnalysisType debug = property.Value.GetString(); continue; } + if (property.NameEquals("calibrationConfiguration")) + { + calibrationConfiguration = property.Value.GetString(); + continue; + } if (property.NameEquals("cameraConfiguration")) { cameraConfiguration = property.Value.GetString(); continue; } + if (property.NameEquals("cameraCalibratorNodeConfiguration")) + { + cameraCalibratorNodeConfiguration = property.Value.GetString(); + continue; + } if (property.NameEquals("detectorNodeConfiguration")) { detectorNodeConfiguration = property.Value.GetString(); continue; } + if (property.NameEquals("trackerNodeConfiguration")) + { + trackerNodeConfiguration = property.Value.GetString(); + continue; + } if (property.NameEquals("enableFaceMaskClassifier")) { enableFaceMaskClassifier = property.Value.GetString(); @@ -85,7 +118,7 @@ internal static SpatialAnalysisTypedOperationBase DeserializeSpatialAnalysisType continue; } } - return new SpatialAnalysisTypedOperationBase(type, debug.Value, cameraConfiguration.Value, detectorNodeConfiguration.Value, enableFaceMaskClassifier.Value); + return new SpatialAnalysisTypedOperationBase(type, debug.Value, calibrationConfiguration.Value, cameraConfiguration.Value, cameraCalibratorNodeConfiguration.Value, 
detectorNodeConfiguration.Value, trackerNodeConfiguration.Value, enableFaceMaskClassifier.Value); } } } diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisTypedOperationBase.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisTypedOperationBase.cs index f6e17e9cbdf9..3447a3ef0c62 100644 --- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisTypedOperationBase.cs +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SpatialAnalysisTypedOperationBase.cs @@ -19,24 +19,36 @@ public SpatialAnalysisTypedOperationBase() /// Initializes a new instance of SpatialAnalysisTypedOperationBase. /// The Type discriminator for the derived types. /// If set to 'true', enables debugging mode for this operation. + /// Advanced calibration configuration. /// Advanced camera configuration. + /// Advanced camera calibrator configuration. /// Advanced detector node configuration. + /// Advanced tracker node configuration. /// If set to 'true', enables face mask detection for this operation. - internal SpatialAnalysisTypedOperationBase(string type, string debug, string cameraConfiguration, string detectorNodeConfiguration, string enableFaceMaskClassifier) : base(type) + internal SpatialAnalysisTypedOperationBase(string type, string debug, string calibrationConfiguration, string cameraConfiguration, string cameraCalibratorNodeConfiguration, string detectorNodeConfiguration, string trackerNodeConfiguration, string enableFaceMaskClassifier) : base(type) { Debug = debug; + CalibrationConfiguration = calibrationConfiguration; CameraConfiguration = cameraConfiguration; + CameraCalibratorNodeConfiguration = cameraCalibratorNodeConfiguration; DetectorNodeConfiguration = detectorNodeConfiguration; + TrackerNodeConfiguration = trackerNodeConfiguration; EnableFaceMaskClassifier = enableFaceMaskClassifier; Type = type ?? "SpatialAnalysisTypedOperationBase"; } /// If set to 'true', enables debugging mode for this operation. public string Debug { get; set; } + /// Advanced calibration configuration. + public string CalibrationConfiguration { get; set; } /// Advanced camera configuration. public string CameraConfiguration { get; set; } + /// Advanced camera calibrator configuration. + public string CameraCalibratorNodeConfiguration { get; set; } /// Advanced detector node configuration. public string DetectorNodeConfiguration { get; set; } + /// Advanced tracker node configuration. + public string TrackerNodeConfiguration { get; set; } /// If set to 'true', enables face mask detection for this operation. public string EnableFaceMaskClassifier { get; set; } } diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SymmetricKeyCredentials.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SymmetricKeyCredentials.Serialization.cs new file mode 100644 index 000000000000..6ca9b788cebc --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SymmetricKeyCredentials.Serialization.cs @@ -0,0 +1,45 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System.Text.Json; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + public partial class SymmetricKeyCredentials : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + writer.WritePropertyName("key"); + writer.WriteStringValue(Key); + writer.WritePropertyName("@type"); + writer.WriteStringValue(Type); + writer.WriteEndObject(); + } + + internal static SymmetricKeyCredentials DeserializeSymmetricKeyCredentials(JsonElement element) + { + string key = default; + string type = default; + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("key")) + { + key = property.Value.GetString(); + continue; + } + if (property.NameEquals("@type")) + { + type = property.Value.GetString(); + continue; + } + } + return new SymmetricKeyCredentials(type, key); + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SymmetricKeyCredentials.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SymmetricKeyCredentials.cs new file mode 100644 index 000000000000..146681dc9b32 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/SymmetricKeyCredentials.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// Symmetric key credential. + public partial class SymmetricKeyCredentials : CredentialsBase + { + /// Initializes a new instance of SymmetricKeyCredentials. + /// Symmetric key credential. + /// is null. + public SymmetricKeyCredentials(string key) + { + if (key == null) + { + throw new ArgumentNullException(nameof(key)); + } + + Key = key; + Type = "#Microsoft.VideoAnalyzer.SymmetricKeyCredentials"; + } + + /// Initializes a new instance of SymmetricKeyCredentials. + /// Type discriminator for the derived types. + /// Symmetric key credential. + internal SymmetricKeyCredentials(string type, string key) : base(type) + { + Key = key; + Type = type ?? "#Microsoft.VideoAnalyzer.SymmetricKeyCredentials"; + } + + /// Symmetric key credential. 
+ public string Key { get; set; } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoCreationProperties.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoCreationProperties.Serialization.cs index c2a5f3e6aa13..926f79800cb4 100644 --- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoCreationProperties.Serialization.cs +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoCreationProperties.Serialization.cs @@ -30,6 +30,11 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) writer.WritePropertyName("segmentLength"); writer.WriteStringValue(SegmentLength); } + if (Optional.IsDefined(RetentionPeriod)) + { + writer.WritePropertyName("retentionPeriod"); + writer.WriteStringValue(RetentionPeriod); + } writer.WriteEndObject(); } @@ -38,6 +43,7 @@ internal static VideoCreationProperties DeserializeVideoCreationProperties(JsonE Optional title = default; Optional description = default; Optional segmentLength = default; + Optional retentionPeriod = default; foreach (var property in element.EnumerateObject()) { if (property.NameEquals("title")) @@ -55,8 +61,13 @@ internal static VideoCreationProperties DeserializeVideoCreationProperties(JsonE segmentLength = property.Value.GetString(); continue; } + if (property.NameEquals("retentionPeriod")) + { + retentionPeriod = property.Value.GetString(); + continue; + } } - return new VideoCreationProperties(title.Value, description.Value, segmentLength.Value); + return new VideoCreationProperties(title.Value, description.Value, segmentLength.Value, retentionPeriod.Value); } } } diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoCreationProperties.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoCreationProperties.cs index 8055caf3a944..c5ef35d68854 100644 --- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoCreationProperties.cs +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoCreationProperties.cs @@ -19,11 +19,13 @@ public VideoCreationProperties() /// Optional video title provided by the user. Value can be up to 256 characters long. /// Optional video description provided by the user. Value can be up to 2048 characters long. /// Video segment length indicates the length of individual video files (segments) which are persisted to storage. Smaller segments provide lower archive playback latency but generate larger volume of storage transactions. Larger segments reduce the amount of storage transactions while increasing the archive playback latency. Value must be specified in ISO8601 duration format (i.e. "PT30S" equals 30 seconds) and can vary between 30 seconds to 5 minutes, in 30 seconds increments. Changing this value after the video is initially created can lead to errors when uploading media to the archive. Default value is 30 seconds. - internal VideoCreationProperties(string title, string description, string segmentLength) + /// Video retention period indicates how long the video is kept in storage, and must be a multiple of 1 day. For example, if this is set to 30 days, then content older than 30 days will be deleted. 
+ internal VideoCreationProperties(string title, string description, string segmentLength, string retentionPeriod) { Title = title; Description = description; SegmentLength = segmentLength; + RetentionPeriod = retentionPeriod; } /// Optional video title provided by the user. Value can be up to 256 characters long. @@ -32,5 +34,7 @@ internal VideoCreationProperties(string title, string description, string segmen public string Description { get; set; } /// Video segment length indicates the length of individual video files (segments) which are persisted to storage. Smaller segments provide lower archive playback latency but generate larger volume of storage transactions. Larger segments reduce the amount of storage transactions while increasing the archive playback latency. Value must be specified in ISO8601 duration format (i.e. "PT30S" equals 30 seconds) and can vary between 30 seconds to 5 minutes, in 30 seconds increments. Changing this value after the video is initially created can lead to errors when uploading media to the archive. Default value is 30 seconds. public string SegmentLength { get; set; } + /// Video retention period indicates how long the video is kept in storage, and must be a multiple of 1 day. For example, if this is set to 30 days, then content older than 30 days will be deleted. + public string RetentionPeriod { get; set; } } } diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoEncoderConfiguration.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoEncoderConfiguration.Serialization.cs new file mode 100644 index 000000000000..520955e053c5 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoEncoderConfiguration.Serialization.cs @@ -0,0 +1,125 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
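Note: `VideoCreationProperties` now carries an optional retention period next to the segment length; both are ISO 8601 durations, and the retention period must be a whole number of days. A minimal sketch using the public parameterless constructor and setters shown above:

```C#
var videoCreationProperties = new VideoCreationProperties
{
    Title = "Parking lot camera",  // optional, up to 256 characters
    SegmentLength = "PT30S",       // 30-second segments (the documented default)
    RetentionPeriod = "P30D"       // content older than 30 days is deleted
};
```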
+ +// + +#nullable disable + +using System.Text.Json; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + public partial class VideoEncoderConfiguration : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + if (Optional.IsDefined(Encoding)) + { + writer.WritePropertyName("encoding"); + writer.WriteStringValue(Encoding.Value.ToString()); + } + if (Optional.IsDefined(Quality)) + { + writer.WritePropertyName("quality"); + writer.WriteNumberValue(Quality.Value); + } + if (Optional.IsDefined(Resolution)) + { + writer.WritePropertyName("resolution"); + writer.WriteObjectValue(Resolution); + } + if (Optional.IsDefined(RateControl)) + { + writer.WritePropertyName("rateControl"); + writer.WriteObjectValue(RateControl); + } + if (Optional.IsDefined(H264)) + { + writer.WritePropertyName("h264"); + writer.WriteObjectValue(H264); + } + if (Optional.IsDefined(Mpeg4)) + { + writer.WritePropertyName("mpeg4"); + writer.WriteObjectValue(Mpeg4); + } + writer.WriteEndObject(); + } + + internal static VideoEncoderConfiguration DeserializeVideoEncoderConfiguration(JsonElement element) + { + Optional encoding = default; + Optional quality = default; + Optional resolution = default; + Optional rateControl = default; + Optional h264 = default; + Optional mpeg4 = default; + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("encoding")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + encoding = new VideoEncoding(property.Value.GetString()); + continue; + } + if (property.NameEquals("quality")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + quality = property.Value.GetSingle(); + continue; + } + if (property.NameEquals("resolution")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + resolution = VideoResolution.DeserializeVideoResolution(property.Value); + continue; + } + if (property.NameEquals("rateControl")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + rateControl = RateControl.DeserializeRateControl(property.Value); + continue; + } + if (property.NameEquals("h264")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + h264 = H264Configuration.DeserializeH264Configuration(property.Value); + continue; + } + if (property.NameEquals("mpeg4")) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + property.ThrowNonNullablePropertyIsNull(); + continue; + } + mpeg4 = Mpeg4Configuration.DeserializeMpeg4Configuration(property.Value); + continue; + } + } + return new VideoEncoderConfiguration(Optional.ToNullable(encoding), Optional.ToNullable(quality), resolution.Value, rateControl.Value, h264.Value, mpeg4.Value); + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoEncoderConfiguration.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoEncoderConfiguration.cs new file mode 100644 index 000000000000..c09c892a3174 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoEncoderConfiguration.cs @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// Class representing the MPEG4 Configuration. + public partial class VideoEncoderConfiguration + { + /// Initializes a new instance of VideoEncoderConfiguration. + public VideoEncoderConfiguration() + { + } + + /// Initializes a new instance of VideoEncoderConfiguration. + /// The video codec used by the Media Profile. + /// Relative value representing the quality of the video. + /// The Video Resolution. + /// The Video's rate control. + /// The H264 Configuration. + /// The H264 Configuration. + internal VideoEncoderConfiguration(VideoEncoding? encoding, float? quality, VideoResolution resolution, RateControl rateControl, H264Configuration h264, Mpeg4Configuration mpeg4) + { + Encoding = encoding; + Quality = quality; + Resolution = resolution; + RateControl = rateControl; + H264 = h264; + Mpeg4 = mpeg4; + } + + /// The video codec used by the Media Profile. + public VideoEncoding? Encoding { get; set; } + /// Relative value representing the quality of the video. + public float? Quality { get; set; } + /// The Video Resolution. + public VideoResolution Resolution { get; set; } + /// The Video's rate control. + public RateControl RateControl { get; set; } + /// The H264 Configuration. + public H264Configuration H264 { get; set; } + /// The H264 Configuration. + public Mpeg4Configuration Mpeg4 { get; set; } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoEncoding.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoEncoding.cs new file mode 100644 index 000000000000..533ed1f6bbe5 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoEncoding.cs @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// The video codec used by the Media Profile. + public readonly partial struct VideoEncoding : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public VideoEncoding(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string JpegValue = "JPEG"; + private const string H264Value = "H264"; + private const string Mpeg4Value = "MPEG4"; + + /// The Media Profile uses JPEG encoding. + public static VideoEncoding Jpeg { get; } = new VideoEncoding(JpegValue); + /// The Media Profile uses H264 encoding. + public static VideoEncoding H264 { get; } = new VideoEncoding(H264Value); + /// The Media Profile uses MPEG4 encoding. + public static VideoEncoding Mpeg4 { get; } = new VideoEncoding(Mpeg4Value); + /// Determines if two values are the same. + public static bool operator ==(VideoEncoding left, VideoEncoding right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(VideoEncoding left, VideoEncoding right) => !left.Equals(right); + /// Converts a string to a . 
+ public static implicit operator VideoEncoding(string value) => new VideoEncoding(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is VideoEncoding other && Equals(other); + /// + public bool Equals(VideoEncoding other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value?.GetHashCode() ?? 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoPublishingOptions.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoPublishingOptions.Serialization.cs new file mode 100644 index 000000000000..20b4417bdd70 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoPublishingOptions.Serialization.cs @@ -0,0 +1,40 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System.Text.Json; +using Azure.Core; + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + public partial class VideoPublishingOptions : IUtf8JsonSerializable + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) + { + writer.WriteStartObject(); + if (Optional.IsDefined(EnableVideoPreviewImage)) + { + writer.WritePropertyName("enableVideoPreviewImage"); + writer.WriteStringValue(EnableVideoPreviewImage); + } + writer.WriteEndObject(); + } + + internal static VideoPublishingOptions DeserializeVideoPublishingOptions(JsonElement element) + { + Optional enableVideoPreviewImage = default; + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("enableVideoPreviewImage")) + { + enableVideoPreviewImage = property.Value.GetString(); + continue; + } + } + return new VideoPublishingOptions(enableVideoPreviewImage.Value); + } + } +} diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoPublishingOptions.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoPublishingOptions.cs new file mode 100644 index 000000000000..dce82a8fb7a3 --- /dev/null +++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoPublishingOptions.cs @@ -0,0 +1,28 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +namespace Azure.Media.VideoAnalyzer.Edge.Models +{ + /// Options for changing video publishing behavior on the video sink and output video. + public partial class VideoPublishingOptions + { + /// Initializes a new instance of VideoPublishingOptions. + public VideoPublishingOptions() + { + } + + /// Initializes a new instance of VideoPublishingOptions. + /// When set to 'true' the video will publish preview images. Default is 'false'. + internal VideoPublishingOptions(string enableVideoPreviewImage) + { + EnableVideoPreviewImage = enableVideoPreviewImage; + } + + /// When set to 'true' the video will publish preview images. Default is 'false'. 
+        public string EnableVideoPreviewImage { get; set; }
+    }
+}
diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoResolution.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoResolution.Serialization.cs
new file mode 100644
index 000000000000..9f41e3a9846b
--- /dev/null
+++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoResolution.Serialization.cs
@@ -0,0 +1,61 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System.Text.Json;
+using Azure.Core;
+
+namespace Azure.Media.VideoAnalyzer.Edge.Models
+{
+    public partial class VideoResolution : IUtf8JsonSerializable
+    {
+        void IUtf8JsonSerializable.Write(Utf8JsonWriter writer)
+        {
+            writer.WriteStartObject();
+            if (Optional.IsDefined(Width))
+            {
+                writer.WritePropertyName("width");
+                writer.WriteNumberValue(Width.Value);
+            }
+            if (Optional.IsDefined(Height))
+            {
+                writer.WritePropertyName("height");
+                writer.WriteNumberValue(Height.Value);
+            }
+            writer.WriteEndObject();
+        }
+
+        internal static VideoResolution DeserializeVideoResolution(JsonElement element)
+        {
+            Optional<float> width = default;
+            Optional<float> height = default;
+            foreach (var property in element.EnumerateObject())
+            {
+                if (property.NameEquals("width"))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        property.ThrowNonNullablePropertyIsNull();
+                        continue;
+                    }
+                    width = property.Value.GetSingle();
+                    continue;
+                }
+                if (property.NameEquals("height"))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        property.ThrowNonNullablePropertyIsNull();
+                        continue;
+                    }
+                    height = property.Value.GetSingle();
+                    continue;
+                }
+            }
+            return new VideoResolution(Optional.ToNullable(width), Optional.ToNullable(height));
+        }
+    }
+}
diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoResolution.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoResolution.cs
new file mode 100644
index 000000000000..0114d88af4aa
--- /dev/null
+++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoResolution.cs
@@ -0,0 +1,32 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+namespace Azure.Media.VideoAnalyzer.Edge.Models
+{
+    /// <summary> The Video resolution. </summary>
+    public partial class VideoResolution
+    {
+        /// <summary> Initializes a new instance of VideoResolution. </summary>
+        public VideoResolution()
+        {
+        }
+
+        /// <summary> Initializes a new instance of VideoResolution. </summary>
+        /// <param name="width"> The number of columns of the Video image. </param>
+        /// <param name="height"> The number of lines of the Video image. </param>
+        internal VideoResolution(float? width, float? height)
+        {
+            Width = width;
+            Height = height;
+        }
+
+        /// <summary> The number of columns of the Video image. </summary>
+        public float? Width { get; set; }
+        /// <summary> The number of lines of the Video image. </summary>
+        public float? Height { get; set; }
+    }
+}
diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoSink.Serialization.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoSink.Serialization.cs
index fb4efdaf00f8..728d84dac3be 100644
--- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoSink.Serialization.cs
+++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoSink.Serialization.cs
@@ -23,6 +23,11 @@ void IUtf8JsonSerializable.Write(Utf8JsonWriter writer)
                 writer.WritePropertyName("videoCreationProperties");
                 writer.WriteObjectValue(VideoCreationProperties);
             }
+            if (Optional.IsDefined(VideoPublishingOptions))
+            {
+                writer.WritePropertyName("videoPublishingOptions");
+                writer.WriteObjectValue(VideoPublishingOptions);
+            }
             writer.WritePropertyName("localMediaCachePath");
             writer.WriteStringValue(LocalMediaCachePath);
             writer.WritePropertyName("localMediaCacheMaximumSizeMiB");
@@ -45,6 +50,7 @@ internal static VideoSink DeserializeVideoSink(JsonElement element)
         {
             string videoName = default;
             Optional<VideoCreationProperties> videoCreationProperties = default;
+            Optional<VideoPublishingOptions> videoPublishingOptions = default;
             string localMediaCachePath = default;
             string localMediaCacheMaximumSizeMiB = default;
             string type = default;
@@ -67,6 +73,16 @@ internal static VideoSink DeserializeVideoSink(JsonElement element)
                     videoCreationProperties = VideoCreationProperties.DeserializeVideoCreationProperties(property.Value);
                     continue;
                 }
+                if (property.NameEquals("videoPublishingOptions"))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        property.ThrowNonNullablePropertyIsNull();
+                        continue;
+                    }
+                    videoPublishingOptions = VideoPublishingOptions.DeserializeVideoPublishingOptions(property.Value);
+                    continue;
+                }
                 if (property.NameEquals("localMediaCachePath"))
                 {
                     localMediaCachePath = property.Value.GetString();
@@ -98,7 +114,7 @@ internal static VideoSink DeserializeVideoSink(JsonElement element)
                     continue;
                 }
             }
-            return new VideoSink(type, name, inputs, videoName, videoCreationProperties.Value, localMediaCachePath, localMediaCacheMaximumSizeMiB);
+            return new VideoSink(type, name, inputs, videoName, videoCreationProperties.Value, videoPublishingOptions.Value, localMediaCachePath, localMediaCacheMaximumSizeMiB);
         }
     }
 }
diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoSink.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoSink.cs
index fc9c339bf4c8..99dd5a49bbc6 100644
--- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoSink.cs
+++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Generated/Models/VideoSink.cs
@@ -55,12 +55,14 @@ public VideoSink(string name, IEnumerable<NodeInput> inputs, string videoName, s
         /// <param name="inputs"> An array of upstream node references within the topology to be used as inputs for this node. </param>
         /// <param name="videoName"> Name of a new or existing Video Analyzer video resource used for the media recording. </param>
         /// <param name="videoCreationProperties"> Optional video properties to be used in case a new video resource needs to be created on the service. </param>
+        /// <param name="videoPublishingOptions"> Optional video publishing options to be used for changing publishing behavior of the output video. </param>
         /// <param name="localMediaCachePath"> Path to a local file system directory for caching of temporary media files. This will also be used to store content which cannot be immediately uploaded to Azure due to Internet connectivity issues. </param>
         /// <param name="localMediaCacheMaximumSizeMiB"> Maximum amount of disk space that can be used for caching of temporary media files. Once this limit is reached, the oldest segments of the media archive will be continuously deleted in order to make space for new media, thus leading to gaps in the cloud recorded content. </param>
-        internal VideoSink(string type, string name, IList<NodeInput> inputs, string videoName, VideoCreationProperties videoCreationProperties, string localMediaCachePath, string localMediaCacheMaximumSizeMiB) : base(type, name, inputs)
+        internal VideoSink(string type, string name, IList<NodeInput> inputs, string videoName, VideoCreationProperties videoCreationProperties, VideoPublishingOptions videoPublishingOptions, string localMediaCachePath, string localMediaCacheMaximumSizeMiB) : base(type, name, inputs)
         {
             VideoName = videoName;
             VideoCreationProperties = videoCreationProperties;
+            VideoPublishingOptions = videoPublishingOptions;
             LocalMediaCachePath = localMediaCachePath;
             LocalMediaCacheMaximumSizeMiB = localMediaCacheMaximumSizeMiB;
             Type = type ?? "#Microsoft.VideoAnalyzer.VideoSink";
@@ -70,6 +72,8 @@ internal VideoSink(string type, string name, IList<NodeInput> inputs, string vid
         public string VideoName { get; set; }
         /// <summary> Optional video properties to be used in case a new video resource needs to be created on the service. </summary>
         public VideoCreationProperties VideoCreationProperties { get; set; }
+        /// <summary> Optional video publishing options to be used for changing publishing behavior of the output video. </summary>
+        public VideoPublishingOptions VideoPublishingOptions { get; set; }
         /// <summary> Path to a local file system directory for caching of temporary media files. This will also be used to store content which cannot be immediately uploaded to Azure due to Internet connectivity issues. </summary>
         public string LocalMediaCachePath { get; set; }
         /// <summary> Maximum amount of disk space that can be used for caching of temporary media files. Once this limit is reached, the oldest segments of the media archive will be continuously deleted in order to make space for new media, thus leading to gaps in the cloud recorded content. </summary>
diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/DiscoveredOnVifDeviceCollection.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/DiscoveredOnVifDeviceCollection.cs
new file mode 100644
index 000000000000..336222b90556
--- /dev/null
+++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/DiscoveredOnVifDeviceCollection.cs
@@ -0,0 +1,25 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System.Text.Json;
+
+namespace Azure.Media.VideoAnalyzer.Edge.Models
+{
+    /// <summary>
+    /// Extension methods to DiscoveredOnvifDeviceCollection to add serialization and deserialization.
+    /// </summary>
+    public partial class DiscoveredOnvifDeviceCollection
+    {
+        /// <summary>
+        /// Deserialize DiscoveredOnvifDeviceCollection.
+        /// </summary>
+        /// <param name="json">The json data that is to be deserialized.</param>
+        /// <returns>A DiscoveredOnvifDeviceCollection.</returns>
+        public static DiscoveredOnvifDeviceCollection Deserialize(string json)
+        {
+            using var doc = JsonDocument.Parse(json);
+            var element = doc.RootElement;
+            return DeserializeDiscoveredOnvifDeviceCollection(element);
+        }
+    }
+}
diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/LivePipeline.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/LivePipeline.cs
index 94ec3ef255b8..f23e9e3ff7ec 100644
--- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/LivePipeline.cs
+++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/LivePipeline.cs
@@ -1,10 +1,7 @@
 // Copyright (c) Microsoft Corporation. All rights reserved.
 // Licensed under the MIT License.
 
-using System.IO;
-using System.Text;
 using System.Text.Json;
-using Azure.Core;
 
 namespace Azure.Media.VideoAnalyzer.Edge.Models
 {
@@ -14,11 +11,11 @@ public partial class LivePipeline
         /// Deserialize LivePipeline.
         /// </summary>
         /// <param name="json">The json to be deserialized.</param>
-        /// <returns>A LivePipelineCollection.</returns>
+        /// <returns>A LivePipeline.</returns>
         public static LivePipeline Deserialize(string json)
         {
-            using JsonDocument doc = JsonDocument.Parse(json);
-            JsonElement element = doc.RootElement;
+            using var doc = JsonDocument.Parse(json);
+            var element = doc.RootElement;
             return DeserializeLivePipeline(element);
         }
     }
diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/LivePipelineCollection.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/LivePipelineCollection.cs
index f8c0aa08b90c..e0f4ff42585e 100644
--- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/LivePipelineCollection.cs
+++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/LivePipelineCollection.cs
@@ -1,24 +1,24 @@
 // Copyright (c) Microsoft Corporation. All rights reserved.
 // Licensed under the MIT License.
 
-using System.IO;
-using System.Text;
 using System.Text.Json;
-using Azure.Core;
 
 namespace Azure.Media.VideoAnalyzer.Edge.Models
 {
+    /// <summary>
+    /// Extension methods to LivePipelineCollection to add serialization and deserialization.
+    /// </summary>
     public partial class LivePipelineCollection
     {
         /// <summary>
         /// Deserialize LivePipelineCollection.
         /// </summary>
         /// <param name="json">The json to be deserialized.</param>
-        /// <returns>Json string representation of a Live Pipeline Collection.</returns>
+        /// <returns>A LivePipelineCollection.</returns>
        public static LivePipelineCollection Deserialize(string json)
         {
-            using JsonDocument doc = JsonDocument.Parse(json);
-            JsonElement element = doc.RootElement;
+            using var doc = JsonDocument.Parse(json);
+            var element = doc.RootElement;
             return DeserializeLivePipelineCollection(element);
         }
     }
diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/LivePipelineSetRequest.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/LivePipelineSetRequest.cs
index cafd5acd8887..3bb99bb9c0f7 100644
--- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/LivePipelineSetRequest.cs
+++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/LivePipelineSetRequest.cs
@@ -6,17 +6,17 @@ namespace Azure.Media.VideoAnalyzer.Edge.Models
     public partial class LivePipelineSetRequest
     {
         /// <summary>
-        /// Serialize <see cref="LivePipeline"/>.
+        /// Gets the Payload from the request result.
         /// </summary>
-        /// <returns>A String containing the Payload.</returns>
+        /// <returns>A string containing the Payload.</returns>
         public override string GetPayloadAsJson()
         {
-            var livePipelineBody = new LivePipelineSetRequestBody(LivePipeline.Name)
+            var instanceBody = new LivePipelineSetRequestBody(LivePipeline.Name)
             {
                 SystemData = LivePipeline.SystemData,
                 Properties = LivePipeline.Properties
             };
-            return livePipelineBody.GetPayloadAsJson();
+            return instanceBody.GetPayloadAsJson();
         }
     }
 }
diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/MethodRequest.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/MethodRequest.cs
index 69c9fd09f140..62614015e794 100644
--- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/MethodRequest.cs
+++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/MethodRequest.cs
@@ -8,6 +8,9 @@
 
 namespace Azure.Media.VideoAnalyzer.Edge.Models
 {
+    /// <summary>
+    /// Base Class for Method Requests.
+    /// </summary>
     public partial class MethodRequest
     {
         /// <summary>
diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/NodeInput.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/NodeInput.cs
index da647544b09e..3ca0a223ea61 100644
--- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/NodeInput.cs
+++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/NodeInput.cs
@@ -1,10 +1,11 @@
 // Copyright (c) Microsoft Corporation. All rights reserved.
 // Licensed under the MIT License.
 
-using System;
-
 namespace Azure.Media.VideoAnalyzer.Edge.Models
 {
+    /// <summary>
+    /// Extension for <see cref="NodeInput"/>.
+    /// </summary>
     public partial class NodeInput
     {
         /// <summary>
diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/OnVifDevice.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/OnVifDevice.cs
new file mode 100644
index 000000000000..cc9003783152
--- /dev/null
+++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/OnVifDevice.cs
@@ -0,0 +1,22 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System.Text.Json;
+
+namespace Azure.Media.VideoAnalyzer.Edge.Models
+{
+    public partial class OnvifDevice
+    {
+        /// <summary>
+        /// Deserialize OnvifDevice.
+        /// </summary>
+        /// <param name="json">The json to be deserialized.</param>
+        /// <returns>An OnvifDevice.</returns>
+        public static OnvifDevice Deserialize(string json)
+        {
+            using var doc = JsonDocument.Parse(json);
+            var element = doc.RootElement;
+            return DeserializeOnvifDevice(element);
+        }
+    }
+}
diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/ParameterDefinition.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/ParameterDefinition.cs
index b708e6cbb9c7..ad8224c772d4 100644
--- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/ParameterDefinition.cs
+++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/ParameterDefinition.cs
@@ -1,8 +1,6 @@
 // Copyright (c) Microsoft Corporation. All rights reserved.
 // Licensed under the MIT License.
 
-// <auto-generated/>
-
 #nullable disable
 
 using System;
diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/PipelineTopology.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/PipelineTopology.cs
index bfd33da5b7c6..9a358852b4f9 100644
--- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/PipelineTopology.cs
+++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/PipelineTopology.cs
@@ -1,10 +1,7 @@
 // Copyright (c) Microsoft Corporation. All rights reserved.
 // Licensed under the MIT License.
 
-using System.IO;
-using System.Text;
 using System.Text.Json;
-using Azure.Core;
 
 namespace Azure.Media.VideoAnalyzer.Edge.Models
 {
@@ -17,8 +14,8 @@ public partial class PipelineTopology
         /// <returns>A Json string representation of a PipelineTopology.</returns>
         public static PipelineTopology Deserialize(string json)
         {
-            using JsonDocument doc = JsonDocument.Parse(json);
-            JsonElement element = doc.RootElement;
+            using var doc = JsonDocument.Parse(json);
+            var element = doc.RootElement;
             return DeserializePipelineTopology(element);
         }
     }
diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/PipelineTopologyCollection.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/PipelineTopologyCollection.cs
index 3b5548ff24b6..f45c5b3b3eff 100644
--- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/PipelineTopologyCollection.cs
+++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/PipelineTopologyCollection.cs
@@ -1,24 +1,24 @@
 // Copyright (c) Microsoft Corporation. All rights reserved.
 // Licensed under the MIT License.
 
-using System.IO;
-using System.Text;
 using System.Text.Json;
-using Azure.Core;
 
 namespace Azure.Media.VideoAnalyzer.Edge.Models
 {
+    /// <summary>
+    /// Extension methods to PipelineTopologyCollection to add serialization and deserialization.
+    /// </summary>
     public partial class PipelineTopologyCollection
     {
         /// <summary>
-        /// Deserialize PipelineTopology.
+        /// Deserialize PipelineTopologyCollection.
         /// </summary>
         /// <param name="json">The json data that is to be deserialized.</param>
-        /// <returns>A Json string representation of a list of PipelineTopology.</returns>
+        /// <returns>A PipelineTopologyCollection.</returns>
         public static PipelineTopologyCollection Deserialize(string json)
         {
-            using JsonDocument doc = JsonDocument.Parse(json);
-            JsonElement element = doc.RootElement;
+            using var doc = JsonDocument.Parse(json);
+            var element = doc.RootElement;
             return DeserializePipelineTopologyCollection(element);
         }
     }
diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/PipelineTopologySetRequest.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/PipelineTopologySetRequest.cs
index 8087fe594988..a8dc3be31da8 100644
--- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/PipelineTopologySetRequest.cs
+++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/PipelineTopologySetRequest.cs
@@ -1,8 +1,6 @@
 // Copyright (c) Microsoft Corporation. All rights reserved.
 // Licensed under the MIT License.
 
-using System.Text.Json;
-
 namespace Azure.Media.VideoAnalyzer.Edge.Models
 {
     public partial class PipelineTopologySetRequest
@@ -10,15 +8,15 @@ public partial class PipelineTopologySetRequest
         /// <summary>
         /// Gets the Payload from the request result.
         /// </summary>
-        /// <returns>A string containing the Payload.</returns>
+        /// <returns>A string containing the Payload.</returns>
         public override string GetPayloadAsJson()
        {
-            var pipelineTopologyBody = new PipelineTopologySetRequestBody(PipelineTopology.Name)
+            var pipelineBody = new PipelineTopologySetRequestBody(PipelineTopology.Name)
             {
                 SystemData = PipelineTopology.SystemData,
                 Properties = PipelineTopology.Properties
             };
-            return pipelineTopologyBody.GetPayloadAsJson();
+            return pipelineBody.GetPayloadAsJson();
         }
     }
 }
diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/RemoteDeviceAdapter.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/RemoteDeviceAdapter.cs
new file mode 100644
index 000000000000..83f31ba89958
--- /dev/null
+++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/RemoteDeviceAdapter.cs
@@ -0,0 +1,22 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System.Text.Json;
+
+namespace Azure.Media.VideoAnalyzer.Edge.Models
+{
+    public partial class RemoteDeviceAdapter
+    {
+        /// <summary>
+        /// Deserialize RemoteDeviceAdapter.
+        /// </summary>
+        /// <param name="json">The json to be deserialized.</param>
+        /// <returns>A RemoteDeviceAdapter.</returns>
+        public static RemoteDeviceAdapter Deserialize(string json)
+        {
+            using var doc = JsonDocument.Parse(json);
+            var element = doc.RootElement;
+            return DeserializeRemoteDeviceAdapter(element);
+        }
+    }
+}
diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/RemoteDeviceAdapterCollection.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/RemoteDeviceAdapterCollection.cs
new file mode 100644
index 000000000000..382feff1a683
--- /dev/null
+++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/RemoteDeviceAdapterCollection.cs
@@ -0,0 +1,25 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System.Text.Json;
+
+namespace Azure.Media.VideoAnalyzer.Edge.Models
+{
+    /// <summary>
+    /// Extension methods to RemoteDeviceAdapterCollection to add serialization and deserialization.
+    /// </summary>
+    public partial class RemoteDeviceAdapterCollection
+    {
+        /// <summary>
+        /// Deserialize RemoteDeviceAdapterCollection.
+        /// </summary>
+        /// <param name="json">The json data that is to be deserialized.</param>
+        /// <returns>A RemoteDeviceAdapterCollection.</returns>
+        public static RemoteDeviceAdapterCollection Deserialize(string json)
+        {
+            using var doc = JsonDocument.Parse(json);
+            var element = doc.RootElement;
+            return DeserializeRemoteDeviceAdapterCollection(element);
+        }
+    }
+}
diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/RemoteDeviceAdapterSetRequest.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/RemoteDeviceAdapterSetRequest.cs
new file mode 100644
index 000000000000..b933137de4d7
--- /dev/null
+++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/RemoteDeviceAdapterSetRequest.cs
@@ -0,0 +1,22 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+namespace Azure.Media.VideoAnalyzer.Edge.Models
+{
+    public partial class RemoteDeviceAdapterSetRequest
+    {
+        /// <summary>
+        /// Gets the Payload from the request result.
+        /// </summary>
+        /// <returns>A string containing the Payload.</returns>
+        public override string GetPayloadAsJson()
+        {
+            var remoteDeviceAdapter = new RemoteDeviceAdapterSetRequestBody(RemoteDeviceAdapter.Name)
+            {
+                SystemData = RemoteDeviceAdapter.SystemData,
+                Properties = RemoteDeviceAdapter.Properties
+            };
+            return remoteDeviceAdapter.GetPayloadAsJson();
+        }
+    }
+}
diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/RemoteDeviceAdapterSetRequestBody.cs b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/RemoteDeviceAdapterSetRequestBody.cs
new file mode 100644
index 000000000000..dcf6a99cae3c
--- /dev/null
+++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/Models/RemoteDeviceAdapterSetRequestBody.cs
@@ -0,0 +1,9 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+namespace Azure.Media.VideoAnalyzer.Edge.Models
+{
+    internal partial class RemoteDeviceAdapterSetRequestBody
+    {
+    }
+}
diff --git a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/autorest.md b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/autorest.md
index 4abcbd073cd8..fdcc85a32318 100644
--- a/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/autorest.md
+++ b/sdk/videoanalyzer/Azure.Media.VideoAnalyzer.Edge/src/autorest.md
@@ -10,7 +10,7 @@ directive:
     $["x-csharp-usage"] = "model,input,output";
     $["x-csharp-formats"] = "json";
 
-require: https://github.com/Azure/azure-rest-api-specs/blob/55b3e2d075398ec62f9322829494ff6a4323e299/specification/videoanalyzer/data-plane/readme.md
+require: https://github.com/Azure/azure-rest-api-specs/blob/694fe69245024447f8d3647be1da88e9ad942058/specification/videoanalyzer/data-plane/readme.md
 azure-arm: false
 payload-flattening-threshold: 2
 license-header: MICROSOFT_MIT_NO_VERSION