Search SDK: Bug fixes and JSON blob parsing support #3012

Merged
merged 12 commits on Apr 4, 2017
@@ -13,6 +13,8 @@ namespace Microsoft.Azure.Search.Models
/// </summary>
public static class IndexingParametersExtensions
{
private const string ParsingModeKey = "parsingMode";

/// <summary>
/// Specifies that the indexer will index only the storage metadata and completely skip the document extraction process. This is useful when
/// you don't need the document content, nor do you need any of the content type-specific metadata properties.
@@ -110,6 +112,21 @@ public static IndexingParameters SetBlobExtractionMode(this IndexingParameters p
return Configure(parameters, "dataToExtract", (string)extractionMode);
}

/// <summary>
/// Tells the indexer to assume that all blobs contain JSON, which it will then parse such that each blob's JSON will map to a single
/// document in the Azure Search index.
/// See <see href="https://docs.microsoft.com/azure/search/search-howto-index-json-blobs/" /> for details.
/// </summary>
/// <param name="parameters">IndexingParameters to configure.</param>
/// <remarks>
/// This option only applies to indexers that index Azure Blob Storage.
/// </remarks>
/// <returns>The IndexingParameters instance.</returns>
public static IndexingParameters ParseJson(this IndexingParameters parameters)
{
return Configure(parameters, ParsingModeKey, "json");
}

/// <summary>
/// Specifies that <c cref="BlobExtractionMode.StorageMetadata">BlobExtractionMode.StorageMetadata</c> blob extraction mode will be
/// automatically used for blobs of unsupported content types. The default is false.
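For reference, a minimal usage sketch of the new ParseJson() option. The indexer, data source, and index names here are hypothetical, and the snippet assumes a using directive for Microsoft.Azure.Search.Models:

// Usage sketch (illustrative names): configure a blob indexer so that each
// JSON blob maps to a single document in the target index.
var indexer = new Indexer
{
    Name = "json-blob-indexer",
    DataSourceName = "json-blob-datasource",
    TargetIndexName = "hotels-index",
    Parameters = new IndexingParameters().ParseJson()
};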
@@ -0,0 +1,72 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.

namespace Microsoft.Azure.Search.Serialization
{
using System;
using System.Globalization;
using System.Reflection;
using Newtonsoft.Json;

/// <summary>
/// Serializes doubles to and from the OData wire format.
/// </summary>
internal class DoubleConverter : JsonConverter
{
private const string ODataNegativeInfinity = "-INF";
private const string ODataPositiveInfinity = "INF";

public override bool CanConvert(Type objectType) =>
typeof(double?).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo()) ||
typeof(double).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo());

public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
if (reader.TokenType == JsonToken.Null)
{
return null;
}

if (reader.TokenType == JsonToken.String)
{
string strValue = (string)reader.Value;

switch (strValue)
{
case ODataPositiveInfinity: return Double.PositiveInfinity;
case ODataNegativeInfinity: return Double.NegativeInfinity;
default: return Double.Parse(strValue, CultureInfo.InvariantCulture);
}
}

// We can't use a direct cast because sometimes we get integers from the reader.
return Convert.ToDouble(reader.Value);
}

public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
{
if (value == null)
{
writer.WriteNull();
}
else
{
double doubleValue = (double)value;

if (Double.IsNegativeInfinity(doubleValue))
{
writer.WriteValue(ODataNegativeInfinity);
}
else if (Double.IsPositiveInfinity(doubleValue))
{
writer.WriteValue(ODataPositiveInfinity);
}
else
{
writer.WriteValue(doubleValue);
}
}
}
}
}
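A quick sketch of the wire behavior. DoubleConverter is internal, so this assumes code running inside the assembly (or a test with access to it) and direct use of Newtonsoft.Json; in the SDK itself the converter is registered through the serializer settings shown below:

// Serialization: special double values become OData strings.
string json = JsonConvert.SerializeObject(double.NegativeInfinity, new DoubleConverter());
// json == "\"-INF\""

// Deserialization: OData strings come back as special double values.
double value = JsonConvert.DeserializeObject<double>("\"INF\"", new DoubleConverter());
// value == double.PositiveInfinity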
@@ -48,6 +48,7 @@ private static JsonSerializerSettings CreateSerializerSettings<T>(
settings.Converters.Add(new GeographyPointConverter());
settings.Converters.Add(new IndexActionConverter<T>());
settings.Converters.Add(new DateTimeConverter());
settings.Converters.Add(new DoubleConverter());
settings.NullValueHandling = NullValueHandling.Ignore;

if (useCamelCase)
@@ -72,6 +73,7 @@ private static JsonSerializerSettings CreateDeserializerSettings<TSearchResult,
settings.Converters.Add(new GeographyPointConverter());
settings.Converters.Add(new DocumentConverter());
settings.Converters.Add(new DateTimeConverter());
settings.Converters.Add(new DoubleConverter());
settings.Converters.Add(new SearchResultConverter<TSearchResult, TDoc>());
settings.Converters.Add(new SuggestResultConverter<TSuggestResult, TDoc>());
settings.DateParseHandling = DateParseHandling.DateTimeOffset;
@@ -36,6 +36,10 @@ public CharFilter(string name)
}

/// <summary>
/// Gets or sets the name of the char filter. It must only contain
/// letters, digits, spaces, dashes or underscores, can only start
/// and end with alphanumeric characters, and is limited to 128
/// characters.
/// </summary>
[JsonProperty(PropertyName = "name")]
public string Name { get; set; }
@@ -36,6 +36,10 @@ public TokenFilter(string name)
}

/// <summary>
/// Gets or sets the name of the token filter. It must only contain
/// letters, digits, spaces, dashes or underscores, can only start
/// and end with alphanumeric characters, and is limited to 128
/// characters.
/// </summary>
[JsonProperty(PropertyName = "name")]
public string Name { get; set; }
@@ -36,6 +36,10 @@ public Tokenizer(string name)
}

/// <summary>
/// Gets or sets the name of the tokenizer. It must only contain
/// letters, digits, spaces, dashes or underscores, can only start
/// and end with alphanumeric characters, and is limited to 128
/// characters.
/// </summary>
[JsonProperty(PropertyName = "name")]
public string Name { get; set; }
@@ -10,7 +10,7 @@
[assembly: AssemblyDescription("Makes it easy to develop a .NET application that uses Azure Search.")]

[assembly: AssemblyVersion("3.0.0.0")]
[assembly: AssemblyFileVersion("3.0.2.0")]
[assembly: AssemblyFileVersion("3.0.3.0")]

[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("Microsoft")]
4 changes: 2 additions & 2 deletions src/Search/Microsoft.Azure.Search/project.json
@@ -1,5 +1,5 @@
{
"version": "3.0.2",
"version": "3.0.3",
"title": "Microsoft Azure Search Library",
"description": "Makes it easy to develop a .NET application that uses Azure Search.",
"authors": [ "Microsoft" ],
@@ -11,7 +11,7 @@
"iconUrl": "http://go.microsoft.com/fwlink/?LinkID=288890",
"tags": [ "Microsoft Azure Search", "REST HTTP client", "search", "azureofficial", "windowsazureofficial" ],
"requireLicenseAcceptance": true,
"releaseNotes": "This is the newest major version of the Azure Search .NET SDK, based on version 2016-09-01 of the Azure Search REST API. New in this version is support for indexing Azure Blob and Table storage, indexer field mappings, custom analyzers, and ETags. Also included is support for reflection-based field definitions via the FieldBuilder class, thanks to a contribution from Ian Griffiths (https://github.com/idg10). See this article for help on migrating to the latest version: http://aka.ms/search-sdk-upgrade."
"releaseNotes": "This is the newest major version of the Azure Search .NET SDK, based on version 2016-09-01 of the Azure Search REST API. New in this version is support for indexing Azure Blob storage (including parsing of JSON blobs), indexing Azure Table storage, indexer field mappings, custom analyzers, and ETags. Also included is support for reflection-based field definitions via the FieldBuilder class, thanks to a contribution from Ian Griffiths (https://github.com/idg10). See this article for help on migrating to the latest version: http://aka.ms/search-sdk-upgrade."
},

"buildOptions": {
2 changes: 1 addition & 1 deletion src/Search/Search.Management.Tests/project.json
@@ -33,7 +33,7 @@
"Microsoft.Rest.ClientRuntime.Azure": "[3.3.4,4.0)",
"Microsoft.Azure.Management.ResourceManager": "1.1.1-preview",
"Microsoft.Azure.Management.Search": "1.0.1",
"Microsoft.Azure.Search": "3.0.2",
"Microsoft.Azure.Search": "3.0.3",
"xunit": "2.2.0-beta2-build3300",
"dotnet-test-xunit": "2.2.0-preview2-build1029"
}