Code changes to add multi API support (#14706)
* Code changes to add multi API support

* Format

* Add Version Check

* Format

* Move ApiVersion to options bag
sarangan12 authored Apr 6, 2021
1 parent c9bcb27 commit d31aa59
Showing 10 changed files with 104 additions and 24 deletions.
26 changes: 18 additions & 8 deletions sdk/search/search-documents/review/search-documents.api.md
@@ -108,6 +108,12 @@ export interface BaseLexicalAnalyzer {
odatatype: "#Microsoft.Azure.Search.CustomAnalyzer" | "#Microsoft.Azure.Search.PatternAnalyzer" | "#Microsoft.Azure.Search.StandardAnalyzer" | "#Microsoft.Azure.Search.StopAnalyzer";
}

+// @public
+export interface BaseLexicalNormalizer {
+    name: string;
+    odatatype: "#Microsoft.Azure.Search.CustomNormalizer";
+}

// @public
export interface BaseLexicalTokenizer {
name: string;
@@ -314,7 +320,8 @@ export type CustomEntityLookupSkill = BaseSearchIndexerSkill & {
export type CustomEntityLookupSkillLanguage = string;

// @public
-export type CustomNormalizer = LexicalNormalizer & {
+export type CustomNormalizer = BaseLexicalNormalizer & {
+    odatatype: "#Microsoft.Azure.Search.CustomNormalizer";
tokenFilters?: TokenFilterName[];
charFilters?: CharFilterName[];
};
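With the base type factored out, `CustomNormalizer` is the one concrete, discriminated normalizer shape. A minimal sketch of defining one (the name and filter choices are illustrative, not part of this change):

```ts
import { CustomNormalizer } from "@azure/search-documents";

// A user-defined normalizer: lowercases and ASCII-folds values before
// strict comparisons in filtering, sorting, and faceting.
const myNormalizer: CustomNormalizer = {
  odatatype: "#Microsoft.Azure.Search.CustomNormalizer",
  name: "my-lowercase-normalizer",
  tokenFilters: ["lowercase", "asciifolding"],
  charFilters: []
};
```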
@@ -1289,10 +1296,7 @@ export type LexicalAnalyzer = CustomAnalyzer | PatternAnalyzer | LuceneStandardA
export type LexicalAnalyzerName = string;

// @public
-export interface LexicalNormalizer {
-    name: string;
-    odatatype: string;
-}
+export type LexicalNormalizer = CustomNormalizer;

// @public
export type LexicalNormalizerName = string;
@@ -1548,7 +1552,9 @@ export class SearchClient<T> implements IndexDocumentsClient<T> {
}

// @public
-export type SearchClientOptions = PipelineOptions;
+export interface SearchClientOptions extends PipelineOptions {
+    apiVersion?: string;
+}

// @public
export interface SearchDocumentsPageResult<T> extends SearchDocumentsResultBase {
@@ -1618,7 +1624,9 @@ export class SearchIndexClient {
}

// @public
-export type SearchIndexClientOptions = PipelineOptions;
+export interface SearchIndexClientOptions extends PipelineOptions {
+    apiVersion?: string;
+}

// @public
export interface SearchIndexer {
@@ -1665,7 +1673,9 @@ export class SearchIndexerClient {
}

// @public
-export type SearchIndexerClientOptions = PipelineOptions;
+export interface SearchIndexerClientOptions extends PipelineOptions {
+    apiVersion?: string;
+}

// @public
export interface SearchIndexerDataContainer {
sdk/search/search-documents/src/generated/service/models/index.ts
@@ -92,6 +92,7 @@ export type CharFilterUnion =
| CharFilter
| MappingCharFilter
| PatternReplaceCharFilter;
+export type LexicalNormalizerUnion = LexicalNormalizer | CustomNormalizer;
export type SimilarityUnion = Similarity | ClassicSimilarity | BM25Similarity;

/** Represents a datasource definition, which can be used to configure an indexer. */
@@ -595,7 +596,7 @@ export interface SearchIndex {
/** The character filters for the index. */
charFilters?: CharFilterUnion[];
/** The normalizers for the index. */
-  normalizers?: LexicalNormalizer[];
+  normalizers?: LexicalNormalizerUnion[];
/** A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your data when you want full assurance that no one, not even Microsoft, can decrypt your data in Azure Cognitive Search. Once you have encrypted your data, it will always remain encrypted. Azure Cognitive Search will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your data will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. */
encryptionKey?: SearchResourceEncryptionKey | null;
/** The type of similarity algorithm to be used when scoring and ranking the documents matching a search query. The similarity algorithm can only be defined at index creation time and cannot be modified on existing indexes. If null, the ClassicSimilarity algorithm is used. */
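Because `normalizers` now takes the discriminated union, a custom normalizer can travel inside an index definition. A hedged sketch tying this to the new `apiVersion` option — the endpoint, key, index shape, and `myNormalizer` from the sketch above are placeholders, and it assumes the public `SearchIndex` surface mirrors this generated one:

```ts
import { SearchIndexClient, AzureKeyCredential } from "@azure/search-documents";

const indexClient = new SearchIndexClient(
  "https://<service>.search.windows.net",
  new AzureKeyCredential("<admin-key>"),
  { apiVersion: "2020-06-30-Preview" } // normalizers require the preview version
);

// Create an index whose definition carries the custom normalizer.
await indexClient.createIndex({
  name: "hotels",
  fields: [
    { type: "Edm.String", name: "id", key: true },
    { type: "Edm.String", name: "hotelName", filterable: true }
  ],
  normalizers: [myNormalizer]
});
```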
@@ -761,8 +762,8 @@ export interface CharFilter {

/** Base type for normalizers. */
export interface LexicalNormalizer {
-  /** Identifies the concrete type of the normalizer. */
-  odatatype: string;
+  /** Polymorphic discriminator, which specifies the different types this object can be */
+  odatatype: "#Microsoft.Azure.Search.CustomNormalizer";
/** The name of the normalizer. It must only contain letters, digits, spaces, dashes or underscores, can only start and end with alphanumeric characters, and is limited to 128 characters. It cannot end in '.microsoft' nor '.lucene', nor be named 'asciifolding', 'standard', 'lowercase', 'uppercase', or 'elision'. */
name: string;
}
@@ -1654,6 +1655,8 @@ export type PatternReplaceCharFilter = CharFilter & {

/** Allows you to configure normalization for filterable, sortable, and facetable fields, which by default operate with strict matching. This is a user-defined configuration consisting of at least one or more filters, which modify the token that is stored. */
export type CustomNormalizer = LexicalNormalizer & {
+  /** Polymorphic discriminator, which specifies the different types this object can be */
+  odatatype: "#Microsoft.Azure.Search.CustomNormalizer";
/** A list of token filters used to filter out or modify the input token. For example, you can specify a lowercase filter that converts all characters to lowercase. The filters are run in the order in which they are listed. */
tokenFilters?: TokenFilterName[];
/** A list of character filters used to prepare input text before it is processed. For instance, they can replace certain characters or symbols. The filters are run in the order in which they are listed. */
sdk/search/search-documents/src/generated/service/models/mappers.ts
@@ -1737,6 +1737,11 @@ export const LexicalNormalizer: coreHttp.CompositeMapper = {
type: {
name: "Composite",
className: "LexicalNormalizer",
+    uberParent: "LexicalNormalizer",
+    polymorphicDiscriminator: {
+      serializedName: "@odata\\.type",
+      clientName: "@odata\\.type"
+    },
modelProperties: {
odatatype: {
serializedName: "@odata\\.type",
@@ -4578,6 +4583,8 @@ export const CustomNormalizer: coreHttp.CompositeMapper = {
type: {
name: "Composite",
className: "CustomNormalizer",
+    uberParent: "LexicalNormalizer",
+    polymorphicDiscriminator: LexicalNormalizer.type.polymorphicDiscriminator,
modelProperties: {
...LexicalNormalizer.type.modelProperties,
tokenFilters: {
@@ -4656,6 +4663,7 @@ export let discriminators = {
LexicalTokenizer: LexicalTokenizer,
TokenFilter: TokenFilter,
CharFilter: CharFilter,
+  LexicalNormalizer: LexicalNormalizer,
Similarity: Similarity,
"DataChangeDetectionPolicy.#Microsoft.Azure.Search.HighWaterMarkChangeDetectionPolicy": HighWaterMarkChangeDetectionPolicy,
"DataChangeDetectionPolicy.#Microsoft.Azure.Search.SqlIntegratedChangeTrackingPolicy": SqlIntegratedChangeTrackingPolicy,
@@ -4723,6 +4731,7 @@ export let discriminators = {
"TokenFilter.#Microsoft.Azure.Search.WordDelimiterTokenFilter": WordDelimiterTokenFilter,
"CharFilter.#Microsoft.Azure.Search.MappingCharFilter": MappingCharFilter,
"CharFilter.#Microsoft.Azure.Search.PatternReplaceCharFilter": PatternReplaceCharFilter,
"LexicalNormalizer.#Microsoft.Azure.Search.CustomNormalizer": CustomNormalizer,
"Similarity.#Microsoft.Azure.Search.ClassicSimilarity": ClassicSimilarity,
"Similarity.#Microsoft.Azure.Search.BM25Similarity": BM25Similarity
};
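For context on the mapper changes: at deserialization time `@azure/core-http` combines the uber-parent's class name with the payload's `@odata.type` value to look up the concrete mapper in this `discriminators` table. Roughly (a simplified sketch, not the actual library code):

```ts
// Simplified lookup: "LexicalNormalizer" plus the wire discriminator
// resolves to the CustomNormalizer mapper registered above.
function resolveMapper(
  baseClassName: string,
  payload: { ["@odata.type"]?: string }
): coreHttp.CompositeMapper | undefined {
  const table = discriminators as Record<string, coreHttp.CompositeMapper>;
  const tag = payload["@odata.type"];
  return tag ? table[`${baseClassName}.${tag}`] : table[baseClassName];
}
```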
7 changes: 4 additions & 3 deletions sdk/search/search-documents/src/index.ts
@@ -110,7 +110,8 @@ export {
SearchResourceEncryptionKey,
SearchIndexStatistics,
SearchServiceStatistics,
-  SearchIndexer
+  SearchIndexer,
+  LexicalNormalizer
} from "./serviceModels";
export { default as GeographyPoint } from "./geographyPoint";
export { odata } from "./odata";
@@ -275,13 +276,13 @@ export {
LexicalAnalyzer as BaseLexicalAnalyzer,
CharFilter as BaseCharFilter,
DataDeletionDetectionPolicy as BaseDataDeletionDetectionPolicy,
-  LexicalNormalizer,
LexicalNormalizerName,
KnownLexicalNormalizerName,
CustomNormalizer,
TokenFilterName,
KnownTokenFilterName,
CharFilterName,
-  KnownCharFilterName
+  KnownCharFilterName,
+  LexicalNormalizer as BaseLexicalNormalizer
} from "./generated/service/models";
export { AzureKeyCredential } from "@azure/core-auth";
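The generated base interface is re-exported under a `Base*` alias while the hand-authored union (see `serviceModels.ts` below) takes the public `LexicalNormalizer` name, matching the existing `BaseLexicalAnalyzer`/`BaseCharFilter` pattern. From a consumer's view:

```ts
import {
  LexicalNormalizer, // hand-authored union (currently just CustomNormalizer)
  BaseLexicalNormalizer // generated base: { name, odatatype }
} from "@azure/search-documents";
```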
18 changes: 16 additions & 2 deletions sdk/search/search-documents/src/searchClient.ts
@@ -51,7 +51,12 @@ import { IndexDocumentsClient } from "./searchIndexingBufferedSender";
/**
* Client options used to configure Cognitive Search API requests.
*/
-export type SearchClientOptions = PipelineOptions;
+export interface SearchClientOptions extends PipelineOptions {
+  /**
+   * The API version to use when communicating with the service.
+   */
+  apiVersion?: string;
+}
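A usage sketch for the new options bag (the endpoint, index name, key, and `Hotel` type are placeholders):

```ts
import { SearchClient, AzureKeyCredential } from "@azure/search-documents";

interface Hotel {
  id: string;
  hotelName?: string;
}

// Pin the client to a specific, supported service API version.
const searchClient = new SearchClient<Hotel>(
  "https://<service>.search.windows.net",
  "hotels",
  new AzureKeyCredential("<query-key>"),
  { apiVersion: "2020-06-30" }
);
```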

/**
* Class used to perform operations against a search index,
@@ -146,7 +151,16 @@ export class SearchClient<T> implements IndexDocumentsClient<T> {
pipeline.requestPolicyFactories.unshift(odataMetadataPolicy("none"));
}

-    this.client = new GeneratedClient(this.endpoint, this.indexName, this.apiVersion, pipeline);
+    let apiVersion = this.apiVersion;
+
+    if (options.apiVersion) {
+      if (!["2020-06-30-Preview", "2020-06-30"].includes(options.apiVersion)) {
+        throw new Error(`Invalid Api Version: ${options.apiVersion}`);
+      }
+      apiVersion = options.apiVersion;
+    }
+
+    this.client = new GeneratedClient(this.endpoint, this.indexName, apiVersion, pipeline);
}
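Only the two known service versions pass the guard (the same check appears in all three clients), so an unsupported value fails fast at construction time. Continuing the sketch above:

```ts
// Throws synchronously with: Error: Invalid Api Version: 2019-05-06
new SearchClient<Hotel>(
  "https://<service>.search.windows.net",
  "hotels",
  new AzureKeyCredential("<query-key>"),
  { apiVersion: "2019-05-06" }
);
```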

/**
18 changes: 16 additions & 2 deletions sdk/search/search-documents/src/searchIndexClient.ts
@@ -45,7 +45,12 @@ import { SearchClient, SearchClientOptions as GetSearchClientOptions } from "./s
/**
* Client options used to configure Cognitive Search API requests.
*/
-export type SearchIndexClientOptions = PipelineOptions;
+export interface SearchIndexClientOptions extends PipelineOptions {
+  /**
+   * The API version to use when communicating with the service.
+   */
+  apiVersion?: string;
+}

/**
* Class to perform operations to manage
@@ -137,7 +142,16 @@ export class SearchIndexClient {
pipeline.requestPolicyFactories.unshift(odataMetadataPolicy("minimal"));
}

-    this.client = new GeneratedClient(this.endpoint, this.apiVersion, pipeline);
+    let apiVersion = this.apiVersion;
+
+    if (options.apiVersion) {
+      if (!["2020-06-30-Preview", "2020-06-30"].includes(options.apiVersion)) {
+        throw new Error(`Invalid Api Version: ${options.apiVersion}`);
+      }
+      apiVersion = options.apiVersion;
+    }
+
+    this.client = new GeneratedClient(this.endpoint, apiVersion, pipeline);
}

private async *listIndexesPage(
18 changes: 16 additions & 2 deletions sdk/search/search-documents/src/searchIndexerClient.ts
@@ -44,7 +44,12 @@ import { odataMetadataPolicy } from "./odataMetadataPolicy";
/**
* Client options used to configure Cognitive Search API requests.
*/
-export type SearchIndexerClientOptions = PipelineOptions;
+export interface SearchIndexerClientOptions extends PipelineOptions {
+  /**
+   * The API version to use when communicating with the service.
+   */
+  apiVersion?: string;
+}

/**
* Class to perform operations to manage
@@ -128,7 +133,16 @@ export class SearchIndexerClient {
pipeline.requestPolicyFactories.unshift(odataMetadataPolicy("minimal"));
}

-    this.client = new GeneratedClient(this.endpoint, this.apiVersion, pipeline);
+    let apiVersion = this.apiVersion;
+
+    if (options.apiVersion) {
+      if (!["2020-06-30-Preview", "2020-06-30"].includes(options.apiVersion)) {
+        throw new Error(`Invalid Api Version: ${options.apiVersion}`);
+      }
+      apiVersion = options.apiVersion;
+    }
+
+    this.client = new GeneratedClient(this.endpoint, apiVersion, pipeline);
}

/**
9 changes: 7 additions & 2 deletions sdk/search/search-documents/src/serviceModels.ts
@@ -74,8 +74,8 @@ import {
FieldMapping,
IndexingParameters,
IndexingSchedule,
-  LexicalNormalizer,
-  LexicalNormalizerName
+  LexicalNormalizerName,
+  CustomNormalizer
} from "./generated/service/models";

import { PagedAsyncIterableIterator } from "@azure/core-paging";
@@ -660,6 +660,11 @@ export type TokenFilter =
*/
export type CharFilter = MappingCharFilter | PatternReplaceCharFilter;

+/**
+ * Contains the possible cases for LexicalNormalizer.
+ */
+export type LexicalNormalizer = CustomNormalizer;

/**
* Contains the possible cases for ScoringFunction.
*/
4 changes: 2 additions & 2 deletions sdk/search/search-documents/src/serviceUtils.ts
@@ -46,7 +46,6 @@ import {
PatternAnalyzer as GeneratedPatternAnalyzer,
CustomAnalyzer,
PatternTokenizer,
-  LexicalNormalizer,
LexicalNormalizerName
} from "./generated/service/models";
import {
@@ -70,7 +69,8 @@ import {
DataDeletionDetectionPolicy,
SimilarityAlgorithm,
SearchResourceEncryptionKey,
-  PatternAnalyzer
+  PatternAnalyzer,
+  LexicalNormalizer
} from "./serviceModels";
import { SuggestDocumentsResult, SuggestResult, SearchResult } from "./indexModels";
import {
10 changes: 10 additions & 0 deletions sdk/search/search-documents/swagger/Service.md
@@ -278,3 +278,13 @@ directive:
    transform: >
      $["x-ms-client-name"] = "tokenizerName"
```
+### Add discriminator to LexicalNormalizer
+```yaml
+directive:
+  - from: swagger-document
+    where: $.definitions.LexicalNormalizer
+    transform: >
+      $["discriminator"] = "@odata.type";
+```
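This directive marks `@odata.type` as the polymorphic discriminator on the swagger definition before code generation, which is what yields the `uberParent`/`polymorphicDiscriminator` mapper changes above. On the wire, a custom normalizer then carries its tag explicitly (an illustrative payload, mirroring the earlier sketch):

```ts
// REST shape of a custom normalizer inside an index definition (illustrative):
const normalizerPayload = {
  "@odata.type": "#Microsoft.Azure.Search.CustomNormalizer",
  name: "my-lowercase-normalizer",
  tokenFilters: ["lowercase", "asciifolding"],
  charFilters: []
};
```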
