diff --git a/sdk/search/Azure.Search.Documents/CHANGELOG.md b/sdk/search/Azure.Search.Documents/CHANGELOG.md index 3571dd5601aa..4c18cd543a9b 100644 --- a/sdk/search/Azure.Search.Documents/CHANGELOG.md +++ b/sdk/search/Azure.Search.Documents/CHANGELOG.md @@ -1,6 +1,6 @@ # Release History -## 11.7.0-beta.8 (Unreleased) +## 11.8.0-beta.1 (Unreleased) ### Features Added @@ -10,6 +10,23 @@ ### Other Changes +## 11.7.0 (2025-10-09) + +### Features Added +- Added support for `2025-09-01` service version. +- Support for reranker boosted scores in search results and the ability to sort results on either reranker or reranker boosted scores in `SemanticConfiguration.RankingOrder`. +- Support for `VectorSearchCompression.RescoringOptions` to configure how vector compression handles the original vector when indexing and how vectors are used during rescoring. +- Added `SearchIndex.Description` to provide a textual description of the index. +- Support for `LexicalNormalizer` when defining `SearchIndex`, `SimpleField`, and `SearchableField` and the ability to use it when analyzing text with `SearchIndexClient.AnalyzeText`. +- Support `DocumentIntelligenceLayoutSkill` skillset skill and `OneLake` `SearchIndexerDataSourceConnection` data source. +- Support for `QueryDebugMode` in searching to retrieve detailed information about search processing. Only vector is supported for `QueryDebugMode`. + +### Breaking Changes +- `VectorSearchCompression.RerankWithOriginalVectors` and `VectorSearchCompression.DefaultOversampling` don't work with + `2025-09-01` and were replaced by `VectorSearchCompression.RescoringOptions.EnableRescoring` and + `VectorSearchCompression.RescoringOptions.DefaultOversampling`. If using `2024-07-01` continue using the old properties, + otherwise if using `2025-09-01` use the new properties in `RescoringOptions`. 
+ ## 11.7.0-beta.7 (2025-09-05) ### Features Added diff --git a/sdk/search/Azure.Search.Documents/api/Azure.Search.Documents.net8.0.cs b/sdk/search/Azure.Search.Documents/api/Azure.Search.Documents.net8.0.cs index d14d597de9ea..175939828aa1 100644 --- a/sdk/search/Azure.Search.Documents/api/Azure.Search.Documents.net8.0.cs +++ b/sdk/search/Azure.Search.Documents/api/Azure.Search.Documents.net8.0.cs @@ -106,7 +106,8 @@ public enum ServiceVersion V2020_06_30 = 1, V2023_11_01 = 2, V2024_07_01 = 3, - V2025_08_01_Preview = 4, + V2025_09_01 = 4, + V2025_08_01_Preview = 5, } } public static partial class SearchExtensions @@ -795,11 +796,10 @@ public partial class AnalyzeTextOptions : System.ClientModel.Primitives.IJsonMod { public AnalyzeTextOptions(string text) { } public AnalyzeTextOptions(string text, Azure.Search.Documents.Indexes.Models.LexicalAnalyzerName analyzerName) { } - public AnalyzeTextOptions(string text, Azure.Search.Documents.Indexes.Models.LexicalNormalizerName normalizerName) { } public AnalyzeTextOptions(string text, Azure.Search.Documents.Indexes.Models.LexicalTokenizerName tokenizerName) { } public Azure.Search.Documents.Indexes.Models.LexicalAnalyzerName? AnalyzerName { get { throw null; } } public System.Collections.Generic.IList CharFilters { get { throw null; } } - public Azure.Search.Documents.Indexes.Models.LexicalNormalizerName? NormalizerName { get { throw null; } } + public Azure.Search.Documents.Indexes.Models.LexicalNormalizerName? NormalizerName { get { throw null; } set { } } public string Text { get { throw null; } } public System.Collections.Generic.IList TokenFilters { get { throw null; } } public Azure.Search.Documents.Indexes.Models.LexicalTokenizerName? 
TokenizerName { get { throw null; } } @@ -4050,6 +4050,7 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer } public partial class SearchResourceEncryptionKey : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { + public SearchResourceEncryptionKey(string keyName, string keyVersion, string vaultUri) { } public SearchResourceEncryptionKey(System.Uri vaultUri, string keyName, string keyVersion) { } public string ApplicationId { get { throw null; } set { } } public string ApplicationSecret { get { throw null; } set { } } @@ -5797,6 +5798,7 @@ public static partial class SearchModelFactory public static Azure.Search.Documents.Models.AutocompleteResults AutocompleteResults(double? coverage = default(double?), System.Collections.Generic.IEnumerable results = null) { throw null; } [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] public static Azure.Search.Documents.Models.AutocompleteResults AutocompleteResults(double? 
coverage, System.Collections.Generic.IReadOnlyList results) { throw null; } + public static Azure.Search.Documents.Indexes.Models.KnowledgeSource AzureBlobKnowledgeSource(string name = null, string description = null, string kind = null, string eTag = null, Azure.Search.Documents.Indexes.Models.SearchResourceEncryptionKey encryptionKey = null, System.Collections.Generic.IDictionary serializedAdditionalRawData = null, Azure.Search.Documents.Indexes.Models.AzureBlobKnowledgeSourceParameters azureBlobParameters = null) { throw null; } public static Azure.Search.Documents.Indexes.Models.AzureBlobKnowledgeSourceParameters AzureBlobKnowledgeSourceParameters(Azure.Search.Documents.Indexes.Models.SearchIndexerDataIdentity identity = null, string connectionString = null, string containerName = null, string folderPath = null, Azure.Search.Documents.Indexes.Models.VectorSearchVectorizer embeddingModel = null, Azure.Search.Documents.Indexes.Models.KnowledgeAgentModel chatCompletionModel = null, Azure.Search.Documents.Indexes.Models.IndexingSchedule ingestionSchedule = null, System.Collections.Generic.IReadOnlyDictionary createdResources = null, bool? 
disableImageVerbalization = default(bool?)) { throw null; } public static Azure.Search.Documents.Indexes.Models.CharFilter CharFilter(string oDataType, string name) { throw null; } public static Azure.Search.Documents.Indexes.Models.CognitiveServicesAccount CognitiveServicesAccount(string oDataType, string description) { throw null; } @@ -5805,6 +5807,8 @@ public static partial class SearchModelFactory public static Azure.Search.Documents.Models.DebugInfo DebugInfo(Azure.Search.Documents.Models.QueryRewritesDebugInfo queryRewrites = null) { throw null; } public static Azure.Search.Documents.Models.DocumentDebugInfo DocumentDebugInfo(Azure.Search.Documents.Models.SemanticDebugInfo semantic = null, Azure.Search.Documents.Models.VectorsDebugInfo vectors = null, System.Collections.Generic.IReadOnlyDictionary> innerHits = null) { throw null; } [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public static Azure.Search.Documents.Models.DocumentDebugInfo DocumentDebugInfo(Azure.Search.Documents.Models.VectorsDebugInfo vectors = null) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] public static Azure.Search.Documents.Models.FacetResult FacetResult(long? count = default(long?), System.Collections.Generic.IReadOnlyDictionary additionalProperties = null) { throw null; } public static Azure.Search.Documents.Models.FacetResult FacetResult(long? count = default(long?), double? 
sum = default(double?), System.Collections.Generic.IReadOnlyDictionary> facets = null, System.Collections.Generic.IReadOnlyDictionary additionalProperties = null) { throw null; } public static Azure.Search.Documents.Models.IndexDocumentsResult IndexDocumentsResult(System.Collections.Generic.IEnumerable results) { throw null; } @@ -5843,6 +5847,14 @@ public static partial class SearchModelFactory public static Azure.Search.Documents.Models.QueryRewritesDebugInfo QueryRewritesDebugInfo(Azure.Search.Documents.Models.QueryRewritesValuesDebugInfo text = null, System.Collections.Generic.IEnumerable vectors = null) { throw null; } public static Azure.Search.Documents.Models.QueryRewritesValuesDebugInfo QueryRewritesValuesDebugInfo(string inputQuery = null, System.Collections.Generic.IEnumerable rewrites = null) { throw null; } public static Azure.Search.Documents.Indexes.Models.ScoringFunction ScoringFunction(string type, string fieldName, double boost, Azure.Search.Documents.Indexes.Models.ScoringFunctionInterpolation? 
interpolation) { throw null; } + public static Azure.Search.Documents.Indexes.Models.SearchAlias SearchAlias(string name = null, System.Collections.Generic.IList indexes = null, string etag = null, System.Collections.Generic.IDictionary serializedAdditionalRawData = null) { throw null; } + public static Azure.Search.Documents.Indexes.Models.SearchIndex SearchIndex(string name = null, string description = null, System.Collections.Generic.IList fields = null, System.Collections.Generic.IList scoringProfiles = null, string defaultScoringProfile = null, Azure.Search.Documents.Indexes.Models.CorsOptions corsOptions = null, System.Collections.Generic.IList suggesters = null, System.Collections.Generic.IList analyzers = null, System.Collections.Generic.IList tokenizers = null, System.Collections.Generic.IList tokenFilters = null, System.Collections.Generic.IList charFilters = null, System.Collections.Generic.IList normalizers = null, Azure.Search.Documents.Indexes.Models.SearchResourceEncryptionKey encryptionKey = null, Azure.Search.Documents.Indexes.Models.SimilarityAlgorithm similarity = null, Azure.Search.Documents.Indexes.Models.SemanticSearch semanticSearch = null, Azure.Search.Documents.Indexes.Models.VectorSearch vectorSearch = null, Azure.Search.Documents.Models.SearchIndexPermissionFilterOption permissionFilterOption = default(Azure.Search.Documents.Models.SearchIndexPermissionFilterOption), string etag = null, System.Collections.Generic.IDictionary serializedAdditionalRawData = null) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public static Azure.Search.Documents.Indexes.Models.SearchIndex SearchIndex(string name = null, string description = null, System.Collections.Generic.IList fields = null, System.Collections.Generic.IList scoringProfiles = null, string defaultScoringProfile = null, Azure.Search.Documents.Indexes.Models.CorsOptions corsOptions = null, System.Collections.Generic.IList 
suggesters = null, System.Collections.Generic.IList analyzers = null, System.Collections.Generic.IList tokenizers = null, System.Collections.Generic.IList tokenFilters = null, System.Collections.Generic.IList charFilters = null, System.Collections.Generic.IList normalizers = null, Azure.Search.Documents.Indexes.Models.SearchResourceEncryptionKey encryptionKey = null, Azure.Search.Documents.Indexes.Models.SimilarityAlgorithm similarity = null, Azure.Search.Documents.Indexes.Models.SemanticSearch semanticSearch = null, Azure.Search.Documents.Indexes.Models.VectorSearch vectorSearch = null, string etag = null, System.Collections.Generic.IDictionary serializedAdditionalRawData = null) { throw null; } + public static Azure.Search.Documents.Indexes.Models.SearchIndexer SearchIndexer(string name = null, string description = null, string dataSourceName = null, string skillsetName = null, string targetIndexName = null, Azure.Search.Documents.Indexes.Models.IndexingSchedule schedule = null, Azure.Search.Documents.Indexes.Models.IndexingParameters parameters = null, System.Collections.Generic.IList fieldMappings = null, System.Collections.Generic.IList outputFieldMappings = null, bool? 
isDisabled = default(bool?), string etag = null, Azure.Search.Documents.Indexes.Models.SearchResourceEncryptionKey encryptionKey = null, Azure.Search.Documents.Indexes.Models.SearchIndexerCache cache = null, System.Collections.Generic.IDictionary serializedAdditionalRawData = null) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public static Azure.Search.Documents.Indexes.Models.SearchIndexer SearchIndexer(string name = null, string description = null, string dataSourceName = null, string skillsetName = null, string targetIndexName = null, Azure.Search.Documents.Indexes.Models.IndexingSchedule schedule = null, Azure.Search.Documents.Indexes.Models.IndexingParameters parameters = null, System.Collections.Generic.IList fieldMappings = null, System.Collections.Generic.IList outputFieldMappings = null, bool? isDisabled = default(bool?), string etag = null, Azure.Search.Documents.Indexes.Models.SearchResourceEncryptionKey encryptionKey = null, System.Collections.Generic.IDictionary serializedAdditionalRawData = null) { throw null; } + public static Azure.Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection SearchIndexerDataSourceConnection(string name = null, string description = null, Azure.Search.Documents.Indexes.Models.SearchIndexerDataSourceType type = default(Azure.Search.Documents.Indexes.Models.SearchIndexerDataSourceType), string connectionString = null, Azure.Search.Documents.Indexes.Models.SearchIndexerDataContainer container = null, Azure.Search.Documents.Indexes.Models.DataChangeDetectionPolicy dataChangeDetectionPolicy = null, Azure.Search.Documents.Indexes.Models.DataDeletionDetectionPolicy dataDeletionDetectionPolicy = null, string etag = null, Azure.Search.Documents.Indexes.Models.SearchResourceEncryptionKey encryptionKey = null, System.Collections.Generic.IDictionary serializedAdditionalRawData = null) { throw null; } public static 
Azure.Search.Documents.Indexes.Models.SearchIndexerError SearchIndexerError(string key, string errorMessage, int statusCode, string name, string details, string documentationLink) { throw null; } public static Azure.Search.Documents.Indexes.Models.SearchIndexerLimits SearchIndexerLimits(System.TimeSpan? maxRunTime, long? maxDocumentExtractionSize, long? maxDocumentContentCharactersToExtract) { throw null; } public static Azure.Search.Documents.Indexes.Models.SearchIndexerSkill SearchIndexerSkill(string oDataType, string name, string description, string context, System.Collections.Generic.IList inputs, System.Collections.Generic.IList outputs) { throw null; } @@ -5850,8 +5862,11 @@ public static partial class SearchModelFactory public static Azure.Search.Documents.Indexes.Models.SearchIndexerStatus SearchIndexerStatus(Azure.Search.Documents.Indexes.Models.IndexerStatus status = Azure.Search.Documents.Indexes.Models.IndexerStatus.Unknown, Azure.Search.Documents.Indexes.Models.IndexerExecutionResult lastResult = null, System.Collections.Generic.IEnumerable executionHistory = null, Azure.Search.Documents.Indexes.Models.SearchIndexerLimits limits = null) { throw null; } [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] public static Azure.Search.Documents.Indexes.Models.SearchIndexerStatus SearchIndexerStatus(Azure.Search.Documents.Indexes.Models.IndexerStatus status, Azure.Search.Documents.Indexes.Models.IndexerExecutionResult lastResult, System.Collections.Generic.IReadOnlyList executionHistory, Azure.Search.Documents.Indexes.Models.SearchIndexerLimits limits) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public static Azure.Search.Documents.Indexes.Models.SearchIndexerStatus SearchIndexerStatus(string name = null, Azure.Search.Documents.Indexes.Models.IndexerStatus status = Azure.Search.Documents.Indexes.Models.IndexerStatus.Unknown, 
Azure.Search.Documents.Indexes.Models.IndexerExecutionResult lastResult = null, System.Collections.Generic.IEnumerable executionHistory = null, Azure.Search.Documents.Indexes.Models.SearchIndexerLimits limits = null) { throw null; } public static Azure.Search.Documents.Indexes.Models.SearchIndexerStatus SearchIndexerStatus(string name = null, Azure.Search.Documents.Indexes.Models.IndexerStatus status = Azure.Search.Documents.Indexes.Models.IndexerStatus.Unknown, Azure.Search.Documents.Indexes.Models.IndexerExecutionResult lastResult = null, System.Collections.Generic.IEnumerable executionHistory = null, Azure.Search.Documents.Indexes.Models.SearchIndexerLimits limits = null, Azure.Search.Documents.Indexes.Models.IndexerState currentState = null) { throw null; } public static Azure.Search.Documents.Indexes.Models.SearchIndexerWarning SearchIndexerWarning(string key, string message, string name, string details, string documentationLink) { throw null; } + public static Azure.Search.Documents.Indexes.Models.KnowledgeSource SearchIndexKnowledgeSource(string name = null, string description = null, string kind = null, string eTag = null, Azure.Search.Documents.Indexes.Models.SearchResourceEncryptionKey encryptionKey = null, System.Collections.Generic.IDictionary serializedAdditionalRawData = null, Azure.Search.Documents.Indexes.Models.SearchIndexKnowledgeSourceParameters searchIndexParameters = null) { throw null; } [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] public static Azure.Search.Documents.Indexes.Models.SearchIndexStatistics SearchIndexStatistics(long documentCount, long storageSize) { throw null; } public static Azure.Search.Documents.Indexes.Models.SearchIndexStatistics SearchIndexStatistics(long documentCount = (long)0, long storageSize = (long)0, long vectorIndexSize = (long)0) { throw null; } @@ -5889,6 +5904,7 @@ public static partial class SearchModelFactory public static 
Azure.Search.Documents.Indexes.Models.SimilarityAlgorithm SimilarityAlgorithm(string oDataType) { throw null; } public static Azure.Search.Documents.Models.SingleVectorFieldResult SingleVectorFieldResult(double? searchScore = default(double?), double? vectorSimilarity = default(double?)) { throw null; } public static Azure.Search.Documents.Models.SuggestResults SuggestResults(System.Collections.Generic.IReadOnlyList> results, double? coverage) { throw null; } + public static Azure.Search.Documents.Indexes.Models.SynonymMap SynonymMap(string name = null, string format = null, string synonyms = null, Azure.Search.Documents.Indexes.Models.SearchResourceEncryptionKey encryptionKey = null, string etag = null, System.Collections.Generic.IDictionary serializedAdditionalRawData = null) { throw null; } public static Azure.Search.Documents.Models.TextResult TextResult(double? searchScore = default(double?)) { throw null; } public static Azure.Search.Documents.Indexes.Models.TokenFilter TokenFilter(string oDataType, string name) { throw null; } public static Azure.Search.Documents.Models.VectorsDebugInfo VectorsDebugInfo(Azure.Search.Documents.Models.QueryResultDocumentSubscores subscores = null) { throw null; } diff --git a/sdk/search/Azure.Search.Documents/api/Azure.Search.Documents.netstandard2.0.cs b/sdk/search/Azure.Search.Documents/api/Azure.Search.Documents.netstandard2.0.cs index ce5c44b79dab..930584eb782f 100644 --- a/sdk/search/Azure.Search.Documents/api/Azure.Search.Documents.netstandard2.0.cs +++ b/sdk/search/Azure.Search.Documents/api/Azure.Search.Documents.netstandard2.0.cs @@ -106,7 +106,8 @@ public enum ServiceVersion V2020_06_30 = 1, V2023_11_01 = 2, V2024_07_01 = 3, - V2025_08_01_Preview = 4, + V2025_09_01 = 4, + V2025_08_01_Preview = 5, } } public static partial class SearchExtensions @@ -795,11 +796,10 @@ public partial class AnalyzeTextOptions : System.ClientModel.Primitives.IJsonMod { public AnalyzeTextOptions(string text) { } public 
AnalyzeTextOptions(string text, Azure.Search.Documents.Indexes.Models.LexicalAnalyzerName analyzerName) { } - public AnalyzeTextOptions(string text, Azure.Search.Documents.Indexes.Models.LexicalNormalizerName normalizerName) { } public AnalyzeTextOptions(string text, Azure.Search.Documents.Indexes.Models.LexicalTokenizerName tokenizerName) { } public Azure.Search.Documents.Indexes.Models.LexicalAnalyzerName? AnalyzerName { get { throw null; } } public System.Collections.Generic.IList CharFilters { get { throw null; } } - public Azure.Search.Documents.Indexes.Models.LexicalNormalizerName? NormalizerName { get { throw null; } } + public Azure.Search.Documents.Indexes.Models.LexicalNormalizerName? NormalizerName { get { throw null; } set { } } public string Text { get { throw null; } } public System.Collections.Generic.IList TokenFilters { get { throw null; } } public Azure.Search.Documents.Indexes.Models.LexicalTokenizerName? TokenizerName { get { throw null; } } @@ -4050,6 +4050,7 @@ protected virtual void JsonModelWriteCore(System.Text.Json.Utf8JsonWriter writer } public partial class SearchResourceEncryptionKey : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel { + public SearchResourceEncryptionKey(string keyName, string keyVersion, string vaultUri) { } public SearchResourceEncryptionKey(System.Uri vaultUri, string keyName, string keyVersion) { } public string ApplicationId { get { throw null; } set { } } public string ApplicationSecret { get { throw null; } set { } } @@ -5797,6 +5798,7 @@ public static partial class SearchModelFactory public static Azure.Search.Documents.Models.AutocompleteResults AutocompleteResults(double? coverage = default(double?), System.Collections.Generic.IEnumerable results = null) { throw null; } [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] public static Azure.Search.Documents.Models.AutocompleteResults AutocompleteResults(double? 
coverage, System.Collections.Generic.IReadOnlyList results) { throw null; } + public static Azure.Search.Documents.Indexes.Models.KnowledgeSource AzureBlobKnowledgeSource(string name = null, string description = null, string kind = null, string eTag = null, Azure.Search.Documents.Indexes.Models.SearchResourceEncryptionKey encryptionKey = null, System.Collections.Generic.IDictionary serializedAdditionalRawData = null, Azure.Search.Documents.Indexes.Models.AzureBlobKnowledgeSourceParameters azureBlobParameters = null) { throw null; } public static Azure.Search.Documents.Indexes.Models.AzureBlobKnowledgeSourceParameters AzureBlobKnowledgeSourceParameters(Azure.Search.Documents.Indexes.Models.SearchIndexerDataIdentity identity = null, string connectionString = null, string containerName = null, string folderPath = null, Azure.Search.Documents.Indexes.Models.VectorSearchVectorizer embeddingModel = null, Azure.Search.Documents.Indexes.Models.KnowledgeAgentModel chatCompletionModel = null, Azure.Search.Documents.Indexes.Models.IndexingSchedule ingestionSchedule = null, System.Collections.Generic.IReadOnlyDictionary createdResources = null, bool? 
disableImageVerbalization = default(bool?)) { throw null; } public static Azure.Search.Documents.Indexes.Models.CharFilter CharFilter(string oDataType, string name) { throw null; } public static Azure.Search.Documents.Indexes.Models.CognitiveServicesAccount CognitiveServicesAccount(string oDataType, string description) { throw null; } @@ -5805,6 +5807,8 @@ public static partial class SearchModelFactory public static Azure.Search.Documents.Models.DebugInfo DebugInfo(Azure.Search.Documents.Models.QueryRewritesDebugInfo queryRewrites = null) { throw null; } public static Azure.Search.Documents.Models.DocumentDebugInfo DocumentDebugInfo(Azure.Search.Documents.Models.SemanticDebugInfo semantic = null, Azure.Search.Documents.Models.VectorsDebugInfo vectors = null, System.Collections.Generic.IReadOnlyDictionary> innerHits = null) { throw null; } [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public static Azure.Search.Documents.Models.DocumentDebugInfo DocumentDebugInfo(Azure.Search.Documents.Models.VectorsDebugInfo vectors = null) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] public static Azure.Search.Documents.Models.FacetResult FacetResult(long? count = default(long?), System.Collections.Generic.IReadOnlyDictionary additionalProperties = null) { throw null; } public static Azure.Search.Documents.Models.FacetResult FacetResult(long? count = default(long?), double? 
sum = default(double?), System.Collections.Generic.IReadOnlyDictionary> facets = null, System.Collections.Generic.IReadOnlyDictionary additionalProperties = null) { throw null; } public static Azure.Search.Documents.Models.IndexDocumentsResult IndexDocumentsResult(System.Collections.Generic.IEnumerable results) { throw null; } @@ -5843,6 +5847,14 @@ public static partial class SearchModelFactory public static Azure.Search.Documents.Models.QueryRewritesDebugInfo QueryRewritesDebugInfo(Azure.Search.Documents.Models.QueryRewritesValuesDebugInfo text = null, System.Collections.Generic.IEnumerable vectors = null) { throw null; } public static Azure.Search.Documents.Models.QueryRewritesValuesDebugInfo QueryRewritesValuesDebugInfo(string inputQuery = null, System.Collections.Generic.IEnumerable rewrites = null) { throw null; } public static Azure.Search.Documents.Indexes.Models.ScoringFunction ScoringFunction(string type, string fieldName, double boost, Azure.Search.Documents.Indexes.Models.ScoringFunctionInterpolation? 
interpolation) { throw null; } + public static Azure.Search.Documents.Indexes.Models.SearchAlias SearchAlias(string name = null, System.Collections.Generic.IList indexes = null, string etag = null, System.Collections.Generic.IDictionary serializedAdditionalRawData = null) { throw null; } + public static Azure.Search.Documents.Indexes.Models.SearchIndex SearchIndex(string name = null, string description = null, System.Collections.Generic.IList fields = null, System.Collections.Generic.IList scoringProfiles = null, string defaultScoringProfile = null, Azure.Search.Documents.Indexes.Models.CorsOptions corsOptions = null, System.Collections.Generic.IList suggesters = null, System.Collections.Generic.IList analyzers = null, System.Collections.Generic.IList tokenizers = null, System.Collections.Generic.IList tokenFilters = null, System.Collections.Generic.IList charFilters = null, System.Collections.Generic.IList normalizers = null, Azure.Search.Documents.Indexes.Models.SearchResourceEncryptionKey encryptionKey = null, Azure.Search.Documents.Indexes.Models.SimilarityAlgorithm similarity = null, Azure.Search.Documents.Indexes.Models.SemanticSearch semanticSearch = null, Azure.Search.Documents.Indexes.Models.VectorSearch vectorSearch = null, Azure.Search.Documents.Models.SearchIndexPermissionFilterOption permissionFilterOption = default(Azure.Search.Documents.Models.SearchIndexPermissionFilterOption), string etag = null, System.Collections.Generic.IDictionary serializedAdditionalRawData = null) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public static Azure.Search.Documents.Indexes.Models.SearchIndex SearchIndex(string name = null, string description = null, System.Collections.Generic.IList fields = null, System.Collections.Generic.IList scoringProfiles = null, string defaultScoringProfile = null, Azure.Search.Documents.Indexes.Models.CorsOptions corsOptions = null, System.Collections.Generic.IList 
suggesters = null, System.Collections.Generic.IList analyzers = null, System.Collections.Generic.IList tokenizers = null, System.Collections.Generic.IList tokenFilters = null, System.Collections.Generic.IList charFilters = null, System.Collections.Generic.IList normalizers = null, Azure.Search.Documents.Indexes.Models.SearchResourceEncryptionKey encryptionKey = null, Azure.Search.Documents.Indexes.Models.SimilarityAlgorithm similarity = null, Azure.Search.Documents.Indexes.Models.SemanticSearch semanticSearch = null, Azure.Search.Documents.Indexes.Models.VectorSearch vectorSearch = null, string etag = null, System.Collections.Generic.IDictionary serializedAdditionalRawData = null) { throw null; } + public static Azure.Search.Documents.Indexes.Models.SearchIndexer SearchIndexer(string name = null, string description = null, string dataSourceName = null, string skillsetName = null, string targetIndexName = null, Azure.Search.Documents.Indexes.Models.IndexingSchedule schedule = null, Azure.Search.Documents.Indexes.Models.IndexingParameters parameters = null, System.Collections.Generic.IList fieldMappings = null, System.Collections.Generic.IList outputFieldMappings = null, bool? 
isDisabled = default(bool?), string etag = null, Azure.Search.Documents.Indexes.Models.SearchResourceEncryptionKey encryptionKey = null, Azure.Search.Documents.Indexes.Models.SearchIndexerCache cache = null, System.Collections.Generic.IDictionary serializedAdditionalRawData = null) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public static Azure.Search.Documents.Indexes.Models.SearchIndexer SearchIndexer(string name = null, string description = null, string dataSourceName = null, string skillsetName = null, string targetIndexName = null, Azure.Search.Documents.Indexes.Models.IndexingSchedule schedule = null, Azure.Search.Documents.Indexes.Models.IndexingParameters parameters = null, System.Collections.Generic.IList fieldMappings = null, System.Collections.Generic.IList outputFieldMappings = null, bool? isDisabled = default(bool?), string etag = null, Azure.Search.Documents.Indexes.Models.SearchResourceEncryptionKey encryptionKey = null, System.Collections.Generic.IDictionary serializedAdditionalRawData = null) { throw null; } + public static Azure.Search.Documents.Indexes.Models.SearchIndexerDataSourceConnection SearchIndexerDataSourceConnection(string name = null, string description = null, Azure.Search.Documents.Indexes.Models.SearchIndexerDataSourceType type = default(Azure.Search.Documents.Indexes.Models.SearchIndexerDataSourceType), string connectionString = null, Azure.Search.Documents.Indexes.Models.SearchIndexerDataContainer container = null, Azure.Search.Documents.Indexes.Models.DataChangeDetectionPolicy dataChangeDetectionPolicy = null, Azure.Search.Documents.Indexes.Models.DataDeletionDetectionPolicy dataDeletionDetectionPolicy = null, string etag = null, Azure.Search.Documents.Indexes.Models.SearchResourceEncryptionKey encryptionKey = null, System.Collections.Generic.IDictionary serializedAdditionalRawData = null) { throw null; } public static 
Azure.Search.Documents.Indexes.Models.SearchIndexerError SearchIndexerError(string key, string errorMessage, int statusCode, string name, string details, string documentationLink) { throw null; } public static Azure.Search.Documents.Indexes.Models.SearchIndexerLimits SearchIndexerLimits(System.TimeSpan? maxRunTime, long? maxDocumentExtractionSize, long? maxDocumentContentCharactersToExtract) { throw null; } public static Azure.Search.Documents.Indexes.Models.SearchIndexerSkill SearchIndexerSkill(string oDataType, string name, string description, string context, System.Collections.Generic.IList inputs, System.Collections.Generic.IList outputs) { throw null; } @@ -5850,8 +5862,11 @@ public static partial class SearchModelFactory public static Azure.Search.Documents.Indexes.Models.SearchIndexerStatus SearchIndexerStatus(Azure.Search.Documents.Indexes.Models.IndexerStatus status = Azure.Search.Documents.Indexes.Models.IndexerStatus.Unknown, Azure.Search.Documents.Indexes.Models.IndexerExecutionResult lastResult = null, System.Collections.Generic.IEnumerable executionHistory = null, Azure.Search.Documents.Indexes.Models.SearchIndexerLimits limits = null) { throw null; } [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] public static Azure.Search.Documents.Indexes.Models.SearchIndexerStatus SearchIndexerStatus(Azure.Search.Documents.Indexes.Models.IndexerStatus status, Azure.Search.Documents.Indexes.Models.IndexerExecutionResult lastResult, System.Collections.Generic.IReadOnlyList executionHistory, Azure.Search.Documents.Indexes.Models.SearchIndexerLimits limits) { throw null; } + [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] + public static Azure.Search.Documents.Indexes.Models.SearchIndexerStatus SearchIndexerStatus(string name = null, Azure.Search.Documents.Indexes.Models.IndexerStatus status = Azure.Search.Documents.Indexes.Models.IndexerStatus.Unknown, 
Azure.Search.Documents.Indexes.Models.IndexerExecutionResult lastResult = null, System.Collections.Generic.IEnumerable executionHistory = null, Azure.Search.Documents.Indexes.Models.SearchIndexerLimits limits = null) { throw null; } public static Azure.Search.Documents.Indexes.Models.SearchIndexerStatus SearchIndexerStatus(string name = null, Azure.Search.Documents.Indexes.Models.IndexerStatus status = Azure.Search.Documents.Indexes.Models.IndexerStatus.Unknown, Azure.Search.Documents.Indexes.Models.IndexerExecutionResult lastResult = null, System.Collections.Generic.IEnumerable executionHistory = null, Azure.Search.Documents.Indexes.Models.SearchIndexerLimits limits = null, Azure.Search.Documents.Indexes.Models.IndexerState currentState = null) { throw null; } public static Azure.Search.Documents.Indexes.Models.SearchIndexerWarning SearchIndexerWarning(string key, string message, string name, string details, string documentationLink) { throw null; } + public static Azure.Search.Documents.Indexes.Models.KnowledgeSource SearchIndexKnowledgeSource(string name = null, string description = null, string kind = null, string eTag = null, Azure.Search.Documents.Indexes.Models.SearchResourceEncryptionKey encryptionKey = null, System.Collections.Generic.IDictionary serializedAdditionalRawData = null, Azure.Search.Documents.Indexes.Models.SearchIndexKnowledgeSourceParameters searchIndexParameters = null) { throw null; } [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] public static Azure.Search.Documents.Indexes.Models.SearchIndexStatistics SearchIndexStatistics(long documentCount, long storageSize) { throw null; } public static Azure.Search.Documents.Indexes.Models.SearchIndexStatistics SearchIndexStatistics(long documentCount = (long)0, long storageSize = (long)0, long vectorIndexSize = (long)0) { throw null; } @@ -5889,6 +5904,7 @@ public static partial class SearchModelFactory public static 
Azure.Search.Documents.Indexes.Models.SimilarityAlgorithm SimilarityAlgorithm(string oDataType) { throw null; } public static Azure.Search.Documents.Models.SingleVectorFieldResult SingleVectorFieldResult(double? searchScore = default(double?), double? vectorSimilarity = default(double?)) { throw null; } public static Azure.Search.Documents.Models.SuggestResults SuggestResults(System.Collections.Generic.IReadOnlyList> results, double? coverage) { throw null; } + public static Azure.Search.Documents.Indexes.Models.SynonymMap SynonymMap(string name = null, string format = null, string synonyms = null, Azure.Search.Documents.Indexes.Models.SearchResourceEncryptionKey encryptionKey = null, string etag = null, System.Collections.Generic.IDictionary serializedAdditionalRawData = null) { throw null; } public static Azure.Search.Documents.Models.TextResult TextResult(double? searchScore = default(double?)) { throw null; } public static Azure.Search.Documents.Indexes.Models.TokenFilter TokenFilter(string oDataType, string name) { throw null; } public static Azure.Search.Documents.Models.VectorsDebugInfo VectorsDebugInfo(Azure.Search.Documents.Models.QueryResultDocumentSubscores subscores = null) { throw null; } diff --git a/sdk/search/Azure.Search.Documents/src/Azure.Search.Documents.csproj b/sdk/search/Azure.Search.Documents/src/Azure.Search.Documents.csproj index 37432c289ac8..8505df4b8bfb 100644 --- a/sdk/search/Azure.Search.Documents/src/Azure.Search.Documents.csproj +++ b/sdk/search/Azure.Search.Documents/src/Azure.Search.Documents.csproj @@ -1,9 +1,9 @@ Microsoft Azure.Search.Documents client library - 11.7.0-beta.8 + 11.8.0-beta.1 - 11.6.1 + 11.7.0 This is the Azure Cognitive Search client library for developing .NET applications with rich search experiences. 
It enables you to query your @@ -17,7 +17,6 @@ $(NoWarn); AZC0004; - AZC0035; @@ -31,7 +30,6 @@ - diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/BinaryQuantizationCompression.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/BinaryQuantizationCompression.cs index 221a09c1c146..cc54357e6229 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/BinaryQuantizationCompression.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/Models/BinaryQuantizationCompression.cs @@ -26,8 +26,14 @@ public BinaryQuantizationCompression(string compressionName) : base(compressionN /// Initializes a new instance of . /// The name to associate with this particular configuration. /// The name of the kind of compression method being configured for use with vector search. - /// If set to true, once the ordered set of results calculated using compressed vectors are obtained, they will be reranked again by recalculating the full-precision similarity scores. This will improve recall at the expense of latency. - /// Default oversampling factor. Oversampling will internally request more documents (specified by this multiplier) in the initial search. This increases the set of results that will be reranked using recomputed similarity scores from full-precision vectors. Minimum value is 1, meaning no oversampling (1x). This parameter can only be set when rerankWithOriginalVectors is true. Higher values improve recall at the expense of latency. + /// + /// If set to true, once the ordered set of results calculated using compressed vectors are obtained, they will be reranked again by recalculating the full-precision similarity scores. This will improve recall at the expense of latency. + /// For use with only service version 2024-07-01. If using 2025-09-01 or later, use RescoringOptions.rescoringEnabled. + /// + /// + /// Default oversampling factor. 
Oversampling will internally request more documents (specified by this multiplier) in the initial search. This increases the set of results that will be reranked using recomputed similarity scores from full-precision vectors. Minimum value is 1, meaning no oversampling (1x). This parameter can only be set when rerankWithOriginalVectors is true. Higher values improve recall at the expense of latency. + /// For use with only service version 2024-07-01. If using 2025-09-01 or later, use RescoringOptions.defaultOversampling. + /// /// Contains the options for rescoring. /// The number of dimensions to truncate the vectors to. Truncating the vectors reduces the size of the vectors and the amount of data that needs to be transferred during search. This can save storage cost and improve search performance at the expense of recall. It should be only used for embeddings trained with Matryoshka Representation Learning (MRL) such as OpenAI text-embedding-3-large (small). The default value is null, which means no truncation. /// Keeps track of any properties unknown to the library. diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScalarQuantizationCompression.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/ScalarQuantizationCompression.cs index 42d55a81cd28..610e5b9ae178 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/ScalarQuantizationCompression.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/Models/ScalarQuantizationCompression.cs @@ -26,8 +26,14 @@ public ScalarQuantizationCompression(string compressionName) : base(compressionN /// Initializes a new instance of . /// The name to associate with this particular configuration. /// The name of the kind of compression method being configured for use with vector search. - /// If set to true, once the ordered set of results calculated using compressed vectors are obtained, they will be reranked again by recalculating the full-precision similarity scores. 
This will improve recall at the expense of latency. - /// Default oversampling factor. Oversampling will internally request more documents (specified by this multiplier) in the initial search. This increases the set of results that will be reranked using recomputed similarity scores from full-precision vectors. Minimum value is 1, meaning no oversampling (1x). This parameter can only be set when rerankWithOriginalVectors is true. Higher values improve recall at the expense of latency. + /// + /// If set to true, once the ordered set of results calculated using compressed vectors are obtained, they will be reranked again by recalculating the full-precision similarity scores. This will improve recall at the expense of latency. + /// For use with only service version 2024-07-01. If using 2025-09-01 or later, use RescoringOptions.rescoringEnabled. + /// + /// + /// Default oversampling factor. Oversampling will internally request more documents (specified by this multiplier) in the initial search. This increases the set of results that will be reranked using recomputed similarity scores from full-precision vectors. Minimum value is 1, meaning no oversampling (1x). This parameter can only be set when rerankWithOriginalVectors is true. Higher values improve recall at the expense of latency. + /// For use with only service version 2024-07-01. If using 2025-09-01 or later, use RescoringOptions.defaultOversampling. + /// /// Contains the options for rescoring. /// The number of dimensions to truncate the vectors to. Truncating the vectors reduces the size of the vectors and the amount of data that needs to be transferred during search. This can save storage cost and improve search performance at the expense of recall. It should be only used for embeddings trained with Matryoshka Representation Learning (MRL) such as OpenAI text-embedding-3-large (small). The default value is null, which means no truncation. /// Keeps track of any properties unknown to the library. 
diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorSearchCompression.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorSearchCompression.cs index 8ac9ba0ec5ec..56de4ee83b8b 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorSearchCompression.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/Models/UnknownVectorSearchCompression.cs @@ -17,8 +17,14 @@ internal partial class UnknownVectorSearchCompression : VectorSearchCompression /// Initializes a new instance of . /// The name to associate with this particular configuration. /// The name of the kind of compression method being configured for use with vector search. - /// If set to true, once the ordered set of results calculated using compressed vectors are obtained, they will be reranked again by recalculating the full-precision similarity scores. This will improve recall at the expense of latency. - /// Default oversampling factor. Oversampling will internally request more documents (specified by this multiplier) in the initial search. This increases the set of results that will be reranked using recomputed similarity scores from full-precision vectors. Minimum value is 1, meaning no oversampling (1x). This parameter can only be set when rerankWithOriginalVectors is true. Higher values improve recall at the expense of latency. + /// + /// If set to true, once the ordered set of results calculated using compressed vectors are obtained, they will be reranked again by recalculating the full-precision similarity scores. This will improve recall at the expense of latency. + /// For use with only service version 2024-07-01. If using 2025-09-01 or later, use RescoringOptions.rescoringEnabled. + /// + /// + /// Default oversampling factor. Oversampling will internally request more documents (specified by this multiplier) in the initial search. 
This increases the set of results that will be reranked using recomputed similarity scores from full-precision vectors. Minimum value is 1, meaning no oversampling (1x). This parameter can only be set when rerankWithOriginalVectors is true. Higher values improve recall at the expense of latency. + /// For use with only service version 2024-07-01. If using 2025-09-01 or later, use RescoringOptions.defaultOversampling. + /// /// Contains the options for rescoring. /// The number of dimensions to truncate the vectors to. Truncating the vectors reduces the size of the vectors and the amount of data that needs to be transferred during search. This can save storage cost and improve search performance at the expense of recall. It should be only used for embeddings trained with Matryoshka Representation Learning (MRL) such as OpenAI text-embedding-3-large (small). The default value is null, which means no truncation. /// Keeps track of any properties unknown to the library. diff --git a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchCompression.cs b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchCompression.cs index 304104e94c55..63e7fb18dd95 100644 --- a/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchCompression.cs +++ b/sdk/search/Azure.Search.Documents/src/Generated/Models/VectorSearchCompression.cs @@ -62,8 +62,14 @@ protected VectorSearchCompression(string compressionName) /// Initializes a new instance of . /// The name to associate with this particular configuration. /// The name of the kind of compression method being configured for use with vector search. - /// If set to true, once the ordered set of results calculated using compressed vectors are obtained, they will be reranked again by recalculating the full-precision similarity scores. This will improve recall at the expense of latency. - /// Default oversampling factor. 
Oversampling will internally request more documents (specified by this multiplier) in the initial search. This increases the set of results that will be reranked using recomputed similarity scores from full-precision vectors. Minimum value is 1, meaning no oversampling (1x). This parameter can only be set when rerankWithOriginalVectors is true. Higher values improve recall at the expense of latency. + /// + /// If set to true, once the ordered set of results calculated using compressed vectors are obtained, they will be reranked again by recalculating the full-precision similarity scores. This will improve recall at the expense of latency. + /// For use with only service version 2024-07-01. If using 2025-09-01 or later, use RescoringOptions.rescoringEnabled. + /// + /// + /// Default oversampling factor. Oversampling will internally request more documents (specified by this multiplier) in the initial search. This increases the set of results that will be reranked using recomputed similarity scores from full-precision vectors. Minimum value is 1, meaning no oversampling (1x). This parameter can only be set when rerankWithOriginalVectors is true. Higher values improve recall at the expense of latency. + /// For use with only service version 2024-07-01. If using 2025-09-01 or later, use RescoringOptions.defaultOversampling. + /// /// Contains the options for rescoring. /// The number of dimensions to truncate the vectors to. Truncating the vectors reduces the size of the vectors and the amount of data that needs to be transferred during search. This can save storage cost and improve search performance at the expense of recall. It should be only used for embeddings trained with Matryoshka Representation Learning (MRL) such as OpenAI text-embedding-3-large (small). The default value is null, which means no truncation. /// Keeps track of any properties unknown to the library. 
@@ -84,9 +90,15 @@ internal VectorSearchCompression() } /// The name of the kind of compression method being configured for use with vector search. internal VectorSearchCompressionKind Kind { get; set; } - /// If set to true, once the ordered set of results calculated using compressed vectors are obtained, they will be reranked again by recalculating the full-precision similarity scores. This will improve recall at the expense of latency. + /// + /// If set to true, once the ordered set of results calculated using compressed vectors are obtained, they will be reranked again by recalculating the full-precision similarity scores. This will improve recall at the expense of latency. + /// For use with only service version 2024-07-01. If using 2025-09-01 or later, use RescoringOptions.rescoringEnabled. + /// public bool? RerankWithOriginalVectors { get; set; } - /// Default oversampling factor. Oversampling will internally request more documents (specified by this multiplier) in the initial search. This increases the set of results that will be reranked using recomputed similarity scores from full-precision vectors. Minimum value is 1, meaning no oversampling (1x). This parameter can only be set when rerankWithOriginalVectors is true. Higher values improve recall at the expense of latency. + /// + /// Default oversampling factor. Oversampling will internally request more documents (specified by this multiplier) in the initial search. This increases the set of results that will be reranked using recomputed similarity scores from full-precision vectors. Minimum value is 1, meaning no oversampling (1x). This parameter can only be set when rerankWithOriginalVectors is true. Higher values improve recall at the expense of latency. + /// For use with only service version 2024-07-01. If using 2025-09-01 or later, use RescoringOptions.defaultOversampling. + /// public double? DefaultOversampling { get; set; } /// Contains the options for rescoring. 
public RescoringOptions RescoringOptions { get; set; } diff --git a/sdk/search/Azure.Search.Documents/src/Indexes/Models/AnalyzeTextOptions.cs b/sdk/search/Azure.Search.Documents/src/Indexes/Models/AnalyzeTextOptions.cs index b986eb027945..b1bd9bcc897a 100644 --- a/sdk/search/Azure.Search.Documents/src/Indexes/Models/AnalyzeTextOptions.cs +++ b/sdk/search/Azure.Search.Documents/src/Indexes/Models/AnalyzeTextOptions.cs @@ -28,15 +28,6 @@ public AnalyzeTextOptions(string text, LexicalAnalyzerName analyzerName) : this( public AnalyzeTextOptions(string text, LexicalTokenizerName tokenizerName) : this(text) => TokenizerName = tokenizerName; - /// - /// Initializes a new instance of AnalyzeRequest. - /// - /// Required text to break into tokens. - /// The name of the tokenizer to use to break the given . - /// is null. - public AnalyzeTextOptions(string text, LexicalNormalizerName normalizerName) : this(text) - => NormalizerName = normalizerName; - /// The name of the analyzer to use to break the given text. [CodeGenMember("Analyzer")] public LexicalAnalyzerName? AnalyzerName { get; } @@ -47,7 +38,7 @@ public AnalyzeTextOptions(string text, LexicalNormalizerName normalizerName) : t /// The name of the normalizer to use to normalize the given text. [CodeGenMember("Normalizer")] - public LexicalNormalizerName? NormalizerName { get; } + public LexicalNormalizerName? NormalizerName { get; set; } /// An optional list of token filters to use when breaking the given text. 
public IList TokenFilters { get; } diff --git a/sdk/search/Azure.Search.Documents/src/Indexes/Models/SearchIndexerDataSourceConnection.cs b/sdk/search/Azure.Search.Documents/src/Indexes/Models/SearchIndexerDataSourceConnection.cs index 1b120f934d37..be3926fa03ba 100644 --- a/sdk/search/Azure.Search.Documents/src/Indexes/Models/SearchIndexerDataSourceConnection.cs +++ b/sdk/search/Azure.Search.Documents/src/Indexes/Models/SearchIndexerDataSourceConnection.cs @@ -31,6 +31,39 @@ public SearchIndexerDataSourceConnection(string name, SearchIndexerDataSourceTyp IndexerPermissionOptions = new ChangeTrackingList(); } + /// Initializes a new instance of . + /// The name of the datasource. + /// The description of the datasource. + /// The type of the datasource. + /// Credentials for the datasource. + /// The data container for the datasource. + /// + /// The data change detection policy for the datasource. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + /// + /// The data deletion detection policy for the datasource. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include . + /// + /// The ETag of the data source. + /// A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your datasource definition when you want full assurance that no one, not even Microsoft, can decrypt your data source definition. Once you have encrypted your data source definition, it will always remain encrypted. The search service will ignore attempts to set this property to null. 
You can change this property as needed if you want to rotate your encryption key; Your datasource definition will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. + /// Keeps track of any properties unknown to the library. + internal SearchIndexerDataSourceConnection(string name, string description, SearchIndexerDataSourceType type, string connectionString, SearchIndexerDataContainer container, DataChangeDetectionPolicy dataChangeDetectionPolicy, DataDeletionDetectionPolicy dataDeletionDetectionPolicy, string etag, SearchResourceEncryptionKey encryptionKey, IDictionary serializedAdditionalRawData) + { + Name = name; + Description = description; + Type = type; + ConnectionString = connectionString; + Container = container; + DataChangeDetectionPolicy = dataChangeDetectionPolicy; + DataDeletionDetectionPolicy = dataDeletionDetectionPolicy; + _etag = etag; + EncryptionKey = encryptionKey; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + /// /// The of the . /// diff --git a/sdk/search/Azure.Search.Documents/src/Indexes/Models/SearchResourceEncryptionKey.cs b/sdk/search/Azure.Search.Documents/src/Indexes/Models/SearchResourceEncryptionKey.cs index aba50fb7e0dc..72ed3248a95c 100644 --- a/sdk/search/Azure.Search.Documents/src/Indexes/Models/SearchResourceEncryptionKey.cs +++ b/sdk/search/Azure.Search.Documents/src/Indexes/Models/SearchResourceEncryptionKey.cs @@ -24,6 +24,22 @@ public SearchResourceEncryptionKey(Uri vaultUri, string keyName, string keyVersi KeyVersion = keyVersion ?? throw new ArgumentNullException(nameof(keyVersion)); } + /// Initializes a new instance of . + /// The name of your Azure Key Vault key to be used to encrypt your data at rest. + /// The version of your Azure Key Vault key to be used to encrypt your data at rest. 
+ /// The URI of your Azure Key Vault, also referred to as DNS name, that contains the key to be used to encrypt your data at rest. An example URI might be `https://my-keyvault-name.vault.azure.net`. + /// , or is null. + public SearchResourceEncryptionKey(string keyName, string keyVersion, string vaultUri) + { + Argument.AssertNotNull(keyName, nameof(keyName)); + Argument.AssertNotNull(keyVersion, nameof(keyVersion)); + Argument.AssertNotNull(vaultUri, nameof(vaultUri)); + + KeyName = keyName; + KeyVersion = keyVersion; + _vaultUri = vaultUri; + } + [CodeGenMember("VaultUri")] private string _vaultUri; diff --git a/sdk/search/Azure.Search.Documents/src/Models/SearchModelFactory.cs b/sdk/search/Azure.Search.Documents/src/Models/SearchModelFactory.cs index 846358ecfcbc..51f2473cad93 100644 --- a/sdk/search/Azure.Search.Documents/src/Models/SearchModelFactory.cs +++ b/sdk/search/Azure.Search.Documents/src/Models/SearchModelFactory.cs @@ -215,6 +215,28 @@ public static SearchIndexerStatus SearchIndexerStatus(IndexerStatus status = def return new SearchIndexerStatus(default, status, lastResult, executionHistory?.ToList(), limits, default, serializedAdditionalRawData: null); } + /// Initializes a new instance of . + /// The name of the indexer. + /// Overall indexer status. + /// The result of the most recent or an in-progress indexer execution. + /// History of the recent indexer executions, sorted in reverse chronological order. + /// The execution limits for the indexer. + /// A new instance for mocking. 
+ [EditorBrowsable(EditorBrowsableState.Never)] + public static SearchIndexerStatus SearchIndexerStatus(string name = null, IndexerStatus status = default, IndexerExecutionResult lastResult = null, IEnumerable executionHistory = null, SearchIndexerLimits limits = null) + { + executionHistory ??= new List(); + + return new SearchIndexerStatus( + name, + status, + lastResult, + executionHistory?.ToList(), + limits, + null, + serializedAdditionalRawData: null); + } + /// Initializes a new instance of SearchIndexerWarning. /// The key of the item which generated a warning. /// The message describing the warning that occurred while processing the item. @@ -461,5 +483,261 @@ public static IndexerChangeTrackingState IndexerChangeTrackingState( allDocumentsFinalState, resetDocumentsInitialState, resetDocumentsFinalState); + + /// Initializes a new instance of SearchIndex. + /// The name of the index. + /// The description of the index. + /// The fields of the index. + /// The scoring profiles for the index. + /// The name of the scoring profile to use if none is specified in the query. If this property is not set and no scoring profile is specified in the query, then default scoring (tf-idf) will be used. + /// Options to control Cross-Origin Resource Sharing (CORS) for the index. + /// The suggesters for the index. + /// + /// The analyzers for the index. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , and . + /// + /// + /// The tokenizers for the index. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , , , , , , , and . 
+ /// + /// + /// The token filters for the index. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , and . + /// + /// + /// The character filters for the index. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + /// + /// The normalizers for the index. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include . + /// + /// A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your data when you want full assurance that no one, not even Microsoft, can decrypt your data. Once you have encrypted your data, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your data will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. + /// + /// The type of similarity algorithm to be used when scoring and ranking the documents matching a search query. The similarity algorithm can only be defined at index creation time and cannot be modified on existing indexes. If null, the ClassicSimilarity algorithm is used. + /// Please note is the base class. 
According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + /// Defines parameters for a search index that influence semantic capabilities. + /// Contains configuration options related to vector search. + /// The ETag of the index. + /// Keeps track of any properties unknown to the library. + [EditorBrowsable(EditorBrowsableState.Never)] + public static SearchIndex SearchIndex(string name = null, string description = null, IList fields = null, IList scoringProfiles = null, string defaultScoringProfile = null, CorsOptions corsOptions = null, IList suggesters = null, IList analyzers = null, IList tokenizers = null, IList tokenFilters = null, IList charFilters = null, IList normalizers = null, SearchResourceEncryptionKey encryptionKey = null, SimilarityAlgorithm similarity = null, SemanticSearch semanticSearch = null, VectorSearch vectorSearch = null, string etag = null, IDictionary serializedAdditionalRawData = null) + { + fields ??= new List(); + scoringProfiles ??= new List(); + suggesters ??= new List(); + analyzers ??= new List(); + tokenizers ??= new List(); + tokenFilters ??= new List(); + charFilters ??= new List(); + normalizers ??= new List(); + serializedAdditionalRawData ??= new Dictionary(); + + return new SearchIndex(name, description, fields, scoringProfiles, defaultScoringProfile, corsOptions, suggesters, analyzers, tokenizers, tokenFilters, charFilters, normalizers, encryptionKey, similarity, semanticSearch, vectorSearch, null, etag, serializedAdditionalRawData); + } + + /// Initializes a new instance of SearchIndex. + /// The name of the index. + /// The description of the index. + /// The fields of the index. + /// The scoring profiles for the index. + /// The name of the scoring profile to use if none is specified in the query. 
If this property is not set and no scoring profile is specified in the query, then default scoring (tf-idf) will be used. + /// Options to control Cross-Origin Resource Sharing (CORS) for the index. + /// The suggesters for the index. + /// + /// The analyzers for the index. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , and . + /// + /// + /// The tokenizers for the index. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , , , , , , , and . + /// + /// + /// The token filters for the index. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include , , , , , , , , , , , , , , , , , , , , , and . + /// + /// + /// The character filters for the index. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + /// + /// The normalizers for the index. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include . + /// + /// A description of an encryption key that you create in Azure Key Vault. 
This key is used to provide an additional level of encryption-at-rest for your data when you want full assurance that no one, not even Microsoft, can decrypt your data. Once you have encrypted your data, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your data will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. + /// + /// The type of similarity algorithm to be used when scoring and ranking the documents matching a search query. The similarity algorithm can only be defined at index creation time and cannot be modified on existing indexes. If null, the ClassicSimilarity algorithm is used. + /// Please note is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes. + /// The available derived classes include and . + /// + /// Defines parameters for a search index that influence semantic capabilities. + /// Contains configuration options related to vector search. + /// A value indicating whether permission filtering is enabled for the index. + /// The ETag of the index. + /// Keeps track of any properties unknown to the library. 
+ public static SearchIndex SearchIndex(string name = null, string description = null, IList fields = null, IList scoringProfiles = null, string defaultScoringProfile = null, CorsOptions corsOptions = null, IList suggesters = null, IList analyzers = null, IList tokenizers = null, IList tokenFilters = null, IList charFilters = null, IList normalizers = null, SearchResourceEncryptionKey encryptionKey = null, SimilarityAlgorithm similarity = null, SemanticSearch semanticSearch = null, VectorSearch vectorSearch = null, SearchIndexPermissionFilterOption permissionFilterOption = default, string etag = null, IDictionary serializedAdditionalRawData = null) + { + fields ??= new List(); + scoringProfiles ??= new List(); + suggesters ??= new List(); + analyzers ??= new List(); + tokenizers ??= new List(); + tokenFilters ??= new List(); + charFilters ??= new List(); + normalizers ??= new List(); + serializedAdditionalRawData ??= new Dictionary(); + + return new SearchIndex(name, description, fields, scoringProfiles, defaultScoringProfile, corsOptions, suggesters, analyzers, tokenizers, tokenFilters, charFilters, normalizers, encryptionKey, similarity, semanticSearch, vectorSearch, permissionFilterOption, etag, serializedAdditionalRawData); + } + + /// Initializes a new instance of . + /// The name of the datasource. + /// The description of the datasource. + /// The type of the datasource. + /// Credentials for the datasource. + /// The data container for the datasource. + /// + /// The data change detection policy for the datasource. + /// The available derived classes include and . + /// + /// + /// The data deletion detection policy for the datasource. + /// The available derived classes include . + /// + /// The ETag of the data source. + /// A description of an encryption key that you create in Azure Key Vault. 
This key is used to provide an additional level of encryption-at-rest for your datasource definition when you want full assurance that no one, not even Microsoft, can decrypt your data source definition. Once you have encrypted your data source definition, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your datasource definition will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. + /// Keeps track of any properties unknown to the library. + public static SearchIndexerDataSourceConnection SearchIndexerDataSourceConnection(string name = null, string description = null, SearchIndexerDataSourceType type = default, string connectionString = null, SearchIndexerDataContainer container = null, DataChangeDetectionPolicy dataChangeDetectionPolicy = null, DataDeletionDetectionPolicy dataDeletionDetectionPolicy = null, string etag = null, SearchResourceEncryptionKey encryptionKey = null, IDictionary serializedAdditionalRawData = null) + { + serializedAdditionalRawData ??= new Dictionary(); + + return new SearchIndexerDataSourceConnection(name, description, type, connectionString, container, dataChangeDetectionPolicy, dataDeletionDetectionPolicy, etag, encryptionKey, serializedAdditionalRawData); + } + + /// Initializes a new instance of SearchIndexer. + /// The name of the indexer. + /// The description of the indexer. + /// The name of the datasource from which this indexer reads data. + /// The name of the skillset executing with this indexer. + /// The name of the index to which this indexer writes data. + /// The schedule for this indexer. + /// Parameters for indexer execution. + /// Defines mappings between fields in the data source and corresponding target fields in the index. 
+ /// Output field mappings are applied after enrichment and immediately before indexing. + /// A value indicating whether the indexer is disabled. Default is false. + /// The ETag of the indexer. + /// A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your indexer definition (as well as indexer execution status) when you want full assurance that no one, not even Microsoft, can decrypt them. Once you have encrypted your indexer definition, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your indexer definition (and indexer execution status) will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. + /// Keeps track of any properties unknown to the library. + [EditorBrowsable(EditorBrowsableState.Never)] + public static SearchIndexer SearchIndexer(string name = null, string description = null, string dataSourceName = null, string skillsetName = null, string targetIndexName = null, IndexingSchedule schedule = null, IndexingParameters parameters = null, IList fieldMappings = null, IList outputFieldMappings = null, bool? isDisabled = null, string etag = null, SearchResourceEncryptionKey encryptionKey = null, IDictionary serializedAdditionalRawData = null) + { + fieldMappings ??= new List(); + outputFieldMappings ??= new List(); + serializedAdditionalRawData ??= new Dictionary(); + + return new SearchIndexer(name, description, dataSourceName, skillsetName, targetIndexName, schedule, parameters, fieldMappings, outputFieldMappings, isDisabled, etag, encryptionKey, null, serializedAdditionalRawData); + } + + /// Initializes a new instance of SearchIndexer. + /// The name of the indexer. + /// The description of the indexer. 
+ /// The name of the datasource from which this indexer reads data. + /// The name of the skillset executing with this indexer. + /// The name of the index to which this indexer writes data. + /// The schedule for this indexer. + /// Parameters for indexer execution. + /// Defines mappings between fields in the data source and corresponding target fields in the index. + /// Output field mappings are applied after enrichment and immediately before indexing. + /// A value indicating whether the indexer is disabled. Default is false. + /// The ETag of the indexer. + /// A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your indexer definition (as well as indexer execution status) when you want full assurance that no one, not even Microsoft, can decrypt them. Once you have encrypted your indexer definition, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your indexer definition (and indexer execution status) will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. + /// Adds caching to an enrichment pipeline to allow for incremental modification steps without having to rebuild the index every time. + /// Keeps track of any properties unknown to the library. + public static SearchIndexer SearchIndexer(string name = null, string description = null, string dataSourceName = null, string skillsetName = null, string targetIndexName = null, IndexingSchedule schedule = null, IndexingParameters parameters = null, IList fieldMappings = null, IList outputFieldMappings = null, bool? 
isDisabled = null, string etag = null, SearchResourceEncryptionKey encryptionKey = null, SearchIndexerCache cache = null, IDictionary serializedAdditionalRawData = null) + { + fieldMappings ??= new List(); + outputFieldMappings ??= new List(); + serializedAdditionalRawData ??= new Dictionary(); + + return new SearchIndexer(name, description, dataSourceName, skillsetName, targetIndexName, schedule, parameters, fieldMappings, outputFieldMappings, isDisabled, etag, encryptionKey, cache, serializedAdditionalRawData); + } + + /// Initializes a new instance of . + /// The name of the synonym map. + /// The format of the synonym map. Only the 'solr' format is currently supported. + /// A series of synonym rules in the specified synonym map format. The rules must be separated by newlines. + /// A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your data when you want full assurance that no one, not even Microsoft, can decrypt your data. Once you have encrypted your data, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your data will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. + /// The ETag of the synonym map. + /// Keeps track of any properties unknown to the library. + public static SynonymMap SynonymMap(string name = null, string format = null, string synonyms = null, SearchResourceEncryptionKey encryptionKey = null, string etag = null, IDictionary serializedAdditionalRawData = null) + { + serializedAdditionalRawData ??= new Dictionary(); + + return new SynonymMap(name, format, synonyms, encryptionKey, etag, serializedAdditionalRawData); + } + + /// Initializes a new instance of . + /// The name of the alias. 
+ /// The name of the index this alias maps to. Only one index name may be specified. + /// The ETag of the alias. + /// Keeps track of any properties unknown to the library. + public static SearchAlias SearchAlias(string name = null, IList indexes = null, string etag = null, IDictionary serializedAdditionalRawData = null) + { + indexes ??= new List(); + serializedAdditionalRawData ??= new Dictionary(); + + return new SearchAlias(name, indexes, etag, serializedAdditionalRawData); + } + + /// Initializes a new instance of . + /// The name of the knowledge source. + /// Optional user-defined description. + /// The type of the knowledge source. + /// The ETag of the agent. + /// A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your agent definition when you want full assurance that no one, not even Microsoft, can decrypt them. Once you have encrypted your agent definition, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your agent definition will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. + /// Keeps track of any properties unknown to the library. + /// The type of the knowledge source. 
+ public static KnowledgeSource AzureBlobKnowledgeSource(string name = null, string description = null, string kind = null, string eTag = null, SearchResourceEncryptionKey encryptionKey = null, IDictionary serializedAdditionalRawData = null, AzureBlobKnowledgeSourceParameters azureBlobParameters = null) + { + serializedAdditionalRawData ??= new Dictionary(); + + return new AzureBlobKnowledgeSource(name, description, kind, eTag, encryptionKey, serializedAdditionalRawData, azureBlobParameters); + } + + /// Initializes a new instance of . + /// The name of the knowledge source. + /// Optional user-defined description. + /// The type of the knowledge source. + /// The ETag of the agent. + /// A description of an encryption key that you create in Azure Key Vault. This key is used to provide an additional level of encryption-at-rest for your agent definition when you want full assurance that no one, not even Microsoft, can decrypt them. Once you have encrypted your agent definition, it will always remain encrypted. The search service will ignore attempts to set this property to null. You can change this property as needed if you want to rotate your encryption key; Your agent definition will be unaffected. Encryption with customer-managed keys is not available for free search services, and is only available for paid services created on or after January 1, 2019. + /// Keeps track of any properties unknown to the library. + /// The parameters for the knowledge source. 
+ public static KnowledgeSource SearchIndexKnowledgeSource(string name = null, string description = null, string kind = null, string eTag = null, SearchResourceEncryptionKey encryptionKey = null, IDictionary serializedAdditionalRawData = null, SearchIndexKnowledgeSourceParameters searchIndexParameters = null) + { + serializedAdditionalRawData ??= new Dictionary(); + + return new SearchIndexKnowledgeSource(name, description, kind, eTag, encryptionKey, serializedAdditionalRawData, searchIndexParameters); + } + + /// Initializes a new instance of . + /// Contains debugging information specific to vector and hybrid search. + /// A new instance for mocking. + [EditorBrowsable(EditorBrowsableState.Never)] + public static DocumentDebugInfo DocumentDebugInfo(VectorsDebugInfo vectors = null) + { + return new DocumentDebugInfo(null, vectors, null, serializedAdditionalRawData: null); + } } } diff --git a/sdk/search/Azure.Search.Documents/src/Models/SearchResults.cs b/sdk/search/Azure.Search.Documents/src/Models/SearchResults.cs index 4d5441ac6d63..932017c7e11a 100644 --- a/sdk/search/Azure.Search.Documents/src/Models/SearchResults.cs +++ b/sdk/search/Azure.Search.Documents/src/Models/SearchResults.cs @@ -27,6 +27,7 @@ internal partial class SearchDocumentsResult { } /// /// Response containing search results from an index. /// + [SuppressMessage("Usage", "AZC0035:Output model type 'SearchResults' should have a corresponding method in a model factory class.", Justification = "Model factory method exists in SearchModelFactory but analyzer isn’t recognizing it")] public abstract class SearchResults { /// @@ -93,7 +94,7 @@ public abstract class SearchResults /// private protected SearchClient _pagingClient; - private protected SearchResults() { } + internal SearchResults() { } /// /// Get all of the s synchronously. 
diff --git a/sdk/search/Azure.Search.Documents/src/Models/SuggestResults.cs b/sdk/search/Azure.Search.Documents/src/Models/SuggestResults.cs index a018a354d678..b4bf92a48df1 100644 --- a/sdk/search/Azure.Search.Documents/src/Models/SuggestResults.cs +++ b/sdk/search/Azure.Search.Documents/src/Models/SuggestResults.cs @@ -3,6 +3,7 @@ using System.Collections.Generic; using System.Collections.ObjectModel; +using System.Diagnostics.CodeAnalysis; using System.IO; using System.Text.Json; using System.Threading; @@ -25,6 +26,7 @@ internal partial class SuggestDocumentsResult { } /// The .NET type that maps to the index schema. Instances of this type can /// be retrieved as documents from the index. /// + [SuppressMessage("Usage", "AZC0035:Output model type 'SuggestResults' should have a corresponding method in a model factory class.", Justification = "Model factory method exists in SearchModelFactory but analyzer isn’t recognizing it")] public partial class SuggestResults { /// diff --git a/sdk/search/Azure.Search.Documents/src/SearchClientOptions.cs b/sdk/search/Azure.Search.Documents/src/SearchClientOptions.cs index 016f4aa7a362..e1adce6132f0 100644 --- a/sdk/search/Azure.Search.Documents/src/SearchClientOptions.cs +++ b/sdk/search/Azure.Search.Documents/src/SearchClientOptions.cs @@ -41,10 +41,15 @@ public enum ServiceVersion /// V2024_07_01 = 3, + /// + /// The 2025-09-01 version of the Azure Cognitive Search service. + /// + V2025_09_01 = 4, + /// /// The 2025-08-01-preview version of the Azure Cognitive Search service. 
/// - V2025_08_01_Preview = 4, + V2025_08_01_Preview = 5, #pragma warning restore CA1707 } @@ -206,6 +211,7 @@ public static SearchClientOptions.ServiceVersion Validate(this SearchClientOptio SearchClientOptions.ServiceVersion.V2020_06_30 => version, SearchClientOptions.ServiceVersion.V2023_11_01 => version, SearchClientOptions.ServiceVersion.V2024_07_01 => version, + SearchClientOptions.ServiceVersion.V2025_09_01 => version, SearchClientOptions.ServiceVersion.V2025_08_01_Preview => version, _ => throw CreateInvalidVersionException(version) }; @@ -231,6 +237,7 @@ public static string ToVersionString(this SearchClientOptions.ServiceVersion ver SearchClientOptions.ServiceVersion.V2020_06_30 => "2020-06-30", SearchClientOptions.ServiceVersion.V2023_11_01 => "2023-11-01", SearchClientOptions.ServiceVersion.V2024_07_01 => "2024-07-01", + SearchClientOptions.ServiceVersion.V2025_09_01 => "2025-09-01", SearchClientOptions.ServiceVersion.V2025_08_01_Preview => "2025-08-01-preview", _ => throw CreateInvalidVersionException(version) }; diff --git a/sdk/search/Azure.Search.Documents/src/autorest.md b/sdk/search/Azure.Search.Documents/src/autorest.md index ef11d1304ca7..5ca2f0ca33fc 100644 --- a/sdk/search/Azure.Search.Documents/src/autorest.md +++ b/sdk/search/Azure.Search.Documents/src/autorest.md @@ -107,6 +107,26 @@ directive: - remove-model: NGramTokenFilter ``` +### Retain `rerankWithOriginalVectors` and `defaultOversampling` in `VectorSearchCompressionConfiguration` + +```yaml +directive: +- from: "searchservice.json" + where: $.definitions.VectorSearchCompressionConfiguration + transform: > + $.properties.rerankWithOriginalVectors = { + "type": "boolean", + "description": "If set to true, once the ordered set of results calculated using compressed vectors are obtained, they will be reranked again by recalculating the full-precision similarity scores. This will improve recall at the expense of latency.\nFor use with only service version 2024-07-01. 
If using 2025-09-01 or later, use RescoringOptions.rescoringEnabled.", + "x-nullable": true + }; + $.properties.defaultOversampling = { + "type": "number", + "format": "double", + "description": "Default oversampling factor. Oversampling will internally request more documents (specified by this multiplier) in the initial search. This increases the set of results that will be reranked using recomputed similarity scores from full-precision vectors. Minimum value is 1, meaning no oversampling (1x). This parameter can only be set when rerankWithOriginalVectors is true. Higher values improve recall at the expense of latency.\nFor use with only service version 2024-07-01. If using 2025-09-01 or later, use RescoringOptions.defaultOversampling.", + "x-nullable": true + }; +``` + ## Renaming models after the AI Studio rebrand to AI Foundry These should eventually be fixed in the swagger files. ```yaml diff --git a/sdk/search/Azure.Search.Documents/tests/Models/DataSourceTests.cs b/sdk/search/Azure.Search.Documents/tests/Models/DataSourceTests.cs index 454b4dac38f3..ef4235a2c4fb 100644 --- a/sdk/search/Azure.Search.Documents/tests/Models/DataSourceTests.cs +++ b/sdk/search/Azure.Search.Documents/tests/Models/DataSourceTests.cs @@ -13,7 +13,7 @@ public class DataSourceTests [TestCase("\"0123abcd\"", "\"0123abcd\"")] public void ParsesETag(string value, string expected) { - SearchIndexerDataSourceConnection sut = new(null, null, SearchIndexerDataSourceType.AzureBlob, null, null, null, null, null, null, null, value, null, serializedAdditionalRawData: null); + SearchIndexerDataSourceConnection sut = new(null, null, SearchIndexerDataSourceType.AzureBlob, connectionString: null, null, null, null, value, null, serializedAdditionalRawData: null); Assert.AreEqual(expected, sut.ETag?.ToString()); } } diff --git a/sdk/search/Azure.Search.Documents/tests/SearchIndexClientTests.cs b/sdk/search/Azure.Search.Documents/tests/SearchIndexClientTests.cs index 8dfa8b7c2209..1db2d7b10138 100644 
--- a/sdk/search/Azure.Search.Documents/tests/SearchIndexClientTests.cs +++ b/sdk/search/Azure.Search.Documents/tests/SearchIndexClientTests.cs @@ -475,7 +475,10 @@ public async Task AnalyzeTextWithNormalizer() SearchIndexClient client = resources.GetIndexClient(); - AnalyzeTextOptions request = new("I dARe YoU tO reAd It IN A nORmAl vOiCE.", LexicalNormalizerName.Lowercase); + AnalyzeTextOptions request = new("I dARe YoU tO reAd It IN A nORmAl vOiCE.") + { + NormalizerName = LexicalNormalizerName.Lowercase + }; Response> result = await client.AnalyzeTextAsync(resources.IndexName, request); IReadOnlyList tokens = result.Value; @@ -483,7 +486,10 @@ public async Task AnalyzeTextWithNormalizer() Assert.AreEqual(1, tokens.Count); Assert.AreEqual("i dare you to read it in a normal voice.", tokens[0].Token); - request = new("Item ① in my ⑽ point rant is that 75⁰F is uncomfortably warm.", LexicalNormalizerName.AsciiFolding); + request = new("Item ① in my ⑽ point rant is that 75⁰F is uncomfortably warm.") + { + NormalizerName = LexicalNormalizerName.AsciiFolding + }; result = await client.AnalyzeTextAsync(resources.IndexName, request); tokens = result.Value;