
[Multiple DataSource] Do not support import data source object to Local cluster when not enable data source #6395

Merged · 21 commits · Apr 16, 2024
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -67,6 +67,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
- [Multiple Datasource] Add icon in datasource table page to show the default datasource ([#6231](https://github.com/opensearch-project/OpenSearch-Dashboards/pull/6231))
- [Multiple Datasource] Add TLS configuration for multiple data sources ([#6171](https://github.com/opensearch-project/OpenSearch-Dashboards/pull/6171))
- [Multiple Datasource] Add multi selectable data source component ([#6211](https://github.com/opensearch-project/OpenSearch-Dashboards/pull/6211))
- [Multiple Datasource] Do not support import data source object to Local cluster when not enable data source ([#6395](https://github.com/opensearch-project/OpenSearch-Dashboards/pull/6395))
- [Multiple Datasource] Refactor data source menu and interface to allow cleaner selection of component and related configurations ([#6256](https://github.com/opensearch-project/OpenSearch-Dashboards/pull/6256))
- [Multiple Datasource] Allow top nav menu to mount data source menu for use case when both menus are mounted ([#6268](https://github.com/opensearch-project/OpenSearch-Dashboards/pull/6268))
- [Workspace] Add create workspace page ([#6179](https://github.com/opensearch-project/OpenSearch-Dashboards/pull/6179))
105 changes: 103 additions & 2 deletions src/core/server/saved_objects/import/import_saved_objects.test.ts
@@ -96,11 +96,12 @@ describe('#importSavedObjectsFromStream', () => {
let savedObjectsClient: jest.Mocked<SavedObjectsClientContract>;
let typeRegistry: jest.Mocked<ISavedObjectTypeRegistry>;
const namespace = 'some-namespace';
const testDataSourceId = 'some-datasource';
const testDataSourceId = uuidv4();

const setupOptions = (
createNewCopies: boolean = false,
dataSourceId: string | undefined = undefined
dataSourceId: string | undefined = undefined,
dataSourceEnabled: boolean | undefined = false
): SavedObjectsImportOptions => {
readStream = new Readable();
savedObjectsClient = savedObjectsClientMock.create();
@@ -135,6 +136,17 @@
attributes: { title: 'some-title' },
};
};

const createDataSourceObject = (): SavedObject<{
title: string;
}> => {
return {
type: 'data-source',
id: uuidv4(),
references: [],
attributes: { title: 'some-title' },
};
};
const createError = (): SavedObjectsImportError => {
const title = 'some-title';
return {
@@ -589,5 +601,94 @@
const expectedErrors = errors.map(({ type, id }) => expect.objectContaining({ type, id }));
expect(result).toEqual({ success: false, successCount: 0, errors: expectedErrors });
});

test('early return if import data source objects to non-MDS cluster', async () => {
const options = setupOptions(false, testDataSourceId, false);
const dsObj = createDataSourceObject();
const dsExportedObj = createObject(testDataSourceId);
const collectedObjects = [dsObj, dsExportedObj];

const errors = [
{
type: dsObj.type,
id: dsObj.id,
title: dsObj.attributes.title,
meta: { title: dsObj.attributes.title },
error: { type: 'unsupported_type' },
},
{
type: dsExportedObj.type,
id: dsExportedObj.id,
title: dsExportedObj.attributes.title,
meta: { title: dsExportedObj.attributes.title },
error: { type: 'unsupported_type' },
},
];
getMockFn(collectSavedObjects).mockResolvedValue({
errors: [],
collectedObjects,
importIdMap: new Map(),
});
const result = await importSavedObjectsFromStream(options);
const expectedErrors = errors.map(({ type, id }) => expect.objectContaining({ type, id }));
expect(result).toEqual({ success: false, successCount: 0, errors: expectedErrors });
});

test('early return if import mixed non/data source objects to non-MDS cluster', async () => {
const options = setupOptions(false, testDataSourceId, false);
const dsObj = createDataSourceObject();
const dsExportedObj = createObject(testDataSourceId);
const nonDsExportedObj = createObject();
const collectedObjects = [dsObj, dsExportedObj, nonDsExportedObj];

const errors = [
{
type: dsObj.type,
id: dsObj.id,
title: dsObj.attributes.title,
meta: { title: dsObj.attributes.title },
error: { type: 'unsupported_type' },
},
{
type: dsExportedObj.type,
id: dsExportedObj.id,
title: dsExportedObj.attributes.title,
meta: { title: dsExportedObj.attributes.title },
error: { type: 'unsupported_type' },
},
];
getMockFn(collectSavedObjects).mockResolvedValue({
errors: [],
collectedObjects,
importIdMap: new Map(),
});
const result = await importSavedObjectsFromStream(options);
const expectedErrors = errors.map(({ type, id }) => expect.objectContaining({ type, id }));
expect(result).toEqual({ success: false, successCount: 0, errors: expectedErrors });
});

test('early return if import single data source objects to non-MDS cluster', async () => {
const options = setupOptions(false, testDataSourceId, false);
const dsObj = createDataSourceObject();
const collectedObjects = [dsObj];

const errors = [
{
type: dsObj.type,
id: dsObj.id,
title: dsObj.attributes.title,
meta: { title: dsObj.attributes.title },
error: { type: 'unsupported_type' },
},
];
getMockFn(collectSavedObjects).mockResolvedValue({
errors: [],
collectedObjects,
importIdMap: new Map(),
});
const result = await importSavedObjectsFromStream(options);
const expectedErrors = errors.map(({ type, id }) => expect.objectContaining({ type, id }));
expect(result).toEqual({ success: false, successCount: 0, errors: expectedErrors });
});
});
});
25 changes: 25 additions & 0 deletions src/core/server/saved_objects/import/import_saved_objects.ts
@@ -33,13 +33,15 @@ import {
SavedObjectsImportError,
SavedObjectsImportResponse,
SavedObjectsImportOptions,
SavedObjectsImportUnsupportedTypeError,
} from './types';
import { validateReferences } from './validate_references';
import { checkOriginConflicts } from './check_origin_conflicts';
import { createSavedObjects } from './create_saved_objects';
import { checkConflicts } from './check_conflicts';
import { regenerateIds } from './regenerate_ids';
import { checkConflictsForDataSource } from './check_conflict_for_data_source';
import { isSavedObjectWithDataSource } from './validate_object_id';

/**
* Import saved objects from given stream. See the {@link SavedObjectsImportOptions | options} for more
@@ -58,6 +60,7 @@ export async function importSavedObjectsFromStream({
dataSourceId,
dataSourceTitle,
workspaces,
dataSourceEnabled,
}: SavedObjectsImportOptions): Promise<SavedObjectsImportResponse> {
let errorAccumulator: SavedObjectsImportError[] = [];
const supportedTypes = typeRegistry.getImportableAndExportableTypes().map((type) => type.name);
@@ -69,6 +72,28 @@
supportedTypes,
dataSourceId,
});
// if data source is not enabled, return unsupported-type errors early
if (!dataSourceEnabled) {
const notSupportedErrors: SavedObjectsImportError[] = collectSavedObjectsResult.collectedObjects.reduce(
(errors: SavedObjectsImportError[], obj) => {
if (obj.type === 'data-source' || isSavedObjectWithDataSource(obj.id)) {
const error: SavedObjectsImportUnsupportedTypeError = { type: 'unsupported_type' };
const { title } = obj.attributes;
errors.push({ error, type: obj.type, id: obj.id, title, meta: { title } });
}
return errors; // Return the accumulator in each iteration
},
[]
);
if (notSupportedErrors?.length > 0) {
return {
successCount: 0,
success: false,
errors: notSupportedErrors,
};
}
}

errorAccumulator = [...errorAccumulator, ...collectSavedObjectsResult.errors];
/** Map of all IDs for objects that we are attempting to import; each value is empty by default */
let importIdMap = collectSavedObjectsResult.importIdMap;
2 changes: 1 addition & 1 deletion src/core/server/saved_objects/import/types.ts
@@ -189,7 +189,7 @@ export interface SavedObjectsImportOptions {
createNewCopies: boolean;
dataSourceId?: string;
dataSourceTitle?: string;
/** if specified, will import in given workspaces */
dataSourceEnabled?: boolean;
workspaces?: SavedObjectsBaseOptions['workspaces'];
}

59 changes: 59 additions & 0 deletions src/core/server/saved_objects/import/validate_object_id.test.ts
@@ -0,0 +1,59 @@
/*
* Copyright OpenSearch Contributors
* SPDX-License-Identifier: Apache-2.0
*/

import { isSavedObjectWithDataSource } from './validate_object_id';

describe('isObjectWithDataSource', () => {
test('should return false for valid object with data source ID but in wrong format', () => {
// ID in the underscore-separated format, but the parts are not valid UUIDs
const inValidId = 'invalid_uuid_1234-invalid_uuid_5678';
expect(isSavedObjectWithDataSource(inValidId)).toBe(false);
});

test('should return false for invalid IDs', () => {
// Missing underscore
const invalidId1 = 'missingunderscore';
expect(isSavedObjectWithDataSource(invalidId1)).toBe(false);

// Invalid UUID in the second part
const invalidId2 = 'valid_uuid_1234-invalid_uuid';
expect(isSavedObjectWithDataSource(invalidId2)).toBe(false);

// Missing second part
const invalidId3 = 'valid_uuid_1234';
expect(isSavedObjectWithDataSource(invalidId3)).toBe(false);

// More than two parts
const invalidId4 = 'valid_uuid_1234-valid_uuid_5678-extra_part';
expect(isSavedObjectWithDataSource(invalidId4)).toBe(false);
});

test('should return false for non-UUID parts', () => {
// First part is not a UUID
const invalidId1 = 'not_a_uuid_valid_uuid_1234';
expect(isSavedObjectWithDataSource(invalidId1)).toBe(false);

// Second part is not a UUID
const invalidId2 = 'valid_uuid_1234_not_a_uuid';
expect(isSavedObjectWithDataSource(invalidId2)).toBe(false);

// Both parts are not UUIDs
const invalidId3 = 'not_a_uuid_not_a_uuid';
expect(isSavedObjectWithDataSource(invalidId3)).toBe(false);
});

test('should return false for string with underscore but not with UUID', () => {
// First part is not a UUID
const invalidId = 'saved_object_with_index_pattern_conflict';
expect(isSavedObjectWithDataSource(invalidId)).toBe(false);
});

test('should return false for string with underscore but with three UUIDs', () => {
// Three UUIDs joined by underscores (one part too many)
const invalidId =
'7cbd2350-2223-11e8-b802-5bcf64c2cfb4_7cbd2350-2223-11e8-b802-5bcf64c2cfb4_7cbd2350-2223-11e8-b802-5bcf64c2cfb4';
expect(isSavedObjectWithDataSource(invalidId)).toBe(false);
});
});
40 changes: 40 additions & 0 deletions src/core/server/saved_objects/import/validate_object_id.ts
@@ -0,0 +1,40 @@
/*
* Copyright OpenSearch Contributors
* SPDX-License-Identifier: Apache-2.0
*/

/**
* When multiple data source is enabled, an object exported from a data source keeps an id like
* "69a34b00-9ee8-11e7-8711-e7a007dcef99_7cbd2350-2223-11e8-b802-5bcf64c2cfb4":
* two UUIDs joined by an underscore. The UUID before the underscore identifies the data source,
* and the UUID after it is the original object id.
* When multiple data source is disabled, an object exported from the local cluster has a plain id
* such as "7cbd2350-2223-11e8-b802-5bcf64c2cfb4".
* This format lets us tell whether a single object was exported from an MDS-enabled or MDS-disabled cluster.
*
* This file groups validation helpers that infer the source of an object from its id.
*/

/**
*
* @param candidate a single id segment (contains no underscore)
* @returns true if the segment is a valid UUID
*/
const isUUID = (candidate: string): boolean => {
// Regular expression pattern for UUID
const uuidPattern: RegExp = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;
return uuidPattern.test(candidate);
};

/**
*
* @param id a single saved object id
* @returns true if the id follows the MDS export format `<dataSourceUUID>_<objectUUID>`
*/
export const isSavedObjectWithDataSource = (id: string): boolean => {
const idParts = id.split('_');
/**
* Check that the id splits into exactly two parts and that both parts are valid UUIDs.
*/
return idParts && idParts.length === 2 && idParts.every(isUUID);
};
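
For reference, a minimal usage sketch of the helper above, based on the id formats described in the file comment (the sample ids are illustrative only):

```ts
import { isSavedObjectWithDataSource } from './validate_object_id';

// Object exported from an MDS-enabled cluster: "<dataSourceUUID>_<objectUUID>"
isSavedObjectWithDataSource(
  '69a34b00-9ee8-11e7-8711-e7a007dcef99_7cbd2350-2223-11e8-b802-5bcf64c2cfb4'
); // true

// Object exported from a local (MDS-disabled) cluster: a single UUID, no underscore
isSavedObjectWithDataSource('7cbd2350-2223-11e8-b802-5bcf64c2cfb4'); // false

// Underscore present, but the parts are not UUIDs
isSavedObjectWithDataSource('saved_object_with_index_pattern_conflict'); // false
```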
4 changes: 4 additions & 0 deletions src/core/server/saved_objects/routes/import.ts
@@ -64,6 +64,7 @@ export const registerImportRoute = (router: IRouter, config: SavedObjectConfig)
workspaces: schema.maybe(
schema.oneOf([schema.string(), schema.arrayOf(schema.string())])
),
dataSourceEnabled: schema.maybe(schema.boolean({ defaultValue: false })),
Review comment (Member): Shouldn't this dataSourceEnabled option be determined by the saved objects service itself? Passing it as a parameter doesn't look right to me.

Reply (Contributor, Author): Hi @zengyan-amazon, something here might be confusing me. dataSourceEnabled originally comes from the DataSourcePluginSetup props; it is provided by the data source service and passed along as a service field, and the UI component passes it as a parameter to the import API. The UI gets it from the saved_objects_management plugin setup ([code](https://github.com/opensearch-project/OpenSearch-Dashboards/blob/fb76ee9beee3a98d9b7c0783df072abe745acd7a/src/plugins/saved_objects_management/public/plugin.ts#L110)). The import API in core is used by this plugin, but core has no plugin setup, and I do not know how to get dataSourceEnabled from the saved objects client in core. Could you please point me to an example?

Reply (Contributor, Author): Issue to track here: #6557

(A sketch of how this flag is wired from the UI to this route is included after this file's diff.)

},
{
validate: (object) => {
Expand Down Expand Up @@ -116,6 +117,8 @@ export const registerImportRoute = (router: IRouter, config: SavedObjectConfig)
workspaces = [workspaces];
}

const dataSourceEnabled = req.query.dataSourceEnabled;

const result = await importSavedObjectsFromStream({
savedObjectsClient: context.core.savedObjects.client,
typeRegistry: context.core.savedObjects.typeRegistry,
Expand All @@ -126,6 +129,7 @@ export const registerImportRoute = (router: IRouter, config: SavedObjectConfig)
dataSourceId,
dataSourceTitle,
workspaces,
dataSourceEnabled,
});

return res.ok({ body: result });
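
To make the wiring discussed in the review thread above concrete, here is a minimal sketch of how the flag reaches this route. It only mirrors the query-building branch of importFile() shown in the next file's diff; the helper name and sample values are illustrative, not part of the change.

```ts
// Sketch of the query that importFile() builds before POSTing to
// /api/saved_objects/_import (buildImportQuery is a hypothetical helper).
type ImportMode = { createNewCopies: boolean; overwrite: boolean };

const buildImportQuery = (
  { createNewCopies, overwrite }: ImportMode,
  selectedDataSourceId?: string,
  dataSourceEnabled?: boolean
) => {
  // createNewCopies takes precedence over overwrite, as in importFile().
  const query: Record<string, string | boolean> = createNewCopies
    ? { createNewCopies }
    : { overwrite };
  if (selectedDataSourceId) {
    query.dataSourceId = selectedDataSourceId;
  }
  // The flag is only attached when MDS is enabled; otherwise the route's
  // schema default (false) applies and the server rejects data-source
  // objects early with "unsupported_type" errors.
  if (dataSourceEnabled) {
    query.dataSourceEnabled = dataSourceEnabled;
  }
  return query;
};

// MDS disabled: no dataSourceEnabled in the query string.
buildImportQuery({ createNewCopies: false, overwrite: true });
// -> { overwrite: true }

// MDS enabled and a data source selected in the import flyout.
buildImportQuery(
  { createNewCopies: true, overwrite: false },
  '69a34b00-9ee8-11e7-8711-e7a007dcef99',
  true
);
// -> { createNewCopies: true, dataSourceId: '69a34b00-9ee8-11e7-8711-e7a007dcef99', dataSourceEnabled: true }
```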
@@ -41,14 +41,18 @@ export async function importFile(
http: HttpStart,
file: File,
{ createNewCopies, overwrite }: ImportMode,
selectedDataSourceId?: string
selectedDataSourceId?: string,
dataSourceEnabled?: boolean
) {
const formData = new FormData();
formData.append('file', file);
const query = createNewCopies ? { createNewCopies } : { overwrite };
if (selectedDataSourceId) {
query.dataSourceId = selectedDataSourceId;
}
if (dataSourceEnabled) {
query.dataSourceEnabled = dataSourceEnabled;
}
return await http.post<ImportResponse>('/api/saved_objects/_import', {
body: formData,
headers: {
@@ -231,6 +231,7 @@ describe('Flyout', () => {
createNewCopies: true,
overwrite: true,
},
undefined,
undefined
);
expect(component.state()).toMatchObject({
@@ -189,13 +189,19 @@ export class Flyout extends Component<FlyoutProps, FlyoutState> {
* Does the initial import of a file, resolveImportErrors then handles errors and retries
*/
import = async () => {
const { http } = this.props;
const { http, dataSourceEnabled } = this.props;
const { file, importMode, selectedDataSourceId } = this.state;
this.setState({ status: 'loading', error: undefined });

// Import the file
try {
const response = await importFile(http, file!, importMode, selectedDataSourceId);
const response = await importFile(
http,
file!,
importMode,
selectedDataSourceId,
dataSourceEnabled
);
this.setState(processImportResponse(response), () => {
// Resolve import errors right away if there's no index patterns to match
// This will ask about overwriting each object, etc