6 changes: 5 additions & 1 deletion x-pack/plugins/file_upload/public/api/index.ts
@@ -54,8 +54,9 @@ export async function analyzeFile(
const { getHttp } = await lazyLoadModules();
const body = JSON.stringify(file);
return await getHttp().fetch<FindFileStructureResponse>({
path: `/internal/file_data_visualizer/analyze_file`,
path: `/internal/file_upload/analyze_file`,
method: 'POST',
version: '1',
body,
query: params,
});
@@ -67,6 +68,7 @@ export async function hasImportPermission(params: HasImportPermissionParams): Pr
const resp = await fileUploadModules.getHttp().fetch<HasImportPermission>({
path: `/internal/file_upload/has_import_permission`,
method: 'GET',
version: '1',
query: { ...params },
});
return resp.hasImportPermission;
@@ -85,6 +87,7 @@ export async function checkIndexExists(
const { exists } = await fileUploadModules.getHttp().fetch<{ exists: boolean }>({
path: `/internal/file_upload/index_exists`,
method: 'POST',
version: '1',
body,
query: params,
});
@@ -101,6 +104,7 @@ export async function getTimeFieldRange(index: string, query: unknown, timeField
return await fileUploadModules.getHttp().fetch<GetTimeFieldRangeResponse>({
path: `/internal/file_upload/time_field_range`,
method: 'POST',
version: '1',
body,
});
}
1 change: 1 addition & 0 deletions x-pack/plugins/file_upload/public/importer/importer.ts
@@ -298,6 +298,7 @@ export function callImportRoute({
return getHttp().fetch<ImportResponse>({
path: `/internal/file_upload/import`,
method: 'POST',
version: '1',
query,
body,
});
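Note on the client-side changes above: each `http.fetch` call to an internal file_upload route now passes a `version` field, so the request is handled by the matching server-side route version. A minimal sketch of the pattern, not taken verbatim from this PR — the wrapper function and its name are illustrative, and the `HttpSetup` type is assumed to come from `@kbn/core/public`:

```ts
import type { HttpSetup } from '@kbn/core/public';

// Illustrative helper only: the name `checkIndexExistsV1` is hypothetical,
// but the fetch options mirror the versioned calls added in this PR.
export async function checkIndexExistsV1(http: HttpSetup, index: string): Promise<boolean> {
  const { exists } = await http.fetch<{ exists: boolean }>({
    path: '/internal/file_upload/index_exists',
    method: 'POST',
    version: '1', // must match a version registered via addVersion() on the server
    body: JSON.stringify({ index }),
  });
  return exists;
}
```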
279 changes: 157 additions & 122 deletions x-pack/plugins/file_upload/server/routes.ts
@@ -44,37 +44,44 @@ function importData(
export function fileUploadRoutes(coreSetup: CoreSetup<StartDeps, unknown>, logger: Logger) {
const router = coreSetup.http.createRouter();

router.get(
{
router.versioned
.get({
path: '/internal/file_upload/has_import_permission',
validate: {
query: schema.object({
indexName: schema.maybe(schema.string()),
checkCreateDataView: schema.boolean(),
checkHasManagePipeline: schema.boolean(),
}),
access: 'internal',
})
.addVersion(
{
version: '1',
validate: {
request: {
query: schema.object({
indexName: schema.maybe(schema.string()),
checkCreateDataView: schema.boolean(),
checkHasManagePipeline: schema.boolean(),
}),
},
},
},
},
async (context, request, response) => {
try {
const [, pluginsStart] = await coreSetup.getStartServices();
const { indexName, checkCreateDataView, checkHasManagePipeline } = request.query;
async (context, request, response) => {
try {
const [, pluginsStart] = await coreSetup.getStartServices();
const { indexName, checkCreateDataView, checkHasManagePipeline } = request.query;

const { hasImportPermission } = await checkFileUploadPrivileges({
authorization: pluginsStart.security?.authz,
request,
indexName,
checkCreateDataView,
checkHasManagePipeline,
});
const { hasImportPermission } = await checkFileUploadPrivileges({
authorization: pluginsStart.security?.authz,
request,
indexName,
checkCreateDataView,
checkHasManagePipeline,
});

return response.ok({ body: { hasImportPermission } });
} catch (e) {
logger.warn(`Unable to check import permission, error: ${e.message}`);
return response.ok({ body: { hasImportPermission: false } });
return response.ok({ body: { hasImportPermission } });
} catch (e) {
logger.warn(`Unable to check import permission, error: ${e.message}`);
return response.ok({ body: { hasImportPermission: false } });
}
}
}
);
);

/**
* @apiGroup FileDataVisualizer
@@ -85,31 +92,38 @@ export function fileUploadRoutes(coreSetup: CoreSetup<StartDeps, unknown>, logge
*
* @apiSchema (query) analyzeFileQuerySchema
*/
router.post(
{
path: '/internal/file_data_visualizer/analyze_file',
validate: {
body: schema.any(),
query: analyzeFileQuerySchema,
},
router.versioned
.post({
path: '/internal/file_upload/analyze_file',
access: 'internal',
options: {
body: {
accepts: ['text/*', 'application/json'],
maxBytes: MAX_FILE_SIZE_BYTES,
},
tags: ['access:fileUpload:analyzeFile'],
},
},
async (context, request, response) => {
try {
const esClient = (await context.core).elasticsearch.client;
const result = await analyzeFile(esClient, request.body, request.query);
return response.ok({ body: result });
} catch (e) {
return response.customError(wrapError(e));
})
.addVersion(
{
version: '1',
validate: {
request: {
body: schema.any(),
query: analyzeFileQuerySchema,
},
},
},
async (context, request, response) => {
try {
const esClient = (await context.core).elasticsearch.client;
const result = await analyzeFile(esClient, request.body, request.query);
return response.ok({ body: result });
} catch (e) {
return response.customError(wrapError(e));
}
}
}
);
);

/**
* @apiGroup FileDataVisualizer
@@ -121,73 +135,87 @@ export function fileUploadRoutes(coreSetup: CoreSetup<StartDeps, unknown>, logge
* @apiSchema (query) importFileQuerySchema
* @apiSchema (body) importFileBodySchema
*/
router.post(
{
router.versioned
.post({
path: '/internal/file_upload/import',
validate: {
query: importFileQuerySchema,
body: importFileBodySchema,
},
access: 'internal',
options: {
body: {
accepts: ['application/json'],
maxBytes: MAX_FILE_SIZE_BYTES,
},
},
},
async (context, request, response) => {
try {
const { id } = request.query;
const { index, data, settings, mappings, ingestPipeline } = request.body;
const esClient = (await context.core).elasticsearch.client;
})
.addVersion(
{
version: '1',
validate: {
request: {
query: importFileQuerySchema,
body: importFileBodySchema,
},
},
},
async (context, request, response) => {
try {
const { id } = request.query;
const { index, data, settings, mappings, ingestPipeline } = request.body;
const esClient = (await context.core).elasticsearch.client;

// `id` being `undefined` tells us that this is a new import that will create a new index.
// Follow-up import calls that just add additional data will include the `id` of the created
// index; we ignore those and don't increment the counter.
if (id === undefined) {
await updateTelemetry();
}
// `id` being `undefined` tells us that this is a new import that will create a new index.
// Follow-up import calls that just add additional data will include the `id` of the created
// index; we ignore those and don't increment the counter.
if (id === undefined) {
await updateTelemetry();
}

const result = await importData(
esClient,
id,
index,
settings,
mappings,
// @ts-expect-error
ingestPipeline,
data
);
return response.ok({ body: result });
} catch (e) {
return response.customError(wrapError(e));
const result = await importData(
esClient,
id,
index,
settings,
mappings,
// @ts-expect-error
ingestPipeline,
data
);
return response.ok({ body: result });
} catch (e) {
return response.customError(wrapError(e));
}
}
}
);
);

/**
* @apiGroup FileDataVisualizer
*
* @api {post} /internal/file_upload/index_exists ES indices exists wrapper checks if index exists
* @apiName IndexExists
*/
router.post(
{
router.versioned
.post({
path: '/internal/file_upload/index_exists',
validate: {
body: schema.object({ index: schema.string() }),
access: 'internal',
})
.addVersion(
{
version: '1',
validate: {
request: {
body: schema.object({ index: schema.string() }),
},
},
},
},
async (context, request, response) => {
try {
const esClient = (await context.core).elasticsearch.client;
const indexExists = await esClient.asCurrentUser.indices.exists(request.body);
return response.ok({ body: { exists: indexExists } });
} catch (e) {
return response.customError(wrapError(e));
async (context, request, response) => {
try {
const esClient = (await context.core).elasticsearch.client;
const indexExists = await esClient.asCurrentUser.indices.exists(request.body);
return response.ok({ body: { exists: indexExists } });
} catch (e) {
return response.customError(wrapError(e));
}
}
}
);
);

/**
* @apiGroup FileDataVisualizer
@@ -201,42 +229,49 @@ export function fileUploadRoutes(coreSetup: CoreSetup<StartDeps, unknown>, logge
* @apiSuccess {Object} start start of time range with epoch and string properties.
* @apiSuccess {Object} end end of time range with epoch and string properties.
*/
router.post(
{
router.versioned
.post({
path: '/internal/file_upload/time_field_range',
validate: {
body: schema.object({
/** Index or indexes for which to return the time range. */
index: schema.oneOf([schema.string(), schema.arrayOf(schema.string())]),
/** Name of the time field in the index. */
timeFieldName: schema.string(),
/** Query to match documents in the index(es). */
query: schema.maybe(schema.any()),
runtimeMappings: schema.maybe(runtimeMappingsSchema),
}),
},
access: 'internal',
options: {
tags: ['access:fileUpload:analyzeFile'],
},
},
async (context, request, response) => {
try {
const { index, timeFieldName, query, runtimeMappings } = request.body;
const esClient = (await context.core).elasticsearch.client;
const resp = await getTimeFieldRange(
esClient,
index,
timeFieldName,
query,
runtimeMappings
);
})
.addVersion(
{
version: '1',
validate: {
request: {
body: schema.object({
/** Index or indexes for which to return the time range. */
index: schema.oneOf([schema.string(), schema.arrayOf(schema.string())]),
/** Name of the time field in the index. */
timeFieldName: schema.string(),
/** Query to match documents in the index(es). */
query: schema.maybe(schema.any()),
runtimeMappings: schema.maybe(runtimeMappingsSchema),
}),
},
},
},
async (context, request, response) => {
try {
const { index, timeFieldName, query, runtimeMappings } = request.body;
const esClient = (await context.core).elasticsearch.client;
const resp = await getTimeFieldRange(
esClient,
index,
timeFieldName,
query,
runtimeMappings
);

return response.ok({
body: resp,
});
} catch (e) {
return response.customError(wrapError(e));
return response.ok({
body: resp,
});
} catch (e) {
return response.customError(wrapError(e));
}
}
}
);
);
}
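For orientation, the server-side migration applied to every route above follows one shape: the route path, `access`, and `options` (body limits, tags) move to `router.versioned.get(...)` / `router.versioned.post(...)`, while request validation and the handler are attached per version via `.addVersion({ version: '1', validate: { request: { ... } } }, handler)`. A condensed sketch of that shape under stated assumptions — the path and schema below are placeholders, not part of this plugin, and `IRouter` / `schema` are assumed to come from `@kbn/core/server` and `@kbn/config-schema`:

```ts
import { schema } from '@kbn/config-schema';
import type { IRouter } from '@kbn/core/server';

// Hypothetical route illustrating the versioned-router pattern used in this PR.
export function registerEchoRoute(router: IRouter) {
  router.versioned
    .post({
      path: '/internal/example/echo', // placeholder path
      access: 'internal',
    })
    .addVersion(
      {
        version: '1',
        validate: {
          request: {
            body: schema.object({ message: schema.string() }),
          },
        },
      },
      async (context, request, response) => {
        // Echo the validated body back to the caller.
        return response.ok({ body: { message: request.body.message } });
      }
    );
}
```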