diff --git a/x-pack/legacy/plugins/ml/common/types/jobs.ts b/x-pack/legacy/plugins/ml/common/types/jobs.ts index 47f34f6568eed9..a9885048550bbf 100644 --- a/x-pack/legacy/plugins/ml/common/types/jobs.ts +++ b/x-pack/legacy/plugins/ml/common/types/jobs.ts @@ -20,7 +20,10 @@ export interface MlJob { }; create_time: number; custom_settings: object; - data_counts: object; + data_counts: { + earliest_record_timestamp: number; + latest_record_timestamp: number; + }; data_description: { time_field: string; time_format: string; diff --git a/x-pack/legacy/plugins/ml/common/types/modules.ts b/x-pack/legacy/plugins/ml/common/types/modules.ts index cd6395500a804a..3e1a2cf9ab2e62 100644 --- a/x-pack/legacy/plugins/ml/common/types/modules.ts +++ b/x-pack/legacy/plugins/ml/common/types/modules.ts @@ -11,16 +11,25 @@ export interface ModuleJob { config: Omit; } +export interface ModuleDataFeed { + id: string; + config: Omit; +} + export interface KibanaObjectConfig extends SavedObjectAttributes { description: string; title: string; version: number; + kibanaSavedObjectMeta?: { + searchSourceJSON: string; + }; } export interface KibanaObject { id: string; title: string; config: KibanaObjectConfig; + exists?: boolean; } export interface KibanaObjects { @@ -39,14 +48,18 @@ export interface Module { defaultIndexPattern: string; query: any; jobs: ModuleJob[]; - datafeeds: Datafeed[]; + datafeeds: ModuleDataFeed[]; kibana: KibanaObjects; } -export interface KibanaObjectResponse { - exists?: boolean; - success?: boolean; +export interface ResultItem { id: string; + success?: boolean; +} + +export interface KibanaObjectResponse extends ResultItem { + exists?: boolean; + error?: any; } export interface SetupError { @@ -58,16 +71,12 @@ export interface SetupError { statusCode: number; } -export interface DatafeedResponse { - id: string; - success: boolean; +export interface DatafeedResponse extends ResultItem { started: boolean; error?: SetupError; } -export interface JobResponse { - id: string; - success: boolean; +export interface JobResponse extends ResultItem { error?: SetupError; } @@ -75,10 +84,14 @@ export interface DataRecognizerConfigResponse { datafeeds: DatafeedResponse[]; jobs: JobResponse[]; kibana: { - search: KibanaObjectResponse; - visualization: KibanaObjectResponse; - dashboard: KibanaObjectResponse; + search: KibanaObjectResponse[]; + visualization: KibanaObjectResponse[]; + dashboard: KibanaObjectResponse[]; }; } +export type GeneralOverride = any; + export type JobOverride = Partial; + +export type DatafeedOverride = Partial; diff --git a/x-pack/legacy/plugins/ml/common/util/job_utils.d.ts b/x-pack/legacy/plugins/ml/common/util/job_utils.d.ts index cfff15bb97be2e..7dcd4b20fe0bf0 100644 --- a/x-pack/legacy/plugins/ml/common/util/job_utils.d.ts +++ b/x-pack/legacy/plugins/ml/common/util/job_utils.d.ts @@ -45,3 +45,10 @@ export function mlFunctionToESAggregation(functionName: string): string | null; export function isModelPlotEnabled(job: Job, detectorIndex: number, entityFields: any[]): boolean; export function getSafeAggregationName(fieldName: string, index: number): string; + +export function getLatestDataOrBucketTimestamp( + latestDataTimestamp: number, + latestBucketTimestamp: number +): number; + +export function prefixDatafeedId(datafeedId: string, prefix: string): string; diff --git a/x-pack/legacy/plugins/ml/public/application/jobs/new_job/common/job_creator/configs/datafeed.ts b/x-pack/legacy/plugins/ml/public/application/jobs/new_job/common/job_creator/configs/datafeed.ts index 
c0b9a4872c3c47..e35f3056ce4347 100644 --- a/x-pack/legacy/plugins/ml/public/application/jobs/new_job/common/job_creator/configs/datafeed.ts +++ b/x-pack/legacy/plugins/ml/public/application/jobs/new_job/common/job_creator/configs/datafeed.ts @@ -15,6 +15,10 @@ export interface Datafeed { chunking_config?: ChunkingConfig; frequency?: string; indices: IndexPatternTitle[]; + /** + * The datafeed can contain indexes and indices + */ + indexes?: IndexPatternTitle[]; job_id?: JobId; query: object; query_delay?: string; diff --git a/x-pack/legacy/plugins/ml/server/models/data_recognizer/__tests__/data_recognizer.js b/x-pack/legacy/plugins/ml/server/models/data_recognizer/data_recognizer.test.ts similarity index 79% rename from x-pack/legacy/plugins/ml/server/models/data_recognizer/__tests__/data_recognizer.js rename to x-pack/legacy/plugins/ml/server/models/data_recognizer/data_recognizer.test.ts index 9c5048daeee3f0..de23950e5cc1c8 100644 --- a/x-pack/legacy/plugins/ml/server/models/data_recognizer/__tests__/data_recognizer.js +++ b/x-pack/legacy/plugins/ml/server/models/data_recognizer/data_recognizer.test.ts @@ -4,11 +4,26 @@ * you may not use this file except in compliance with the Elastic License. */ -import expect from '@kbn/expect'; +import { RequestHandlerContext } from 'kibana/server'; +import { Module } from '../../../common/types/modules'; import { DataRecognizer } from '../data_recognizer'; describe('ML - data recognizer', () => { - const dr = new DataRecognizer({}); + const dr = new DataRecognizer(({ + ml: { + mlClient: { + callAsCurrentUser: jest.fn(), + }, + }, + core: { + savedObjects: { + client: { + find: jest.fn(), + bulkCreate: jest.fn(), + }, + }, + }, + } as unknown) as RequestHandlerContext); const moduleIds = [ 'apache_ecs', @@ -34,12 +49,12 @@ describe('ML - data recognizer', () => { it('listModules - check all module IDs', async () => { const modules = await dr.listModules(); const ids = modules.map(m => m.id); - expect(ids.join()).to.equal(moduleIds.join()); + expect(ids.join()).toEqual(moduleIds.join()); }); it('getModule - load a single module', async () => { const module = await dr.getModule(moduleIds[0]); - expect(module.id).to.equal(moduleIds[0]); + expect(module.id).toEqual(moduleIds[0]); }); describe('jobOverrides', () => { @@ -47,7 +62,7 @@ describe('ML - data recognizer', () => { // arrange const prefix = 'pre-'; const testJobId = 'test-job'; - const moduleConfig = { + const moduleConfig = ({ jobs: [ { id: `${prefix}${testJobId}`, @@ -64,7 +79,7 @@ describe('ML - data recognizer', () => { }, }, ], - }; + } as unknown) as Module; const jobOverrides = [ { analysis_limits: { @@ -80,7 +95,7 @@ describe('ML - data recognizer', () => { // act dr.applyJobConfigOverrides(moduleConfig, jobOverrides, prefix); // assert - expect(moduleConfig.jobs).to.eql([ + expect(moduleConfig.jobs).toEqual([ { config: { analysis_config: { diff --git a/x-pack/legacy/plugins/ml/server/models/data_recognizer/data_recognizer.js b/x-pack/legacy/plugins/ml/server/models/data_recognizer/data_recognizer.ts similarity index 74% rename from x-pack/legacy/plugins/ml/server/models/data_recognizer/data_recognizer.js rename to x-pack/legacy/plugins/ml/server/models/data_recognizer/data_recognizer.ts index 1e7a72ee2750fc..b62e44c299a2d1 100644 --- a/x-pack/legacy/plugins/ml/server/models/data_recognizer/data_recognizer.js +++ b/x-pack/legacy/plugins/ml/server/models/data_recognizer/data_recognizer.ts @@ -7,9 +7,25 @@ import fs from 'fs'; import Boom from 'boom'; import numeral from '@elastic/numeral'; 
-import { merge, get } from 'lodash'; +import { CallAPIOptions, RequestHandlerContext, SavedObjectsClientContract } from 'kibana/server'; +import { merge } from 'lodash'; +import { MlJob } from '../../../common/types/jobs'; +import { + KibanaObjects, + ModuleDataFeed, + ModuleJob, + Module, + JobOverride, + DatafeedOverride, + GeneralOverride, + DatafeedResponse, + JobResponse, + KibanaObjectResponse, + DataRecognizerConfigResponse, +} from '../../../common/types/modules'; import { getLatestDataOrBucketTimestamp, prefixDatafeedId } from '../../../common/util/job_utils'; import { mlLog } from '../../client/log'; +// @ts-ignore import { jobServiceProvider } from '../job_service'; import { resultsServiceProvider } from '../results_service'; @@ -23,16 +39,90 @@ export const SAVED_OBJECT_TYPES = { VISUALIZATION: 'visualization', }; +interface RawModuleConfig { + id: string; + title: string; + description: string; + type: string; + logoFile: string; + defaultIndexPattern: string; + query: any; + jobs: Array<{ file: string; id: string }>; + datafeeds: Array<{ file: string; job_id: string; id: string }>; + kibana: { + search: Array<{ file: string; id: string }>; + visualization: Array<{ file: string; id: string }>; + dashboard: Array<{ file: string; id: string }>; + }; +} + +interface MlJobStats { + jobs: MlJob[]; +} + +interface Config { + dirName: any; + json: RawModuleConfig; +} + +interface Result { + id: string; + title: string; + query: any; + description: string; + logo: { icon: string } | null; +} + +interface JobStat { + id: string; + earliestTimestampMs: number; + latestTimestampMs: number; + latestResultsTimestampMs: number; +} + +interface JobExistResult { + jobsExist: boolean; + jobs: JobStat[]; +} + +interface ObjectExistResult { + id: string; + type: string; +} + +interface ObjectExistResponse { + id: string; + type: string; + exists: boolean; + savedObject?: any; +} + +interface SaveResults { + jobs: JobResponse[]; + datafeeds: DatafeedResponse[]; + savedObjects: KibanaObjectResponse[]; +} + export class DataRecognizer { - constructor(callWithRequest) { - this.callWithRequest = callWithRequest; - this.modulesDir = `${__dirname}/modules`; - this.savedObjectsClient = null; + modulesDir = `${__dirname}/modules`; + indexPatternName: string = ''; + indexPatternId: string | undefined = undefined; + savedObjectsClient: SavedObjectsClientContract; + + callAsCurrentUser: ( + endpoint: string, + clientParams?: Record, + options?: CallAPIOptions + ) => Promise; + + constructor(context: RequestHandlerContext) { + this.callAsCurrentUser = context.ml!.mlClient.callAsCurrentUser; + this.savedObjectsClient = context.core.savedObjects.client; } // list all directories under the given directory - async listDirs(dirName) { - const dirs = []; + async listDirs(dirName: string): Promise { + const dirs: string[] = []; return new Promise((resolve, reject) => { fs.readdir(dirName, (err, fileNames) => { if (err) { @@ -49,7 +139,7 @@ export class DataRecognizer { }); } - async readFile(fileName) { + async readFile(fileName: string): Promise { return new Promise((resolve, reject) => { fs.readFile(fileName, 'utf-8', (err, content) => { if (err) { @@ -61,12 +151,12 @@ export class DataRecognizer { }); } - async loadManifestFiles() { - const configs = []; + async loadManifestFiles(): Promise { + const configs: Config[] = []; const dirs = await this.listDirs(this.modulesDir); await Promise.all( dirs.map(async dir => { - let file; + let file: string | undefined; try { file = await 
this.readFile(`${this.modulesDir}/${dir}/manifest.json`); } catch (error) { @@ -90,15 +180,15 @@ export class DataRecognizer { } // get the manifest.json file for a specified id, e.g. "nginx" - async getManifestFile(id) { + async getManifestFile(id: string) { const manifestFiles = await this.loadManifestFiles(); return manifestFiles.find(i => i.json.id === id); } // called externally by an endpoint - async findMatches(indexPattern) { + async findMatches(indexPattern: string): Promise { const manifestFiles = await this.loadManifestFiles(); - const results = []; + const results: Result[] = []; await Promise.all( manifestFiles.map(async i => { @@ -138,7 +228,7 @@ export class DataRecognizer { return results; } - async searchForFields(moduleConfig, indexPattern) { + async searchForFields(moduleConfig: RawModuleConfig, indexPattern: string) { if (moduleConfig.query === undefined) { return false; } @@ -149,7 +239,7 @@ export class DataRecognizer { query: moduleConfig.query, }; - const resp = await this.callWithRequest('search', { + const resp = await this.callAsCurrentUser('search', { index, rest_total_hits_as_int: true, size, @@ -174,9 +264,9 @@ export class DataRecognizer { // called externally by an endpoint // supplying an optional prefix will add the prefix // to the job and datafeed configs - async getModule(id, prefix = '') { - let manifestJSON = null; - let dirName = null; + async getModule(id: string, prefix = ''): Promise { + let manifestJSON: RawModuleConfig | null = null; + let dirName: string | null = null; const manifestFile = await this.getManifestFile(id); if (manifestFile !== undefined) { @@ -186,9 +276,9 @@ export class DataRecognizer { throw Boom.notFound(`Module with the id "${id}" not found`); } - const jobs = []; - const datafeeds = []; - const kibana = {}; + const jobs: ModuleJob[] = []; + const datafeeds: ModuleDataFeed[] = []; + const kibana: KibanaObjects = {}; // load all of the job configs await Promise.all( manifestJSON.jobs.map(async job => { @@ -234,12 +324,12 @@ export class DataRecognizer { // load all of the kibana saved objects if (manifestJSON.kibana !== undefined) { - const kKeys = Object.keys(manifestJSON.kibana); + const kKeys = Object.keys(manifestJSON.kibana) as Array; await Promise.all( kKeys.map(async key => { kibana[key] = []; await Promise.all( - manifestJSON.kibana[key].map(async obj => { + manifestJSON!.kibana[key].map(async obj => { try { const kConfig = await this.readFile( `${this.modulesDir}/${dirName}/${KIBANA_DIR}/${key}/${obj.file}` @@ -247,7 +337,7 @@ export class DataRecognizer { // use the file name for the id const kId = obj.file.replace('.json', ''); const config = JSON.parse(kConfig); - kibana[key].push({ + kibana[key]!.push({ id: kId, title: config.title, config, @@ -276,21 +366,18 @@ export class DataRecognizer { // creates all of the jobs, datafeeds and savedObjects listed in the module config. // if any of the savedObjects already exist, they will not be overwritten. 
async setupModuleItems( - moduleId, - jobPrefix, - groups, - indexPatternName, - query, - useDedicatedIndex, - startDatafeed, - start, - end, - jobOverrides, - datafeedOverrides, - request + moduleId: string, + jobPrefix: string, + groups: string[], + indexPatternName: string, + query: any, + useDedicatedIndex: boolean, + startDatafeed: boolean, + start: number, + end: number, + jobOverrides: JobOverride[], + datafeedOverrides: DatafeedOverride[] ) { - this.savedObjectsClient = request.getSavedObjectsClient(); - // load the config from disk const moduleConfig = await this.getModule(moduleId, jobPrefix); @@ -325,10 +412,10 @@ export class DataRecognizer { // create an empty results object const results = this.createResultsTemplate(moduleConfig); - const saveResults = { - jobs: [], - datafeeds: [], - savedObjects: [], + const saveResults: SaveResults = { + jobs: [] as JobResponse[], + datafeeds: [] as DatafeedResponse[], + savedObjects: [] as KibanaObjectResponse[], }; this.applyJobConfigOverrides(moduleConfig, jobOverrides, jobPrefix); @@ -395,8 +482,8 @@ export class DataRecognizer { return results; } - async dataRecognizerJobsExist(moduleId) { - const results = {}; + async dataRecognizerJobsExist(moduleId: string): Promise { + const results = {} as JobExistResult; // Load the module with the specified ID and check if the jobs // in the module have been created. @@ -405,7 +492,7 @@ export class DataRecognizer { // Add a wildcard at the front of each of the job IDs in the module, // as a prefix may have been supplied when creating the jobs in the module. const jobIds = module.jobs.map(job => `*${job.id}`); - const { jobsExist } = jobServiceProvider(this.callWithRequest); + const { jobsExist } = jobServiceProvider(this.callAsCurrentUser); const jobInfo = await jobsExist(jobIds); // Check if the value for any of the jobs is false. @@ -414,24 +501,24 @@ export class DataRecognizer { if (doJobsExist === true) { // Get the IDs of the jobs created from the module, and their earliest / latest timestamps. 
- const jobStats = await this.callWithRequest('ml.jobStats', { jobId: jobIds }); - const jobStatsJobs = []; + const jobStats: MlJobStats = await this.callAsCurrentUser('ml.jobStats', { jobId: jobIds }); + const jobStatsJobs: JobStat[] = []; if (jobStats.jobs && jobStats.jobs.length > 0) { const foundJobIds = jobStats.jobs.map(job => job.job_id); - const { getLatestBucketTimestampByJob } = resultsServiceProvider(this.callWithRequest); + const { getLatestBucketTimestampByJob } = resultsServiceProvider(this.callAsCurrentUser); const latestBucketTimestampsByJob = await getLatestBucketTimestampByJob(foundJobIds); jobStats.jobs.forEach(job => { const jobStat = { id: job.job_id, - }; + } as JobStat; if (job.data_counts) { jobStat.earliestTimestampMs = job.data_counts.earliest_record_timestamp; jobStat.latestTimestampMs = job.data_counts.latest_record_timestamp; jobStat.latestResultsTimestampMs = getLatestDataOrBucketTimestamp( jobStat.latestTimestampMs, - latestBucketTimestampsByJob[job.job_id] + latestBucketTimestampsByJob[job.job_id] as number ); } jobStatsJobs.push(jobStat); @@ -449,7 +536,7 @@ export class DataRecognizer { } // returns a id based on an index pattern name - async getIndexPatternId(name) { + async getIndexPatternId(name: string) { try { const indexPatterns = await this.loadIndexPatterns(); if (indexPatterns === undefined || indexPatterns.saved_objects === undefined) { @@ -466,16 +553,13 @@ export class DataRecognizer { // create a list of objects which are used to save the savedObjects. // each has an exists flag and those which do not already exist // contain a savedObject object which is sent to the server to save - async createSavedObjectsToSave(moduleConfig) { + async createSavedObjectsToSave(moduleConfig: Module) { // first check if the saved objects already exist. - const savedObjectExistResults = await this.checkIfSavedObjectsExist( - moduleConfig.kibana, - this.request - ); + const savedObjectExistResults = await this.checkIfSavedObjectsExist(moduleConfig.kibana); // loop through the kibanaSaveResults and update Object.keys(moduleConfig.kibana).forEach(type => { // type e.g. dashboard, search ,visualization - moduleConfig.kibana[type].forEach(configItem => { + moduleConfig.kibana[type]!.forEach(configItem => { const existsResult = savedObjectExistResults.find(o => o.id === configItem.id); if (existsResult !== undefined) { configItem.exists = existsResult.exists; @@ -495,25 +579,30 @@ export class DataRecognizer { } // update the exists flags in the kibana results - updateKibanaResults(kibanaSaveResults, objectExistResults) { - Object.keys(kibanaSaveResults).forEach(type => { - kibanaSaveResults[type].forEach(resultItem => { - const i = objectExistResults.find(o => o.id === resultItem.id && o.type === type); - resultItem.exists = i !== undefined; - }); - }); + updateKibanaResults( + kibanaSaveResults: DataRecognizerConfigResponse['kibana'], + objectExistResults: ObjectExistResult[] + ) { + (Object.keys(kibanaSaveResults) as Array).forEach( + type => { + kibanaSaveResults[type].forEach(resultItem => { + const i = objectExistResults.find(o => o.id === resultItem.id && o.type === type); + resultItem.exists = i !== undefined; + }); + } + ); } // loop through each type (dashboard, search, visualization) // load existing savedObjects for each type and compare to find out if // items with the same id already exist. 
// returns a flat list of objects with exists flags set - async checkIfSavedObjectsExist(kibanaObjects) { + async checkIfSavedObjectsExist(kibanaObjects: KibanaObjects): Promise { const types = Object.keys(kibanaObjects); - const results = await Promise.all( + const results: ObjectExistResponse[][] = await Promise.all( types.map(async type => { const existingObjects = await this.loadExistingSavedObjects(type); - return kibanaObjects[type].map(obj => { + return kibanaObjects[type]!.map(obj => { const existingObject = existingObjects.saved_objects.find( o => o.attributes && o.attributes.title === obj.title ); @@ -526,17 +615,17 @@ export class DataRecognizer { }) ); // merge all results - return [].concat(...results); + return ([] as ObjectExistResponse[]).concat(...results); } // find all existing savedObjects for a given type - loadExistingSavedObjects(type) { + loadExistingSavedObjects(type: string) { return this.savedObjectsClient.find({ type, perPage: 1000 }); } // save the savedObjects if they do not exist already - async saveKibanaObjects(objectExistResults) { - let results = { saved_objects: [] }; + async saveKibanaObjects(objectExistResults: ObjectExistResponse[]) { + let results = { saved_objects: [] as any[] }; const filteredSavedObjects = objectExistResults .filter(o => o.exists === false) .map(o => o.savedObject); @@ -553,7 +642,7 @@ export class DataRecognizer { // save the jobs. // if any fail (e.g. it already exists), catch the error and mark the result // as success: false - async saveJobs(jobs) { + async saveJobs(jobs: ModuleJob[]): Promise { return await Promise.all( jobs.map(async job => { const jobId = job.id; @@ -568,15 +657,15 @@ export class DataRecognizer { ); } - async saveJob(job) { + async saveJob(job: ModuleJob) { const { id: jobId, config: body } = job; - return this.callWithRequest('ml.addJob', { jobId, body }); + return this.callAsCurrentUser('ml.addJob', { jobId, body }); } // save the datafeeds. // if any fail (e.g. 
it already exists), catch the error and mark the result // as success: false - async saveDatafeeds(datafeeds) { + async saveDatafeeds(datafeeds: ModuleDataFeed[]) { return await Promise.all( datafeeds.map(async datafeed => { try { @@ -589,24 +678,32 @@ export class DataRecognizer { ); } - async saveDatafeed(datafeed) { + async saveDatafeed(datafeed: ModuleDataFeed) { const { id: datafeedId, config: body } = datafeed; - return this.callWithRequest('ml.addDatafeed', { datafeedId, body }); + return this.callAsCurrentUser('ml.addDatafeed', { datafeedId, body }); } - async startDatafeeds(datafeeds, start, end) { - const results = {}; + async startDatafeeds( + datafeeds: ModuleDataFeed[], + start: number, + end: number + ): Promise<{ [key: string]: DatafeedResponse }> { + const results = {} as { [key: string]: DatafeedResponse }; for (const datafeed of datafeeds) { results[datafeed.id] = await this.startDatafeed(datafeed, start, end); } return results; } - async startDatafeed(datafeed, start, end) { - const result = { started: false }; + async startDatafeed( + datafeed: ModuleDataFeed, + start: number | undefined, + end: number | undefined + ): Promise { + const result = { started: false } as DatafeedResponse; let opened = false; try { - const openResult = await this.callWithRequest('ml.openJob', { + const openResult = await this.callAsCurrentUser('ml.openJob', { jobId: datafeed.config.job_id, }); opened = openResult.opened; @@ -622,7 +719,7 @@ export class DataRecognizer { } if (opened) { try { - const duration = { start: 0 }; + const duration: { start: number; end?: number } = { start: 0 }; if (start !== undefined) { duration.start = start; } @@ -630,7 +727,7 @@ export class DataRecognizer { duration.end = end; } - await this.callWithRequest('ml.startDatafeed', { datafeedId: datafeed.id, ...duration }); + await this.callAsCurrentUser('ml.startDatafeed', { datafeedId: datafeed.id, ...duration }); result.started = true; } catch (error) { result.started = false; @@ -642,7 +739,7 @@ export class DataRecognizer { // merge all of the save results into one result object // which is returned from the endpoint - async updateResults(results, saveResults) { + async updateResults(results: DataRecognizerConfigResponse, saveResults: SaveResults) { // update job results results.jobs.forEach(j => { saveResults.jobs.forEach(j2 => { @@ -669,34 +766,40 @@ export class DataRecognizer { }); // update savedObjects results - Object.keys(results.kibana).forEach(category => { - results.kibana[category].forEach(item => { - const result = saveResults.savedObjects.find(o => o.id === item.id); - if (result !== undefined) { - item.exists = result.exists; - - if (result.error === undefined) { - item.success = true; - } else { - item.success = false; - item.error = result.error; + (Object.keys(results.kibana) as Array).forEach( + category => { + results.kibana[category].forEach(item => { + const result = saveResults.savedObjects.find(o => o.id === item.id); + if (result !== undefined) { + item.exists = result.exists; + + if (result.error === undefined) { + item.success = true; + } else { + item.success = false; + item.error = result.error; + } } - } - }); - }); + }); + } + ); } // creates an empty results object, // listing each job/datafeed/savedObject with a save success boolean - createResultsTemplate(moduleConfig) { - const results = {}; + createResultsTemplate(moduleConfig: Module): DataRecognizerConfigResponse { + const results: DataRecognizerConfigResponse = {} as DataRecognizerConfigResponse; const reducedConfig = { 
jobs: moduleConfig.jobs, datafeeds: moduleConfig.datafeeds, kibana: moduleConfig.kibana, }; - function createResultsItems(configItems, resultItems, index) { + function createResultsItems( + configItems: any[], + resultItems: any, + index: string | number + ): void { resultItems[index] = []; configItems.forEach(j => { resultItems[index].push({ @@ -706,22 +809,23 @@ export class DataRecognizer { }); } - Object.keys(reducedConfig).forEach(i => { + (Object.keys(reducedConfig) as Array).forEach(i => { if (Array.isArray(reducedConfig[i])) { - createResultsItems(reducedConfig[i], results, i); + createResultsItems(reducedConfig[i] as any[], results, i); } else { - results[i] = {}; + results[i] = {} as any; Object.keys(reducedConfig[i]).forEach(k => { - createResultsItems(reducedConfig[i][k], results[i], k); + createResultsItems((reducedConfig[i] as Module['kibana'])[k] as any[], results[i], k); }); } }); + return results; } // if an override index pattern has been specified, // update all of the datafeeds. - updateDatafeedIndices(moduleConfig) { + updateDatafeedIndices(moduleConfig: Module) { // if the supplied index pattern contains a comma, split into multiple indices and // add each one to the datafeed const indexPatternNames = this.indexPatternName.includes(',') @@ -729,7 +833,7 @@ export class DataRecognizer { : [this.indexPatternName]; moduleConfig.datafeeds.forEach(df => { - const newIndices = []; + const newIndices: string[] = []; // the datafeed can contain indexes and indices const currentIndices = df.config.indexes !== undefined ? df.config.indexes : df.config.indices; @@ -749,12 +853,11 @@ export class DataRecognizer { delete df.config.indexes; df.config.indices = newIndices; }); - moduleConfig.datafeeds; } // loop through the custom urls in each job and replace the INDEX_PATTERN_ID // marker for the id of the specified index pattern - updateJobUrlIndexPatterns(moduleConfig) { + updateJobUrlIndexPatterns(moduleConfig: Module) { if (Array.isArray(moduleConfig.jobs)) { moduleConfig.jobs.forEach(job => { // if the job has custom_urls @@ -763,7 +866,10 @@ export class DataRecognizer { job.config.custom_settings.custom_urls.forEach(cUrl => { const url = cUrl.url_value; if (url.match(INDEX_PATTERN_ID)) { - const newUrl = url.replace(new RegExp(INDEX_PATTERN_ID, 'g'), this.indexPatternId); + const newUrl = url.replace( + new RegExp(INDEX_PATTERN_ID, 'g'), + this.indexPatternId as string + ); // update the job's url cUrl.url_value = newUrl; } @@ -775,7 +881,7 @@ export class DataRecognizer { // check the custom urls in the module's jobs to see if they contain INDEX_PATTERN_ID // which needs replacement - doJobUrlsContainIndexPatternId(moduleConfig) { + doJobUrlsContainIndexPatternId(moduleConfig: Module) { if (Array.isArray(moduleConfig.jobs)) { for (const job of moduleConfig.jobs) { // if the job has custom_urls @@ -793,20 +899,23 @@ export class DataRecognizer { // loop through each kibana saved object and replace any INDEX_PATTERN_ID and // INDEX_PATTERN_NAME markers for the id or name of the specified index pattern - updateSavedObjectIndexPatterns(moduleConfig) { + updateSavedObjectIndexPatterns(moduleConfig: Module) { if (moduleConfig.kibana) { Object.keys(moduleConfig.kibana).forEach(category => { - moduleConfig.kibana[category].forEach(item => { - let jsonString = item.config.kibanaSavedObjectMeta.searchSourceJSON; + moduleConfig.kibana[category]!.forEach(item => { + let jsonString = item.config.kibanaSavedObjectMeta!.searchSourceJSON; if (jsonString.match(INDEX_PATTERN_ID)) { - 
jsonString = jsonString.replace(new RegExp(INDEX_PATTERN_ID, 'g'), this.indexPatternId); - item.config.kibanaSavedObjectMeta.searchSourceJSON = jsonString; + jsonString = jsonString.replace( + new RegExp(INDEX_PATTERN_ID, 'g'), + this.indexPatternId as string + ); + item.config.kibanaSavedObjectMeta!.searchSourceJSON = jsonString; } if (category === SAVED_OBJECT_TYPES.VISUALIZATION) { // Look for any INDEX_PATTERN_NAME tokens in visualization visState, // as e.g. Vega visualizations reference the Elasticsearch index pattern directly. - let visStateString = item.config.visState; + let visStateString = String(item.config.visState); if (visStateString !== undefined && visStateString.match(INDEX_PATTERN_NAME)) { visStateString = visStateString.replace( new RegExp(INDEX_PATTERN_NAME, 'g'), @@ -822,21 +931,23 @@ export class DataRecognizer { // ensure the model memory limit for each job is not greater than // the max model memory setting for the cluster - async updateModelMemoryLimits(moduleConfig) { - const { limits } = await this.callWithRequest('ml.info'); + async updateModelMemoryLimits(moduleConfig: Module) { + const { limits } = await this.callAsCurrentUser('ml.info'); const maxMml = limits.max_model_memory_limit; if (maxMml !== undefined) { - const maxBytes = numeral(maxMml.toUpperCase()).value(); + // @ts-ignore + const maxBytes: number = numeral(maxMml.toUpperCase()).value(); if (Array.isArray(moduleConfig.jobs)) { moduleConfig.jobs.forEach(job => { - const mml = get(job, 'config.analysis_limits.model_memory_limit'); + const mml = job.config?.analysis_limits?.model_memory_limit; if (mml !== undefined) { - const mmlBytes = numeral(mml.toUpperCase()).value(); + // @ts-ignore + const mmlBytes: number = numeral(mml.toUpperCase()).value(); if (mmlBytes > maxBytes) { // if the job's mml is over the max, // so set the jobs mml to be the max - job.config.analysis_limits.model_memory_limit = maxMml; + job.config.analysis_limits!.model_memory_limit = maxMml; } } }); @@ -846,11 +957,11 @@ export class DataRecognizer { // check the kibana saved searches JSON in the module to see if they contain INDEX_PATTERN_ID // which needs replacement - doSavedObjectsContainIndexPatternId(moduleConfig) { + doSavedObjectsContainIndexPatternId(moduleConfig: Module) { if (moduleConfig.kibana) { for (const category of Object.keys(moduleConfig.kibana)) { - for (const item of moduleConfig.kibana[category]) { - const jsonString = item.config.kibanaSavedObjectMeta.searchSourceJSON; + for (const item of moduleConfig.kibana[category]!) { + const jsonString = item.config.kibanaSavedObjectMeta!.searchSourceJSON; if (jsonString.match(INDEX_PATTERN_ID)) { return true; } @@ -860,7 +971,7 @@ export class DataRecognizer { return false; } - applyJobConfigOverrides(moduleConfig, jobOverrides, jobPrefix = '') { + applyJobConfigOverrides(moduleConfig: Module, jobOverrides: JobOverride[], jobPrefix = '') { if (jobOverrides === undefined || jobOverrides === null) { return; } @@ -878,8 +989,8 @@ export class DataRecognizer { // separate all the overrides. 
// the overrides which don't contain a job id will be applied to all jobs in the module - const generalOverrides = []; - const jobSpecificOverrides = []; + const generalOverrides: GeneralOverride[] = []; + const jobSpecificOverrides: JobOverride[] = []; overrides.forEach(override => { if (override.job_id === undefined) { @@ -889,7 +1000,7 @@ export class DataRecognizer { } }); - function processArrayValues(source, update) { + function processArrayValues(source: any, update: any) { if (typeof source !== 'object' || typeof update !== 'object') { return; } @@ -935,7 +1046,11 @@ export class DataRecognizer { }); } - applyDatafeedConfigOverrides(moduleConfig, datafeedOverrides, jobPrefix = '') { + applyDatafeedConfigOverrides( + moduleConfig: Module, + datafeedOverrides: DatafeedOverride | DatafeedOverride[], + jobPrefix = '' + ) { if (datafeedOverrides !== undefined && datafeedOverrides !== null) { if (typeof datafeedOverrides !== 'object') { throw Boom.badRequest( @@ -950,8 +1065,8 @@ export class DataRecognizer { // separate all the overrides. // the overrides which don't contain a datafeed id or a job id will be applied to all jobs in the module - const generalOverrides = []; - const datafeedSpecificOverrides = []; + const generalOverrides: GeneralOverride[] = []; + const datafeedSpecificOverrides: DatafeedOverride[] = []; overrides.forEach(o => { if (o.datafeed_id === undefined && o.job_id === undefined) { generalOverrides.push(o); @@ -970,7 +1085,7 @@ export class DataRecognizer { datafeedSpecificOverrides.forEach(o => { // either a job id or datafeed id has been specified, so create a new id // containing either one plus the prefix - const tempId = o.datafeed_id !== undefined ? o.datafeed_id : o.job_id; + const tempId: string = String(o.datafeed_id !== undefined ? o.datafeed_id : o.job_id); const dId = prefixDatafeedId(tempId, jobPrefix); const datafeed = datafeeds.find(d => d.id === dId); diff --git a/x-pack/legacy/plugins/ml/server/models/data_recognizer/index.js b/x-pack/legacy/plugins/ml/server/models/data_recognizer/index.ts similarity index 100% rename from x-pack/legacy/plugins/ml/server/models/data_recognizer/index.js rename to x-pack/legacy/plugins/ml/server/models/data_recognizer/index.ts diff --git a/x-pack/legacy/plugins/ml/server/models/results_service/results_service.ts b/x-pack/legacy/plugins/ml/server/models/results_service/results_service.ts index 5b154991f7cf07..555a58fbb53335 100644 --- a/x-pack/legacy/plugins/ml/server/models/results_service/results_service.ts +++ b/x-pack/legacy/plugins/ml/server/models/results_service/results_service.ts @@ -30,7 +30,7 @@ interface Influencer { fieldValue: any; } -export function resultsServiceProvider(client: RequestHandlerContext | (() => any)) { +export function resultsServiceProvider(client: RequestHandlerContext | ((...args: any[]) => any)) { const callAsCurrentUser = typeof client === 'object' ? client.ml!.mlClient.callAsCurrentUser : client; // Obtains data for the anomalies table, aggregating anomalies by day or hour as requested. @@ -298,7 +298,7 @@ export function resultsServiceProvider(client: RequestHandlerContext | (() => an // Obtains the latest bucket result timestamp by job ID. // Returns data over all jobs unless an optional list of job IDs of interest is supplied. 
// Returned response consists of latest bucket timestamps (ms since Jan 1 1970) against job ID - async function getLatestBucketTimestampByJob(jobIds = []) { + async function getLatestBucketTimestampByJob(jobIds: string[] = []) { const filter: object[] = [ { term: { diff --git a/x-pack/legacy/plugins/ml/server/new_platform/modules.ts b/x-pack/legacy/plugins/ml/server/new_platform/modules.ts new file mode 100644 index 00000000000000..46b7e53c22a053 --- /dev/null +++ b/x-pack/legacy/plugins/ml/server/new_platform/modules.ts @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { schema } from '@kbn/config-schema'; + +export const setupModuleBodySchema = schema.object({ + prefix: schema.maybe(schema.string()), + groups: schema.maybe(schema.arrayOf(schema.string())), + indexPatternName: schema.maybe(schema.string()), + query: schema.maybe(schema.any()), + useDedicatedIndex: schema.maybe(schema.boolean()), + startDatafeed: schema.maybe(schema.boolean()), + start: schema.maybe(schema.number()), + end: schema.maybe(schema.number()), + jobOverrides: schema.maybe(schema.any()), + datafeedOverrides: schema.maybe(schema.any()), +}); + +export const getModuleIdParamSchema = (optional = false) => { + const stringType = schema.string(); + return { moduleId: optional ? schema.maybe(stringType) : stringType }; +}; diff --git a/x-pack/legacy/plugins/ml/server/new_platform/plugin.ts b/x-pack/legacy/plugins/ml/server/new_platform/plugin.ts index 085f2de06b55e6..28b2ddc4c34679 100644 --- a/x-pack/legacy/plugins/ml/server/new_platform/plugin.ts +++ b/x-pack/legacy/plugins/ml/server/new_platform/plugin.ts @@ -238,7 +238,7 @@ export class Plugin { jobValidationRoutes(extendedRouteInitializationDeps); notificationRoutes(routeInitializationDeps); systemRoutes(extendedRouteInitializationDeps); - dataRecognizer(routeInitializationDeps); + dataRecognizer(extendedRouteInitializationDeps); dataVisualizerRoutes(routeInitializationDeps); calendars(routeInitializationDeps); fieldsService(routeInitializationDeps); diff --git a/x-pack/legacy/plugins/ml/server/routes/apidoc.json b/x-pack/legacy/plugins/ml/server/routes/apidoc.json index 1be31e2316228b..3c041bed99214b 100644 --- a/x-pack/legacy/plugins/ml/server/routes/apidoc.json +++ b/x-pack/legacy/plugins/ml/server/routes/apidoc.json @@ -35,12 +35,17 @@ "GetCategories", "FileDataVisualizer", "AnalyzeFile", - "ImportFile" + "ImportFile", "ResultsService", "GetAnomaliesTableData", "GetCategoryDefinition", "GetMaxAnomalyScore", "GetCategoryExamples", - "GetPartitionFieldsValues" + "GetPartitionFieldsValues", + "DataRecognizer", + "RecognizeIndex", + "GetModule", + "SetupModule", + "CheckExistingModuleJobs" ] } diff --git a/x-pack/legacy/plugins/ml/server/routes/modules.js b/x-pack/legacy/plugins/ml/server/routes/modules.js deleted file mode 100644 index e7d7b76aa7133d..00000000000000 --- a/x-pack/legacy/plugins/ml/server/routes/modules.js +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -import { callWithRequestFactory } from '../client/call_with_request_factory'; -import { wrapError } from '../client/errors'; -import { DataRecognizer } from '../models/data_recognizer'; - -function recognize(callWithRequest, indexPatternTitle) { - const dr = new DataRecognizer(callWithRequest); - return dr.findMatches(indexPatternTitle); -} - -function getModule(callWithRequest, moduleId) { - const dr = new DataRecognizer(callWithRequest); - if (moduleId === undefined) { - return dr.listModules(); - } else { - return dr.getModule(moduleId); - } -} - -function saveModuleItems( - callWithRequest, - moduleId, - prefix, - groups, - indexPatternName, - query, - useDedicatedIndex, - startDatafeed, - start, - end, - jobOverrides, - datafeedOverrides, - request -) { - const dr = new DataRecognizer(callWithRequest); - return dr.setupModuleItems( - moduleId, - prefix, - groups, - indexPatternName, - query, - useDedicatedIndex, - startDatafeed, - start, - end, - jobOverrides, - datafeedOverrides, - request - ); -} - -function dataRecognizerJobsExist(callWithRequest, moduleId) { - const dr = new DataRecognizer(callWithRequest); - return dr.dataRecognizerJobsExist(moduleId); -} - -export function dataRecognizer({ commonRouteConfig, elasticsearchPlugin, route }) { - route({ - method: 'GET', - path: '/api/ml/modules/recognize/{indexPatternTitle}', - handler(request) { - const callWithRequest = callWithRequestFactory(elasticsearchPlugin, request); - const indexPatternTitle = request.params.indexPatternTitle; - return recognize(callWithRequest, indexPatternTitle).catch(resp => wrapError(resp)); - }, - config: { - ...commonRouteConfig, - }, - }); - - route({ - method: 'GET', - path: '/api/ml/modules/get_module/{moduleId?}', - handler(request) { - const callWithRequest = callWithRequestFactory(elasticsearchPlugin, request); - let moduleId = request.params.moduleId; - if (moduleId === '') { - // if the endpoint is called with a trailing / - // the moduleId will be an empty string. - moduleId = undefined; - } - return getModule(callWithRequest, moduleId).catch(resp => wrapError(resp)); - }, - config: { - ...commonRouteConfig, - }, - }); - - route({ - method: 'POST', - path: '/api/ml/modules/setup/{moduleId}', - handler(request) { - const callWithRequest = callWithRequestFactory(elasticsearchPlugin, request); - const moduleId = request.params.moduleId; - - const { - prefix, - groups, - indexPatternName, - query, - useDedicatedIndex, - startDatafeed, - start, - end, - jobOverrides, - datafeedOverrides, - } = request.payload; - - return saveModuleItems( - callWithRequest, - moduleId, - prefix, - groups, - indexPatternName, - query, - useDedicatedIndex, - startDatafeed, - start, - end, - jobOverrides, - datafeedOverrides, - request - ).catch(resp => wrapError(resp)); - }, - config: { - ...commonRouteConfig, - }, - }); - - route({ - method: 'GET', - path: '/api/ml/modules/jobs_exist/{moduleId}', - handler(request) { - const callWithRequest = callWithRequestFactory(elasticsearchPlugin, request); - const moduleId = request.params.moduleId; - return dataRecognizerJobsExist(callWithRequest, moduleId).catch(resp => wrapError(resp)); - }, - config: { - ...commonRouteConfig, - }, - }); -} diff --git a/x-pack/legacy/plugins/ml/server/routes/modules.ts b/x-pack/legacy/plugins/ml/server/routes/modules.ts new file mode 100644 index 00000000000000..a40fb1c9149ca1 --- /dev/null +++ b/x-pack/legacy/plugins/ml/server/routes/modules.ts @@ -0,0 +1,221 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { schema } from '@kbn/config-schema'; +import { RequestHandlerContext } from 'kibana/server'; +import { DatafeedOverride, JobOverride } from '../../common/types/modules'; +import { wrapError } from '../client/error_wrapper'; +import { DataRecognizer } from '../models/data_recognizer'; +import { licensePreRoutingFactory } from '../new_platform/licence_check_pre_routing_factory'; +import { getModuleIdParamSchema, setupModuleBodySchema } from '../new_platform/modules'; +import { RouteInitialization } from '../new_platform/plugin'; + +function recognize(context: RequestHandlerContext, indexPatternTitle: string) { + const dr = new DataRecognizer(context); + return dr.findMatches(indexPatternTitle); +} + +function getModule(context: RequestHandlerContext, moduleId: string) { + const dr = new DataRecognizer(context); + if (moduleId === undefined) { + return dr.listModules(); + } else { + return dr.getModule(moduleId); + } +} + +function saveModuleItems( + context: RequestHandlerContext, + moduleId: string, + prefix: string, + groups: string[], + indexPatternName: string, + query: any, + useDedicatedIndex: boolean, + startDatafeed: boolean, + start: number, + end: number, + jobOverrides: JobOverride[], + datafeedOverrides: DatafeedOverride[] +) { + const dr = new DataRecognizer(context); + return dr.setupModuleItems( + moduleId, + prefix, + groups, + indexPatternName, + query, + useDedicatedIndex, + startDatafeed, + start, + end, + jobOverrides, + datafeedOverrides + ); +} + +function dataRecognizerJobsExist(context: RequestHandlerContext, moduleId: string) { + const dr = new DataRecognizer(context); + return dr.dataRecognizerJobsExist(moduleId); +} + +/** + * Recognizer routes. + */ +export function dataRecognizer({ xpackMainPlugin, router }: RouteInitialization) { + /** + * @apiGroup DataRecognizer + * + * @api {get} /api/ml/modules/recognize/:indexPatternTitle Recognize index pattern + * @apiName RecognizeIndex + * @apiDescription Returns the list of modules that matching the index pattern. + * + * @apiParam {String} indexPatternTitle Index pattern title. + */ + router.get( + { + path: '/api/ml/modules/recognize/{indexPatternTitle}', + validate: { + params: schema.object({ + indexPatternTitle: schema.string(), + }), + }, + }, + licensePreRoutingFactory(xpackMainPlugin, async (context, request, response) => { + try { + const { indexPatternTitle } = request.params; + const results = await recognize(context, indexPatternTitle); + + return response.ok({ body: results }); + } catch (e) { + return response.customError(wrapError(e)); + } + }) + ); + + /** + * @apiGroup DataRecognizer + * + * @api {get} /api/ml/modules/get_module/:moduleId Get module + * @apiName GetModule + * @apiDescription Returns module by id. + * + * @apiParam {String} [moduleId] Module id + */ + router.get( + { + path: '/api/ml/modules/get_module/{moduleId?}', + validate: { + params: schema.object({ + ...getModuleIdParamSchema(true), + }), + }, + }, + licensePreRoutingFactory(xpackMainPlugin, async (context, request, response) => { + try { + let { moduleId } = request.params; + if (moduleId === '') { + // if the endpoint is called with a trailing / + // the moduleId will be an empty string. 
+ moduleId = undefined; + } + const results = await getModule(context, moduleId); + + return response.ok({ body: results }); + } catch (e) { + return response.customError(wrapError(e)); + } + }) + ); + + /** + * @apiGroup DataRecognizer + * + * @api {post} /api/ml/modules/setup/:moduleId Setup module + * @apiName SetupModule + * @apiDescription Created module items. + * + * @apiParam {String} moduleId Module id + */ + router.post( + { + path: '/api/ml/modules/setup/{moduleId}', + validate: { + params: schema.object({ + ...getModuleIdParamSchema(), + }), + body: setupModuleBodySchema, + }, + }, + licensePreRoutingFactory(xpackMainPlugin, async (context, request, response) => { + try { + const { moduleId } = request.params; + + const { + prefix, + groups, + indexPatternName, + query, + useDedicatedIndex, + startDatafeed, + start, + end, + jobOverrides, + datafeedOverrides, + } = request.body; + + const result = await saveModuleItems( + context, + moduleId, + prefix, + groups, + indexPatternName, + query, + useDedicatedIndex, + startDatafeed, + start, + end, + jobOverrides, + datafeedOverrides + ); + + return response.ok({ body: result }); + } catch (e) { + return response.customError(wrapError(e)); + } + }) + ); + + /** + * @apiGroup DataRecognizer + * + * @api {post} /api/ml/modules/jobs_exist/:moduleId Check if module jobs exist + * @apiName CheckExistingModuleJobs + * @apiDescription Checks if the jobs in the module have been created. + * + * @apiParam {String} moduleId Module id + */ + router.get( + { + path: '/api/ml/modules/jobs_exist/{moduleId}', + validate: { + params: schema.object({ + ...getModuleIdParamSchema(), + }), + }, + }, + licensePreRoutingFactory(xpackMainPlugin, async (context, request, response) => { + try { + const { moduleId } = request.params; + const result = await dataRecognizerJobsExist(context, moduleId); + + return response.ok({ body: result }); + } catch (e) { + return response.customError(wrapError(e)); + } + }) + ); +}
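
The converted DataRecognizer now takes the whole RequestHandlerContext instead of a callWithRequest function. A minimal sketch of how it can be instantiated and exercised, assuming a stubbed context shaped like the one in data_recognizer.test.ts (the stubs and the relative import path below are illustrative, not a real Kibana context):

import { RequestHandlerContext } from 'kibana/server';
import { DataRecognizer } from './data_recognizer'; // assumed path, relative to the model directory

async function demo() {
  const context = ({
    ml: {
      mlClient: {
        // stubbed Elasticsearch call; a real context proxies this to the ML client
        callAsCurrentUser: async () => ({}),
      },
    },
    core: {
      savedObjects: {
        client: {
          // stubbed saved-objects client used for the Kibana object checks/saves
          find: async () => ({ saved_objects: [] }),
          bulkCreate: async () => ({ saved_objects: [] }),
        },
      },
    },
  } as unknown) as RequestHandlerContext;

  const dr = new DataRecognizer(context);
  const modules = await dr.listModules();                  // all bundled module manifests
  const apache = await dr.getModule('apache_ecs', 'pre-'); // job/datafeed ids prefixed with "pre-"
  return { modules, apache };
}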
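On the route side, the legacy Hapi handlers in routes/modules.js are replaced by New Platform handlers validated with @kbn/config-schema. A hypothetical request body for POST /api/ml/modules/setup/{moduleId}, matching setupModuleBodySchema (every field is optional; the concrete values are illustrative only, except the jobOverrides shape, which mirrors the one used in the test):

const setupModuleRequestBody = {
  prefix: 'pre-',               // applied to job and datafeed ids
  groups: ['apache'],           // illustrative job groups
  indexPatternName: 'filebeat-*', // illustrative index pattern
  query: { match_all: {} },
  useDedicatedIndex: false,
  startDatafeed: true,
  start: 1546300800000,         // ms since epoch
  end: 1577836800000,
  jobOverrides: [{ analysis_limits: { model_memory_limit: '13mb' } }],
  datafeedOverrides: [],
};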