Skip to content

Commit

Permalink
[7.x] [Reporting]: Move router + license checks to new platform (elas…
Browse files Browse the repository at this point in the history
…tic#66331) (elastic#67818)

* [Reporting]: Move router + license checks to new platform (elastic#66331)

* WIP: Move routes to new API, license and other checks inbound

* Move license checks over to np licensing observable

* Fix license checks + remove older modules

* Fixing check_license tests, move to TS/Jest

* Fix licensing setup for mocks

* Move job.test.ts over to np

* WIP: move user checks to higher-order func

* Move more handler logic over to Response factory vs Boom

* Major refactor to consolidate types, remove facades, and update helpers

* Fix validation for dates in immediate exports

* Linter fix on check license test

* Fix job generation tests

* Move deps => setupDeps

* fix api test

* fix jobs test

* authorized_user_pre_routing and tests

* Fixing duplicate identifiers

* Fix licensing implementation changes

* WIP: Moving license over to async/observables

* Fix disabled-security case

* finish auth_user_pre_routing cleanup - no more license check

* WIP: Fixing final api tests

* Trying to get schema differences in alignment

* Reverting back to previous generation handler

* Fix final API tests

* Final API test fixes, few more hardening tests and better error messages

* Simplify lower-level module implementation (core only interface) + test updates

* Push some core logic into plugin

* Move some core logic up to plugin

* Marking private setupDeps + downstream fixes

* revert logger as a param

Co-authored-by: Timothy Sullivan <[email protected]>
# Conflicts:
#	x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/create_job/index.ts
#	x-pack/legacy/plugins/reporting/server/routes/generation.ts

* Add back in legacy /viz /search and /dashboard routes

* Add back in and fix compatibility shim/tests
  • Loading branch information
Joel Griffith authored Jun 2, 2020
1 parent 3ea33a3 commit 0347989
Show file tree
Hide file tree
Showing 46 changed files with 1,600 additions and 1,786 deletions.
1 change: 1 addition & 0 deletions x-pack/legacy/plugins/reporting/common/constants.ts
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ export const WHITELISTED_JOB_CONTENT_TYPES = [
'application/pdf',
CONTENT_TYPE_CSV,
'image/png',
'text/plain',
];

// See:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,30 +4,27 @@
* you may not use this file except in compliance with the Elastic License.
*/

import { KibanaRequest, RequestHandlerContext } from 'src/core/server';
import { ReportingCore } from '../../../server';
import { cryptoFactory } from '../../../server/lib';
import {
ConditionalHeaders,
CreateJobFactory,
ESQueueCreateJobFn,
RequestFacade,
} from '../../../server/types';
import { CreateJobFactory, ESQueueCreateJobFn } from '../../../server/types';
import { JobParamsDiscoverCsv } from '../types';

export const createJobFactory: CreateJobFactory<ESQueueCreateJobFn<
JobParamsDiscoverCsv
>> = function createJobFactoryFn(reporting: ReportingCore) {
const config = reporting.getConfig();
const crypto = cryptoFactory(config.get('encryptionKey'));
const setupDeps = reporting.getPluginSetupDeps();

return async function createJob(
jobParams: JobParamsDiscoverCsv,
headers: ConditionalHeaders['headers'],
request: RequestFacade
context: RequestHandlerContext,
request: KibanaRequest
) {
const serializedEncryptedHeaders = await crypto.encrypt(headers);
const serializedEncryptedHeaders = await crypto.encrypt(request.headers);

const savedObjectsClient = request.getSavedObjectsClient();
const savedObjectsClient = context.core.savedObjects.client;
const indexPatternSavedObject = await savedObjectsClient.get(
'index-pattern',
jobParams.indexPatternId!
Expand All @@ -36,7 +33,7 @@ export const createJobFactory: CreateJobFactory<ESQueueCreateJobFn<
return {
headers: serializedEncryptedHeaders,
indexPatternSavedObject,
basePath: request.getBasePath(),
basePath: setupDeps.basePath(request),
...jobParams,
};
};
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,11 @@

import { notFound, notImplemented } from 'boom';
import { get } from 'lodash';
import { KibanaRequest, RequestHandlerContext } from 'src/core/server';
import { CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../../common/constants';
import { ReportingCore } from '../../../../server';
import { cryptoFactory, LevelLogger } from '../../../../server/lib';
import { CreateJobFactory, RequestFacade, TimeRangeParams } from '../../../../server/types';
import { CreateJobFactory, TimeRangeParams } from '../../../../server/types';
import {
JobDocPayloadPanelCsv,
JobParamsPanelCsv,
Expand All @@ -23,8 +24,9 @@ import { createJobSearch } from './create_job_search';

export type ImmediateCreateJobFn<JobParamsType> = (
jobParams: JobParamsType,
headers: Record<string, string>,
req: RequestFacade
headers: KibanaRequest['headers'],
context: RequestHandlerContext,
req: KibanaRequest
) => Promise<{
type: string | null;
title: string;
Expand All @@ -46,21 +48,21 @@ export const createJobFactory: CreateJobFactory<ImmediateCreateJobFn<

return async function createJob(
jobParams: JobParamsPanelCsv,
headers: any,
req: RequestFacade
headers: KibanaRequest['headers'],
context: RequestHandlerContext,
req: KibanaRequest
): Promise<JobDocPayloadPanelCsv> {
const { savedObjectType, savedObjectId } = jobParams;
const serializedEncryptedHeaders = await crypto.encrypt(headers);
const client = req.getSavedObjectsClient();

const { panel, title, visType }: VisData = await Promise.resolve()
.then(() => client.get(savedObjectType, savedObjectId))
.then(() => context.core.savedObjects.client.get(savedObjectType, savedObjectId))
.then(async (savedObject: SavedObject) => {
const { attributes, references } = savedObject;
const {
kibanaSavedObjectMeta: kibanaSavedObjectMetaJSON,
} = attributes as SavedSearchObjectAttributesJSON;
const { timerange } = req.payload as { timerange: TimeRangeParams };
const { timerange } = req.body as { timerange: TimeRangeParams };

if (!kibanaSavedObjectMetaJSON) {
throw new Error('Could not parse saved object data!');
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,15 +5,11 @@
*/

import { i18n } from '@kbn/i18n';
import { KibanaRequest, RequestHandlerContext } from 'src/core/server';
import { CONTENT_TYPE_CSV, CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../common/constants';
import { ReportingCore } from '../../../server';
import { cryptoFactory, LevelLogger } from '../../../server/lib';
import {
ExecuteJobFactory,
JobDocOutput,
JobDocPayload,
RequestFacade,
} from '../../../server/types';
import { ExecuteJobFactory, JobDocOutput, JobDocPayload } from '../../../server/types';
import { CsvResultFromSearch } from '../../csv/types';
import { FakeRequest, JobDocPayloadPanelCsv, JobParamsPanelCsv, SearchPanel } from '../types';
import { createGenerateCsv } from './lib';
Expand All @@ -25,7 +21,8 @@ import { createGenerateCsv } from './lib';
export type ImmediateExecuteFn<JobParamsType> = (
jobId: null,
job: JobDocPayload<JobParamsType>,
request: RequestFacade
context: RequestHandlerContext,
req: KibanaRequest
) => Promise<JobDocOutput>;

export const executeJobFactory: ExecuteJobFactory<ImmediateExecuteFn<
Expand All @@ -39,7 +36,8 @@ export const executeJobFactory: ExecuteJobFactory<ImmediateExecuteFn<
return async function executeJob(
jobId: string | null,
job: JobDocPayloadPanelCsv,
realRequest?: RequestFacade
context,
req
): Promise<JobDocOutput> {
// There will not be a jobID for "immediate" generation.
// jobID is only for "queued" jobs
Expand All @@ -58,10 +56,11 @@ export const executeJobFactory: ExecuteJobFactory<ImmediateExecuteFn<

jobLogger.debug(`Execute job generating [${visType}] csv`);

let requestObject: RequestFacade | FakeRequest;
if (isImmediate && realRequest) {
let requestObject: KibanaRequest | FakeRequest;

if (isImmediate && req) {
jobLogger.info(`Executing job from Immediate API using request context`);
requestObject = realRequest;
requestObject = req;
} else {
jobLogger.info(`Executing job async using encrypted headers`);
let decryptedHeaders: Record<string, unknown>;
Expand Down Expand Up @@ -103,6 +102,7 @@ export const executeJobFactory: ExecuteJobFactory<ImmediateExecuteFn<
let size = 0;
try {
const generateResults: CsvResultFromSearch = await generateCsv(
context,
requestObject,
visType as string,
panel,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,15 +5,16 @@
*/

import { badRequest } from 'boom';
import { ReportingCore } from '../../../../server';
import { KibanaRequest, RequestHandlerContext } from 'src/core/server';
import { LevelLogger } from '../../../../server/lib';
import { RequestFacade } from '../../../../server/types';
import { ReportingCore } from '../../../../server';
import { FakeRequest, JobParamsPanelCsv, SearchPanel, VisPanel } from '../../types';
import { generateCsvSearch } from './generate_csv_search';

export function createGenerateCsv(reporting: ReportingCore, logger: LevelLogger) {
return async function generateCsv(
request: RequestFacade | FakeRequest,
context: RequestHandlerContext,
request: KibanaRequest | FakeRequest,
visType: string,
panel: VisPanel | SearchPanel,
jobParams: JobParamsPanelCsv
Expand All @@ -26,11 +27,12 @@ export function createGenerateCsv(reporting: ReportingCore, logger: LevelLogger)
switch (visType) {
case 'search':
return await generateCsvSearch(
request as RequestFacade,
reporting,
logger,
context,
request as KibanaRequest,
panel as SearchPanel,
jobParams
jobParams,
logger
);
default:
throw badRequest(`Unsupported or unrecognized saved object type: ${visType}`);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,11 @@
* you may not use this file except in compliance with the Elastic License.
*/

import { IUiSettingsClient, KibanaRequest } from '../../../../../../../../src/core/server';
import {
IUiSettingsClient,
KibanaRequest,
RequestHandlerContext,
} from '../../../../../../../../src/core/server';
import {
esQuery,
EsQueryConfig,
Expand All @@ -13,23 +17,16 @@ import {
Query,
} from '../../../../../../../../src/plugins/data/server';
import { CancellationToken } from '../../../../../../../plugins/reporting/common';
import { ReportingCore } from '../../../../server';
import { LevelLogger } from '../../../../server/lib';
import { RequestFacade } from '../../../../server/types';
import { ReportingCore } from '../../../../server';
import { createGenerateCsv } from '../../../csv/server/lib/generate_csv';
import {
CsvResultFromSearch,
GenerateCsvParams,
JobParamsDiscoverCsv,
SearchRequest,
} from '../../../csv/types';
import {
IndexPatternField,
QueryFilter,
SavedSearchObjectAttributes,
SearchPanel,
SearchSource,
} from '../../types';
import { IndexPatternField, QueryFilter, SearchPanel, SearchSource } from '../../types';
import { getDataSource } from './get_data_source';
import { getFilters } from './get_filters';

Expand All @@ -54,17 +51,16 @@ const getUiSettings = async (config: IUiSettingsClient) => {
};

export async function generateCsvSearch(
req: RequestFacade,
reporting: ReportingCore,
logger: LevelLogger,
context: RequestHandlerContext,
req: KibanaRequest,
searchPanel: SearchPanel,
jobParams: JobParamsDiscoverCsv
jobParams: JobParamsDiscoverCsv,
logger: LevelLogger
): Promise<CsvResultFromSearch> {
const savedObjectsClient = await reporting.getSavedObjectsClient(
KibanaRequest.from(req.getRawRequest())
);
const savedObjectsClient = context.core.savedObjects.client;
const { indexPatternSavedObjectId, timerange } = searchPanel;
const savedSearchObjectAttr = searchPanel.attributes as SavedSearchObjectAttributes;
const savedSearchObjectAttr = searchPanel.attributes;
const { indexPatternSavedObject } = await getDataSource(
savedObjectsClient,
indexPatternSavedObjectId
Expand Down Expand Up @@ -153,9 +149,7 @@ export async function generateCsvSearch(

const config = reporting.getConfig();
const elasticsearch = await reporting.getElasticsearchService();
const { callAsCurrentUser } = elasticsearch.dataClient.asScoped(
KibanaRequest.from(req.getRawRequest())
);
const { callAsCurrentUser } = elasticsearch.dataClient.asScoped(req);
const callCluster = (...params: [string, object]) => callAsCurrentUser(...params);
const uiSettings = await getUiSettings(uiConfig);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ export function getFilters(
let timezone: string | null;

if (indexPatternTimeField) {
if (!timerange) {
if (!timerange || !timerange.min || !timerange.max) {
throw badRequest(
`Time range params are required for index pattern [${indexPatternId}], using time field [${indexPatternTimeField}]`
);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,15 +4,19 @@
* you may not use this file except in compliance with the Elastic License.
*/

import { RequestFacade } from '../../../../server/types';
import { KibanaRequest } from 'src/core/server';
import { JobParamsPanelCsv, JobParamsPostPayloadPanelCsv } from '../../types';

export function getJobParamsFromRequest(
request: RequestFacade,
request: KibanaRequest,
opts: { isImmediate: boolean }
): JobParamsPanelCsv {
const { savedObjectType, savedObjectId } = request.params;
const { timerange, state } = request.payload as JobParamsPostPayloadPanelCsv;
const { savedObjectType, savedObjectId } = request.params as {
savedObjectType: string;
savedObjectId: string;
};
const { timerange, state } = request.body as JobParamsPostPayloadPanelCsv;

const post = timerange || state ? { timerange, state } : undefined;

return {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,28 +5,23 @@
*/

import { validateUrls } from '../../../../common/validate_urls';
import { ReportingCore } from '../../../../server';
import { cryptoFactory } from '../../../../server/lib';
import {
ConditionalHeaders,
CreateJobFactory,
ESQueueCreateJobFn,
RequestFacade,
} from '../../../../server/types';
import { CreateJobFactory, ESQueueCreateJobFn } from '../../../../server/types';
import { JobParamsPNG } from '../../types';

export const createJobFactory: CreateJobFactory<ESQueueCreateJobFn<
JobParamsPNG
>> = function createJobFactoryFn(reporting: ReportingCore) {
>> = function createJobFactoryFn(reporting) {
const config = reporting.getConfig();
const setupDeps = reporting.getPluginSetupDeps();
const crypto = cryptoFactory(config.get('encryptionKey'));

return async function createJob(
{ objectType, title, relativeUrl, browserTimezone, layout }: JobParamsPNG,
headers: ConditionalHeaders['headers'],
request: RequestFacade
{ objectType, title, relativeUrl, browserTimezone, layout },
context,
req
) {
const serializedEncryptedHeaders = await crypto.encrypt(headers);
const serializedEncryptedHeaders = await crypto.encrypt(req.headers);

validateUrls([relativeUrl]);

Expand All @@ -37,7 +32,7 @@ export const createJobFactory: CreateJobFactory<ESQueueCreateJobFn<
headers: serializedEncryptedHeaders,
browserTimezone,
layout,
basePath: request.getBasePath(),
basePath: setupDeps.basePath(req),
forceNow: new Date().toISOString(),
};
};
Expand Down
Loading

0 comments on commit 0347989

Please sign in to comment.