From 9eed9bca599b63e2b000a9e323b1850d04aa649c Mon Sep 17 00:00:00 2001 From: Sebastian Rindom Date: Wed, 14 Sep 2022 16:29:23 +0200 Subject: [PATCH 01/11] fix: setup price list import --- .../src/computerize-amount.js | 13 + packages/medusa-core-utils/src/index.js | 2 +- packages/medusa/src/services/index.ts | 1 + packages/medusa/src/services/price-list.ts | 13 + .../batch-jobs/price-list/import.ts | 544 ++++++++++++++++++ .../strategies/batch-jobs/price-list/types.ts | 90 +++ .../strategies/batch-jobs/product/import.ts | 4 +- 7 files changed, 664 insertions(+), 3 deletions(-) create mode 100644 packages/medusa-core-utils/src/computerize-amount.js create mode 100644 packages/medusa/src/strategies/batch-jobs/price-list/import.ts create mode 100644 packages/medusa/src/strategies/batch-jobs/price-list/types.ts diff --git a/packages/medusa-core-utils/src/computerize-amount.js b/packages/medusa-core-utils/src/computerize-amount.js new file mode 100644 index 0000000000000..6bb57153edc68 --- /dev/null +++ b/packages/medusa-core-utils/src/computerize-amount.js @@ -0,0 +1,13 @@ +import zeroDecimalCurrencies from "./zero-decimal-currencies" + +const computerizeAmount = (amount, currency) => { + let divisor = 100 + + if (zeroDecimalCurrencies.includes(currency.toLowerCase())) { + divisor = 1 + } + + return Math.round(amount * divisor) +} + +export default computerizeAmount diff --git a/packages/medusa-core-utils/src/index.js b/packages/medusa-core-utils/src/index.js index 4b252770d31b6..526a9d2dc74e5 100644 --- a/packages/medusa-core-utils/src/index.js +++ b/packages/medusa-core-utils/src/index.js @@ -4,8 +4,8 @@ export { default as createRequireFromPath } from "./create-require-from-path" export { default as MedusaError } from "./errors" export { default as getConfigFile } from "./get-config-file" export { default as humanizeAmount } from "./humanize-amount" +export { default as computerizeAmount } from "./computerize-amount" export { indexTypes } from "./index-types" export { transformIdableFields } from "./transform-idable-fields" export { default as Validator } from "./validator" export { default as zeroDecimalCurrencies } from "./zero-decimal-currencies" - diff --git a/packages/medusa/src/services/index.ts b/packages/medusa/src/services/index.ts index fe0417f81d14d..89d9aeaa15ec4 100644 --- a/packages/medusa/src/services/index.ts +++ b/packages/medusa/src/services/index.ts @@ -23,6 +23,7 @@ export { default as OauthService } from "./oauth" export { default as OrderService } from "./order" export { default as PaymentProviderService } from "./payment-provider" export { default as PricingService } from "./pricing" +export { default as PriceListService } from "./price-list" export { default as ProductCollectionService } from "./product-collection" export { default as ProductService } from "./product" export { default as ProductTypeService } from "./product-type" diff --git a/packages/medusa/src/services/price-list.ts b/packages/medusa/src/services/price-list.ts index e7083b77913bd..b1ecfb468277d 100644 --- a/packages/medusa/src/services/price-list.ts +++ b/packages/medusa/src/services/price-list.ts @@ -248,6 +248,19 @@ class PriceListService extends TransactionBaseService { }) } + /** + * Removes all prices from a price list and deletes the removed prices in bulk + * @param id - id of the price list + * @returns {Promise} updated Price List + */ + async clearPrices(id: string): Promise { + return await this.atomicPhase_(async (manager: EntityManager) => { + const moneyAmountRepo = 
manager.getCustomRepository(this.moneyAmountRepo_) + const priceList = await this.retrieve(id, { select: ["id"] }) + await moneyAmountRepo.delete({ price_list_id: priceList.id }) + }) + } + /** * Deletes a Price List * Will never fail due to delete being idempotent. diff --git a/packages/medusa/src/strategies/batch-jobs/price-list/import.ts b/packages/medusa/src/strategies/batch-jobs/price-list/import.ts new file mode 100644 index 0000000000000..3d15396df7083 --- /dev/null +++ b/packages/medusa/src/strategies/batch-jobs/price-list/import.ts @@ -0,0 +1,544 @@ +/* eslint-disable valid-jsdoc */ +import { EntityManager } from "typeorm" +import { MedusaError, computerizeAmount } from "medusa-core-utils" + +import { AbstractBatchJobStrategy, IFileService } from "../../../interfaces" +import CsvParser from "../../../services/csv-parser" +import { + BatchJobService, + ProductService, + ProductVariantService, + PriceListService, + RegionService, + SalesChannelService, + ShippingProfileService, +} from "../../../services" +import { CreateBatchJobInput } from "../../../types/batch-job" +import { + InjectedProps, + OperationType, + PriceListImportOperation, + PriceListImportOperationPrice, + ParsedPriceListImportPrice, + PriceListImportBatchJob, + PriceListImportCsvSchema, + TBuiltPriceListImportLine, + TParsedPriceListImportRowData, +} from "./types" +import { FlagRouter } from "../../../utils/flag-router" + +/** + * Process this many variant rows before reporting progress. + */ +const BATCH_SIZE = 100 + +/** + * Default strategy class used for a batch import of products/variants. + */ +class PriceListImportStrategy extends AbstractBatchJobStrategy { + static identifier = "price-list-import-strategy" + + static batchType = "price-list-import" + + private processedCounter: Record = {} + + protected readonly featureFlagRouter_: FlagRouter + + protected manager_: EntityManager + protected transactionManager_: EntityManager | undefined + + protected readonly fileService_: IFileService + + protected readonly regionService_: RegionService + protected readonly priceListService_: PriceListService + protected readonly productService_: ProductService + protected readonly batchJobService_: BatchJobService + protected readonly salesChannelService_: SalesChannelService + protected readonly productVariantService_: ProductVariantService + protected readonly shippingProfileService_: ShippingProfileService + + protected readonly csvParser_: CsvParser< + PriceListImportCsvSchema, + Record, + Record + > + + constructor({ + batchJobService, + productVariantService, + priceListService, + regionService, + fileService, + manager, + }: InjectedProps) { + // eslint-disable-next-line prefer-rest-params + super(arguments[0]) + + this.csvParser_ = new CsvParser(CSVSchema) + + this.manager_ = manager + this.fileService_ = fileService + this.batchJobService_ = batchJobService + this.priceListService_ = priceListService + this.productVariantService_ = productVariantService + this.regionService_ = regionService + } + + async buildTemplate(): Promise { + throw new Error("Not implemented!") + } + + /** + * Create a description of a row on which the error occurred and throw a Medusa error. 
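+   * The thrown message includes the row's variant id and SKU so the offending CSV line can be identified.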
+ * + * @param row - Parsed CSV row data + * @param errorDescription - Concrete error + */ + protected static throwDescriptiveError( + row: TParsedPriceListImportRowData, + errorDescription?: string + ): never { + const message = `Error while processing row with: + variant ID: ${row[PriceListRowKeys.VARIANT_ID]}, + variant SKU: ${row[PriceListRowKeys.VARIANT_SKU]}, + ${errorDescription}` + + throw new MedusaError(MedusaError.Types.INVALID_DATA, message) + } + + async prepareBatchJobForProcessing( + batchJob: CreateBatchJobInput, + reqContext: any + ): Promise { + if (!batchJob.context?.price_list_id) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + "Price list id is required" + ) + } + + return batchJob + } + + /** + * Generate instructions for update/create of products/variants from parsed CSV rows. + * + * @param csvData - An array of parsed CSV rows. + */ + async getImportInstructions( + priceListId: string, + csvData: TParsedPriceListImportRowData[] + ): Promise> { + const manager = this.transactionManager_ ?? this.manager_ + + // Validate that PriceList exists + await this.priceListService_.withTransaction(manager).retrieve(priceListId) + + const pricesToCreate: PriceListImportOperation[] = [] + for (const row of csvData) { + let variantId = row[PriceListRowKeys.VARIANT_ID] + + if (!variantId) { + if (!row[PriceListRowKeys.VARIANT_SKU]) { + PriceListImportStrategy.throwDescriptiveError( + row, + "SKU or ID is required" + ) + } + + const variant = await this.productVariantService_.retrieveBySKU( + `${row[PriceListRowKeys.VARIANT_SKU]}`, + { + select: ["id"], + } + ) + variantId = variant.id + } else { + // Validate that product exists + await this.productVariantService_.retrieve(`${variantId}`, { + select: ["id"], + }) + } + + const pricesOperationData = await this.prepareVariantPrices( + row[PriceListRowKeys.PRICES] as ParsedPriceListImportPrice[] + ) + + pricesToCreate.push({ + variant_id: `${variantId}`, + prices: pricesOperationData, + }) + } + + return { + [OperationType.PricesCreate]: pricesToCreate, + } + } + + /** + * Prepare prices records for insert - find and append region ids to records that contain a region name. + * + * @param prices - the parsed prices to prepare + * @returns the prepared prices. All prices have amount in DB format, currency_code and if applicable region_id. + */ + protected async prepareVariantPrices( + prices: ParsedPriceListImportPrice[] + ): Promise { + const transactionManager = this.transactionManager_ ?? this.manager_ + + const operationalPrices: PriceListImportOperationPrice[] = [] + + for (const price of prices) { + const record: Partial = { + amount: price.amount, + } + + if ("region_name" in price) { + try { + const region = await this.regionService_ + .withTransaction(transactionManager) + .retrieveByName(price.region_name) + + record.region_id = region.id + record.currency_code = region.currency_code + } catch (e) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + `Trying to set a price for a region ${price.region_name} that doesn't exist` + ) + } + } else { + // TODO: Verify that currency is activated for store + record.currency_code = price.currency_code + } + + record.amount = computerizeAmount(record.amount, record.currency_code) + + operationalPrices.push(record as PriceListImportOperationPrice) + } + + return operationalPrices + } + + /** + * A worker method called after a batch job has been created. 
+ * The method parses a CSV file, generates sets of instructions + * for processing and stores these instructions to a JSON file + * which is uploaded to a bucket. + * + * @param batchJobId - An id of a job that is being preprocessed. + */ + async preProcessBatchJob(batchJobId: string): Promise { + const transactionManager = this.transactionManager_ ?? this.manager_ + const batchJob = (await this.batchJobService_ + .withTransaction(transactionManager) + .retrieve(batchJobId)) as PriceListImportBatchJob + + const csvFileKey = batchJob.context.fileKey + const priceListId = batchJob.context.price_list_id + const csvStream = await this.fileService_.getDownloadStream({ + fileKey: csvFileKey, + }) + + let builtData: Record[] + try { + const parsedData = await this.csvParser_.parse(csvStream) + builtData = await this.csvParser_.buildData(parsedData) + } catch (e) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + "The csv file parsing failed due to: " + e.message + ) + } + + const ops = await this.getImportInstructions(priceListId, builtData) + + await this.uploadImportOpsFile(batchJobId, ops) + + let totalOperationCount = 0 + const operationsCounts = {} + Object.keys(ops).forEach((key) => { + operationsCounts[key] = ops[key].length + totalOperationCount += ops[key].length + }) + + await this.batchJobService_ + .withTransaction(transactionManager) + .update(batchJobId, { + result: { + advancement_count: 0, + // number of update/create operations to execute + count: totalOperationCount, + operations: operationsCounts, + stat_descriptors: [ + { + key: "price-list-import-count", + name: "PriceList to import", + message: `${ + ops[OperationType.PricesCreate].length + } prices will be added`, + }, + ], + }, + }) + } + + /** + * The main processing method called after a batch job + * is ready/confirmed for processing. + * + * @param batchJobId - An id of a batch job that is being processed. + */ + async processJob(batchJobId: string): Promise { + return await this.atomicPhase_(async (manager) => { + const batchJob = (await this.batchJobService_ + .withTransaction(manager) + .retrieve(batchJobId)) as PriceListImportBatchJob + + const priceListId = batchJob.context.price_list_id + const txPriceListService = this.priceListService_.withTransaction(manager) + + // Delete Existing prices for price list + await txPriceListService.clearPrices(priceListId) + + // Upload new prices for price list + const priceImportOperations = await this.downloadImportOpsFile( + batchJobId, + OperationType.PricesCreate + ) + + await Promise.all( + priceImportOperations.map(async (op) => { + await txPriceListService.addPrices( + priceListId, + op.prices.map((p) => { + return { + ...p, + variant_id: op.variant_id, + } + }) + ) + }) + ) + + await this.finalize(batchJob) + }) + } + + /** + * Store import ops JSON file to a bucket. + * + * @param batchJobId - An id of the current batch job being processed. + * @param results - An object containing parsed CSV data. + */ + protected async uploadImportOpsFile( + batchJobId: string, + results: Record + ): Promise { + const uploadPromises: Promise[] = [] + const transactionManager = this.transactionManager_ ?? 
this.manager_ + + for (const op in results) { + if (results[op]?.length) { + const { writeStream, promise } = await this.fileService_ + .withTransaction(transactionManager) + .getUploadStreamDescriptor({ + name: PriceListImportStrategy.buildFilename(batchJobId, op), + ext: "json", + }) + + uploadPromises.push(promise) + + writeStream.write(JSON.stringify(results[op])) + writeStream.end() + } + } + + await Promise.all(uploadPromises) + } + + /** + * Remove parsed ops JSON file. + * + * @param batchJobId - An id of the current batch job being processed. + * @param op - Type of import operation. + */ + protected async downloadImportOpsFile( + batchJobId: string, + op: OperationType + ): Promise { + let data = "" + const transactionManager = this.transactionManager_ ?? this.manager_ + + const readableStream = await this.fileService_ + .withTransaction(transactionManager) + .getDownloadStream({ + fileKey: PriceListImportStrategy.buildFilename(batchJobId, op, { + appendExt: ".json", + }), + }) + + return await new Promise((resolve) => { + readableStream.on("data", (chunk) => { + data += chunk + }) + readableStream.on("end", () => { + resolve(JSON.parse(data)) + }) + readableStream.on("error", () => { + // TODO: maybe should throw + resolve([] as PriceListImportOperation[]) + }) + }) + } + + /** + * Delete parsed CSV ops files. + * + * @param batchJobId - An id of the current batch job being processed. + */ + protected async deleteOpsFiles(batchJobId: string): Promise { + const transactionManager = this.transactionManager_ ?? this.manager_ + + const fileServiceTx = this.fileService_.withTransaction(transactionManager) + for (const op of Object.values(OperationType)) { + try { + await fileServiceTx.delete({ + fileKey: PriceListImportStrategy.buildFilename(batchJobId, op, { + appendExt: ".json", + }), + }) + } catch (e) { + // noop + } + } + } + + /** + * Update count of processed data in the batch job `result` column + * and cleanup temp JSON files. + * + * @param batchJob - The current batch job being processed. + */ + private async finalize(batchJob: PriceListImportBatchJob): Promise { + const transactionManager = this.transactionManager_ ?? this.manager_ + + delete this.processedCounter[batchJob.id] + + await this.batchJobService_ + .withTransaction(transactionManager) + .update(batchJob.id, { + result: { advancement_count: batchJob.result.count }, + }) + + const { fileKey } = batchJob.context + + await this.fileService_ + .withTransaction(transactionManager) + .delete({ fileKey }) + + // await this.deleteOpsFiles(batchJob.id) + } + + /** + * Store the progress in the batch job `result` column. + * Method is called after every update/create operation, + * but after every `BATCH_SIZE` processed rows info is written to the DB. + * + * @param batchJobId - An id of the current batch job being processed. + */ + private async updateProgress(batchJobId: string): Promise { + const newCount = (this.processedCounter[batchJobId] || 0) + 1 + this.processedCounter[batchJobId] = newCount + + if (newCount % BATCH_SIZE !== 0) { + return + } + + await this.batchJobService_ + .withTransaction(this.transactionManager_ ?? this.manager_) + .update(batchJobId, { + result: { + advancement_count: newCount, + }, + }) + } + + private static buildFilename( + batchJobId: string, + operation: string, + { appendExt }: { appendExt?: string } = { appendExt: undefined } + ): string { + const filename = `imports/price-lists/ops/${batchJobId}-${operation}` + return appendExt ? 
filename + appendExt : filename + } +} + +export default PriceListImportStrategy + +enum PriceListRowKeys { + VARIANT_ID = "id", + VARIANT_SKU = "sku", + PRICES = "prices", +} + +/** + * Schema definition for the CSV parser. + */ +const CSVSchema: PriceListImportCsvSchema = { + columns: [ + { + name: "Product Variant ID", + mapTo: PriceListRowKeys.VARIANT_ID, + }, + { name: "SKU", mapTo: PriceListRowKeys.VARIANT_SKU }, + { + name: "Price Region", + match: /Price .* \[([A-Z]{3})\]/, + reducer: ( + builtLine: TBuiltPriceListImportLine, + key: string, + value: string + ): TBuiltPriceListImportLine => { + builtLine[PriceListRowKeys.PRICES] = + builtLine[PriceListRowKeys.PRICES] || [] + + if (typeof value === "undefined" || value === null) { + return builtLine + } + + const regionName = key.split(" ")[1] + builtLine[PriceListRowKeys.PRICES].push({ + amount: parseFloat(value), + region_name: regionName, + }) + + return builtLine + }, + }, + { + name: "Price Currency", + match: /Price [A-Z]{3}/, + reducer: ( + builtLine: TBuiltPriceListImportLine, + key: string, + value: string + ): TBuiltPriceListImportLine => { + builtLine[PriceListRowKeys.PRICES] = + builtLine[PriceListRowKeys.PRICES] || [] + + if (typeof value === "undefined" || value === null) { + return builtLine + } + + const currency = key.split(" ")[1] + builtLine[PriceListRowKeys.PRICES].push({ + amount: parseFloat(value), + currency_code: currency.toLowerCase(), + }) + + return builtLine + }, + }, + ], +} diff --git a/packages/medusa/src/strategies/batch-jobs/price-list/types.ts b/packages/medusa/src/strategies/batch-jobs/price-list/types.ts new file mode 100644 index 0000000000000..6280cc6fc249b --- /dev/null +++ b/packages/medusa/src/strategies/batch-jobs/price-list/types.ts @@ -0,0 +1,90 @@ +import { EntityManager } from "typeorm" +import { FileService } from "medusa-interfaces" + +import { + BatchJobService, + PriceListService, + ProductVariantService, + RegionService, +} from "../../../services" +import { CsvSchema } from "../../../interfaces/csv-parser" +import { BatchJob } from "../../../models" + +export type PriceListImportBatchJob = BatchJob & { + context: PriceListImportJobContext + result: Pick & { + operations: { + [K in keyof typeof OperationType]: number + } + } +} + +/** + * DI props for the Product import strategy + */ +export type InjectedProps = { + priceListService: PriceListService + batchJobService: BatchJobService + productVariantService: ProductVariantService + regionService: RegionService + fileService: typeof FileService + manager: EntityManager +} + +/** + * Data shape returned by the CSVParser. + */ +export type TParsedPriceListImportRowData = Record< + string, + string | number | (string | number | object)[] +> + +export type PriceListImportOperationPrice = { + region_id?: string + currency_code: string + amount: number +} + +export type PriceListImportOperation = { + variant_id: string + prices: PriceListImportOperationPrice[] +} + +export type ParsedPriceListImportPrice = + | { + amount: number + currency_code: string + } + | { + amount: number + region_name: string + } + +/** + * CSV parser's row reducer result data shape. + */ +export type TBuiltPriceListImportLine = Record + +/** + * Schema definition of for an import CSV file. + */ +export type PriceListImportCsvSchema = CsvSchema< + TParsedPriceListImportRowData, + TBuiltPriceListImportLine +> + +/** + * Import Batch job context column type. 
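+ * `price_list_id` identifies the price list the prices are imported into, and `fileKey` references the uploaded CSV in the file service.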
+ */ +export type PriceListImportJobContext = { + price_list_id: string + total: number + fileKey: string +} + +/** + * Supported batch job import ops. + */ +export enum OperationType { + PricesCreate = "PRICE_LIST_PRICE_CREATE", +} diff --git a/packages/medusa/src/strategies/batch-jobs/product/import.ts b/packages/medusa/src/strategies/batch-jobs/product/import.ts index f5d4cf5ba30c7..f77ce131fa76d 100644 --- a/packages/medusa/src/strategies/batch-jobs/product/import.ts +++ b/packages/medusa/src/strategies/batch-jobs/product/import.ts @@ -832,7 +832,7 @@ const CSVSchema: ProductImportCsvSchema = { // PRICES { name: "Price Region", - match: /Price .* \[([A-Z]{2,4})\]/, + match: /Price .* \[([A-Z]{3})\]/, reducer: ( builtLine: TParsedProductImportRowData, key, @@ -857,7 +857,7 @@ const CSVSchema: ProductImportCsvSchema = { }, { name: "Price Currency", - match: /Price [A-Z]{2,4}/, + match: /Price [A-Z]{3}/, reducer: ( builtLine: TParsedProductImportRowData, key, From ebc1637241059b6282aea5cae4e218323111815c Mon Sep 17 00:00:00 2001 From: Sebastian Rindom Date: Fri, 16 Sep 2022 13:01:04 +0200 Subject: [PATCH 02/11] fix: unit test price list import --- .../__tests__/batch-jobs/price-list/import.ts | 166 ++++++++++++++++++ 1 file changed, 166 insertions(+) create mode 100644 packages/medusa/src/strategies/__tests__/batch-jobs/price-list/import.ts diff --git a/packages/medusa/src/strategies/__tests__/batch-jobs/price-list/import.ts b/packages/medusa/src/strategies/__tests__/batch-jobs/price-list/import.ts new file mode 100644 index 0000000000000..11b3758776a22 --- /dev/null +++ b/packages/medusa/src/strategies/__tests__/batch-jobs/price-list/import.ts @@ -0,0 +1,166 @@ +import { Readable, PassThrough } from "stream" +import { EntityManager } from "typeorm" + +import { FileService } from "medusa-interfaces" +import { MockManager } from "medusa-test-utils" + +import { User } from "../../../../models" +import { BatchJobStatus } from "../../../../types/batch-job" +import PriceListImportStrategy from "../../../batch-jobs/price-list/import" +import { + PriceListService, + BatchJobService, + ProductVariantService, + RegionService, +} from "../../../../services" +import { InjectedProps } from "../../../batch-jobs/price-list/types" + +let fakeJob = { + id: "batch_plimport", + type: "price-list-import", + context: { + price_list_id: "pl_1234", + fileKey: "csv.key", + }, + results: { advancement_count: 0, count: 6 }, + created_by: "usr_tester", + created_by_user: {} as User, + result: {}, + dry_run: false, + status: BatchJobStatus.PROCESSING, +} + +async function* generateCSVDataForStream() { + yield "Product Variant ID,SKU,Price EUR,Price NA [USD]\n" + yield ",MEDUSA-SWEAT-SMALL,15,13.5\n" + yield "5VxiEkmnPV,,15,13.5\n" +} + +/* ******************** SERVICES MOCK ******************** */ + +const fileServiceMock = { + withTransaction: function () { + return this + }, + delete: jest.fn(), + getDownloadStream: jest.fn().mockImplementation(() => { + return Promise.resolve(Readable.from(generateCSVDataForStream())) + }), + getUploadStreamDescriptor: jest.fn().mockImplementation(() => ({ + writeStream: new PassThrough(), + promise: Promise.resolve(), + })), +} + +const priceListServiceMock = { + withTransaction: function () { + return this + }, + retrieve: jest.fn().mockImplementation(() => { + return Promise.resolve(fakeJob) + }), +} + +const batchJobServiceMock = { + withTransaction: function () { + return this + }, + update: jest.fn().mockImplementation((data) => { + fakeJob = { + ...fakeJob, + ...data, + } 
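+    // merge the update payload into the shared fakeJob so later reads observe the updated state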
+ return Promise.resolve(fakeJob) + }), + complete: jest.fn().mockImplementation(() => { + fakeJob.status = BatchJobStatus.COMPLETED + return Promise.resolve(fakeJob) + }), + confirmed: jest.fn().mockImplementation(() => { + fakeJob.status = BatchJobStatus.CONFIRMED + return Promise.resolve(fakeJob) + }), + retrieve: jest.fn().mockImplementation(() => { + return Promise.resolve(fakeJob) + }), +} + +const productVariantServiceMock = { + withTransaction: function () { + return this + }, + retrieve: jest.fn().mockImplementation(() => + Promise.resolve({ + id: "retrieved-by-id", + }) + ), + retrieveBySKU: jest.fn().mockImplementation(() => + Promise.resolve({ + id: "retrieved-by-sku", + }) + ), +} + +const regionServiceMock = { + withTransaction: function () { + return this + }, + retrieveByName: jest.fn().mockImplementation(() => + Promise.resolve({ + id: "reg_HMnixPlOicAs7aBlXuchAGxd", + name: "Denmark", + currency_code: "DKK", + currency: "DKK", + tax_rate: 0.25, + tax_code: null, + countries: [ + { + id: "1001", + iso_2: "DK", + iso_3: "DNK", + num_code: "208", + name: "denmark", + display_name: "Denmark", + }, + ], + }) + ), +} + +const managerMock = MockManager + +/* ******************** PRICE LIST IMPORT STRATEGY TESTS ******************** */ + +describe("Price List import strategy", () => { + afterAll(() => { + jest.clearAllMocks() + }) + + const priceListImportStrategy = new PriceListImportStrategy({ + manager: managerMock as EntityManager, + fileService: fileServiceMock as typeof FileService, + batchJobService: batchJobServiceMock as unknown as BatchJobService, + priceListService: priceListServiceMock as unknown as PriceListService, + productVariantService: + productVariantServiceMock as unknown as ProductVariantService, + regionService: regionServiceMock as unknown as RegionService, + } as unknown as InjectedProps) + + it("`preProcessBatchJob` should generate import ops and upload them to a bucket using the file service", async () => { + const getImportInstructionsSpy = jest.spyOn( + priceListImportStrategy, + "getImportInstructions" + ) + + await priceListImportStrategy.preProcessBatchJob(fakeJob.id) + + expect(getImportInstructionsSpy).toBeCalledTimes(1) + expect(fileServiceMock.getUploadStreamDescriptor).toBeCalledTimes(1) + expect(fileServiceMock.getUploadStreamDescriptor).toHaveBeenCalledWith({ + ext: "json", + name: `imports/price-lists/ops/${fakeJob.id}-PRICE_LIST_PRICE_CREATE`, + }) + + getImportInstructionsSpy.mockRestore() + }) +}) From e623021b98cf06636628939148f3b6ca14a03985 Mon Sep 17 00:00:00 2001 From: Sebastian Rindom Date: Fri, 16 Sep 2022 14:34:32 +0200 Subject: [PATCH 03/11] fix: add integration test for price list imports --- .../__tests__/batch-jobs/price-list/import.js | 211 ++++++++++++++++++ .../price-list/price-list-import-template.csv | 3 + 2 files changed, 214 insertions(+) create mode 100644 integration-tests/api/__tests__/batch-jobs/price-list/import.js create mode 100644 integration-tests/api/__tests__/batch-jobs/price-list/price-list-import-template.csv diff --git a/integration-tests/api/__tests__/batch-jobs/price-list/import.js b/integration-tests/api/__tests__/batch-jobs/price-list/import.js new file mode 100644 index 0000000000000..31f17fbfb0863 --- /dev/null +++ b/integration-tests/api/__tests__/batch-jobs/price-list/import.js @@ -0,0 +1,211 @@ +const fs = require("fs") +const path = require("path") + +const setupServer = require("../../../../helpers/setup-server") +const { useApi } = require("../../../../helpers/use-api") +const { initDb, useDb } 
= require("../../../../helpers/use-db") + +const adminSeeder = require("../../../helpers/admin-seeder") +const { + simpleRegionFactory, + simplePriceListFactory, + simpleProductFactory, +} = require("../../../factories") + +const adminReqConfig = { + headers: { + Authorization: "Bearer test_token", + }, +} + +jest.setTimeout(1000000) + +function cleanTempData() { + // cleanup tmp ops files + const opsFiles = path.resolve( + "__tests__", + "batch-jobs", + "price-list", + "imports" + ) + + fs.rmSync(opsFiles, { recursive: true, force: true }) +} + +function getImportFile() { + return path.resolve( + "__tests__", + "batch-jobs", + "price-list", + "price-list-import.csv" + ) +} + +function copyTemplateFile() { + const csvTemplate = path.resolve( + "__tests__", + "batch-jobs", + "price-list", + "price-list-import-template.csv" + ) + const destination = getImportFile() + fs.copyFileSync(csvTemplate, destination) +} + +describe("Price list import batch job", () => { + let medusaProcess + let dbConnection + + beforeAll(async () => { + const cwd = path.resolve(path.join(__dirname, "..", "..", "..")) + dbConnection = await initDb({ cwd }) + + cleanTempData() // cleanup if previous process didn't manage to do it + + medusaProcess = await setupServer({ + cwd, + redisUrl: "redis://127.0.0.1:6379", + uploadDir: __dirname, + verbose: false, + }) + }) + + afterAll(async () => { + const db = useDb() + await db.shutdown() + + cleanTempData() + + medusaProcess.kill() + }) + + beforeEach(async () => { + await adminSeeder(dbConnection) + copyTemplateFile() + }) + + afterEach(async () => { + const db = useDb() + await db.teardown() + }) + + it("should import a csv file", async () => { + jest.setTimeout(1000000) + const api = useApi() + + const product = await simpleProductFactory(dbConnection, { + variants: [ + { + id: "test-pl-variant", + }, + { + id: "test-pl-sku-variant", + sku: "pl-sku", + }, + ], + }) + + await simpleRegionFactory(dbConnection, { + id: "test-pl-region", + name: "PL Region", + currency_code: "eur", + }) + + const priceList = await simplePriceListFactory(dbConnection, { + id: "pl_my_price_list", + name: "Test price list", + prices: [ + { + variant_id: product.variants[0].id, + currency_code: "usd", + amount: 1000, + }, + { + variant_id: product.variants[0].id, + currency_code: "eur", + amount: 2080, + }, + ], + }) + + const response = await api.post( + "/admin/batch-jobs", + { + type: "price-list-import", + context: { + price_list_id: priceList.id, + fileKey: "price-list-import.csv", + }, + }, + adminReqConfig + ) + + const batchJobId = response.data.batch_job.id + + expect(batchJobId).toBeTruthy() + + // Pull to check the status until it is completed + let batchJob + let shouldContinuePulling = true + while (shouldContinuePulling) { + const res = await api.get( + `/admin/batch-jobs/${batchJobId}`, + adminReqConfig + ) + + await new Promise((resolve, _) => { + setTimeout(resolve, 1000) + }) + + batchJob = res.data.batch_job + + shouldContinuePulling = !( + batchJob.status === "completed" || batchJob.status === "failed" + ) + } + + expect(batchJob.status).toBe("completed") + + const priceListRes = await api.get( + "/admin/price-lists/pl_my_price_list", + adminReqConfig + ) + + // Verify that file service deleted file + const importFilePath = getImportFile() + expect(fs.existsSync(importFilePath)).toBe(false) + + expect(priceListRes.data.price_list.prices.length).toEqual(5) + expect(priceListRes.data.price_list.prices).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + 
variant_id: "test-pl-variant", + currency_code: "usd", + amount: 1111, + }), + expect.objectContaining({ + variant_id: "test-pl-variant", + currency_code: "eur", + region_id: "test-pl-region", + amount: 2222, + }), + expect.objectContaining({ + variant_id: "test-pl-variant", + currency_code: "jpy", + amount: 3333, + }), + expect.objectContaining({ + variant_id: "test-pl-sku-variant", + currency_code: "usd", + amount: 4444, + }), + expect.objectContaining({ + variant_id: "test-pl-sku-variant", + currency_code: "eur", + region_id: "test-pl-region", + amount: 5555, + }), + ]) + ) + }) +}) diff --git a/integration-tests/api/__tests__/batch-jobs/price-list/price-list-import-template.csv b/integration-tests/api/__tests__/batch-jobs/price-list/price-list-import-template.csv new file mode 100644 index 0000000000000..5489b8ca5ace3 --- /dev/null +++ b/integration-tests/api/__tests__/batch-jobs/price-list/price-list-import-template.csv @@ -0,0 +1,3 @@ +Product Variant ID,SKU,Price USD,Price PL Region [EUR], Price JPY +test-pl-variant,,11.11,22.22,3333 +,pl-sku,44.441,55.55, From 739fa9775a2e75bbe6f34b57bc74c918cac58f5c Mon Sep 17 00:00:00 2001 From: Sebastian Rindom Date: Fri, 16 Sep 2022 14:43:08 +0200 Subject: [PATCH 04/11] fix: add integration test for price list imports --- .../api/factories/simple-product-variant-factory.ts | 2 ++ .../src/strategies/batch-jobs/price-list/import.ts | 9 +++++---- .../medusa/src/strategies/batch-jobs/product/import.ts | 8 +++++--- 3 files changed, 12 insertions(+), 7 deletions(-) diff --git a/integration-tests/api/factories/simple-product-variant-factory.ts b/integration-tests/api/factories/simple-product-variant-factory.ts index 8d6f6f9cc9b45..fe04c38adc01c 100644 --- a/integration-tests/api/factories/simple-product-variant-factory.ts +++ b/integration-tests/api/factories/simple-product-variant-factory.ts @@ -10,6 +10,7 @@ export type ProductVariantFactoryData = { product_id: string id?: string is_giftcard?: boolean + sku?: string inventory_quantity?: number title?: string options?: { option_id: string; value: string }[] @@ -31,6 +32,7 @@ export const simpleProductVariantFactory = async ( const toSave = manager.create(ProductVariant, { id, product_id: data.product_id, + sku: data.sku ?? null, inventory_quantity: typeof data.inventory_quantity !== "undefined" ? 
data.inventory_quantity diff --git a/packages/medusa/src/strategies/batch-jobs/price-list/import.ts b/packages/medusa/src/strategies/batch-jobs/price-list/import.ts index 3d15396df7083..9b60de1d58a81 100644 --- a/packages/medusa/src/strategies/batch-jobs/price-list/import.ts +++ b/packages/medusa/src/strategies/batch-jobs/price-list/import.ts @@ -437,7 +437,7 @@ class PriceListImportStrategy extends AbstractBatchJobStrategy { .withTransaction(transactionManager) .delete({ fileKey }) - // await this.deleteOpsFiles(batchJob.id) + await this.deleteOpsFiles(batchJob.id) } /** @@ -494,7 +494,7 @@ const CSVSchema: PriceListImportCsvSchema = { { name: "SKU", mapTo: PriceListRowKeys.VARIANT_SKU }, { name: "Price Region", - match: /Price .* \[([A-Z]{3})\]/, + match: /Price (.*) \[([A-Z]{3})\]/, reducer: ( builtLine: TBuiltPriceListImportLine, key: string, @@ -507,7 +507,8 @@ const CSVSchema: PriceListImportCsvSchema = { return builtLine } - const regionName = key.split(" ")[1] + const [, regionName] = + key.trim().match(/Price (.*) \[([A-Z]{3})\]/) || [] builtLine[PriceListRowKeys.PRICES].push({ amount: parseFloat(value), region_name: regionName, @@ -531,7 +532,7 @@ const CSVSchema: PriceListImportCsvSchema = { return builtLine } - const currency = key.split(" ")[1] + const currency = key.trim().split(" ")[1] builtLine[PriceListRowKeys.PRICES].push({ amount: parseFloat(value), currency_code: currency.toLowerCase(), diff --git a/packages/medusa/src/strategies/batch-jobs/product/import.ts b/packages/medusa/src/strategies/batch-jobs/product/import.ts index 8aa34cd887909..ef45a1e30058b 100644 --- a/packages/medusa/src/strategies/batch-jobs/product/import.ts +++ b/packages/medusa/src/strategies/batch-jobs/product/import.ts @@ -838,7 +838,7 @@ const CSVSchema: ProductImportCsvSchema = { // PRICES { name: "Price Region", - match: /Price .* \[([A-Z]{3})\]/, + match: /Price (.*) \[([A-Z]{3})\]/, reducer: ( builtLine: TParsedProductImportRowData, key, @@ -850,7 +850,8 @@ const CSVSchema: ProductImportCsvSchema = { return builtLine } - const regionName = key.split(" ")[1] + const [, regionName] = + key.trim().match(/Price (.*) \[([A-Z]{3})\]/) || [] ;( builtLine["variant.prices"] as Record[] ).push({ @@ -875,7 +876,8 @@ const CSVSchema: ProductImportCsvSchema = { return builtLine } - const currency = key.split(" ")[1] + const currency = key.trim().split(" ")[1] + ;( builtLine["variant.prices"] as Record[] ).push({ From 75f81dc2e327ab8e9c355f5be7e5e5a8fd2b88e2 Mon Sep 17 00:00:00 2001 From: Sebastian Rindom Date: Fri, 16 Sep 2022 15:09:05 +0200 Subject: [PATCH 05/11] fix: add test for failing parse --- .../__tests__/batch-jobs/price-list/import.js | 81 ++++++++++++++++++- .../batch-jobs/price-list/invalid-format.csv | 3 + packages/medusa/src/subscribers/batch-job.ts | 4 +- 3 files changed, 85 insertions(+), 3 deletions(-) create mode 100644 integration-tests/api/__tests__/batch-jobs/price-list/invalid-format.csv diff --git a/integration-tests/api/__tests__/batch-jobs/price-list/import.js b/integration-tests/api/__tests__/batch-jobs/price-list/import.js index 31f17fbfb0863..8979f8e533151 100644 --- a/integration-tests/api/__tests__/batch-jobs/price-list/import.js +++ b/integration-tests/api/__tests__/batch-jobs/price-list/import.js @@ -81,7 +81,6 @@ describe("Price list import batch job", () => { beforeEach(async () => { await adminSeeder(dbConnection) - copyTemplateFile() }) afterEach(async () => { @@ -93,6 +92,8 @@ describe("Price list import batch job", () => { jest.setTimeout(1000000) const api = useApi() + 
copyTemplateFile() + const product = await simpleProductFactory(dbConnection, { variants: [ { @@ -208,4 +209,82 @@ describe("Price list import batch job", () => { ]) ) }) + + it("should fail with invalid import format", async () => { + jest.setTimeout(1000000) + const api = useApi() + + const product = await simpleProductFactory(dbConnection, { + variants: [ + { id: "test-pl-variant" }, + { id: "test-pl-sku-variant", sku: "pl-sku" }, + ], + }) + + await simpleRegionFactory(dbConnection, { + id: "test-pl-region", + name: "PL Region", + currency_code: "eur", + }) + + const priceList = await simplePriceListFactory(dbConnection, { + id: "pl_my_price_list", + name: "Test price list", + prices: [ + { + variant_id: product.variants[0].id, + currency_code: "usd", + amount: 1000, + }, + { + variant_id: product.variants[0].id, + currency_code: "eur", + amount: 2080, + }, + ], + }) + + const response = await api.post( + "/admin/batch-jobs", + { + type: "price-list-import", + context: { + price_list_id: priceList.id, + fileKey: "invalid-format.csv", + }, + }, + adminReqConfig + ) + + const batchJobId = response.data.batch_job.id + + expect(batchJobId).toBeTruthy() + + // Pull to check the status until it is completed + let batchJob + let shouldContinuePulling = true + while (shouldContinuePulling) { + const res = await api.get( + `/admin/batch-jobs/${batchJobId}`, + adminReqConfig + ) + + await new Promise((resolve, _) => { + setTimeout(resolve, 1000) + }) + + batchJob = res.data.batch_job + + shouldContinuePulling = !( + batchJob.status === "completed" || batchJob.status === "failed" + ) + } + + expect(batchJob.status).toBe("failed") + expect(batchJob.result).toEqual({ + errors: [ + "The csv file parsing failed due to: Unable to treat column non-descript-column from the csv file. 
No target column found in the provided schema", + ], + }) + }) }) diff --git a/integration-tests/api/__tests__/batch-jobs/price-list/invalid-format.csv b/integration-tests/api/__tests__/batch-jobs/price-list/invalid-format.csv new file mode 100644 index 0000000000000..94cea4cd40e4c --- /dev/null +++ b/integration-tests/api/__tests__/batch-jobs/price-list/invalid-format.csv @@ -0,0 +1,3 @@ +non-descript-column,SKU,Price USD,Price PL Region [EUR], Price JPY +test-pl-variant,,11.11,22.22,3333 +,pl-sku,44.441,55.55, diff --git a/packages/medusa/src/subscribers/batch-job.ts b/packages/medusa/src/subscribers/batch-job.ts index f362b194b643e..002044c89584b 100644 --- a/packages/medusa/src/subscribers/batch-job.ts +++ b/packages/medusa/src/subscribers/batch-job.ts @@ -47,7 +47,7 @@ class BatchJobSubscriber { .preProcessBatchJob(batchJob.id) await batchJobServiceTx.setPreProcessingDone(batchJob.id) } catch (e) { - await this.batchJobService_.setFailed(batchJob.id) + await this.batchJobService_.setFailed(batchJob.id, e.message) throw e } }) @@ -68,7 +68,7 @@ class BatchJobSubscriber { await batchJobStrategy.withTransaction(manager).processJob(batchJob.id) await batchJobServiceTx.complete(batchJob.id) } catch (e) { - await this.batchJobService_.setFailed(batchJob.id) + await this.batchJobService_.setFailed(batchJob.id, e.message) throw e } }) From dcd550d1174b4567e687b1a20b144a377af38828 Mon Sep 17 00:00:00 2001 From: Sebastian Rindom Date: Fri, 16 Sep 2022 15:19:06 +0200 Subject: [PATCH 06/11] fix: product import should take human price amounts --- .../strategies/batch-jobs/product/import.ts | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/packages/medusa/src/strategies/batch-jobs/product/import.ts b/packages/medusa/src/strategies/batch-jobs/product/import.ts index ef45a1e30058b..a0a050769a90e 100644 --- a/packages/medusa/src/strategies/batch-jobs/product/import.ts +++ b/packages/medusa/src/strategies/batch-jobs/product/import.ts @@ -1,6 +1,6 @@ /* eslint-disable valid-jsdoc */ import { EntityManager } from "typeorm" -import { MedusaError } from "medusa-core-utils" +import { computerizeAmount, MedusaError } from "medusa-core-utils" import { AbstractBatchJobStrategy, IFileService } from "../../../interfaces" import CsvParser from "../../../services/csv-parser" @@ -198,11 +198,12 @@ class ProductImportStrategy extends AbstractBatchJobStrategy { if (price.regionName) { try { - record.region_id = ( - await this.regionService_ - .withTransaction(transactionManager) - .retrieveByName(price.regionName) - )?.id + const region = await this.regionService_ + .withTransaction(transactionManager) + .retrieveByName(price.regionName) + + record.region_id = region.id + record.currency_code = region.currency_code } catch (e) { throw new MedusaError( MedusaError.Types.INVALID_DATA, @@ -213,6 +214,7 @@ class ProductImportStrategy extends AbstractBatchJobStrategy { record.currency_code = price.currency_code } + record.amount = computerizeAmount(record.amount, record.currency_code) prices.push(record) } @@ -855,7 +857,7 @@ const CSVSchema: ProductImportCsvSchema = { ;( builtLine["variant.prices"] as Record[] ).push({ - amount: value, + amount: parseFloat(value), regionName, }) @@ -881,7 +883,7 @@ const CSVSchema: ProductImportCsvSchema = { ;( builtLine["variant.prices"] as Record[] ).push({ - amount: value, + amount: parseFloat(value), currency_code: currency, }) @@ -955,3 +957,4 @@ const SalesChannelsSchema: ProductImportCsvSchema = { }, ], } + From 
2171462c7ddfe58f3a21c88ae2dfeedb3caaabc7 Mon Sep 17 00:00:00 2001 From: Sebastian Rindom Date: Fri, 16 Sep 2022 15:27:44 +0200 Subject: [PATCH 07/11] fix: update import test to use human amounts --- .../api/__tests__/batch-jobs/product/product-import-ss.csv | 6 +++--- .../api/__tests__/batch-jobs/product/product-import.csv | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/integration-tests/api/__tests__/batch-jobs/product/product-import-ss.csv b/integration-tests/api/__tests__/batch-jobs/product/product-import-ss.csv index 0316762d9cf4c..f4ec2425afd8c 100644 --- a/integration-tests/api/__tests__/batch-jobs/product/product-import-ss.csv +++ b/integration-tests/api/__tests__/batch-jobs/product/product-import-ss.csv @@ -1,4 +1,4 @@ Product id,Product Handle,Product Title,Product Subtitle,Product Description,Product Status,Product Thumbnail,Product Weight,Product Length,Product Width,Product Height,Product HS Code,Product Origin Country,Product MID Code,Product Material,Product Collection Title,Product Collection Handle,Product Type,Product Tags,Product Discountable,Product External ID,Product Profile Name,Product Profile Type,Variant id,Variant Title,Variant SKU,Variant Barcode,Variant Inventory Quantity,Variant Allow backorder,Variant Manage inventory,Variant Weight,Variant Length,Variant Width,Variant Height,Variant HS Code,Variant Origin Country,Variant MID Code,Variant Material,Price ImportLand [EUR],Price USD,Price denmark [DKK],Price Denmark [DKK],Option 1 Name,Option 1 Value,Option 2 Name,Option 2 Value,Image 1 Url,Sales Channel 1 Name,Sales Channel 2 Name,Sales Channel 1 Id,Sales Channel 2 Id -O6S1YQ6mKm,test-product-product-1,Test product,,test-product-description-1,draft,,,,,,,,,,Test collection 1,test-collection1,test-type-1,123_1,TRUE,,profile_1,profile_type_1,,Test variant,test-sku-1,test-barcode-1,10,FALSE,TRUE,,,,,,,,,100,110,130,,test-option-1,option 1 value red,test-option-2,option 2 value 1,test-image.png,Import Sales Channel 1,Import Sales Channel 2,, -5VxiEkmnPV,test-product-product-2,Test product,,test-product-description,draft,,,,,,,,,,Test collection,test-collection2,test-type,123,TRUE,,profile_2,profile_type_2,,Test variant,test-sku-2,test-barcode-2,10,FALSE,TRUE,,,,,,,,,,,,110,test-option,Option 1 value 1,,,test-image.png,,,, -5VxiEkmnPV,test-product-product-2,Test product,,test-product-description,draft,,,,,,,,,,Test collection,test-collection2,test-type,123,TRUE,,profile_2,profile_type_2,,Test variant,test-sku-3,test-barcode-3,10,FALSE,TRUE,,,,,,,,,,120,,,test-option,Option 1 Value blue,,,test-image.png,,,, \ No newline at end of file +O6S1YQ6mKm,test-product-product-1,Test product,,test-product-description-1,draft,,,,,,,,,,Test collection 1,test-collection1,test-type-1,123_1,TRUE,,profile_1,profile_type_1,,Test variant,test-sku-1,test-barcode-1,10,FALSE,TRUE,,,,,,,,,1.00,1.10,1.30,,test-option-1,option 1 value red,test-option-2,option 2 value 1,test-image.png,Import Sales Channel 1,Import Sales Channel 2,, +5VxiEkmnPV,test-product-product-2,Test product,,test-product-description,draft,,,,,,,,,,Test collection,test-collection2,test-type,123,TRUE,,profile_2,profile_type_2,,Test variant,test-sku-2,test-barcode-2,10,FALSE,TRUE,,,,,,,,,,,,1.10,test-option,Option 1 value 1,,,test-image.png,,,, +5VxiEkmnPV,test-product-product-2,Test product,,test-product-description,draft,,,,,,,,,,Test collection,test-collection2,test-type,123,TRUE,,profile_2,profile_type_2,,Test variant,test-sku-3,test-barcode-3,10,FALSE,TRUE,,,,,,,,,,1.20,,,test-option,Option 1 
Value blue,,,test-image.png,,,, diff --git a/integration-tests/api/__tests__/batch-jobs/product/product-import.csv b/integration-tests/api/__tests__/batch-jobs/product/product-import.csv index d7579724eeaac..ebc8d3d61fa41 100644 --- a/integration-tests/api/__tests__/batch-jobs/product/product-import.csv +++ b/integration-tests/api/__tests__/batch-jobs/product/product-import.csv @@ -1,4 +1,4 @@ Product id,Product Handle,Product Title,Product Subtitle,Product Description,Product Status,Product Thumbnail,Product Weight,Product Length,Product Width,Product Height,Product HS Code,Product Origin Country,Product MID Code,Product Material,Product Collection Title,Product Collection Handle,Product Type,Product Tags,Product Discountable,Product External ID,Product Profile Name,Product Profile Type,Variant id,Variant Title,Variant SKU,Variant Barcode,Variant Inventory Quantity,Variant Allow backorder,Variant Manage inventory,Variant Weight,Variant Length,Variant Width,Variant Height,Variant HS Code,Variant Origin Country,Variant MID Code,Variant Material,Price ImportLand [EUR],Price USD,Price denmark [DKK],Price Denmark [DKK],Option 1 Name,Option 1 Value,Option 2 Name,Option 2 Value,Image 1 Url -O6S1YQ6mKm,test-product-product-1,Test product,,"Hopper Stripes Bedding, available as duvet cover, pillow sham and sheet.\n100% organic cotton, soft and crisp to the touch. Made in Portugal.",draft,,,,,,,,,,Test collection 1,test-collection1,test-type-1,123_1,TRUE,,profile_1,profile_type_1,,Test variant,test-sku-1,test-barcode-1,10,FALSE,TRUE,,,,,,,,,100,110,130,,test-option-1,option 1 value red,test-option-2,option 2 value 1,test-image.png -5VxiEkmnPV,test-product-product-2,Test product,,test-product-description,draft,,,,,,,,,,Test collection,test-collection2,test-type,123,TRUE,,profile_2,profile_type_2,,Test variant,test-sku-2,test-barcode-2,10,FALSE,TRUE,,,,,,,,,,,,110,test-option,Option 1 value 1,,,test-image.png -5VxiEkmnPV,test-product-product-2,Test product,,test-product-description,draft,,,,,,,,,,Test collection,test-collection2,test-type,123,TRUE,,profile_2,profile_type_2,,Test variant,test-sku-3,test-barcode-3,10,FALSE,TRUE,,,,,,,,,,120,,,test-option,Option 1 Value blue,,,test-image.png \ No newline at end of file +O6S1YQ6mKm,test-product-product-1,Test product,,"Hopper Stripes Bedding, available as duvet cover, pillow sham and sheet.\n100% organic cotton, soft and crisp to the touch. 
Made in Portugal.",draft,,,,,,,,,,Test collection 1,test-collection1,test-type-1,123_1,TRUE,,profile_1,profile_type_1,,Test variant,test-sku-1,test-barcode-1,10,FALSE,TRUE,,,,,,,,,1.00,1.10,1.30,,test-option-1,option 1 value red,test-option-2,option 2 value 1,test-image.png +5VxiEkmnPV,test-product-product-2,Test product,,test-product-description,draft,,,,,,,,,,Test collection,test-collection2,test-type,123,TRUE,,profile_2,profile_type_2,,Test variant,test-sku-2,test-barcode-2,10,FALSE,TRUE,,,,,,,,,,,,1.10,test-option,Option 1 value 1,,,test-image.png +5VxiEkmnPV,test-product-product-2,Test product,,test-product-description,draft,,,,,,,,,,Test collection,test-collection2,test-type,123,TRUE,,profile_2,profile_type_2,,Test variant,test-sku-3,test-barcode-3,10,FALSE,TRUE,,,,,,,,,,1.20,,,test-option,Option 1 Value blue,,,test-image.png From 5e4edf21af159d7001ac810130e5d170dae2be2a Mon Sep 17 00:00:00 2001 From: Sebastian Rindom Date: Fri, 16 Sep 2022 15:37:10 +0200 Subject: [PATCH 08/11] Create lazy-apes-unite.md --- .changeset/lazy-apes-unite.md | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 .changeset/lazy-apes-unite.md diff --git a/.changeset/lazy-apes-unite.md b/.changeset/lazy-apes-unite.md new file mode 100644 index 0000000000000..7482c48c81ff0 --- /dev/null +++ b/.changeset/lazy-apes-unite.md @@ -0,0 +1,5 @@ +--- +"medusa-core-utils": minor +--- + +Adds `computerizeAmount` utility to convert human money format into the DB format Medusa uses (integer of lowest currency unit) From 67246c56beba3cd5958528a259d202eaacc150df Mon Sep 17 00:00:00 2001 From: Sebastian Rindom Date: Fri, 16 Sep 2022 15:38:10 +0200 Subject: [PATCH 09/11] Create green-snakes-return.md --- .changeset/green-snakes-return.md | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 .changeset/green-snakes-return.md diff --git a/.changeset/green-snakes-return.md b/.changeset/green-snakes-return.md new file mode 100644 index 0000000000000..c211c1b858a9c --- /dev/null +++ b/.changeset/green-snakes-return.md @@ -0,0 +1,5 @@ +--- +"@medusajs/medusa": minor +--- + +Adds a BatchJob strategy for importing prices to PriceLists From 8db458e319950f8434b2d6282425ab49080c3d46 Mon Sep 17 00:00:00 2001 From: Sebastian Rindom Date: Tue, 20 Sep 2022 13:21:38 +0200 Subject: [PATCH 10/11] fix: pr feedback --- .../batch-jobs/price-list/import.ts | 42 +++++-------------- .../strategies/batch-jobs/price-list/types.ts | 1 - 2 files changed, 10 insertions(+), 33 deletions(-) diff --git a/packages/medusa/src/strategies/batch-jobs/price-list/import.ts b/packages/medusa/src/strategies/batch-jobs/price-list/import.ts index 9b60de1d58a81..4699f86d95d10 100644 --- a/packages/medusa/src/strategies/batch-jobs/price-list/import.ts +++ b/packages/medusa/src/strategies/batch-jobs/price-list/import.ts @@ -1,4 +1,3 @@ -/* eslint-disable valid-jsdoc */ import { EntityManager } from "typeorm" import { MedusaError, computerizeAmount } from "medusa-core-utils" @@ -10,8 +9,6 @@ import { ProductVariantService, PriceListService, RegionService, - SalesChannelService, - ShippingProfileService, } from "../../../services" import { CreateBatchJobInput } from "../../../types/batch-job" import { @@ -53,9 +50,7 @@ class PriceListImportStrategy extends AbstractBatchJobStrategy { protected readonly priceListService_: PriceListService protected readonly productService_: ProductService protected readonly batchJobService_: BatchJobService - protected readonly salesChannelService_: SalesChannelService protected readonly productVariantService_: 
ProductVariantService - protected readonly shippingProfileService_: ShippingProfileService protected readonly csvParser_: CsvParser< PriceListImportCsvSchema, @@ -110,6 +105,8 @@ class PriceListImportStrategy extends AbstractBatchJobStrategy { batchJob: CreateBatchJobInput, reqContext: any ): Promise { + const manager = this.transactionManager_ ?? this.manager_ + if (!batchJob.context?.price_list_id) { throw new MedusaError( MedusaError.Types.INVALID_DATA, @@ -117,24 +114,29 @@ class PriceListImportStrategy extends AbstractBatchJobStrategy { ) } + // Validate that PriceList exists + const priceListId = batchJob.context.price_list_id as string + await this.priceListService_.withTransaction(manager).retrieve(priceListId) + return batchJob } /** - * Generate instructions for update/create of products/variants from parsed CSV rows. + * Generate instructions for creation of prices from parsed CSV rows. * + * @param priceListId - the ID of the price list where the prices will be created * @param csvData - An array of parsed CSV rows. */ async getImportInstructions( priceListId: string, csvData: TParsedPriceListImportRowData[] ): Promise> { - const manager = this.transactionManager_ ?? this.manager_ - // Validate that PriceList exists + const manager = this.transactionManager_ ?? this.manager_ await this.priceListService_.withTransaction(manager).retrieve(priceListId) const pricesToCreate: PriceListImportOperation[] = [] + for (const row of csvData) { let variantId = row[PriceListRowKeys.VARIANT_ID] @@ -440,30 +442,6 @@ class PriceListImportStrategy extends AbstractBatchJobStrategy { await this.deleteOpsFiles(batchJob.id) } - /** - * Store the progress in the batch job `result` column. - * Method is called after every update/create operation, - * but after every `BATCH_SIZE` processed rows info is written to the DB. - * - * @param batchJobId - An id of the current batch job being processed. - */ - private async updateProgress(batchJobId: string): Promise { - const newCount = (this.processedCounter[batchJobId] || 0) + 1 - this.processedCounter[batchJobId] = newCount - - if (newCount % BATCH_SIZE !== 0) { - return - } - - await this.batchJobService_ - .withTransaction(this.transactionManager_ ?? 
this.manager_) - .update(batchJobId, { - result: { - advancement_count: newCount, - }, - }) - } - private static buildFilename( batchJobId: string, operation: string, diff --git a/packages/medusa/src/strategies/batch-jobs/price-list/types.ts b/packages/medusa/src/strategies/batch-jobs/price-list/types.ts index 6280cc6fc249b..62e7003e6a2f4 100644 --- a/packages/medusa/src/strategies/batch-jobs/price-list/types.ts +++ b/packages/medusa/src/strategies/batch-jobs/price-list/types.ts @@ -78,7 +78,6 @@ export type PriceListImportCsvSchema = CsvSchema< */ export type PriceListImportJobContext = { price_list_id: string - total: number fileKey: string } From aebef2e2bf5c0f5d264eda2c60d2a998ead5c4be Mon Sep 17 00:00:00 2001 From: Sebastian Rindom Date: Wed, 28 Sep 2022 12:26:34 +0200 Subject: [PATCH 11/11] fix: cleanup unused props --- .../src/strategies/batch-jobs/price-list/import.ts | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/packages/medusa/src/strategies/batch-jobs/price-list/import.ts b/packages/medusa/src/strategies/batch-jobs/price-list/import.ts index 4699f86d95d10..d59a997da14dd 100644 --- a/packages/medusa/src/strategies/batch-jobs/price-list/import.ts +++ b/packages/medusa/src/strategies/batch-jobs/price-list/import.ts @@ -5,7 +5,6 @@ import { AbstractBatchJobStrategy, IFileService } from "../../../interfaces" import CsvParser from "../../../services/csv-parser" import { BatchJobService, - ProductService, ProductVariantService, PriceListService, RegionService, @@ -22,14 +21,8 @@ import { TBuiltPriceListImportLine, TParsedPriceListImportRowData, } from "./types" -import { FlagRouter } from "../../../utils/flag-router" -/** - * Process this many variant rows before reporting progress. - */ -const BATCH_SIZE = 100 - -/** +/* * Default strategy class used for a batch import of products/variants. */ class PriceListImportStrategy extends AbstractBatchJobStrategy { @@ -39,8 +32,6 @@ class PriceListImportStrategy extends AbstractBatchJobStrategy { private processedCounter: Record = {} - protected readonly featureFlagRouter_: FlagRouter - protected manager_: EntityManager protected transactionManager_: EntityManager | undefined @@ -48,7 +39,6 @@ class PriceListImportStrategy extends AbstractBatchJobStrategy { protected readonly regionService_: RegionService protected readonly priceListService_: PriceListService - protected readonly productService_: ProductService protected readonly batchJobService_: BatchJobService protected readonly productVariantService_: ProductVariantService