Skip to content

Commit

Permalink
Merge pull request #1489 from lnash94/stack_master
Browse files Browse the repository at this point in the history
[master] Fix for StackOverflowError thrown when record types have interdependencies (cyclic references)
  • Loading branch information
NipunaRanasinghe authored Aug 11, 2023
2 parents d0b7856 + fd52ec7 commit a6955c4
Show file tree
Hide file tree
Showing 8 changed files with 575 additions and 34 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ public class OpenAPIComponentMapper {

private final Components components;
private final List<OpenAPIConverterDiagnostic> diagnostics;
private final List<String> visitedTypeDefinitionNames = new ArrayList<>();
private final HashSet<String> visitedTypeDefinitionNames = new HashSet<>();

public OpenAPIComponentMapper(Components components) {
this.components = components;
Expand Down Expand Up @@ -117,7 +117,10 @@ public void createComponentSchema(Map<String, Schema> schema, TypeSymbol typeSym
type.getName().orElseThrow().trim())));
components.setSchemas(schema);
TypeReferenceTypeSymbol referredType = (TypeReferenceTypeSymbol) type;
createComponentSchema(schema, referredType);
if (!visitedTypeDefinitionNames.contains(componentName)) {
visitedTypeDefinitionNames.add(componentName);
createComponentSchema(schema, referredType);
}
break;
case STRING:
schema.put(componentName, new StringSchema().description(typeDoc));
Expand Down Expand Up @@ -147,14 +150,19 @@ public void createComponentSchema(Map<String, Schema> schema, TypeSymbol typeSym
components.setSchemas(schema);
break;
case UNION:
if (typeRef.definition() instanceof EnumSymbol) {
EnumSymbol enumSymbol = (EnumSymbol) typeRef.definition();
Schema enumSchema = mapEnumValues(enumSymbol);
schema.put(componentName, enumSchema.description(typeDoc));
components.setSchemas(schema);
} else {
Schema unionSchema = handleUnionType((UnionTypeSymbol) type, new Schema<>(), componentName);
schema.put(componentName, unionSchema.description(typeDoc));
if (!visitedTypeDefinitionNames.contains(componentName)) {
visitedTypeDefinitionNames.add(componentName);
if (typeRef.definition() instanceof EnumSymbol) {
EnumSymbol enumSymbol = (EnumSymbol) typeRef.definition();
Schema enumSchema = mapEnumValues(enumSymbol);
schema.put(componentName, enumSchema.description(typeDoc));
} else {
Schema unionSchema = handleUnionType((UnionTypeSymbol) type, new Schema<>(), componentName);
schema.put(componentName, unionSchema.description(typeDoc));
}
if (components.getSchemas() != null) {
schema.putAll(components.getSchemas());
}
components.setSchemas(schema);
}
break;
Expand Down Expand Up @@ -217,19 +225,17 @@ private void handleRecordTypeSymbol(RecordTypeSymbol recordTypeSymbol, Map<Strin
visitedTypeDefinitionNames.add(componentName);
List<TypeSymbol> typeInclusions = recordTypeSymbol.typeInclusions();
Map<String, RecordFieldSymbol> rfields = recordTypeSymbol.fieldDescriptors();
HashSet<String> unionKeys = new HashSet<>(rfields.keySet());
if (typeInclusions.isEmpty()) {
generateObjectSchemaFromRecordFields(schema, componentName, rfields, apiDocs);
} else {
mapTypeInclusionToAllOfSchema(schema, componentName, typeInclusions, rfields, unionKeys, apiDocs);
mapTypeInclusionToAllOfSchema(schema, componentName, recordTypeSymbol, apiDocs);
}
}

/**
* Creating API docs related to given record fields.
*/
private Map<String, String> getRecordFieldsAPIDocsMap(TypeReferenceTypeSymbol typeSymbol, String componentName) {

Map<String, String> apiDocs = new LinkedHashMap<>();
Symbol recordSymbol = typeSymbol.definition();
Optional<Documentation> documentation = ((Documentable) recordSymbol).documentation();
Expand Down Expand Up @@ -258,10 +264,12 @@ private Map<String, String> getRecordFieldsAPIDocsMap(TypeReferenceTypeSymbol ty
/**
* This function is to map the ballerina typeInclusion to OAS allOf composedSchema.
*/
private void mapTypeInclusionToAllOfSchema(Map<String, Schema> schema,
String componentName, List<TypeSymbol> typeInclusions, Map<String,
RecordFieldSymbol> rfields, HashSet<String> unionKeys, Map<String, String> apiDocs) {
private void mapTypeInclusionToAllOfSchema(Map<String, Schema> schema, String componentName,
RecordTypeSymbol recordTypeSymbol, Map<String, String> apiDocs) {

List<TypeSymbol> typeInclusions = recordTypeSymbol.typeInclusions();
Map<String, RecordFieldSymbol> recordFields = recordTypeSymbol.fieldDescriptors();
HashSet<String> recordFieldNames = new HashSet<>(recordFields.keySet());
// Map to allOF need to check the status code inclusion there
ComposedSchema allOfSchema = new ComposedSchema();
// Set schema
Expand All @@ -273,19 +281,20 @@ private void mapTypeInclusionToAllOfSchema(Map<String, Schema> schema,
allOfSchemaList.add(referenceSchema);
if (typeInclusion.typeKind().equals(TypeDescKind.TYPE_REFERENCE)) {
TypeReferenceTypeSymbol typeRecord = (TypeReferenceTypeSymbol) typeInclusion;
if (typeRecord.typeDescriptor() instanceof RecordTypeSymbol) {
if (typeRecord.typeDescriptor() instanceof RecordTypeSymbol &&
!isSameRecord(typeInclusionName, typeRecord)) {
RecordTypeSymbol typeInclusionRecord = (RecordTypeSymbol) typeRecord.typeDescriptor();
Map<String, RecordFieldSymbol> tInFields = typeInclusionRecord.fieldDescriptors();
unionKeys.addAll(tInFields.keySet());
unionKeys.removeAll(tInFields.keySet());
recordFieldNames.addAll(tInFields.keySet());
recordFieldNames.removeAll(tInFields.keySet());
generateObjectSchemaFromRecordFields(schema, typeInclusionName, tInFields, apiDocs);
// Update the schema value
schema = this.components.getSchemas();
}
}
}
Map<String, RecordFieldSymbol> filteredField = new LinkedHashMap<>();
rfields.forEach((key1, value) -> unionKeys.stream().filter(key ->
recordFields.forEach((key1, value) -> recordFieldNames.stream().filter(key ->
ConverterCommonUtils.unescapeIdentifier(key1.trim()).
equals(ConverterCommonUtils.unescapeIdentifier(key))).forEach(key ->
filteredField.put(ConverterCommonUtils.unescapeIdentifier(key1), value)));
Expand All @@ -295,6 +304,9 @@ private void mapTypeInclusionToAllOfSchema(Map<String, Schema> schema,
if (schema != null && !schema.containsKey(componentName)) {
// Set properties for the schema
schema.put(componentName, allOfSchema);
if (this.components.getSchemas() != null) {
schema.putAll(this.components.getSchemas());
}
this.components.setSchemas(schema);
} else if (schema == null) {
schema = new LinkedHashMap<>();
Expand Down Expand Up @@ -331,8 +343,9 @@ private ObjectSchema generateObjectSchemaFromRecordFields(Map<String, Schema> sc

if (fieldTypeKind == TypeDescKind.TYPE_REFERENCE) {
TypeReferenceTypeSymbol typeReference = (TypeReferenceTypeSymbol) field.getValue().typeDescriptor();
property = handleTypeReference(schema, typeReference, property, isSameRecord(componentName,
typeReference));
property = handleTypeReference(schema, typeReference, property,
isSameRecord(ConverterCommonUtils.unescapeIdentifier(
typeReference.definition().getName().get()), typeReference));
schema = components.getSchemas();
} else if (fieldTypeKind == TypeDescKind.UNION) {
property = handleUnionType((UnionTypeSymbol) field.getValue().typeDescriptor(), property,
Expand Down Expand Up @@ -360,12 +373,16 @@ private ObjectSchema generateObjectSchemaFromRecordFields(Map<String, Schema> sc
if (componentName != null && schema != null && !schema.containsKey(componentName)) {
// Set properties for the schema
schema.put(componentName, componentSchema);
if (this.components.getSchemas() != null) {
schema.putAll(this.components.getSchemas());
}
this.components.setSchemas(schema);
} else if (schema == null && componentName != null) {
schema = new LinkedHashMap<>();
schema.put(componentName, componentSchema);
this.components.setSchemas(schema);
}
visitedTypeDefinitionNames.add(componentName);
return componentSchema;
}

Expand Down Expand Up @@ -431,10 +448,10 @@ private Schema handleUnionType(UnionTypeSymbol unionType, Schema property, Strin
}
property = ConverterCommonUtils.getOpenApiSchema(union.typeKind().getName().trim());
TypeReferenceTypeSymbol typeReferenceTypeSymbol = (TypeReferenceTypeSymbol) union;
property = handleTypeReference(this.components.getSchemas(), typeReferenceTypeSymbol, property,
property = handleTypeReference(components.getSchemas(), typeReferenceTypeSymbol, property,
isSameRecord(parentComponentName, typeReferenceTypeSymbol));
visitedTypeDefinitionNames.add(typeReferenceTypeSymbol.getName().get());
properties.add(property);
// TODO: uncomment after fixing ballerina lang union type handling issue
} else if (union.typeKind() == TypeDescKind.UNION) {
property = handleUnionType((UnionTypeSymbol) union, property, parentComponentName);
properties.add(property);
Expand All @@ -447,9 +464,10 @@ private Schema handleUnionType(UnionTypeSymbol unionType, Schema property, Strin
Schema openApiSchema = ConverterCommonUtils.getOpenApiSchema(typeDescKind.getName());
property = new ObjectSchema().additionalProperties(openApiSchema);
properties.add(property);
Map<String, Schema> schemas = components.getSchemas();
if (schemas != null) {
if (components.getSchemas() != null) {
Map<String, Schema> schemas = components.getSchemas();
schemas.put(parentComponentName, property);
components.setSchemas(schemas);
} else {
Map<String, Schema> schema = new HashMap<>();
schema.put(parentComponentName, property);
Expand All @@ -460,7 +478,6 @@ private Schema handleUnionType(UnionTypeSymbol unionType, Schema property, Strin
properties.add(property);
}
}

property = generateOneOfSchema(property, properties);
if (nullable) {
property.setNullable(true);
Expand Down Expand Up @@ -514,6 +531,7 @@ private Schema mapEnumValues(EnumSymbol enumSymbol) {
*/
private ArraySchema mapArrayToArraySchema(Map<String, Schema> schema, TypeSymbol symbol,
String componentName) {
visitedTypeDefinitionNames.add(componentName);
ArraySchema property = new ArraySchema();
int arrayDimensions = 0;
while (symbol instanceof ArrayTypeSymbol) {
Expand Down Expand Up @@ -594,17 +612,17 @@ private Schema getSchemaForUnionType(UnionTypeSymbol symbol, Schema symbolProper
/**
* This util function is to handle the type reference symbol is record type or enum type.
*/
private Schema getSchemaForTypeReferenceSymbol(TypeSymbol arrayType, Schema symbolProperty, String componentName,
Map<String, Schema> schema) {
private Schema getSchemaForTypeReferenceSymbol(TypeSymbol referenceType, Schema symbolProperty,
String componentName, Map<String, Schema> schema) {

if (((TypeReferenceTypeSymbol) arrayType).definition().kind() == SymbolKind.ENUM) {
TypeReferenceTypeSymbol typeRefEnum = (TypeReferenceTypeSymbol) arrayType;
if (((TypeReferenceTypeSymbol) referenceType).definition().kind() == SymbolKind.ENUM) {
TypeReferenceTypeSymbol typeRefEnum = (TypeReferenceTypeSymbol) referenceType;
EnumSymbol enumSymbol = (EnumSymbol) typeRefEnum.definition();
symbolProperty = mapEnumValues(enumSymbol);
} else {
symbolProperty.set$ref(ConverterCommonUtils.unescapeIdentifier(
arrayType.getName().orElseThrow().trim()));
TypeReferenceTypeSymbol typeRecord = (TypeReferenceTypeSymbol) arrayType;
referenceType.getName().orElseThrow().trim()));
TypeReferenceTypeSymbol typeRecord = (TypeReferenceTypeSymbol) referenceType;
if (!isSameRecord(componentName, typeRecord)) {
createComponentSchema(schema, typeRecord);
}
Expand All @@ -624,5 +642,4 @@ private ArraySchema handleArray(int arrayDimensions, Schema property, ArraySchem
}
return arrayProperty;
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -101,6 +101,24 @@ public void testReadOnlyRecord() throws IOException {
TestUtils.compareWithGeneratedFile(ballerinaFilePath, "record/with_read_only_keyword.yaml");
}

@Test(description = "Test for records having cyclic dependencies and same record inclusions")
public void testRecordsWithCyclicDependenciesIncludingSameTypeInclusion() throws IOException {
    // Records that reference each other while also sharing a type inclusion previously
    // recursed without termination; the generated spec must match the expected YAML.
    Path source = RES_DIR.resolve("record/cyclic_record_with_typeInclusion.bal");
    TestUtils.compareWithGeneratedFile(source, "record/cyclic_record_with_typeInclusion.yaml");
}

@Test(description = "Test for record type definitions with interdependencies")
public void testInterdependenceRecordWithTypeRef() throws IOException {
    // Two records whose fields reference each other via type references must not
    // loop forever during component-schema generation.
    Path source = RES_DIR.resolve("record/typeref_records_with_interdependency.bal");
    TestUtils.compareWithGeneratedFile(source, "record/typeref_records_with_interdependency.yaml");
}

@Test(description = "Test for union type with interdependent record members")
public void testInterdependenceRecordWithUnionType() throws IOException {
    // Union members that depend on one another exercise the visited-type guard
    // added for union handling; compare against the expected spec.
    Path source = RES_DIR.resolve("record/union_records_with_interdependency.bal");
    TestUtils.compareWithGeneratedFile(source, "record/union_records_with_interdependency.yaml");
}

@AfterMethod
public void cleanUp() {
TestUtils.deleteDirectory(this.tempDir);
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
# Expected OpenAPI 3.0.1 output for the cyclic-record test resource.
# NOTE(review): leading indentation of this YAML was lost when the page was
# extracted — the nesting below must be restored from the real file; do not
# treat the flattened layout as authoritative.
# The schemas form a cycle: Identifier -> Reference -> Identifier, with both
# including Element via allOf — the case that previously caused a StackOverflowError.
openapi: 3.0.1
info:
title: PayloadV
version: 0.0.0
servers:
- url: "{server}:{port}/payloadV"
variables:
server:
default: http://localhost
port:
default: "9090"
paths:
/fhir/r4/Patient/{id}:
get:
operationId: getFhirR4PatientId
parameters:
- name: id
in: path
required: true
schema:
type: string
responses:
"200":
description: Ok
content:
application/fhir+json:
schema:
$ref: '#/components/schemas/Patient'
application/fhir+xml:
schema:
$ref: '#/components/schemas/Patient'
components:
schemas:
# Identifier includes Element (allOf) and points back to Reference via `assigner`.
Identifier:
allOf:
- $ref: '#/components/schemas/Element'
- type: object
properties:
value:
type: string
assigner:
$ref: '#/components/schemas/Reference'
id:
type: string
element:
type: integer
format: int64
# Reference includes Element (allOf) and points back to Identifier — the cycle.
Reference:
allOf:
- $ref: '#/components/schemas/Element'
- type: object
properties:
reference:
type: string
type:
type: string
identifier:
$ref: '#/components/schemas/Identifier'
display:
type: string
# Shared type inclusion used by both Identifier and Reference.
Element:
type: object
properties:
id:
type: string
element:
type: integer
format: int64
Patient:
type: object
properties:
id:
type: string
ref:
$ref: '#/components/schemas/Reference'
Loading

0 comments on commit a6955c4

Please sign in to comment.