diff --git a/adlchecker/build.gradle b/adlchecker/build.gradle index 3cabe1e92..e0a995718 100644 --- a/adlchecker/build.gradle +++ b/adlchecker/build.gradle @@ -12,7 +12,7 @@ dependencies { } run { - args = ['archetypes']//, '--outputFlat'] + args = ['archetypes', '--lint']//, '--outputFlat'] } -mainClassName='com.nedap.archie.adlchecker.AdlChecker' \ No newline at end of file +mainClassName='com.nedap.archie.adlchecker.AdlChecker' diff --git a/adlchecker/src/main/java/com/nedap/archie/adlchecker/AdlChecker.java b/adlchecker/src/main/java/com/nedap/archie/adlchecker/AdlChecker.java index 0468d2dfe..5635b212e 100644 --- a/adlchecker/src/main/java/com/nedap/archie/adlchecker/AdlChecker.java +++ b/adlchecker/src/main/java/com/nedap/archie/adlchecker/AdlChecker.java @@ -1,5 +1,6 @@ package com.nedap.archie.adlchecker; +import com.google.common.io.CharStreams; import com.nedap.archie.adlparser.ADLParser; import com.nedap.archie.antlr.errors.ANTLRParserMessage; import com.nedap.archie.aom.Archetype; @@ -10,6 +11,7 @@ import com.nedap.archie.rminfo.ArchieRMInfoLookup; import com.nedap.archie.rminfo.ReferenceModels; import com.nedap.archie.serializer.adl.ADLArchetypeSerializer; +import com.nedap.archie.serializer.adl.ADLDefinitionSerializer; import net.sourceforge.argparse4j.ArgumentParsers; import net.sourceforge.argparse4j.impl.Arguments; import net.sourceforge.argparse4j.inf.ArgumentParser; @@ -19,7 +21,10 @@ import java.io.File; import java.io.FileInputStream; +import java.io.FileNotFoundException; import java.io.IOException; +import java.io.InputStreamReader; +import java.io.StringReader; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; @@ -39,6 +44,10 @@ public static void main(String[] args) { .action(Arguments.storeTrue()) .help("if the --outputFlat flag is present, also output the flat ADL"); + parser.addArgument("-l", "--lint") + .action(Arguments.storeTrue()) + .help("if the --lint flag is present, also output the linted ADL, 
which formats and adds missing id codes"); + Namespace ns = null; try { ns = parser.parseArgs(args); @@ -51,11 +60,12 @@ public static void main(String[] args) { parser.printUsage(); parser.printHelp(); } else { - validateArchetypes(ns.getList("path"), ns.getBoolean("outputFlat")); + validateArchetypes(ns.getList("path"), ns.getBoolean("outputFlat"), ns.getBoolean("lint")); } } - private static void validateArchetypes(List directories, boolean printFlatAdl) { + private static void validateArchetypes(List directories, boolean printFlatAdl, boolean lint) { + InMemoryFullArchetypeRepository repository = new InMemoryFullArchetypeRepository(); for (String directory : directories) { System.out.println("step 1: parsing archetypes"); @@ -78,20 +88,42 @@ private static void validateArchetypes(List directories, boolean printFl } } + + if(lint) { + System.out.println("step 3: running archetypes through linter"); + System.out.println(); + for (String directory : directories) { + try { + Files.walk(Paths.get(directory)).forEach((path) -> lint(path)); + } catch (IOException e) { + e.printStackTrace(); + } + } + } } - private static void printValidationResult(ValidationResult result) { - System.out.println(); - System.out.print("============= "); - System.out.print(result.getArchetypeId()); - System.out.print(" "); - if(result.passes()) { - System.out.print("PASSED"); - } else { - System.out.print("FAILED"); + private static void lint(Path path) { + File file = path.toFile(); + if(file.isDirectory()) { + return; } - System.out.print(" ============="); - System.out.println(); + try (FileInputStream stream = new FileInputStream(file)) { + String fileContent = CharStreams.toString(new InputStreamReader(stream)); + System.out.println("linting " + file.getAbsolutePath()); + System.out.println(); + TerminologyContentGenerator generator = new TerminologyContentGenerator(BuiltinReferenceModels.getMetaModels()); + Archetype resultingArchetype = generator.addTerms(fileContent); + 
System.out.println(ADLArchetypeSerializer.serialize(resultingArchetype)); + System.out.println(); + } catch (Exception e) { + e.printStackTrace(); + } + + + } + + private static void printValidationResult(ValidationResult result) { + printHeader(result.getArchetypeId(), result.passes() ? "PASSED" : "FAILED"); for(ValidationMessage error:result.getErrors()) { System.out.println(error.toString()); } @@ -100,31 +132,35 @@ private static void printValidationResult(ValidationResult result) { } } + private static void printHeader(String archetypeId, String status) { + System.out.println(); + System.out.print("============= "); + System.out.print(archetypeId); + System.out.print(" "); + System.out.print(status); + System.out.print(" ============="); + System.out.println(); + System.out.println(); + } + private static void parseArchetype(Path path, InMemoryFullArchetypeRepository repository) { File file = path.toFile(); if(file.isDirectory()) { return; } ADLParser adlParser = new ADLParser(); + adlParser.setLogEnabled(false); try (FileInputStream stream = new FileInputStream(file)) { try { Archetype parsed = adlParser.parse(stream); if(adlParser.getErrors().hasNoErrors()) { repository.addArchetype(parsed); } - if(adlParser.getErrors().hasNoMessages()){ - System.out.println(path.getFileName() + " has no messages, ok!"); - } else { - System.out.println("errors found for " + path.getFileName()); - - for(ANTLRParserMessage message:adlParser.getErrors().getWarnings()) { - System.err.println("warning: " + message.getMessage()); - } - for(ANTLRParserMessage message:adlParser.getErrors().getErrors()) { - System.err.println("error: " + message.getMessage()); - } + if(!adlParser.getErrors().hasNoMessages()){ + printParseErrors(path, adlParser); } } catch (Exception e) { + printParseErrors(path, adlParser); e.printStackTrace(); } } catch (IOException e) { @@ -133,4 +169,27 @@ private static void parseArchetype(Path path, InMemoryFullArchetypeRepository re } } + private static void 
printParseErrors(Path path, ADLParser adlParser) { + if(adlParser.getErrors() == null) { + printHeader(path.getFileName().toString(), "PARSING FAILED"); + return; + } + else if(adlParser.getErrors().hasNoErrors()) { + printHeader(path.getFileName().toString(), "PARSING GENERATED WARNINGS"); + } else { + printHeader(path.getFileName().toString(), "PARSING FAILED"); + } + System.out.println("errors found for " + path.getFileName()); + + if(adlParser.getErrors() != null) { + for (ANTLRParserMessage message : adlParser.getErrors().getWarnings()) { + System.err.println("warning: " + message.getMessage()); + } + for (ANTLRParserMessage message : adlParser.getErrors().getErrors()) { + System.err.println("error: " + message.getMessage()); + } + + } + } + } \ No newline at end of file diff --git a/adlchecker/src/main/java/com/nedap/archie/adlchecker/TerminologyContentGenerator.java b/adlchecker/src/main/java/com/nedap/archie/adlchecker/TerminologyContentGenerator.java new file mode 100644 index 000000000..093b86b77 --- /dev/null +++ b/adlchecker/src/main/java/com/nedap/archie/adlchecker/TerminologyContentGenerator.java @@ -0,0 +1,225 @@ +package com.nedap.archie.adlchecker; + +import com.nedap.archie.adlparser.ADLParser; +import com.nedap.archie.aom.Archetype; +import com.nedap.archie.aom.CAttribute; +import com.nedap.archie.aom.CObject; +import com.nedap.archie.aom.primitives.CTerminologyCode; +import com.nedap.archie.aom.terminology.ArchetypeTerm; +import com.nedap.archie.aom.terminology.ValueSet; +import com.nedap.archie.aom.utils.AOMUtils; +import com.nedap.archie.rminfo.MetaModels; + +import java.io.IOException; +import java.util.Comparator; +import java.util.LinkedList; +import java.util.List; +import java.util.ArrayList; +import java.util.Map; +import java.util.Stack; +import java.util.TreeMap; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * If you create an archetype, generates a line of empty terminology for every missing term in every 
language. + * Adds the comments as default text + */ +public class TerminologyContentGenerator { + + private MetaModels models; + Pattern commentPattern = Pattern.compile(".*\\[(?<idcode>id[0-9]+)(,|\\]).*--(?<comment>.*)"); + + public TerminologyContentGenerator(MetaModels models) { + this.models = models; + } + + public Archetype addTerms(String adlContent) { + ADLParser parser = new ADLParser(); + try { + Archetype archetype = parser.parse(adlContent); + models.selectModel(archetype); + + //We could just run the CodeValidation in the context of an archetype repository, and process the error messages? + //however, these cannot yet be automatically processed because they do not generate an easily readable id-code + //in a separate field, so for now it has been added here + if(parser.getErrors().hasErrors()) { + throw new RuntimeException("parse errors!" + parser.getErrors().toString()); + } + Archetype resultingArchetype = parser.parse(adlContent); //instantiate twice + + walkArchetype(adlContent, archetype, resultingArchetype); + visitValueSets(adlContent, resultingArchetype); + + sortTerminology(resultingArchetype); + return resultingArchetype; + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private void visitValueSets(String adlContent, Archetype resultingArchetype) { + Map<String, ValueSet> valueSets = resultingArchetype.getTerminology().getValueSets(); + for(String acCode:valueSets.keySet()) { + if(!terminologyHasCodeForAllLanguages(resultingArchetype, acCode)) { + addCodeToTerminology(adlContent, resultingArchetype, acCode); + } + for(String atCode:valueSets.get(acCode).getMembers()) { + if(!terminologyHasCodeForAllLanguages(resultingArchetype, atCode)) { + addCodeToTerminology(adlContent, resultingArchetype, atCode); + } + } + } + } + + private void sortTerminology(Archetype resultingArchetype) { + Map<String, Map<String, ArchetypeTerm>> termDefinitions = resultingArchetype.getTerminology().getTermDefinitions(); + for(String language: termDefinitions.keySet()) { + Map<String, ArchetypeTerm> stringArchetypeTermMap = 
termDefinitions.get(language); + Map<String, ArchetypeTerm> sortedArchetypeTermMap = new TreeMap<>(new Comparator<String>() { + @Override + public int compare(String o1, String o2) { + String code1 = o1.substring(0, 2); + String code2 = o2.substring(0, 2); + if(code1.equalsIgnoreCase(code2)) { + int number1 = Integer.parseInt(o1.substring(2)); + int number2 = Integer.parseInt(o2.substring(2)); + return number1 - number2; + } else { + return code2.compareTo(code1); + } + } + }); + sortedArchetypeTermMap.putAll(stringArchetypeTermMap); + termDefinitions.put(language, sortedArchetypeTermMap); + + } + } + + private void walkArchetype(String sourceFile, Archetype archetype, Archetype resultingArchetype) { + LinkedList<CObject> workList = new LinkedList<>(); + workList.push(archetype.getDefinition()); + while(!workList.isEmpty()) { + CObject next = workList.pop(); + if(!terminologyHasCodeForAllLanguages(archetype, next)) { + addCodeToTerminology(sourceFile, resultingArchetype, next.getNodeId()); + } + for(CAttribute attribute:next.getAttributes()) { + for(CObject child:attribute.getChildren()) { + workList.push(child); + if(child instanceof CTerminologyCode) { + checkTerminologyCode(sourceFile, resultingArchetype, (CTerminologyCode) child); + } + } + } + } + } + + private void checkTerminologyCode(String sourceFile, Archetype archetype, CTerminologyCode child) { + for(String constraint:child.getConstraint()) { + if(!terminologyHasCodeForAllLanguages(archetype, constraint)) { + addCodeToTerminology(sourceFile, archetype, constraint); + } + } + } + + private void addCodeToTerminology(String sourceFile, Archetype resultingArchetype, String code) { + String text = getCommentName(sourceFile, code); + if(text == null) { + text = "Add term for me!"; + } + String description = text; + Map<String, Map<String, ArchetypeTerm>> termDefinitions = resultingArchetype.getTerminology().getTermDefinitions(); + for(String language: termDefinitions.keySet()) { + if(termDefinitions.get(language).get(code) == null) { + ArchetypeTerm newCode = new ArchetypeTerm(); + 
newCode.setCode(code); + newCode.setText(text); + newCode.setDescription(description); + termDefinitions.get(language).put(code, newCode); + } + } + } + + /** + * Gets the comment text for the given nodeId. + * Walks through the entire source file each time, so a bit inefficient, but that's fine for now + */ + private String getCommentName(String sourceFile, String nodeId) { + String[] lines = sourceFile.split("\n"); + for(String line:lines) { + Matcher matcher = commentPattern.matcher(line); + if(matcher.matches()) { + String idcode = matcher.group("idcode"); + String comment = matcher.group("comment"); + if(idcode.equalsIgnoreCase(nodeId)) { + return comment.trim(); + } + } + + } + return null; + } + + public boolean terminologyHasCodeForAllLanguages(Archetype archetype, CObject cObject) { + String nodeId = cObject.getNodeId(); + int codeSpecializationDepth = AOMUtils.getSpecializationDepthFromCode(nodeId); + int archetypeSpecializationDepth = archetype.specializationDepth(); + if(codeSpecializationDepth > archetypeSpecializationDepth) { + //this is a validation failure that needs to be fixed. But it's impossible to fix in this archetype. + // It will be logged by the archetype validator + return true; + } else if (cObject.isRoot() || parentIsMultiple(cObject)) { + if( codeSpecializationDepth == archetypeSpecializationDepth && !archetype.getTerminology().hasIdCodeInAllLanguages(nodeId)) { + return false; + } + } + return true; + } + + /** + * Check if terminology has a code. Use only for at- and ac-codes, NOT For id codes!! 
+ * @param archetype + * @param code + * @return + */ + public boolean terminologyHasCodeForAllLanguages(Archetype archetype, String code) { + if(AOMUtils.isIdCode(code)) { + throw new IllegalArgumentException("this method only checks at- and ac- codes, not id codes"); + } + int codeSpecializationDepth = AOMUtils.getSpecializationDepthFromCode(code); + int archetypeSpecializationDepth = archetype.specializationDepth(); + if (codeSpecializationDepth > archetypeSpecializationDepth) { + return true;//not exactly, this is a validation failure that needs to be fixed. log? + } else { + if (codeSpecializationDepth == archetypeSpecializationDepth && !archetype.getTerminology().hasCodeInAllLanguages(code)) { + return false; + } + } + return true; + } + + private boolean parentIsMultiple(CObject cObject) { + if(cObject.getParent() != null) { + + CAttribute parent = cObject.getParent(); + CObject owningObject = parent.getParent(); + if (parent.getDifferentialPath() != null) { + //not supported yet here. 
+ return false; + /*&& + } flatParent != null) { + CAttribute attributeFromParent = (CAttribute) AOMUtils.getDifferentialPathFromParent(flatParent, parent); + if(attributeFromParent != null) { + owningObject = attributeFromParent.getParent(); + }*/ + + } + if(owningObject != null) { + return models.isMultiple(owningObject.getRmTypeName(), parent.getRmAttributeName()); + } + } + return false; + } + +} \ No newline at end of file diff --git a/aom/src/main/java/com/nedap/archie/adlparser/ADLParser.java b/aom/src/main/java/com/nedap/archie/adlparser/ADLParser.java index be57e4373..6cb8c02c4 100644 --- a/aom/src/main/java/com/nedap/archie/adlparser/ADLParser.java +++ b/aom/src/main/java/com/nedap/archie/adlparser/ADLParser.java @@ -7,7 +7,6 @@ import com.nedap.archie.antlr.errors.ANTLRParserErrors; import com.nedap.archie.aom.Archetype; import com.nedap.archie.aom.utils.ArchetypeParsePostProcesser; -import org.antlr.v4.runtime.ANTLRInputStream; import org.antlr.v4.runtime.CharStream; import org.antlr.v4.runtime.*; import org.antlr.v4.runtime.tree.ParseTreeWalker; @@ -15,6 +14,7 @@ import java.io.IOException; import java.io.InputStream; +import java.nio.charset.Charset; /** @@ -48,11 +48,11 @@ public ADLParser(ModelConstraintImposer modelConstraintImposer) { public Archetype parse(String adl) throws IOException { - return parse(new ANTLRInputStream(adl)); + return parse(CharStreams.fromString(adl)); } public Archetype parse(InputStream stream) throws IOException { - return parse(new ANTLRInputStream(new BOMInputStream(stream))); + return parse(CharStreams.fromStream(new BOMInputStream(stream))); } public Archetype parse(CharStream stream) { diff --git a/aom/src/main/java/com/nedap/archie/adlparser/modelconstraints/BMMConstraintImposer.java b/aom/src/main/java/com/nedap/archie/adlparser/modelconstraints/BMMConstraintImposer.java new file mode 100644 index 000000000..1af8ec4b1 --- /dev/null +++ 
b/aom/src/main/java/com/nedap/archie/adlparser/modelconstraints/BMMConstraintImposer.java @@ -0,0 +1,56 @@ +package com.nedap.archie.adlparser.modelconstraints; + +import com.google.common.collect.Sets; +import com.nedap.archie.aom.CAttribute; +import com.nedap.archie.aom.utils.AOMUtils; +import com.nedap.archie.base.Cardinality; +import com.nedap.archie.base.MultiplicityInterval; +import com.nedap.archie.rminfo.MetaModel; +import org.openehr.bmm.core.BmmContainerProperty; +import org.openehr.bmm.core.BmmModel; +import org.openehr.bmm.core.BmmProperty; + +import java.util.HashSet; +import java.util.Set; + +public class BMMConstraintImposer implements ModelConstraintImposer { + + private final BmmModel model; + + private Set nonOrderedContainerTypes = Sets.newHashSet("set", "hash", "bag"); + private Set uniqueContainerTypes = Sets.newHashSet("set", "hash"); + + public BMMConstraintImposer(BmmModel model) { + this.model = model; + } + + @Override + public CAttribute getDefaultAttribute(String typeId, String attribute) { + BmmProperty property = AOMUtils.getPropertyAtPath(model, typeId, attribute); + if(property == null) { + return null; + } + CAttribute result = new CAttribute(); + MultiplicityInterval existence = property.getExistence(); + result.setExistence(existence); + + if(property instanceof BmmContainerProperty) { + BmmContainerProperty containerProperty = (BmmContainerProperty) property; + Cardinality cardinality = new Cardinality(); + cardinality.setInterval(containerProperty.getCardinality()); + String name = containerProperty.getType().getContainerType().getName(); + cardinality.setOrdered(!nonOrderedContainerTypes.contains(name.toLowerCase())); + cardinality.setUnique(uniqueContainerTypes.contains(name.toLowerCase())); + + result.setCardinality(cardinality); + result.setMultiple(true); + + } else { + result.setMultiple(false); + } + + + + return result; + } +} diff --git a/aom/src/main/java/com/nedap/archie/adlparser/treewalkers/BaseTreeWalker.java 
b/aom/src/main/java/com/nedap/archie/adlparser/treewalkers/BaseTreeWalker.java index cb18a0e20..35f0685f6 100644 --- a/aom/src/main/java/com/nedap/archie/adlparser/treewalkers/BaseTreeWalker.java +++ b/aom/src/main/java/com/nedap/archie/adlparser/treewalkers/BaseTreeWalker.java @@ -17,10 +17,18 @@ public void addError(String error) { errors.addError(error); } + public void addError(String warning, int line, int charPositionInLine) { + errors.addError(warning, line, charPositionInLine); + } + public void addWarning(String warning) { errors.addError(warning); } + public void addWarning(String warning, int line, int charPositionInLine) { + errors.addWarning(warning, line, charPositionInLine); + } + public ANTLRParserErrors getErrors() { return errors; } diff --git a/aom/src/main/java/com/nedap/archie/adlparser/treewalkers/CComplexObjectParser.java b/aom/src/main/java/com/nedap/archie/adlparser/treewalkers/CComplexObjectParser.java index 8ef0c5475..0f1b61fcc 100644 --- a/aom/src/main/java/com/nedap/archie/adlparser/treewalkers/CComplexObjectParser.java +++ b/aom/src/main/java/com/nedap/archie/adlparser/treewalkers/CComplexObjectParser.java @@ -251,6 +251,8 @@ private Cardinality parseCardinalityInterval(C_cardinalityContext context) { for(Multiplicity_modContext modContext:modContexts) { if(modContext.ordering_mod() != null) { cardinality.setOrdered(modContext.ordering_mod().SYM_ORDERED() != null); + } else { + cardinality.setOrdered(true);//TODO: this should retrieve it from the RM. This now matches the serializer, but both should be fixed! 
} if(modContext.unique_mod() != null) { cardinality.setUnique(true); diff --git a/aom/src/main/java/com/nedap/archie/adlparser/treewalkers/PrimitivesConstraintParser.java b/aom/src/main/java/com/nedap/archie/adlparser/treewalkers/PrimitivesConstraintParser.java index e18da0582..7ddbf4d10 100644 --- a/aom/src/main/java/com/nedap/archie/adlparser/treewalkers/PrimitivesConstraintParser.java +++ b/aom/src/main/java/com/nedap/archie/adlparser/treewalkers/PrimitivesConstraintParser.java @@ -1,15 +1,14 @@ package com.nedap.archie.adlparser.treewalkers; -import com.nedap.archie.antlr.errors.ANTLRParserErrors; -import com.nedap.archie.adlparser.antlr.ContainedRegexLexer; -import com.nedap.archie.adlparser.antlr.ContainedRegexParser; import com.nedap.archie.adlparser.antlr.AdlLexer; import com.nedap.archie.adlparser.antlr.AdlParser; import com.nedap.archie.adlparser.antlr.AdlParser.Boolean_list_valueContext; import com.nedap.archie.adlparser.antlr.AdlParser.Boolean_valueContext; import com.nedap.archie.adlparser.antlr.AdlParser.String_list_valueContext; import com.nedap.archie.adlparser.antlr.AdlParser.String_valueContext; -import com.nedap.archie.serializer.odin.OdinValueParser; +import com.nedap.archie.adlparser.antlr.ContainedRegexLexer; +import com.nedap.archie.adlparser.antlr.ContainedRegexParser; +import com.nedap.archie.antlr.errors.ANTLRParserErrors; import com.nedap.archie.aom.CPrimitiveObject; import com.nedap.archie.aom.primitives.CBoolean; import com.nedap.archie.aom.primitives.CDate; @@ -19,7 +18,8 @@ import com.nedap.archie.aom.primitives.CTerminologyCode; import com.nedap.archie.aom.primitives.CTime; import com.nedap.archie.base.terminology.TerminologyCode; -import org.antlr.v4.runtime.ANTLRInputStream; +import com.nedap.archie.serializer.odin.OdinValueParser; +import org.antlr.v4.runtime.CharStreams; import org.antlr.v4.runtime.CommonTokenStream; import org.antlr.v4.runtime.tree.TerminalNode; @@ -168,7 +168,7 @@ public CDate parseCDate(AdlParser.C_dateContext 
context) { } public CPrimitiveObject parseRegex(TerminalNode terminalNode) { - ContainedRegexLexer lexer = new ContainedRegexLexer(new ANTLRInputStream(terminalNode.getText())); + ContainedRegexLexer lexer = new ContainedRegexLexer(CharStreams.fromString(terminalNode.getText())); ContainedRegexParser parser = new ContainedRegexParser(new CommonTokenStream(lexer)); ContainedRegexParser.RegexContext regex = parser.regex(); CString result = new CString(); diff --git a/aom/src/main/java/com/nedap/archie/aom/Archetype.java b/aom/src/main/java/com/nedap/archie/aom/Archetype.java index bdd149e3c..7f781afc7 100644 --- a/aom/src/main/java/com/nedap/archie/aom/Archetype.java +++ b/aom/src/main/java/com/nedap/archie/aom/Archetype.java @@ -1,8 +1,11 @@ package com.nedap.archie.aom; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.google.common.base.Strings; import com.nedap.archie.aom.primitives.CTerminologyCode; import com.nedap.archie.aom.terminology.ArchetypeTerm; import com.nedap.archie.aom.terminology.ArchetypeTerminology; +import com.nedap.archie.aom.terminology.ValueSet; import com.nedap.archie.aom.utils.AOMUtils; import com.nedap.archie.aom.utils.ArchetypeParsePostProcesser; import com.nedap.archie.definitions.AdlCodeDefinitions; @@ -23,6 +26,7 @@ import java.util.Objects; import java.util.Set; import java.util.Stack; +import java.util.stream.Collectors; /** * Note: this Archetype does not conform to the UML model completely: @@ -175,7 +179,8 @@ public void addOtherMetadata(String text, String value) { * @return the ArchetypeTerm corresponding to the given CObject in the given language */ public ArchetypeTerm getTerm(CObject object, String language) { - return getTerminology().getTermDefinition(language, object.getNodeId()); + ArchetypeTerminology terminology = getTerminology(); + return terminology == null ? 
null : terminology.getTermDefinition(language, object.getNodeId()); } /** @@ -227,6 +232,7 @@ public boolean isSpecialized() { return parentArchetypeId != null; } + @JsonIgnore public int specializationDepth() { return AOMUtils.getSpecializationDepthFromCode(definition.getNodeId()); } @@ -236,6 +242,7 @@ public int specializationDepth() { * ac codes references in C_TERMINOLOGY_CODE objects and ac codes from value sets keys * @return */ + @JsonIgnore public Set getAllUsedCodes() { Stack workList = new Stack(); Set result = new LinkedHashSet<>(); @@ -254,25 +261,61 @@ public Set getAllUsedCodes() { workList.addAll(attribute.getChildren()); } } - result.addAll(terminology.getValueSets().keySet()); - return result; - } + if(terminology != null && terminology.getValueSets() != null) { + for (ValueSet set : terminology.getValueSets().values()) { + result.add(set.getId()); + for (String code : set.getMembers()) { + result.add(code); + } - public Set getUsedAtCodes() { - Stack workList = new Stack(); - Set result = new LinkedHashSet<>(); - workList.add(definition); - while(!workList.isEmpty()) { - CObject cObject = workList.pop(); - if(!Objects.equals(cObject.getNodeId(), AdlCodeDefinitions.PRIMITIVE_NODE_ID)){ - result.add(cObject.getNodeId()); - } - for(CAttribute attribute:cObject.getAttributes()) { - workList.addAll(attribute.getChildren()); } } + return result; } + @JsonIgnore + public Set getUsedIdCodes() { + return getAllUsedCodes().stream().filter(code -> AOMUtils.isIdCode(code)).collect(Collectors.toSet()); + } + + @JsonIgnore + public Set getUsedValueCodes() { + return getAllUsedCodes().stream().filter(code -> AOMUtils.isValueCode(code)).collect(Collectors.toSet()); + + } + + @JsonIgnore + public Set getUsedValueSetCodes() { + return getAllUsedCodes().stream().filter(code -> AOMUtils.isValidValueSetCode(code)).collect(Collectors.toSet()); + } + + + private String generateNextCode(String prefix, Set usedCodes) { + int specializationDepth = 
this.specializationDepth(); + int maximumIdCode = AOMUtils.getMaximumIdCode(specializationDepth, usedCodes); + return prefix + generateSpecializationDepthCodePrefix(specializationDepth()) + (maximumIdCode+1); + } + + public String generateNextIdCode() { + return generateNextCode(AdlCodeDefinitions.ID_CODE_LEADER, getUsedIdCodes()); + } + + public String generateNextValueCode() { + return generateNextCode(AdlCodeDefinitions.VALUE_CODE_LEADER, getUsedValueCodes()); + } + + public String generateNextValueSetCode() { + return generateNextCode(AdlCodeDefinitions.VALUE_SET_CODE_LEADER, getUsedValueSetCodes()); + + } + + private String generateSpecializationDepthCodePrefix (int specializationDepth) { + String prefix = ""; + for(int i = 0; i < specializationDepth; i++) { + prefix += "0" + AdlCodeDefinitions.SPECIALIZATION_SEPARATOR; + } + return prefix; + } } diff --git a/aom/src/main/java/com/nedap/archie/aom/ArchetypeConstraint.java b/aom/src/main/java/com/nedap/archie/aom/ArchetypeConstraint.java index c4efb57cd..bace5efff 100644 --- a/aom/src/main/java/com/nedap/archie/aom/ArchetypeConstraint.java +++ b/aom/src/main/java/com/nedap/archie/aom/ArchetypeConstraint.java @@ -64,10 +64,12 @@ public String path() { /** * True if this node is the root of the tree. 
*/ + @JsonIgnore public boolean isRoot() { return parent == null; } + @JsonIgnore public abstract boolean isLeaf(); diff --git a/aom/src/main/java/com/nedap/archie/aom/AuthoredResource.java b/aom/src/main/java/com/nedap/archie/aom/AuthoredResource.java index d19ae9d37..bc8ec7061 100644 --- a/aom/src/main/java/com/nedap/archie/aom/AuthoredResource.java +++ b/aom/src/main/java/com/nedap/archie/aom/AuthoredResource.java @@ -1,6 +1,7 @@ package com.nedap.archie.aom; import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; import com.nedap.archie.base.terminology.TerminologyCode; import com.nedap.archie.xml.adapters.ResourceDescriptionAdapter; import com.nedap.archie.xml.adapters.TranslationDetailsAdapter; @@ -99,6 +100,7 @@ public void setTranslations(Map translations) { @XmlElement(name="translations") @XmlJavaTypeAdapter(TranslationDetailsAdapter.class) + @JsonIgnore public List getTranslationList() { return new ArrayList(content.getTranslations().values());} public void setTranslationList(List translationList) { diff --git a/aom/src/main/java/com/nedap/archie/aom/CAttribute.java b/aom/src/main/java/com/nedap/archie/aom/CAttribute.java index 6f32f75b8..79e80e46b 100644 --- a/aom/src/main/java/com/nedap/archie/aom/CAttribute.java +++ b/aom/src/main/java/com/nedap/archie/aom/CAttribute.java @@ -2,8 +2,10 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import com.nedap.archie.aom.utils.AOMUtils; import com.nedap.archie.base.Cardinality; import com.nedap.archie.base.MultiplicityInterval; +import com.nedap.archie.definitions.AdlCodeDefinitions; import com.nedap.archie.paths.PathSegment; import com.nedap.archie.query.APathQuery; @@ -83,6 +85,41 @@ public void setMultiple(boolean multiple) { } public CObject getChild(String nodeId) { + //first don't look through CComplexObject proxies, then if no result, do lookup through the proxies + CObject result = 
getChild(nodeId, false); + if(result == null) { + result = getChild(nodeId, true); + } + return result; + } + + /** + * Get the child cobject with the given nodeid. If it does not exist but a specialized version + * does exist, returns that one. + * If multiple specialized children exist, returns the first it can find. TODO: this should probably be better defined :) + * @param nodeId + * @return + */ + public CObject getPossiblySpecializedChild(String nodeId) { + //if there's an exact node id match, return that first + CObject result = getChild(nodeId, false); + if(result != null) { + return result; + } + for(CObject child:children) { + if(nodeId.equals(child.getNodeId()) || AOMUtils.codesConformant(child.getNodeId(), nodeId)) { + return child; + } else if(child instanceof CArchetypeRoot) { + //TODO: Should we look for specialized archetype roots as well? :) + if (((CArchetypeRoot) child).getArchetypeRef().equals(nodeId)) { + return child; + } + } + } + return null; + } + + private CObject getChild(String nodeId, boolean lookThroughProxies) { for(CObject child:children) { if(nodeId.equals(child.getNodeId())) { return child; @@ -90,7 +127,7 @@ public CObject getChild(String nodeId) { if (((CArchetypeRoot) child).getArchetypeRef().equals(nodeId)) { return child; } - } else if(child instanceof CComplexObjectProxy) { + } else if(lookThroughProxies && child instanceof CComplexObjectProxy) { String targetPath = ((CComplexObjectProxy) child).getTargetPath(); APathQuery aPathQuery = new APathQuery(targetPath); PathSegment lastPathSegment = aPathQuery.getPathSegments().get(aPathQuery.getPathSegments().size() - 1); @@ -187,6 +224,15 @@ public void removeChild(String nodeId) { } } + public void removeChild(CObject child) { + int index = getIndexOfMatchingCObjectChild(child); + if(index > -1) { + children.remove(index); + } + } + + + /** * Replace the child at node nodeId with all the objects from the parameter newChildren. 
* If keepOriginal is true, it will not replace the original, but keep it in place @@ -212,6 +258,24 @@ public void replaceChildren(String nodeId, List newChildren, boolean ke } + public int getIndexOfMatchingCObjectChild(CObject child) { + if(child instanceof CPrimitiveObject) { + return getIndexOfChildWithMatchingRmTypeName(child.getRmTypeName()); + } else { + return getIndexOfChildWithNodeId(child.getNodeId()); + } + } + + public int getIndexOfChildWithMatchingRmTypeName(String rmTypeName) { + for(int i = 0; i < children.size(); i++) { + CObject child = children.get(i); + if(rmTypeName.equals(child.getRmTypeName())) { + return i; + } + } + return -1; + } + public int getIndexOfChildWithNodeId(String nodeId) { for(int i = 0; i < children.size(); i++) { CObject child = children.get(i); @@ -301,6 +365,7 @@ public boolean isProhibited() { @Override + @JsonIgnore public boolean isLeaf() { return children != null && children.size() > 0; } @@ -369,6 +434,7 @@ public Boolean cardinalityConformsTo(CAttribute other) { } } + @JsonIgnore public boolean isSecondOrderConstrained() { return getSocParent() != null || (getParent() != null && getParent().getSocParent() != null); } @@ -379,6 +445,7 @@ public boolean isSecondOrderConstrained() { * calculates sum of all occurrences lower bounds; where no occurrences are stated, 0 is assumed * @return */ + @JsonIgnore public int getAggregateOccurrencesLowerSum() { int sum = 0; for(CObject cObject:getChildren()) { @@ -394,6 +461,7 @@ public int getAggregateOccurrencesLowerSum() { * object, and 1 for all optional objects * @return */ + @JsonIgnore public int getMinimumChildCount() { int result = 0; boolean foundOptional = false; diff --git a/aom/src/main/java/com/nedap/archie/aom/CComplexObject.java b/aom/src/main/java/com/nedap/archie/aom/CComplexObject.java index 3b45e2572..673ca15c4 100644 --- a/aom/src/main/java/com/nedap/archie/aom/CComplexObject.java +++ b/aom/src/main/java/com/nedap/archie/aom/CComplexObject.java @@ -10,6 +10,7 
@@ import javax.xml.bind.annotation.XmlType; import java.util.ArrayList; import java.util.List; +import java.util.stream.Collectors; /** * Created by pieter.bos on 15/10/15. @@ -70,14 +71,16 @@ public T itemAtPath(String path) { } /** - * get attribute by name. - * @param name + * get attribute by name or differential path + * @param nameOrDifferentialpath name of the attribute, or the full differential path of the attribute * @return */ @Override - public CAttribute getAttribute(String name) { + public CAttribute getAttribute(String nameOrDifferentialpath) { for(CAttribute attribute:attributes) { - if(attribute.getRmAttributeName().equals(name)) { + if(attribute.getRmAttributeName().equals(nameOrDifferentialpath) && attribute.getDifferentialPath() == null) { + return attribute; + } else if(attribute.getDifferentialPath() != null && attribute.getDifferentialPath().equals(nameOrDifferentialpath)) { return attribute; } } @@ -104,6 +107,37 @@ public void addAttribute(CAttribute attribute) { attributes.add(attribute); } + public void removeAttribute(CAttribute attribute) { + removeAttribute(attribute, false); + } + + private void removeAttribute(CAttribute attribute, boolean allowRemovingTupleMembers) { + int indexOfAttribute = -1; + + for(int i = 0; i < attributes.size(); i++) { + CAttribute existingAttribute = attributes.get(i); + if(existingAttribute.getDifferentialPath() != null) { + + if(existingAttribute.getDifferentialPath().equals(attribute.getDifferentialPath())) { + indexOfAttribute = i; + break; + } + } else { + if(existingAttribute.getRmAttributeName().equals(attribute.getRmAttributeName())) { + indexOfAttribute = i; + } + } + } + if(indexOfAttribute >= 0) { + CAttribute foundAttribute = attributes.get(indexOfAttribute); + if(!allowRemovingTupleMembers && foundAttribute.getSocParent() != null) { + throw new IllegalArgumentException("cannot remove a tuple attribute with removeAttribute, remove the tuple attribute instead and rebuild tuple attributes."); + } 
else { + attributes.remove(indexOfAttribute); + } + } + } + public List getAttributeTuples() { return attributeTuples; } @@ -141,4 +175,31 @@ public void replaceAttribute(CAttribute newAttribute) { public boolean isLeaf() { return (attributes == null || attributes.isEmpty()) && (attributeTuples == null || attributeTuples.isEmpty()); } + + /** + * Remove the attribute tuple with the given member names from this CComplexObject + * @param parameterMemberNames the name of the attribute tuple members to remove + */ + public void removeAttributeTuple(List parameterMemberNames) { + int index = getIndexOfMatchingAttributeTuple(parameterMemberNames); + if(index >= 0) { + CAttributeTuple tuple = attributeTuples.get(index); + attributeTuples.remove(index); + for(CAttribute attribute:tuple.getMembers()) { + this.removeAttribute(attribute, true); + } + } + } + + public int getIndexOfMatchingAttributeTuple(List parameterMemberNames) { + for(int i = 0; i < this.getAttributeTuples().size(); i++) { + CAttributeTuple cAttributeTuple = getAttributeTuples().get(i); + cAttributeTuple.getMemberNames(); + List memberNames = cAttributeTuple.getMemberNames(); + if(memberNames.equals(parameterMemberNames)) { + return i; + } + } + return -1; + } } diff --git a/aom/src/main/java/com/nedap/archie/aom/ResourceDescription.java b/aom/src/main/java/com/nedap/archie/aom/ResourceDescription.java index 85192a7ad..5d1f54a62 100644 --- a/aom/src/main/java/com/nedap/archie/aom/ResourceDescription.java +++ b/aom/src/main/java/com/nedap/archie/aom/ResourceDescription.java @@ -28,7 +28,7 @@ public class ResourceDescription extends ArchetypeModelObject { private Map conversionDetails = new ConcurrentHashMap<>(); private Map otherDetails = new ConcurrentHashMap<>(); - private Map details; + private Map details = new ConcurrentHashMap<>(); public Map getOriginalAuthor() { return originalAuthor; diff --git a/aom/src/main/java/com/nedap/archie/aom/primitives/CInteger.java 
b/aom/src/main/java/com/nedap/archie/aom/primitives/CInteger.java index 59b65b7ad..da5849bb2 100644 --- a/aom/src/main/java/com/nedap/archie/aom/primitives/CInteger.java +++ b/aom/src/main/java/com/nedap/archie/aom/primitives/CInteger.java @@ -1,11 +1,13 @@ package com.nedap.archie.aom.primitives; +import com.fasterxml.jackson.annotation.JsonIgnore; import com.nedap.archie.base.Interval; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlType; +import java.beans.Transient; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -45,6 +47,8 @@ public void addConstraint(Interval constraint) { this.constraint.add(constraint); } + @JsonIgnore + @Transient public List getConstraintValues() { List result = new ArrayList<>(); for(Interval singleConstraint:constraint) { diff --git a/aom/src/main/java/com/nedap/archie/aom/terminology/ArchetypeTerminology.java b/aom/src/main/java/com/nedap/archie/aom/terminology/ArchetypeTerminology.java index 7dbf6232f..236e800d7 100644 --- a/aom/src/main/java/com/nedap/archie/aom/terminology/ArchetypeTerminology.java +++ b/aom/src/main/java/com/nedap/archie/aom/terminology/ArchetypeTerminology.java @@ -196,6 +196,22 @@ public boolean hasIdCode(String code) { return AOMUtils.isIdCode(code) && hasCode(code); } + public boolean hasCodeInAllLanguages(String code) { + if(termDefinitions == null) { + return false; + } + for(String language:getTermDefinitions().keySet()) { + if(!getTermDefinitions().get(language).containsKey(code)) { + return false; + } + } + return true; + } + + public boolean hasIdCodeInAllLanguages(String code) { + return AOMUtils.isIdCode(code) && hasCodeInAllLanguages(code); + } + public boolean hasValueSetCode(String code) { return AOMUtils.isValueSetCode(code) && hasCode(code); } diff --git 
a/aom/src/main/java/com/nedap/archie/aom/terminology/TerminologyCodeWithArchetypeTerm.java b/aom/src/main/java/com/nedap/archie/aom/terminology/TerminologyCodeWithArchetypeTerm.java index 85bab2681..2e96c6408 100644 --- a/aom/src/main/java/com/nedap/archie/aom/terminology/TerminologyCodeWithArchetypeTerm.java +++ b/aom/src/main/java/com/nedap/archie/aom/terminology/TerminologyCodeWithArchetypeTerm.java @@ -8,6 +8,10 @@ public class TerminologyCodeWithArchetypeTerm { private String code; private ArchetypeTerm term; + public TerminologyCodeWithArchetypeTerm() { + + } + public TerminologyCodeWithArchetypeTerm(String code, ArchetypeTerm term) { this.code = code; this.term = term; diff --git a/aom/src/main/java/com/nedap/archie/aom/utils/AOMUtils.java b/aom/src/main/java/com/nedap/archie/aom/utils/AOMUtils.java index f22e006d8..136d12277 100644 --- a/aom/src/main/java/com/nedap/archie/aom/utils/AOMUtils.java +++ b/aom/src/main/java/com/nedap/archie/aom/utils/AOMUtils.java @@ -1,6 +1,8 @@ package com.nedap.archie.aom.utils; +import com.google.common.base.CharMatcher; import com.google.common.base.Joiner; +import com.google.common.base.Strings; import com.nedap.archie.aom.Archetype; import com.nedap.archie.aom.ArchetypeHRID; import com.nedap.archie.aom.ArchetypeModelObject; @@ -28,7 +30,9 @@ import org.openehr.bmm.persistence.validation.BmmDefinitions; import java.util.ArrayList; +import java.util.Collection; import java.util.List; +import java.util.Set; import java.util.regex.Pattern; public class AOMUtils { @@ -81,9 +85,21 @@ public static String pathAtSpecializationLevel(List pathSegments, i public static String codeAtLevel(String nodeId, int level) { NodeIdUtil nodeIdUtil = new NodeIdUtil(nodeId); List codes = new ArrayList<>(); - for(int i = 0; i <= level;i++) { + for(int i = 0; i <= level && i < nodeIdUtil.getCodes().size();i++) { codes.add(nodeIdUtil.getCodes().get(i)); } + //remove leading .0 codes - they are not present in the code at the given level + int 
numberOfCodesToRemove = 0; + for(int i = codes.size()-1; i >= 0 ; i--) { + if(codes.get(i).intValue() == 0) { + numberOfCodesToRemove++; + } else { + break; + } + } + if(numberOfCodesToRemove > 0) { + codes = codes.subList(0, codes.size()-numberOfCodesToRemove); + } return nodeIdUtil.getPrefix() + Joiner.on(AdlCodeDefinitions.SPECIALIZATION_SEPARATOR).join(codes); } @@ -128,7 +144,7 @@ public static boolean isArchetypePath(String path) { public static boolean isPhantomPathAtLevel(List pathSegments, int specializationDepth) { for(int i = pathSegments.size()-1; i >=0; i--) { String nodeId = pathSegments.get(i).getNodeId(); - if(nodeId != null && AOMUtils.isValidCode(nodeId) && specializationDepth < AOMUtils.getSpecializationDepthFromCode(nodeId)) { + if(nodeId != null && AOMUtils.isValidCode(nodeId) && specializationDepth > AOMUtils.getSpecializationDepthFromCode(nodeId)) { return codeExistsAtLevel(nodeId, specializationDepth); } } @@ -138,7 +154,7 @@ public static boolean isPhantomPathAtLevel(List pathSegments, int s public static boolean codeExistsAtLevel(String nodeId, int specializationDepth) { NodeIdUtil nodeIdUtil = new NodeIdUtil(nodeId); int specializationDepthOfCode = AOMUtils.getSpecializationDepthFromCode(nodeId); - if(specializationDepth > specializationDepthOfCode) { + if(specializationDepth < specializationDepthOfCode) { String code = ""; for(int i = 0; i <= specializationDepth; i++) { code += nodeIdUtil.getCodes().get(i); @@ -244,7 +260,16 @@ public static BmmProperty getPropertyAtPath(BmmModel bmmModel, String rmTypeName } property = classDefinition.flattenBmmClass().getProperties().get(segment.getNodeName()); if(property == null) { - return null; + for(String descendant: classDefinition.findAllDescendants()) { + BmmProperty bmmProperty = bmmModel.getClassDefinition(descendant).flattenBmmClass().getProperties().get(segment.getNodeName()); + if(bmmProperty != null) { + property = bmmProperty; + break; + } + } + if(property == null) { + return null; + } 
} classDefinition = property.getType().getBaseClass(); } @@ -275,4 +300,39 @@ public static RMAttributeInfo getAttributeInfoAtPath(ModelInfoLookup selectedMod } return attribute; } + + /** Get the maximum code used at the given specialization level. useful for generating new codes*/ + public static int getMaximumIdCode(int specializationDepth, Collection usedIdCodes) { + + int maximumIdCode = 0; + for(String code:usedIdCodes) { + if (code.length() > 2) { + int numberOfDots = getSpecializationDepthFromCode(code); + if(specializationDepth == numberOfDots) { + int numericCode = numberOfDots == 0 ? Integer.parseInt(code.substring(2)) : Integer.parseInt(code.substring(code.lastIndexOf('.')+1)); + maximumIdCode = Math.max(numericCode, maximumIdCode); + } + } + } + return maximumIdCode; + } + + /** Get the maximum code used at the given specialization level. useful for generating new codes*/ + public static int getMaximumIdCode(int specializationDepth, String prefix, Collection usedIdCodes) { + if(specializationDepth == 0) { + throw new IllegalArgumentException("can only get the maximum code with prefix at a specialization depth > 0"); + } + int maximumIdCode = 0; + for(String code:usedIdCodes) { + if(code.startsWith(prefix + ".")) { + int numberOfDots = CharMatcher.is(AdlCodeDefinitions.SPECIALIZATION_SEPARATOR).countIn(code); + if(specializationDepth == numberOfDots) { + int numericCode = Integer.parseInt(code.substring(code.lastIndexOf('.')+1)); + maximumIdCode = Math.max(numericCode, maximumIdCode); + } + } + } + return maximumIdCode; + } + } diff --git a/aom/src/main/java/com/nedap/archie/query/AOMPathQuery.java b/aom/src/main/java/com/nedap/archie/query/AOMPathQuery.java index b334ca9e4..73f47d822 100644 --- a/aom/src/main/java/com/nedap/archie/query/AOMPathQuery.java +++ b/aom/src/main/java/com/nedap/archie/query/AOMPathQuery.java @@ -1,6 +1,7 @@ package com.nedap.archie.query; +import com.google.common.collect.Lists; import 
com.nedap.archie.aom.ArchetypeModelObject; import com.nedap.archie.aom.CAttribute; import com.nedap.archie.aom.CComplexObject; @@ -30,6 +31,8 @@ public class AOMPathQuery { /** If true, extend the search through C_COMPLEX_OBJECT_PROXY objects by looking up the replacement first.*/ private final boolean findThroughCComplexObjectProxies; + private boolean findThroughDifferentialPaths = true; + public AOMPathQuery(String query) { APathQuery apathQuery = new APathQuery(query); this.pathSegments = apathQuery.getPathSegments(); @@ -60,19 +63,102 @@ public AOMPathQuery dontFindThroughCComplexObjectProxies() { return new AOMPathQuery(pathSegments, false); } + public void setFindThroughDifferentialPaths(boolean find) { + this.findThroughDifferentialPaths = find; + } + public List findList(CComplexObject root) { + return findList(root, false); + } + + /** + * Find a list of matching objects to the path. If matchSpecializedNodes is true, [id6] in the query will first try to + * find a node with id id6. 
If not, it will find specialized nodes like id6.1 or id6.0.0.3.1 + * @param root + * @param matchSpecializedNodes + * @param + * @return + */ + public List findList(CComplexObject root, boolean matchSpecializedNodes) { List result = new ArrayList<>(); result.add(root); - for(PathSegment segment:this.pathSegments) { + for(int i = 0; i < pathSegments.size(); i++) { + PathSegment segment = pathSegments.get(i); if (result.size() == 0) { return Collections.emptyList(); } - result = findOneSegment(segment, result); + + + CAttribute differentialAttribute = null; + if(findThroughDifferentialPaths) { + differentialAttribute = findMatchingDifferentialPath(pathSegments.subList(i, pathSegments.size()), result); + } + if(differentialAttribute != null) { + //skip a few pathsegments for this differential path match + i = i + new APathQuery(differentialAttribute.getDifferentialPath()).getPathSegments().size()-1; + PathSegment lastPathSegment = pathSegments.get(i); + ArchetypeModelObject oneMatchingObject = findOneMatchingObject(differentialAttribute, lastPathSegment, matchSpecializedNodes); + if(oneMatchingObject != null) { + result = Lists.newArrayList(oneMatchingObject); + } else { + result = findOneSegment(segment, result, matchSpecializedNodes); + } + + + } else { + result = findOneSegment(segment, result, matchSpecializedNodes); + } } return (List)result.stream().filter((object) -> object != null).collect(Collectors.toList()); } - private List findOneSegment(PathSegment pathSegment, List objects) { + protected CAttribute findMatchingDifferentialPath(List pathSegments, List objects) { + if(pathSegments.size() < 2) { + return null; + } + List result = new ArrayList<>(); + for(ArchetypeModelObject object:objects) { + if (object instanceof CObject) { + for(CAttribute attribute:((CObject) object).getAttributes()) { + if(attribute.getDifferentialPath() != null) { + List differentialPathSegments = new APathQuery(attribute.getDifferentialPath()).getPathSegments(); + 
if(checkDifferentialMatch(pathSegments, differentialPathSegments)) { + return attribute; + } + } + + } + } + } + return null; + } + + private boolean checkDifferentialMatch(List pathSegments, List differentialPathSegments) { + if(differentialPathSegments.size() <= pathSegments.size()) { + for(int i = 0; i < differentialPathSegments.size(); i++) { + PathSegment segment = pathSegments.get(i); + PathSegment differentialPathSegment = differentialPathSegments.get(i); + if(!matches(segment, differentialPathSegment)) { + return false; + } + } + return true; + } + return false; + + } + + private boolean matches(PathSegment segment, PathSegment differentialPathSegment) { + if(differentialPathSegment.getNodeId() == null) { + return segment.getNodeName().equalsIgnoreCase(differentialPathSegment.getNodeName()); + } else { + return segment.getNodeName().equalsIgnoreCase(differentialPathSegment.getNodeName()) && + segment.getNodeId().equals(differentialPathSegment.getNodeId()); + } + } + + + protected List findOneSegment(PathSegment pathSegment, List objects, boolean matchSpecializedNodes) { List result = new ArrayList<>(); List preProcessedObjects = new ArrayList<>(); @@ -99,14 +185,9 @@ private List findOneSegment(PathSegment pathSegment, List< CObject cobject = (CObject) object; CAttribute attribute = cobject.getAttribute(pathSegment.getNodeName()); if(attribute != null) { - if (pathSegment.hasIdCode() || pathSegment.hasArchetypeRef()) { - result.add(attribute.getChild(pathSegment.getNodeId())); - } else if (pathSegment.hasNumberIndex()) { - result.add(attribute.getChildren().get(pathSegment.getIndex() - 1));//APath path numbers start at 1 instead of 0 - } else if (pathSegment.getNodeId() != null) { - result.add(attribute.getChildByMeaning(pathSegment.getNodeId()));//TODO: the ANTLR grammar removes all whitespace. what to do here? 
- } else { - result.add(attribute); + ArchetypeModelObject r = findOneMatchingObject(attribute, pathSegment, matchSpecializedNodes); + if(r != null) { + result.add(r); } } } @@ -114,6 +195,21 @@ private List findOneSegment(PathSegment pathSegment, List< return result; } + protected ArchetypeModelObject findOneMatchingObject(CAttribute attribute, PathSegment pathSegment, boolean matchSpecializedNodes) { + if (pathSegment.hasIdCode() || pathSegment.hasArchetypeRef()) { + if(matchSpecializedNodes) { + return attribute.getPossiblySpecializedChild(pathSegment.getNodeId()); + } + return attribute.getChild(pathSegment.getNodeId()); + } else if (pathSegment.hasNumberIndex()) { + return attribute.getChildren().get(pathSegment.getIndex() - 1);//APath path numbers start at 1 instead of 0 + } else if (pathSegment.getNodeId() != null) { + return attribute.getChildByMeaning(pathSegment.getNodeId());//TODO: the ANTLR grammar removes all whitespace. what to do here? + } else { + return attribute; + } + } + //TODO: get diagnostic information about where the finder stopped in the path - could be very useful! 
@@ -133,7 +229,7 @@ public CComplexObjectProxy findAnyInternalReference(CComplexObject root) { if (result.size() == 0) { return null; } - result = findOneSegment(segment, result); + result = findOneSegment(segment, result, false); if(result.size() == 1 && result.get(0) instanceof CComplexObjectProxy) { return (CComplexObjectProxy) result.get(0); } diff --git a/aom/src/main/java/com/nedap/archie/query/ComplexObjectProxyReplacement.java b/aom/src/main/java/com/nedap/archie/query/ComplexObjectProxyReplacement.java index e82792ac2..96a9bcd0d 100644 --- a/aom/src/main/java/com/nedap/archie/query/ComplexObjectProxyReplacement.java +++ b/aom/src/main/java/com/nedap/archie/query/ComplexObjectProxyReplacement.java @@ -5,6 +5,7 @@ import com.nedap.archie.aom.CComplexObject; import com.nedap.archie.aom.CComplexObjectProxy; import com.nedap.archie.aom.CObject; +import java.util.List; /** * little class used for a CompelxObjectProxyReplacement because we cannot replace in a collection @@ -14,9 +15,9 @@ public class ComplexObjectProxyReplacement { private final CComplexObject replacement; private final CComplexObjectProxy proxy; - public ComplexObjectProxyReplacement(CComplexObjectProxy proxy, CComplexObject object) { + public ComplexObjectProxyReplacement(CComplexObjectProxy proxy, CComplexObject replacement) { this.proxy = proxy; - this.replacement = object; + this.replacement = replacement; } public void replace() { @@ -24,11 +25,12 @@ public void replace() { } public static ComplexObjectProxyReplacement getComplexObjectProxyReplacement(CComplexObjectProxy proxy) { - CObject newObject = new AOMPathQuery(proxy.getTargetPath()).find(getNearestArchetypeRoot(proxy)); - if (newObject == null) { + List newObjects = new AOMPathQuery(proxy.getTargetPath()).findList(getNearestArchetypeRoot(proxy), true); + if (newObjects == null || newObjects.isEmpty()) { return null; } else { - CComplexObject clone = (CComplexObject) newObject.clone(); + CObject found = newObjects.get(0); + 
CComplexObject clone = (CComplexObject) found.clone(); clone.setNodeId(proxy.getNodeId()); if (proxy.getOccurrences() != null) { clone.setOccurrences(proxy.getOccurrences()); diff --git a/aom/src/main/java/com/nedap/archie/rminfo/MetaModel.java b/aom/src/main/java/com/nedap/archie/rminfo/MetaModel.java index 984afb933..cf3c484c2 100644 --- a/aom/src/main/java/com/nedap/archie/rminfo/MetaModel.java +++ b/aom/src/main/java/com/nedap/archie/rminfo/MetaModel.java @@ -198,6 +198,9 @@ public boolean hasReferenceModelPath(String rmTypeName, String path) { public MultiplicityInterval referenceModelPropMultiplicity(String rmTypeName, String rmAttributeNameOrPath) { if(selectedBmmModel != null) { BmmProperty bmmProperty = AOMUtils.getPropertyAtPath(selectedBmmModel, rmTypeName, rmAttributeNameOrPath); + if(bmmProperty == null) { + return null; + } if(isMultiple(bmmProperty)) { return MultiplicityInterval.createUpperUnbounded(0); } else { @@ -209,6 +212,9 @@ public MultiplicityInterval referenceModelPropMultiplicity(String rmTypeName, St } } else { RMAttributeInfo attributeInfo = AOMUtils.getAttributeInfoAtPath(selectedModel, rmTypeName, rmAttributeNameOrPath); + if(attributeInfo == null) { + return null; + } if (attributeInfo.isMultipleValued()) { return MultiplicityInterval.createUpperUnbounded(0); } else { diff --git a/aom/src/main/java/com/nedap/archie/rminfo/ReflectionModelInfoLookup.java b/aom/src/main/java/com/nedap/archie/rminfo/ReflectionModelInfoLookup.java index 91f704b95..0e4f49796 100644 --- a/aom/src/main/java/com/nedap/archie/rminfo/ReflectionModelInfoLookup.java +++ b/aom/src/main/java/com/nedap/archie/rminfo/ReflectionModelInfoLookup.java @@ -178,7 +178,7 @@ private void addRMAttributeInfo(Class clazz, RMTypeInfo typeInfo, TypeToken type setMethod = getMethod(clazz, "set" + javaFieldNameUpperCased, getMethod.getReturnType()); addMethod = getAddMethod(clazz, typeToken, attributeName, javaFieldNameUpperCased, getMethod); } else { - logger.warn("No get method 
found for attribute {} on class {}", attributeName, clazz.getSimpleName()); + logger.debug("No get method found for attribute {} on class {}", attributeName, clazz.getSimpleName()); } TypeToken fieldType = typeToken.resolveType(getMethod.getGenericReturnType());; @@ -219,7 +219,7 @@ private void addRMAttributeInfo(Class clazz, RMTypeInfo typeInfo, TypeToken type setMethod = getMethod(clazz, "set" + javaFieldNameUpperCased, getMethod.getReturnType()); addMethod = getAddMethod(clazz, typeToken, attributeName, javaFieldNameUpperCased, getMethod); } else { - logger.warn("No get method found for field {} on class {}", field.getName(), clazz.getSimpleName()); + logger.debug("No get method found for field {} on class {}", field.getName(), clazz.getSimpleName()); } TypeToken fieldType = null; @@ -245,7 +245,7 @@ private void addRMAttributeInfo(Class clazz, RMTypeInfo typeInfo, TypeToken type ); typeInfo.addAttribute(attributeInfo); } else { - logger.info("property without a set method ignored for field {} on class {}", field.getName(), clazz.getSimpleName()); + logger.debug("property without a set method ignored for field {} on class {}", field.getName(), clazz.getSimpleName()); } } @@ -283,7 +283,7 @@ private Method getAddMethod(Class clazz, TypeToken typeToken, String name, Strin if (allAddMethods.size() == 1) { addMethod = allAddMethods.iterator().next(); } else { - logger.warn("strange number of add methods for field {} on class {}", name, clazz.getSimpleName()); + logger.debug("strange number of add methods for field {} on class {}", name, clazz.getSimpleName()); } } } diff --git a/aom/src/main/java/com/nedap/archie/rules/Expression.java b/aom/src/main/java/com/nedap/archie/rules/Expression.java index d243d1422..331efcd86 100644 --- a/aom/src/main/java/com/nedap/archie/rules/Expression.java +++ b/aom/src/main/java/com/nedap/archie/rules/Expression.java @@ -7,13 +7,13 @@ public class Expression extends RuleElement { /** * If true, this statement originally was placed 
between ()-signs */ - private boolean precedenceOverriden = false; + private boolean precedenceOverridden = false; - public boolean isPrecedenceOverriden() { - return precedenceOverriden; + public boolean isPrecedenceOverridden() { + return precedenceOverridden; } - public void setPrecedenceOverridden(boolean precedenceOverriden) { - this.precedenceOverriden = precedenceOverriden; + public void setPrecedenceOverridden(boolean precedenceOverridden) { + this.precedenceOverridden = precedenceOverridden; } } diff --git a/aom/src/main/java/com/nedap/archie/serializer/adl/ADLArchetypeSerializer.java b/aom/src/main/java/com/nedap/archie/serializer/adl/ADLArchetypeSerializer.java index 8d7ef7b2e..db9387b55 100644 --- a/aom/src/main/java/com/nedap/archie/serializer/adl/ADLArchetypeSerializer.java +++ b/aom/src/main/java/com/nedap/archie/serializer/adl/ADLArchetypeSerializer.java @@ -56,6 +56,8 @@ protected void appendAnnotations() { protected void appendRules() { if(archetype.getRules() == null) return; + if(archetype.getRules().getRules().isEmpty()) return; + builder.newline().append("rules").newIndentedLine(); rulesSerializer.appendRules(archetype.getRules()); builder.newUnindentedLine(); diff --git a/aom/src/main/java/com/nedap/archie/serializer/adl/ADLAuthoredArchetypeSerializer.java b/aom/src/main/java/com/nedap/archie/serializer/adl/ADLAuthoredArchetypeSerializer.java index bed6aa27d..bfc5cebb1 100644 --- a/aom/src/main/java/com/nedap/archie/serializer/adl/ADLAuthoredArchetypeSerializer.java +++ b/aom/src/main/java/com/nedap/archie/serializer/adl/ADLAuthoredArchetypeSerializer.java @@ -34,11 +34,13 @@ protected void appendLanguage() { @Override protected void appendDescription() { - if (archetype.getDescription() == null) return; - - builder.newline().append("description").newIndentedLine() - .odin(archetype.getDescription()) - .unindent(); + if (archetype.getDescription() == null) { + builder.newline().append("description"); + } else { + 
builder.newline().append("description").newIndentedLine() + .odin(archetype.getDescription()) + .unindent(); + } } @Override diff --git a/aom/src/main/java/com/nedap/archie/serializer/adl/ADLRulesSerializer.java b/aom/src/main/java/com/nedap/archie/serializer/adl/ADLRulesSerializer.java index 2d3ee26e0..6d1aa1bad 100644 --- a/aom/src/main/java/com/nedap/archie/serializer/adl/ADLRulesSerializer.java +++ b/aom/src/main/java/com/nedap/archie/serializer/adl/ADLRulesSerializer.java @@ -58,7 +58,7 @@ public void serializeRuleElement(RuleElement element) { } private boolean isPrecedenceOverride(RuleElement element) { - return element instanceof Expression && ((Expression) element).isPrecedenceOverriden(); + return element instanceof Expression && ((Expression) element).isPrecedenceOverridden(); } private RuleElementSerializer getSerializer(RuleElement element) { diff --git a/aom/src/main/java/com/nedap/archie/serializer/adl/ADLTemplateOverlaySerializer.java b/aom/src/main/java/com/nedap/archie/serializer/adl/ADLTemplateOverlaySerializer.java index 5682ea37e..9dc6755e2 100644 --- a/aom/src/main/java/com/nedap/archie/serializer/adl/ADLTemplateOverlaySerializer.java +++ b/aom/src/main/java/com/nedap/archie/serializer/adl/ADLTemplateOverlaySerializer.java @@ -26,4 +26,9 @@ protected void appendLanguage() { @Override protected void appendDescription() { } + + @Override + protected void appendRules() { + + } } diff --git a/aom/src/main/java/com/nedap/archie/serializer/adl/constraints/CBooleanSerializer.java b/aom/src/main/java/com/nedap/archie/serializer/adl/constraints/CBooleanSerializer.java index 72cea726d..d09cd4a89 100644 --- a/aom/src/main/java/com/nedap/archie/serializer/adl/constraints/CBooleanSerializer.java +++ b/aom/src/main/java/com/nedap/archie/serializer/adl/constraints/CBooleanSerializer.java @@ -71,4 +71,9 @@ public void serialize(CBoolean cobj) { } + @Override + public boolean isEmpty(CBoolean cobj) { + return cobj.getConstraint() == null || 
cobj.getConstraint().isEmpty(); + } + } diff --git a/aom/src/main/java/com/nedap/archie/serializer/adl/constraints/CComplexObjectSerializer.java b/aom/src/main/java/com/nedap/archie/serializer/adl/constraints/CComplexObjectSerializer.java index 82e790160..99ab182af 100644 --- a/aom/src/main/java/com/nedap/archie/serializer/adl/constraints/CComplexObjectSerializer.java +++ b/aom/src/main/java/com/nedap/archie/serializer/adl/constraints/CComplexObjectSerializer.java @@ -101,7 +101,6 @@ private void buildAttribute(CAttribute cattr) { builder.append("}"); } if (!cattr.getChildren().isEmpty()) { - builder.append(" matches "); buildAttributeChildConstraints(cattr); } } @@ -140,14 +139,15 @@ private void buildTuple(CAttributeTuple cAttributeTuple) { private void buildAttributeChildConstraints(CAttribute cattr) { List children = filterNonEmptyChildren(cattr.getChildren()); + if(children.isEmpty()) { + return; + } + builder.append(" matches "); boolean indent = !children.isEmpty() && (children.size() > 1 || !(children.get(0) instanceof CPrimitiveObject)); builder.append("{"); children.forEach(serializer::appendCObject); - if (children.isEmpty()) { - builder.append("*"); - } if (indent) { builder.newline(); diff --git a/aom/src/main/java/com/nedap/archie/serializer/adl/constraints/COrderedSerializer.java b/aom/src/main/java/com/nedap/archie/serializer/adl/constraints/COrderedSerializer.java index ba6f7ccc2..6560e6181 100644 --- a/aom/src/main/java/com/nedap/archie/serializer/adl/constraints/COrderedSerializer.java +++ b/aom/src/main/java/com/nedap/archie/serializer/adl/constraints/COrderedSerializer.java @@ -21,7 +21,8 @@ public final void serialize(T cobj) { serializeAssumedValue(cobj); if (original == builder.mark()) { - builder.append("*"); + builder.append("*");//TODO: this is deprecated ADL. 
Should be fixed, but hard to check, so left in for now + //because leaving it out would mean invalid instead of deprecated ADL } } @@ -69,4 +70,9 @@ private boolean isSingleValueConstraint(T cobj) { Interval interval = cobj.getConstraint().get(0); return isSingleValueInterval(interval); } + + @Override + public boolean isEmpty(T object) { + return object.getConstraint() == null || object.getConstraint().isEmpty(); + } } diff --git a/aom/src/main/java/com/nedap/archie/serializer/adl/constraints/CStringSerializer.java b/aom/src/main/java/com/nedap/archie/serializer/adl/constraints/CStringSerializer.java index 71fb82cd2..95b131bb2 100644 --- a/aom/src/main/java/com/nedap/archie/serializer/adl/constraints/CStringSerializer.java +++ b/aom/src/main/java/com/nedap/archie/serializer/adl/constraints/CStringSerializer.java @@ -21,6 +21,7 @@ package com.nedap.archie.serializer.adl.constraints; +import com.nedap.archie.aom.primitives.CBoolean; import com.nedap.archie.aom.primitives.CString; import com.nedap.archie.serializer.adl.ADLDefinitionSerializer; @@ -66,4 +67,9 @@ private boolean isRegex(String str) { if (str.charAt(str.length()-1)!=c) return false; return c=='/' || c=='^'; } + + @Override + public boolean isEmpty(CString cobj) { + return cobj.getConstraint() == null || cobj.getConstraint().isEmpty(); + } } diff --git a/aom/src/test/java/com/nedap/archie/aom/ArchetypeTest.java b/aom/src/test/java/com/nedap/archie/aom/ArchetypeTest.java new file mode 100644 index 000000000..5d9a05952 --- /dev/null +++ b/aom/src/test/java/com/nedap/archie/aom/ArchetypeTest.java @@ -0,0 +1,58 @@ +package com.nedap.archie.aom; + +import com.google.common.collect.Sets; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +public class ArchetypeTest { + + @Test + public void testGetUsedIdCodes() { + Archetype archetype = createLevel0Archetype(); + assertEquals(Sets.newHashSet("id1", "id2"), archetype.getUsedIdCodes()); + } + + @Test + public void 
testLevel0GenerateIdCode() { + Archetype archetype = createLevel0Archetype(); + assertEquals("id3", archetype.generateNextIdCode()); + } + + @Test + public void testLevel1GenerateIdCode() { + Archetype archetype = createLevel1Archetype(); + assertEquals("id0.3", archetype.generateNextIdCode()); + + } + + private Archetype createLevel0Archetype() { + Archetype archetype = new Archetype(); + CComplexObject definition = new CComplexObject(); + definition.setRmTypeName("CLUSTER"); + definition.setNodeId("id1"); + CAttribute attribute1 = new CAttribute("items"); + definition.addAttribute(attribute1); + CComplexObject element = new CComplexObject(); + element.setRmTypeName("ELEMENT"); + element.setNodeId("id2"); + attribute1.addChild(element); + archetype.setDefinition(definition); + return archetype; + } + + private Archetype createLevel1Archetype() { + Archetype archetype = new Archetype(); + CComplexObject definition = new CComplexObject(); + definition.setRmTypeName("CLUSTER"); + definition.setNodeId("id1.1"); + CAttribute attribute1 = new CAttribute("items"); + definition.addAttribute(attribute1); + CComplexObject element = new CComplexObject(); + element.setRmTypeName("ELEMENT"); + element.setNodeId("id0.2"); + attribute1.addChild(element); + archetype.setDefinition(definition); + return archetype; + } +} diff --git a/aom/src/test/java/com/nedap/archie/aom/utils/AOMUtilsTest.java b/aom/src/test/java/com/nedap/archie/aom/utils/AOMUtilsTest.java new file mode 100644 index 000000000..19161bbbb --- /dev/null +++ b/aom/src/test/java/com/nedap/archie/aom/utils/AOMUtilsTest.java @@ -0,0 +1,17 @@ +package com.nedap.archie.aom.utils; + +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +public class AOMUtilsTest { + + @Test + public void codeAtLevel() { + assertEquals("id1", AOMUtils.codeAtLevel("id1", 0)); + assertEquals("id1", AOMUtils.codeAtLevel("id1.1", 0)); + assertEquals("id1.1", AOMUtils.codeAtLevel("id1.1", 1)); + assertEquals("id1.1", 
AOMUtils.codeAtLevel("id1.1.1", 1)); + assertEquals("id1", AOMUtils.codeAtLevel("id1.0.1", 1)); + } +} diff --git a/base/src/main/java/com/nedap/archie/antlr/errors/ANTLRParserErrors.java b/base/src/main/java/com/nedap/archie/antlr/errors/ANTLRParserErrors.java index 3a65e99b9..22669ec45 100644 --- a/base/src/main/java/com/nedap/archie/antlr/errors/ANTLRParserErrors.java +++ b/base/src/main/java/com/nedap/archie/antlr/errors/ANTLRParserErrors.java @@ -20,10 +20,18 @@ public void addError(String error) { errors.add(new ANTLRParserMessage(error)); } + public void addError(String error, int line, int charPositionInLine) { + errors.add(new ANTLRParserMessage(error, line, charPositionInLine)); + } + public void addWarning(String error) { warnings.add(new ANTLRParserMessage(error)); } + public void addWarning(String error, int line, int charPositionInLine) { + warnings.add(new ANTLRParserMessage(error, line, charPositionInLine)); + } + public void logToLogger() { for(ANTLRParserMessage message:warnings) { logger.warn(message.getMessage()); diff --git a/base/src/main/java/com/nedap/archie/antlr/errors/ANTLRParserMessage.java b/base/src/main/java/com/nedap/archie/antlr/errors/ANTLRParserMessage.java index dfa1abb73..660f2f7ea 100644 --- a/base/src/main/java/com/nedap/archie/antlr/errors/ANTLRParserMessage.java +++ b/base/src/main/java/com/nedap/archie/antlr/errors/ANTLRParserMessage.java @@ -7,13 +7,30 @@ */ public class ANTLRParserMessage { + private Integer lineNumber; + private Integer columnNumber; private String message; public ANTLRParserMessage(String message) { this.message = message; } + public ANTLRParserMessage(String message, Integer lineNumber, Integer columnNumber) { + this.message = message; + this.lineNumber = lineNumber; + this.columnNumber = columnNumber; + } + + public String getMessage() { return message; } + + public int getLineNumber() { + return lineNumber; + } + + public int getColumnNumber() { + return columnNumber; + } } diff --git 
a/base/src/main/java/com/nedap/archie/antlr/errors/ArchieErrorListener.java b/base/src/main/java/com/nedap/archie/antlr/errors/ArchieErrorListener.java index 437137788..1e8cdfbab 100644 --- a/base/src/main/java/com/nedap/archie/antlr/errors/ArchieErrorListener.java +++ b/base/src/main/java/com/nedap/archie/antlr/errors/ArchieErrorListener.java @@ -41,7 +41,7 @@ public void syntaxError(Recognizer recognizer, Object offendingSymbol, int if(logEnabled) { logger.warn(error); } - errors.addError(error); + errors.addError(error, line, charPositionInLine); } @Override diff --git a/base/src/main/java/com/nedap/archie/base/Cardinality.java b/base/src/main/java/com/nedap/archie/base/Cardinality.java index 6a56d7503..a97d0f6d8 100644 --- a/base/src/main/java/com/nedap/archie/base/Cardinality.java +++ b/base/src/main/java/com/nedap/archie/base/Cardinality.java @@ -1,5 +1,6 @@ package com.nedap.archie.base; +import com.fasterxml.jackson.annotation.JsonIgnore; import com.google.common.base.Joiner; import javax.xml.bind.annotation.XmlAccessType; @@ -24,7 +25,7 @@ public class Cardinality extends OpenEHRBase { private MultiplicityInterval interval; @XmlElement(name="is_ordered") - private boolean isOrdered = false; + private boolean isOrdered = true; @XmlElement(name="is_unique") private boolean isUnique = false; @@ -33,8 +34,8 @@ public Cardinality() { } public Cardinality(int lower, int higher) { - isOrdered = false; - isUnique = lower == 1 && higher == 1; + isOrdered = true;//default: list semantics + isUnique = false; interval = new MultiplicityInterval(lower, higher); } @@ -79,6 +80,7 @@ public static Cardinality mandatoryAndUnbounded() { * * @return */ + @JsonIgnore public Boolean isBag() { return !isOrdered && !isUnique; } @@ -88,6 +90,7 @@ public Boolean isBag() { * * @return */ + @JsonIgnore public Boolean isList() { return isOrdered && !isUnique; } @@ -97,6 +100,7 @@ public Boolean isList() { * * @return */ + @JsonIgnore public Boolean isSet() { return !isOrdered && 
isUnique; } diff --git a/base/src/main/java/com/nedap/archie/base/MultiplicityInterval.java b/base/src/main/java/com/nedap/archie/base/MultiplicityInterval.java index e46252a14..af826afd0 100644 --- a/base/src/main/java/com/nedap/archie/base/MultiplicityInterval.java +++ b/base/src/main/java/com/nedap/archie/base/MultiplicityInterval.java @@ -1,5 +1,7 @@ package com.nedap.archie.base; +import com.fasterxml.jackson.annotation.JsonIgnore; + /** * Created by pieter.bos on 15/10/15. */ @@ -41,7 +43,7 @@ public static MultiplicityInterval unbounded() { result.setUpperUnbounded(true); return result; } - + public boolean isOpen() { return Integer.valueOf(0).equals(getLower()) && isUpperUnbounded() && isLowerIncluded(); } @@ -102,6 +104,7 @@ public static MultiplicityInterval createBounded(int lower, int upper) { return new MultiplicityInterval(lower, true, false, upper, true, false); } + @JsonIgnore public boolean upperIsOne() { return has(1) && !has(2); } diff --git a/bmm/src/main/java/org/openehr/bmm/core/BmmClass.java b/bmm/src/main/java/org/openehr/bmm/core/BmmClass.java index 6cbf34e32..5d6b1c9cd 100644 --- a/bmm/src/main/java/org/openehr/bmm/core/BmmClass.java +++ b/bmm/src/main/java/org/openehr/bmm/core/BmmClass.java @@ -317,7 +317,16 @@ public List findAllAncestors() { * @return */ public List findAllDescendants() { - throw new UnsupportedOperationException("Not implemented yet"); + List allDescendants = new ArrayList(); + List descendants = getImmediateDescendants(); + allDescendants.addAll(descendants); + for(String descendant:descendants) { + BmmClass classDefinition = this.getBmmModel().getClassDefinition(descendant); + if(classDefinition != null) { + allDescendants.addAll(classDefinition.findAllDescendants()); + } + } + return allDescendants; } /** @@ -422,7 +431,7 @@ public BmmClass flattenBmmClass() { flattenedClassCache = duplicate(); } else { final BmmClass target = this.duplicate(); - target.setAncestors(new HashMap());//Clear out ancestors since we are 
flattening the hierarchy. + //add all properties from all ancestors the new flattened class ancestorMap.forEach( (ancestorName, ancestor) -> { populateTarget(ancestor, target); }); flattenedClassCache = target; } @@ -469,9 +478,11 @@ public BmmClass duplicate() { target.setAbstract(this.isAbstract); target.setSourceSchemaId(this.getSourceSchemaId()); target.getAncestors().putAll(this.getAncestors()); + target.setImmediateDescendants(this.getImmediateDescendants()); target.setOverride(this.isOverride); target.setPrimitiveType(this.isPrimitiveType); target.setPackage(this.getPackage()); + target.setBmmModel(this.getBmmModel()); return target; } diff --git a/bmm/src/main/java/org/openehr/bmm/persistence/PersistedBmmEnumeration.java b/bmm/src/main/java/org/openehr/bmm/persistence/PersistedBmmEnumeration.java index d77793795..18d029325 100644 --- a/bmm/src/main/java/org/openehr/bmm/persistence/PersistedBmmEnumeration.java +++ b/bmm/src/main/java/org/openehr/bmm/persistence/PersistedBmmEnumeration.java @@ -35,6 +35,15 @@ */ public abstract class PersistedBmmEnumeration extends PersistedBmmClass implements Serializable { + /** + * Names of enumeration elements. + */ + private List itemNames; + /** + * Values associated with enumeration elements. + */ + private List itemValues; + public PersistedBmmEnumeration() { super(); this.itemNames = new ArrayList<>(); @@ -47,15 +56,6 @@ public PersistedBmmEnumeration(String name) { this.itemValues = new ArrayList<>(); } - /** - * Names of enumeration elements. - */ - private List itemNames; - /** - * Values associated with enumeration elements. - */ - private List itemValues; - /** * Returns list of enumeration element names. 
* diff --git a/bmm/src/main/java/org/openehr/bmm/persistence/PersistedBmmGenericParameter.java b/bmm/src/main/java/org/openehr/bmm/persistence/PersistedBmmGenericParameter.java index 0d008f2cc..e1dda00f8 100644 --- a/bmm/src/main/java/org/openehr/bmm/persistence/PersistedBmmGenericParameter.java +++ b/bmm/src/main/java/org/openehr/bmm/persistence/PersistedBmmGenericParameter.java @@ -36,20 +36,6 @@ */ public class PersistedBmmGenericParameter extends PersistedBmmModelElement implements Serializable { - public PersistedBmmGenericParameter() { - super(); - } - - public PersistedBmmGenericParameter(String name) { - this(); - this.name = name; - } - - public PersistedBmmGenericParameter(String name, String conformsToType) { - this(name); - this.conformsToType = conformsToType; - } - /** * Name of the parameter, e.g. 'T' etc. Persisted attribute. */ @@ -65,6 +51,21 @@ public PersistedBmmGenericParameter(String name, String conformsToType) { */ private transient BmmGenericParameter bmmGenericParameter; + + public PersistedBmmGenericParameter() { + super(); + } + + public PersistedBmmGenericParameter(String name) { + this(); + this.name = name; + } + + public PersistedBmmGenericParameter(String name, String conformsToType) { + this(name); + this.conformsToType = conformsToType; + } + /** * Returns the name of the parameter, e.g. 'T' etc. * @return diff --git a/bmm/src/main/java/org/openehr/bmm/persistence/PersistedBmmSimpleType.java b/bmm/src/main/java/org/openehr/bmm/persistence/PersistedBmmSimpleType.java index 7e07c3206..1824f5e60 100644 --- a/bmm/src/main/java/org/openehr/bmm/persistence/PersistedBmmSimpleType.java +++ b/bmm/src/main/java/org/openehr/bmm/persistence/PersistedBmmSimpleType.java @@ -39,6 +39,12 @@ public class PersistedBmmSimpleType extends PersistedBmmType impl public static final String P_BMM_SIMPLE_TYPE = "P_BMM_SIMPLE_TYPE"; + /** + * Name of type - must be a simple class name. 
+ */ + private String type; + + public PersistedBmmSimpleType() { } @@ -46,10 +52,7 @@ public PersistedBmmSimpleType(String type) { this.type = type; } - /** - * Name of type - must be a simple class name. - */ - private String type; + /** * Returns the name of type - must be a simple class name. diff --git a/bmm/src/main/java/org/openehr/bmm/rmaccess/ReferenceModelAccess.java b/bmm/src/main/java/org/openehr/bmm/rmaccess/ReferenceModelAccess.java index 71ac7eb17..fe014e135 100644 --- a/bmm/src/main/java/org/openehr/bmm/rmaccess/ReferenceModelAccess.java +++ b/bmm/src/main/java/org/openehr/bmm/rmaccess/ReferenceModelAccess.java @@ -24,6 +24,7 @@ import org.apache.commons.io.FileUtils; import org.apache.commons.io.filefilter.IOFileFilter; import org.apache.commons.io.filefilter.TrueFileFilter; +import org.openehr.bmm.core.BmmClass; import org.openehr.bmm.core.BmmModel; import org.openehr.bmm.persistence.BmmIncludeSpecification; import org.openehr.bmm.persistence.PersistedBmmSchema; @@ -486,47 +487,20 @@ public void loadSchemas() { } //set list of schemas to load; used later to determine what to put in `top_level_schemas' - List itemsToRemove = new ArrayList(); - if (!schemaLoadList.isEmpty()) { - for (String listItem : schemaLoadList) { - if (!allSchemas.containsKey(listItem)) { - validator.addWarning(BmmMessageIds.ec_bmm_schema_invalid_load_list, listItem); - itemsToRemove.add(listItem); - } - } - schemaLoadList.removeAll(itemsToRemove); - } else { - schemaLoadList = new ArrayList<>(); - populateLoadListInDependencyOrder(); - validator.addWarning(BmmMessageIds.ec_bmm_schemas_no_load_list_found); - } + fillSchemaLoadList(); //initial load of all schemas, which populates `schema_inclusion_map'; - schemaLoadList.forEach((aSchemaId) -> { - SchemaDescriptor aSchemaDescriptor = allSchemas.get(aSchemaId); - if (aSchemaDescriptor.getSchemaValidator().hasPassed()) { - loadSchemaIncludeClosure(aSchemaId); - if 
(aSchemaDescriptor.getSchemaValidator().getMessageLogger().hasWarnings()) { - validator.addWarning(BmmMessageIds.ec_bmm_schema_passed_with_warnings, aSchemaDescriptor.getSchemaValidator().getErrorStrings()); - } - - } else { - validator.addError(BmmMessageIds.ec_bmm_schema_basic_validation_failed, aSchemaId, aSchemaDescriptor.getSchemaValidator().getErrorStrings()); - - if (!aSchemaDescriptor.isBmmCompatible()) { - validator.addError(BmmMessageIds.ec_bmm_schema_version_incompatible_with_tool, - aSchemaId, - BmmDefinitions.BMM_INTERNAL_VERSION); - } - } - }); + loadSchemasOrAddValidationErrors(); //propagate errors found so far - //Also here: mark the 'top-level' schemas, inferred from the inclusion maps in each schema allSchemas.forEach((aSchemaId, aSchemaDescriptor) -> { if (!aSchemaDescriptor.getSchemaValidator().hasPassed()) { mergeValidationErrors(aSchemaDescriptor); } + }); + + //mark the 'top-level' schemas, inferred from the inclusion maps in each schema + allSchemas.forEach((aSchemaId, aSchemaDescriptor) -> { if (!schemaInclusionMap.containsKey(aSchemaDescriptor.getSchemaId())) { aSchemaDescriptor.setTopLevel(true); } @@ -541,126 +515,202 @@ public void loadSchemas() { } }); - boolean finished = false; + //Now we process the include relations on the P_BMM top-level schemas, creating fully populated schemas - for (int index = 0; index < MAXIMUM_INCLUSION_DEPTH && !finished; index++) { - finished = true; - for (String key : schemaInclusionMap.keySet()) { - List includeList = schemaInclusionMap.get(key); - if (candidateSchemas.containsKey(key)) { - PersistedBmmSchema includedSchema = candidateSchemas.get(key).getPersistentSchema(); - //only process current schema if its lower level includes have already been copied into it, - //or if it had no includes, since only then is it ready to be itself included in the next one up the chain - //If this included schema is in this state, merge its contents into each schema that includes it - if (includedSchema.getState() 
== PersistedBmmSchemaState.STATE_INCLUDES_PROCESSED) { - //Iterate over the schemas that include `included_schema' and process the inclusion - for (String includeItem : includeList) { - if (candidateSchemas.containsKey(includeItem)) { - PersistedBmmSchema includingSchema = candidateSchemas.get(includeItem).getPersistentSchema(); - if (includingSchema.getState() == PersistedBmmSchemaState.STATE_INCLUDES_PENDING) { - includingSchema.merge(includedSchema); - includingSchema.caseInsensitiveIncludeRemoval(key); - if(includingSchema.getIncludes().size() == 0) { - includingSchema.setState(PersistedBmmSchemaState.STATE_INCLUDES_PROCESSED); - } - validator.addInfo(BmmMessageIds.ec_bmm_schema_merged_schema, - includedSchema.getSchemaId(), - candidateSchemas.get(includeItem).getSchemaId()); - finished = false; + processIncludeRelations(); + + //By this point the P_BMM schemas have been merged, and the top-level P_BMM schemas can be validated + //This will cause each schema to potentially create errors to do with included schemas as well as itself + //These errors then need to be integrated with the original schemas, so as to be reported correctly + validateAndSetTopLevelSchema(); + + createModelsByClosureAndVersion(); + + createSchemasByPublisher(); + + calculateDescendants(); + + loadCount += 1; + + + } catch (Exception e) { + exceptionEncountered = true; + validator.addError(BmmMessageIds.ec_bmm_schema_assertion_violation, e.getClass() + ": " + e.getMessage()); + + e.printStackTrace(); + } + } + + + private void fillSchemaLoadList() { + List itemsToRemove = new ArrayList(); + if (!schemaLoadList.isEmpty()) { + for (String listItem : schemaLoadList) { + if (!allSchemas.containsKey(listItem)) { + validator.addWarning(BmmMessageIds.ec_bmm_schema_invalid_load_list, listItem); + itemsToRemove.add(listItem); + } + } + schemaLoadList.removeAll(itemsToRemove); + } else { + schemaLoadList = new ArrayList<>(); + populateLoadListInDependencyOrder(); + 
validator.addWarning(BmmMessageIds.ec_bmm_schemas_no_load_list_found); + } + } + + private void loadSchemasOrAddValidationErrors() { + schemaLoadList.forEach((aSchemaId) -> { + SchemaDescriptor aSchemaDescriptor = allSchemas.get(aSchemaId); + if (aSchemaDescriptor.getSchemaValidator().hasPassed()) { + loadSchemaIncludeClosure(aSchemaId); + if (aSchemaDescriptor.getSchemaValidator().getMessageLogger().hasWarnings()) { + validator.addWarning(BmmMessageIds.ec_bmm_schema_passed_with_warnings, aSchemaDescriptor.getSchemaValidator().getErrorStrings()); + } + + } else { + validator.addError(BmmMessageIds.ec_bmm_schema_basic_validation_failed, aSchemaId, aSchemaDescriptor.getSchemaValidator().getErrorStrings()); + + if (!aSchemaDescriptor.isBmmCompatible()) { + validator.addError(BmmMessageIds.ec_bmm_schema_version_incompatible_with_tool, + aSchemaId, + BmmDefinitions.BMM_INTERNAL_VERSION); + } + } + }); + } + + private void processIncludeRelations() { + boolean finished = false; + for (int index = 0; index < MAXIMUM_INCLUSION_DEPTH && !finished; index++) { + finished = true; + for (String key : schemaInclusionMap.keySet()) { + List includeList = schemaInclusionMap.get(key); + if (candidateSchemas.containsKey(key)) { + PersistedBmmSchema includedSchema = candidateSchemas.get(key).getPersistentSchema(); + //only process current schema if its lower level includes have already been copied into it, + //or if it had no includes, since only then is it ready to be itself included in the next one up the chain + //If this included schema is in this state, merge its contents into each schema that includes it + if (includedSchema.getState() == PersistedBmmSchemaState.STATE_INCLUDES_PROCESSED) { + //Iterate over the schemas that include `included_schema' and process the inclusion + for (String includeItem : includeList) { + if (candidateSchemas.containsKey(includeItem)) { + PersistedBmmSchema includingSchema = candidateSchemas.get(includeItem).getPersistentSchema(); + if 
(includingSchema.getState() == PersistedBmmSchemaState.STATE_INCLUDES_PENDING) { + includingSchema.merge(includedSchema); + includingSchema.caseInsensitiveIncludeRemoval(key); + if(includingSchema.getIncludes().size() == 0) { + includingSchema.setState(PersistedBmmSchemaState.STATE_INCLUDES_PROCESSED); } - } else { - validator.addError(BmmMessageIds.ec_bmm_schema_including_schema_not_valid, includeItem); + validator.addInfo(BmmMessageIds.ec_bmm_schema_merged_schema, + includedSchema.getSchemaId(), + candidateSchemas.get(includeItem).getSchemaId()); + finished = false; } + } else { + validator.addError(BmmMessageIds.ec_bmm_schema_including_schema_not_valid, includeItem); } } - } else { - validator.addError(BmmMessageIds.ec_bmm_schema_included_schema_not_found, key); } + } else { + validator.addError(BmmMessageIds.ec_bmm_schema_included_schema_not_found, key); } } + } + } - //By this point the P_BMM schemas have been merged, and the top-level P_BMM schemas can be validated - //This will cause each schema to potentially create errors to do with included schemas as well as itself - //These errors then need to be integrated with the original schemas, so as to be reported correctly - BmmModel topLevelSchema = null; - for (String aKey : candidateSchemas.keySet()) { - SchemaDescriptor aSchemaDescriptor = candidateSchemas.get(aKey); - if (aSchemaDescriptor.isTopLevel() && schemaLoadList.contains(aSchemaDescriptor.getSchemaId())) { - if (aSchemaDescriptor.getSchemaValidator().hasPassed() && aSchemaDescriptor.getPersistentSchema().getState() == PersistedBmmSchemaState.STATE_INCLUDES_PROCESSED) { - try { - //validate the schema & if passed, put it into `top_level_schemas' - aSchemaDescriptor.validate(); - mergeValidationErrors(aSchemaDescriptor); - if (aSchemaDescriptor.getSchemaValidator().hasPassed()) { - //now we create a BMM_SCHEMA from a fully merged P_BMM_SCHEMA - aSchemaDescriptor.createSchema(); - if (aSchemaDescriptor.getSchema() != null) { - topLevelSchema = 
aSchemaDescriptor.getSchema(); - } - validModels.put(aSchemaDescriptor.getSchemaId(), topLevelSchema); - if (aSchemaDescriptor.getSchemaValidator().getMessageLogger().hasWarnings()) { - validator.addWarning(BmmMessageIds.ec_bmm_schema_passed_with_warnings, - aSchemaDescriptor.getSchemaId(), - aSchemaDescriptor.getSchemaValidator().getErrorStrings()); - } - } else { - validator.addError(BmmMessageIds.ec_bmm_schema_post_merge_validate_fail, + private void validateAndSetTopLevelSchema() { + BmmModel topLevelSchema = null; + for (String aKey : candidateSchemas.keySet()) { + SchemaDescriptor aSchemaDescriptor = candidateSchemas.get(aKey); + if (aSchemaDescriptor.isTopLevel() && schemaLoadList.contains(aSchemaDescriptor.getSchemaId())) { + if (aSchemaDescriptor.getSchemaValidator().hasPassed() && aSchemaDescriptor.getPersistentSchema().getState() == PersistedBmmSchemaState.STATE_INCLUDES_PROCESSED) { + try { + //validate the schema & if passed, put it into `top_level_schemas' + aSchemaDescriptor.validate(); + mergeValidationErrors(aSchemaDescriptor); + if (aSchemaDescriptor.getSchemaValidator().hasPassed()) { + //now we create a BMM_SCHEMA from a fully merged P_BMM_SCHEMA + aSchemaDescriptor.createSchema(); + if (aSchemaDescriptor.getSchema() != null) { + topLevelSchema = aSchemaDescriptor.getSchema(); + } + validModels.put(aSchemaDescriptor.getSchemaId(), topLevelSchema); + if (aSchemaDescriptor.getSchemaValidator().getMessageLogger().hasWarnings()) { + validator.addWarning(BmmMessageIds.ec_bmm_schema_passed_with_warnings, aSchemaDescriptor.getSchemaId(), aSchemaDescriptor.getSchemaValidator().getErrorStrings()); } - } catch (Exception e) { - validator.addError(BmmMessageIds.ec_bmm_schema_post_merge_validate_fail, aSchemaDescriptor.getSchemaId(), e.getClass().getSimpleName() + ": " + e.getMessage()); - log.error("error validating schema", e); + } else { + validator.addError(BmmMessageIds.ec_bmm_schema_post_merge_validate_fail, + aSchemaDescriptor.getSchemaId(), + 
aSchemaDescriptor.getSchemaValidator().getErrorStrings()); } + } catch (Exception e) { + validator.addError(BmmMessageIds.ec_bmm_schema_post_merge_validate_fail, aSchemaDescriptor.getSchemaId(), e.getClass().getSimpleName() + ": " + e.getMessage()); + log.error("error validating schema", e); } } } + } + } - //now populate the `models_by_closure' table - modelsByClosureAndVersion.clear(); - List rmClosures = new ArrayList<>(); - String modelPublisher = null; - for (String aSchemaId : validModels.keySet()) { - BmmModel model = validModels.get(aSchemaId); - modelPublisher = model.getRmPublisher(); - //put a ref to schema, keyed by the model_publisher-package_name key (lower-case) for later lookup by compiler - rmClosures = model.getArchetypeRmClosurePackages(); - for (String rmClosure : rmClosures) { - String qualifiedRmClosureName = BmmDefinitions.publisherQualifiedRmClosureName(modelPublisher, rmClosure); - BmmModel existingSchema = getReferenceModelForClosure(qualifiedRmClosureName, model.getRmRelease()); - if (existingSchema != null) { - validator.addInfo(BmmMessageIds.ec_bmm_schema_duplicate_found, - qualifiedRmClosureName, - existingSchema.getSchemaId(), - aSchemaId); - } else { - addModelForClosure(qualifiedRmClosureName, model); - } + private void createModelsByClosureAndVersion() { + //now populate the `models_by_closure' table + modelsByClosureAndVersion.clear(); + List rmClosures = new ArrayList<>(); + String modelPublisher = null; + for (String aSchemaId : validModels.keySet()) { + BmmModel model = validModels.get(aSchemaId); + modelPublisher = model.getRmPublisher(); + //put a ref to schema, keyed by the model_publisher-package_name key (lower-case) for later lookup by compiler + rmClosures = model.getArchetypeRmClosurePackages(); + for (String rmClosure : rmClosures) { + String qualifiedRmClosureName = BmmDefinitions.publisherQualifiedRmClosureName(modelPublisher, rmClosure); + BmmModel existingSchema = getReferenceModelForClosure(qualifiedRmClosureName, 
model.getRmRelease()); + if (existingSchema != null) { + validator.addInfo(BmmMessageIds.ec_bmm_schema_duplicate_found, + qualifiedRmClosureName, + existingSchema.getSchemaId(), + aSchemaId); + } else { + addModelForClosure(qualifiedRmClosureName, model); } } + } + } - //add entry to top_level_schemas_by_publisher - List publisherSchemas = null; - for (String aSchemaId : allSchemas.keySet()) { - SchemaDescriptor aSchemaDescriptor = allSchemas.get(aSchemaId); - if (aSchemaDescriptor.isTopLevel()) { - modelPublisher = aSchemaDescriptor.getMetadata().get(BmmDefinitions.METADATA_RM_PUBLISHER); - if (!topLevelSchemasByPublisher.containsKey(modelPublisher)) { - publisherSchemas = new ArrayList<>(); - topLevelSchemasByPublisher.put(modelPublisher, publisherSchemas); - } else { - publisherSchemas = topLevelSchemasByPublisher.get(modelPublisher); - } - publisherSchemas.add(aSchemaDescriptor); + private void createSchemasByPublisher() { + String modelPublisher;//add entry to top_level_schemas_by_publisher + List publisherSchemas = null; + for (String aSchemaId : allSchemas.keySet()) { + SchemaDescriptor aSchemaDescriptor = allSchemas.get(aSchemaId); + if (aSchemaDescriptor.isTopLevel()) { + modelPublisher = aSchemaDescriptor.getMetadata().get(BmmDefinitions.METADATA_RM_PUBLISHER); + if (!topLevelSchemasByPublisher.containsKey(modelPublisher)) { + publisherSchemas = new ArrayList<>(); + topLevelSchemasByPublisher.put(modelPublisher, publisherSchemas); + } else { + publisherSchemas = topLevelSchemasByPublisher.get(modelPublisher); } + publisherSchemas.add(aSchemaDescriptor); } + } + } - loadCount += 1; - } catch (Exception e) { - exceptionEncountered = true; - validator.addError(BmmMessageIds.ec_bmm_schema_assertion_violation, e.getClass() + ": " + e.getMessage()); + /** + * Calculate the descendants of each bmmClass and set it into the immediateDescendants property + */ + private void calculateDescendants() { + //calculate descendants + for(BmmModel 
model:this.getValidModels().values()) { + final Map classDefinitions = model.getClassDefinitions(); - e.printStackTrace(); + for(BmmClass bmmClass:classDefinitions.values()) { + for(BmmClass ancestor:bmmClass.getAncestors().values()) { + ancestor.addImmediateDescendant(bmmClass.getName()); + } + } } } diff --git a/grammars/build.gradle b/grammars/build.gradle index 8009d7bb4..b31fff869 100644 --- a/grammars/build.gradle +++ b/grammars/build.gradle @@ -1,7 +1,7 @@ description = "grammars for parsing ADL, ODIN and xpath" apply plugin: 'antlr' -ext.antlrVersion = '4.5.3' +ext.antlrVersion = '4.7.1' generateGrammarSource { //antlr4 outputDirectory = new File("${project.buildDir}/generated-src/antlr/main/com/nedap/archie/adlparser/antlr".toString()) @@ -15,5 +15,5 @@ classes { dependencies { antlr "org.antlr:antlr4:${antlrVersion}" - compile 'org.antlr:antlr4-runtime:4.5.3' + compile 'org.antlr:antlr4-runtime:4.7.1' } \ No newline at end of file diff --git a/grammars/src/main/antlr/XPath.g4 b/grammars/src/main/antlr/XPath.g4 index 36820f3c9..00a8083bc 100644 --- a/grammars/src/main/antlr/XPath.g4 +++ b/grammars/src/main/antlr/XPath.g4 @@ -227,4 +227,4 @@ GE : '>='; COLON : ':'; CC : '::'; APOS : '\''; -QUOT : '\"'; +QUOT : '"'; diff --git a/odin/src/main/java/com/nedap/archie/serializer/odin/OdinObjectParser.java b/odin/src/main/java/com/nedap/archie/serializer/odin/OdinObjectParser.java index 32662a161..bcd0909d7 100644 --- a/odin/src/main/java/com/nedap/archie/serializer/odin/OdinObjectParser.java +++ b/odin/src/main/java/com/nedap/archie/serializer/odin/OdinObjectParser.java @@ -3,8 +3,8 @@ import com.fasterxml.jackson.databind.JavaType; import com.nedap.archie.adlparser.antlr.AdlLexer; import com.nedap.archie.adlparser.antlr.AdlParser; +import org.antlr.v4.runtime.CharStreams; import org.antlr.v4.runtime.CommonTokenStream; -import org.antlr.v4.runtime.ANTLRInputStream; import java.io.IOException; import java.io.InputStream; @@ -31,19 +31,19 @@ public static T 
convert(AdlParser.Odin_textContext odin, JavaType clazz) { } public static T convert(InputStream odin, Class clazz) throws IOException { - AdlLexer adlLexer = new AdlLexer(new ANTLRInputStream(odin)); + AdlLexer adlLexer = new AdlLexer(CharStreams.fromStream(odin)); AdlParser parser = new AdlParser(new CommonTokenStream(adlLexer)); return convert(parser.odin_text(), clazz); } public static T convert(String odin, Class clazz) { - AdlLexer adlLexer = new AdlLexer(new ANTLRInputStream(odin)); + AdlLexer adlLexer = new AdlLexer(CharStreams.fromString(odin)); AdlParser parser = new AdlParser(new CommonTokenStream(adlLexer)); return convert(parser.odin_text(), clazz); } public static T convert(String odin, JavaType clazz) { - AdlLexer adlLexer = new AdlLexer(new ANTLRInputStream(odin)); + AdlLexer adlLexer = new AdlLexer(CharStreams.fromString(odin)); AdlParser parser = new AdlParser(new CommonTokenStream(adlLexer)); return convert(parser.odin_text(), clazz); } diff --git a/odin/src/main/java/com/nedap/archie/serializer/odin/OdinToJsonConverter.java b/odin/src/main/java/com/nedap/archie/serializer/odin/OdinToJsonConverter.java index a80126a02..ed7b22951 100644 --- a/odin/src/main/java/com/nedap/archie/serializer/odin/OdinToJsonConverter.java +++ b/odin/src/main/java/com/nedap/archie/serializer/odin/OdinToJsonConverter.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.databind.PropertyNamingStrategy; import com.nedap.archie.adlparser.antlr.AdlParser; import com.nedap.archie.adlparser.antlr.AdlParser.*; +import org.apache.commons.lang.StringEscapeUtils; import java.util.List; @@ -185,9 +186,11 @@ private void outputEscaped(String text) { //strip " if present, all the other "-characters will have to be escaped if(text.startsWith("\"") && text.endsWith("\"")) { String textWithoutQuotationMarks = text.substring(1, text.length()-1); - output.append(objectMapper.writeValueAsString(textWithoutQuotationMarks)); + + String textQuotesReplaced = 
StringEscapeUtils.unescapeJava(textWithoutQuotationMarks); + output.append(objectMapper.writeValueAsString(textQuotesReplaced)); } else { - output.append(text); + output.append(objectMapper.writeValueAsString(text)); } } catch (JsonProcessingException e) { throw new RuntimeException(e); diff --git a/odin/src/main/java/com/nedap/archie/serializer/odin/OdinValueParser.java b/odin/src/main/java/com/nedap/archie/serializer/odin/OdinValueParser.java index ed7bcb0ed..2159e42b8 100644 --- a/odin/src/main/java/com/nedap/archie/serializer/odin/OdinValueParser.java +++ b/odin/src/main/java/com/nedap/archie/serializer/odin/OdinValueParser.java @@ -1,6 +1,7 @@ package com.nedap.archie.serializer.odin; import com.nedap.archie.adlparser.antlr.AdlParser; +import org.apache.commons.lang.StringEscapeUtils; /** * Created by pieter.bos on 15/10/15. @@ -27,6 +28,6 @@ public static String parseOdinStringValue(AdlParser.String_valueContext context) if(text.length() == 2) { // empty string, "" return ""; } - return text.substring(1, text.length() - 1); + return StringEscapeUtils.unescapeJava(text.substring(1, text.length() - 1)); } } diff --git a/odin/src/main/java/org/openehr/odin/loader/OdinLoaderImpl.java b/odin/src/main/java/org/openehr/odin/loader/OdinLoaderImpl.java index e2610e747..87022a74e 100644 --- a/odin/src/main/java/org/openehr/odin/loader/OdinLoaderImpl.java +++ b/odin/src/main/java/org/openehr/odin/loader/OdinLoaderImpl.java @@ -25,16 +25,14 @@ import com.nedap.archie.adlparser.antlr.odinParser; import com.nedap.archie.antlr.errors.ANTLRParserErrors; import com.nedap.archie.antlr.errors.ArchieErrorListener; -import org.apache.commons.io.IOUtils; -import org.openehr.odin.antlr.OdinVisitorImpl; -import org.antlr.v4.runtime.ANTLRInputStream; +import org.antlr.v4.runtime.CharStream; +import org.antlr.v4.runtime.CharStreams; import org.antlr.v4.runtime.CommonTokenStream; import org.antlr.v4.runtime.tree.ParseTree; +import org.openehr.odin.antlr.OdinVisitorImpl; import 
org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.File; -import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; @@ -43,56 +41,46 @@ public class OdinLoaderImpl { private static Logger log = LoggerFactory.getLogger(OdinLoaderImpl.class); public OdinVisitorImpl loadOdinFile(String bmmFilePath) { - File file = new File(bmmFilePath); - OdinVisitorImpl visitor = null; - try (FileInputStream fis = new FileInputStream(file)) { - visitor = loadOdinFile(fis); + try { + return loadOdinFile(CharStreams.fromFileName(bmmFilePath)); } catch (IOException ioe) { ioe.printStackTrace(); log.error("Error loading reference model", ioe); throw new RuntimeException("Error loading reference model", ioe); } - return visitor; + } + + public OdinVisitorImpl loadOdinFromString(String odinContent) { + return loadOdinFile(CharStreams.fromString(odinContent)); } public OdinVisitorImpl loadOdinFile(InputStream inputStream) { - OdinVisitorImpl visitor = new OdinVisitorImpl<>(); try { - ANTLRInputStream input = new ANTLRInputStream(inputStream); - odinLexer lexer = new odinLexer(input); - CommonTokenStream tokens = new CommonTokenStream(lexer); - odinParser parser = new odinParser(tokens); - ANTLRParserErrors errors = new ANTLRParserErrors(); - ArchieErrorListener listener = new ArchieErrorListener(errors); - parser.addErrorListener(listener); - ParseTree tree = parser.odin_text(); - visitor.visit(tree); - if(errors.hasErrors()) { - throw new RuntimeException("errors parsing ODIN file: " + errors); - } + return loadOdinFile(CharStreams.fromStream(inputStream)); } catch (IOException ioe) { ioe.printStackTrace(); log.error("Error loading odin file", ioe); throw new RuntimeException("Error loading odin file", ioe); } - return visitor; } - public OdinVisitorImpl loadOdinFromString(String odinContent) { + public OdinVisitorImpl loadOdinFile(CharStream input) { + OdinVisitorImpl visitor = new OdinVisitorImpl<>(); - try { - InputStream is = 
IOUtils.toInputStream(odinContent, "UTF-8"); - ANTLRInputStream input = new ANTLRInputStream(is); - odinLexer lexer = new odinLexer(input); - CommonTokenStream tokens = new CommonTokenStream(lexer); - odinParser parser = new odinParser(tokens); - ParseTree tree = parser.odin_text(); - visitor.visit(tree); - } catch (IOException ioe) { - ioe.printStackTrace(); - log.error("Error loading odin file", ioe); - throw new RuntimeException("Error loading odin file", ioe); + odinLexer lexer = new odinLexer(input); + CommonTokenStream tokens = new CommonTokenStream(lexer); + odinParser parser = new odinParser(tokens); + ANTLRParserErrors errors = new ANTLRParserErrors(); + ArchieErrorListener listener = new ArchieErrorListener(errors); + parser.addErrorListener(listener); + ParseTree tree = parser.odin_text(); + visitor.visit(tree); + if (errors.hasErrors()) { + throw new RuntimeException("errors parsing ODIN file: " + errors); } return visitor; + } + + } diff --git a/path-queries/src/main/java/com/nedap/archie/query/RMPathQuery.java b/path-queries/src/main/java/com/nedap/archie/query/RMPathQuery.java index c0356bf32..ae93fab90 100644 --- a/path-queries/src/main/java/com/nedap/archie/query/RMPathQuery.java +++ b/path-queries/src/main/java/com/nedap/archie/query/RMPathQuery.java @@ -322,6 +322,7 @@ private boolean equalsName(String name, String nameFromQuery) { return false; } name = name.replaceAll("( |\\t|\\n|\\r)+", ""); + nameFromQuery = nameFromQuery.replaceAll("( |\\t|\\n|\\r)+", ""); return name.equalsIgnoreCase(nameFromQuery); } diff --git a/tools/src/main/java/com/nedap/archie/archetypevalidator/ArchetypeValidation.java b/tools/src/main/java/com/nedap/archie/archetypevalidator/ArchetypeValidation.java index a4fefee82..8d502df40 100644 --- a/tools/src/main/java/com/nedap/archie/archetypevalidator/ArchetypeValidation.java +++ b/tools/src/main/java/com/nedap/archie/archetypevalidator/ArchetypeValidation.java @@ -11,6 +11,6 @@ */ public interface ArchetypeValidation { - 
public List validate(MetaModels models, Archetype archetype, Archetype flatParent, ArchetypeRepository repository); + List validate(MetaModels models, Archetype archetype, Archetype flatParent, ArchetypeRepository repository, ArchetypeValidationSettings settings); } diff --git a/tools/src/main/java/com/nedap/archie/archetypevalidator/ArchetypeValidationBase.java b/tools/src/main/java/com/nedap/archie/archetypevalidator/ArchetypeValidationBase.java index 24460bdc1..919d62c8c 100644 --- a/tools/src/main/java/com/nedap/archie/archetypevalidator/ArchetypeValidationBase.java +++ b/tools/src/main/java/com/nedap/archie/archetypevalidator/ArchetypeValidationBase.java @@ -16,17 +16,20 @@ public abstract class ArchetypeValidationBase implements ArchetypeValidation { protected List messages; protected ModelInfoLookup lookup; protected MetaModels combinedModels; + protected ArchetypeValidationSettings settings; public ArchetypeValidationBase() { } @Override - public List validate(MetaModels models, Archetype archetype, Archetype flatParent, ArchetypeRepository repository) { + public List validate(MetaModels models, Archetype archetype, Archetype flatParent, ArchetypeRepository repository, ArchetypeValidationSettings settings) { this.archetype = archetype; this.flatParent = flatParent; this.repository = repository; this.lookup = models.getSelectedModelInfoLookup(); this.combinedModels = models; + this.settings = settings; + messages = new ArrayList<>(); validate(); return messages; @@ -98,4 +101,8 @@ public ArchetypeRepository getRepository() { public List getMessages() { return messages; } + + public ArchetypeValidationSettings getSettings() { + return settings; + } } diff --git a/tools/src/main/java/com/nedap/archie/archetypevalidator/ArchetypeValidationSettings.java b/tools/src/main/java/com/nedap/archie/archetypevalidator/ArchetypeValidationSettings.java new file mode 100644 index 000000000..f748567b8 --- /dev/null +++ 
b/tools/src/main/java/com/nedap/archie/archetypevalidator/ArchetypeValidationSettings.java @@ -0,0 +1,32 @@ +package com.nedap.archie.archetypevalidator; + +public class ArchetypeValidationSettings { + + /** + * When specializing cardinality and existence, is it allowed to specify the exact same as in the parent object? + */ + private boolean strictMultiplicitiesSpecializationValidation = true; + /** + * Whether to always try to flatten, even on validation errors + */ + private boolean alwaysTryToFlatten = false; + + public ArchetypeValidationSettings() { + } + + public boolean isStrictMultiplicitiesSpecializationValidation() { + return strictMultiplicitiesSpecializationValidation; + } + + public void setStrictMultiplicitiesSpecializationValidation(boolean strictMultiplicitiesSpecializationValidation) { + this.strictMultiplicitiesSpecializationValidation = strictMultiplicitiesSpecializationValidation; + } + + public boolean isAlwaysTryToFlatten() { + return alwaysTryToFlatten; + } + + public void setAlwaysTryToFlatten(boolean alwaysTryToFlatten) { + this.alwaysTryToFlatten = alwaysTryToFlatten; + } +} diff --git a/tools/src/main/java/com/nedap/archie/archetypevalidator/ArchetypeValidator.java b/tools/src/main/java/com/nedap/archie/archetypevalidator/ArchetypeValidator.java index c15df6e03..37ba8cbeb 100644 --- a/tools/src/main/java/com/nedap/archie/archetypevalidator/ArchetypeValidator.java +++ b/tools/src/main/java/com/nedap/archie/archetypevalidator/ArchetypeValidator.java @@ -57,12 +57,15 @@ public ArchetypeValidator(MetaModels models) { //but there's no reason this cannot be parsed, so check them here validationsPhase0.add(new AttributeUniquenessValidation()); validationsPhase0.add(new NodeIdValidation()); - validationsPhase0.add(new MultiplicitiesValidation()); + validationsPhase0.add(new AttributeTupleValidation()); validationsPhase1 = new ArrayList<>(); //conforms to spec validationsPhase1.add(new BasicChecks()); + //MultiplicitiesValidation is a phase 0 
(parser) validation in the archetype editor. However, that would just prevent too many checks, including one of the example checks + //so it has been moved to phase 1 + validationsPhase1.add(new MultiplicitiesValidation()); validationsPhase1.add(new AuthoredArchetypeMetadataChecks()); validationsPhase1.add(new DefinitionStructureValidation()); validationsPhase1.add(new BasicTerminologyValidation()); @@ -79,8 +82,6 @@ public ArchetypeValidator(MetaModels models) { validationsPhase3 = new ArrayList<>(); validationsPhase3.add(new FlatFormValidation()); - - } public ValidationResult validate(Archetype archetype) { @@ -105,6 +106,10 @@ public ValidationResult validate(Archetype archetype) { * @return */ public ValidationResult validate(Archetype archetype, FullArchetypeRepository repository) { + ArchetypeValidationSettings settings = repository == null ? null : repository.getArchetypeValidationSettings(); + if(settings == null) { + settings = new ArchetypeValidationSettings(); + } if(archetype instanceof Template) { //in the case of a template, add a repository that can store the overlays separate from the rest of the archetypes //later they can be retrieved and handled as extra archetypes, that are not top level archetypes usable in other @@ -152,23 +157,29 @@ public ValidationResult validate(Archetype archetype, FullArchetypeRepository re repository = new InMemoryFullArchetypeRepository(); } - List messages = runValidations(archetype, repository, flatParent, validationsPhase0); - messages.addAll(runValidations(archetype, repository, flatParent, validationsPhase1)); - - //the separate validations will check if the archtype is specialized and if they need this in phase 2 - //because the RM validations are technically phase 2 and required to run - //also the separate validations are implemented so that they can run with errors in phase 1 without exceptions - //plus exceptions will nicely be logged as an OTHER error type - we can safely run it and you will get - //more 
errors in one go - could be useful - messages.addAll(runValidations(archetype, repository, flatParent, validationsPhase2)); - + List messages = runValidations(archetype, repository, settings, flatParent, validationsPhase0); ValidationResult result = new ValidationResult(archetype); result.setErrors(messages); if(result.passes()) { + //continue running only if the basic phase 0 validation run, otherwise we get annoying exceptions + messages.addAll(runValidations(archetype, repository, settings, flatParent, validationsPhase1)); + + //the separate validations will check if the archtype is specialized and if they need this in phase 2 + //because the RM validations are technically phase 2 and required to run + //also the separate validations are implemented so that they can run with errors in phase 1 without exceptions + //plus exceptions will nicely be logged as an OTHER error type - we can safely run it and you will get + //more errors in one go - could be useful + messages.addAll(runValidations(archetype, repository, settings, flatParent, validationsPhase2)); + } + + result.setErrors(messages); + if(result.passes() || settings.isAlwaysTryToFlatten()) { try { Archetype flattened = new Flattener(repository, combinedModels).flatten(archetype); result.setFlattened(flattened); - messages.addAll(runValidations(flattened, repository, flatParent, validationsPhase3)); + if(result.passes()) { + messages.addAll(runValidations(flattened, repository, settings, flatParent, validationsPhase3)); + } } catch (Exception e) { messages.add(new ValidationMessage(ErrorType.OTHER, "flattening failed with exception " + e)); logger.error("error during validation", e); @@ -209,15 +220,15 @@ private ValidationResult getParentValidationResult(Archetype archetype, FullArch * @return */ private ValidationResult getValidationResult(String archetypeId, FullArchetypeRepository repository) { - Archetype parent = repository.getArchetype(archetypeId); - if(parent == null) { + Archetype archetype = 
repository.getArchetype(archetypeId); + if(archetype == null) { return null; //this situation will trigger the correct message later } ValidationResult validationResult = repository.getValidationResult(archetypeId); if(validationResult == null) { - //parent not yet validated. do it now. - validationResult = validate(parent, repository); + //archetype not yet validated. do it now. + validationResult = validate(archetype, repository); } return validationResult; } @@ -228,12 +239,12 @@ private Archetype cloneAndPreprocess(MetaModels models, Archetype archetype) { return preprocessed; } - private List runValidations(Archetype archetype, ArchetypeRepository repository, Archetype flatParent, List validations) { + private List runValidations(Archetype archetype, ArchetypeRepository repository, ArchetypeValidationSettings settings, Archetype flatParent, List validations) { List messages = new ArrayList<>(); for(ArchetypeValidation validation: validations) { try { - messages.addAll(validation.validate(combinedModels, archetype, flatParent, repository)); + messages.addAll(validation.validate(combinedModels, archetype, flatParent, repository, settings)); } catch (Exception e) { logger.error("error running validation processor", e); e.printStackTrace(); diff --git a/tools/src/main/java/com/nedap/archie/archetypevalidator/ErrorType.java b/tools/src/main/java/com/nedap/archie/archetypevalidator/ErrorType.java index e0a452162..9cf210947 100644 --- a/tools/src/main/java/com/nedap/archie/archetypevalidator/ErrorType.java +++ b/tools/src/main/java/com/nedap/archie/archetypevalidator/ErrorType.java @@ -78,7 +78,7 @@ public enum ErrorType { VDSEV("archetype slot 'exclude' constraint validity. 
The 'exclude' constraint in an archetype slot must conform to the slot constraint validity rules."), VACSO("single-valued attribute child object occurrences validity: the occurrences of a child object of a single-valued attribute cannot have an upper limit greater than 1."), VACMCU(" cardinality/occurrences upper bound validity: where a cardinality with a finite upper bound is stated on an attribute, for all immediate child objects for which an occurrences constraint is stated, the occurrences must either have an open upper bound (i.e. n..*) which is interpreted as the maximum value allowed within the cardinality, or else a finite upper bound which is ⇐ the cardinality upper bound."), - WOUC("codein terminology not used in archetype definition"); + WOUC("code in terminology not used in archetype definition"); diff --git a/tools/src/main/java/com/nedap/archie/archetypevalidator/ValidationResult.java b/tools/src/main/java/com/nedap/archie/archetypevalidator/ValidationResult.java index 954b78395..c6fa4fbbf 100644 --- a/tools/src/main/java/com/nedap/archie/archetypevalidator/ValidationResult.java +++ b/tools/src/main/java/com/nedap/archie/archetypevalidator/ValidationResult.java @@ -1,9 +1,11 @@ package com.nedap.archie.archetypevalidator; import com.nedap.archie.aom.Archetype; +import com.nedap.archie.aom.TemplateOverlay; import java.util.ArrayList; import java.util.List; +import java.util.Map; /** * The result of a validation. 
Contains: @@ -111,6 +113,8 @@ public List getOverlayValidations() { return overlayValidations; } + + public String toString() { StringBuilder result = new StringBuilder(); result.append("archetype: " + archetypeId); diff --git a/tools/src/main/java/com/nedap/archie/archetypevalidator/validations/AttributeTupleValidation.java b/tools/src/main/java/com/nedap/archie/archetypevalidator/validations/AttributeTupleValidation.java new file mode 100644 index 000000000..45b827981 --- /dev/null +++ b/tools/src/main/java/com/nedap/archie/archetypevalidator/validations/AttributeTupleValidation.java @@ -0,0 +1,44 @@ +package com.nedap.archie.archetypevalidator.validations; + +import com.nedap.archie.aom.CAttribute; +import com.nedap.archie.aom.CAttributeTuple; +import com.nedap.archie.aom.CComplexObject; +import com.nedap.archie.aom.CObject; +import com.nedap.archie.aom.CPrimitiveTuple; +import com.nedap.archie.aom.utils.AOMUtils; +import com.nedap.archie.archetypevalidator.ErrorType; +import com.nedap.archie.archetypevalidator.ValidatingVisitor; + +import java.util.List; + +public class AttributeTupleValidation extends ValidatingVisitor { + + /** + * Override for validation on complex objects + * @param cObject + * @return + */ + protected void validate(CComplexObject cObject) { + if(cObject.getAttributeTuples() != null) { + for(CAttributeTuple tuple : cObject.getAttributeTuples()) { + List members = tuple.getMembers(); + + if(members == null) { + addMessageWithPath(ErrorType.OTHER, cObject.getPath(), "An attribute tuple must have members"); + } else { + for(CAttribute cAttribute:tuple.getMembers()) { + if (!combinedModels.attributeExists(cObject.getRmTypeName(), cAttribute.getRmAttributeName())) { + addMessageWithPath(ErrorType.VCARM, cObject.getPath(), + "Tuple member attribute " + cAttribute.getRmAttributeName() + " is not a known attribute of " + cObject.getRmTypeName() + " or it is has not been implemented in Archie"); + } + } + for(CPrimitiveTuple 
primitiveTuple:tuple.getTuples()) { + if(primitiveTuple.getMembers().size() != members.size()) { + addMessageWithPath(ErrorType.OTHER, cObject.getPath(), "There should be " + members.size() + " tuple members, but there were " + primitiveTuple.getMembers().size()); + } + } + } + } + } + } +} diff --git a/tools/src/main/java/com/nedap/archie/archetypevalidator/validations/MultiplicitiesValidation.java b/tools/src/main/java/com/nedap/archie/archetypevalidator/validations/MultiplicitiesValidation.java index 492af8124..ea1a18772 100644 --- a/tools/src/main/java/com/nedap/archie/archetypevalidator/validations/MultiplicitiesValidation.java +++ b/tools/src/main/java/com/nedap/archie/archetypevalidator/validations/MultiplicitiesValidation.java @@ -24,7 +24,8 @@ public void validate(CObject cObject) { if(cObject.getParent() != null) { CAttribute attribute = cObject.getParent(); if(attribute.getDifferentialPath() == null) { - //we cannot validate differential paths here becaue we do not know the type + //we cannot validate differential paths here because we do not know the type + //TODO: lookup differential path types to validate as well? if(attribute.isSingle()) { if(cObject.getOccurrences() != null && (cObject.getOccurrences().isUpperUnbounded() || cObject.getOccurrences().getUpper() > 1)) { diff --git a/tools/src/main/java/com/nedap/archie/archetypevalidator/validations/ValidateAgainstReferenceModel.java b/tools/src/main/java/com/nedap/archie/archetypevalidator/validations/ValidateAgainstReferenceModel.java index 390f06c88..515f33479 100644 --- a/tools/src/main/java/com/nedap/archie/archetypevalidator/validations/ValidateAgainstReferenceModel.java +++ b/tools/src/main/java/com/nedap/archie/archetypevalidator/validations/ValidateAgainstReferenceModel.java @@ -19,9 +19,6 @@ */ public class ValidateAgainstReferenceModel extends ValidatingVisitor { - //TODO: what is this? 
- boolean strictValidation = true; - public ValidateAgainstReferenceModel() { super(); } @@ -98,7 +95,7 @@ public void validate(CAttribute cAttribute) { if(cAttribute.getExistence() != null) { if(!defaultAttribute.getExistence().contains(cAttribute.getExistence())) { if(!archetype.isSpecialized() && defaultAttribute.getExistence().equals(cAttribute.getExistence())) { - if(strictValidation) { + if(settings.isStrictMultiplicitiesSpecializationValidation()) { addMessageWithPath(ErrorType.VCAEX, cAttribute.path()); } else { //TODO: warn @@ -112,7 +109,7 @@ public void validate(CAttribute cAttribute) { if(defaultAttribute.isMultiple()) { if(defaultAttribute.getCardinality() != null && cAttribute.getCardinality() != null && !defaultAttribute.getCardinality().contains(cAttribute.getCardinality())){ if(defaultAttribute.getCardinality().equals(cAttribute.getCardinality())) { - if(strictValidation) { + if(settings.isStrictMultiplicitiesSpecializationValidation()) { addMessageWithPath(ErrorType.VCACA, cAttribute.path()); } else { //TODO: warning diff --git a/tools/src/main/java/com/nedap/archie/flattener/CAttributeFlattener.java b/tools/src/main/java/com/nedap/archie/flattener/CAttributeFlattener.java index 697effe83..8c042a0c1 100644 --- a/tools/src/main/java/com/nedap/archie/flattener/CAttributeFlattener.java +++ b/tools/src/main/java/com/nedap/archie/flattener/CAttributeFlattener.java @@ -284,10 +284,12 @@ private boolean shouldReplaceParent(CObject parent, List differentialNo } else if(occurrences != null && occurrences.upperIsOne()) { //REFINE the parent node case 1, the parent has occurrences upper == 1 return true; - } else if (differentialNodes.size() == 1 - && differentialNodes.get(0).effectiveOccurrences(flattener.getMetaModels()::referenceModelPropMultiplicity).upperIsOne()) { - //REFINE the parent node case 2, only one child with occurrences upper == 1 - return true; + } else if (differentialNodes.size() == 1) { + MultiplicityInterval effectiveOccurrences = 
differentialNodes.get(0).effectiveOccurrences(flattener.getMetaModels()::referenceModelPropMultiplicity); + if(effectiveOccurrences != null && effectiveOccurrences.upperIsOne()) { + //REFINE the parent node case 2, only one child with occurrences upper == 1 + return true; + } } return false; } diff --git a/tools/src/main/java/com/nedap/archie/flattener/Flattener.java b/tools/src/main/java/com/nedap/archie/flattener/Flattener.java index 1181dfcb8..f3586a0c9 100644 --- a/tools/src/main/java/com/nedap/archie/flattener/Flattener.java +++ b/tools/src/main/java/com/nedap/archie/flattener/Flattener.java @@ -37,6 +37,7 @@ public class Flattener { private boolean createOperationalTemplate = false; private boolean removeLanguagesFromMetaData = false; private boolean useComplexObjectForArchetypeSlotReplacement = false; + private boolean removeZeroOccurrencesObjects = false; private String[] languagesToKeep = null; @@ -48,6 +49,7 @@ public class Flattener { private OperationalTemplateCreator optCreator = new OperationalTemplateCreator(this); + public Flattener(ArchetypeRepository repository, ReferenceModels models) { this.repository = new OverridingArchetypeRepository(repository); this.metaModels = new MetaModels(models, null); @@ -58,11 +60,28 @@ public Flattener(ArchetypeRepository repository, MetaModels models) { this.metaModels = models; } + /** + * Create operational templates in addition to flattening. 
Default is false; + * @param makeTemplate + * @return + */ public Flattener createOperationalTemplate(boolean makeTemplate) { this.createOperationalTemplate = makeTemplate; return this; } + /** + * Remove zero occurrences constraints, instead of leaving them but removing all of their children + * + * Default is false + * @param remove + * @return + */ + public Flattener removeZeroOccurrencesConstraints(boolean remove) { + this.removeZeroOccurrencesObjects = remove; + return this; + } + /** * if this flattener is setup to create operational templates, also set it to remove all languages from the terminology * except for the given languages @@ -130,7 +149,16 @@ public Archetype flatten(Archetype toFlatten) { result = optCreator.createOperationalTemplate(parent); optCreator.overrideArchetypeId(result, child); } else { - result = parent.clone(); + result = child.clone(); + + Archetype clonedParent = parent.clone(); + //definition, terminology and rules will be replaced later, but must be set to that of the parent + // for this flattener to work correctly. I would not write it this way when creating another flattener, but + //it's the way it is :) + //parent needs to be cloned because this updates references to parent archetype as well + result.setDefinition(clonedParent.getDefinition()); + result.setTerminology(clonedParent.getTerminology()); + result.setRules(clonedParent.getRules()); } //1. 
redefine structure @@ -154,16 +182,25 @@ public Archetype flatten(Archetype toFlatten) { } result.getDefinition().setArchetype(result); result.setDifferential(false);//note this archetype as being flat - if(!createOperationalTemplate) { - //set metadata to specialized archetype - result.setOriginalLanguage(child.getOriginalLanguage()); - result.setDescription(child.getDescription()); - result.setOtherMetaData(child.getOtherMetaData()); - result.setGenerated(child.getGenerated()); - result.setControlled(child.getControlled()); - result.setBuildUid(child.getBuildUid()); - result.setTranslations(child.getTranslations()); - } //else as well, but is done elsewhere. needs refactor. + + if(child instanceof Template && !createOperationalTemplate) { + Template resultTemplate = (Template) result; + resultTemplate.setTemplateOverlays(new ArrayList<>()); + Template childTemplate = (Template) child; + //we need to add the flattened template overlays. For operational template these have been added to the archetype structure, so not needed + for(TemplateOverlay overlay:((Template) child).getTemplateOverlays()){ + TemplateOverlay flatOverlay = (TemplateOverlay) getNewFlattener().flatten(overlay); + ResourceDescription description = (ResourceDescription) result.getDescription().clone(); + //not sure whether to do this or to implement these methods using the owningTemplate param. + //in many cases you do want this information... 
+ flatOverlay.setDescription(description); + flatOverlay.setOriginalLanguage(result.getOriginalLanguage()); + flatOverlay.setTranslationList(result.getTranslationList()); + ArchetypeParsePostProcesser.fixArchetype(flatOverlay); + resultTemplate.getTemplateOverlays().add(flatOverlay); + } + } + ArchetypeParsePostProcesser.fixArchetype(result); return result; } @@ -182,9 +219,15 @@ private void prohibitZeroOccurrencesConstraints(Archetype archetype) { List objectsToRemove = new ArrayList<>(); for (CObject child : attribute.getChildren()) { if (!child.isAllowed()) { - objectsToRemove.add(child); + if(child instanceof CComplexObject) { + ((CComplexObject) child).setAttributes(new ArrayList<>()); + } + if(this.removeZeroOccurrencesObjects) { + objectsToRemove.add(child); + } + } else { + workList.push(child); } - workList.push(child); } attribute.getChildren().removeAll(objectsToRemove); } diff --git a/tools/src/main/java/com/nedap/archie/flattener/FullArchetypeRepository.java b/tools/src/main/java/com/nedap/archie/flattener/FullArchetypeRepository.java index 1098e45cf..fbbb6f27d 100644 --- a/tools/src/main/java/com/nedap/archie/flattener/FullArchetypeRepository.java +++ b/tools/src/main/java/com/nedap/archie/flattener/FullArchetypeRepository.java @@ -1,10 +1,9 @@ package com.nedap.archie.flattener; import com.nedap.archie.aom.Archetype; -import com.nedap.archie.aom.ArchetypeHRID; import com.nedap.archie.aom.OperationalTemplate; +import com.nedap.archie.archetypevalidator.ArchetypeValidationSettings; import com.nedap.archie.archetypevalidator.ArchetypeValidator; -import com.nedap.archie.archetypevalidator.ValidationMessage; import com.nedap.archie.archetypevalidator.ValidationResult; import com.nedap.archie.rminfo.MetaModels; import com.nedap.archie.rminfo.ReferenceModels; @@ -24,28 +23,52 @@ public interface FullArchetypeRepository extends ArchetypeRepository { void setOperationalTemplate(OperationalTemplate template); + /** + * Removes the validation result and the 
operational template of the given archetype id. Keeps the archetype + * + * @param archetypeId + */ + void removeValidationResult(String archetypeId); + List getAllValidationResults(); + ArchetypeValidationSettings getArchetypeValidationSettings(); + default void compile(ReferenceModels models) { ArchetypeValidator validator = new ArchetypeValidator(models); - for(Archetype archetype:getAllArchetypes()) { - if(getValidationResult(archetype.getArchetypeId().toString()) == null) { - validator.validate(archetype, this); - } - } + compile(validator); } default void compile(MetaModels models) { ArchetypeValidator validator = new ArchetypeValidator(models); - for(Archetype archetype:getAllArchetypes()) { - if(getValidationResult(archetype.getArchetypeId().toString()) == null) { - validator.validate(archetype, this); - } + compile(validator); + } + + /** + * validate the archetype if necessary, and return either the newly validated one or + * the existing validation result + * @param models + * @return + */ + default ValidationResult compileAndRetrieveValidationResult(String archetypeId, MetaModels models) { + ValidationResult validationResult = getValidationResult(archetypeId); + if(validationResult != null) { + return validationResult; } + Archetype archetype = getArchetype(archetypeId); + if(archetype == null) { + return null; + } + ArchetypeValidator validator = new ArchetypeValidator(models); + return validator.validate(archetype, this); } default void compile(ReferenceModels models, ReferenceModelAccess bmmModels) { ArchetypeValidator validator = new ArchetypeValidator(models, bmmModels); + compile(validator); + } + + default void compile(ArchetypeValidator validator) { for(Archetype archetype:getAllArchetypes()) { if(getValidationResult(archetype.getArchetypeId().toString()) == null) { validator.validate(archetype, this); diff --git a/tools/src/main/java/com/nedap/archie/flattener/InMemoryFullArchetypeRepository.java 
b/tools/src/main/java/com/nedap/archie/flattener/InMemoryFullArchetypeRepository.java index 4902decc7..ee2b5cc2a 100644 --- a/tools/src/main/java/com/nedap/archie/flattener/InMemoryFullArchetypeRepository.java +++ b/tools/src/main/java/com/nedap/archie/flattener/InMemoryFullArchetypeRepository.java @@ -3,6 +3,7 @@ import com.nedap.archie.aom.Archetype; import com.nedap.archie.aom.ArchetypeHRID; import com.nedap.archie.aom.OperationalTemplate; +import com.nedap.archie.archetypevalidator.ArchetypeValidationSettings; import com.nedap.archie.archetypevalidator.ValidationResult; import java.util.ArrayList; @@ -16,6 +17,7 @@ public class InMemoryFullArchetypeRepository extends SimpleArchetypeRepository i private Map flattenedArchetypes = new ConcurrentHashMap<>(); private Map operationalTemplates = new ConcurrentHashMap<>(); + private ArchetypeValidationSettings archetypeValidationSettings; @Override public Archetype getFlattenedArchetype(String archetypeId) { @@ -42,10 +44,23 @@ public void setOperationalTemplate(OperationalTemplate template) { operationalTemplates.put(template.getArchetypeId().getSemanticId(), template); } + @Override + public void removeValidationResult(String archetypeId) { + operationalTemplates.remove(new ArchetypeHRID(archetypeId).getSemanticId()); + validationResult.remove(new ArchetypeHRID(archetypeId).getSemanticId()); + } + @Override public List getAllValidationResults() { return new ArrayList<>(validationResult.values()); } + @Override + public ArchetypeValidationSettings getArchetypeValidationSettings() { + return archetypeValidationSettings; + } + public void setArchetypeValidationSettings(ArchetypeValidationSettings settings) { + this.archetypeValidationSettings = settings; + } } diff --git a/tools/src/main/java/com/nedap/archie/flattener/OverridingInMemFullArchetypeRepository.java b/tools/src/main/java/com/nedap/archie/flattener/OverridingInMemFullArchetypeRepository.java index aa18aa782..b8c8818b9 100644 --- 
a/tools/src/main/java/com/nedap/archie/flattener/OverridingInMemFullArchetypeRepository.java +++ b/tools/src/main/java/com/nedap/archie/flattener/OverridingInMemFullArchetypeRepository.java @@ -2,6 +2,7 @@ import com.nedap.archie.aom.Archetype; import com.nedap.archie.aom.OperationalTemplate; +import com.nedap.archie.archetypevalidator.ArchetypeValidationSettings; import com.nedap.archie.archetypevalidator.ValidationResult; import org.apache.commons.lang.mutable.Mutable; @@ -83,6 +84,16 @@ public void setOperationalTemplate(OperationalTemplate template) { originalRepository.setOperationalTemplate(template); } + @Override + public void removeValidationResult(String archetypeId) { + ValidationResult result = extraArchetypes.getValidationResult(archetypeId); + if(result != null) { + extraArchetypes.removeValidationResult(archetypeId); + } else { + originalRepository.removeValidationResult(archetypeId); + } + } + @Override public List getAllValidationResults() { List result = new ArrayList<>(extraArchetypes.getAllValidationResults()); @@ -90,6 +101,11 @@ public List getAllValidationResults() { return result; } + @Override + public ArchetypeValidationSettings getArchetypeValidationSettings() { + return originalRepository.getArchetypeValidationSettings(); + } + @Override public Archetype getArchetype(String archetypeId) { Archetype result = extraArchetypes.getArchetype(archetypeId); diff --git a/tools/src/main/java/com/nedap/archie/query/APathToXPathConverter.java b/tools/src/main/java/com/nedap/archie/query/APathToXPathConverter.java index 4551bc255..ed638e61f 100644 --- a/tools/src/main/java/com/nedap/archie/query/APathToXPathConverter.java +++ b/tools/src/main/java/com/nedap/archie/query/APathToXPathConverter.java @@ -8,7 +8,7 @@ import com.nedap.archie.adlparser.antlr.XPathParser.MainContext; import com.nedap.archie.adlparser.antlr.XPathParser.PredicateContext; import com.nedap.archie.adlparser.antlr.XPathParser.RelativeLocationPathContext; -import 
org.antlr.v4.runtime.ANTLRInputStream; +import org.antlr.v4.runtime.CharStreams; import org.antlr.v4.runtime.CommonTokenStream; import org.antlr.v4.runtime.tree.ParseTree; import org.antlr.v4.runtime.tree.TerminalNode; @@ -49,7 +49,7 @@ public static String convertQueryToXPath(String query, String firstNodeName) { public static String convertWithAntlr(String query) { - XPathLexer lexer = new XPathLexer(new ANTLRInputStream(query)); + XPathLexer lexer = new XPathLexer(CharStreams.fromString(query)); XPathParser parser = new XPathParser(new CommonTokenStream(lexer)); MainContext mainCtx = parser.main(); StringBuilder output = new StringBuilder(); diff --git a/tools/src/test/java/com/nedap/archie/adlparser/AOMPathQueryTest.java b/tools/src/test/java/com/nedap/archie/adlparser/AOMPathQueryTest.java index 819358b74..5b8a7b9c0 100644 --- a/tools/src/test/java/com/nedap/archie/adlparser/AOMPathQueryTest.java +++ b/tools/src/test/java/com/nedap/archie/adlparser/AOMPathQueryTest.java @@ -14,6 +14,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; /** * Test APath queries with archetype model objects @@ -55,6 +56,22 @@ public void basicPaths() throws Exception { assertEquals("ITEM_TREE", ((CComplexObject) archetypeModelObject).getRmTypeName()); } + @Test + public void differentialPaths() throws Exception { + Archetype archetype = TestUtil.parseFailOnErrors("/adl2-tests/features/specialisation/openEHR-EHR-OBSERVATION.redefine_1_value.v1.0.0.adls"); + + //query with a differential path halfway + AOMPathQuery query = new AOMPathQuery("/data/events[id3]/data/items[id4.1]/value[id0.6]"); + ArchetypeModelObject archetypeModelObject = query.find(archetype.getDefinition()); + assertEquals("id0.6", ((CComplexObject) archetypeModelObject).getNodeId()); + + //partial match of differential path should not return result + query = new AOMPathQuery("/data/events[id3]"); + archetypeModelObject = 
query.find(archetype.getDefinition()); + assertNull(archetypeModelObject); + + } + @Test public void nameAttributeIgnoredForNow() throws Exception { AOMPathQuery query = new AOMPathQuery("/context[id11 and name=\"ignored\"]"); diff --git a/tools/src/test/java/com/nedap/archie/adlparser/CStringParserTest.java b/tools/src/test/java/com/nedap/archie/adlparser/CStringParserTest.java new file mode 100644 index 000000000..f06931cb9 --- /dev/null +++ b/tools/src/test/java/com/nedap/archie/adlparser/CStringParserTest.java @@ -0,0 +1,39 @@ +package com.nedap.archie.adlparser; + +import com.google.common.collect.Lists; +import com.nedap.archie.aom.Archetype; +import com.nedap.archie.aom.CAttribute; +import com.nedap.archie.aom.primitives.CString; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; + +/** + * Test CString parsing. Does not test regexpes, see @RegexTest for that + */ +public class CStringParserTest { + + @Test + public void testStringWithQuotes() throws Exception { + ADLParser parser = new ADLParser(); + Archetype archetype = parser.parse(this.getClass().getResourceAsStream("openehr-TEST_PKG-WHOLE.escaped_strings.v1.0.0.adls")); + + assertFalse(parser.getErrors().hasErrors()); + CAttribute attributeWithoutQuotes = archetype.getDefinition().getAttribute("string_attr1"); + CAttribute attributeWithQuotes = archetype.getDefinition().getAttribute("string_attr2"); + CAttribute attributeWithBackslash = archetype.getDefinition().getAttribute("string_attr3"); + + CString cStringWithoutQuotes = (CString) attributeWithoutQuotes.getChildren().get(0); + CString cStringWithQuotes = (CString) attributeWithQuotes.getChildren().get(0); + CString cStringWithBlackslash = (CString) attributeWithBackslash.getChildren().get(0); + + assertEquals(Lists.newArrayList("something"), cStringWithoutQuotes.getConstraint()); + assertEquals(Lists.newArrayList("something with a \"-mark"), cStringWithQuotes.getConstraint()); + 
assertEquals(Lists.newArrayList("something with a \\-mark"), cStringWithBlackslash.getConstraint()); + } + + + + +} diff --git a/tools/src/test/java/com/nedap/archie/adlparser/RegexTest.java b/tools/src/test/java/com/nedap/archie/adlparser/RegexTest.java index 89a6aa934..8b7c5bc7e 100644 --- a/tools/src/test/java/com/nedap/archie/adlparser/RegexTest.java +++ b/tools/src/test/java/com/nedap/archie/adlparser/RegexTest.java @@ -43,7 +43,7 @@ public void extended_regexp() throws Exception { { //path and regexpes can clash if the parser is wrong - CAttribute pathAttribute = archetype.getDefinition().getAttribute("start"); + CAttribute pathAttribute = archetype.getDefinition().getAttribute("/start"); assertEquals("/start", pathAttribute.getDifferentialPath()); CString regex = (CString) pathAttribute.getChildren().get(0); assertEquals("/this should work/", regex.getConstraint().get(0)); @@ -52,7 +52,7 @@ public void extended_regexp() throws Exception { //the following still fails due to https://github.com/openEHR/adl-antlr/issues/20 { //path and regexpes can clash if the parser is wrong - CAttribute pathAttribute = archetype.getDefinition().getAttribute("end"); + CAttribute pathAttribute = archetype.getDefinition().getAttribute("/start[id2]/end"); assertEquals("/start[id2]/end", pathAttribute.getDifferentialPath()); CString regex = (CString) pathAttribute.getChildren().get(0); assertEquals("/this should work/", regex.getConstraint().get(0)); diff --git a/tools/src/test/java/com/nedap/archie/archetypevalidator/ArchetypeValidatorTest.java b/tools/src/test/java/com/nedap/archie/archetypevalidator/ArchetypeValidatorTest.java index 73669e43a..914474835 100644 --- a/tools/src/test/java/com/nedap/archie/archetypevalidator/ArchetypeValidatorTest.java +++ b/tools/src/test/java/com/nedap/archie/archetypevalidator/ArchetypeValidatorTest.java @@ -2,6 +2,7 @@ import com.nedap.archie.adlparser.ADLParser; import com.nedap.archie.aom.Archetype; +import 
com.nedap.archie.flattener.InMemoryFullArchetypeRepository; import com.nedap.archie.openehrtestrm.TestRMInfoLookup; import com.nedap.archie.rminfo.ArchieRMInfoLookup; import com.nedap.archie.rminfo.ReferenceModels; @@ -11,8 +12,7 @@ import java.io.IOException; import java.util.List; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.Assert.*; /** * Created by pieter.bos on 05/04/2017. @@ -48,6 +48,23 @@ public void VCARMNonExistantType() throws Exception { System.out.println(messages); assertEquals(1, messages.size()); assertEquals(ErrorType.VCARM, messages.get(0).getType()); + assertNull(validationResult.getFlattened()); + } + + @Test + public void VCARMNonExistantTypeAlwaysFlatten() throws Exception { + archetype = parse("/adl2-tests/validity/rm_checking/openEHR-EHR-EVALUATION.VCARM_rm_non_existent_attribute.v1.0.0.adls"); + InMemoryFullArchetypeRepository repository = new InMemoryFullArchetypeRepository(); + repository.addArchetype(archetype); + ArchetypeValidationSettings settings = new ArchetypeValidationSettings(); + settings.setAlwaysTryToFlatten(true); + repository.setArchetypeValidationSettings(settings); + ValidationResult validationResult = new ArchetypeValidator(models).validate(archetype, repository); + List messages = validationResult.getErrors(); + System.out.println(messages); + assertEquals(1, messages.size()); + assertEquals(ErrorType.VCARM, messages.get(0).getType()); + assertNotNull(validationResult.getFlattened()); } @Test @@ -82,6 +99,37 @@ public void VATDFatCodeNotPresent() throws Exception { } + @Test + public void tupleMemberSizeMismatch() throws Exception { + archetype = parse("openEHR-EHR-CLUSTER.invalid_tuple_1.v1.0.0.adls"); + ValidationResult validationResult = new ArchetypeValidator(models).validate(archetype); + List messages = validationResult.getErrors(); + System.out.println(messages); + assertEquals(2, messages.size()); + assertEquals(ErrorType.OTHER, 
messages.get(0).getType()); + assertTrue("message should complain about tuple members being incorrect", messages.get(0).getMessage().contains("There should be 3 tuple members")); + } + + @Test + public void tupleMemberTypeMismatch() throws Exception { + archetype = parse("openEHR-EHR-CLUSTER.invalid_tuple_2.v1.0.0.adls"); + ValidationResult validationResult = new ArchetypeValidator(models).validate(archetype); + List messages = validationResult.getErrors(); + System.out.println(messages); + assertEquals(1, messages.size()); + assertEquals(ErrorType.VCARM, messages.get(0).getType()); + } + + @Test + public void tuplePrimitiveTypeMismatch() throws Exception { + archetype = parse("openEHR-EHR-CLUSTER.invalid_tuple_3.v1.0.0.adls"); + ValidationResult validationResult = new ArchetypeValidator(models).validate(archetype); + List messages = validationResult.getErrors(); + System.out.println(messages); + assertEquals(1, messages.size()); + assertEquals(ErrorType.VCORMT, messages.get(0).getType()); + } + private Archetype parse(String filename) throws IOException { archetype = parser.parse(ArchetypeValidatorTest.class.getResourceAsStream(filename)); diff --git a/tools/src/test/java/com/nedap/archie/archetypevalidator/BigArchetypeValidatorTest.java b/tools/src/test/java/com/nedap/archie/archetypevalidator/BigArchetypeValidatorTest.java index faaeb4061..71ac58561 100644 --- a/tools/src/test/java/com/nedap/archie/archetypevalidator/BigArchetypeValidatorTest.java +++ b/tools/src/test/java/com/nedap/archie/archetypevalidator/BigArchetypeValidatorTest.java @@ -1,5 +1,6 @@ package com.nedap.archie.archetypevalidator; +import com.google.common.base.Joiner; import com.google.common.collect.HashMultimap; import com.google.common.collect.Multimap; import com.google.common.io.Resources; @@ -29,6 +30,7 @@ import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.List; import java.util.Set; import 
java.util.regex.Pattern; @@ -153,6 +155,7 @@ public void testInner(MetaModels metaModels) { int shouldBeFineButWasinvalid = 0; int notImplemented = 0; int unexpectedParseErrors = 0; + List errorStrings = new ArrayList<>(); ArchetypeValidator validator = new ArchetypeValidator(metaModels); SimpleArchetypeRepository repository = new SimpleArchetypeRepository(); for(String file:adlFiles) { @@ -178,8 +181,10 @@ public void testInner(MetaModels metaModels) { if(exception != null) { if(errors != null) { log.error(errors.toString()); + errorStrings.add(archetype.getArchetypeId() + " has unexpected parse errors: " + errors.toString()); } log.error("exception:", exception); + errorStrings.add(archetype.getArchetypeId() + " has exception: " + exception.getMessage()); } else { log.error(errors.toString()); } @@ -192,8 +197,10 @@ public void testInner(MetaModels metaModels) { log.info("{} has parse errors, but we don't know if it's fine:", file); if(errors != null) { log.error(errors.toString()); + errorStrings.add(archetype.getArchetypeId() + " has unknown parse errors: " + errors.toString()); } log.error("exception:", exception); + errorStrings.add(archetype.getArchetypeId() + " has exception: " + exception.getMessage()); unexpectedParseErrors++; } } catch (IOException e) { @@ -214,19 +221,29 @@ public void testInner(MetaModels metaModels) { notImplemented++; } else { if (!validation.hasWarningsOrErrors()) { + log.error("test failed: archetype {} considered valid, it should not", archetype.getArchetypeId()); + errorStrings.add("test failed: archetype " + archetype.getArchetypeId() + " considered valid, it should not"); errorCount++; } else { boolean found = false; + Set errorTypes = new LinkedHashSet<>(); List errors = validation.getErrors(); for (ValidationMessage message : errors) { + errorTypes.add(message.getType()); if (errorMatches(message.getType(), regression)) { found = true; correctCount++; } } if (!found) { - log.error("validation failed: archetype {} invalid but 
with wrong message", archetype.getArchetypeId()); + log.error("validation failed: archetype {} invalid but with wrong message, was {} but should be {}", + archetype.getArchetypeId(), + Joiner.on(", ").join(errorTypes), + regression); + errorStrings.add("validation failed: archetype " + archetype.getArchetypeId() + " invalid but with wrong message, was " + + Joiner.on(", ").join(errorTypes) + " but should be " + + regression); printErrors(validation); errorCount++; } @@ -236,6 +253,7 @@ public void testInner(MetaModels metaModels) { } else { if(validation.hasWarningsOrErrors()) { log.error("Validation should pass, but it failed for archetype {}, {}", archetype.getArchetypeId(), regression); + errorStrings.add("Validation should pass, but it failed for archetype " + archetype.getArchetypeId() + ", " + regression); printErrors(validation); shouldBeFineButWasinvalid++; } else { @@ -244,7 +262,7 @@ public void testInner(MetaModels metaModels) { } } if(errorCount > 0) { - Assert.fail(String.format("%s validated but should not, %s correct, %s did not validate but should, %s not yet implemented, %s unexpected parser errors", errorCount, correctCount, shouldBeFineButWasinvalid, notImplemented, unexpectedParseErrors)); + Assert.fail(String.format("%s validated but should not, %s correct, %s did not validate but should, %s not yet implemented, %s unexpected parser errors: \n%s", errorCount, correctCount, shouldBeFineButWasinvalid, notImplemented, unexpectedParseErrors, Joiner.on(", ").join(errorStrings))); } log.info("{} not implemented yet", notImplemented); diff --git a/tools/src/test/java/com/nedap/archie/flattener/specexamples/FlattenerExamplesFromSpecTest.java b/tools/src/test/java/com/nedap/archie/flattener/specexamples/FlattenerExamplesFromSpecTest.java index b1cd0de25..c47281f55 100644 --- a/tools/src/test/java/com/nedap/archie/flattener/specexamples/FlattenerExamplesFromSpecTest.java +++ 
b/tools/src/test/java/com/nedap/archie/flattener/specexamples/FlattenerExamplesFromSpecTest.java @@ -44,6 +44,7 @@ public void specializationPaths() throws Exception { repository.addArchetype(labTest); Archetype specializationPaths = parse("specialization_paths.adls"); Archetype flattened = new Flattener(repository, models).flatten(specializationPaths); + assertEquals(specializationPaths.getParentArchetypeId(), flattened.getParentArchetypeId()); CObject originalConstraint = flattened.itemAtPath("/data[id2]/events[id3]/data[id4]/items[id79]"); CObject firstAddedConstraint = flattened.itemAtPath("/data[id2]/events[id3]/data[id4]/items[id79.2]"); @@ -167,13 +168,13 @@ public void cardinalityRedefinition() throws Exception { @Test - public void exclusion() throws Exception { + public void exclusionRemoval() throws Exception { Archetype occurrencesParent = parse("openEHR-EHR-CLUSTER.occurrences_parent.v1.0.0.adls"); repository.addArchetype(occurrencesParent); Archetype occurrencesSpecialized = parse("openEHR-EHR-CLUSTER.occurrences_specialized.v1.0.0.adls"); - Archetype flat = new Flattener(repository, models).flatten(occurrencesSpecialized); + Archetype flat = new Flattener(repository, models).removeZeroOccurrencesConstraints(true).flatten(occurrencesSpecialized); CAttribute attribute = flat.itemAtPath("/items[id3]/value"); assertNotNull(flat.itemAtPath("/items[id3]/value[id5]")); assertNotNull(flat.itemAtPath("/items[id3]/value[id6]")); @@ -183,6 +184,28 @@ public void exclusion() throws Exception { } + @Test + public void exclusionDefault() throws Exception { + Archetype occurrencesParent = parse("openEHR-EHR-CLUSTER.occurrences_parent.v1.0.0.adls"); + repository.addArchetype(occurrencesParent); + + Archetype occurrencesSpecialized = parse("openEHR-EHR-CLUSTER.occurrences_specialized.v1.0.0.adls"); + + Archetype flat = new Flattener(repository, models).flatten(occurrencesSpecialized); + CAttribute attribute = flat.itemAtPath("/items[id3]/value"); + 
assertNotNull(flat.itemAtPath("/items[id3]/value[id5]")); + assertNotNull(flat.itemAtPath("/items[id3]/value[id6]")); + assertNotNull(flat.itemAtPath("/items[id3]/value[id4]")); + assertTrue(((CComplexObject) flat.itemAtPath("/items[id3]/value[id4]")).getOccurrences().isProhibited()); + assertEquals(0, ((CComplexObject) flat.itemAtPath("/items[id3]/value[id4]")).getAttributes().size()); + + assertNotNull(flat.itemAtPath("/items[id3]/value[id7]")); + assertEquals(0, ((CComplexObject) flat.itemAtPath("/items[id3]/value[id7]")).getAttributes().size()); + assertTrue(((CComplexObject) flat.itemAtPath("/items[id3]/value[id7]")).getOccurrences().isProhibited()); + assertEquals(4, attribute.getChildren().size()); + + } + //the spec has an issue here in the given examples which does not validate in the ADL workbench. Which means the test cannot be run yet until the spec issue has been resolved //because we don't know what it should do in this case diff --git a/tools/src/test/java/com/nedap/archie/rm/LocatableTest.java b/tools/src/test/java/com/nedap/archie/rm/LocatableTest.java index 05ee049d7..a6820f37d 100644 --- a/tools/src/test/java/com/nedap/archie/rm/LocatableTest.java +++ b/tools/src/test/java/com/nedap/archie/rm/LocatableTest.java @@ -52,7 +52,7 @@ public void testItemAtPath() { assertEquals(event, composition.itemAtPath("/content[id1]/data[id2]/events[id3]")); assertEquals(itemTree, composition.itemAtPath("/content[id1]/data[id2]/events[id3]/data[id4]")); assertEquals(itemTree, composition.itemAtPath("/content[id1]/data[id2]/events[1]/data[id4]")); - assertEquals(itemTree, composition.itemAtPath("/content[id1]/data[id2]/events[custom event]/data[id4]")); + assertEquals(itemTree, composition.itemAtPath("/content[id1]/data[id2]/events[\"custom event\"]/data[id4]")); assertEquals(element, composition.itemAtPath("/content[id1]/data[id2]/events[id3]/data[id4]/items[id5]")); assertEquals(text, 
composition.itemAtPath("/content[id1]/data[id2]/events[id3]/data[id4]/items[id5]/value")); @@ -93,7 +93,7 @@ public void testItemsAtPath() { assertEquals(Lists.newArrayList(event), composition.itemsAtPath("/content[id1]/data[id2]/events[id3]")); assertEquals(Lists.newArrayList(itemTree), composition.itemsAtPath("/content[id1]/data[id2]/events[id3]/data[id4]")); assertEquals(Lists.newArrayList(itemTree), composition.itemsAtPath("/content[id1]/data[id2]/events[1]/data[id4]")); - assertEquals(Lists.newArrayList(itemTree), composition.itemsAtPath("/content[id1]/data[id2]/events[custom event]/data[id4]")); + assertEquals(Lists.newArrayList(itemTree), composition.itemsAtPath("/content[id1]/data[id2]/events[\"custom event\"]/data[id4]")); assertEquals(Lists.newArrayList(element), composition.itemsAtPath("/content[id1]/data[id2]/events[id3]/data[id4]/items[id5]")); assertEquals(Lists.newArrayList(text), composition.itemsAtPath("/content[id1]/data[id2]/events[id3]/data[id4]/items[id5]/value")); diff --git a/tools/src/test/java/com/nedap/archie/rules/evaluation/ParsedRulesEvaluationTest.java b/tools/src/test/java/com/nedap/archie/rules/evaluation/ParsedRulesEvaluationTest.java index 30b99553d..cf2180c8f 100644 --- a/tools/src/test/java/com/nedap/archie/rules/evaluation/ParsedRulesEvaluationTest.java +++ b/tools/src/test/java/com/nedap/archie/rules/evaluation/ParsedRulesEvaluationTest.java @@ -49,13 +49,13 @@ public void precedenceOverride() throws Exception { ExpressionVariable booleanExtendedTest = (ExpressionVariable) getVariableDeclarationByName(archetype, "boolean_extended_test"); BinaryOperator operator = (BinaryOperator) booleanExtendedTest.getExpression(); - assertTrue(operator.getLeftOperand().isPrecedenceOverriden()); - assertFalse(operator.getRightOperand().isPrecedenceOverriden()); + assertTrue(operator.getLeftOperand().isPrecedenceOverridden()); + assertFalse(operator.getRightOperand().isPrecedenceOverridden()); ExpressionVariable arithmeticParentheses = 
(ExpressionVariable) getVariableDeclarationByName(archetype, "arithmetic_parentheses"); BinaryOperator arithmeticOperator = (BinaryOperator) arithmeticParentheses.getExpression(); - assertTrue(arithmeticOperator.getLeftOperand().isPrecedenceOverriden()); - assertFalse(arithmeticOperator.getRightOperand().isPrecedenceOverriden()); + assertTrue(arithmeticOperator.getLeftOperand().isPrecedenceOverridden()); + assertFalse(arithmeticOperator.getRightOperand().isPrecedenceOverridden()); } diff --git a/tools/src/test/java/com/nedap/archie/serializer/adl/ADLArchetypeSerializerParserRoundtripTest.java b/tools/src/test/java/com/nedap/archie/serializer/adl/ADLArchetypeSerializerParserRoundtripTest.java index d47c09ca9..fdcf41f4a 100644 --- a/tools/src/test/java/com/nedap/archie/serializer/adl/ADLArchetypeSerializerParserRoundtripTest.java +++ b/tools/src/test/java/com/nedap/archie/serializer/adl/ADLArchetypeSerializerParserRoundtripTest.java @@ -9,6 +9,7 @@ import com.nedap.archie.flattener.FlattenerTest; import com.nedap.archie.flattener.SimpleArchetypeRepository; import com.nedap.archie.testutil.TestUtil; +import org.junit.Assert; import org.junit.Test; import org.openehr.referencemodels.BuiltinReferenceModels; import org.slf4j.Logger; @@ -58,6 +59,39 @@ public void device() throws Exception { assertThat(archetype.getDescription().getOriginalAuthor().get("name"), equalTo("Heather Leslie")); } + @Test + public void escapeQuotes() throws Exception { + Archetype archetype = load("openEHR-EHR-COMPOSITION.report.v1.adls"); + archetype.getDescription().setLicence("license with a \"-mark"); + String serialized = ADLArchetypeSerializer.serialize(archetype); + + Assert.assertThat(serialized, containsString("license with a \\\"-mark" )); + Archetype parsed = new ADLParser().parse(serialized); + Assert.assertThat(parsed.getDescription().getLicence(), is("license with a \"-mark" )); + } + + @Test + public void escapeQuotes2() throws Exception { + Archetype archetype = 
load("openEHR-EHR-COMPOSITION.report.v1.adls"); + archetype.getDescription().setLicence("license with a \\-mark"); + String serialized = ADLArchetypeSerializer.serialize(archetype); + + Assert.assertThat(serialized, containsString("license with a \\\\-mark" )); + Archetype parsed = new ADLParser().parse(serialized); + Assert.assertThat(parsed.getDescription().getLicence(), is("license with a \\-mark" )); + } + + @Test + public void escapeQuotes3() throws Exception { + Archetype archetype = load("openEHR-EHR-COMPOSITION.report.v1.adls"); + archetype.getDescription().setLicence("license with a \\\"-mark"); + String serialized = ADLArchetypeSerializer.serialize(archetype); + + Assert.assertThat(serialized, containsString("license with a \\\\\\\"-mark" )); + Archetype parsed = new ADLParser().parse(serialized); + Assert.assertThat(parsed.getDescription().getLicence(), is("license with a \\\"-mark" )); + } + private Archetype roundtrip(Archetype archetype) throws IOException { String serialized = ADLArchetypeSerializer.serialize(archetype); logger.info(serialized); diff --git a/tools/src/test/resources/com/nedap/archie/adlparser/openehr-TEST_PKG-WHOLE.escaped_strings.v1.0.0.adls b/tools/src/test/resources/com/nedap/archie/adlparser/openehr-TEST_PKG-WHOLE.escaped_strings.v1.0.0.adls new file mode 100644 index 000000000..23a05f944 --- /dev/null +++ b/tools/src/test/resources/com/nedap/archie/adlparser/openehr-TEST_PKG-WHOLE.escaped_strings.v1.0.0.adls @@ -0,0 +1,42 @@ +archetype (adl_version=2.0.5; rm_release=1.0.2) + openehr-TEST_PKG-WHOLE.escaped_strings.v1.0.0 + +language + original_language = <[ISO_639-1::en]> + +description + original_author = < + ["name"] = <"Pieter Bos"> + ["email"] = <"pieter.bos@nedap.com"> + ["organisation"] = <"N.V. 
Nederlandsche Apparatenfabriek (Nedap)>"> + ["date"] = <"2018-06-06"> + > + details = < + ["en"] = < + language = <[ISO_639-1::en]> + purpose = <"Test escaped string values in a CString"> + keywords = <"ADL", "test"> + > + > + lifecycle_state = <"published"> + other_details = < + ["regression"] = <"PASS"> + > + copyright = <"copyright © 2004 openEHR Foundation "> + +definition + WHOLE[id1] matches { -- test entry + string_attr1 matches {"something"} + string_attr2 matches {"something with a \"-mark"} + string_attr3 matches {"something with a \\-mark"} + } + +terminology + term_definitions = < + ["en"] = < + ["id1"] = < + text = <"test entry"> + description = <"test entry"> + > + > + > diff --git a/tools/src/test/resources/com/nedap/archie/archetypevalidator/openEHR-EHR-CLUSTER.invalid_tuple_1.v1.0.0.adls b/tools/src/test/resources/com/nedap/archie/archetypevalidator/openEHR-EHR-CLUSTER.invalid_tuple_1.v1.0.0.adls new file mode 100644 index 000000000..f986aba6a --- /dev/null +++ b/tools/src/test/resources/com/nedap/archie/archetypevalidator/openEHR-EHR-CLUSTER.invalid_tuple_1.v1.0.0.adls @@ -0,0 +1,51 @@ +archetype (adl_version=2.0.5; rm_release=1.0.2; generated) + openEHR-EHR-CLUSTER.invalid_tuple_1.v1.0.0 + +language + original_language = <[ISO_639-1::en]> + +description + + original_author = < + ["name"] = <"Pieter Bos"> + > + + details = < + ["en"] = < + language = <[ISO_639-1::en]> + purpose = <"Test some basic tuple validity checks"> + > + > + + lifecycle_state = <"unmanaged"> + +definition + CLUSTER[id1] matches { -- invalid tuple + items matches { + ELEMENT[id52] occurrences matches {0..1} matches { -- Invalid tuple element + value matches { + DV_QUANTITY[id55] matches { + [magnitude, units, precision] matches { + [{|0.0..50.0|}, {"l/m"}], + [{|0.0..50000.0|}, {"ml/min"}] + } + } + } + } + } + } + +terminology + term_definitions = < + ["en"] = < + ["id1"] = < + text = <"Invalid tuple"> + description = <"An invalid tuple"> + > + ["id52"] = < + text = 
<"invalid tuple element"> + description = <"Invalid tuple element"> + > + + > + > diff --git a/tools/src/test/resources/com/nedap/archie/archetypevalidator/openEHR-EHR-CLUSTER.invalid_tuple_2.v1.0.0.adls b/tools/src/test/resources/com/nedap/archie/archetypevalidator/openEHR-EHR-CLUSTER.invalid_tuple_2.v1.0.0.adls new file mode 100644 index 000000000..a6266ce66 --- /dev/null +++ b/tools/src/test/resources/com/nedap/archie/archetypevalidator/openEHR-EHR-CLUSTER.invalid_tuple_2.v1.0.0.adls @@ -0,0 +1,54 @@ +archetype (adl_version=2.0.5; rm_release=1.0.2; generated) + openEHR-EHR-CLUSTER.invalid_tuple_1.v1.0.0 + +language + original_language = <[ISO_639-1::en]> + +description + + original_author = < + ["name"] = <"Pieter Bos"> + > + + details = < + ["en"] = < + language = <[ISO_639-1::en]> + purpose = <"Test some basic tuple validity checks"> + > + > + + lifecycle_state = <"unmanaged"> + other_details = < + ["regression"] = <"VCARM"> + > + +definition + CLUSTER[id1] matches { -- invalid tuple + items matches { + ELEMENT[id52] occurrences matches {0..1} matches { -- Invalid tuple element + value matches { + DV_QUANTITY[id55] matches { + [magniflude, units] matches { + [{|0.0..50.0|}, {"l/m"}], + [{|0.0..50000.0|}, {"ml/min"}] + } + } + } + } + } + } + +terminology + term_definitions = < + ["en"] = < + ["id1"] = < + text = <"Invalid tuple"> + description = <"An invalid tuple"> + > + ["id52"] = < + text = <"invalid tuple element"> + description = <"Invalid tuple element"> + > + + > + > diff --git a/tools/src/test/resources/com/nedap/archie/archetypevalidator/openEHR-EHR-CLUSTER.invalid_tuple_3.v1.0.0.adls b/tools/src/test/resources/com/nedap/archie/archetypevalidator/openEHR-EHR-CLUSTER.invalid_tuple_3.v1.0.0.adls new file mode 100644 index 000000000..81e0699e0 --- /dev/null +++ b/tools/src/test/resources/com/nedap/archie/archetypevalidator/openEHR-EHR-CLUSTER.invalid_tuple_3.v1.0.0.adls @@ -0,0 +1,54 @@ +archetype (adl_version=2.0.5; rm_release=1.0.2; generated) + 
openEHR-EHR-CLUSTER.invalid_tuple_1.v1.0.0 + +language + original_language = <[ISO_639-1::en]> + +description + + original_author = < + ["name"] = <"Pieter Bos"> + > + + details = < + ["en"] = < + language = <[ISO_639-1::en]> + purpose = <"Test some basic tuple validity checks"> + > + > + + lifecycle_state = <"unmanaged"> + other_details = < + ["regression"] = <"VCORMT"> + > + +definition + CLUSTER[id1] matches { -- invalid tuple + items matches { + ELEMENT[id52] occurrences matches {0..1} matches { -- Invalid tuple element + value matches { + DV_QUANTITY[id55] matches { + [magnitude, units] matches { + [{"kg"}, {"l/m"}], + [{|0.0..50000.0|}, {"ml/min"}] + } + } + } + } + } + } + +terminology + term_definitions = < + ["en"] = < + ["id1"] = < + text = <"Invalid tuple"> + description = <"An invalid tuple"> + > + ["id52"] = < + text = <"invalid tuple element"> + description = <"Invalid tuple element"> + > + + > + > diff --git a/utils/src/main/java/com/nedap/archie/query/APathQuery.java b/utils/src/main/java/com/nedap/archie/query/APathQuery.java index 6bcbbab56..cc7de945d 100644 --- a/utils/src/main/java/com/nedap/archie/query/APathQuery.java +++ b/utils/src/main/java/com/nedap/archie/query/APathQuery.java @@ -65,6 +65,8 @@ public APathQuery(String query) { String expression = equalityExprContext.getText(); if (isDigit.matcher(expression).matches()) { pathSegment.setIndex(Integer.parseInt(expression)); + } else if(expression.matches("\".*\"") || expression.matches("'.*'")) { + pathSegment.setNodeId(expression.substring(1, expression.length()-1)); } else { pathSegment.setNodeId(expression); }