Skip to content

Commit

Permalink
Use getExpansion function in artifact diff operation (#766)
Browse files Browse the repository at this point in the history
* use expandValueSet function in artifact-diff operation

There is an issue with additions and deletions being recorded incorrectly. Some code included here is to investigate this issue.

* Refactor to include dao as first source of expansion, vsac/naive as backup

- Add a test for naive expansion in the artifact diff operation

* Remove naive expansion test which was no longer relevant

* Fix imports
  • Loading branch information
Chris0296 committed Apr 25, 2024
1 parent 59a35b5 commit 2a98f59
Show file tree
Hide file tree
Showing 3 changed files with 35 additions and 16 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -1274,7 +1274,7 @@ private Parameters advancedValueSetDiff(MetadataResource theSourceValueSet,Metad
}
return vsDiff;
}
private void doesValueSetNeedExpansion(ValueSet vset, IFhirResourceDaoValueSet<ValueSet> dao) {
private void doesValueSetNeedExpansion(ValueSet vset, IFhirResourceDaoValueSet<ValueSet> dao, Parameters expansionParams) {
Optional<Date> lastExpanded = Optional.ofNullable(vset.getExpansion()).map(e -> e.getTimestamp());
Optional<Date> lastUpdated = Optional.ofNullable(vset.getMeta()).map(m -> m.getLastUpdated());
if (lastExpanded.isPresent() && lastUpdated.isPresent() && lastExpanded.get().equals(lastUpdated.get())) {
Expand All @@ -1286,12 +1286,16 @@ private void doesValueSetNeedExpansion(ValueSet vset, IFhirResourceDaoValueSet<V
ValueSetExpansionOptions options = new ValueSetExpansionOptions();
options.setIncludeHierarchy(true);

// Expand via server first, if this is not successful then proceed with VSAC/naive expansion.
ValueSet e = dao.expand(vset,options);
// we need to do this because dao.expand sets the expansion to a subclass and then that breaks the FhirPatch
// `copy` creates the superclass again
vset.setExpansion(e.getExpansion().copy());
return;
}
if (e != null && e.hasExpansion()) {
// we need to do this because dao.expand sets the expansion to a subclass and then that breaks the FhirPatch
// `copy` creates the superclass again
vset.setExpansion(e.getExpansion().copy());
} else {
expandValueSet(vset, expansionParams);
}
}
}

public void expandValueSet(ValueSet valueSet, Parameters expansionParameters) {
Expand Down Expand Up @@ -1558,14 +1562,31 @@ private void checkForChangesInChildren(Parameters baseDiff, MetadataResource the
List<RelatedArtifact> targetRefs = combineComponentsAndDependencies(new KnowledgeArtifactAdapter<MetadataResource>(theTargetBase));
List<RelatedArtifact> sourceRefs = combineComponentsAndDependencies(new KnowledgeArtifactAdapter<MetadataResource>(theSourceBase));
additionsAndDeletions<RelatedArtifact> fixed = extractAdditionsAndDeletions(sourceRefs, targetRefs, RelatedArtifact.class);

Parameters sourceParams = new Parameters();
Parameters targetParams = new Parameters();

Extension sourceExpansionParamsExtension = theSourceBase.getExtensionByUrl(expansionParametersUrl);
Extension targetExpansionParamsExtension = theTargetBase.getExtensionByUrl(expansionParametersUrl);

if (sourceExpansionParamsExtension != null) {
Reference sourceExpansionReference = (Reference) sourceExpansionParamsExtension.getValue();
sourceParams = getExpansionParams((Library)theSourceBase, sourceExpansionReference.getReference());
}

if (targetExpansionParamsExtension != null) {
Reference targetExpansionReference = (Reference) targetExpansionParamsExtension.getValue();
targetParams = getExpansionParams((Library)theTargetBase, targetExpansionReference.getReference());
}

if (fixed.getSourceMatches().size() > 0) {
for(int i = 0; i < fixed.getSourceMatches().size(); i++) {
String sourceCanonical = fixed.getSourceMatches().get(i).getResource();
String targetCanonical = fixed.getTargetMatches().get(i).getResource();
boolean diffNotAlreadyComputedAndPresent = baseDiff.getParameter(Canonicals.getUrl(targetCanonical)) == null;
if (diffNotAlreadyComputedAndPresent) {
MetadataResource source = checkOrUpdateResourceCache(sourceCanonical, cache, hapiFhirRepository, dao);
MetadataResource target = checkOrUpdateResourceCache(targetCanonical, cache, hapiFhirRepository, dao);
MetadataResource source = checkOrUpdateResourceCache(sourceCanonical, cache, hapiFhirRepository, dao, sourceParams);
MetadataResource target = checkOrUpdateResourceCache(targetCanonical, cache, hapiFhirRepository, dao, targetParams);
// need to do something smart here to expand the executable or computable resources
checkOrUpdateDiffCache(sourceCanonical, targetCanonical, source, target, patch, cache, ctx, compareComputable, compareExecutable, dao)
.ifPresentOrElse(diffToAppend -> {
Expand Down Expand Up @@ -1681,7 +1702,7 @@ private boolean conceptSetEquals(ConceptSetComponent ref1, ConceptSetComponent r
private boolean ValueSetContainsEquals(ValueSetExpansionContainsComponent ref1, ValueSetExpansionContainsComponent ref2) {
return ref1.getSystem().equals(ref2.getSystem()) && ref1.getCode().equals(ref2.getCode());
}
private MetadataResource checkOrUpdateResourceCache(String url, diffCache cache, HapiFhirRepository hapiFhirRepository, IFhirResourceDaoValueSet<ValueSet> dao) throws UnprocessableEntityException {
private MetadataResource checkOrUpdateResourceCache(String url, diffCache cache, HapiFhirRepository hapiFhirRepository, IFhirResourceDaoValueSet<ValueSet> dao, Parameters expansionParams) throws UnprocessableEntityException {
MetadataResource resource = cache.getResource(url);
if (resource == null) {
try {
Expand All @@ -1692,7 +1713,7 @@ private MetadataResource checkOrUpdateResourceCache(String url, diffCache cache,
if (resource != null) {
if (resource instanceof ValueSet) {
try {
doesValueSetNeedExpansion((ValueSet)resource, dao);
doesValueSetNeedExpansion((ValueSet)resource, dao, expansionParams);
} catch (Exception e) {
throw new UnprocessableEntityException("Could not expand ValueSet: " + e.getMessage());
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.ValueSet;
import org.jetbrains.annotations.NotNull;
import org.opencds.cqf.cql.evaluator.fhir.util.Canonicals;

import java.nio.charset.StandardCharsets;
Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
package org.opencds.cqf.ruler.cr.r4;

import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
Expand Down Expand Up @@ -1757,10 +1755,11 @@ void artifact_diff_compare_executable() {
List<ParametersParameterComponent> deleteOperations = getOperationsByType(grouperChanges.getParameter(), "delete");
List<ParametersParameterComponent> insertOperations = getOperationsByType(grouperChanges.getParameter(), "insert");
// old codes removed
assertTrue(deleteOperations.size() == 23);
assertEquals(23, deleteOperations.size());
// new codes added
assertTrue(insertOperations.size() == 32);
assertEquals(32, insertOperations.size());
}

private List<ParametersParameterComponent> getOperationsByType(List<ParametersParameterComponent> parameters, String type) {
return parameters.stream().filter(
p -> p.getName().equals("operation")
Expand Down

0 comments on commit 2a98f59

Please sign in to comment.