diff --git a/src/main/java/org/ohdsi/webapi/conceptset/ConceptSetComparison.java b/src/main/java/org/ohdsi/webapi/conceptset/ConceptSetComparison.java index 1a3defd2f8..1149b1904e 100644 --- a/src/main/java/org/ohdsi/webapi/conceptset/ConceptSetComparison.java +++ b/src/main/java/org/ohdsi/webapi/conceptset/ConceptSetComparison.java @@ -1,8 +1,3 @@ -/* - * To change this license header, choose License Headers in Project Properties. - * To change this template file, choose Tools | Templates - * and open the template in the editor. - */ package org.ohdsi.webapi.conceptset; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; @@ -18,45 +13,154 @@ @JsonInclude(JsonInclude.Include.ALWAYS) @JsonIgnoreProperties(ignoreUnknown = true) public class ConceptSetComparison { - @JsonProperty("conceptId") - public Long conceptId; - - @JsonProperty("conceptIn1Only") - public Long conceptIn1Only; - - @JsonProperty("conceptIn2Only") - public Long conceptIn2Only; - - @JsonProperty("conceptIn1And2") - public Long conceptIn1And2; - - @JsonProperty("conceptName") - public String conceptName; - - @JsonProperty("standardConcept") - public String standardConcept; - - @JsonProperty("invalidReason") - public String invalidReason; - - @JsonProperty("conceptCode") - public String conceptCode; - - @JsonProperty("domainId") - public String domainId; - - @JsonProperty("vocabularyId") - public String vocabularyId; - - @JsonProperty("validStartDate") - public Date validStartDate; - - @JsonProperty("validEndDate") - public Date validEndDate; - - @JsonProperty("conceptClassId") - public String conceptClassId; - - @JsonProperty("nameMismatch") - public boolean nameMismatch; -} + @JsonProperty("isSourceCode") + public boolean isSourceCode; + + @JsonProperty("conceptId") + public Long conceptId; + + // Concept Set membership flags + @JsonProperty("conceptInCS1Only") + public Long conceptInCS1Only; + + @JsonProperty("conceptInCS2Only") + public Long conceptInCS2Only; + + @JsonProperty("conceptInCS1AndCS2") + public Long conceptInCS1AndCS2; + + // Concept names from both vocabularies + @JsonProperty("vocab1ConceptName") + public String vocab1ConceptName; + + @JsonProperty("vocab2ConceptName") + public String vocab2ConceptName; + + // Standard concept from both vocabularies + @JsonProperty("vocab1StandardConcept") + public String vocab1StandardConcept; + + @JsonProperty("vocab2StandardConcept") + public String vocab2StandardConcept; + + @JsonProperty("standardConcept") + public String standardConcept; + + // Invalid reason from both vocabularies + @JsonProperty("vocab1InvalidReason") + public String vocab1InvalidReason; + + @JsonProperty("vocab2InvalidReason") + public String vocab2InvalidReason; + + @JsonProperty("invalidReason") + public String invalidReason; + + // Concept code from both vocabularies + @JsonProperty("vocab1ConceptCode") + public String vocab1ConceptCode; + + @JsonProperty("vocab2ConceptCode") + public String vocab2ConceptCode; + + @JsonProperty("conceptCode") + public String conceptCode; + + // Domain ID from both vocabularies + @JsonProperty("vocab1DomainId") + public String vocab1DomainId; + + @JsonProperty("vocab2DomainId") + public String vocab2DomainId; + + @JsonProperty("domainId") + public String domainId; + + // Vocabulary ID from both vocabularies + @JsonProperty("vocab1VocabularyId") + public String vocab1VocabularyId; + + @JsonProperty("vocab2VocabularyId") + public String vocab2VocabularyId; + + @JsonProperty("vocabularyId") + public String vocabularyId; + + // Concept class ID from both 
vocabularies
+    @JsonProperty("vocab1ConceptClassId")
+    public String vocab1ConceptClassId;
+
+    @JsonProperty("vocab2ConceptClassId")
+    public String vocab2ConceptClassId;
+
+    @JsonProperty("conceptClassId")
+    public String conceptClassId;
+
+    // Valid start date from both vocabularies
+    @JsonProperty("vocab1ValidStartDate")
+    public Date vocab1ValidStartDate;
+
+    @JsonProperty("vocab2ValidStartDate")
+    public Date vocab2ValidStartDate;
+
+    @JsonProperty("validStartDate")
+    public Date validStartDate;
+
+    // Valid end date from both vocabularies
+    @JsonProperty("vocab1ValidEndDate")
+    public Date vocab1ValidEndDate;
+
+    @JsonProperty("vocab2ValidEndDate")
+    public Date vocab2ValidEndDate;
+
+    @JsonProperty("validEndDate")
+    public Date validEndDate;
+
+    // Mismatch flags for each field
+    @JsonProperty("nameMismatch")
+    public boolean nameMismatch;
+
+    @JsonProperty("standardConceptMismatch")
+    public boolean standardConceptMismatch;
+
+    @JsonProperty("invalidReasonMismatch")
+    public boolean invalidReasonMismatch;
+
+    @JsonProperty("conceptCodeMismatch")
+    public boolean conceptCodeMismatch;
+
+    @JsonProperty("domainIdMismatch")
+    public boolean domainIdMismatch;
+
+    @JsonProperty("vocabularyIdMismatch")
+    public boolean vocabularyIdMismatch;
+
+    @JsonProperty("conceptClassIdMismatch")
+    public boolean conceptClassIdMismatch;
+
+    @JsonProperty("validStartDateMismatch")
+    public boolean validStartDateMismatch;
+
+    @JsonProperty("validEndDateMismatch")
+    public boolean validEndDateMismatch;
+
+    // Vocabulary info for vocab 1
+    @JsonProperty("vocab1SourceKey")
+    public String vocab1SourceKey;
+
+    @JsonProperty("vocab1SourceName")
+    public String vocab1SourceName;
+
+    @JsonProperty("vocab1SourceVersion")
+    public String vocab1SourceVersion;
+
+    // Vocabulary info for vocab 2
+    @JsonProperty("vocab2SourceKey")
+    public String vocab2SourceKey;
+
+    @JsonProperty("vocab2SourceName")
+    public String vocab2SourceName;
+
+    @JsonProperty("vocab2SourceVersion")
+    public String vocab2SourceVersion;
+}
\ No newline at end of file
diff --git a/src/main/java/org/ohdsi/webapi/conceptset/ConceptSetRepository.java b/src/main/java/org/ohdsi/webapi/conceptset/ConceptSetRepository.java
index bdd42445d1..555a711e5b 100644
--- a/src/main/java/org/ohdsi/webapi/conceptset/ConceptSetRepository.java
+++ b/src/main/java/org/ohdsi/webapi/conceptset/ConceptSetRepository.java
@@ -15,10 +15,12 @@
  */
 package org.ohdsi.webapi.conceptset;
 
+import org.springframework.data.jpa.repository.JpaSpecificationExecutor;
 import org.springframework.data.jpa.repository.Query;
 import org.springframework.data.repository.CrudRepository;
 import org.springframework.data.repository.query.Param;
 
+import java.util.Date;
 import java.util.Collection;
 import java.util.List;
 import java.util.Optional;
@@ -27,7 +29,7 @@
  *
  * @author fdefalco
  */
-public interface ConceptSetRepository extends CrudRepository<ConceptSet, Integer> {
+public interface ConceptSetRepository extends CrudRepository<ConceptSet, Integer>, JpaSpecificationExecutor<ConceptSet> {
 
   ConceptSet findById(Integer conceptSetId);
 
   @Deprecated
@@ -44,4 +46,31 @@ public interface ConceptSetRepository extends CrudRepository<ConceptSet, Integer>
   List<ConceptSet> findByTags(@Param("tagNames") List<String> tagNames);
+
+  @Query("SELECT cs FROM ConceptSet cs WHERE " +
+          "(:createdFrom IS NULL OR cs.createdDate >= :createdFrom) AND " +
+          "(:createdTo IS NULL OR cs.createdDate <= :createdTo) AND " +
+          "(:updatedFrom IS NULL OR cs.modifiedDate >= :updatedFrom) AND " +
+          "(:updatedTo IS NULL OR cs.modifiedDate <= :updatedTo)")
+  List<ConceptSet> findByDateFilters(
+          @Param("createdFrom") Date createdFrom,
@Param("createdTo") Date createdTo, + @Param("updatedFrom") Date updatedFrom, + @Param("updatedTo") Date updatedTo + ); + + @Query("SELECT DISTINCT cs FROM ConceptSet cs " + + "JOIN cs.tags t " + + "WHERE t.id IN :tagIds AND " + + "(:createdFrom IS NULL OR cs.createdDate >= :createdFrom) AND " + + "(:createdTo IS NULL OR cs.createdDate <= :createdTo) AND " + + "(:updatedFrom IS NULL OR cs.modifiedDate >= :updatedFrom) AND " + + "(:updatedTo IS NULL OR cs.modifiedDate <= :updatedTo)") + List findByTagsAndDateFilters( + @Param("tagIds") List tagIds, + @Param("createdFrom") Date createdFrom, + @Param("createdTo") Date createdTo, + @Param("updatedFrom") Date updatedFrom, + @Param("updatedTo") Date updatedTo + ); } diff --git a/src/main/java/org/ohdsi/webapi/estimation/EstimationController.java b/src/main/java/org/ohdsi/webapi/estimation/EstimationController.java index 680134a8f0..f0594ec762 100644 --- a/src/main/java/org/ohdsi/webapi/estimation/EstimationController.java +++ b/src/main/java/org/ohdsi/webapi/estimation/EstimationController.java @@ -26,7 +26,7 @@ import org.springframework.core.convert.support.GenericConversionService; import org.springframework.stereotype.Controller; -import javax.transaction.Transactional; +import org.springframework.transaction.annotation.Transactional; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.DefaultValue; diff --git a/src/main/java/org/ohdsi/webapi/estimation/EstimationServiceImpl.java b/src/main/java/org/ohdsi/webapi/estimation/EstimationServiceImpl.java index 23db08e233..5788df4b9f 100644 --- a/src/main/java/org/ohdsi/webapi/estimation/EstimationServiceImpl.java +++ b/src/main/java/org/ohdsi/webapi/estimation/EstimationServiceImpl.java @@ -56,7 +56,7 @@ import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; -import javax.transaction.Transactional; +import org.springframework.transaction.annotation.Transactional; import javax.ws.rs.InternalServerErrorException; import java.io.ByteArrayOutputStream; import java.io.File; diff --git a/src/main/java/org/ohdsi/webapi/executionengine/service/ScriptExecutionServiceImpl.java b/src/main/java/org/ohdsi/webapi/executionengine/service/ScriptExecutionServiceImpl.java index 573b7df988..450c934acf 100644 --- a/src/main/java/org/ohdsi/webapi/executionengine/service/ScriptExecutionServiceImpl.java +++ b/src/main/java/org/ohdsi/webapi/executionengine/service/ScriptExecutionServiceImpl.java @@ -45,7 +45,7 @@ import javax.annotation.PostConstruct; import javax.net.ssl.HttpsURLConnection; -import javax.transaction.Transactional; +import org.springframework.transaction.annotation.Transactional; import javax.ws.rs.InternalServerErrorException; import javax.ws.rs.NotFoundException; import javax.ws.rs.client.Entity; diff --git a/src/main/java/org/ohdsi/webapi/job/JobArtifactController.java b/src/main/java/org/ohdsi/webapi/job/JobArtifactController.java new file mode 100644 index 0000000000..c18bca722e --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/job/JobArtifactController.java @@ -0,0 +1,45 @@ +package org.ohdsi.webapi.job; + +import org.ohdsi.webapi.job.artifact.JobArtifactService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Component; +import org.springframework.transaction.annotation.Transactional; + +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; + +/** + * Controller for 
managing job execution artifacts (reports, results, etc.) + */ +@Path("/job") +@Component +public class JobArtifactController { + + private static final Logger logger = LoggerFactory.getLogger(JobArtifactController.class); + + private final JobArtifactService artifactService; + + public JobArtifactController(JobArtifactService artifactService) { + this.artifactService = artifactService; + } + + /** + * Download artifact for a specific job execution + * + * @param executionId the job execution ID + * @return Response containing the artifact file + */ + @GET + @Path("/{executionId}/artifact") + @Produces(MediaType.APPLICATION_OCTET_STREAM) + @Transactional + public Response downloadArtifact(@PathParam("executionId") Long executionId) { + logger.info("Artifact download requested for job execution ID: {}", executionId); + return artifactService.downloadArtifact(executionId); + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/job/JobExecutionResource.java b/src/main/java/org/ohdsi/webapi/job/JobExecutionResource.java index fcedee54b9..196c8097f9 100644 --- a/src/main/java/org/ohdsi/webapi/job/JobExecutionResource.java +++ b/src/main/java/org/ohdsi/webapi/job/JobExecutionResource.java @@ -33,8 +33,11 @@ public class JobExecutionResource { @JsonProperty("ownerType") private JobOwnerType ownerType; - - public JobExecutionResource() { + + @JsonProperty("hasArtifact") + private Boolean hasArtifact; + + public JobExecutionResource() { //needed for json deserialization } @@ -129,4 +132,12 @@ public JobOwnerType getOwnerType() { public void setOwnerType(JobOwnerType ownerType) { this.ownerType = ownerType; } + + public Boolean getHasArtifact() { + return hasArtifact; + } + + public void setHasArtifact(Boolean hasArtifact) { + this.hasArtifact = hasArtifact; + } } diff --git a/src/main/java/org/ohdsi/webapi/job/JobUtils.java b/src/main/java/org/ohdsi/webapi/job/JobUtils.java index 7cceb307ee..deeca38ea1 100644 --- a/src/main/java/org/ohdsi/webapi/job/JobUtils.java +++ b/src/main/java/org/ohdsi/webapi/job/JobUtils.java @@ -3,6 +3,8 @@ import java.sql.ResultSet; import java.sql.SQLException; import java.util.*; +import java.util.function.Function; +import java.util.function.Supplier; import java.util.stream.Collectors; import org.ohdsi.webapi.Constants; @@ -27,14 +29,19 @@ public static JobInstanceResource toJobInstanceResource(final JobInstance jobIns job.setName(jobInstance.getJobName()); return job; } - - public static JobExecutionResource toJobExecutionResource(final JobExecution jobExecution) { + + public static JobExecutionResource toJobExecutionResource(final JobExecution jobExecution) { + return JobUtils.toJobExecutionResource(jobExecution, o -> Boolean.FALSE); + } + + public static JobExecutionResource toJobExecutionResource(final JobExecution jobExecution, Function hasArtifactProvider) { final JobExecutionResource execution = new JobExecutionResource( toJobInstanceResource(jobExecution.getJobInstance()), jobExecution.getId()); execution.setStatus(jobExecution.getStatus().name()); execution.setStartDate(jobExecution.getStartTime()); execution.setEndDate(jobExecution.getEndTime()); execution.setExitStatus(jobExecution.getExitStatus().getExitCode()); + execution.setHasArtifact(hasArtifactProvider.apply(jobExecution)); JobParameters jobParams = jobExecution.getJobParameters(); if (jobParams != null) { Map params = jobParams.getParameters(); @@ -51,7 +58,11 @@ public static JobExecutionResource toJobExecutionResource(final JobExecution job } return execution; } - + + 
public static List<JobExecutionResource> toJobExecutionResource(final ResultSet rs) throws SQLException {
+    return JobUtils.toJobExecutionResource(rs, o -> Boolean.FALSE);
+  }
+
   /**
    * Create List of JobExecutionResource objects containing job parameters.
    *

@@ -61,7 +72,7 @@ public static JobExecutionResource toJobExecutionResource(final JobExecution job
    * @return
    * @throws SQLException
    */
-  public static List<JobExecutionResource> toJobExecutionResource(final ResultSet rs) throws SQLException {
+  public static List<JobExecutionResource> toJobExecutionResource(final ResultSet rs, Function<JobExecution, Boolean> hasArtifactProvider) throws SQLException {
     //TODO order by executionId
     List<JobExecutionResource> jobs = new ArrayList<>();
     JobExecutionResource jobexec = null;
@@ -91,7 +102,7 @@ public static List<JobExecutionResource> toJobExecutionResource(final ResultSet
         jobExecution.setCreateTime(rs.getTimestamp(7));
         jobExecution.setLastUpdated(rs.getTimestamp(8));
         jobExecution.setVersion(rs.getInt(9));
-        jobexec = toJobExecutionResource(jobExecution);
+        jobexec = toJobExecutionResource(jobExecution, hasArtifactProvider);
       }
       //parameters starts at 12
diff --git a/src/main/java/org/ohdsi/webapi/job/artifact/ConceptSetBatchCompareArtifactGenerator.java b/src/main/java/org/ohdsi/webapi/job/artifact/ConceptSetBatchCompareArtifactGenerator.java
new file mode 100644
index 0000000000..87564defd0
--- /dev/null
+++ b/src/main/java/org/ohdsi/webapi/job/artifact/ConceptSetBatchCompareArtifactGenerator.java
@@ -0,0 +1,222 @@
+package org.ohdsi.webapi.job.artifact;
+
+import org.ohdsi.webapi.job.artifact.cscompare.CsvFileGenerator;
+import org.ohdsi.webapi.job.artifact.cscompare.ReadmeFileGenerator;
+import org.ohdsi.webapi.job.artifact.cscompare.SummaryFileGenerator;
+import org.ohdsi.webapi.service.cscompare.entity.ConceptSetCompareJobDiffEntity;
+import org.ohdsi.webapi.service.cscompare.entity.ConceptSetCompareJobEntity;
+import org.ohdsi.webapi.service.cscompare.repository.ConceptSetCompareJobDiffRepository;
+import org.ohdsi.webapi.service.cscompare.repository.ConceptSetCompareJobRepository;
+import org.ohdsi.webapi.util.GenericFileWriter;
+import org.ohdsi.webapi.util.TempFileUtils;
+import org.ohdsi.webapi.util.archive.ArchiveStrategies;
+import org.ohdsi.webapi.util.archive.ArchiveStrategy;
+import org.ohdsi.webapi.util.archive.TemporaryArchive;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.batch.core.JobExecution;
+import org.springframework.batch.core.JobParameters;
+import org.springframework.core.io.FileSystemResource;
+import org.springframework.core.io.Resource;
+import org.springframework.stereotype.Component;
+
+import java.io.IOException;
+import java.nio.file.Path;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.List;
+import java.util.Optional;
+
+/**
+ * Artifact generator for Concept Set Batch Compare jobs.
+ * Orchestrates the generation of comparison reports in ZIP format.
+ */ +@Component +public class ConceptSetBatchCompareArtifactGenerator implements JobArtifactGenerator { + + private static final Logger logger = LoggerFactory.getLogger(ConceptSetBatchCompareArtifactGenerator.class); + private static final String JOB_NAME = "conceptSetBatchCompareJob"; + private static final String CONTENT_TYPE = "application/zip"; + private static final String DATE_FORMAT = "yyyy-MM-dd_HHmmss"; + private static final String TEMP_DIR_PREFIX = "concept-set-compare-"; + private static final String ARCHIVE_PREFIX = "cs_compare_"; + private static final String ARCHIVE_SUFFIX = ".zip"; + + private final GenericFileWriter fileWriter; + private final ArchiveStrategy archiveStrategy; + private final ConceptSetCompareJobRepository compareJobRepository; + private final ConceptSetCompareJobDiffRepository compareJobDiffRepository; + + // Specialized generators + private final ReadmeFileGenerator readmeGenerator; + private final SummaryFileGenerator summaryGenerator; + private final CsvFileGenerator csvGenerator; + + public ConceptSetBatchCompareArtifactGenerator( + GenericFileWriter fileWriter, + ConceptSetCompareJobRepository compareJobRepository, + ConceptSetCompareJobDiffRepository compareJobDiffRepository, + ReadmeFileGenerator readmeGenerator, + SummaryFileGenerator summaryGenerator, + CsvFileGenerator csvGenerator) { + + this.fileWriter = fileWriter; + this.archiveStrategy = ArchiveStrategies.zip(ARCHIVE_PREFIX, ARCHIVE_SUFFIX); + this.compareJobRepository = compareJobRepository; + this.compareJobDiffRepository = compareJobDiffRepository; + this.readmeGenerator = readmeGenerator; + this.summaryGenerator = summaryGenerator; + this.csvGenerator = csvGenerator; + } + + @Override + public boolean supports(JobExecution jobExecution) { + return jobExecution.getJobInstance() != null && + JOB_NAME.equals(jobExecution.getJobInstance().getJobName()); + } + + @Override + public boolean hasArtifact(JobExecution jobExecution) { + if (!"COMPLETED".equals(jobExecution.getStatus().toString())) { + logger.debug("Job execution {} is not completed, no artifact available", jobExecution.getId()); + return false; + } + Long executionId = jobExecution.getId(); + Optional compareJobOpt = compareJobRepository.findByExecutionId(executionId); + + if (!compareJobOpt.isPresent()) { + logger.warn("No compare job found for execution ID: {}", executionId); + return false; + } + + ConceptSetCompareJobEntity compareJob = compareJobOpt.get(); + logger.debug("Found compare job with ID: {}, conceptSetsAnalyzed: {}", + compareJob.getId(), compareJob.getConceptSetsAnalyzed()); + + // Check if there are any diffs (can be zero if no differences were found) + Long diffCount = compareJobDiffRepository.countByCompareJobId(compareJob.getId()); + + logger.info("Job execution {} has {} diff results in database", executionId, diffCount); + return true; + } + + @Override + public Resource getArtifact(JobExecution jobExecution) throws IOException { + logger.info("Generating artifact for job execution: {}", jobExecution.getId()); + try { + TemporaryArchive archive = generateReport(jobExecution); + return new FileSystemResource(archive.getArchivePath().toFile()); + } catch (Exception e) { + logger.error("Failed to generate artifact for execution {}", jobExecution.getId(), e); + throw new IOException("Failed to generate artifact", e); + } + } + + @Override + public String getArtifactFilename(JobExecution jobExecution) { + SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT); + String timestamp = sdf.format(new Date()); + 
return String.format("concept_set_batch_compare_%d_%s.zip", + jobExecution.getId(), timestamp); + } + + @Override + public String getContentType() { + return CONTENT_TYPE; + } + + /** + * Generate the comparison report as a ZIP file + */ + private TemporaryArchive generateReport(JobExecution jobExecution) { + return TempFileUtils.doInDirectory(TEMP_DIR_PREFIX, workDir -> { + try { + logger.info("Creating report structure in temporary directory"); + + JobParameters jobParams = jobExecution.getJobParameters(); + Long executionId = jobExecution.getId(); + + Optional compareJobOpt = + compareJobRepository.findByExecutionId(executionId); + + if (!compareJobOpt.isPresent()) { + logger.warn("No compare job found for execution ID: {}", executionId); + createMinimalReport(workDir, jobExecution, jobParams); + } else { + createFullReport(workDir, jobExecution, jobParams, compareJobOpt.get()); + } + + // Package everything into a ZIP + logger.info("Packaging report into ZIP archive..."); + Path archivePath = archiveStrategy.apply(workDir); + String filename = getArtifactFilename(jobExecution); + + logger.info("Successfully created artifact: {} at {}", filename, archivePath); + return new TemporaryArchive(filename, archivePath); + + } catch (IOException e) { + logger.error("Failed to create report structure", e); + throw new RuntimeException("Failed to generate report", e); + } + }); + } + + /** + * Create minimal report when no compare job is found + */ + private void createMinimalReport(Path workDir, JobExecution jobExecution, + JobParameters jobParams) throws IOException { + logger.info("Creating minimal report (no compare job found)"); + readmeGenerator.generate(workDir, jobExecution, jobParams, null, null); + createNoResultsFile(workDir); + } + + /** + * Create full report with all comparison results + */ + private void createFullReport(Path workDir, JobExecution jobExecution, JobParameters jobParams, + ConceptSetCompareJobEntity compareJob) throws IOException { + logger.info("Creating full report for compare job ID: {}", compareJob.getId()); + + List allDiffs = + compareJobDiffRepository.findByCompareJobIdOrdered(compareJob.getId()); + + logger.info("Found {} diff entities for compare job", allDiffs.size()); + + // Generate all report files + readmeGenerator.generate(workDir, jobExecution, jobParams, compareJob, allDiffs); + summaryGenerator.generate(workDir, compareJob, allDiffs); + + if (!allDiffs.isEmpty()) { + // Generate consolidated CSVs - separate files for included codes and source codes + csvGenerator.generateIncludedCodesConsolidatedCsv(workDir, allDiffs); + csvGenerator.generateSourceCodesConsolidatedCsv(workDir, allDiffs); + + // Generate per-concept-set CSVs + csvGenerator.generatePerConceptSetCsvs(workDir, allDiffs, false); // Included concepts + + // Generate source code CSVs if applicable + if (compareJob.getCompareSourceCodes() != null && compareJob.getCompareSourceCodes()) { + csvGenerator.generatePerConceptSetCsvs(workDir, allDiffs, true); // Source codes + } + } else { + logger.info("No diffs found, creating no-differences report"); + createNoResultsFile(workDir); + } + } + + /** + * Create a file indicating no results were found + */ + private void createNoResultsFile(Path workDir) throws IOException { + StringBuilder content = new StringBuilder(); + content.append("No Differences Found\n"); + content.append("====================\n\n"); + content.append("The batch comparison completed successfully but found no differences\n"); + content.append("between the concept sets in the 
two sources.\n"); + + Path noResultsPath = workDir.resolve("no_results.txt"); + fileWriter.writeTextFile(noResultsPath, pw -> pw.print(content.toString())); + logger.debug("Created no results file at {}", noResultsPath); + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/job/artifact/JobArtifactGenerator.java b/src/main/java/org/ohdsi/webapi/job/artifact/JobArtifactGenerator.java new file mode 100644 index 0000000000..21c3096f91 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/job/artifact/JobArtifactGenerator.java @@ -0,0 +1,54 @@ +package org.ohdsi.webapi.job.artifact; + +import org.springframework.batch.core.JobExecution; +import org.springframework.core.io.Resource; + +import java.io.IOException; + +/** + * Interface for generating downloadable artifacts from job executions + */ +public interface JobArtifactGenerator { + + /** + * Check if this generator supports the given job execution + * + * @param jobExecution the job execution to check + * @return true if this generator can handle this job type + */ + boolean supports(JobExecution jobExecution); + + /** + * Check if an artifact exists for the given job execution + * + * @param jobExecution the job execution + * @return true if an artifact is available + */ + boolean hasArtifact(JobExecution jobExecution); + + /** + * Get the artifact as a Spring Resource + * + * @param jobExecution the job execution + * @return the artifact resource + * @throws IOException if artifact cannot be retrieved + */ + Resource getArtifact(JobExecution jobExecution) throws IOException; + + /** + * Get the filename for the artifact + * + * @param jobExecution the job execution + * @return the suggested filename for download + */ + String getArtifactFilename(JobExecution jobExecution); + + /** + * Get the content type of the artifact + * + * @return MIME type string (e.g., "application/zip", "text/csv") + */ + String getContentType(); + + +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/job/artifact/JobArtifactGeneratorFactory.java b/src/main/java/org/ohdsi/webapi/job/artifact/JobArtifactGeneratorFactory.java new file mode 100644 index 0000000000..e1082ef573 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/job/artifact/JobArtifactGeneratorFactory.java @@ -0,0 +1,32 @@ +package org.ohdsi.webapi.job.artifact; + +import org.springframework.batch.core.JobExecution; +import org.springframework.stereotype.Component; + +import java.util.List; + +/** + * Factory for retrieving the appropriate JobArtifactGenerator for a given job + */ +@Component +public class JobArtifactGeneratorFactory { + + private final List generators; + + public JobArtifactGeneratorFactory(List generators) { + this.generators = generators; + } + + /** + * Get the appropriate generator for the given job execution + * + * @param jobExecution the job execution + * @return the matching generator, or null if none found + */ + public JobArtifactGenerator getGenerator(JobExecution jobExecution) { + return generators.stream() + .filter(generator -> generator.supports(jobExecution)) + .findFirst() + .orElse(null); + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/job/artifact/JobArtifactPaths.java b/src/main/java/org/ohdsi/webapi/job/artifact/JobArtifactPaths.java new file mode 100644 index 0000000000..d6802a35f8 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/job/artifact/JobArtifactPaths.java @@ -0,0 +1,100 @@ +package org.ohdsi.webapi.job.artifact; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + 
+import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; + +/** + * Utility class for managing job artifact file locations + */ +public class JobArtifactPaths { + + private static final Logger logger = LoggerFactory.getLogger(JobArtifactPaths.class); + private static final String ARTIFACT_DIR_NAME = "atlas-job-artifacts"; + + /** + * Get the base directory for storing job artifacts + * Creates the directory if it doesn't exist + */ + public static Path getArtifactBaseDirectory() throws IOException { + String tempDir = System.getProperty("java.io.tmpdir"); + Path baseDir = Paths.get(tempDir, ARTIFACT_DIR_NAME); + + logger.debug("Artifact base directory path: {}", baseDir.toAbsolutePath()); + + if (!Files.exists(baseDir)) { + logger.info("Creating artifact base directory: {}", baseDir.toAbsolutePath()); + Files.createDirectories(baseDir); + logger.info("Successfully created artifact base directory"); + } else { + logger.debug("Artifact base directory already exists: {}", baseDir.toAbsolutePath()); + } + + return baseDir; + } + + /** + * Get the path for a specific job's artifact file + * @param jobExecutionId the job execution ID + * @return the path where the artifact should be stored + */ + public static Path getArtifactPath(Long jobExecutionId) throws IOException { + Path baseDir = getArtifactBaseDirectory(); + Path artifactPath = baseDir.resolve(jobExecutionId + ".zip"); + logger.debug("Artifact path for job {}: {}", jobExecutionId, artifactPath.toAbsolutePath()); + return artifactPath; + } + + /** + * Check if a pre-generated artifact exists for a job + * @param jobExecutionId the job execution ID + * @return true if the artifact file exists + */ + public static boolean artifactExists(Long jobExecutionId) { + try { + Path artifactPath = getArtifactPath(jobExecutionId); + boolean exists = Files.exists(artifactPath) && Files.isRegularFile(artifactPath); + + if (exists) { + long fileSize = Files.size(artifactPath); + logger.debug("Artifact exists for job {} at {} ({} bytes)", + jobExecutionId, artifactPath, fileSize); + } else { + logger.debug("Artifact does not exist for job {} at {}", + jobExecutionId, artifactPath); + } + + return exists; + } catch (IOException e) { + logger.warn("Error checking for artifact existence for job {}: {}", + jobExecutionId, e.getMessage()); + return false; + } + } + + /** + * Delete the artifact file for a job if it exists + * @param jobExecutionId the job execution ID + * @return true if the file was deleted, false otherwise + */ + public static boolean deleteArtifact(Long jobExecutionId) { + try { + Path artifactPath = getArtifactPath(jobExecutionId); + if (Files.exists(artifactPath)) { + Files.delete(artifactPath); + logger.info("Deleted artifact file for job {} at {}", jobExecutionId, artifactPath); + return true; + } else { + logger.debug("No artifact file to delete for job {}", jobExecutionId); + } + } catch (IOException e) { + logger.error("Failed to delete artifact for job {}: {}", + jobExecutionId, e.getMessage(), e); + } + return false; + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/job/artifact/JobArtifactService.java b/src/main/java/org/ohdsi/webapi/job/artifact/JobArtifactService.java new file mode 100644 index 0000000000..ac36b240fb --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/job/artifact/JobArtifactService.java @@ -0,0 +1,259 @@ +package org.ohdsi.webapi.job.artifact; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import 
org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.explore.JobExplorer; +import org.springframework.core.io.FileSystemResource; +import org.springframework.core.io.Resource; +import org.springframework.stereotype.Service; + +import javax.ws.rs.core.HttpHeaders; +import javax.ws.rs.core.Response; +import javax.ws.rs.core.StreamingOutput; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; + +/** + * Service for handling job artifact operations including pre-generated artifact downloads + */ +@Service +public class JobArtifactService { + + private static final Logger logger = LoggerFactory.getLogger(JobArtifactService.class); + private static final int BUFFER_SIZE = 8192; + + private final JobExplorer jobExplorer; + private final JobArtifactGeneratorFactory artifactGeneratorFactory; + + public JobArtifactService( + JobExplorer jobExplorer, + JobArtifactGeneratorFactory artifactGeneratorFactory) { + this.jobExplorer = jobExplorer; + this.artifactGeneratorFactory = artifactGeneratorFactory; + } + + /** + * Main entry point for downloading job artifacts + * Checks for pre-generated artifacts first, falls back to on-demand generation + * + * @param executionId the job execution ID + * @return Response containing the artifact or appropriate error status + */ + public Response downloadArtifact(Long executionId) { + try { + logger.info("Processing artifact download for job execution ID: {}", executionId); + + // First, check if a pre-generated artifact exists + if (hasPreGeneratedArtifact(executionId)) { + logger.info("Found pre-generated artifact for job execution {}", executionId); + return downloadPreGeneratedArtifact(executionId); + } + + logger.info("No pre-generated artifact found, generating on-demand for job execution {}", executionId); + return generateAndDownloadArtifact(executionId); + + } catch (Exception e) { + logger.error("Unexpected error while processing artifact for execution {}", executionId, e); + return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build(); + } + } + + /** + * Generate artifact on-demand and stream to client + */ + private Response generateAndDownloadArtifact(Long executionId) { + try { + // Retrieve job execution + JobExecution jobExecution = jobExplorer.getJobExecution(executionId); + + if (jobExecution == null) { + logger.warn("Job execution not found: {}", executionId); + return Response.status(Response.Status.NOT_FOUND).build(); + } + + // Get appropriate artifact generator for this job + JobArtifactGenerator generator = artifactGeneratorFactory.getGenerator(jobExecution); + + if (generator == null) { + logger.warn("No artifact generator found for job: {}", + jobExecution.getJobInstance().getJobName()); + return Response.status(Response.Status.NOT_IMPLEMENTED).build(); + } + + // Check if artifact exists + if (!generator.hasArtifact(jobExecution)) { + logger.info("No artifact available for job execution: {}", executionId); + return Response.noContent().build(); + } + + // Generate/retrieve the artifact + Resource resource = generator.getArtifact(jobExecution); + + if (resource == null || !resource.exists()) { + logger.warn("Artifact resource not found or does not exist for execution: {}", executionId); + return Response.noContent().build(); + } + + String filename = generator.getArtifactFilename(jobExecution); + String contentType = generator.getContentType(); + + logger.info("Successfully prepared on-demand artifact '{}' for download", filename); + + 
return createStreamingResponse(resource, filename, contentType, "on-demand").build();
+
+        } catch (IOException e) {
+            logger.error("IOException while generating artifact for execution {}", executionId, e);
+            return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
+        }
+    }
+
+    /**
+     * Download a pre-generated artifact from the file system
+     */
+    private Response downloadPreGeneratedArtifact(Long executionId) {
+        try {
+            logger.info("Attempting to download pre-generated artifact for job execution {}", executionId);
+
+            Path artifactPath = JobArtifactPaths.getArtifactPath(executionId);
+
+            if (!Files.exists(artifactPath) || !Files.isRegularFile(artifactPath)) {
+                logger.warn("Pre-generated artifact does not exist or is not a file: {}", artifactPath);
+                return Response.status(Response.Status.NOT_FOUND).build();
+            }
+
+            long fileSize = Files.size(artifactPath);
+            logger.info("Found pre-generated artifact at {} with size {} bytes", artifactPath, fileSize);
+
+            Resource resource = new FileSystemResource(artifactPath.toFile());
+
+            if (!resource.exists() || !resource.isReadable()) {
+                logger.error("Artifact resource exists but is not readable: {}", artifactPath);
+                return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
+            }
+
+            String filename = String.format("job_%d_artifact.zip", executionId);
+            String contentType = "application/zip";
+
+            logger.info("Serving pre-generated artifact '{}' from {}", filename, artifactPath);
+
+            return Response
+                    .ok(Files.newInputStream(artifactPath))
+                    .header(HttpHeaders.CONTENT_TYPE, contentType)
+                    .header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + filename + "\"")
+                    .build();
+
+        } catch (IOException e) {
+            logger.error("IOException while accessing pre-generated artifact for execution {}", executionId, e);
+            return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
+        }
+    }
+
+    /**
+     * Create a streaming response for the artifact resource
+     */
+    private Response.ResponseBuilder createStreamingResponse(
+            Resource resource,
+            String filename,
+            String contentType,
+            String source) {
+
+        StreamingOutput stream = output -> {
+            try (InputStream input = resource.getInputStream()) {
+                byte[] buffer = new byte[BUFFER_SIZE];
+                int bytesRead;
+                long totalBytesWritten = 0;
+
+                while ((bytesRead = input.read(buffer)) != -1) {
+                    output.write(buffer, 0, bytesRead);
+                    totalBytesWritten += bytesRead;
+                }
+                output.flush();
+
+                logger.debug("Streamed {} bytes for artifact {}", totalBytesWritten, filename);
+            } catch (IOException e) {
+                logger.error("Error streaming artifact {}: {}", filename, e.getMessage(), e);
+                throw e;
+            }
+        };
+
+        return Response.ok(stream, contentType)
+                .header("Content-Disposition", "attachment; filename=\"" + filename + "\"")
+                .header("X-Atlas-Artifact-Source", source);
+    }
+
+    /**
+     * Check if a pre-generated artifact is available for download
+     */
+    public boolean hasPreGeneratedArtifact(Long executionId) {
+        return JobArtifactPaths.artifactExists(executionId);
+    }
+
+    /**
+     * Get information about a pre-generated artifact
+     *
+     * @param executionId the job
execution ID + * @return artifact info or null if not found + */ + public ArtifactInfo getArtifactInfo(Long executionId) { + try { + if (!JobArtifactPaths.artifactExists(executionId)) { + return null; + } + + Path artifactPath = JobArtifactPaths.getArtifactPath(executionId); + long fileSize = Files.size(artifactPath); + long lastModified = Files.getLastModifiedTime(artifactPath).toMillis(); + + return new ArtifactInfo(executionId, fileSize, lastModified, artifactPath.toString()); + } catch (IOException e) { + logger.error("Error getting artifact info for job {}: {}", executionId, e.getMessage()); + return null; + } + } + + /** + * Container for artifact metadata + */ + public static class ArtifactInfo { + private final Long executionId; + private final long fileSizeBytes; + private final long lastModifiedMillis; + private final String path; + + public ArtifactInfo(Long executionId, long fileSizeBytes, long lastModifiedMillis, String path) { + this.executionId = executionId; + this.fileSizeBytes = fileSizeBytes; + this.lastModifiedMillis = lastModifiedMillis; + this.path = path; + } + + public Long getExecutionId() { + return executionId; + } + + public long getFileSizeBytes() { + return fileSizeBytes; + } + + public long getLastModifiedMillis() { + return lastModifiedMillis; + } + + public String getPath() { + return path; + } + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/job/artifact/cscompare/ComparisonStatistics.java b/src/main/java/org/ohdsi/webapi/job/artifact/cscompare/ComparisonStatistics.java new file mode 100644 index 0000000000..351ce7e5f1 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/job/artifact/cscompare/ComparisonStatistics.java @@ -0,0 +1,142 @@ +package org.ohdsi.webapi.job.artifact.cscompare; + +import org.ohdsi.webapi.service.cscompare.entity.ConceptSetCompareJobDiffEntity; + +import java.util.List; + +/** + * Calculates statistics from comparison diff entities + */ +public class ComparisonStatistics { + + private final long totalDiffs; + private final long includedConceptDiffs; + private final long sourceCodeDiffs; + private final long uniqueConcepts; + private final long nameMismatches; + private final long standardConceptMismatches; + private final long invalidReasonMismatches; + private final long conceptCodeMismatches; + private final long domainIdMismatches; + private final long vocabularyIdMismatches; + private final long conceptClassIdMismatches; + private final long validStartDateMismatches; + private final long validEndDateMismatches; + private final long cs1Only; + private final long cs2Only; + private final long bothCS; + + public ComparisonStatistics(List diffs) { + this.totalDiffs = diffs.size(); + + this.includedConceptDiffs = diffs.stream() + .filter(d -> d.getIsSourceCode() == null || !d.getIsSourceCode()) + .count(); + + this.sourceCodeDiffs = diffs.stream() + .filter(d -> d.getIsSourceCode() != null && d.getIsSourceCode()) + .count(); + + this.uniqueConcepts = diffs.stream() + .map(ConceptSetCompareJobDiffEntity::getConceptId) + .distinct() + .count(); + + this.nameMismatches = countMismatch(diffs, ConceptSetCompareJobDiffEntity::getNameMismatch); + this.standardConceptMismatches = countMismatch(diffs, ConceptSetCompareJobDiffEntity::getStandardConceptMismatch); + this.invalidReasonMismatches = countMismatch(diffs, ConceptSetCompareJobDiffEntity::getInvalidReasonMismatch); + this.conceptCodeMismatches = countMismatch(diffs, ConceptSetCompareJobDiffEntity::getConceptCodeMismatch); + this.domainIdMismatches = 
countMismatch(diffs, ConceptSetCompareJobDiffEntity::getDomainIdMismatch); + this.vocabularyIdMismatches = countMismatch(diffs, ConceptSetCompareJobDiffEntity::getVocabularyIdMismatch); + this.conceptClassIdMismatches = countMismatch(diffs, ConceptSetCompareJobDiffEntity::getConceptClassIdMismatch); + this.validStartDateMismatches = countMismatch(diffs, ConceptSetCompareJobDiffEntity::getValidStartDateMismatch); + this.validEndDateMismatches = countMismatch(diffs, ConceptSetCompareJobDiffEntity::getValidEndDateMismatch); + + this.cs1Only = diffs.stream() + .filter(d -> d.getConceptInCS1Only() != null && d.getConceptInCS1Only() > 0) + .count(); + + this.cs2Only = diffs.stream() + .filter(d -> d.getConceptInCS2Only() != null && d.getConceptInCS2Only() > 0) + .count(); + + this.bothCS = diffs.stream() + .filter(d -> d.getConceptInCS1AndCS2() != null && d.getConceptInCS1AndCS2() > 0) + .count(); + } + + private long countMismatch(List diffs, + java.util.function.Function getter) { + return diffs.stream() + .filter(d -> { + Boolean value = getter.apply(d); + return value != null && value; + }) + .count(); + } + + // Getters + public long getTotalDiffs() { + return totalDiffs; + } + + public long getIncludedConceptDiffs() { + return includedConceptDiffs; + } + + public long getSourceCodeDiffs() { + return sourceCodeDiffs; + } + + public long getUniqueConcepts() { + return uniqueConcepts; + } + + public long getNameMismatches() { + return nameMismatches; + } + + public long getStandardConceptMismatches() { + return standardConceptMismatches; + } + + public long getInvalidReasonMismatches() { + return invalidReasonMismatches; + } + + public long getConceptCodeMismatches() { + return conceptCodeMismatches; + } + + public long getDomainIdMismatches() { + return domainIdMismatches; + } + + public long getVocabularyIdMismatches() { + return vocabularyIdMismatches; + } + + public long getConceptClassIdMismatches() { + return conceptClassIdMismatches; + } + + public long getValidStartDateMismatches() { + return validStartDateMismatches; + } + + public long getValidEndDateMismatches() { + return validEndDateMismatches; + } + + public long getCs1Only() { + return cs1Only; + } + + public long getCs2Only() { + return cs2Only; + } + + public long getBothCS() { + return bothCS; + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/job/artifact/cscompare/ConceptSetComparisonCsvBuilder.java b/src/main/java/org/ohdsi/webapi/job/artifact/cscompare/ConceptSetComparisonCsvBuilder.java new file mode 100644 index 0000000000..67770e7a88 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/job/artifact/cscompare/ConceptSetComparisonCsvBuilder.java @@ -0,0 +1,163 @@ +package org.ohdsi.webapi.job.artifact.cscompare; + +import org.ohdsi.webapi.conceptset.ConceptSet; +import org.ohdsi.webapi.conceptset.ConceptSetRepository; +import org.ohdsi.webapi.service.cscompare.entity.ConceptSetCompareJobDiffEntity; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Component; + +import java.util.Optional; + +/** + * Handles CSV row generation for concept set comparison differences + */ +@Component +public class ConceptSetComparisonCsvBuilder { + + private static final Logger logger = LoggerFactory.getLogger(ConceptSetComparisonCsvBuilder.class); + + private final ConceptSetRepository conceptSetRepository; + + public ConceptSetComparisonCsvBuilder(ConceptSetRepository conceptSetRepository) { + this.conceptSetRepository = conceptSetRepository; + } + + /** + * Build 
CSV header with all fields + */ + public String buildHeader() { + return "Concept Set ID," + + "Concept Set Name," + + "Concept ID," + + "In CS1 Only," + + "In CS2 Only," + + "In Both CS," + + "Name Mismatch," + + "Base Concept Name," + + "Target Concept Name," + + "Standard Concept Mismatch," + + "Base Standard Concept," + + "Target Standard Concept," + + "Invalid Reason Mismatch," + + "Base Invalid Reason," + + "Target Invalid Reason," + + "Concept Code Mismatch," + + "Base Concept Code," + + "Target Concept Code," + + "Domain ID Mismatch," + + "Base Domain ID," + + "Target Domain ID," + + "Vocabulary ID Mismatch," + + "Base Vocabulary ID," + + "Target Vocabulary ID," + + "Concept Class ID Mismatch," + + "Base Concept Class ID," + + "Target Concept Class ID," + + "Valid Start Date Mismatch," + + "Base Valid Start Date," + + "Target Valid Start Date," + + "Valid End Date Mismatch," + + "Base Valid End Date," + + "Target Valid End Date\n"; + } + + /** + * Build CSV row for a single diff entity + */ + public String buildRow(ConceptSetCompareJobDiffEntity diff) { + Integer conceptSetId = diff.getConceptSetId(); + String conceptSetName = getConceptSetName(conceptSetId); + + StringBuilder row = new StringBuilder(); + + // Basic info + row.append(escapeCsv(conceptSetId.toString())).append(","); + row.append(escapeCsv(conceptSetName)).append(","); + row.append(escapeCsv(diff.getConceptId() != null ? diff.getConceptId().toString() : "")).append(","); + + // Concept set membership - using Yes/No format + row.append(escapeCsv(booleanToYesNo(diff.getConceptInCS1Only() != null && diff.getConceptInCS1Only() > 0))).append(","); + row.append(escapeCsv(booleanToYesNo(diff.getConceptInCS2Only() != null && diff.getConceptInCS2Only() > 0))).append(","); + row.append(escapeCsv(booleanToYesNo(diff.getConceptInCS1AndCS2() != null && diff.getConceptInCS1AndCS2() > 0))).append(","); + + // Name mismatch + row.append(escapeCsv(booleanToYesNo(diff.getNameMismatch()))).append(","); + row.append(escapeCsv(diff.getVocab1ConceptName())).append(","); + row.append(escapeCsv(diff.getVocab2ConceptName())).append(","); + + // Standard concept mismatch + row.append(escapeCsv(booleanToYesNo(diff.getStandardConceptMismatch()))).append(","); + row.append(escapeCsv(diff.getVocab1StandardConcept())).append(","); + row.append(escapeCsv(diff.getVocab2StandardConcept())).append(","); + + // Invalid reason mismatch + row.append(escapeCsv(booleanToYesNo(diff.getInvalidReasonMismatch()))).append(","); + row.append(escapeCsv(diff.getVocab1InvalidReason())).append(","); + row.append(escapeCsv(diff.getVocab2InvalidReason())).append(","); + + // Concept code mismatch + row.append(escapeCsv(booleanToYesNo(diff.getConceptCodeMismatch()))).append(","); + row.append(escapeCsv(diff.getVocab1ConceptCode())).append(","); + row.append(escapeCsv(diff.getVocab2ConceptCode())).append(","); + + // Domain ID mismatch + row.append(escapeCsv(booleanToYesNo(diff.getDomainIdMismatch()))).append(","); + row.append(escapeCsv(diff.getVocab1DomainId())).append(","); + row.append(escapeCsv(diff.getVocab2DomainId())).append(","); + + // Vocabulary ID mismatch + row.append(escapeCsv(booleanToYesNo(diff.getVocabularyIdMismatch()))).append(","); + row.append(escapeCsv(diff.getVocab1VocabularyId())).append(","); + row.append(escapeCsv(diff.getVocab2VocabularyId())).append(","); + + // Concept class ID mismatch + row.append(escapeCsv(booleanToYesNo(diff.getConceptClassIdMismatch()))).append(","); + row.append(escapeCsv(diff.getVocab1ConceptClassId())).append(","); + 
row.append(escapeCsv(diff.getVocab2ConceptClassId())).append(","); + + // Valid start date mismatch + row.append(escapeCsv(booleanToYesNo(diff.getValidStartDateMismatch()))).append(","); + row.append(escapeCsv(diff.getVocab1ValidStartDate() != null ? diff.getVocab1ValidStartDate().toString() : "")).append(","); + row.append(escapeCsv(diff.getVocab2ValidStartDate() != null ? diff.getVocab2ValidStartDate().toString() : "")).append(","); + + // Valid end date mismatch + row.append(escapeCsv(booleanToYesNo(diff.getValidEndDateMismatch()))).append(","); + row.append(escapeCsv(diff.getVocab1ValidEndDate() != null ? diff.getVocab1ValidEndDate().toString() : "")).append(","); + row.append(escapeCsv(diff.getVocab2ValidEndDate() != null ? diff.getVocab2ValidEndDate().toString() : "")); + + row.append("\n"); + + return row.toString(); + } + + private String getConceptSetName(Integer conceptSetId) { + try { + ConceptSet conceptSet = conceptSetRepository.findById(conceptSetId); + return Optional.ofNullable(conceptSet).map(ConceptSet::getName).orElse("Unknown"); + } catch (Exception e) { + logger.warn("Failed to retrieve concept set name for ID {}", conceptSetId, e); + return "Unknown"; + } + } + + private String escapeCsv(String value) { + if (value == null) { + return ""; + } + if (value.contains(",") || value.contains("\"") || value.contains("\n")) { + return "\"" + value.replace("\"", "\"\"") + "\""; + } + return value; + } + + /** + * Convert boolean to Yes/No string + */ + private String booleanToYesNo(Boolean value) { + if (value == null) { + return "No"; + } + return value ? "Yes" : "No"; + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/job/artifact/cscompare/CsvFileGenerator.java b/src/main/java/org/ohdsi/webapi/job/artifact/cscompare/CsvFileGenerator.java new file mode 100644 index 0000000000..81b362696b --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/job/artifact/cscompare/CsvFileGenerator.java @@ -0,0 +1,135 @@ +package org.ohdsi.webapi.job.artifact.cscompare; + +import org.ohdsi.webapi.service.cscompare.entity.ConceptSetCompareJobDiffEntity; +import org.ohdsi.webapi.util.GenericFileWriter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Component; + +import java.io.IOException; +import java.nio.file.Path; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * Generates CSV files for concept set comparison differences + */ +@Component +public class CsvFileGenerator { + + private static final Logger logger = LoggerFactory.getLogger(CsvFileGenerator.class); + + private final GenericFileWriter fileWriter; + private final ConceptSetComparisonCsvBuilder csvBuilder; + + public CsvFileGenerator(GenericFileWriter fileWriter, ConceptSetComparisonCsvBuilder csvBuilder) { + this.fileWriter = fileWriter; + this.csvBuilder = csvBuilder; + } + + /** + * Generate consolidated CSV for included codes + */ + public void generateIncludedCodesConsolidatedCsv(Path workDir, List allDiffs) throws IOException { + List includedCodesDiffs = allDiffs.stream() + .filter(d -> d.getIsSourceCode() == null || !d.getIsSourceCode()) + .collect(Collectors.toList()); + + if (includedCodesDiffs.isEmpty()) { + logger.info("No included codes differences found, skipping all_included_codes_diff.csv"); + return; + } + + StringBuilder csv = new StringBuilder(); + csv.append(csvBuilder.buildHeader()); + + for (ConceptSetCompareJobDiffEntity diff : includedCodesDiffs) { + csv.append(csvBuilder.buildRow(diff)); + } 
+ + Path csvPath = workDir.resolve("all_included_codes_diff.csv"); + fileWriter.writeTextFile(csvPath, pw -> pw.print(csv.toString())); + logger.debug("Created included codes consolidated diff CSV at {}", csvPath); + } + + /** + * Generate consolidated CSV for source codes + */ + public void generateSourceCodesConsolidatedCsv(Path workDir, List allDiffs) throws IOException { + List sourceCodesDiffs = allDiffs.stream() + .filter(d -> d.getIsSourceCode() != null && d.getIsSourceCode()) + .collect(Collectors.toList()); + + if (sourceCodesDiffs.isEmpty()) { + logger.info("No source codes differences found, skipping all_source_codes_diff.csv"); + return; + } + + StringBuilder csv = new StringBuilder(); + csv.append(csvBuilder.buildHeader()); + + for (ConceptSetCompareJobDiffEntity diff : sourceCodesDiffs) { + csv.append(csvBuilder.buildRow(diff)); + } + + Path csvPath = workDir.resolve("all_source_codes_diff.csv"); + fileWriter.writeTextFile(csvPath, pw -> pw.print(csv.toString())); + logger.debug("Created source codes consolidated diff CSV at {}", csvPath); + } + + /** + * Generate individual CSV files per concept set + */ + public void generatePerConceptSetCsvs(Path workDir, List allDiffs, + boolean isSourceCode) throws IOException { + // Filter diffs based on isSourceCode flag + List filteredDiffs = allDiffs.stream() + .filter(d -> { + if (isSourceCode) { + return d.getIsSourceCode() != null && d.getIsSourceCode(); + } else { + return d.getIsSourceCode() == null || !d.getIsSourceCode(); + } + }) + .collect(Collectors.toList()); + + if (filteredDiffs.isEmpty()) { + logger.info("No {} differences found", isSourceCode ? "source code" : "included concept"); + return; + } + + // Group diffs by concept set ID + Map> diffsByConceptSet = filteredDiffs.stream() + .collect(Collectors.groupingBy(ConceptSetCompareJobDiffEntity::getConceptSetId)); + + logger.info("Creating individual CSV files for {} concept sets ({} mode)", + diffsByConceptSet.size(), + isSourceCode ? "source code" : "included concept"); + + for (Map.Entry> entry : diffsByConceptSet.entrySet()) { + Integer conceptSetId = entry.getKey(); + List diffs = entry.getValue(); + + String filename = isSourceCode + ? 
String.format("concept_set_%d_source_codes_diff.csv", conceptSetId) + : String.format("concept_set_%d_diff.csv", conceptSetId); + + generateSingleConceptSetCsv(workDir, filename, diffs); + } + } + + private void generateSingleConceptSetCsv(Path workDir, String filename, + List diffs) throws IOException { + StringBuilder csv = new StringBuilder(); + csv.append(csvBuilder.buildHeader()); + + for (ConceptSetCompareJobDiffEntity diff : diffs) { + csv.append(csvBuilder.buildRow(diff)); + } + + Path csvPath = workDir.resolve(filename); + fileWriter.writeTextFile(csvPath, pw -> pw.print(csv.toString())); + logger.debug("Created concept set diff CSV at {}", csvPath); + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/job/artifact/cscompare/ReadmeFileGenerator.java b/src/main/java/org/ohdsi/webapi/job/artifact/cscompare/ReadmeFileGenerator.java new file mode 100644 index 0000000000..123ca0da71 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/job/artifact/cscompare/ReadmeFileGenerator.java @@ -0,0 +1,233 @@ +package org.ohdsi.webapi.job.artifact.cscompare; + +import org.apache.commons.lang3.StringUtils; +import org.ohdsi.webapi.service.cscompare.entity.ConceptSetCompareJobAuthorEntity; +import org.ohdsi.webapi.service.cscompare.entity.ConceptSetCompareJobDiffEntity; +import org.ohdsi.webapi.service.cscompare.entity.ConceptSetCompareJobEntity; +import org.ohdsi.webapi.util.GenericFileWriter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.JobParameters; +import org.springframework.stereotype.Component; + +import java.io.IOException; +import java.nio.file.Path; +import java.time.Duration; +import java.util.List; +import java.util.stream.Collectors; + +/** + * Generates README.txt metadata file for comparison reports + */ +@Component +public class ReadmeFileGenerator { + + private static final Logger logger = LoggerFactory.getLogger(ReadmeFileGenerator.class); + + private final GenericFileWriter fileWriter; + + public ReadmeFileGenerator(GenericFileWriter fileWriter) { + this.fileWriter = fileWriter; + } + + public void generate(Path workDir, JobExecution jobExecution, JobParameters jobParams, + ConceptSetCompareJobEntity compareJob, List diffs) throws IOException { + + StringBuilder metadata = new StringBuilder(); + + appendHeader(metadata); + appendExecutionDetails(metadata, jobExecution); + appendComparisonParameters(metadata, jobParams, compareJob); + appendResultsSummary(metadata, compareJob, diffs); + appendReportStructure(metadata, compareJob); + + Path metadataPath = workDir.resolve("README.txt"); + fileWriter.writeTextFile(metadataPath, pw -> pw.print(metadata.toString())); + + logger.debug("Created README file at {}", metadataPath); + } + + private void appendHeader(StringBuilder sb) { + sb.append("=====================================\n"); + sb.append("Concept Set Batch Comparison Report\n"); + sb.append("=====================================\n\n"); + } + + private void appendExecutionDetails(StringBuilder sb, JobExecution jobExecution) { + sb.append("JOB EXECUTION DETAILS\n"); + sb.append("---------------------\n"); + sb.append("Execution ID: ").append(jobExecution.getId()).append("\n"); + sb.append("Job Name: ").append(jobExecution.getJobInstance().getJobName()).append("\n"); + sb.append("Status: ").append(jobExecution.getStatus()).append("\n"); + sb.append("Start Time: ").append(jobExecution.getStartTime()).append("\n"); + sb.append("End Time: 
").append(jobExecution.getEndTime()).append("\n"); + + if (jobExecution.getStartTime() != null && jobExecution.getEndTime() != null) { + Duration duration = Duration.between( + jobExecution.getStartTime().toInstant(), + jobExecution.getEndTime().toInstant() + ); + sb.append("Duration: ").append(formatDuration(duration)).append("\n"); + } + sb.append("\n"); + } + + private void appendComparisonParameters(StringBuilder sb, JobParameters jobParams, + ConceptSetCompareJobEntity compareJob) { + sb.append("COMPARISON PARAMETERS\n"); + sb.append("---------------------\n"); + sb.append("Source 1: ").append(jobParams.getString("source1Key")).append("\n"); + sb.append("Source 2: ").append(jobParams.getString("source2Key")).append("\n"); + + if (compareJob != null) { + sb.append("Vocabulary 1 Version: ") + .append(compareJob.getVocab1Version() != null ? compareJob.getVocab1Version() : "N/A") + .append("\n"); + sb.append("Vocabulary 2 Version: ") + .append(compareJob.getVocab2Version() != null ? compareJob.getVocab2Version() : "N/A") + .append("\n"); + + // Handle multiple authors + appendAuthorsInfo(sb, compareJob); + + sb.append("Compare Source Codes: ") + .append(compareJob.getCompareSourceCodes() != null ? compareJob.getCompareSourceCodes() : false) + .append("\n"); + + // Add concept set IDs filter info + if (StringUtils.isNotBlank(compareJob.getConceptSetIds())) { + sb.append("Concept Set IDs Filter: ").append(compareJob.getConceptSetIds()).append("\n"); + } + } + + sb.append("Created Date From: ") + .append(jobParams.getString("createdDateFrom") != null ? jobParams.getString("createdDateFrom") : "N/A") + .append("\n"); + sb.append("Created Date To: ") + .append(jobParams.getString("createdDateTo") != null ? jobParams.getString("createdDateTo") : "N/A") + .append("\n"); + sb.append("Updated Date From: ") + .append(jobParams.getString("updatedDateFrom") != null ? jobParams.getString("updatedDateFrom") : "N/A") + .append("\n"); + sb.append("Updated Date To: ") + .append(jobParams.getString("updatedDateTo") != null ? jobParams.getString("updatedDateTo") : "N/A") + .append("\n"); + sb.append("Tags: ") + .append(jobParams.getString("tagsIds") != null ? 
jobParams.getString("tagsIds") : "N/A") + .append("\n"); + sb.append("\n"); + } + + /** + * Append authors information to the StringBuilder + */ + private void appendAuthorsInfo(StringBuilder sb, ConceptSetCompareJobEntity compareJob) { + if (compareJob.getAuthors() != null && !compareJob.getAuthors().isEmpty()) { + if (compareJob.getAuthors().size() == 1) { + // Single author - display on one line + ConceptSetCompareJobAuthorEntity authorEntity = compareJob.getAuthors().iterator().next(); + String authorName = getAuthorDisplayName(authorEntity); + sb.append("Author: ").append(authorName).append("\n"); + } else { + // Multiple authors - display as list + sb.append("Authors (").append(compareJob.getAuthors().size()).append("):\n"); + List authorNames = compareJob.getAuthors().stream() + .map(this::getAuthorDisplayName) + .sorted() + .collect(Collectors.toList()); + + for (String authorName : authorNames) { + sb.append(" - ").append(authorName).append("\n"); + } + } + } else { + sb.append("Author: N/A\n"); + } + } + + /** + * Get display name for an author (name or login) + */ + private String getAuthorDisplayName(ConceptSetCompareJobAuthorEntity authorEntity) { + if (authorEntity == null || authorEntity.getUser() == null) { + return "Unknown"; + } + + String name = authorEntity.getUser().getName(); + String login = authorEntity.getUser().getLogin(); + + if (name != null && !name.trim().isEmpty()) { + return String.format("%s (%s)", name, login); + } else { + return login; + } + } + + private void appendResultsSummary(StringBuilder sb, ConceptSetCompareJobEntity compareJob, + List diffs) { + sb.append("RESULTS SUMMARY\n"); + sb.append("---------------\n"); + + if (compareJob != null) { + sb.append("Concept Sets Analyzed: ") + .append(compareJob.getConceptSetsAnalyzed() != null ? compareJob.getConceptSetsAnalyzed() : 0) + .append("\n"); + sb.append("Concept Sets with Differences: ") + .append(compareJob.getConceptSetsWithDiffs() != null ? 
compareJob.getConceptSetsWithDiffs() : 0) + .append("\n\n"); + } + + if (diffs != null && !diffs.isEmpty()) { + ComparisonStatistics stats = new ComparisonStatistics(diffs); + + sb.append("Total Differences Found: ").append(stats.getTotalDiffs()).append("\n"); + sb.append(" - Included Concept Differences: ").append(stats.getIncludedConceptDiffs()).append("\n"); + sb.append(" - Source Code Differences: ").append(stats.getSourceCodeDiffs()).append("\n"); + sb.append("Unique Concepts with Differences: ").append(stats.getUniqueConcepts()).append("\n"); + sb.append("\n"); + sb.append("Mismatch Counts:\n"); + sb.append(" - Name Mismatches: ").append(stats.getNameMismatches()).append("\n"); + sb.append(" - Standard Concept Mismatches: ").append(stats.getStandardConceptMismatches()).append("\n"); + sb.append(" - Invalid Reason Mismatches: ").append(stats.getInvalidReasonMismatches()).append("\n"); + sb.append(" - Concept Code Mismatches: ").append(stats.getConceptCodeMismatches()).append("\n"); + sb.append(" - Domain ID Mismatches: ").append(stats.getDomainIdMismatches()).append("\n"); + sb.append(" - Vocabulary ID Mismatches: ").append(stats.getVocabularyIdMismatches()).append("\n"); + sb.append(" - Concept Class ID Mismatches: ").append(stats.getConceptClassIdMismatches()).append("\n"); + sb.append(" - Valid Start Date Mismatches: ").append(stats.getValidStartDateMismatches()).append("\n"); + sb.append(" - Valid End Date Mismatches: ").append(stats.getValidEndDateMismatches()).append("\n"); + } else { + sb.append("No differences found between the two sources.\n"); + } + sb.append("\n"); + } + + private void appendReportStructure(StringBuilder sb, ConceptSetCompareJobEntity compareJob) { + sb.append("REPORT STRUCTURE\n"); + sb.append("----------------\n"); + sb.append("summary.txt - Overall statistics summary\n"); + sb.append("README.txt - Overall log and runtime statistics\n"); + sb.append("all_included_codes_diff.csv - A diff report file for all of the affected concept sets in a single file – for Included Codes\n"); + sb.append("all_source_codes_diff.csv - A diff report file for all of the affected concept sets in a single file – for Included Source Codes\n"); + sb.append("concept_set__diff.csv - A diff report file per concept set – for Included Codes\n"); + + if (compareJob != null && compareJob.getCompareSourceCodes() != null && compareJob.getCompareSourceCodes()) { + sb.append("concept_set__source_codes_diff.csv - A diff report file per concept set – for Included Source Codes\n"); + } + sb.append("\n"); + } + + private String formatDuration(Duration duration) { + long hours = duration.toHours(); + long minutes = duration.toMinutes() % 60; + long seconds = duration.getSeconds() % 60; + + if (hours > 0) { + return String.format("%dh %dm %ds", hours, minutes, seconds); + } else if (minutes > 0) { + return String.format("%dm %ds", minutes, seconds); + } else { + return String.format("%ds", seconds); + } + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/job/artifact/cscompare/SummaryFileGenerator.java b/src/main/java/org/ohdsi/webapi/job/artifact/cscompare/SummaryFileGenerator.java new file mode 100644 index 0000000000..47d8fcbfc4 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/job/artifact/cscompare/SummaryFileGenerator.java @@ -0,0 +1,268 @@ +package org.ohdsi.webapi.job.artifact.cscompare; + +import org.ohdsi.webapi.conceptset.ConceptSet; +import org.ohdsi.webapi.conceptset.ConceptSetRepository; +import 
org.ohdsi.webapi.service.cscompare.entity.ConceptSetCompareJobDiffEntity; +import org.ohdsi.webapi.service.cscompare.entity.ConceptSetCompareJobEntity; +import org.ohdsi.webapi.service.cscompare.entity.ConceptSetCompareJobStatsEntity; +import org.ohdsi.webapi.service.cscompare.repository.ConceptSetCompareJobStatsRepository; +import org.ohdsi.webapi.util.GenericFileWriter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Component; + +import java.io.IOException; +import java.nio.file.Path; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; + +/** + * Generates summary.txt statistics file for comparison reports + */ +@Component +public class SummaryFileGenerator { + + private static final Logger logger = LoggerFactory.getLogger(SummaryFileGenerator.class); + + private final GenericFileWriter fileWriter; + private final ConceptSetRepository conceptSetRepository; + private final ConceptSetCompareJobStatsRepository compareJobStatsRepository; + + public SummaryFileGenerator( + GenericFileWriter fileWriter, + ConceptSetRepository conceptSetRepository, + ConceptSetCompareJobStatsRepository compareJobStatsRepository) { + this.fileWriter = fileWriter; + this.conceptSetRepository = conceptSetRepository; + this.compareJobStatsRepository = compareJobStatsRepository; + } + + public void generate(Path workDir, ConceptSetCompareJobEntity compareJob, + List diffs) throws IOException { + + StringBuilder summary = new StringBuilder(); + + appendHeader(summary); + appendOverallStatistics(summary, compareJob, diffs); + appendConceptSetMembershipStatistics(summary, diffs); + appendMismatchStatistics(summary, diffs); + appendPerConceptSetBreakdown(summary, compareJob, diffs); + + Path summaryPath = workDir.resolve("summary.txt"); + fileWriter.writeTextFile(summaryPath, pw -> pw.print(summary.toString())); + + logger.debug("Created summary file at {}", summaryPath); + } + + private void appendHeader(StringBuilder sb) { + sb.append("Concept Set Batch Comparison - Statistical Summary\n"); + sb.append("==================================================\n\n"); + } + + private void appendOverallStatistics(StringBuilder sb, ConceptSetCompareJobEntity compareJob, + List diffs) { + sb.append("OVERALL STATISTICS\n"); + sb.append("------------------\n"); + + sb.append("Concept Sets Analyzed: ") + .append(compareJob.getConceptSetsAnalyzed() != null ? compareJob.getConceptSetsAnalyzed() : 0) + .append("\n"); + sb.append("Concept Sets with Differences: ") + .append(compareJob.getConceptSetsWithDiffs() != null ? 
compareJob.getConceptSetsWithDiffs() : 0) + .append("\n\n"); + + ComparisonStatistics stats = new ComparisonStatistics(diffs); + + sb.append("Total Difference Records: ").append(stats.getTotalDiffs()).append("\n"); + sb.append(" - Included Concept Differences: ").append(stats.getIncludedConceptDiffs()).append("\n"); + sb.append(" - Source Code Differences: ").append(stats.getSourceCodeDiffs()).append("\n\n"); + } + + private void appendConceptSetMembershipStatistics(StringBuilder sb, List diffs) { + sb.append("CONCEPT SET MEMBERSHIP DIFFERENCES\n"); + sb.append("----------------------------------\n"); + + ComparisonStatistics stats = new ComparisonStatistics(diffs); + + sb.append("Concepts in CS1 Only: ").append(stats.getCs1Only()).append("\n"); + sb.append("Concepts in CS2 Only: ").append(stats.getCs2Only()).append("\n"); + sb.append("Concepts in Both CS: ").append(stats.getBothCS()).append("\n\n"); + } + + private void appendMismatchStatistics(StringBuilder sb, List diffs) { + sb.append("ATTRIBUTE MISMATCH STATISTICS\n"); + sb.append("-----------------------------\n"); + + ComparisonStatistics stats = new ComparisonStatistics(diffs); + + sb.append("Name Mismatches: ").append(stats.getNameMismatches()).append("\n"); + sb.append("Standard Concept Mismatches: ").append(stats.getStandardConceptMismatches()).append("\n"); + sb.append("Invalid Reason Mismatches: ").append(stats.getInvalidReasonMismatches()).append("\n"); + sb.append("Concept Code Mismatches: ").append(stats.getConceptCodeMismatches()).append("\n"); + sb.append("Domain ID Mismatches: ").append(stats.getDomainIdMismatches()).append("\n"); + sb.append("Vocabulary ID Mismatches: ").append(stats.getVocabularyIdMismatches()).append("\n"); + sb.append("Concept Class ID Mismatches: ").append(stats.getConceptClassIdMismatches()).append("\n"); + sb.append("Valid Start Date Mismatches: ").append(stats.getValidStartDateMismatches()).append("\n"); + sb.append("Valid End Date Mismatches: ").append(stats.getValidEndDateMismatches()).append("\n\n"); + } + + private void appendPerConceptSetBreakdown(StringBuilder sb, ConceptSetCompareJobEntity compareJob, + List diffs) { + // Get all stats for this compare job + List allStats = + compareJobStatsRepository.findByCompareJobId(compareJob.getId()); + + // Create lookup map by concept set ID + Map statsMap = allStats.stream() + .collect(Collectors.toMap( + ConceptSetCompareJobStatsEntity::getConceptSetId, + stats -> stats + )); + + // Group diffs by concept set ID + Map includedConceptSetCounts = diffs.stream() + .filter(d -> d.getIsSourceCode() == null || !d.getIsSourceCode()) + .collect(Collectors.groupingBy( + ConceptSetCompareJobDiffEntity::getConceptSetId, + Collectors.counting() + )); + + // Included concepts breakdown + sb.append("PER CONCEPT SET BREAKDOWN - INCLUDED CONCEPTS\n"); + sb.append("=============================================\n\n"); + + appendConceptSetDetailedTable(sb, includedConceptSetCounts, statsMap, false); + + // Source codes breakdown (if applicable) + if (compareJob.getCompareSourceCodes() != null && compareJob.getCompareSourceCodes()) { + sb.append("\n\n"); + sb.append("PER CONCEPT SET BREAKDOWN - SOURCE CODES\n"); + sb.append("========================================\n\n"); + + Map sourceCodeConceptSetCounts = diffs.stream() + .filter(d -> d.getIsSourceCode() != null && d.getIsSourceCode()) + .collect(Collectors.groupingBy( + ConceptSetCompareJobDiffEntity::getConceptSetId, + Collectors.counting() + )); + + appendConceptSetDetailedTable(sb, sourceCodeConceptSetCounts, 
statsMap, true); + } + } + + private void appendConceptSetDetailedTable( + StringBuilder sb, + Map conceptSetDiffCounts, + Map statsMap, + boolean isSourceCode) { + + // Table header + sb.append(String.format("%-10s %-40s %-15s %-15s %-12s\n", + "CS ID", "Name", "CS1 Count", "CS2 Count", "Diff Count")); + sb.append(String.format("%-10s %-40s %-15s %-15s %-12s\n", + "-----", "----", "---------", "---------", "----------")); + + // Get all unique concept set IDs that have either diffs or stats + Set allConceptSetIds = new HashSet<>(); + allConceptSetIds.addAll(conceptSetDiffCounts.keySet()); + allConceptSetIds.addAll(statsMap.keySet()); + + // Sort by diff count (descending), then by concept set ID + List sortedIds = allConceptSetIds.stream() + .sorted((id1, id2) -> { + Long count1 = conceptSetDiffCounts.getOrDefault(id1, 0L); + Long count2 = conceptSetDiffCounts.getOrDefault(id2, 0L); + int countCompare = count2.compareTo(count1); // Descending + return countCompare != 0 ? countCompare : id1.compareTo(id2); + }) + .collect(Collectors.toList()); + + // Generate table rows + for (Integer csId : sortedIds) { + ConceptSetCompareJobStatsEntity stats = statsMap.get(csId); + Long diffCount = conceptSetDiffCounts.getOrDefault(csId, 0L); + String csName = getConceptSetName(csId); + + int cs1Count; + int cs2Count; + + if (isSourceCode) { + cs1Count = stats != null ? stats.getCs1IncludedSourceCodesCount() : 0; + cs2Count = stats != null ? stats.getCs2IncludedSourceCodesCount() : 0; + } else { + cs1Count = stats != null ? stats.getCs1IncludedConceptsCount() : 0; + cs2Count = stats != null ? stats.getCs2IncludedConceptsCount() : 0; + } + + sb.append(String.format("%-10d %-40s %-15d %-15d %-12d\n", + csId, + truncate(csName, 40), + cs1Count, + cs2Count, + diffCount)); + } + + // Summary statistics + sb.append("\n"); + sb.append("Summary:\n"); + sb.append("--------\n"); + + if (statsMap.isEmpty()) { + sb.append("No statistics available\n"); + } else { + long totalCs1Count = statsMap.values().stream() + .mapToLong(s -> isSourceCode ? s.getCs1IncludedSourceCodesCount() : s.getCs1IncludedConceptsCount()) + .sum(); + + long totalCs2Count = statsMap.values().stream() + .mapToLong(s -> isSourceCode ? s.getCs2IncludedSourceCodesCount() : s.getCs2IncludedConceptsCount()) + .sum(); + + long totalDiffs = conceptSetDiffCounts.values().stream() + .mapToLong(Long::longValue) + .sum(); + + sb.append(String.format("Total %s in CS1: %d\n", + isSourceCode ? "Source Codes" : "Concepts", totalCs1Count)); + sb.append(String.format("Total %s in CS2: %d\n", + isSourceCode ? "Source Codes" : "Concepts", totalCs2Count)); + sb.append(String.format("Total Differences: %d\n", totalDiffs)); + + // Calculate average counts + int conceptSetCount = statsMap.size(); + if (conceptSetCount > 0) { + double avgCs1 = (double) totalCs1Count / conceptSetCount; + double avgCs2 = (double) totalCs2Count / conceptSetCount; + + sb.append(String.format("Average %s per Concept Set in CS1: %.1f\n", + isSourceCode ? "Source Codes" : "Concepts", avgCs1)); + sb.append(String.format("Average %s per Concept Set in CS2: %.1f\n", + isSourceCode ? 
"Source Codes" : "Concepts", avgCs2)); + } + } + } + + private String getConceptSetName(Integer conceptSetId) { + try { + ConceptSet conceptSet = conceptSetRepository.findById(conceptSetId); + return Optional.ofNullable(conceptSet).map(ConceptSet::getName).orElse("Unknown"); + } catch (Exception e) { + logger.warn("Failed to retrieve concept set name for ID {}", conceptSetId, e); + return "Unknown"; + } + } + + private String truncate(String value, int maxLength) { + if (value == null) { + return ""; + } + if (value.length() <= maxLength) { + return value; + } + return value.substring(0, maxLength - 3) + "..."; + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/pathway/PathwayController.java b/src/main/java/org/ohdsi/webapi/pathway/PathwayController.java index dcf53b5cc5..5b5d692d0d 100644 --- a/src/main/java/org/ohdsi/webapi/pathway/PathwayController.java +++ b/src/main/java/org/ohdsi/webapi/pathway/PathwayController.java @@ -33,7 +33,7 @@ import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.RequestBody; -import javax.transaction.Transactional; +import org.springframework.transaction.annotation.Transactional; import javax.ws.rs.*; import javax.ws.rs.core.MediaType; import java.util.ArrayList; diff --git a/src/main/java/org/ohdsi/webapi/prediction/PredictionServiceImpl.java b/src/main/java/org/ohdsi/webapi/prediction/PredictionServiceImpl.java index 2795daf6c1..042c0a070d 100644 --- a/src/main/java/org/ohdsi/webapi/prediction/PredictionServiceImpl.java +++ b/src/main/java/org/ohdsi/webapi/prediction/PredictionServiceImpl.java @@ -45,7 +45,7 @@ import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; -import javax.transaction.Transactional; +import org.springframework.transaction.annotation.Transactional; import javax.ws.rs.InternalServerErrorException; import java.io.*; import java.math.BigDecimal; diff --git a/src/main/java/org/ohdsi/webapi/service/AbstractDaoService.java b/src/main/java/org/ohdsi/webapi/service/AbstractDaoService.java index bf7a4877e8..a850862127 100644 --- a/src/main/java/org/ohdsi/webapi/service/AbstractDaoService.java +++ b/src/main/java/org/ohdsi/webapi/service/AbstractDaoService.java @@ -72,6 +72,8 @@ import java.util.Optional; import java.util.Properties; import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.TimeUnit; import java.util.function.BiFunction; import java.util.function.Function; import java.util.stream.Collectors; @@ -116,6 +118,23 @@ public abstract class AbstractDaoService extends AbstractAdminService { @Autowired protected UserRepository userRepository; + private static class DataSourceCacheEntry { + final DriverManagerDataSource dataSource; + final long timestamp; + + DataSourceCacheEntry(DriverManagerDataSource dataSource) { + this.dataSource = dataSource; + this.timestamp = System.currentTimeMillis(); + } + + boolean isExpired() { + return System.currentTimeMillis() - timestamp > TimeUnit.HOURS.toMillis(1); + } + } + + private final Map dataSourceCache = new ConcurrentHashMap<>(); + private static final int MAX_CACHE_SIZE = 100; + public static final List INVALIDATE_STATUSES = new ArrayList() {{ add(GenerationStatus.PENDING); add(GenerationStatus.RUNNING); @@ -186,14 +205,14 @@ public JdbcTemplate getJdbcTemplate() { public CancelableJdbcTemplate getSourceJdbcTemplate(Source source) { - DriverManagerDataSource dataSource = getDriverManagerDataSource(source); + DriverManagerDataSource dataSource = 
getCachedDriverManagerDataSource(source); CancelableJdbcTemplate jdbcTemplate = new CancelableJdbcTemplate(dataSource); jdbcTemplate.setSuppressApiException(suppressApiException); return jdbcTemplate; } public T executeInTransaction(Source source, Function> callbackFunction) { - DriverManagerDataSource dataSource = getDriverManagerDataSource(source); + DriverManagerDataSource dataSource = getCachedDriverManagerDataSource(source); CancelableJdbcTemplate jdbcTemplate = new CancelableJdbcTemplate(dataSource); jdbcTemplate.setSuppressApiException(suppressApiException); DataSourceTransactionManager transactionManager = new DataSourceTransactionManager(dataSource); @@ -203,8 +222,18 @@ public T executeInTransaction(Source source, Function= MAX_CACHE_SIZE) { + String oldestKey = dataSourceCache.keySet().iterator().next(); + dataSourceCache.remove(oldestKey); + } + dataSourceCache.put(sourceKey, new DataSourceCacheEntry(dataSource)); + } return dataSource; } @@ -489,4 +526,16 @@ protected List listByTags(List entry.getValue().isExpired()); + } + + protected void cleanSourceCache() { + dataSourceCache.clear(); + } } diff --git a/src/main/java/org/ohdsi/webapi/service/CachingDataSourceAbstractDaoService.java b/src/main/java/org/ohdsi/webapi/service/CachingDataSourceAbstractDaoService.java new file mode 100644 index 0000000000..e21ff549e5 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/service/CachingDataSourceAbstractDaoService.java @@ -0,0 +1,122 @@ +package org.ohdsi.webapi.service; + +import com.odysseusinc.arachne.execution_engine_common.api.v1.dto.DataSourceUnsecuredDTO; +import com.odysseusinc.datasourcemanager.krblogin.KerberosService; +import com.odysseusinc.datasourcemanager.krblogin.KrbConfig; +import com.odysseusinc.datasourcemanager.krblogin.RuntimeServiceMode; +import com.zaxxer.hikari.HikariConfig; +import com.zaxxer.hikari.HikariDataSource; +import org.ohdsi.webapi.source.Source; +import org.ohdsi.webapi.util.CancelableJdbcTemplate; +import org.ohdsi.webapi.util.DataSourceDTOParser; +import org.ohdsi.webapi.source.SourceHelper; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.stereotype.Service; + +import javax.annotation.PreDestroy; +import java.io.File; +import java.io.IOException; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; + +@Service +public abstract class CachingDataSourceAbstractDaoService { + + @Value("${spring.datasource.hikari.connection-test-query}") + private String testQuery; + @Value("${spring.datasource.hikari.connection-test-query-timeout}") + private Long validationTimeout; + @Value("${spring.datasource.hikari.maximum-pool-size}") + private int maxPoolSize; + @Value("${spring.datasource.hikari.minimum-idle}") + private int minPoolIdle; + @Value("${spring.datasource.hikari.connection-timeout}") + private int connectionTimeout; + @Value("${spring.datasource.hikari.register-mbeans}") + private boolean registerMbeans; + @Value("${spring.datasource.hikari.mbean-name}") + private String mbeanName; + + @Autowired + private SourceHelper sourceHelper; + + @Autowired + private KerberosService kerberosService; + + // Cache: key is source ID (or name), value is HikariDataSource + private final ConcurrentMap dataSourceCache = new ConcurrentHashMap<>(); + + /** + * Returns a JdbcTemplate for the given Source, using a cached HikariDataSource. 
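+ * Pool sizing and validation settings come from the spring.datasource.hikari.* properties injected above.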
+ * If not cached, creates and caches a new HikariDataSource. + */ + public JdbcTemplate getSourceJdbcTemplate(Source source) { + String cacheKey = getSourceCacheKey(source); + + HikariDataSource hikariDataSource = dataSourceCache.computeIfAbsent(cacheKey, key -> { + DataSourceUnsecuredDTO dataSourceData = DataSourceDTOParser.parseDTO(source); + + if (dataSourceData.getUseKerberos()) { + loginToKerberos(dataSourceData); + } + + String connectionString = sourceHelper.getSourceConnectionString(source); + + HikariConfig config = new HikariConfig(); + config.setJdbcUrl(connectionString); + config.setConnectionTestQuery(testQuery); + config.setValidationTimeout(validationTimeout); + config.setMaximumPoolSize(maxPoolSize); + config.setMinimumIdle(minPoolIdle); + config.setConnectionTimeout(connectionTimeout); + config.setRegisterMbeans(registerMbeans); + config.setPoolName(mbeanName); + + if (dataSourceData.getUsername() != null && dataSourceData.getPassword() != null) { + config.setUsername(dataSourceData.getUsername()); + config.setPassword(dataSourceData.getPassword()); + } + + // Optionally set schema, etc. if needed + + return new HikariDataSource(config); + }); + + // You can use CancelableJdbcTemplate if needed, or just JdbcTemplate + return new JdbcTemplate(hikariDataSource); + } + + /** + * Generates a unique cache key for a Source. + * You can use source.getSourceKey(), source.getSourceId(), or source.getSourceName(). + */ + private String getSourceCacheKey(Source source) { + // Prefer a unique, immutable identifier + return String.valueOf(source.getSourceId()); + } + + private void loginToKerberos(DataSourceUnsecuredDTO dataSourceData) { + File temporaryDir = com.google.common.io.Files.createTempDir(); + try { + kerberosService.runKinit(dataSourceData, RuntimeServiceMode.SINGLE, temporaryDir); + } catch (RuntimeException | IOException e) { + // log error if needed + } + try { + org.apache.commons.io.FileUtils.forceDelete(temporaryDir); + } catch (IOException e) { + // log warning if needed + } + } + + /** + * Clean up all cached DataSources on shutdown. 
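+ * Invoked automatically via @PreDestroy when the Spring context shuts down.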
+ */ + @PreDestroy + public void closeAllDataSources() { + dataSourceCache.values().forEach(HikariDataSource::close); + dataSourceCache.clear(); + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/service/CohortDefinitionService.java b/src/main/java/org/ohdsi/webapi/service/CohortDefinitionService.java index 815bc7edfe..f852f7e994 100644 --- a/src/main/java/org/ohdsi/webapi/service/CohortDefinitionService.java +++ b/src/main/java/org/ohdsi/webapi/service/CohortDefinitionService.java @@ -106,7 +106,7 @@ import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; import javax.servlet.ServletContext; -import javax.transaction.Transactional; +import org.springframework.transaction.annotation.Transactional; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.DefaultValue; diff --git a/src/main/java/org/ohdsi/webapi/service/ConceptSetExpressionResolver.java b/src/main/java/org/ohdsi/webapi/service/ConceptSetExpressionResolver.java new file mode 100644 index 0000000000..11c2610c59 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/service/ConceptSetExpressionResolver.java @@ -0,0 +1,144 @@ +package org.ohdsi.webapi.service; + +import org.ohdsi.circe.vocabulary.ConceptSetExpression; +import org.ohdsi.vocabulary.Concept; +import org.ohdsi.webapi.conceptset.ConceptSetItem; +import org.ohdsi.webapi.conceptset.ConceptSetItemRepository; +import org.ohdsi.webapi.service.vocabulary.ConceptSetStrategy; +import org.ohdsi.webapi.source.Source; +import org.ohdsi.webapi.source.SourceDaimon; +import org.ohdsi.webapi.util.PreparedSqlRender; +import org.ohdsi.webapi.util.PreparedStatementRenderer; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.jdbc.core.RowCallbackHandler; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.stereotype.Service; + +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; + +@Service +public class ConceptSetExpressionResolver extends CachingDataSourceAbstractDaoService { + + @Autowired + private ConceptSetItemRepository conceptSetItemRepository; + + public ConceptSetItemRepository getConceptSetItemRepository() { + return conceptSetItemRepository; + } + + private final RowMapper rowMapper = (resultSet, arg1) -> { + final Concept concept = new Concept(); + concept.conceptId = resultSet.getLong("CONCEPT_ID"); + concept.conceptCode = resultSet.getString("CONCEPT_CODE"); + concept.conceptName = resultSet.getString("CONCEPT_NAME"); + concept.standardConcept = resultSet.getString("STANDARD_CONCEPT"); + concept.invalidReason = resultSet.getString("INVALID_REASON"); + concept.conceptClassId = resultSet.getString("CONCEPT_CLASS_ID"); + concept.vocabularyId = resultSet.getString("VOCABULARY_ID"); + concept.domainId = resultSet.getString("DOMAIN_ID"); + concept.validStartDate = resultSet.getDate("VALID_START_DATE"); + concept.validEndDate = resultSet.getDate("VALID_END_DATE"); + return concept; + }; + + public ConceptSetExpression getConceptSetExpression(Source source, int conceptSetId) { + HashMap map = new HashMap<>(); + + // create our expression to return + ConceptSetExpression expression = new ConceptSetExpression(); + ArrayList expressionItems = new ArrayList<>(); + + List repositoryItems = new ArrayList<>(getConceptSetItemRepository().findAllByConceptSetId(conceptSetId)); + + + // collect the unique concept 
IDs so we can load the concept object later. + for (ConceptSetItem csi : repositoryItems) { + map.put(csi.getConceptId(), null); + } + + // lookup the concepts we need information for + long[] identifiers = new long[map.size()]; + int identifierIndex = 0; + for (Long identifier : map.keySet()) { + identifiers[identifierIndex] = identifier; + identifierIndex++; + } + +// String sourceKey; +// if (Objects.isNull(sourceInfo)) { +// sourceKey = sourceService.getPriorityVocabularySource().getSourceKey(); +// } else { +// sourceKey = sourceInfo.sourceKey; +// } + + Collection<Concept> concepts = executeIdentifierLookup(source, identifiers); + + for (Concept concept : concepts) { + map.put(concept.conceptId, concept); // associate the concept object to the conceptID in the map + } + + // put the concept information into the expression along with the concept set item information + for (ConceptSetItem repositoryItem : repositoryItems) { + ConceptSetExpression.ConceptSetItem currentItem = new ConceptSetExpression.ConceptSetItem(); + currentItem.concept = map.get(repositoryItem.getConceptId()); + currentItem.includeDescendants = (repositoryItem.getIncludeDescendants() == 1); + currentItem.includeMapped = (repositoryItem.getIncludeMapped() == 1); + currentItem.isExcluded = (repositoryItem.getIsExcluded() == 1); + if (currentItem.concept != null) { + expressionItems.add(currentItem); + } + } + expression.items = expressionItems.toArray(new ConceptSetExpression.ConceptSetItem[0]); // this will return a new array + + return expression; + } + + public Collection<Long> resolveConceptSetExpression(Source source, ConceptSetExpression conceptSetExpression) { + PreparedStatementRenderer psr = new ConceptSetStrategy(conceptSetExpression).prepareStatement(source, null); + final ArrayList<Long> identifiers = new ArrayList<>(); + getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), new RowCallbackHandler() { + @Override + public void processRow(ResultSet rs) throws SQLException { + identifiers.add(rs.getLong("CONCEPT_ID")); + } + }); + + return identifiers; + } + + public Collection<Concept> executeIdentifierLookup(Source source, long[] identifiers) { + Collection<Concept> concepts = new ArrayList<>(); + if (identifiers.length == 0) { + return concepts; + } else { + // Determine if we need to chunk up the request based on the parameter + // limit of the source RDBMS + int parameterLimit = PreparedSqlRender.getParameterLimit(source); + if (parameterLimit > 0 && identifiers.length > parameterLimit) { + concepts = executeIdentifierLookup(source, Arrays.copyOfRange(identifiers, parameterLimit, identifiers.length)); + identifiers = Arrays.copyOfRange(identifiers, 0, parameterLimit); + } + + PreparedStatementRenderer psr = prepareExecuteIdentifierLookup(identifiers, source); + return concepts.addAll(getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), this.rowMapper)) + ? 
concepts : new ArrayList<>(); + } + } + + protected PreparedStatementRenderer prepareExecuteIdentifierLookup(long[] identifiers, Source source) { + + String sqlPath = "/resources/vocabulary/sql/lookupIdentifiers.sql"; + String tqName = "CDM_schema"; + String tqValue = source.getTableQualifier(SourceDaimon.DaimonType.Vocabulary); + + return new PreparedStatementRenderer(source, sqlPath, tqName, tqValue, "identifiers", identifiers); + } + + +} diff --git a/src/main/java/org/ohdsi/webapi/service/ConceptSetService.java b/src/main/java/org/ohdsi/webapi/service/ConceptSetService.java index 84059bc4a6..e43ee87dfa 100644 --- a/src/main/java/org/ohdsi/webapi/service/ConceptSetService.java +++ b/src/main/java/org/ohdsi/webapi/service/ConceptSetService.java @@ -19,8 +19,10 @@ import java.util.*; import java.util.stream.Collectors; import java.util.stream.StreamSupport; +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; -import javax.transaction.Transactional; +import org.springframework.transaction.annotation.Transactional; import javax.ws.rs.*; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; @@ -29,7 +31,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import javax.cache.CacheManager; import javax.cache.configuration.MutableConfiguration; - +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.StringUtils; import org.apache.shiro.authz.UnauthorizedException; import org.ohdsi.circe.vocabulary.ConceptSetExpression; import org.ohdsi.vocabulary.Concept; @@ -43,8 +46,13 @@ import org.ohdsi.webapi.conceptset.dto.ConceptSetVersionFullDTO; import org.ohdsi.webapi.conceptset.annotation.ConceptSetAnnotation; import org.ohdsi.webapi.exception.ConceptNotExistException; +import org.ohdsi.webapi.job.JobExecutionResource; +import org.ohdsi.webapi.job.JobTemplate; import org.ohdsi.webapi.security.PermissionService; import org.ohdsi.webapi.service.annotations.SearchDataTransformer; +import org.ohdsi.webapi.service.cscompare.ConceptSetBatchCompareRequest; +import org.ohdsi.webapi.service.cscompare.ConceptSetBatchCompareTasklet; +import org.ohdsi.webapi.service.cscompare.ConceptSetFilterService; import org.ohdsi.webapi.service.dto.AnnotationDetailsDTO; import org.ohdsi.webapi.service.dto.ConceptSetDTO; import org.ohdsi.webapi.service.dto.SaveConceptSetAnnotationsRequest; @@ -70,7 +78,11 @@ import org.ohdsi.webapi.versioning.dto.VersionDTO; import org.ohdsi.webapi.versioning.dto.VersionUpdateDTO; import org.ohdsi.webapi.versioning.service.VersionService; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.JobParametersBuilder; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.autoconfigure.cache.JCacheManagerCustomizer; import org.springframework.cache.annotation.CacheEvict; @@ -79,6 +91,8 @@ import org.springframework.dao.EmptyResultDataAccessException; import org.springframework.stereotype.Component; +import static org.ohdsi.webapi.Constants.Params.JOB_NAME; + /** * Provides REST services for working with * concept sets. 
@@ -145,7 +159,19 @@ public void customize(CacheManager cacheManager) { @Autowired private ObjectMapper mapper; + @Autowired + private ConceptSetExpressionResolver conceptSetExpressionResolver; + + @Autowired + @Qualifier("conceptSetBatchCompareJob") + private Job conceptSetBatchCompareJob; + @Autowired + private JobTemplate jobTemplate; + + @Autowired + private ConceptSetFilterService filterService; + @Value("${security.defaultGlobalReadPermissions}") private boolean defaultGlobalReadPermissions; @@ -353,7 +379,6 @@ private ConceptSetExpression getConceptSetExpression(int id, Integer version, So * @summary DO NOT USE * @deprecated * @param id The concept set ID - * @param sourceKey The source key * @return The concept set expression */ @Deprecated @@ -621,7 +646,7 @@ public Collection getConceptSetGenerationInfo(@PathPar * @param id The concept set ID */ @DELETE - @Transactional(rollbackOn = Exception.class, dontRollbackOn = EmptyResultDataAccessException.class) + @Transactional(rollbackFor = Exception.class, noRollbackFor = EmptyResultDataAccessException.class) @Path("{id}") @CacheEvict(cacheNames = CachingSetup.CONCEPT_SET_LIST_CACHE, allEntries = true) public void deleteConceptSet(@PathParam("id") final int id) { @@ -1038,4 +1063,185 @@ public Response deleteConceptSetAnnotation(@PathParam("conceptSetId") final int return Response.ok().build(); } else throw new NotFoundException("Concept set annotation not found"); } + + @POST + @Path("compare-batch") + @Produces(MediaType.APPLICATION_JSON) + @Consumes(MediaType.APPLICATION_JSON) + @Transactional(readOnly = true) + public JobExecutionResource queueConceptSetBatchCompareJob(ConceptSetBatchCompareRequest task) throws Exception { + if (task == null) { + log.warn("Received null ConceptSetBatchCompareRequest"); + return null; + } + + log.info("Starting Concept Set Batch Compare Job with parameters: " + + "source1Key={}, source2Key={}, createdDateFrom={}, createdDateTo={}, " + + "updatedDateFrom={}, updatedDateTo={}, tags={}, " + + "authors={}, compareSourceCodes={}, conceptSetIds={}", + task.getSource1Key(), + task.getSource2Key(), + task.getCreatedDateFrom(), + task.getCreatedDateTo(), + task.getUpdatedDateFrom(), + task.getUpdatedDateTo(), + task.getTags(), + task.getAuthors(), + task.isCompareSourceCodes(), + task.getConceptSetIds()); + + JobParametersBuilder builder = new JobParametersBuilder(); + builder.addString(JOB_NAME, "Running batch concept set compare"); + builder.addString("source1Key", task.getSource1Key()); + builder.addString("source2Key", task.getSource2Key()); + builder.addString("source1Version", vocabService.getInfo(task.getSource1Key()).version); + builder.addString("source2Version", vocabService.getInfo(task.getSource2Key()).version); + builder.addString("createdDateFrom", task.getCreatedDateFrom()); + builder.addString("createdDateTo", task.getCreatedDateTo()); + builder.addString("updatedDateFrom", task.getUpdatedDateFrom()); + builder.addString("updatedDateTo", task.getUpdatedDateTo()); + + List tags = task.getTags(); + if (CollectionUtils.isNotEmpty(tags)) { + String tagsIds = String.join(",", tags); + builder.addString("tagsIds", tagsIds); + log.debug("Tags filter applied: {}", tagsIds); + } else { + log.debug("No tags filter applied"); + } + + if (CollectionUtils.isNotEmpty(task.getAuthors())) { + String authorIds = task.getAuthors().stream() + .map(String::valueOf) + .collect(Collectors.joining(",")); + builder.addString("authorIds", authorIds); + log.debug("Author filter applied: {}", authorIds); + } + + if 
(CollectionUtils.isNotEmpty(task.getConceptSetIds())) { + String conceptSetIds = task.getConceptSetIds().stream() + .map(String::valueOf) + .collect(Collectors.joining(",")); + builder.addString("conceptSetIds", conceptSetIds); + log.debug("Concept set IDs filter applied: {}", conceptSetIds); + } else { + log.debug("No concept set IDs filter applied"); + } + + builder.addString("compareSourceCodes", Boolean.toString(task.isCompareSourceCodes())); + log.debug("Compare source codes: {}", task.isCompareSourceCodes()); + + final JobParameters jobParameters = builder.toJobParameters(); + + log.info("Launching conceptSetBatchCompareJob with job parameters: {}", jobParameters); + + JobExecutionResource result = this.jobTemplate.launch(conceptSetBatchCompareJob, jobParameters); + + log.info("Concept Set Batch Compare Job queued successfully with execution ID: {}", + result != null ? result.getExecutionId() : "unknown"); + + return result; + } + + /** + * Check how many concept sets match the specified filter criteria + * without running the actual batch comparison job. + * + * @summary Check concept set filter count + * @param filterRequest The filter criteria + * @return Count of matching concept sets + */ + @POST + @Path("check-filter-count") + @Produces(MediaType.APPLICATION_JSON) + @Consumes(MediaType.APPLICATION_JSON) + @Transactional(readOnly = true) + public Response checkFilterCount(ConceptSetBatchCompareRequest filterRequest) { + try { + if (filterRequest == null) { + log.warn("Received null filter request"); + return Response.status(Response.Status.BAD_REQUEST) + .entity("Filter request cannot be null") + .build(); + } + + log.info("Checking concept set filter count with parameters: " + + "createdDateFrom={}, createdDateTo={}, updatedDateFrom={}, updatedDateTo={}, " + + "tags={}, authors={}", + filterRequest.getCreatedDateFrom(), + filterRequest.getCreatedDateTo(), + filterRequest.getUpdatedDateFrom(), + filterRequest.getUpdatedDateTo(), + filterRequest.getTags(), + filterRequest.getAuthors()); + + // Build filter criteria + ConceptSetFilterService.ConceptSetFilterCriteria criteria = + buildFilterCriteriaFromRequest(filterRequest); + + // Get filtered concept sets + List conceptSets = filterService.filterConceptSets(criteria); + + int count = conceptSets.size(); + log.info("Found {} concept sets matching filter criteria", count); + + Map response = new HashMap<>(); + response.put("count", count); + + return Response.ok(response).build(); + + } catch (Exception e) { + log.error("Error checking filter count", e); + return Response.status(Response.Status.INTERNAL_SERVER_ERROR) + .entity("Error checking filter count: " + e.getMessage()) + .build(); + } + } + + /** + * Helper method to build filter criteria from batch compare request + */ + private ConceptSetFilterService.ConceptSetFilterCriteria buildFilterCriteriaFromRequest( + ConceptSetBatchCompareRequest request) { + + ConceptSetFilterService.ConceptSetFilterCriteria criteria = + new ConceptSetFilterService.ConceptSetFilterCriteria(); + + DateTimeFormatter formatter = DateTimeFormatter.ISO_LOCAL_DATE; + + if (StringUtils.isNotBlank(request.getCreatedDateFrom())) { + criteria.setCreatedFrom(LocalDate.parse(request.getCreatedDateFrom(), formatter)); + } + if (StringUtils.isNotBlank(request.getCreatedDateTo())) { + criteria.setCreatedTo(LocalDate.parse(request.getCreatedDateTo(), formatter)); + } + if (StringUtils.isNotBlank(request.getUpdatedDateFrom())) { + criteria.setUpdatedFrom(LocalDate.parse(request.getUpdatedDateFrom(), formatter)); 
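+ // Filter dates are parsed with DateTimeFormatter.ISO_LOCAL_DATE (yyyy-MM-dd); malformed values surface as DateTimeParseException.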
+ } + if (StringUtils.isNotBlank(request.getUpdatedDateTo())) { + criteria.setUpdatedTo(LocalDate.parse(request.getUpdatedDateTo(), formatter)); + } + + if (CollectionUtils.isNotEmpty(request.getTags())) { + List tagIds = request.getTags().stream() + .map(Integer::parseInt) + .collect(Collectors.toList()); + criteria.setTagIds(tagIds); + } + + // Handle multiple authors + if (CollectionUtils.isNotEmpty(request.getAuthors())) { + criteria.setAuthorIds(request.getAuthors()); + log.debug("Setting author filter with {} authors: {}", + request.getAuthors().size(), request.getAuthors()); + } + + if (CollectionUtils.isNotEmpty(request.getConceptSetIds())) { + criteria.setConceptSetIds(request.getConceptSetIds()); + log.debug("Setting concept set ID filter with {} IDs: {}", + request.getConceptSetIds().size(), request.getConceptSetIds()); + } + + return criteria; + } } \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/service/JobService.java b/src/main/java/org/ohdsi/webapi/service/JobService.java index 129fc2ab7c..d227933e05 100644 --- a/src/main/java/org/ohdsi/webapi/service/JobService.java +++ b/src/main/java/org/ohdsi/webapi/service/JobService.java @@ -5,6 +5,8 @@ import org.ohdsi.webapi.job.JobInstanceResource; import org.ohdsi.webapi.job.JobTemplate; import org.ohdsi.webapi.job.JobUtils; +import org.ohdsi.webapi.job.artifact.JobArtifactGenerator; +import org.ohdsi.webapi.job.artifact.JobArtifactGeneratorFactory; import org.ohdsi.webapi.util.PreparedStatementRenderer; import org.springframework.batch.admin.service.SearchableJobExecutionDao; import org.springframework.batch.core.BatchStatus; @@ -63,14 +65,17 @@ public class JobService extends AbstractDaoService { private final JobTemplate jobTemplate; - private Map jobMap = new HashMap<>(); + private final JobArtifactGeneratorFactory artifactGeneratorFactory; - public JobService(JobExplorer jobExplorer, SearchableJobExecutionDao jobExecutionDao, JobRepository jobRepository, JobTemplate jobTemplate) { + private Map jobMap = new HashMap<>(); + + public JobService(JobExplorer jobExplorer, SearchableJobExecutionDao jobExecutionDao, JobRepository jobRepository, JobTemplate jobTemplate, JobArtifactGeneratorFactory artifactGeneratorFactory) { this.jobExplorer = jobExplorer; this.jobExecutionDao = jobExecutionDao; this.jobRepository = jobRepository; this.jobTemplate = jobTemplate; + this.artifactGeneratorFactory = artifactGeneratorFactory; } /** @@ -106,7 +111,7 @@ public JobExecutionResource findJobByName(@PathParam("jobName") final String job final Optional jobExecution = jobExplorer.findRunningJobExecutions(jobType).stream() .filter(job -> jobName.equals(job.getJobParameters().getString(Constants.Params.JOB_NAME))) .findFirst(); - return jobExecution.isPresent() ? 
JobUtils.toJobExecutionResource(jobExecution.get()) : null; + return jobExecution.map(JobUtils::toJobExecutionResource).orElse(null); } /** @@ -194,25 +199,39 @@ public Page list(@QueryParam("jobName") final String jobNa String tqName = "ohdsi_schema"; String tqValue = getOhdsiSchema(); PreparedStatementRenderer psr = new PreparedStatementRenderer(null, sqlPath, tqName, tqValue); - resources = getJdbcTemplate().query(psr.getSql(), psr.getSetter(), new ResultSetExtractor>() { - @Override - public List extractData(ResultSet rs) throws SQLException, DataAccessException { - - return JobUtils.toJobExecutionResource(rs); - } + resources = getJdbcTemplate().query(psr.getSql(), psr.getSetter(), rs -> { + return JobUtils.toJobExecutionResource(rs, this::checkArtifactAvailability); }); return new PageImpl<>(resources, new PageRequest(0, pageSize), resources.size()); } else { resources = new ArrayList<>(); for (final JobExecution jobExecution : (jobName == null ? this.jobExecutionDao.getJobExecutions(pageIndex, pageSize) : this.jobExecutionDao.getJobExecutions(jobName, pageIndex, pageSize))) { - resources.add(JobUtils.toJobExecutionResource(jobExecution)); + JobExecutionResource resource = JobUtils.toJobExecutionResource(jobExecution, this::checkArtifactAvailability); + resources.add(resource); } return new PageImpl<>(resources, new PageRequest(pageIndex, pageSize), this.jobExecutionDao.countJobExecutions()); } } + private Boolean checkArtifactAvailability(JobExecution jobExecution) { + try { + // Only check for completed jobs + if (!"COMPLETED".equals(jobExecution.getStatus().toString())) { + return false; + } + JobArtifactGenerator generator = artifactGeneratorFactory.getGenerator(jobExecution); + if (generator == null) { + return false; + } + return generator.hasArtifact(jobExecution); + } catch (Exception e) { + log.warn("Failed to check artifact availability for execution {}", + jobExecution.getId(), e); + return false; + } + } public void stopJob(JobExecution jobExecution, Job job) { diff --git a/src/main/java/org/ohdsi/webapi/service/VocabularyService.java b/src/main/java/org/ohdsi/webapi/service/VocabularyService.java index ffd7a5c361..33e44e6240 100644 --- a/src/main/java/org/ohdsi/webapi/service/VocabularyService.java +++ b/src/main/java/org/ohdsi/webapi/service/VocabularyService.java @@ -30,8 +30,10 @@ import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; +import com.google.common.primitives.Longs; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.math.NumberUtils; import org.ohdsi.analysis.Utils; import org.ohdsi.circe.cohortdefinition.ConceptSet; @@ -50,6 +52,8 @@ import org.ohdsi.webapi.service.cscompare.CompareArbitraryDto; import org.ohdsi.webapi.service.cscompare.ConceptSetCompareService; import org.ohdsi.webapi.service.cscompare.ExpressionFileUtils; +import org.ohdsi.webapi.service.cscompare.ExpressionType; +import org.ohdsi.webapi.service.dto.CompareConceptSetsResponse; import org.ohdsi.webapi.service.vocabulary.ConceptSetStrategy; import org.ohdsi.webapi.source.Source; import org.ohdsi.webapi.source.SourceService; @@ -82,8 +86,10 @@ import org.springframework.jdbc.core.RowMapper; import org.springframework.stereotype.Component; import org.ohdsi.webapi.vocabulary.MappedRelatedConcept; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.web.bind.annotation.RequestBody; - /** +/** * Provides REST 
services for working with * the OMOP standardized vocabularies * @@ -145,7 +151,9 @@ public void customize(CacheManager cacheManager) { @Autowired private ObjectMapper objectMapper; - + + @Autowired + private ConceptSetExpressionResolver conceptSetExpressionResolver; @Value("${datasource.driverClassName}") private String driver; @@ -1025,18 +1033,21 @@ public Collection getCommonAncestors(Object[] identifiers) { @Consumes(MediaType.APPLICATION_JSON) public Collection resolveConceptSetExpression(@PathParam("sourceKey") String sourceKey, ConceptSetExpression conceptSetExpression) { Source source = getSourceRepository().findBySourceKey(sourceKey); - PreparedStatementRenderer psr = new ConceptSetStrategy(conceptSetExpression).prepareStatement(source, null); - final ArrayList identifiers = new ArrayList<>(); - getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), new RowCallbackHandler() { - @Override - public void processRow(ResultSet rs) throws SQLException { - identifiers.add(rs.getLong("CONCEPT_ID")); - } - }); - - return identifiers; - } + return resolveConceptSetExpression(source, conceptSetExpression); + } + protected Collection resolveConceptSetExpression(Source source, ConceptSetExpression conceptSetExpression) { + PreparedStatementRenderer psr = new ConceptSetStrategy(conceptSetExpression).prepareStatement(source, null); + final ArrayList identifiers = new ArrayList<>(); + getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), new RowCallbackHandler() { + @Override + public void processRow(ResultSet rs) throws SQLException { + identifiers.add(rs.getLong("CONCEPT_ID")); + } + }); + return identifiers; + } + /** * Resolve a concept set expression into a collection * of concept identifiers using the default vocabulary source. 
@@ -1252,7 +1263,6 @@ public Vocabulary mapRow(final ResultSet resultSet, final int arg1) throws SQLEx * default vocabulary * * @summary Get vocabularies (default vocabulary) - * @param sourceKey The source containing the vocabulary * @return A collection of vocabularies */ @GET @@ -1620,38 +1630,331 @@ public Collection compareConceptSets(@PathParam("sourceKey // Execute the query Collection returnVal = getSourceJdbcTemplate(source).query(sql_statement, CONCEPT_SET_COMPARISON_ROW_MAPPER); - return returnVal; - } + return returnVal.stream() + .map(c -> enrichComparisonWithVocabularyInfo(c, source, source)) + .collect(Collectors.toList()); + } + @Path("compare-diff-vocab") + @POST + @Produces(MediaType.APPLICATION_JSON) + @Consumes(MediaType.APPLICATION_JSON) + public CompareConceptSetsResponse compareConceptSetsOverDiffVocabs(@RequestBody CompareConceptSetsRequest compareConceptSetsRequest) { + String source1Key = compareConceptSetsRequest.source1Key; + String source2Key = compareConceptSetsRequest.source2Key; + Validate.notBlank(source1Key); + Validate.notBlank(source2Key); + + Source source1 = getSourceRepository().findBySourceKey(source1Key); + Source source2 = getSourceRepository().findBySourceKey(source2Key); + + ConceptSetExpression expression1 = compareConceptSetsRequest.expression1; + ConceptSetExpression expression2 = compareConceptSetsRequest.expression2; + + Validate.notNull(expression1); + Validate.notNull(expression2); + + // Resolve concept sets to get all descendant/mapped concepts in each vocabulary + Collection resolvedConceptIds1 = conceptSetExpressionResolver.resolveConceptSetExpression(source1, expression1); + Collection resolvedConceptIds2 = conceptSetExpressionResolver.resolveConceptSetExpression(source2, expression2); + + // Track counts + int cs1IncludedConceptsCount = resolvedConceptIds1.size(); + int cs2IncludedConceptsCount = resolvedConceptIds2.size(); + int cs1IncludedSourceCodesCount = 0; + int cs2IncludedSourceCodesCount = 0; + + // Convert to Sets for efficient lookup + Set resolvedSet1 = new HashSet<>(resolvedConceptIds1); + Set resolvedSet2 = new HashSet<>(resolvedConceptIds2); + + // Look up concepts in each vocabulary + Collection includedConcepts1 = conceptSetExpressionResolver.executeIdentifierLookup(source1, Longs.toArray(resolvedConceptIds1)); + Collection includedConcepts2 = conceptSetExpressionResolver.executeIdentifierLookup(source2, Longs.toArray(resolvedConceptIds2)); + + Map conceptMap1 = includedConcepts1.stream() + .collect(Collectors.toMap(c -> c.conceptId, c -> c)); + Map conceptMap2 = includedConcepts2.stream() + .collect(Collectors.toMap(c -> c.conceptId, c -> c)); + + // Union of all unique concept IDs (from both resolved sets) + Set allConceptIds = new HashSet<>(); + allConceptIds.addAll(resolvedConceptIds1); + allConceptIds.addAll(resolvedConceptIds2); + + List results = new ArrayList<>(); + + for (Long conceptId : allConceptIds) { + ConceptSetComparison sourceCodeComparison = compareSingleConcept(conceptId, conceptMap1, conceptMap2, resolvedSet1, resolvedSet2); + enrichComparisonWithVocabularyInfo(sourceCodeComparison, source1, source2); + results.add(sourceCodeComparison); + } - @Path("{sourceKey}/compare-arbitrary") - @POST - @Produces(MediaType.APPLICATION_JSON) - @Consumes(MediaType.APPLICATION_JSON) - public Collection compareConceptSetsCsv(final @PathParam("sourceKey") String sourceKey, - final CompareArbitraryDto dto) throws Exception { - final ConceptSetExpression[] csExpressionList = dto.compareTargets; - if 
(csExpressionList.length != 2) { - throw new Exception("You must specify two concept set expressions in order to use this method."); - } + if(compareConceptSetsRequest.compareSourceCodes) { + Set includedConceptsIds1 = includedConcepts1.stream().map(concept -> concept.conceptId).collect(Collectors.toSet()); + Set includedConceptsIds2 = includedConcepts2.stream().map(concept -> concept.conceptId).collect(Collectors.toSet()); - final Collection returnVal = conceptSetCompareService.compareConceptSets(sourceKey, dto); + // Look up source codes for all included concepts in each vocabulary + Collection includedSourceCodes1 = executeMappedLookup(source1, Longs.toArray(includedConceptsIds1)); + Collection includedSourceCodes2 = executeMappedLookup(source2, Longs.toArray(includedConceptsIds2)); - // maps for items "not found in DB from input1", "not found in DB from input2" - final Map input1Ex = ExpressionFileUtils.toExclusionMap(csExpressionList[0].items, returnVal); - final Map input2ex = ExpressionFileUtils.toExclusionMap(csExpressionList[1].items, returnVal); + // Track source code counts + cs1IncludedSourceCodesCount = includedSourceCodes1.size(); + cs2IncludedSourceCodesCount = includedSourceCodes2.size(); - // compare/combine exclusion maps and add the result to the output - returnVal.addAll(ExpressionFileUtils.combine(input1Ex, input2ex)); + Map sourceConceptsMap1 = includedSourceCodes1.stream() + .collect(Collectors.toMap(c -> c.conceptId, c -> c)); + Map sourceConceptsMap2 = includedSourceCodes2.stream() + .collect(Collectors.toMap(c -> c.conceptId, c -> c)); - // concept names to display mismatches - final Map names = ExpressionFileUtils.toNamesMap(csExpressionList[0].items, csExpressionList[1].items); - returnVal.forEach(item -> { - final String name = names.get(ExpressionFileUtils.getKey(item)); - item.nameMismatch = name != null && !name.equals(item.conceptName); - }); + Set resolvedSourceConceptIdsSet1 = sourceConceptsMap1.keySet(); + Set resolvedSourceConceptIdsSet2 = sourceConceptsMap2.keySet(); - return returnVal; - } + // Union of all unique source concept IDs (from both resolved sets) + Set allSourceConceptIds = new HashSet<>(); + allSourceConceptIds.addAll(resolvedSourceConceptIdsSet1); + allSourceConceptIds.addAll(resolvedSourceConceptIdsSet2); + + for (Long sourceConceptId : allSourceConceptIds) { + ConceptSetComparison sourceCodeComparison = compareSingleConcept(sourceConceptId, sourceConceptsMap1, sourceConceptsMap2, resolvedSourceConceptIdsSet1, resolvedSourceConceptIdsSet2); + sourceCodeComparison.isSourceCode = true; + enrichComparisonWithVocabularyInfo(sourceCodeComparison, source1, source2); + results.add(sourceCodeComparison); + } + } + + return new CompareConceptSetsResponse( + results, + cs1IncludedConceptsCount, + cs1IncludedSourceCodesCount, + cs2IncludedConceptsCount, + cs2IncludedSourceCodesCount + ); + } + + private ConceptSetComparison compareSingleConcept(Long conceptId, Map conceptMap1, Map conceptMap2, Set resolvedSet1, Set resolvedSet2){ + ConceptSetComparison comparison = new ConceptSetComparison(); + comparison.conceptId = conceptId; + + Concept concept1 = conceptMap1.get(conceptId); + Concept concept2 = conceptMap2.get(conceptId); + + // Determine CONCEPT SET membership based on RESOLVED expressions + boolean inCS1 = resolvedSet1.contains(conceptId); + boolean inCS2 = resolvedSet2.contains(conceptId); + + comparison.conceptInCS1Only = (inCS1 && !inCS2) ? 1L : 0L; + comparison.conceptInCS2Only = (!inCS1 && inCS2) ? 
1L : 0L; + comparison.conceptInCS1AndCS2 = (inCS1 && inCS2) ? 1L : 0L; + + // Populate fields from vocab1 + if (concept1 != null) { + comparison.vocab1ConceptName = concept1.conceptName; + comparison.vocab1StandardConcept = concept1.standardConcept; + comparison.vocab1InvalidReason = concept1.invalidReason; + comparison.vocab1ConceptCode = concept1.conceptCode; + comparison.vocab1DomainId = concept1.domainId; + comparison.vocab1VocabularyId = concept1.vocabularyId; + comparison.vocab1ConceptClassId = concept1.conceptClassId; + + if (concept1 instanceof org.ohdsi.vocabulary.Concept) { + org.ohdsi.vocabulary.Concept extendedConcept1 = (org.ohdsi.vocabulary.Concept) concept1; + comparison.vocab1ValidStartDate = extendedConcept1.validStartDate != null ? + new java.sql.Date(extendedConcept1.validStartDate.getTime()) : null; + comparison.vocab1ValidEndDate = extendedConcept1.validEndDate != null ? + new java.sql.Date(extendedConcept1.validEndDate.getTime()) : null; + } + } + + // Populate fields from vocab2 + if (concept2 != null) { + comparison.vocab2ConceptName = concept2.conceptName; + comparison.vocab2StandardConcept = concept2.standardConcept; + comparison.vocab2InvalidReason = concept2.invalidReason; + comparison.vocab2ConceptCode = concept2.conceptCode; + comparison.vocab2DomainId = concept2.domainId; + comparison.vocab2VocabularyId = concept2.vocabularyId; + comparison.vocab2ConceptClassId = concept2.conceptClassId; + + if (concept2 instanceof org.ohdsi.vocabulary.Concept) { + org.ohdsi.vocabulary.Concept extendedConcept2 = (org.ohdsi.vocabulary.Concept) concept2; + comparison.vocab2ValidStartDate = extendedConcept2.validStartDate != null ? + new java.sql.Date(extendedConcept2.validStartDate.getTime()) : null; + comparison.vocab2ValidEndDate = extendedConcept2.validEndDate != null ? + new java.sql.Date(extendedConcept2.validEndDate.getTime()) : null; + } + } + + // Use concept from whichever source has it (prefer source1 if in both) + Concept conceptToUse = concept1 != null ? concept1 : concept2; + + if (conceptToUse != null) { + comparison.standardConcept = conceptToUse.standardConcept; + comparison.invalidReason = conceptToUse.invalidReason; + comparison.conceptCode = conceptToUse.conceptCode; + comparison.domainId = conceptToUse.domainId; + comparison.vocabularyId = conceptToUse.vocabularyId; + comparison.conceptClassId = conceptToUse.conceptClassId; + + if (conceptToUse instanceof org.ohdsi.vocabulary.Concept) { + org.ohdsi.vocabulary.Concept extendedConcept = (org.ohdsi.vocabulary.Concept) conceptToUse; + comparison.validStartDate = extendedConcept.validStartDate != null ? + new java.sql.Date(extendedConcept.validStartDate.getTime()) : null; + comparison.validEndDate = extendedConcept.validEndDate != null ? 
+ new java.sql.Date(extendedConcept.validEndDate.getTime()) : null; + } + } + + // Check for mismatches if concept exists in both vocabularies + if (concept1 != null && concept2 != null) { + comparison.nameMismatch = !Objects.equals(concept1.conceptName, concept2.conceptName); + comparison.standardConceptMismatch = !Objects.equals(concept1.standardConcept, concept2.standardConcept); + comparison.invalidReasonMismatch = !Objects.equals(concept1.invalidReason, concept2.invalidReason); + comparison.conceptCodeMismatch = !Objects.equals(concept1.conceptCode, concept2.conceptCode); + comparison.domainIdMismatch = !Objects.equals(concept1.domainId, concept2.domainId); + comparison.vocabularyIdMismatch = !Objects.equals(concept1.vocabularyId, concept2.vocabularyId); + comparison.conceptClassIdMismatch = !Objects.equals(concept1.conceptClassId, concept2.conceptClassId); + + if (concept1 instanceof org.ohdsi.vocabulary.Concept && concept2 instanceof org.ohdsi.vocabulary.Concept) { + org.ohdsi.vocabulary.Concept ext1 = (org.ohdsi.vocabulary.Concept) concept1; + org.ohdsi.vocabulary.Concept ext2 = (org.ohdsi.vocabulary.Concept) concept2; + comparison.validStartDateMismatch = !Objects.equals(ext1.validStartDate, ext2.validStartDate); + comparison.validEndDateMismatch = !Objects.equals(ext1.validEndDate, ext2.validEndDate); + } else { + comparison.validStartDateMismatch = false; + comparison.validEndDateMismatch = false; + } + } else { + // If concept not in both vocabularies, no mismatch comparison possible + comparison.nameMismatch = false; + comparison.standardConceptMismatch = false; + comparison.invalidReasonMismatch = false; + comparison.conceptCodeMismatch = false; + comparison.domainIdMismatch = false; + comparison.vocabularyIdMismatch = false; + comparison.conceptClassIdMismatch = false; + comparison.validStartDateMismatch = false; + comparison.validEndDateMismatch = false; + } + return comparison; + } + + public ConceptSetComparison enrichComparisonWithVocabularyInfo(ConceptSetComparison comparison, Source source1, Source source2) { + VocabularyInfo vocabularyInfo1 = getInfo(source1.getSourceKey()); + VocabularyInfo vocabularyInfo2 = getInfo(source2.getSourceKey()); + + comparison.vocab1SourceKey = source1.getSourceKey(); + comparison.vocab1SourceName = source1.getSourceName(); + comparison.vocab1SourceVersion = vocabularyInfo1.version; + + comparison.vocab2SourceKey = source2.getSourceKey(); + comparison.vocab2SourceName = source2.getSourceName(); + comparison.vocab2SourceVersion = vocabularyInfo2.version; + + return comparison; + } + @Path("compare-arbitrary-diff-vocab") + @POST + @Produces(MediaType.APPLICATION_JSON) + @Consumes(MediaType.APPLICATION_JSON) + public Collection compareConceptSetsCsvOverDiffVocabs(@RequestBody CompareConceptSetsArbitraryRequest compareConceptSetsArbitraryRequest) throws Exception { + + CompareConceptSetsRequest compareConceptSetsRequest = new CompareConceptSetsRequest(); + compareConceptSetsRequest.source1Key = compareConceptSetsArbitraryRequest.source1Key; + compareConceptSetsRequest.source2Key = compareConceptSetsArbitraryRequest.source2Key; + compareConceptSetsRequest.expression1 = compareConceptSetsArbitraryRequest.expression1; + compareConceptSetsRequest.expression2 = compareConceptSetsArbitraryRequest.expression2; + + Source source1 = getSourceRepository().findBySourceKey(compareConceptSetsRequest.source1Key); + Source source2 = getSourceRepository().findBySourceKey(compareConceptSetsRequest.source2Key); + + Collection regularCompareResults = 
compareConceptSetsOverDiffVocabs(compareConceptSetsRequest).getComparisons(); + List finalResults = new ArrayList<>(regularCompareResults); + + // maps for items "not found in DB from input1", "not found in DB from input2" + final Map input1Ex = ExpressionFileUtils.toExclusionMap(compareConceptSetsArbitraryRequest.expression1.items, regularCompareResults); + final Map input2ex = ExpressionFileUtils.toExclusionMap(compareConceptSetsArbitraryRequest.expression2.items, regularCompareResults); + + // compare/combine exclusion maps and add the result to the output + finalResults.addAll(ExpressionFileUtils.combine(input1Ex, input2ex)); + + // concept field maps to display mismatches + final Map names1 = ExpressionFileUtils.toNamesMap(compareConceptSetsArbitraryRequest.expression1.items); + final Map names2 = ExpressionFileUtils.toNamesMap(compareConceptSetsArbitraryRequest.expression2.items); + + finalResults.forEach(item -> { + final String key = ExpressionFileUtils.getKey(item); + final String name1 = names1.get(key); + final String name2 = names2.get(key); + + // Check name mismatch + if (name1 != null && name2 != null) { + item.nameMismatch = item.nameMismatch || !name1.equals(name2); + } else if (name1 != null && item.vocab1ConceptName != null) { + item.nameMismatch = item.nameMismatch || !name1.equals(item.vocab1ConceptName); + } else if (name2 != null && item.vocab2ConceptName != null) { + item.nameMismatch = item.nameMismatch || !name2.equals(item.vocab2ConceptName); + } + }); + + return finalResults.stream() + .map(c -> enrichComparisonWithVocabularyInfo(c, source1, source2)) + .collect(Collectors.toList()); + } + + public static class CompareConceptSetsRequest { + public CompareConceptSetsRequest() {} + public String source1Key; + public String source2Key; + public ConceptSetExpression expression1; + public ConceptSetExpression expression2; + public boolean compareSourceCodes; + } + public static class CompareConceptSetsArbitraryRequest extends CompareConceptSetsRequest { + public CompareConceptSetsArbitraryRequest() {} + public ExpressionType expressionType1; + public ExpressionType expressionType2; + } + + @Path("{sourceKey}/compare-arbitrary") + @POST + @Produces(MediaType.APPLICATION_JSON) + @Consumes(MediaType.APPLICATION_JSON) + public Collection compareConceptSetsCsv(final @PathParam("sourceKey") String sourceKey, + final CompareArbitraryDto dto) throws Exception { + final ConceptSetExpression[] csExpressionList = dto.compareTargets; + if (csExpressionList.length != 2) { + throw new Exception("You must specify two concept set expressions in order to use this method."); + } + Source source = getSourceRepository().findBySourceKey(sourceKey); + + final Collection returnVal = conceptSetCompareService.compareConceptSets(source, dto); + + // maps for items "not found in DB from input1", "not found in DB from input2" + final Map input1Ex = ExpressionFileUtils.toExclusionMap(csExpressionList[0].items, returnVal); + final Map input2ex = ExpressionFileUtils.toExclusionMap(csExpressionList[1].items, returnVal); + + // compare/combine exclusion maps and add the result to the output + returnVal.addAll(ExpressionFileUtils.combine(input1Ex, input2ex)); + + // concept names to display mismatches - updated to use vocab1ConceptName and vocab2ConceptName + final Map names1 = ExpressionFileUtils.toNamesMap(csExpressionList[0].items); + final Map names2 = ExpressionFileUtils.toNamesMap(csExpressionList[1].items); + + returnVal.forEach(item -> { + final String key = ExpressionFileUtils.getKey(item); 
+ final String name1 = names1.get(key); + final String name2 = names2.get(key); + + // Check if there's a mismatch between the two concept names + item.nameMismatch = (name1 != null && name2 != null && !name1.equals(name2)) || + (name1 != null && item.vocab1ConceptName != null && !name1.equals(item.vocab1ConceptName)) || + (name2 != null && item.vocab2ConceptName != null && !name2.equals(item.vocab2ConceptName)); + }); + + return returnVal.stream() + .map(c -> enrichComparisonWithVocabularyInfo(c, source, source)) + .collect(Collectors.toList()); + } /** * Compares two concept set expressions to find which concepts are @@ -1680,7 +1983,6 @@ public Collection compareConceptSets(ConceptSetExpression[ * in a concept set expression. * * @summary Optimize concept set (default vocabulary) - * @param sourceKey The source containing the vocabulary * @param conceptSetExpression The concept set expression to optimize * @return A concept set optimization */ @@ -1781,47 +2083,4 @@ public ConceptSetOptimizationResult optimizeConceptSet(@PathParam("sourceKey") S return result; } - - - private String JoinArray(final long[] array) { - String result = ""; - - for (int i = 0; i < array.length; i++) { - if (i > 0) { - result += ","; - } - - result += array[i]; - } - - return result; - } - - private String JoinArray(final String[] array) { - String result = ""; - - for (int i = 0; i < array.length; i++) { - if (i > 0) { - result += ","; - } - - result += "'" + array[i] + "'"; - } - - return result; - } - - private String JoinArrayList(final ArrayList array){ - String result = ""; - - for (int i = 0; i < array.size(); i++) { - if (i > 0) { - result += " AND "; - } - - result += array.get(i); - } - - return result; - } } diff --git a/src/main/java/org/ohdsi/webapi/service/cscompare/ConceptSetBatchCompareJobListener.java b/src/main/java/org/ohdsi/webapi/service/cscompare/ConceptSetBatchCompareJobListener.java new file mode 100644 index 0000000000..63c3bc861d --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/service/cscompare/ConceptSetBatchCompareJobListener.java @@ -0,0 +1,109 @@ +package org.ohdsi.webapi.service.cscompare; + +import org.ohdsi.webapi.job.artifact.ConceptSetBatchCompareArtifactGenerator; +import org.ohdsi.webapi.job.artifact.JobArtifactPaths; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.JobExecutionListener; +import org.springframework.core.io.Resource; +import org.springframework.stereotype.Component; + +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; + +@Component +public class ConceptSetBatchCompareJobListener implements JobExecutionListener { + + private static final Logger log = LoggerFactory.getLogger(ConceptSetBatchCompareJobListener.class); + + private final ConceptSetBatchCompareArtifactGenerator artifactGenerator; + + public ConceptSetBatchCompareJobListener(ConceptSetBatchCompareArtifactGenerator artifactGenerator) { + this.artifactGenerator = artifactGenerator; + } + + @Override + public void beforeJob(JobExecution jobExecution) { + log.info("Starting batch compare job with execution ID: {}", jobExecution.getId()); + } + + @Override + public void afterJob(JobExecution jobExecution) { + Long executionId = jobExecution.getId(); + + log.info("=== ARTIFACT GENERATION START for execution {} ===", executionId); + log.info("Job Status: {}, Exit Status: {}", + jobExecution.getStatus(), + 
jobExecution.getExitStatus().getExitCode()); + + // Only generate artifact for successful jobs + if (jobExecution.getStatus().isUnsuccessful()) { + log.warn("Skipping artifact generation for unsuccessful job execution {}", executionId); + log.info("=== ARTIFACT GENERATION END for execution {} ===", executionId); + return; + } + + try { + generateArtifact(jobExecution); + } catch (Exception e) { + log.error("Failed to generate artifact for job execution {}", executionId, e); + // Don't throw - let the job complete successfully even if artifact generation fails + } + + log.info("=== ARTIFACT GENERATION END for execution {} ===", executionId); + } + + private void generateArtifact(JobExecution jobExecution) throws Exception { + Long executionId = jobExecution.getId(); + + if (!artifactGenerator.supports(jobExecution)) { + log.warn("Artifact generator does not support job execution {}", executionId); + return; + } + + if (!artifactGenerator.hasArtifact(jobExecution)) { + log.info("No artifact to generate for job execution {}", executionId); + return; + } + + Path targetPath = JobArtifactPaths.getArtifactPath(executionId); + log.info("Target artifact path: {}", targetPath.toAbsolutePath()); + + // Ensure parent directory exists + Path parentDir = targetPath.getParent(); + if (parentDir != null && !Files.exists(parentDir)) { + Files.createDirectories(parentDir); + log.info("Created artifact directory: {}", parentDir.toAbsolutePath()); + } + + // Generate the artifact + log.info("Generating artifact..."); + long startTime = System.currentTimeMillis(); + + Resource artifactResource = artifactGenerator.getArtifact(jobExecution); + + if (artifactResource == null || !artifactResource.exists()) { + log.error("Generated artifact resource is null or does not exist"); + return; + } + + // Copy the artifact to the target location + log.info("Copying artifact to target location..."); + try (InputStream inputStream = artifactResource.getInputStream()) { + Files.copy(inputStream, targetPath, StandardCopyOption.REPLACE_EXISTING); + + if (Files.exists(targetPath)) { + long fileSize = Files.size(targetPath); + long totalTime = System.currentTimeMillis() - startTime; + log.info("SUCCESS: Artifact created at: {} ({} bytes, {} ms)", + targetPath.toAbsolutePath(), fileSize, totalTime); + } else { + log.error("FAILURE: Artifact file was NOT created at: {}", + targetPath.toAbsolutePath()); + } + } + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/service/cscompare/ConceptSetBatchCompareRequest.java b/src/main/java/org/ohdsi/webapi/service/cscompare/ConceptSetBatchCompareRequest.java new file mode 100644 index 0000000000..1a334c7d3f --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/service/cscompare/ConceptSetBatchCompareRequest.java @@ -0,0 +1,106 @@ +package org.ohdsi.webapi.service.cscompare; + +import java.util.List; + +public class ConceptSetBatchCompareRequest { + + private String jobName; + private String source1Key; + private String source2Key; + private String createdDateFrom; + private String createdDateTo; + private String updatedDateFrom; + private String updatedDateTo; + private List tags; + private List authors; + private boolean compareSourceCodes; + private List conceptSetIds; + + public String getJobName() { + return jobName; + } + + public void setJobName(String jobName) { + this.jobName = jobName; + } + + public String getSource1Key() { + return source1Key; + } + + public void setSource1Key(String source1Key) { + this.source1Key = source1Key; + } + + public String 
getSource2Key() { + return source2Key; + } + + public void setSource2Key(String source2Key) { + this.source2Key = source2Key; + } + + public String getCreatedDateFrom() { + return createdDateFrom; + } + + public void setCreatedDateFrom(String createdDateFrom) { + this.createdDateFrom = createdDateFrom; + } + + public String getCreatedDateTo() { + return createdDateTo; + } + + public void setCreatedDateTo(String createdDateTo) { + this.createdDateTo = createdDateTo; + } + + public List getTags() { + return tags; + } + + public void setTags(List tags) { + this.tags = tags; + } + + public String getUpdatedDateFrom() { + return updatedDateFrom; + } + + public void setUpdatedDateFrom(String updatedDateFrom) { + this.updatedDateFrom = updatedDateFrom; + } + + public String getUpdatedDateTo() { + return updatedDateTo; + } + + public void setUpdatedDateTo(String updatedDateTo) { + this.updatedDateTo = updatedDateTo; + } + + public List getAuthors() { + return authors; + } + + public void setAuthors(List authors) { + this.authors = authors; + } + + public boolean isCompareSourceCodes() { + return compareSourceCodes; + } + + public void setCompareSourceCodes(boolean compareSourceCodes) { + this.compareSourceCodes = compareSourceCodes; + } + + public List getConceptSetIds() { + return conceptSetIds; + } + + public void setConceptSetIds(List conceptSetIds) { + this.conceptSetIds = conceptSetIds; + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/service/cscompare/ConceptSetBatchCompareTasklet.java b/src/main/java/org/ohdsi/webapi/service/cscompare/ConceptSetBatchCompareTasklet.java new file mode 100644 index 0000000000..8538204a49 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/service/cscompare/ConceptSetBatchCompareTasklet.java @@ -0,0 +1,469 @@ +package org.ohdsi.webapi.service.cscompare; + +import org.apache.commons.lang3.StringUtils; +import org.ohdsi.circe.vocabulary.ConceptSetExpression; +import org.ohdsi.webapi.conceptset.ConceptSet; +import org.ohdsi.webapi.conceptset.ConceptSetComparison; +import org.ohdsi.webapi.conceptset.ConceptSetRepository; +import org.ohdsi.webapi.executionengine.job.BaseExecutionTasklet; +import org.ohdsi.webapi.job.artifact.ConceptSetBatchCompareArtifactGenerator; +import org.ohdsi.webapi.service.ConceptSetExpressionResolver; +import org.ohdsi.webapi.service.VocabularyService; +import org.ohdsi.webapi.service.cscompare.entity.ConceptSetCompareJobAuthorEntity; +import org.ohdsi.webapi.service.cscompare.entity.ConceptSetCompareJobDiffEntity; +import org.ohdsi.webapi.service.cscompare.entity.ConceptSetCompareJobEntity; +import org.ohdsi.webapi.service.cscompare.entity.ConceptSetCompareJobStatsEntity; +import org.ohdsi.webapi.service.cscompare.repository.ConceptSetCompareJobAuthorRepository; +import org.ohdsi.webapi.service.cscompare.repository.ConceptSetCompareJobDiffRepository; +import org.ohdsi.webapi.service.cscompare.repository.ConceptSetCompareJobRepository; +import org.ohdsi.webapi.service.cscompare.repository.ConceptSetCompareJobStatsRepository; +import org.ohdsi.webapi.service.dto.CompareConceptSetsResponse; +import org.ohdsi.webapi.shiro.Entities.UserEntity; +import org.ohdsi.webapi.shiro.Entities.UserRepository; +import org.ohdsi.webapi.source.Source; +import org.ohdsi.webapi.source.SourceRepository; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.batch.core.StepContribution; +import org.springframework.batch.core.explore.JobExplorer; +import 
org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; + +@Service +public class ConceptSetBatchCompareTasklet extends BaseExecutionTasklet { + + protected final Logger log = LoggerFactory.getLogger(getClass()); + + private final VocabularyService vocabularyService; + private final ConceptSetRepository conceptSetRepository; + private final SourceRepository sourceRepository; + private final ConceptSetExpressionResolver conceptSetExpressionResolver; + private final ConceptSetCompareJobRepository compareJobRepository; + private final ConceptSetCompareJobDiffRepository compareJobDiffRepository; + private final ConceptSetCompareJobStatsRepository compareJobStatsRepository; + private final ConceptSetFilterService filterService; + private final ConceptSetCompareJobAuthorRepository compareJobAuthorRepository; + private final UserRepository userRepository; + private final ConceptSetBatchCompareArtifactGenerator artifactGenerator; + private final ApplicationEventPublisher eventPublisher; + private final JobExplorer jobExplorer; + + public ConceptSetBatchCompareTasklet( + VocabularyService vocabularyService, + ConceptSetRepository conceptSetRepository, + SourceRepository sourceRepository, + ConceptSetExpressionResolver conceptSetExpressionResolver, + ConceptSetCompareJobRepository compareJobRepository, + ConceptSetCompareJobDiffRepository compareJobDiffRepository, + ConceptSetCompareJobStatsRepository compareJobStatsRepository, + ConceptSetFilterService filterService, + ConceptSetCompareJobAuthorRepository compareJobAuthorRepository, + UserRepository userRepository, + ConceptSetBatchCompareArtifactGenerator artifactGenerator, + ApplicationEventPublisher eventPublisher, + JobExplorer jobExplorer + ) { + this.vocabularyService = vocabularyService; + this.conceptSetRepository = conceptSetRepository; + this.sourceRepository = sourceRepository; + this.conceptSetExpressionResolver = conceptSetExpressionResolver; + this.compareJobRepository = compareJobRepository; + this.compareJobDiffRepository = compareJobDiffRepository; + this.compareJobStatsRepository = compareJobStatsRepository; + this.filterService = filterService; + this.compareJobAuthorRepository = compareJobAuthorRepository; + this.userRepository = userRepository; + this.artifactGenerator = artifactGenerator; + this.eventPublisher = eventPublisher; + this.jobExplorer = jobExplorer; + } + + @Override + @Transactional + public RepeatStatus execute(StepContribution stepContribution, ChunkContext context) throws Exception { + try { + Map jobParams = context.getStepContext().getJobParameters(); + Long executionId = context.getStepContext().getStepExecution().getJobExecutionId(); + + String source1Key = (String) jobParams.get("source1Key"); + String source2Key = (String) jobParams.get("source2Key"); + String source1Version = (String) jobParams.get("source1Version"); + String source2Version = (String) jobParams.get("source2Version"); + String createdDateFrom = (String) jobParams.get("createdDateFrom"); + String createdDateTo = (String) 
jobParams.get("createdDateTo"); + String updatedDateFrom = (String) jobParams.get("updatedDateFrom"); + String updatedDateTo = (String) jobParams.get("updatedDateTo"); + String tagsParam = (String) jobParams.get("tagsIds"); + String authorIdsParam = (String) jobParams.get("authorIds"); + Boolean compareSourceCodes = Boolean.parseBoolean((String) jobParams.get("compareSourceCodes")); + String conceptSetIdsParam = (String) jobParams.get("conceptSetIds"); + + log.info("Executing batch compare with parameters: source1Key={}, source2Key={}, " + + "createdDateFrom={}, createdDateTo={}, updatedDateFrom={}, updatedDateTo={}, " + + "tags={}, authorIds={}, compareSourceCodes={}, conceptSetIds={}", + source1Key, source2Key, createdDateFrom, createdDateTo, + updatedDateFrom, updatedDateTo, tagsParam, authorIdsParam, + compareSourceCodes, conceptSetIdsParam); + + Source source1 = sourceRepository.findBySourceKey(source1Key); + Source source2 = sourceRepository.findBySourceKey(source2Key); + + if (source1 == null || source2 == null) { + throw new IllegalArgumentException("Invalid source keys provided"); + } + + // Build filter criteria + ConceptSetFilterService.ConceptSetFilterCriteria criteria = buildFilterCriteria( + createdDateFrom, createdDateTo, updatedDateFrom, updatedDateTo, + tagsParam, authorIdsParam, conceptSetIdsParam + ); + + // Get filtered concept sets + List conceptSets = filterService.filterConceptSets(criteria); + + log.info("Found {} concept sets matching filter criteria", conceptSets.size()); + + // Save ConceptSetCompareJobEntity with vocabulary versions + ConceptSetCompareJobEntity compareJob = createAndSaveCompareJob( + source1, source2, source1Version, source2Version, createdDateFrom, createdDateTo, + updatedDateFrom, updatedDateTo, tagsParam, authorIdsParam, compareSourceCodes, conceptSetIdsParam, + executionId, conceptSets.size() + ); + + if (conceptSets.isEmpty()) { + log.warn("No concept sets found matching the specified criteria"); + compareJob.setConceptSetsWithDiffs(0); + compareJobRepository.save(compareJob); + return RepeatStatus.FINISHED; + } + + // Track which concept sets have differences + Set conceptSetsWithDiffs = new HashSet<>(); + + // Process comparisons + List results = conceptSets.stream() + .map(conceptSet -> { + try { + VocabularyService.CompareConceptSetsRequest request = toCompareRequest(conceptSet, source1, source2, compareSourceCodes); + CompareConceptSetsResponse response = vocabularyService.compareConceptSetsOverDiffVocabs(request); + return new ConceptSetComparisonResult(conceptSet.getId(), response); + } catch (Exception e) { + log.error("Error comparing concept set ID {}: {}", conceptSet.getId(), e.getMessage(), e); + return null; + } + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + + log.info("Successfully compared {} concept sets", results.size()); + + // Save comparison results, statistics, and track concept sets with differences + results.forEach(result -> { + boolean hasDiffs = saveJobDiff(result.conceptSetId, result.response.getComparisons(), compareJob); + if (hasDiffs) { + conceptSetsWithDiffs.add(result.conceptSetId); + } + + // Save statistics for this concept set + saveJobStats(result.conceptSetId, result.response, hasDiffs, compareJob); + }); + + // Update the compare job with the count of concept sets with differences + compareJob.setConceptSetsWithDiffs(conceptSetsWithDiffs.size()); + compareJobRepository.save(compareJob); + + log.info("Batch compare job completed successfully. 
Analyzed: {}, With Differences: {}", + conceptSets.size(), conceptSetsWithDiffs.size()); + + return RepeatStatus.FINISHED; + + } catch (Exception e) { + log.error("Error executing batch compare tasklet", e); + throw e; + } + } + + private ConceptSetFilterService.ConceptSetFilterCriteria buildFilterCriteria( + String createdDateFrom, String createdDateTo, + String updatedDateFrom, String updatedDateTo, + String tagsParam, String authorIdsParam, + String conceptSetIdsParam + ) { + ConceptSetFilterService.ConceptSetFilterCriteria criteria = + new ConceptSetFilterService.ConceptSetFilterCriteria(); + + DateTimeFormatter formatter = DateTimeFormatter.ISO_LOCAL_DATE; + + if (StringUtils.isNotBlank(createdDateFrom)) { + criteria.setCreatedFrom(LocalDate.parse(createdDateFrom, formatter)); + } + if (StringUtils.isNotBlank(createdDateTo)) { + criteria.setCreatedTo(LocalDate.parse(createdDateTo, formatter)); + } + if (StringUtils.isNotBlank(updatedDateFrom)) { + criteria.setUpdatedFrom(LocalDate.parse(updatedDateFrom, formatter)); + } + if (StringUtils.isNotBlank(updatedDateTo)) { + criteria.setUpdatedTo(LocalDate.parse(updatedDateTo, formatter)); + } + if (StringUtils.isNotBlank(tagsParam)) { + List tagIds = Arrays.stream(tagsParam.split(",")) + .map(String::trim) + .filter(StringUtils::isNotBlank) + .map(Integer::parseInt) + .collect(Collectors.toList()); + criteria.setTagIds(tagIds); + } + + if (StringUtils.isNotBlank(authorIdsParam)) { + List authorIds = Arrays.stream(authorIdsParam.split(",")) + .map(String::trim) + .filter(StringUtils::isNotBlank) + .map(Long::parseLong) + .collect(Collectors.toList()); + criteria.setAuthorIds(authorIds); + } + + if (StringUtils.isNotBlank(conceptSetIdsParam)) { + List conceptSetIds = Arrays.stream(conceptSetIdsParam.split(",")) + .map(String::trim) + .filter(StringUtils::isNotBlank) + .map(Integer::parseInt) + .collect(Collectors.toList()); + criteria.setConceptSetIds(conceptSetIds); + log.debug("Setting concept set ID filter with {} IDs: {}", + conceptSetIds.size(), conceptSetIds); + } + + return criteria; + } + + private ConceptSetCompareJobEntity createAndSaveCompareJob( + Source source1, Source source2, String source1Version, String source2Version, + String createdDateFrom, String createdDateTo, String updatedDateFrom, String updatedDateTo, + String tags, String authorIdsParam, Boolean compareSourceCodes, String conceptSetIdsParam, Long executionId, + int conceptSetsAnalyzed + ) { + ConceptSetCompareJobEntity compareJob = new ConceptSetCompareJobEntity(); + compareJob.setExecutionId(executionId); + compareJob.setSource1Key(source1.getSourceKey()); + compareJob.setSource2Key(source2.getSourceKey()); + compareJob.setVocab1Version(source1Version); + compareJob.setVocab2Version(source2Version); + compareJob.setCompareSourceCodes(compareSourceCodes != null ? 
compareSourceCodes : false); + compareJob.setConceptSetsAnalyzed(conceptSetsAnalyzed); + + DateTimeFormatter formatter = DateTimeFormatter.ISO_LOCAL_DATE; + + if (StringUtils.isNotBlank(createdDateFrom)) { + compareJob.setCreatedFrom(LocalDate.parse(createdDateFrom, formatter)); + } + if (StringUtils.isNotBlank(createdDateTo)) { + compareJob.setCreatedTo(LocalDate.parse(createdDateTo, formatter)); + } + if (StringUtils.isNotBlank(updatedDateFrom)) { + compareJob.setUpdatedFrom(LocalDate.parse(updatedDateFrom, formatter)); + } + if (StringUtils.isNotBlank(updatedDateTo)) { + compareJob.setUpdatedTo(LocalDate.parse(updatedDateTo, formatter)); + } + if (StringUtils.isNotBlank(tags)) { + compareJob.setTags(tags); + } + + if (StringUtils.isNotBlank(conceptSetIdsParam)) { + compareJob.setConceptSetIds(conceptSetIdsParam); + log.debug("Stored concept set IDs filter: {}", conceptSetIdsParam); + } + + compareJob = compareJobRepository.save(compareJob); + + if (StringUtils.isNotBlank(authorIdsParam)) { + List authorIds = Arrays.stream(authorIdsParam.split(",")) + .map(String::trim) + .filter(StringUtils::isNotBlank) + .map(Long::parseLong) + .collect(Collectors.toList()); + + for (Long authorId : authorIds) { + UserEntity user = userRepository.findOne(authorId); + if (user != null) { + ConceptSetCompareJobAuthorEntity authorEntity = new ConceptSetCompareJobAuthorEntity(); + authorEntity.setUser(user); + compareJob.addAuthor(authorEntity); + } + } + + compareJob = compareJobRepository.save(compareJob); + } + + return compareJob; + } + + private boolean saveJobDiff(Integer conceptSetId, Collection comparisons, + ConceptSetCompareJobEntity compareJob) { + if (comparisons == null || comparisons.isEmpty()) { + return false; + } + + List diffEntities = comparisons.stream() + .filter(this::hasDifference) + .map(comparison -> createDiffEntity(conceptSetId, comparison, compareJob)) + .collect(Collectors.toList()); + + if (!diffEntities.isEmpty()) { + diffEntities.forEach(compareJobDiffRepository::save); + log.debug("Saved {} diff entries for concept set ID {}", diffEntities.size(), conceptSetId); + return true; + } + + return false; + } + + private void saveJobStats(Integer conceptSetId, CompareConceptSetsResponse response, + boolean hasDifferences, ConceptSetCompareJobEntity compareJob) { + ConceptSetCompareJobStatsEntity statsEntity = new ConceptSetCompareJobStatsEntity(); + statsEntity.setCompareJob(compareJob); + statsEntity.setConceptSetId(conceptSetId); + statsEntity.setCs1IncludedConceptsCount(response.getCs1IncludedConceptsCount()); + statsEntity.setCs1IncludedSourceCodesCount(response.getCs1IncludedSourceCodesCount()); + statsEntity.setCs2IncludedConceptsCount(response.getCs2IncludedConceptsCount()); + statsEntity.setCs2IncludedSourceCodesCount(response.getCs2IncludedSourceCodesCount()); + statsEntity.setHasDifferences(hasDifferences); + + compareJobStatsRepository.save(statsEntity); + log.debug("Saved statistics for concept set ID {}: CS1 concepts={}, CS1 source codes={}, " + + "CS2 concepts={}, CS2 source codes={}, hasDifferences={}", + conceptSetId, response.getCs1IncludedConceptsCount(), response.getCs1IncludedSourceCodesCount(), + response.getCs2IncludedConceptsCount(), response.getCs2IncludedSourceCodesCount(), hasDifferences); + } + + private boolean hasDifference(ConceptSetComparison comparison) { + return (comparison.conceptInCS1Only != null && comparison.conceptInCS1Only > 0) || + (comparison.conceptInCS2Only != null && comparison.conceptInCS2Only > 0) || + comparison.nameMismatch || + 
comparison.standardConceptMismatch || + comparison.invalidReasonMismatch || + comparison.conceptCodeMismatch || + comparison.domainIdMismatch || + comparison.vocabularyIdMismatch || + comparison.conceptClassIdMismatch || + comparison.validStartDateMismatch || + comparison.validEndDateMismatch; + } + + private ConceptSetCompareJobDiffEntity createDiffEntity( + Integer conceptSetId, ConceptSetComparison comparison, ConceptSetCompareJobEntity compareJob + ) { + ConceptSetCompareJobDiffEntity diffEntity = new ConceptSetCompareJobDiffEntity(); + + diffEntity.setCompareJob(compareJob); + diffEntity.setConceptSetId(conceptSetId); + diffEntity.setConceptId(comparison.conceptId != null ? comparison.conceptId.intValue() : null); + diffEntity.setIsSourceCode(comparison.isSourceCode); + diffEntity.setConceptInCS1Only(comparison.conceptInCS1Only); + diffEntity.setConceptInCS2Only(comparison.conceptInCS2Only); + diffEntity.setConceptInCS1AndCS2(comparison.conceptInCS1AndCS2); + + if (comparison.nameMismatch) { + diffEntity.setVocab1ConceptName(comparison.vocab1ConceptName); + diffEntity.setVocab2ConceptName(comparison.vocab2ConceptName); + } + + if (comparison.standardConceptMismatch) { + diffEntity.setVocab1StandardConcept(comparison.vocab1StandardConcept); + diffEntity.setVocab2StandardConcept(comparison.vocab2StandardConcept); + } + + if (comparison.invalidReasonMismatch) { + diffEntity.setVocab1InvalidReason(comparison.vocab1InvalidReason); + diffEntity.setVocab2InvalidReason(comparison.vocab2InvalidReason); + } + + if (comparison.conceptCodeMismatch) { + diffEntity.setVocab1ConceptCode(comparison.vocab1ConceptCode); + diffEntity.setVocab2ConceptCode(comparison.vocab2ConceptCode); + } + + if (comparison.domainIdMismatch) { + diffEntity.setVocab1DomainId(comparison.vocab1DomainId); + diffEntity.setVocab2DomainId(comparison.vocab2DomainId); + } + + if (comparison.vocabularyIdMismatch) { + diffEntity.setVocab1VocabularyId(comparison.vocab1VocabularyId); + diffEntity.setVocab2VocabularyId(comparison.vocab2VocabularyId); + } + + if (comparison.conceptClassIdMismatch) { + diffEntity.setVocab1ConceptClassId(comparison.vocab1ConceptClassId); + diffEntity.setVocab2ConceptClassId(comparison.vocab2ConceptClassId); + } + + if (comparison.validStartDateMismatch) { + diffEntity.setVocab1ValidStartDate(comparison.vocab1ValidStartDate); + diffEntity.setVocab2ValidStartDate(comparison.vocab2ValidStartDate); + } + + if (comparison.validEndDateMismatch) { + diffEntity.setVocab1ValidEndDate(comparison.vocab1ValidEndDate); + diffEntity.setVocab2ValidEndDate(comparison.vocab2ValidEndDate); + } + + diffEntity.setNameMismatch(comparison.nameMismatch); + diffEntity.setStandardConceptMismatch(comparison.standardConceptMismatch); + diffEntity.setInvalidReasonMismatch(comparison.invalidReasonMismatch); + diffEntity.setConceptCodeMismatch(comparison.conceptCodeMismatch); + diffEntity.setDomainIdMismatch(comparison.domainIdMismatch); + diffEntity.setVocabularyIdMismatch(comparison.vocabularyIdMismatch); + diffEntity.setConceptClassIdMismatch(comparison.conceptClassIdMismatch); + diffEntity.setValidStartDateMismatch(comparison.validStartDateMismatch); + diffEntity.setValidEndDateMismatch(comparison.validEndDateMismatch); + + return diffEntity; + } + + private VocabularyService.CompareConceptSetsRequest toCompareRequest( + ConceptSet conceptSet, Source source1, Source source2, Boolean compareSourceCodes + ) { + VocabularyService.CompareConceptSetsRequest compareConceptSetsRequest = + new 
VocabularyService.CompareConceptSetsRequest(); + + Integer id = conceptSet.getId(); + + ConceptSetExpression expression1 = conceptSetExpressionResolver.getConceptSetExpression(source1, id); + ConceptSetExpression expression2 = conceptSetExpressionResolver.getConceptSetExpression(source2, id); + + compareConceptSetsRequest.source1Key = source1.getSourceKey(); + compareConceptSetsRequest.source2Key = source2.getSourceKey(); + compareConceptSetsRequest.expression1 = expression1; + compareConceptSetsRequest.expression2 = expression2; + compareConceptSetsRequest.compareSourceCodes = compareSourceCodes != null ? compareSourceCodes : false; + + return compareConceptSetsRequest; + } + + private static class ConceptSetComparisonResult { + final Integer conceptSetId; + final CompareConceptSetsResponse response; + + ConceptSetComparisonResult(Integer conceptSetId, CompareConceptSetsResponse response) { + this.conceptSetId = conceptSetId; + this.response = response; + } + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/service/cscompare/ConceptSetCompareService.java b/src/main/java/org/ohdsi/webapi/service/cscompare/ConceptSetCompareService.java index 49777b955f..20bb414c02 100644 --- a/src/main/java/org/ohdsi/webapi/service/cscompare/ConceptSetCompareService.java +++ b/src/main/java/org/ohdsi/webapi/service/cscompare/ConceptSetCompareService.java @@ -34,10 +34,11 @@ public class ConceptSetCompareService extends AbstractDaoService { public static final RowMapper CONCEPT_SET_COMPARISON_ROW_MAPPER = (rs, rowNum) -> { ConceptSetComparison csc = new ConceptSetComparison(); csc.conceptId = rs.getLong("concept_id"); - csc.conceptIn1Only = rs.getLong("concept_in_1_only"); - csc.conceptIn2Only = rs.getLong("concept_in_2_only"); - csc.conceptIn1And2 = rs.getLong("concept_in_both_1_and_2"); - csc.conceptName = rs.getString("concept_name"); + csc.conceptInCS1Only = rs.getLong("concept_in_1_only"); + csc.conceptInCS2Only = rs.getLong("concept_in_2_only"); + csc.conceptInCS1AndCS2 = rs.getLong("concept_in_both_1_and_2"); + csc.vocab1ConceptName = rs.getString("concept_name"); + csc.vocab2ConceptName = rs.getString("concept_name"); csc.standardConcept = rs.getString("standard_concept"); csc.invalidReason = rs.getString("invalid_reason"); csc.conceptCode = rs.getString("concept_code"); @@ -49,14 +50,13 @@ public class ConceptSetCompareService extends AbstractDaoService { return csc; }; - public Collection compareConceptSets(final String sourceKey, + public Collection compareConceptSets(final Source source, final CompareArbitraryDto dto) throws Exception { final ConceptSetExpression[] csExpressionList = dto.compareTargets; if (csExpressionList.length != 2) { throw new Exception("You must specify two concept set expressions in order to use this method."); } - final Source source = getSourceRepository().findBySourceKey(sourceKey); final String vocabSchema = source.getTableQualifier(SourceDaimon.DaimonType.Vocabulary); final Function>> callbackFunction = diff --git a/src/main/java/org/ohdsi/webapi/service/cscompare/ConceptSetFilterService.java b/src/main/java/org/ohdsi/webapi/service/cscompare/ConceptSetFilterService.java new file mode 100644 index 0000000000..019423126d --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/service/cscompare/ConceptSetFilterService.java @@ -0,0 +1,165 @@ +package org.ohdsi.webapi.service.cscompare; + +import org.ohdsi.webapi.conceptset.ConceptSet; +import org.ohdsi.webapi.conceptset.ConceptSetRepository; +import 
org.ohdsi.webapi.service.cscompare.repository.ConceptSetSpecifications; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.data.jpa.domain.Specification; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import java.time.LocalDate; +import java.time.LocalTime; +import java.time.ZoneId; +import java.util.Date; +import java.util.List; + +@Service +public class ConceptSetFilterService { + + private static final Logger log = LoggerFactory.getLogger(ConceptSetFilterService.class); + + private final ConceptSetRepository conceptSetRepository; + + public ConceptSetFilterService( + ConceptSetRepository conceptSetRepository + ) { + this.conceptSetRepository = conceptSetRepository; + } + + @Transactional(readOnly = true) + public List filterConceptSets(ConceptSetFilterCriteria criteria) { + log.info("Filtering concept sets with criteria: {}", criteria); + + // Convert LocalDate to Date with proper boundaries + Date createdFrom = toDateStartOfDay(criteria.getCreatedFrom()); + Date createdTo = toDateEndOfDay(criteria.getCreatedTo()); + Date updatedFrom = toDateStartOfDay(criteria.getUpdatedFrom()); + Date updatedTo = toDateEndOfDay(criteria.getUpdatedTo()); + + log.debug("Converted date filters: createdFrom={}, createdTo={}, updatedFrom={}, updatedTo={}", + createdFrom, createdTo, updatedFrom, updatedTo); + + // Build specification + Specification spec = ConceptSetSpecifications.withAllFilters( + createdFrom, + createdTo, + updatedFrom, + updatedTo, + criteria.getTagIds(), + criteria.getAuthorIds(), + criteria.getConceptSetIds() + ); + + // Execute query + List filteredSets = conceptSetRepository.findAll(spec); + + log.info("Found {} concept sets after date, tag, and author filtering", filteredSets.size()); + + return filteredSets; + } + + /** + * Convert LocalDate to Date at start of day (00:00:00.000) + * Returns null if localDate is null (treated as minus infinity) + */ + private Date toDateStartOfDay(LocalDate localDate) { + if (localDate == null) { + return null; + } + return Date.from(localDate.atStartOfDay(ZoneId.systemDefault()).toInstant()); + } + + /** + * Convert LocalDate to Date at end of day (23:59:59.999) + * Returns null if localDate is null (treated as plus infinity) + */ + private Date toDateEndOfDay(LocalDate localDate) { + if (localDate == null) { + return null; + } + return Date.from(localDate.atTime(LocalTime.MAX).atZone(ZoneId.systemDefault()).toInstant()); + } + + /** + * Criteria class for filtering concept sets + */ + public static class ConceptSetFilterCriteria { + private LocalDate createdFrom; + private LocalDate createdTo; + private LocalDate updatedFrom; + private LocalDate updatedTo; + private List tagIds; + private List authorIds; + private List conceptSetIds; + + public LocalDate getCreatedFrom() { + return createdFrom; + } + + public void setCreatedFrom(LocalDate createdFrom) { + this.createdFrom = createdFrom; + } + + public LocalDate getCreatedTo() { + return createdTo; + } + + public void setCreatedTo(LocalDate createdTo) { + this.createdTo = createdTo; + } + + public LocalDate getUpdatedFrom() { + return updatedFrom; + } + + public void setUpdatedFrom(LocalDate updatedFrom) { + this.updatedFrom = updatedFrom; + } + + public LocalDate getUpdatedTo() { + return updatedTo; + } + + public void setUpdatedTo(LocalDate updatedTo) { + this.updatedTo = updatedTo; + } + + public List getTagIds() { + return tagIds; + } + + public void setTagIds(List tagIds) { + this.tagIds 
= tagIds; + } + + public List getAuthorIds() { + return authorIds; + } + + public void setAuthorIds(List authorIds) { + this.authorIds = authorIds; + } + + @Override + public String toString() { + return "ConceptSetFilterCriteria{" + + "createdFrom=" + createdFrom + + ", createdTo=" + createdTo + + ", updatedFrom=" + updatedFrom + + ", updatedTo=" + updatedTo + + ", tagIds=" + tagIds + + ", authorIds=" + authorIds + + '}'; + } + + public List getConceptSetIds() { + return conceptSetIds; + } + + public void setConceptSetIds(List conceptSetIds) { + this.conceptSetIds = conceptSetIds; + } + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/service/cscompare/ExpressionFileUtils.java b/src/main/java/org/ohdsi/webapi/service/cscompare/ExpressionFileUtils.java index e4f4e3615a..b76b05fbe0 100644 --- a/src/main/java/org/ohdsi/webapi/service/cscompare/ExpressionFileUtils.java +++ b/src/main/java/org/ohdsi/webapi/service/cscompare/ExpressionFileUtils.java @@ -4,63 +4,128 @@ import org.ohdsi.circe.vocabulary.ConceptSetExpression; import org.ohdsi.webapi.conceptset.ConceptSetComparison; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; import java.util.stream.Collector; import java.util.stream.Collectors; public class ExpressionFileUtils { - private static final String CODE_AND_VOCABID_KEY = "%s:%s"; - private static final Collector> CONCEPT_MAP_COLLECTOR = - Collectors.toMap(ExpressionFileUtils::getKey, item -> item.concept); - private static final Collector> NAMES_MAP_COLLECTOR = - Collectors.toMap(ExpressionFileUtils::getKey, item -> item.concept.conceptName); - - public static String getKey(final ConceptSetExpression.ConceptSetItem item) { - return String.format(CODE_AND_VOCABID_KEY, item.concept.conceptCode, item.concept.vocabularyId); - } - - public static String getKey(final ConceptSetComparison item) { - return String.format(CODE_AND_VOCABID_KEY, item.conceptCode, item.vocabularyId); - } - - public static Collection combine(final Map input1ex, - final Map input2ex) { - final Collection outValues = new ArrayList<>(); - - // combine "not found in DB from input1" and "not found in DB from input2" in one map (to deal with doubles) - final Map combinedMap = new HashMap<>(input1ex); - combinedMap.putAll(input2ex); - - combinedMap.forEach((key, value) -> { - final ConceptSetComparison out = new ConceptSetComparison(); - final boolean isInIntersection = input1ex.containsKey(key) && input2ex.containsKey(key); - final boolean isIn1Only = !isInIntersection && input1ex.containsKey(key); - final boolean isIn2Only = !isInIntersection && input2ex.containsKey(key); - out.conceptIn1Only = isIn1Only ? 1L : 0; - out.conceptIn2Only = isIn2Only ? 1L : 0; - out.conceptIn1And2 = isInIntersection ? 
1L : 0; - out.conceptName = value.conceptName; - out.conceptCode = value.conceptCode; - out.vocabularyId = value.vocabularyId; - - outValues.add(out); - }); - return outValues; - } - - - public static Map toExclusionMap(final ConceptSetExpression.ConceptSetItem[] in1, final Collection fromDb) { - return Arrays.stream(in1).filter(item -> - fromDb.stream().noneMatch(out -> out.conceptCode.equals(item.concept.conceptCode) && out.vocabularyId.equals(item.concept.vocabularyId)) - ).collect(CONCEPT_MAP_COLLECTOR); - } - - public static Map toNamesMap(final ConceptSetExpression.ConceptSetItem[] in1, - final ConceptSetExpression.ConceptSetItem[] in2) { - final Map names1 = Arrays.stream(in1).collect(NAMES_MAP_COLLECTOR); - final Map names2 = Arrays.stream(in2).collect(NAMES_MAP_COLLECTOR); - final Map namesCombined = new HashMap<>(names1); - namesCombined.putAll(names2); - return namesCombined; - } -} + private static final String CODE_AND_VOCABID_KEY = "%s:%s"; + private static final Collector> CONCEPT_MAP_COLLECTOR = + Collectors.toMap(ExpressionFileUtils::getKey, item -> item.concept); + private static final Collector> NAMES_MAP_COLLECTOR = + Collectors.toMap(ExpressionFileUtils::getKey, item -> item.concept.conceptName); + + public static String getKey(final ConceptSetExpression.ConceptSetItem item) { + return String.format(CODE_AND_VOCABID_KEY, item.concept.conceptCode, item.concept.vocabularyId); + } + + public static String getKey(final ConceptSetComparison item) { + return String.format(CODE_AND_VOCABID_KEY, item.conceptCode, item.vocabularyId); + } + + public static Collection combine(final Map input1ex, + final Map input2ex) { + final Collection outValues = new ArrayList<>(); + + // combine "not found in DB from input1" and "not found in DB from input2" in one map + final Map combinedMap = new HashMap<>(input1ex); + combinedMap.putAll(input2ex); + + combinedMap.forEach((key, value) -> { + final ConceptSetComparison out = new ConceptSetComparison(); + final boolean isInIntersection = input1ex.containsKey(key) && input2ex.containsKey(key); + final boolean isIn1Only = !isInIntersection && input1ex.containsKey(key); + final boolean isIn2Only = !isInIntersection && input2ex.containsKey(key); + + // Set concept set membership flags + out.conceptInCS1Only = isIn1Only ? 1L : 0; + out.conceptInCS2Only = isIn2Only ? 1L : 0; + out.conceptInCS1AndCS2 = isInIntersection ? 1L : 0; + + // Get concepts from each input if present + Concept concept1 = input1ex.get(key); + Concept concept2 = input2ex.get(key); + + // Set vocab1 fields if from input1 + if (concept1 != null) { + out.vocab1ConceptName = concept1.conceptName; + out.vocab1StandardConcept = concept1.standardConcept; + out.vocab1InvalidReason = concept1.invalidReason; + out.vocab1ConceptCode = concept1.conceptCode; + out.vocab1DomainId = concept1.domainId; + out.vocab1VocabularyId = concept1.vocabularyId; + out.vocab1ConceptClassId = concept1.conceptClassId; + } + + // Set vocab2 fields if from input2 + if (concept2 != null) { + out.vocab2ConceptName = concept2.conceptName; + out.vocab2StandardConcept = concept2.standardConcept; + out.vocab2InvalidReason = concept2.invalidReason; + out.vocab2ConceptCode = concept2.conceptCode; + out.vocab2DomainId = concept2.domainId; + out.vocab2VocabularyId = concept2.vocabularyId; + out.vocab2ConceptClassId = concept2.conceptClassId; + } + + // Use whichever concept is available for the general fields + Concept conceptToUse = concept1 != null ? 
concept1 : concept2; + out.conceptCode = conceptToUse.conceptCode; + out.vocabularyId = conceptToUse.vocabularyId; + + // Check for mismatches if in both + if (isInIntersection && concept1 != null && concept2 != null) { + out.nameMismatch = !Objects.equals(concept1.conceptName, concept2.conceptName); + out.standardConceptMismatch = !Objects.equals(concept1.standardConcept, concept2.standardConcept); + out.invalidReasonMismatch = !Objects.equals(concept1.invalidReason, concept2.invalidReason); + out.conceptCodeMismatch = !Objects.equals(concept1.conceptCode, concept2.conceptCode); + out.domainIdMismatch = !Objects.equals(concept1.domainId, concept2.domainId); + out.vocabularyIdMismatch = !Objects.equals(concept1.vocabularyId, concept2.vocabularyId); + out.conceptClassIdMismatch = !Objects.equals(concept1.conceptClassId, concept2.conceptClassId); + } else { + out.nameMismatch = false; + out.standardConceptMismatch = false; + out.invalidReasonMismatch = false; + out.conceptCodeMismatch = false; + out.domainIdMismatch = false; + out.vocabularyIdMismatch = false; + out.conceptClassIdMismatch = false; + out.validStartDateMismatch = false; + out.validEndDateMismatch = false; + } + + outValues.add(out); + }); + return outValues; + } + + public static Map toExclusionMap(final ConceptSetExpression.ConceptSetItem[] in1, + final Collection fromDb) { + return Arrays.stream(in1).filter(item -> + fromDb.stream().noneMatch(out -> + out.conceptCode.equals(item.concept.conceptCode) && + out.vocabularyId.equals(item.concept.vocabularyId)) + ).collect(CONCEPT_MAP_COLLECTOR); + } + + // Overloaded method that returns a single map (for single expression) + public static Map toNamesMap(final ConceptSetExpression.ConceptSetItem[] items) { + return Arrays.stream(items).collect(NAMES_MAP_COLLECTOR); + } + + // Keep the old method signature for backward compatibility, but mark as deprecated + @Deprecated + public static Map toNamesMap(final ConceptSetExpression.ConceptSetItem[] in1, + final ConceptSetExpression.ConceptSetItem[] in2) { + final Map names1 = Arrays.stream(in1).collect(NAMES_MAP_COLLECTOR); + final Map names2 = Arrays.stream(in2).collect(NAMES_MAP_COLLECTOR); + final Map namesCombined = new HashMap<>(names1); + namesCombined.putAll(names2); + return namesCombined; + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/service/cscompare/config/ConceptSetBatchCompareJobConfig.java b/src/main/java/org/ohdsi/webapi/service/cscompare/config/ConceptSetBatchCompareJobConfig.java new file mode 100644 index 0000000000..4aad4f576a --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/service/cscompare/config/ConceptSetBatchCompareJobConfig.java @@ -0,0 +1,37 @@ +package org.ohdsi.webapi.service.cscompare.config; + +import org.ohdsi.webapi.service.cscompare.ConceptSetBatchCompareJobListener; +import org.ohdsi.webapi.service.cscompare.ConceptSetBatchCompareTasklet; +import org.springframework.batch.core.Job; +import org.springframework.batch.core.Step; +import org.springframework.batch.core.configuration.annotation.JobBuilderFactory; +import org.springframework.batch.core.configuration.annotation.StepBuilderFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +@Configuration +public class ConceptSetBatchCompareJobConfig { + + @Autowired + private ConceptSetBatchCompareJobListener jobListener; + + @Autowired + private ConceptSetBatchCompareTasklet tasklet; + + 
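// Assembles the single-step batch compare job: conceptSetBatchCompareStep runs the tasklet that performs the comparisons, and the listener writes the downloadable comparison artifact after a successful run. + 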
@Bean + public Job conceptSetBatchCompareJob(JobBuilderFactory jobBuilderFactory, + Step conceptSetBatchCompareStep) { + return jobBuilderFactory.get("conceptSetBatchCompareJob") + .listener(jobListener) + .start(conceptSetBatchCompareStep) + .build(); + } + + @Bean + public Step conceptSetBatchCompareStep(StepBuilderFactory stepBuilderFactory) { + return stepBuilderFactory.get("conceptSetBatchCompareStep") + .tasklet(tasklet) + .build(); + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/service/cscompare/entity/ConceptSetCompareJobAuthorEntity.java b/src/main/java/org/ohdsi/webapi/service/cscompare/entity/ConceptSetCompareJobAuthorEntity.java new file mode 100644 index 0000000000..764877cdb1 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/service/cscompare/entity/ConceptSetCompareJobAuthorEntity.java @@ -0,0 +1,66 @@ +package org.ohdsi.webapi.service.cscompare.entity; + +import org.hibernate.annotations.GenericGenerator; +import org.hibernate.annotations.Parameter; +import org.ohdsi.webapi.shiro.Entities.UserEntity; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.FetchType; +import javax.persistence.GeneratedValue; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.ManyToOne; +import javax.persistence.Table; +import java.io.Serializable; + +@Entity +@Table(name = "CONCEPT_SET_COMPARE_JOB_AUTHOR") +public class ConceptSetCompareJobAuthorEntity implements Serializable { + + @Id + @Column(name = "ID") + @GenericGenerator( + name = "concept_set_compare_job_author_generator", + strategy = "org.hibernate.id.enhanced.SequenceStyleGenerator", + parameters = { + @Parameter(name = "sequence_name", value = "CONCEPT_SET_COMPARE_JOB_AUTHOR_SEQUENCE"), + @Parameter(name = "initial_value", value = "1"), + @Parameter(name = "increment_size", value = "1") + } + ) + @GeneratedValue(generator = "concept_set_compare_job_author_generator") + private Integer id; + + @ManyToOne(fetch = FetchType.LAZY) + @JoinColumn(name = "COMPARE_JOB_ID", nullable = false) + private ConceptSetCompareJobEntity compareJob; + + @ManyToOne(fetch = FetchType.EAGER) + @JoinColumn(name = "USER_ID", nullable = false) + private UserEntity user; + + public Integer getId() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + + public ConceptSetCompareJobEntity getCompareJob() { + return compareJob; + } + + public void setCompareJob(ConceptSetCompareJobEntity compareJob) { + this.compareJob = compareJob; + } + + public UserEntity getUser() { + return user; + } + + public void setUser(UserEntity user) { + this.user = user; + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/service/cscompare/entity/ConceptSetCompareJobDiffEntity.java b/src/main/java/org/ohdsi/webapi/service/cscompare/entity/ConceptSetCompareJobDiffEntity.java new file mode 100644 index 0000000000..c59fe5b004 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/service/cscompare/entity/ConceptSetCompareJobDiffEntity.java @@ -0,0 +1,427 @@ +package org.ohdsi.webapi.service.cscompare.entity; + +import org.hibernate.annotations.GenericGenerator; +import org.hibernate.annotations.Parameter; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.FetchType; +import javax.persistence.GeneratedValue; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.ManyToOne; +import javax.persistence.Table; +import java.sql.Date; + +@Entity 
+@Table(name = "concept_set_compare_job_diff") +public class ConceptSetCompareJobDiffEntity { + + @Id + @GenericGenerator( + name = "concept_set_compare_job_diff_generator", + strategy = "org.hibernate.id.enhanced.SequenceStyleGenerator", + parameters = { + @Parameter(name = "sequence_name", value = "concept_set_compare_job_diff_sequence"), + @Parameter(name = "increment_size", value = "1") + } + ) + @GeneratedValue(generator = "concept_set_compare_job_diff_generator") + private Integer id; + + @ManyToOne(fetch = FetchType.LAZY) + @JoinColumn(name = "compare_job_id", nullable = false) + private ConceptSetCompareJobEntity compareJob; + + @Column(name = "concept_set_id", nullable = false) + private Integer conceptSetId; + + @Column(name = "concept_id", nullable = false) + private Integer conceptId; + + @Column(name = "is_source_code", nullable = false) + private Boolean isSourceCode = false; + + // Concept Set membership counts + @Column(name = "concept_in_cs1_only") + private Long conceptInCS1Only; + + @Column(name = "concept_in_cs2_only") + private Long conceptInCS2Only; + + @Column(name = "concept_in_cs1_and_cs2") + private Long conceptInCS1AndCS2; + + // Concept names from both vocabularies + @Column(name = "vocab1_concept_name", length = 1000) + private String vocab1ConceptName; + + @Column(name = "vocab2_concept_name", length = 1000) + private String vocab2ConceptName; + + // Standard concept from both vocabularies + @Column(name = "vocab1_standard_concept", length = 1) + private String vocab1StandardConcept; + + @Column(name = "vocab2_standard_concept", length = 1) + private String vocab2StandardConcept; + + // Invalid reason from both vocabularies + @Column(name = "vocab1_invalid_reason", length = 1) + private String vocab1InvalidReason; + + @Column(name = "vocab2_invalid_reason", length = 1) + private String vocab2InvalidReason; + + // Concept code from both vocabularies + @Column(name = "vocab1_concept_code", length = 50) + private String vocab1ConceptCode; + + @Column(name = "vocab2_concept_code", length = 50) + private String vocab2ConceptCode; + + // Domain ID from both vocabularies + @Column(name = "vocab1_domain_id", length = 20) + private String vocab1DomainId; + + @Column(name = "vocab2_domain_id", length = 20) + private String vocab2DomainId; + + // Vocabulary ID from both vocabularies + @Column(name = "vocab1_vocabulary_id", length = 20) + private String vocab1VocabularyId; + + @Column(name = "vocab2_vocabulary_id", length = 20) + private String vocab2VocabularyId; + + // Concept class ID from both vocabularies + @Column(name = "vocab1_concept_class_id", length = 20) + private String vocab1ConceptClassId; + + @Column(name = "vocab2_concept_class_id", length = 20) + private String vocab2ConceptClassId; + + // Valid dates from both vocabularies + @Column(name = "vocab1_valid_start_date") + private Date vocab1ValidStartDate; + + @Column(name = "vocab2_valid_start_date") + private Date vocab2ValidStartDate; + + @Column(name = "vocab1_valid_end_date") + private Date vocab1ValidEndDate; + + @Column(name = "vocab2_valid_end_date") + private Date vocab2ValidEndDate; + + // Mismatch flags + @Column(name = "name_mismatch", nullable = false) + private Boolean nameMismatch; + + @Column(name = "standard_concept_mismatch", nullable = false) + private Boolean standardConceptMismatch; + + @Column(name = "invalid_reason_mismatch", nullable = false) + private Boolean invalidReasonMismatch; + + @Column(name = "concept_code_mismatch", nullable = false) + private Boolean conceptCodeMismatch; + 
+ @Column(name = "domain_id_mismatch", nullable = false) + private Boolean domainIdMismatch; + + @Column(name = "vocabulary_id_mismatch", nullable = false) + private Boolean vocabularyIdMismatch; + + @Column(name = "concept_class_id_mismatch", nullable = false) + private Boolean conceptClassIdMismatch; + + @Column(name = "valid_start_date_mismatch", nullable = false) + private Boolean validStartDateMismatch; + + @Column(name = "valid_end_date_mismatch", nullable = false) + private Boolean validEndDateMismatch; + + public ConceptSetCompareJobDiffEntity() { + } + + // Getters and Setters + public Integer getId() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + + public ConceptSetCompareJobEntity getCompareJob() { + return compareJob; + } + + public void setCompareJob(ConceptSetCompareJobEntity compareJob) { + this.compareJob = compareJob; + } + + public Integer getConceptSetId() { + return conceptSetId; + } + + public void setConceptSetId(Integer conceptSetId) { + this.conceptSetId = conceptSetId; + } + + public Integer getConceptId() { + return conceptId; + } + + public void setConceptId(Integer conceptId) { + this.conceptId = conceptId; + } + + public Boolean getIsSourceCode() { + return isSourceCode; + } + + public void setIsSourceCode(Boolean isSourceCode) { + this.isSourceCode = isSourceCode; + } + + public Long getConceptInCS1Only() { + return conceptInCS1Only; + } + + public void setConceptInCS1Only(Long conceptInCS1Only) { + this.conceptInCS1Only = conceptInCS1Only; + } + + public Long getConceptInCS2Only() { + return conceptInCS2Only; + } + + public void setConceptInCS2Only(Long conceptInCS2Only) { + this.conceptInCS2Only = conceptInCS2Only; + } + + public Long getConceptInCS1AndCS2() { + return conceptInCS1AndCS2; + } + + public void setConceptInCS1AndCS2(Long conceptInCS1AndCS2) { + this.conceptInCS1AndCS2 = conceptInCS1AndCS2; + } + public String getVocab1ConceptName() { + return vocab1ConceptName; + } + + public void setVocab1ConceptName(String vocab1ConceptName) { + this.vocab1ConceptName = vocab1ConceptName; + } + + public String getVocab2ConceptName() { + return vocab2ConceptName; + } + + public void setVocab2ConceptName(String vocab2ConceptName) { + this.vocab2ConceptName = vocab2ConceptName; + } + + public String getVocab1StandardConcept() { + return vocab1StandardConcept; + } + + public void setVocab1StandardConcept(String vocab1StandardConcept) { + this.vocab1StandardConcept = vocab1StandardConcept; + } + + public String getVocab2StandardConcept() { + return vocab2StandardConcept; + } + + public void setVocab2StandardConcept(String vocab2StandardConcept) { + this.vocab2StandardConcept = vocab2StandardConcept; + } + + public String getVocab1InvalidReason() { + return vocab1InvalidReason; + } + + public void setVocab1InvalidReason(String vocab1InvalidReason) { + this.vocab1InvalidReason = vocab1InvalidReason; + } + + public String getVocab2InvalidReason() { + return vocab2InvalidReason; + } + + public void setVocab2InvalidReason(String vocab2InvalidReason) { + this.vocab2InvalidReason = vocab2InvalidReason; + } + + public String getVocab1ConceptCode() { + return vocab1ConceptCode; + } + + public void setVocab1ConceptCode(String vocab1ConceptCode) { + this.vocab1ConceptCode = vocab1ConceptCode; + } + + public String getVocab2ConceptCode() { + return vocab2ConceptCode; + } + + public void setVocab2ConceptCode(String vocab2ConceptCode) { + this.vocab2ConceptCode = vocab2ConceptCode; + } + + public String getVocab1DomainId() { + return 
vocab1DomainId; + } + + public void setVocab1DomainId(String vocab1DomainId) { + this.vocab1DomainId = vocab1DomainId; + } + + public String getVocab2DomainId() { + return vocab2DomainId; + } + + public void setVocab2DomainId(String vocab2DomainId) { + this.vocab2DomainId = vocab2DomainId; + } + + public String getVocab1VocabularyId() { + return vocab1VocabularyId; + } + + public void setVocab1VocabularyId(String vocab1VocabularyId) { + this.vocab1VocabularyId = vocab1VocabularyId; + } + + public String getVocab2VocabularyId() { + return vocab2VocabularyId; + } + + public void setVocab2VocabularyId(String vocab2VocabularyId) { + this.vocab2VocabularyId = vocab2VocabularyId; + } + + public String getVocab1ConceptClassId() { + return vocab1ConceptClassId; + } + + public void setVocab1ConceptClassId(String vocab1ConceptClassId) { + this.vocab1ConceptClassId = vocab1ConceptClassId; + } + + public String getVocab2ConceptClassId() { + return vocab2ConceptClassId; + } + + public void setVocab2ConceptClassId(String vocab2ConceptClassId) { + this.vocab2ConceptClassId = vocab2ConceptClassId; + } + + public Date getVocab1ValidStartDate() { + return vocab1ValidStartDate; + } + + public void setVocab1ValidStartDate(Date vocab1ValidStartDate) { + this.vocab1ValidStartDate = vocab1ValidStartDate; + } + + public Date getVocab2ValidStartDate() { + return vocab2ValidStartDate; + } + + public void setVocab2ValidStartDate(Date vocab2ValidStartDate) { + this.vocab2ValidStartDate = vocab2ValidStartDate; + } + + public Date getVocab1ValidEndDate() { + return vocab1ValidEndDate; + } + + public void setVocab1ValidEndDate(Date vocab1ValidEndDate) { + this.vocab1ValidEndDate = vocab1ValidEndDate; + } + + public Date getVocab2ValidEndDate() { + return vocab2ValidEndDate; + } + + public void setVocab2ValidEndDate(Date vocab2ValidEndDate) { + this.vocab2ValidEndDate = vocab2ValidEndDate; + } + + public Boolean getNameMismatch() { + return nameMismatch; + } + + public void setNameMismatch(Boolean nameMismatch) { + this.nameMismatch = nameMismatch; + } + + public Boolean getStandardConceptMismatch() { + return standardConceptMismatch; + } + + public void setStandardConceptMismatch(Boolean standardConceptMismatch) { + this.standardConceptMismatch = standardConceptMismatch; + } + + public Boolean getInvalidReasonMismatch() { + return invalidReasonMismatch; + } + + public void setInvalidReasonMismatch(Boolean invalidReasonMismatch) { + this.invalidReasonMismatch = invalidReasonMismatch; + } + + public Boolean getConceptCodeMismatch() { + return conceptCodeMismatch; + } + + public void setConceptCodeMismatch(Boolean conceptCodeMismatch) { + this.conceptCodeMismatch = conceptCodeMismatch; + } + + public Boolean getDomainIdMismatch() { + return domainIdMismatch; + } + + public void setDomainIdMismatch(Boolean domainIdMismatch) { + this.domainIdMismatch = domainIdMismatch; + } + + public Boolean getVocabularyIdMismatch() { + return vocabularyIdMismatch; + } + + public void setVocabularyIdMismatch(Boolean vocabularyIdMismatch) { + this.vocabularyIdMismatch = vocabularyIdMismatch; + } + + public Boolean getConceptClassIdMismatch() { + return conceptClassIdMismatch; + } + + public void setConceptClassIdMismatch(Boolean conceptClassIdMismatch) { + this.conceptClassIdMismatch = conceptClassIdMismatch; + } + + public Boolean getValidStartDateMismatch() { + return validStartDateMismatch; + } + + public void setValidStartDateMismatch(Boolean validStartDateMismatch) { + this.validStartDateMismatch = validStartDateMismatch; + } + + 
public Boolean getValidEndDateMismatch() { + return validEndDateMismatch; + } + + public void setValidEndDateMismatch(Boolean validEndDateMismatch) { + this.validEndDateMismatch = validEndDateMismatch; + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/service/cscompare/entity/ConceptSetCompareJobEntity.java b/src/main/java/org/ohdsi/webapi/service/cscompare/entity/ConceptSetCompareJobEntity.java new file mode 100644 index 0000000000..45a2de07f3 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/service/cscompare/entity/ConceptSetCompareJobEntity.java @@ -0,0 +1,275 @@ +package org.ohdsi.webapi.service.cscompare.entity; + +import org.hibernate.annotations.GenericGenerator; +import org.hibernate.annotations.Parameter; + +import javax.persistence.CascadeType; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.FetchType; +import javax.persistence.GeneratedValue; +import javax.persistence.Id; +import javax.persistence.OneToMany; +import javax.persistence.Table; +import java.io.Serializable; +import java.time.LocalDate; +import java.util.HashSet; +import java.util.Set; + +@Entity +@Table(name = "CONCEPT_SET_COMPARE_JOB") +public class ConceptSetCompareJobEntity implements Serializable { + + @Id + @Column(name = "ID") + @GenericGenerator( + name = "concept_set_compare_job_generator", + strategy = "org.hibernate.id.enhanced.SequenceStyleGenerator", + parameters = { + @Parameter(name = "sequence_name", value = "CONCEPT_SET_COMPARE_JOB_SEQUENCE"), + @Parameter(name = "initial_value", value = "1"), + @Parameter(name = "increment_size", value = "1") + } + ) + @GeneratedValue(generator = "concept_set_compare_job_generator") + private Integer id; + + @Column(name = "EXECUTION_ID", unique = true) + private Long executionId; + + @Column(name = "SOURCE_1_KEY", nullable = false, length = 50) + private String source1Key; + + @Column(name = "SOURCE_2_KEY", nullable = false, length = 50) + private String source2Key; + + @Column(name = "VOCAB_1_VERSION", length = 255) + private String vocab1Version; + + @Column(name = "VOCAB_2_VERSION", length = 255) + private String vocab2Version; + + @Column(name = "CREATED_FROM") + private LocalDate createdFrom; + + @Column(name = "CREATED_TO") + private LocalDate createdTo; + + @Column(name = "UPDATED_FROM") + private LocalDate updatedFrom; + + @Column(name = "UPDATED_TO") + private LocalDate updatedTo; + + @Column(name = "TAGS", length = 5000) + private String tags; + + @Column(name = "SKIP_LOCKED", nullable = false) + private Boolean skipLocked; + + @Column(name = "COMPARE_SOURCE_CODES", nullable = false) + private Boolean compareSourceCodes; + + @Column(name = "CONCEPT_SETS_ANALYZED") + private Integer conceptSetsAnalyzed; + + @Column(name = "CONCEPT_SETS_WITH_DIFFS") + private Integer conceptSetsWithDiffs; + + @OneToMany(mappedBy = "compareJob", cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.EAGER) + private Set<ConceptSetCompareJobAuthorEntity> authors = new HashSet<>(); + + @OneToMany(mappedBy = "compareJob", cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.LAZY) + private Set<ConceptSetCompareJobDiffEntity> differences = new HashSet<>(); + + @OneToMany(mappedBy = "compareJob", cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.LAZY) + private Set<ConceptSetCompareJobStatsEntity> statistics = new HashSet<>(); + + @Column(name = "concept_set_ids", length = 5000) + private String conceptSetIds; + + public Integer getId() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + + public Long getExecutionId() { + return 
executionId; + } + + public void setExecutionId(Long executionId) { + this.executionId = executionId; + } + + public String getSource1Key() { + return source1Key; + } + + public void setSource1Key(String source1Key) { + this.source1Key = source1Key; + } + + public String getSource2Key() { + return source2Key; + } + + public void setSource2Key(String source2Key) { + this.source2Key = source2Key; + } + + public String getVocab1Version() { + return vocab1Version; + } + + public void setVocab1Version(String vocab1Version) { + this.vocab1Version = vocab1Version; + } + + public String getVocab2Version() { + return vocab2Version; + } + + public void setVocab2Version(String vocab2Version) { + this.vocab2Version = vocab2Version; + } + + public LocalDate getCreatedFrom() { + return createdFrom; + } + + public void setCreatedFrom(LocalDate createdFrom) { + this.createdFrom = createdFrom; + } + + public LocalDate getCreatedTo() { + return createdTo; + } + + public void setCreatedTo(LocalDate createdTo) { + this.createdTo = createdTo; + } + + public LocalDate getUpdatedFrom() { + return updatedFrom; + } + + public void setUpdatedFrom(LocalDate updatedFrom) { + this.updatedFrom = updatedFrom; + } + + public LocalDate getUpdatedTo() { + return updatedTo; + } + + public void setUpdatedTo(LocalDate updatedTo) { + this.updatedTo = updatedTo; + } + + public String getTags() { + return tags; + } + + public void setTags(String tags) { + this.tags = tags; + } + + public Boolean getSkipLocked() { + return skipLocked; + } + + public void setSkipLocked(Boolean skipLocked) { + this.skipLocked = skipLocked; + } + + public Boolean getCompareSourceCodes() { + return compareSourceCodes; + } + + public void setCompareSourceCodes(Boolean compareSourceCodes) { + this.compareSourceCodes = compareSourceCodes; + } + + public Integer getConceptSetsAnalyzed() { + return conceptSetsAnalyzed; + } + + public void setConceptSetsAnalyzed(Integer conceptSetsAnalyzed) { + this.conceptSetsAnalyzed = conceptSetsAnalyzed; + } + + public Integer getConceptSetsWithDiffs() { + return conceptSetsWithDiffs; + } + + public void setConceptSetsWithDiffs(Integer conceptSetsWithDiffs) { + this.conceptSetsWithDiffs = conceptSetsWithDiffs; + } + + public Set<ConceptSetCompareJobAuthorEntity> getAuthors() { + return authors; + } + + public void setAuthors(Set<ConceptSetCompareJobAuthorEntity> authors) { + this.authors = authors; + } + + public Set<ConceptSetCompareJobDiffEntity> getDifferences() { + return differences; + } + + public void setDifferences(Set<ConceptSetCompareJobDiffEntity> differences) { + this.differences = differences; + } + + public Set<ConceptSetCompareJobStatsEntity> getStatistics() { + return statistics; + } + + public void setStatistics(Set<ConceptSetCompareJobStatsEntity> statistics) { + this.statistics = statistics; + } + + public String getConceptSetIds() { + return conceptSetIds; + } + + public void setConceptSetIds(String conceptSetIds) { + this.conceptSetIds = conceptSetIds; + } + + // Helper methods for managing authors + public void addAuthor(ConceptSetCompareJobAuthorEntity author) { + authors.add(author); + author.setCompareJob(this); + } + + public void removeAuthor(ConceptSetCompareJobAuthorEntity author) { + authors.remove(author); + author.setCompareJob(null); + } + + // Helper methods for managing differences + public void addDifference(ConceptSetCompareJobDiffEntity difference) { + differences.add(difference); + difference.setCompareJob(this); + } + + public void removeDifference(ConceptSetCompareJobDiffEntity difference) { + differences.remove(difference); + difference.setCompareJob(null); + } + + // Helper methods for managing statistics + public void addStatistic(ConceptSetCompareJobStatsEntity 
statistic) { + statistics.add(statistic); + statistic.setCompareJob(this); + } + + public void removeStatistic(ConceptSetCompareJobStatsEntity statistic) { + statistics.remove(statistic); + statistic.setCompareJob(null); + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/service/cscompare/entity/ConceptSetCompareJobStatsEntity.java b/src/main/java/org/ohdsi/webapi/service/cscompare/entity/ConceptSetCompareJobStatsEntity.java new file mode 100644 index 0000000000..8278742161 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/service/cscompare/entity/ConceptSetCompareJobStatsEntity.java @@ -0,0 +1,119 @@ +package org.ohdsi.webapi.service.cscompare.entity; + +import org.hibernate.annotations.GenericGenerator; +import org.hibernate.annotations.Parameter; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.FetchType; +import javax.persistence.GeneratedValue; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.ManyToOne; +import javax.persistence.Table; +import java.io.Serializable; + +@Entity +@Table(name = "CONCEPT_SET_COMPARE_JOB_STATS") +public class ConceptSetCompareJobStatsEntity implements Serializable { + + @Id + @Column(name = "ID") + @GenericGenerator( + name = "concept_set_compare_job_stats_generator", + strategy = "org.hibernate.id.enhanced.SequenceStyleGenerator", + parameters = { + @Parameter(name = "sequence_name", value = "CONCEPT_SET_COMPARE_JOB_STATS_SEQUENCE"), + @Parameter(name = "initial_value", value = "1"), + @Parameter(name = "increment_size", value = "1") + } + ) + @GeneratedValue(generator = "concept_set_compare_job_stats_generator") + private Integer id; + + @ManyToOne(fetch = FetchType.LAZY) + @JoinColumn(name = "COMPARE_JOB_ID", nullable = false) + private ConceptSetCompareJobEntity compareJob; + + @Column(name = "CONCEPT_SET_ID", nullable = false) + private Integer conceptSetId; + + @Column(name = "CS1_INCLUDED_CONCEPTS_COUNT", nullable = false) + private Integer cs1IncludedConceptsCount; + + @Column(name = "CS1_INCLUDED_SOURCE_CODES_COUNT", nullable = false) + private Integer cs1IncludedSourceCodesCount; + + @Column(name = "CS2_INCLUDED_CONCEPTS_COUNT", nullable = false) + private Integer cs2IncludedConceptsCount; + + @Column(name = "CS2_INCLUDED_SOURCE_CODES_COUNT", nullable = false) + private Integer cs2IncludedSourceCodesCount; + + @Column(name = "HAS_DIFFERENCES", nullable = false) + private Boolean hasDifferences; + + public Integer getId() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + + public ConceptSetCompareJobEntity getCompareJob() { + return compareJob; + } + + public void setCompareJob(ConceptSetCompareJobEntity compareJob) { + this.compareJob = compareJob; + } + + public Integer getConceptSetId() { + return conceptSetId; + } + + public void setConceptSetId(Integer conceptSetId) { + this.conceptSetId = conceptSetId; + } + + public Integer getCs1IncludedConceptsCount() { + return cs1IncludedConceptsCount; + } + + public void setCs1IncludedConceptsCount(Integer cs1IncludedConceptsCount) { + this.cs1IncludedConceptsCount = cs1IncludedConceptsCount; + } + + public Integer getCs1IncludedSourceCodesCount() { + return cs1IncludedSourceCodesCount; + } + + public void setCs1IncludedSourceCodesCount(Integer cs1IncludedSourceCodesCount) { + this.cs1IncludedSourceCodesCount = cs1IncludedSourceCodesCount; + } + + public Integer getCs2IncludedConceptsCount() { + return cs2IncludedConceptsCount; + } + + public void 
setCs2IncludedConceptsCount(Integer cs2IncludedConceptsCount) { + this.cs2IncludedConceptsCount = cs2IncludedConceptsCount; + } + + public Integer getCs2IncludedSourceCodesCount() { + return cs2IncludedSourceCodesCount; + } + + public void setCs2IncludedSourceCodesCount(Integer cs2IncludedSourceCodesCount) { + this.cs2IncludedSourceCodesCount = cs2IncludedSourceCodesCount; + } + + public Boolean getHasDifferences() { + return hasDifferences; + } + + public void setHasDifferences(Boolean hasDifferences) { + this.hasDifferences = hasDifferences; + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/service/cscompare/repository/ConceptSetCompareJobAuthorRepository.java b/src/main/java/org/ohdsi/webapi/service/cscompare/repository/ConceptSetCompareJobAuthorRepository.java new file mode 100644 index 0000000000..f54b8256b7 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/service/cscompare/repository/ConceptSetCompareJobAuthorRepository.java @@ -0,0 +1,7 @@ +package org.ohdsi.webapi.service.cscompare.repository; + +import org.ohdsi.webapi.service.cscompare.entity.ConceptSetCompareJobAuthorEntity; +import org.springframework.data.jpa.repository.JpaRepository; + +public interface ConceptSetCompareJobAuthorRepository extends JpaRepository<ConceptSetCompareJobAuthorEntity, Integer> { +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/service/cscompare/repository/ConceptSetCompareJobDiffRepository.java b/src/main/java/org/ohdsi/webapi/service/cscompare/repository/ConceptSetCompareJobDiffRepository.java new file mode 100644 index 0000000000..49f0cfb1ef --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/service/cscompare/repository/ConceptSetCompareJobDiffRepository.java @@ -0,0 +1,113 @@ +package org.ohdsi.webapi.service.cscompare.repository; + +import com.cosium.spring.data.jpa.entity.graph.repository.EntityGraphJpaRepository; +import org.ohdsi.webapi.service.cscompare.entity.ConceptSetCompareJobDiffEntity; +import org.ohdsi.webapi.service.cscompare.entity.ConceptSetCompareJobEntity; +import org.springframework.data.jpa.repository.Modifying; +import org.springframework.data.jpa.repository.Query; +import org.springframework.data.repository.query.Param; + +import java.util.List; +import java.util.Optional; + +public interface ConceptSetCompareJobDiffRepository extends EntityGraphJpaRepository<ConceptSetCompareJobDiffEntity, Integer> { + + Optional<ConceptSetCompareJobDiffEntity> findById(Integer id); + + @Query("SELECT diff FROM ConceptSetCompareJobDiffEntity diff WHERE diff.compareJob = :compareJob") + List<ConceptSetCompareJobDiffEntity> findByCompareJob(@Param("compareJob") ConceptSetCompareJobEntity compareJob); + + @Query("SELECT diff FROM ConceptSetCompareJobDiffEntity diff WHERE diff.compareJob.id = :jobId") + List<ConceptSetCompareJobDiffEntity> findByCompareJobId(@Param("jobId") Integer jobId); + + @Query("SELECT diff FROM ConceptSetCompareJobDiffEntity diff WHERE diff.conceptSetId = :conceptSetId") + List<ConceptSetCompareJobDiffEntity> findByConceptSetId(@Param("conceptSetId") Integer conceptSetId); + + @Query("SELECT diff FROM ConceptSetCompareJobDiffEntity diff WHERE diff.conceptId = :conceptId") + List<ConceptSetCompareJobDiffEntity> findByConceptId(@Param("conceptId") Integer conceptId); + + // Concept Set membership queries (CS1/CS2) + @Query("SELECT diff FROM ConceptSetCompareJobDiffEntity diff WHERE diff.compareJob.id = :jobId AND diff.conceptInCS1Only > 0") + List<ConceptSetCompareJobDiffEntity> findInCS1OnlyByJobId(@Param("jobId") Integer jobId); + + @Query("SELECT diff FROM ConceptSetCompareJobDiffEntity diff WHERE diff.compareJob.id = :jobId AND diff.conceptInCS2Only > 0") + List<ConceptSetCompareJobDiffEntity> findInCS2OnlyByJobId(@Param("jobId") Integer jobId); + + @Query("SELECT diff FROM ConceptSetCompareJobDiffEntity diff WHERE diff.compareJob.id = :jobId AND diff.conceptInCS1AndCS2 > 
0") + List<ConceptSetCompareJobDiffEntity> findInBothCSByJobId(@Param("jobId") Integer jobId); + + // Name mismatch queries + @Query("SELECT diff FROM ConceptSetCompareJobDiffEntity diff WHERE diff.compareJob.id = :jobId AND diff.nameMismatch = true") + List<ConceptSetCompareJobDiffEntity> findNameMismatchesByJobId(@Param("jobId") Integer jobId); + + // Count queries + @Query("SELECT COUNT(diff) FROM ConceptSetCompareJobDiffEntity diff WHERE diff.compareJob.id = :jobId") + Long countByCompareJobId(@Param("jobId") Integer jobId); + + @Query("SELECT COUNT(diff) FROM ConceptSetCompareJobDiffEntity diff WHERE diff.compareJob.id = :jobId AND diff.conceptInCS1Only > 0") + Long countInCS1OnlyByJobId(@Param("jobId") Integer jobId); + + @Query("SELECT COUNT(diff) FROM ConceptSetCompareJobDiffEntity diff WHERE diff.compareJob.id = :jobId AND diff.conceptInCS2Only > 0") + Long countInCS2OnlyByJobId(@Param("jobId") Integer jobId); + + @Query("SELECT COUNT(diff) FROM ConceptSetCompareJobDiffEntity diff WHERE diff.compareJob.id = :jobId AND diff.conceptInCS1AndCS2 > 0") + Long countInBothCSByJobId(@Param("jobId") Integer jobId); + + @Query("SELECT COUNT(diff) FROM ConceptSetCompareJobDiffEntity diff WHERE diff.compareJob.id = :jobId AND diff.nameMismatch = true") + Long countNameMismatchesByJobId(@Param("jobId") Integer jobId); + + // Combined queries + @Query("SELECT diff FROM ConceptSetCompareJobDiffEntity diff WHERE diff.compareJob.id = :jobId AND diff.conceptSetId = :conceptSetId") + List<ConceptSetCompareJobDiffEntity> findByJobIdAndConceptSetId(@Param("jobId") Integer jobId, @Param("conceptSetId") Integer conceptSetId); + + @Query("SELECT diff FROM ConceptSetCompareJobDiffEntity diff " + + "WHERE diff.compareJob.id = :jobId AND diff.conceptSetId = :conceptSetId AND diff.conceptInCS1Only > 0") + List<ConceptSetCompareJobDiffEntity> findInCS1OnlyByJobIdAndConceptSetId(@Param("jobId") Integer jobId, @Param("conceptSetId") Integer conceptSetId); + + @Query("SELECT diff FROM ConceptSetCompareJobDiffEntity diff " + + "WHERE diff.compareJob.id = :jobId AND diff.conceptSetId = :conceptSetId AND diff.conceptInCS2Only > 0") + List<ConceptSetCompareJobDiffEntity> findInCS2OnlyByJobIdAndConceptSetId(@Param("jobId") Integer jobId, @Param("conceptSetId") Integer conceptSetId); + + @Query("SELECT diff FROM ConceptSetCompareJobDiffEntity diff " + + "WHERE diff.compareJob.id = :jobId AND diff.conceptSetId = :conceptSetId AND diff.nameMismatch = true") + List<ConceptSetCompareJobDiffEntity> findNameMismatchesByJobIdAndConceptSetId(@Param("jobId") Integer jobId, @Param("conceptSetId") Integer conceptSetId); + + // Summary statistics query + @Query("SELECT new map(" + + "COUNT(diff) as totalDiffs, " + + "SUM(CASE WHEN diff.conceptInCS1Only > 0 THEN 1 ELSE 0 END) as cs1OnlyCount, " + + "SUM(CASE WHEN diff.conceptInCS2Only > 0 THEN 1 ELSE 0 END) as cs2OnlyCount, " + + "SUM(CASE WHEN diff.conceptInCS1AndCS2 > 0 THEN 1 ELSE 0 END) as bothCSCount, " + + "SUM(CASE WHEN diff.nameMismatch = true THEN 1 ELSE 0 END) as nameMismatchCount) " + + "FROM ConceptSetCompareJobDiffEntity diff WHERE diff.compareJob.id = :jobId") + Object getSummaryStatsByJobId(@Param("jobId") Integer jobId); + + // Get all concept sets with differences for a job + @Query("SELECT DISTINCT diff.conceptSetId FROM ConceptSetCompareJobDiffEntity diff WHERE diff.compareJob.id = :jobId") + List<Integer> findDistinctConceptSetIdsByJobId(@Param("jobId") Integer jobId); + + // Delete operations + @Modifying + @Query("DELETE FROM ConceptSetCompareJobDiffEntity diff WHERE diff.compareJob.id = :jobId") + void deleteByCompareJobId(@Param("jobId") Integer jobId); + + @Modifying + @Query("DELETE FROM ConceptSetCompareJobDiffEntity diff WHERE diff.compareJob = :compareJob") + void 
deleteByCompareJob(@Param("compareJob") ConceptSetCompareJobEntity compareJob); + + // Fetch with eager loading for performance + @Query("SELECT diff FROM ConceptSetCompareJobDiffEntity diff " + + "LEFT JOIN FETCH diff.compareJob " + + "WHERE diff.compareJob.id = :jobId") + List<ConceptSetCompareJobDiffEntity> findByCompareJobIdWithJob(@Param("jobId") Integer jobId); + + // Find differences for specific concept across all jobs + @Query("SELECT diff FROM ConceptSetCompareJobDiffEntity diff " + + "LEFT JOIN FETCH diff.compareJob " + + "WHERE diff.conceptId = :conceptId " + + "ORDER BY diff.compareJob.id DESC") + List<ConceptSetCompareJobDiffEntity> findByConceptIdWithJob(@Param("conceptId") Integer conceptId); + + // Paginated query for large result sets + @Query("SELECT diff FROM ConceptSetCompareJobDiffEntity diff " + + "WHERE diff.compareJob.id = :jobId " + + "ORDER BY diff.conceptSetId, diff.conceptId") + List<ConceptSetCompareJobDiffEntity> findByCompareJobIdOrdered(@Param("jobId") Integer jobId); +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/service/cscompare/repository/ConceptSetCompareJobRepository.java b/src/main/java/org/ohdsi/webapi/service/cscompare/repository/ConceptSetCompareJobRepository.java new file mode 100644 index 0000000000..4f96fe2a43 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/service/cscompare/repository/ConceptSetCompareJobRepository.java @@ -0,0 +1,35 @@ +package org.ohdsi.webapi.service.cscompare.repository; + +import com.cosium.spring.data.jpa.entity.graph.repository.EntityGraphJpaRepository; +import org.ohdsi.webapi.service.cscompare.entity.ConceptSetCompareJobEntity; +import org.springframework.data.jpa.repository.Query; +import org.springframework.data.repository.query.Param; + +import java.time.LocalDate; +import java.util.List; +import java.util.Optional; + +public interface ConceptSetCompareJobRepository extends EntityGraphJpaRepository<ConceptSetCompareJobEntity, Integer> { + + Optional<ConceptSetCompareJobEntity> findById(Integer id); + + @Query("SELECT job FROM ConceptSetCompareJobEntity job WHERE job.source1Key = :source1Key AND job.source2Key = :source2Key") + List<ConceptSetCompareJobEntity> findBySourceKeys(@Param("source1Key") String source1Key, @Param("source2Key") String source2Key); + + @Query("SELECT job FROM ConceptSetCompareJobEntity job WHERE job.source1Key = :sourceKey OR job.source2Key = :sourceKey") + List<ConceptSetCompareJobEntity> findBySourceKey(@Param("sourceKey") String sourceKey); + + @Query("SELECT job FROM ConceptSetCompareJobEntity job WHERE job.createdFrom >= :fromDate AND job.createdTo <= :toDate") + List<ConceptSetCompareJobEntity> findByCreatedDateRange(@Param("fromDate") LocalDate fromDate, @Param("toDate") LocalDate toDate); + + @Query("SELECT job FROM ConceptSetCompareJobEntity job WHERE job.updatedFrom >= :fromDate AND job.updatedTo <= :toDate") + List<ConceptSetCompareJobEntity> findByUpdatedDateRange(@Param("fromDate") LocalDate fromDate, @Param("toDate") LocalDate toDate); + + @Query("SELECT job FROM ConceptSetCompareJobEntity job WHERE job.skipLocked = :skipLocked") + List<ConceptSetCompareJobEntity> findBySkipLocked(@Param("skipLocked") Boolean skipLocked); + + @Query("SELECT job FROM ConceptSetCompareJobEntity job JOIN FETCH job.differences WHERE job.id = :jobId") + Optional<ConceptSetCompareJobEntity> findByIdWithDifferences(@Param("jobId") Integer jobId); + + Optional<ConceptSetCompareJobEntity> findByExecutionId(Long executionId); +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/service/cscompare/repository/ConceptSetCompareJobStatsRepository.java b/src/main/java/org/ohdsi/webapi/service/cscompare/repository/ConceptSetCompareJobStatsRepository.java new file mode 100644 index 0000000000..59554359de --- /dev/null +++ 
b/src/main/java/org/ohdsi/webapi/service/cscompare/repository/ConceptSetCompareJobStatsRepository.java @@ -0,0 +1,26 @@ +package org.ohdsi.webapi.service.cscompare.repository; + +import com.cosium.spring.data.jpa.entity.graph.repository.EntityGraphJpaRepository; +import org.ohdsi.webapi.service.cscompare.entity.ConceptSetCompareJobStatsEntity; +import org.springframework.data.jpa.repository.Query; +import org.springframework.data.repository.query.Param; + +import java.util.List; +import java.util.Optional; + +public interface ConceptSetCompareJobStatsRepository extends EntityGraphJpaRepository { + + List findByCompareJobId(Integer compareJobId); + + @Query("SELECT stats FROM ConceptSetCompareJobStatsEntity stats WHERE stats.compareJob.id = :compareJobId AND stats.conceptSetId = :conceptSetId") + Optional findByCompareJobIdAndConceptSetId( + @Param("compareJobId") Integer compareJobId, + @Param("conceptSetId") Integer conceptSetId + ); + + @Query("SELECT stats FROM ConceptSetCompareJobStatsEntity stats WHERE stats.compareJob.id = :compareJobId AND stats.hasDifferences = true") + List findByCompareJobIdWithDifferences(@Param("compareJobId") Integer compareJobId); + + @Query("SELECT COUNT(stats) FROM ConceptSetCompareJobStatsEntity stats WHERE stats.compareJob.id = :compareJobId AND stats.hasDifferences = true") + Long countByCompareJobIdWithDifferences(@Param("compareJobId") Integer compareJobId); +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/service/cscompare/repository/ConceptSetSpecifications.java b/src/main/java/org/ohdsi/webapi/service/cscompare/repository/ConceptSetSpecifications.java new file mode 100644 index 0000000000..f65e51b45d --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/service/cscompare/repository/ConceptSetSpecifications.java @@ -0,0 +1,74 @@ +package org.ohdsi.webapi.service.cscompare.repository; + +import org.ohdsi.webapi.conceptset.ConceptSet; +import org.ohdsi.webapi.shiro.Entities.UserEntity; +import org.ohdsi.webapi.tag.domain.Tag; +import org.springframework.data.jpa.domain.Specification; + +import javax.persistence.criteria.Join; +import javax.persistence.criteria.JoinType; +import javax.persistence.criteria.Predicate; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; + +public class ConceptSetSpecifications { + + public static Specification withAllFilters( + Date createdFrom, + Date createdTo, + Date updatedFrom, + Date updatedTo, + List tagIds, + List authorIds, + List conceptSetIds + ) { + return (root, query, criteriaBuilder) -> { + List predicates = new ArrayList<>(); + + // Created date range filter + if (createdFrom != null) { + predicates.add(criteriaBuilder.greaterThanOrEqualTo( + root.get("createdDate"), createdFrom)); + } + if (createdTo != null) { + predicates.add(criteriaBuilder.lessThanOrEqualTo( + root.get("createdDate"), createdTo)); + } + + // Modified date range filter + if (updatedFrom != null) { + predicates.add(criteriaBuilder.greaterThanOrEqualTo( + root.get("modifiedDate"), updatedFrom)); + } + if (updatedTo != null) { + predicates.add(criteriaBuilder.lessThanOrEqualTo( + root.get("modifiedDate"), updatedTo)); + } + + // Tag filter + if (tagIds != null && !tagIds.isEmpty()) { + Join tagJoin = root.join("tags", JoinType.INNER); + predicates.add(tagJoin.get("id").in(tagIds)); + } + + // Author filter (by user IDs) + if (authorIds != null && !authorIds.isEmpty()) { + Join createdByJoin = root.join("createdBy", JoinType.INNER); + predicates.add(createdByJoin.get("id").in(authorIds)); + } 
+ + // Make DISTINCT to avoid duplicates when joining tags or multiple authors + if ((tagIds != null && !tagIds.isEmpty()) || (authorIds != null && !authorIds.isEmpty())) { + query.distinct(true); + } + + // Concept Set ID filter + if (conceptSetIds != null && !conceptSetIds.isEmpty()) { + predicates.add(root.get("id").in(conceptSetIds)); + } + + return criteriaBuilder.and(predicates.toArray(new Predicate[0])); + }; + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/service/dto/CompareConceptSetsResponse.java b/src/main/java/org/ohdsi/webapi/service/dto/CompareConceptSetsResponse.java new file mode 100644 index 0000000000..dd91667ae7 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/service/dto/CompareConceptSetsResponse.java @@ -0,0 +1,42 @@ +package org.ohdsi.webapi.service.dto; + +import org.ohdsi.webapi.conceptset.ConceptSetComparison; + +import java.util.Collection; + +public class CompareConceptSetsResponse { + + private Collection comparisons; + private int cs1IncludedConceptsCount; + private int cs1IncludedSourceCodesCount; + private int cs2IncludedConceptsCount; + private int cs2IncludedSourceCodesCount; + + public CompareConceptSetsResponse(Collection comparisons, int cs1IncludedConceptsCount, int cs1IncludedSourceCodesCount, int cs2IncludedConceptsCount, int cs2IncludedSourceCodesCount) { + this.comparisons = comparisons; + this.cs1IncludedConceptsCount = cs1IncludedConceptsCount; + this.cs1IncludedSourceCodesCount = cs1IncludedSourceCodesCount; + this.cs2IncludedConceptsCount = cs2IncludedConceptsCount; + this.cs2IncludedSourceCodesCount = cs2IncludedSourceCodesCount; + } + + public Collection getComparisons() { + return comparisons; + } + + public int getCs1IncludedConceptsCount() { + return cs1IncludedConceptsCount; + } + + public int getCs1IncludedSourceCodesCount() { + return cs1IncludedSourceCodesCount; + } + + public int getCs2IncludedConceptsCount() { + return cs2IncludedConceptsCount; + } + + public int getCs2IncludedSourceCodesCount() { + return cs2IncludedSourceCodesCount; + } +} diff --git a/src/main/java/org/ohdsi/webapi/service/lock/ConceptSetLockingService.java b/src/main/java/org/ohdsi/webapi/service/lock/ConceptSetLockingService.java new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/main/java/org/ohdsi/webapi/source/SourceController.java b/src/main/java/org/ohdsi/webapi/source/SourceController.java index 3ebb2450c6..3ac8d68aa0 100644 --- a/src/main/java/org/ohdsi/webapi/source/SourceController.java +++ b/src/main/java/org/ohdsi/webapi/source/SourceController.java @@ -349,7 +349,7 @@ public Response delete(@PathParam("sourceId") Integer sourceId) throws Exception @Produces(MediaType.APPLICATION_JSON) @Transactional(noRollbackFor = CannotGetJdbcConnectionException.class) public SourceInfo checkConnection(@PathParam("key") final String sourceKey) { - + cleanSourceCache(); final Source source = sourceService.findBySourceKey(sourceKey); sourceService.checkConnection(source); return source.getSourceInfo(); diff --git a/src/main/java/org/ohdsi/webapi/util/GenericFileWriter.java b/src/main/java/org/ohdsi/webapi/util/GenericFileWriter.java new file mode 100644 index 0000000000..97d6910a4f --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/util/GenericFileWriter.java @@ -0,0 +1,90 @@ +package org.ohdsi.webapi.util; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Component; + 
+import javax.ws.rs.InternalServerErrorException; +import java.io.IOException; +import java.io.OutputStream; +import java.io.PrintWriter; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.function.Consumer; + +/** + * Generic utility for writing various types of files + */ +@Component +public class GenericFileWriter { + + private static final Logger LOG = LoggerFactory.getLogger(GenericFileWriter.class); + private final ObjectMapper objectMapper; + + public GenericFileWriter(ObjectMapper objectMapper) { + this.objectMapper = objectMapper; + } + + public GenericFileWriter() { + this.objectMapper = new ObjectMapper(); + } + + /** + * Write text content to a file using a PrintWriter consumer + * + * @param path the file path to write to + * @param writer consumer that writes content using PrintWriter + * @return the path to the written file + * @throws InternalServerErrorException if writing fails + */ + public Path writeTextFile(Path path, Consumer writer) { + try (OutputStream out = Files.newOutputStream(path); + PrintWriter printWriter = new PrintWriter(out)) { + writer.accept(printWriter); + return path; + } catch (IOException e) { + LOG.error("Failed to write text file to {}", path, e); + throw new InternalServerErrorException("Failed to write text file: " + e.getMessage()); + } + } + + /** + * Write an object as JSON to a file + * + * @param parentDir the parent directory + * @param object the object to serialize + * @param filename the filename + * @return the path to the written file + * @throws InternalServerErrorException if writing fails + */ + public Path writeObjectAsJsonFile(Path parentDir, Object object, String filename) { + try { + Path file = Files.createFile(parentDir.resolve(filename)); + try (OutputStream out = Files.newOutputStream(file)) { + objectMapper.writeValue(out, object); + } + return file; + } catch (IOException e) { + LOG.error("Failed to write JSON file {} in {}", filename, parentDir, e); + throw new InternalServerErrorException("Failed to write JSON file: " + e.getMessage()); + } + } + + /** + * Write a JsonNode directly to a file + * + * @param jsonNode the JSON content + * @param path the file path + * @throws InternalServerErrorException if writing fails + */ + public void writeJsonNodeToFile(JsonNode jsonNode, Path path) { + try { + objectMapper.writeValue(path.toFile(), jsonNode); + } catch (IOException e) { + LOG.error("Failed to write JsonNode to {}", path, e); + throw new InternalServerErrorException("Failed to write JSON file: " + e.getMessage()); + } + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/util/TempFileUtils.java b/src/main/java/org/ohdsi/webapi/util/TempFileUtils.java index 091211cd6a..11f0071c1f 100644 --- a/src/main/java/org/ohdsi/webapi/util/TempFileUtils.java +++ b/src/main/java/org/ohdsi/webapi/util/TempFileUtils.java @@ -4,7 +4,6 @@ import org.apache.commons.io.IOUtils; import java.io.File; -import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; @@ -12,32 +11,62 @@ import java.nio.file.Path; import java.util.function.Function; +/** + * Utilities for working with temporary files and directories + */ public class TempFileUtils { - public static File copyResourceToTempFile(String resource, String prefix, String suffix) throws IOException { + /** + * Copy a resource from classpath to a temporary file + * + * @param resource the resource path + * @param prefix temp file prefix + * @param suffix temp file suffix + * @return 
the temporary file + * @throws IOException if copying fails + */ + public static File copyResourceToTempFile(String resource, String prefix, String suffix) throws IOException { + File tempFile = File.createTempFile(prefix, suffix); + try (InputStream in = TempFileUtils.class.getResourceAsStream(resource)) { + try (OutputStream out = Files.newOutputStream(tempFile.toPath())) { + if (in == null) { + throw new IOException("Resource not found: " + resource); + } + IOUtils.copy(in, out); + } + } + return tempFile; + } - - File tempFile = File.createTempFile(prefix, suffix); - try(InputStream in = TempFileUtils.class.getResourceAsStream(resource)) { - try(OutputStream out = Files.newOutputStream(tempFile.toPath())) { - if(in == null) { - throw new IOException("File not found: " + resource); - } - IOUtils.copy(in, out); - } - } - return tempFile; - } + /** + * Execute an action in a temporary directory that is automatically cleaned up + * + * @param action the action to execute + * @param <T> the return type + * @return the result of the action + */ + public static <T> T doInDirectory(Function<Path, T> action) { + return doInDirectory("temp-", action); + } - public static <F> F doInDirectory(Function<Path, F> action) { - try { - Path tempDir = Files.createTempDirectory("webapi-"); - try { - return action.apply(tempDir); - } finally { - FileUtils.deleteQuietly(tempDir.toFile()); - } - } catch (IOException e) { - throw new RuntimeException("Failed to create temp directory, " + e.getMessage()); - } - } -} + /** + * Execute an action in a temporary directory with custom prefix + * + * @param prefix the directory prefix + * @param action the action to execute + * @param <T> the return type + * @return the result of the action + */ + public static <T> T doInDirectory(String prefix, Function<Path, T> action) { + try { + Path tempDir = Files.createTempDirectory(prefix); + try { + return action.apply(tempDir); + } finally { + FileUtils.deleteQuietly(tempDir.toFile()); + } + } catch (IOException e) { + throw new RuntimeException("Failed to create temp directory: " + e.getMessage(), e); + } + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/util/archive/ArchiveStrategies.java b/src/main/java/org/ohdsi/webapi/util/archive/ArchiveStrategies.java new file mode 100644 index 0000000000..ac10b8f929 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/util/archive/ArchiveStrategies.java @@ -0,0 +1,154 @@ +package org.ohdsi.webapi.util.archive; + +import com.odysseusinc.arachne.commons.utils.ZipUtils; +import org.apache.commons.compress.archivers.ArchiveEntry; +import org.apache.commons.compress.archivers.ArchiveOutputStream; +import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream; +import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream; +import org.apache.commons.io.IOUtils; + +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Objects; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +/** + * Factory for creating various archive packaging strategies + */ +public class ArchiveStrategies { + + /** + * Create a ZIP packaging strategy + * + * @return strategy that creates ZIP archives + */ + public static ArchiveStrategy zip() { + return new ZipArchiveStrategy(); + } + + /** + * Create a ZIP packaging strategy with custom prefix and suffix + * + * @param prefix the temp file prefix + * @param suffix the temp file suffix + * @return strategy that 
creates ZIP archives + */ + public static ArchiveStrategy zip(String prefix, String suffix) { + return new ZipArchiveStrategy(prefix, suffix); + } + + /** + * Create a TAR.GZ packaging strategy + * + * @return strategy that creates TAR.GZ archives + */ + public static ArchiveStrategy targz() { + return new TarGzArchiveStrategy(); + } + + /** + * Create a TAR.GZ packaging strategy with custom prefix and suffix + * + * @param prefix the temp file prefix + * @param suffix the temp file suffix + * @return strategy that creates TAR.GZ archives + */ + public static ArchiveStrategy targz(String prefix, String suffix) { + return new TarGzArchiveStrategy(prefix, suffix); + } + + /** + * ZIP archive strategy implementation + */ + private static class ZipArchiveStrategy implements ArchiveStrategy { + private final String prefix; + private final String suffix; + + public ZipArchiveStrategy() { + this("archive_", ".zip"); + } + + public ZipArchiveStrategy(String prefix, String suffix) { + this.prefix = prefix; + this.suffix = suffix; + } + + @Override + public Path apply(Path path) { + try { + Path archive = Files.createTempFile(prefix, suffix); + ZipUtils.zipDirectory(archive, path); + return archive; + } catch (IOException e) { + throw new RuntimeException("Failed to create ZIP archive: " + e.getMessage(), e); + } + } + } + + /** + * TAR.GZ archive strategy implementation + */ + private static class TarGzArchiveStrategy implements ArchiveStrategy { + private final String prefix; + private final String suffix; + + public TarGzArchiveStrategy() { + this("archive_", ".tar.gz"); + } + + public TarGzArchiveStrategy(String prefix, String suffix) { + this.prefix = prefix; + this.suffix = suffix; + } + + @Override + public Path apply(Path path) { + try { + Path archive = Files.createTempFile(prefix, suffix); + try (OutputStream out = Files.newOutputStream(archive); + OutputStream gzout = new GzipCompressorOutputStream(out); + ArchiveOutputStream arch = new TarArchiveOutputStream(gzout)) { + packDirectoryFiles(path, arch); + } + return archive; + } catch (IOException e) { + throw new RuntimeException("Failed to create TAR.GZ archive: " + e.getMessage(), e); + } + } + + private void packDirectoryFiles(Path path, ArchiveOutputStream arch) throws IOException { + packDirectoryFiles(path, null, arch); + } + + private void packDirectoryFiles(Path path, String parentDir, ArchiveOutputStream arch) throws IOException { + try (Stream files = Files.list(path)) { + files.forEach(p -> { + try { + File file = p.toFile(); + String filePath = Stream.of(parentDir, p.getFileName().toString()) + .filter(Objects::nonNull) + .collect(Collectors.joining("/")); + ArchiveEntry entry = arch.createArchiveEntry(file, filePath); + arch.putArchiveEntry(entry); + if (file.isFile()) { + try (InputStream in = Files.newInputStream(p)) { + IOUtils.copy(in, arch); + } + } + arch.closeArchiveEntry(); + if (file.isDirectory()) { + packDirectoryFiles(p, filePath, arch); + } + } catch (IOException e) { + throw new RuntimeException("Failed to pack file: " + p, e); + } + }); + } + } + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/util/archive/ArchiveStrategy.java b/src/main/java/org/ohdsi/webapi/util/archive/ArchiveStrategy.java new file mode 100644 index 0000000000..d14be3d4cf --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/util/archive/ArchiveStrategy.java @@ -0,0 +1,18 @@ +package org.ohdsi.webapi.util.archive; + +import java.nio.file.Path; +import java.util.function.Function; + +/** + * Strategy interface for 
packaging directories into archive files + */ +public interface ArchiveStrategy extends Function<Path, Path> { + /** + * Package the directory at the given path into an archive + * + * @param sourcePath the directory to package + * @return the path to the created archive file + */ + @Override + Path apply(Path sourcePath); +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/util/archive/TemporaryArchive.java b/src/main/java/org/ohdsi/webapi/util/archive/TemporaryArchive.java new file mode 100644 index 0000000000..2f8048ab01 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/util/archive/TemporaryArchive.java @@ -0,0 +1,28 @@ +package org.ohdsi.webapi.util.archive; + +import java.nio.file.Path; + +/** + * Represents a temporary archive file with metadata + */ +public class TemporaryArchive { + private final String filename; + private final Path archivePath; + + public TemporaryArchive(String filename, Path archivePath) { + this.filename = filename; + this.archivePath = archivePath; + } + + public String getFilename() { + return filename; + } + + public Path getArchivePath() { + return archivePath; + } + + public Path getPath() { + return archivePath; + } +} \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.16.0.20251015000001__add_vocab_compare_diff_permission_if_not_exists.sql b/src/main/resources/db/migration/postgresql/V2.16.0.20251015000001__add_vocab_compare_diff_permission_if_not_exists.sql new file mode 100644 index 0000000000..44aed5aae4 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.16.0.20251015000001__add_vocab_compare_diff_permission_if_not_exists.sql @@ -0,0 +1,26 @@ +INSERT INTO ${ohdsiSchema}.sec_permission (id, value, description) +SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), 'vocabulary:*:compare-diff-vocab:post', 'Concept sets comparison permission (compare over different vocabularies method)' +WHERE NOT EXISTS ( + SELECT NULL FROM ${ohdsiSchema}.sec_permission + WHERE value = 'vocabulary:*:compare-diff-vocab:post' +); + +INSERT INTO ${ohdsiSchema}.sec_permission (id, value, description) +SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), 'vocabulary:*:compare-arbitrary-diff-vocab:post', 'Concept sets comparison permission (compare-arbitrary over different vocabularies method)' +WHERE NOT EXISTS ( + SELECT NULL FROM ${ohdsiSchema}.sec_permission + WHERE value = 'vocabulary:*:compare-arbitrary-diff-vocab:post' +); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) +SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ( + 'vocabulary:*:compare-diff-vocab:post', + 'vocabulary:*:compare-arbitrary-diff-vocab:post' + ) AND sr.name IN ('Atlas users') + AND NOT EXISTS ( + SELECT NULL FROM ${ohdsiSchema}.sec_role_permission + WHERE permission_id = sp.id and role_id = sr.id); + + diff --git a/src/main/resources/db/migration/postgresql/V2.16.0.20251028000001__add_conceptset_batch_compare_diff_permission_if_not_exists.sql b/src/main/resources/db/migration/postgresql/V2.16.0.20251028000001__add_conceptset_batch_compare_diff_permission_if_not_exists.sql new file mode 100644 index 0000000000..f0ac04a46e --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.16.0.20251028000001__add_conceptset_batch_compare_diff_permission_if_not_exists.sql @@ -0,0 +1,17 @@ +INSERT INTO ${ohdsiSchema}.sec_permission (id, value, description) +SELECT 
nextval('${ohdsiSchema}.sec_permission_id_seq'), 'conceptset:compare-batch:post', 'Concept sets batch comparison permission (batch compare over different vocabularies job)' +WHERE NOT EXISTS ( + SELECT NULL FROM ${ohdsiSchema}.sec_permission + WHERE value = 'conceptset:compare-batch:post' +); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) +SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ('conceptset:compare-batch:post') +AND sr.name IN ('admin') + AND NOT EXISTS ( + SELECT NULL FROM ${ohdsiSchema}.sec_role_permission + WHERE permission_id = sp.id and role_id = sr.id); + + diff --git a/src/main/resources/db/migration/postgresql/V2.16.0.20251030000001__add_batch_compare_tables.sql b/src/main/resources/db/migration/postgresql/V2.16.0.20251030000001__add_batch_compare_tables.sql new file mode 100644 index 0000000000..7aa7f0dac4 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.16.0.20251030000001__add_batch_compare_tables.sql @@ -0,0 +1,50 @@ +CREATE SEQUENCE ${ohdsiSchema}.CONCEPT_SET_COMPARE_JOB_SEQUENCE + START WITH 1 + INCREMENT BY 1 + MAXVALUE 9223372036854775807 + NO CYCLE; + +CREATE SEQUENCE ${ohdsiSchema}.CONCEPT_SET_COMPARE_JOB_DIFF_SEQUENCE + START WITH 1 + INCREMENT BY 1 + MAXVALUE 9223372036854775807 + NO CYCLE; + +CREATE TABLE ${ohdsiSchema}.CONCEPT_SET_COMPARE_JOB ( + ID INTEGER NOT NULL PRIMARY KEY DEFAULT NEXTVAL('${ohdsiSchema}.CONCEPT_SET_COMPARE_JOB_SEQUENCE'), + SOURCE_1_KEY VARCHAR(50) NOT NULL, + SOURCE_2_KEY VARCHAR(50) NOT NULL, + VOCAB_1_VERSION VARCHAR(255), + VOCAB_2_VERSION VARCHAR(255), + CREATED_FROM DATE, + CREATED_TO DATE, + UPDATED_FROM DATE, + UPDATED_TO DATE, + TAGS VARCHAR(5000), + SKIP_LOCKED BOOLEAN NOT NULL +); + +CREATE TABLE ${ohdsiSchema}.CONCEPT_SET_COMPARE_JOB_DIFF ( + ID INTEGER NOT NULL PRIMARY KEY DEFAULT NEXTVAL('${ohdsiSchema}.CONCEPT_SET_COMPARE_JOB_DIFF_SEQUENCE'), + COMPARE_JOB_ID INTEGER NOT NULL, + CONCEPT_SET_ID INTEGER NOT NULL, + CONCEPT_ID INTEGER NOT NULL, + + -- Concept Set membership counts + CONCEPT_IN_CS1_ONLY BIGINT, + CONCEPT_IN_CS2_ONLY BIGINT, + CONCEPT_IN_CS1_AND_CS2 BIGINT, + + -- Concept names (only populated if name mismatch exists) + VOCAB1_CONCEPT_NAME VARCHAR(1000), + VOCAB2_CONCEPT_NAME VARCHAR(1000), + + NAME_MISMATCH BOOLEAN NOT NULL, + + FOREIGN KEY (COMPARE_JOB_ID) REFERENCES ${ohdsiSchema}.CONCEPT_SET_COMPARE_JOB(ID) ON DELETE CASCADE +); + +-- Create indexes for better query performance +CREATE INDEX idx_cs_compare_job_diff_job_id ON ${ohdsiSchema}.CONCEPT_SET_COMPARE_JOB_DIFF(COMPARE_JOB_ID); +CREATE INDEX idx_cs_compare_job_diff_cs_id ON ${ohdsiSchema}.CONCEPT_SET_COMPARE_JOB_DIFF(CONCEPT_SET_ID); +CREATE INDEX idx_cs_compare_job_diff_concept_id ON ${ohdsiSchema}.CONCEPT_SET_COMPARE_JOB_DIFF(CONCEPT_ID); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.16.0.20251118000001__add_batch_compare_get_artifact_permission_if_not_exists.sql b/src/main/resources/db/migration/postgresql/V2.16.0.20251118000001__add_batch_compare_get_artifact_permission_if_not_exists.sql new file mode 100644 index 0000000000..3c6d728138 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.16.0.20251118000001__add_batch_compare_get_artifact_permission_if_not_exists.sql @@ -0,0 +1,18 @@ +INSERT INTO ${ohdsiSchema}.sec_permission (id, value, description) +SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), 'job:*:artifact:get', 
'Concept sets artifact download permission' +WHERE NOT EXISTS ( + SELECT NULL FROM ${ohdsiSchema}.sec_permission + WHERE value = 'job:*:artifact:get' +); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) +SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ( + 'job:*:artifact:get' + ) AND sr.name IN ('admin') + AND NOT EXISTS ( + SELECT NULL FROM ${ohdsiSchema}.sec_role_permission + WHERE permission_id = sp.id and role_id = sr.id); + + diff --git a/src/main/resources/db/migration/postgresql/V2.16.0.20251118000002__add_batch_compare_result_execution_id.sql b/src/main/resources/db/migration/postgresql/V2.16.0.20251118000002__add_batch_compare_result_execution_id.sql new file mode 100644 index 0000000000..a298877188 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.16.0.20251118000002__add_batch_compare_result_execution_id.sql @@ -0,0 +1,8 @@ +ALTER TABLE ${ohdsiSchema}.concept_set_compare_job +ADD COLUMN execution_id BIGINT; + + +ALTER TABLE ${ohdsiSchema}.concept_set_compare_job +ADD CONSTRAINT uk_compare_job_execution_id UNIQUE (execution_id); + + diff --git a/src/main/resources/db/migration/postgresql/V2.16.0.20251121000001__add_compare_concepts_by_all_fields.sql b/src/main/resources/db/migration/postgresql/V2.16.0.20251121000001__add_compare_concepts_by_all_fields.sql new file mode 100644 index 0000000000..5b123a588f --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.16.0.20251121000001__add_compare_concepts_by_all_fields.sql @@ -0,0 +1,57 @@ +-- Add new columns for vocab1 concept attributes +ALTER TABLE ${ohdsiSchema}.concept_set_compare_job_diff +ADD COLUMN vocab1_standard_concept VARCHAR(1), +ADD COLUMN vocab1_invalid_reason VARCHAR(1), +ADD COLUMN vocab1_concept_code VARCHAR(50), +ADD COLUMN vocab1_domain_id VARCHAR(20), +ADD COLUMN vocab1_vocabulary_id VARCHAR(20), +ADD COLUMN vocab1_concept_class_id VARCHAR(20), +ADD COLUMN vocab1_valid_start_date DATE, +ADD COLUMN vocab1_valid_end_date DATE; + +-- Add new columns for vocab2 concept attributes +ALTER TABLE ${ohdsiSchema}.concept_set_compare_job_diff +ADD COLUMN vocab2_standard_concept VARCHAR(1), +ADD COLUMN vocab2_invalid_reason VARCHAR(1), +ADD COLUMN vocab2_concept_code VARCHAR(50), +ADD COLUMN vocab2_domain_id VARCHAR(20), +ADD COLUMN vocab2_vocabulary_id VARCHAR(20), +ADD COLUMN vocab2_concept_class_id VARCHAR(20), +ADD COLUMN vocab2_valid_start_date DATE, +ADD COLUMN vocab2_valid_end_date DATE; + +-- Add mismatch flag columns +ALTER TABLE ${ohdsiSchema}.concept_set_compare_job_diff +ADD COLUMN standard_concept_mismatch BOOLEAN NOT NULL DEFAULT FALSE, +ADD COLUMN invalid_reason_mismatch BOOLEAN NOT NULL DEFAULT FALSE, +ADD COLUMN concept_code_mismatch BOOLEAN NOT NULL DEFAULT FALSE, +ADD COLUMN domain_id_mismatch BOOLEAN NOT NULL DEFAULT FALSE, +ADD COLUMN vocabulary_id_mismatch BOOLEAN NOT NULL DEFAULT FALSE, +ADD COLUMN concept_class_id_mismatch BOOLEAN NOT NULL DEFAULT FALSE, +ADD COLUMN valid_start_date_mismatch BOOLEAN NOT NULL DEFAULT FALSE, +ADD COLUMN valid_end_date_mismatch BOOLEAN NOT NULL DEFAULT FALSE; + +-- Add indexes for performance on mismatch columns (works on all databases) +CREATE INDEX idx_cscdiff_standard_concept_mismatch +ON ${ohdsiSchema}.concept_set_compare_job_diff(standard_concept_mismatch); + +CREATE INDEX idx_cscdiff_invalid_reason_mismatch +ON ${ohdsiSchema}.concept_set_compare_job_diff(invalid_reason_mismatch); + +CREATE INDEX 
idx_cscdiff_concept_code_mismatch +ON ${ohdsiSchema}.concept_set_compare_job_diff(concept_code_mismatch); + +CREATE INDEX idx_cscdiff_domain_id_mismatch +ON ${ohdsiSchema}.concept_set_compare_job_diff(domain_id_mismatch); + +CREATE INDEX idx_cscdiff_vocabulary_id_mismatch +ON ${ohdsiSchema}.concept_set_compare_job_diff(vocabulary_id_mismatch); + +CREATE INDEX idx_cscdiff_concept_class_id_mismatch +ON ${ohdsiSchema}.concept_set_compare_job_diff(concept_class_id_mismatch); + +CREATE INDEX idx_cscdiff_valid_start_date_mismatch +ON ${ohdsiSchema}.concept_set_compare_job_diff(valid_start_date_mismatch); + +CREATE INDEX idx_cscdiff_valid_end_date_mismatch +ON ${ohdsiSchema}.concept_set_compare_job_diff(valid_end_date_mismatch); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.16.0.20251125000001__add_compare_source_codes_results.sql b/src/main/resources/db/migration/postgresql/V2.16.0.20251125000001__add_compare_source_codes_results.sql new file mode 100644 index 0000000000..2177b97e2f --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.16.0.20251125000001__add_compare_source_codes_results.sql @@ -0,0 +1,20 @@ +-- Add author and compareSourceCodes columns to concept_set_compare_job table +ALTER TABLE ${ohdsiSchema}.concept_set_compare_job +ADD COLUMN author VARCHAR(1024), +ADD COLUMN compare_source_codes BOOLEAN NOT NULL DEFAULT FALSE; + +-- Add isSourceCode column to concept_set_compare_job_diff table +ALTER TABLE ${ohdsiSchema}.concept_set_compare_job_diff +ADD COLUMN is_source_code BOOLEAN NOT NULL DEFAULT FALSE; + +-- Add index on author for filtering +CREATE INDEX idx_cs_compare_job_author +ON ${ohdsiSchema}.concept_set_compare_job(author); + +-- Add index on is_source_code for filtering +CREATE INDEX idx_cs_compare_job_diff_is_source_code +ON ${ohdsiSchema}.concept_set_compare_job_diff(is_source_code); + +-- Add composite index for common query patterns +CREATE INDEX idx_cs_compare_job_diff_job_source_code +ON ${ohdsiSchema}.concept_set_compare_job_diff(compare_job_id, is_source_code); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.16.0.20251128000001__add_batch_compare_concept_set_counts_stats.sql b/src/main/resources/db/migration/postgresql/V2.16.0.20251128000001__add_batch_compare_concept_set_counts_stats.sql new file mode 100644 index 0000000000..53108f5faf --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.16.0.20251128000001__add_batch_compare_concept_set_counts_stats.sql @@ -0,0 +1,9 @@ +ALTER TABLE ${ohdsiSchema}.concept_set_compare_job +ADD COLUMN concept_sets_analyzed INTEGER, +ADD COLUMN concept_sets_with_diffs INTEGER; + +CREATE INDEX idx_cs_compare_job_analyzed +ON ${ohdsiSchema}.concept_set_compare_job(concept_sets_analyzed); + +CREATE INDEX idx_cs_compare_job_with_diffs +ON ${ohdsiSchema}.concept_set_compare_job(concept_sets_with_diffs); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.16.0.20251128000002__batch_compare_conceptset_filter_counts_permission.sql b/src/main/resources/db/migration/postgresql/V2.16.0.20251128000002__batch_compare_conceptset_filter_counts_permission.sql new file mode 100644 index 0000000000..b8dc96e41b --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.16.0.20251128000002__batch_compare_conceptset_filter_counts_permission.sql @@ -0,0 +1,18 @@ +INSERT INTO ${ohdsiSchema}.sec_permission (id, value, description) +SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), 
'conceptset:check-filter-count:post', 'Access check conceptset filter count for batch Concept Set compare' +WHERE NOT EXISTS ( + SELECT NULL FROM ${ohdsiSchema}.sec_permission + WHERE value = 'conceptset:check-filter-count:post' +); + +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) +SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ( + 'conceptset:check-filter-count:post' + ) AND sr.name IN ('admin') + AND NOT EXISTS ( + SELECT NULL FROM ${ohdsiSchema}.sec_role_permission + WHERE permission_id = sp.id and role_id = sr.id); + + diff --git a/src/main/resources/db/migration/postgresql/V2.16.0.20251128000003__batch_compare_multiple_authors.sql b/src/main/resources/db/migration/postgresql/V2.16.0.20251128000003__batch_compare_multiple_authors.sql new file mode 100644 index 0000000000..58ab713ae4 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.16.0.20251128000003__batch_compare_multiple_authors.sql @@ -0,0 +1,26 @@ +-- Create a join table for the many-to-many relationship between compare jobs and authors +CREATE SEQUENCE ${ohdsiSchema}.CONCEPT_SET_COMPARE_JOB_AUTHOR_SEQUENCE + START WITH 1 + INCREMENT BY 1 + MAXVALUE 9223372036854775807 + NO CYCLE; + +CREATE TABLE ${ohdsiSchema}.CONCEPT_SET_COMPARE_JOB_AUTHOR ( + ID INTEGER NOT NULL PRIMARY KEY DEFAULT NEXTVAL('${ohdsiSchema}.CONCEPT_SET_COMPARE_JOB_AUTHOR_SEQUENCE'), + COMPARE_JOB_ID INTEGER NOT NULL, + USER_ID INTEGER NOT NULL, + CONSTRAINT FK_COMPARE_JOB_AUTHOR_JOB FOREIGN KEY (COMPARE_JOB_ID) + REFERENCES ${ohdsiSchema}.CONCEPT_SET_COMPARE_JOB(ID) ON DELETE CASCADE, + CONSTRAINT FK_COMPARE_JOB_AUTHOR_USER FOREIGN KEY (USER_ID) + REFERENCES ${ohdsiSchema}.SEC_USER(ID) ON DELETE CASCADE +); + +-- Create index for better query performance +CREATE INDEX idx_cs_compare_job_author_job_id + ON ${ohdsiSchema}.CONCEPT_SET_COMPARE_JOB_AUTHOR(COMPARE_JOB_ID); + +CREATE INDEX idx_cs_compare_job_author_user_id + ON ${ohdsiSchema}.CONCEPT_SET_COMPARE_JOB_AUTHOR(USER_ID); + +-- Remove the old author column from concept_set_compare_job +ALTER TABLE ${ohdsiSchema}.concept_set_compare_job DROP COLUMN author; \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.16.0.20251207000001__batch_compare_remap_all_permissions_to_admin.sql b/src/main/resources/db/migration/postgresql/V2.16.0.20251207000001__batch_compare_remap_all_permissions_to_admin.sql new file mode 100644 index 0000000000..2c569cb202 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.16.0.20251207000001__batch_compare_remap_all_permissions_to_admin.sql @@ -0,0 +1,30 @@ +-- Remap permissions from 'Atlas users' to 'admin' role + +-- Remove existing role_permission mappings for 'Atlas users' +DELETE FROM ${ohdsiSchema}.sec_role_permission +WHERE permission_id IN ( + SELECT id FROM ${ohdsiSchema}.sec_permission + WHERE value IN ( + 'job:*:artifact:get', + 'conceptset:check-filter-count:post', + 'conceptset:compare-batch:post' + ) +) +AND role_id IN ( + SELECT id FROM ${ohdsiSchema}.sec_role WHERE name = 'Atlas users' +); + +-- Add role_permission mappings for 'admin' role +INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id) +SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id +FROM ${ohdsiSchema}.sec_permission sp, ${ohdsiSchema}.sec_role sr +WHERE sp.value IN ( + 'job:*:artifact:get', + 'conceptset:check-filter-count:post', + 
'conceptset:compare-batch:post' +) +AND sr.name = 'admin' +AND NOT EXISTS ( + SELECT NULL FROM ${ohdsiSchema}.sec_role_permission + WHERE permission_id = sp.id AND role_id = sr.id +); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.16.0.20251213000001__batch_compare_add_concept_set_ids.sql b/src/main/resources/db/migration/postgresql/V2.16.0.20251213000001__batch_compare_add_concept_set_ids.sql new file mode 100644 index 0000000000..0ca90f4013 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.16.0.20251213000001__batch_compare_add_concept_set_ids.sql @@ -0,0 +1,7 @@ +-- Add concept_set_ids column to store the filter +ALTER TABLE ${ohdsiSchema}.concept_set_compare_job +ADD COLUMN concept_set_ids VARCHAR(5000); + +-- Add index for potential filtering +CREATE INDEX idx_cs_compare_job_concept_set_ids +ON ${ohdsiSchema}.concept_set_compare_job(concept_set_ids); \ No newline at end of file diff --git a/src/main/resources/db/migration/postgresql/V2.16.0.20260109000001__batch_compare_add_stats_table_per_job_concept_set.sql b/src/main/resources/db/migration/postgresql/V2.16.0.20260109000001__batch_compare_add_stats_table_per_job_concept_set.sql new file mode 100644 index 0000000000..6615f0adf3 --- /dev/null +++ b/src/main/resources/db/migration/postgresql/V2.16.0.20260109000001__batch_compare_add_stats_table_per_job_concept_set.sql @@ -0,0 +1,48 @@ +-- Create sequence for concept_set_compare_job_stats +CREATE SEQUENCE ${ohdsiSchema}.concept_set_compare_job_stats_sequence START WITH 1 INCREMENT BY 1; + +-- Create concept_set_compare_job_stats table +CREATE TABLE ${ohdsiSchema}.concept_set_compare_job_stats ( + id INTEGER NOT NULL, + compare_job_id INTEGER NOT NULL, + concept_set_id INTEGER NOT NULL, + cs1_included_concepts_count INTEGER NOT NULL, + cs1_included_source_codes_count INTEGER NOT NULL DEFAULT 0, + cs2_included_concepts_count INTEGER NOT NULL, + cs2_included_source_codes_count INTEGER NOT NULL DEFAULT 0, + has_differences BOOLEAN NOT NULL DEFAULT FALSE, + CONSTRAINT pk_cs_compare_job_stats PRIMARY KEY (id), + CONSTRAINT fk_cs_compare_job_stats_job + FOREIGN KEY (compare_job_id) + REFERENCES ${ohdsiSchema}.concept_set_compare_job(id) + ON DELETE CASCADE +); + +-- Add indexes for common query patterns +CREATE INDEX idx_cs_compare_job_stats_job_id +ON ${ohdsiSchema}.concept_set_compare_job_stats(compare_job_id); + +CREATE INDEX idx_cs_compare_job_stats_concept_set_id +ON ${ohdsiSchema}.concept_set_compare_job_stats(concept_set_id); + +CREATE INDEX idx_cs_compare_job_stats_has_diffs +ON ${ohdsiSchema}.concept_set_compare_job_stats(has_differences); + +CREATE INDEX idx_cs_compare_job_stats_job_concept_set +ON ${ohdsiSchema}.concept_set_compare_job_stats(compare_job_id, concept_set_id); + +CREATE INDEX idx_cs_compare_job_stats_job_diffs +ON ${ohdsiSchema}.concept_set_compare_job_stats(compare_job_id, has_differences); + +-- Add indexes for filtering by counts +CREATE INDEX idx_cs_compare_job_stats_cs1_concepts +ON ${ohdsiSchema}.concept_set_compare_job_stats(cs1_included_concepts_count); + +CREATE INDEX idx_cs_compare_job_stats_cs2_concepts +ON ${ohdsiSchema}.concept_set_compare_job_stats(cs2_included_concepts_count); + +CREATE INDEX idx_cs_compare_job_stats_cs1_source_codes +ON ${ohdsiSchema}.concept_set_compare_job_stats(cs1_included_source_codes_count); + +CREATE INDEX idx_cs_compare_job_stats_cs2_source_codes +ON ${ohdsiSchema}.concept_set_compare_job_stats(cs2_included_source_codes_count); \ No newline at end of file diff --git 
a/src/main/resources/i18n/messages_en.json b/src/main/resources/i18n/messages_en.json index 3415864006..3f00ce0ed0 100644 --- a/src/main/resources/i18n/messages_en.json +++ b/src/main/resources/i18n/messages_en.json @@ -1790,7 +1790,7 @@ "headingMessage": "Use this utility to compare the contents of two concept sets to see which concepts they may share", "modalTitle": "Choose a concept set", "sameConcepts": "Compared Concept Sets contain Concepts which are identical", - "sameWarning": "You cannot compare the same concept sets.", + "sameWarning": "You cannot compare the same concept set over the same vocabulary.", "saveFromComparisonNameTail": " - From Comparison", "saveMessage": "Save New Concept Set From Selection Below", "saveWarning": "You must save the current concept set before you can perform this comparison.", diff --git a/src/main/resources/i18n/messages_ko.json b/src/main/resources/i18n/messages_ko.json index fe8548a298..a23093ace9 100644 --- a/src/main/resources/i18n/messages_ko.json +++ b/src/main/resources/i18n/messages_ko.json @@ -1723,7 +1723,7 @@ "headingMessage": "이 유틸리티를 사용하여 두 컨셉 세트가 공유하고 있는 컨셉을 확인하세요", "modalTitle": "컨셉 세트 선택", "sameConcepts": "Compared Concept Sets contain Concepts which are identical", - "sameWarning": "동일한 컨셉 세트를 비교할 수 없습니다.", + "sameWarning": "동일한 어휘에서 동일한 컨셉 세트를 비교할 수 없습니다.", "saveFromComparisonNameTail": "- 비교", "saveMessage": "아래 선택에서 새 컨셉 세트 저장", "saveWarning": "이 비교 기능을 수행하기 전에 현재의 컨셉 세트를 저장해야합니다.", diff --git a/src/main/resources/i18n/messages_ru.json b/src/main/resources/i18n/messages_ru.json index 663ee22597..a30d433124 100644 --- a/src/main/resources/i18n/messages_ru.json +++ b/src/main/resources/i18n/messages_ru.json @@ -1854,7 +1854,7 @@ "conceptSet2": "Набор концепций 2:", "comparisonResults": "Результат сравнения", "sameConcepts": "Сравниваемые наборы концептов содержат идентичные концепты", - "sameWarning": "Невозможно сравнить один и тот же набор концепций", + "sameWarning": "Невозможно сравнить один и тот же набор концепций в одном и том же словаре", "saveMessage": "Сохранить набор концепций из списка ниже", "saveWarning": "Сохраните, пожалуйста, текущий набор концепций перед сравнением.", "saveFromComparisonNameTail": " - из сравнения" diff --git a/src/main/resources/i18n/messages_zh.json b/src/main/resources/i18n/messages_zh.json index a18fb2e53d..4797f47a0f 100644 --- a/src/main/resources/i18n/messages_zh.json +++ b/src/main/resources/i18n/messages_zh.json @@ -1723,7 +1723,7 @@ "headingMessage": "使用此实用程序可以比较两个概念集的内容,以查看它们可以共享哪些概念。", "modalTitle": "选择一个概念集", "sameConcepts": "Compared Concept Sets contain Concepts which are identical", - "sameWarning": "您无法比较相同的概念集。", + "sameWarning": "您无法在相同的词汇表上比较相同的概念集。", "saveFromComparisonNameTail": " - 来自对照", "saveMessage": "从下面的选择中保存新概念集", "saveWarning": "您必须保存当前概念集,然后才能执行此比较。",
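For orientation, a minimal sketch of how the job, per-concept-set stats, and per-concept diff tables created by the migrations above might be queried together after a batch-compare run finishes. This is illustrative only, not part of the change set: the job id (42) is a placeholder, and ${ohdsiSchema} is the same Flyway placeholder used in the migrations, so substitute the real schema name when running it by hand.

-- Illustrative only: per-concept-set summary for one finished batch-compare job.
-- Job id 42 is a placeholder; ${ohdsiSchema} is the Flyway placeholder used above.
SELECT j.id                                            AS job_id,
       j.source_1_key,
       j.source_2_key,
       j.concept_sets_analyzed,
       j.concept_sets_with_diffs,
       s.concept_set_id,
       s.cs1_included_concepts_count,
       s.cs2_included_concepts_count,
       s.has_differences,
       COUNT(d.id) FILTER (WHERE d.name_mismatch)      AS name_mismatches,
       COUNT(d.id) FILTER (WHERE d.domain_id_mismatch) AS domain_id_mismatches
FROM ${ohdsiSchema}.concept_set_compare_job j
JOIN ${ohdsiSchema}.concept_set_compare_job_stats s
  ON s.compare_job_id = j.id
LEFT JOIN ${ohdsiSchema}.concept_set_compare_job_diff d
  ON d.compare_job_id = j.id
 AND d.concept_set_id = s.concept_set_id
WHERE j.id = 42
GROUP BY j.id, j.source_1_key, j.source_2_key,
         j.concept_sets_analyzed, j.concept_sets_with_diffs,
         s.concept_set_id, s.cs1_included_concepts_count,
         s.cs2_included_concepts_count, s.has_differences;

A query shaped like this would be served by the composite indexes the migrations define on (compare_job_id, concept_set_id) and (compare_job_id, has_differences), which is presumably why they were added alongside the single-column indexes.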