diff --git a/.gitignore b/.gitignore index 65d97f4691..925c44c0e8 100644 --- a/.gitignore +++ b/.gitignore @@ -20,6 +20,7 @@ specs/ docs .claude *.bak +logs ### Developer's personal properties ### **/resources/config/application*-dev-*.properties diff --git a/articles/EntityAccess_HOWTO.md b/articles/EntityAccess_HOWTO.md index fbcee3e38a..ef92a80802 100644 --- a/articles/EntityAccess_HOWTO.md +++ b/articles/EntityAccess_HOWTO.md @@ -32,7 +32,7 @@ public final AccessType READ = AccessType.READ; public final AccessType WRITE = AccessType.WRITE; ``` -- This enables expressions like `hasEntityAccess(#id, COHORT_DEFINITION, anyOf(READ, WRITE))` to resolve within `@PreAuthorize`. + - This enables expressions like `hasAnyEntityAccess(#id, COHORT_DEFINITION, anyOf(READ, WRITE))` to resolve within `@PreAuthorize`. **Adding a new EntityType (summary steps)** @@ -53,15 +53,14 @@ public final AccessType WRITE = AccessType.WRITE; - Example annotation (place on controller or — preferably — the `CohortDefinitionService` method): ```java -@PreAuthorize("isOwner(#id, COHORT_DEFINITION) or isPermitted('read:cohort') or isPermitted('write:cohort') or hasEntityAccess(#id, COHORT_DEFINITION, anyOf(READ, WRITE))") +@PreAuthorize("isOwner(#id, COHORT_DEFINITION) or isPermitted(anyOf('read:cohort','write:cohort')) or hasAnyEntityAccess(#id, COHORT_DEFINITION, anyOf(READ, WRITE))") public CohortDTO getCohortDefinition(final int id) { ... } ``` - Explanation of the expression: - `isOwner(#id, COHORT_DEFINITION)` — short-circuits grant if caller created/owns the entity. - - `isPermitted('read:cohort')` — grant based on a global permission. - - `isPermitted('write:cohort')` — write implies read; include if desired. - - `hasEntityAccess(#id, COHORT_DEFINITION, anyOf(READ, WRITE))` — delegate to `EntityAccessService` to check explicit grants for this entity id. + - `isPermitted(anyOf('read:cohort','write:cohort'))` — reading a definition is allowed when the caller is granted the global read or write permission. 
+ - `hasAnyEntityAccess(#id, COHORT_DEFINITION, anyOf(READ, WRITE))` — delegate to `EntityAccessService` to check explicit grants for this entity id. **Implementation checklist** diff --git a/articles/FE_Analysis_Rework.md b/articles/FE_Analysis_Rework.md new file mode 100644 index 0000000000..2607a1e378 --- /dev/null +++ b/articles/FE_Analysis_Rework.md @@ -0,0 +1,670 @@ +# Feature Analysis Entity Refactoring Analysis + +## Executive Summary + +The `FeAnalysisEntity` hierarchy in WebAPI uses multiple competing abstraction mechanisms (inheritance, generics, interfaces, discriminators) to handle different types of feature analyses. This creates confusion and violates the type contract defined in StandardizedAnalysisAPI. This document analyzes the current implementation and proposes refactoring strategies to simplify the design. + +--- + +## Current Implementation Analysis + +### Entity Hierarchy Overview + +``` +FeAnalysisEntity (abstract) +│ Table: fe_analysis +│ Strategy: SINGLE_TABLE inheritance +│ Generic: (unconstrained - PROBLEM!) +│ Implements: FeatureAnalysis +│ +├── FeAnalysisWithStringEntity +│ │ Discriminator: "not null" (catch-all) +│ │ Generic: extends FeAnalysisEntity +│ │ Handles: PRESET, CUSTOM_FE +│ │ Storage: inline 'design' column (String) +│ │ +│ └── Design Type: String (violates API contract!) 
+│ +└── FeAnalysisWithCriteriaEntity (abstract) + │ Generic: + │ Storage: @OneToMany relationships + │ + ├── FeAnalysisWithPrevalenceCriteriaEntity + │ │ Discriminator: "CRITERIA_SET_PREVALENCE" + │ │ Generic: extends FeAnalysisWithCriteriaEntity + │ │ Storage: → fe_analysis_criteria table + │ │ + │ └── Design Type: List + │ + └── FeAnalysisWithDistributionCriteriaEntity + │ Discriminator: "CRITERIA_SET_DISTRIBUTION" + │ Generic: extends FeAnalysisWithCriteriaEntity + │ Storage: → fe_analysis_dist_criteria table + │ + └── Design Type: List +``` + +### Discriminator Formula Logic + +```sql +CASE WHEN type = 'CRITERIA_SET' + THEN CONCAT(CONCAT(type,'_'),stat_type) + ELSE type +END +``` + +**Produces:** +- `'PRESET'` → FeAnalysisWithStringEntity +- `'CUSTOM_FE'` → FeAnalysisWithStringEntity +- `'CRITERIA_SET_PREVALENCE'` → FeAnalysisWithPrevalenceCriteriaEntity +- `'CRITERIA_SET_DISTRIBUTION'` → FeAnalysisWithDistributionCriteriaEntity + +--- + +## Problems Identified + +### 1. Type Contract Violation + +**StandardizedAnalysisAPI defines:** +```java +public interface FeatureAnalysis { + T getDesign(); +} +``` + +**WebAPI incorrectly implements:** +```java +public abstract class FeAnalysisEntity // T is unconstrained! + implements FeatureAnalysis + +public class FeAnalysisWithStringEntity extends FeAnalysisEntity + // String does NOT extend FeatureAnalysisDesign - VIOLATION! +``` + +### 2. Competing Abstraction Mechanisms + +Four different polymorphism strategies working at cross-purposes: + +1. **Single Table Inheritance** - Database-level polymorphism via discriminator +2. **Generic Type Parameters** - Compile-time type abstraction +3. **Interface Implementation** - Contract-based polymorphism +4. **Multiple Design Storage Strategies** - Inline String vs relational tables + +### 3. 
Inconsistent Field Usage + +```java +// Base class: FeAnalysisEntity +@Column(name = "design", insertable = false, updatable = false) +private String rawDesign; // READ-ONLY, shadowed by subclasses + +// FeAnalysisWithStringEntity +@Lob +private String design; // Maps to SAME column, writable + +// FeAnalysisWithCriteriaEntity +@OneToMany +private List design; // DIFFERENT storage - foreign key relationship +``` + +The `rawDesign` field is a phantom - declared but never properly used. + +### 4. Vague Discriminator Value + +```java +@DiscriminatorValue("not null") // Means: "everything else" +``` + +This is a workaround for JPA's limitation of one discriminator value per class. It obscures that this entity handles both `PRESET` and `CUSTOM_FE` types. + +### 5. Three Different Design Storage Patterns + +| Entity Type | Storage Strategy | Table(s) | +|------------|------------------|----------| +| String-based (PRESET/CUSTOM_FE) | Inline JSON column | `fe_analysis.design` | +| Prevalence Criteria | @OneToMany relationship | `fe_analysis` + `fe_analysis_criteria` | +| Distribution Criteria | @OneToMany relationship | `fe_analysis` + `fe_analysis_dist_criteria` | + +--- + +## StandardizedAnalysisAPI Contract + +### Required Design Types + +```java +// Base marker interface +public interface FeatureAnalysisDesign {} + +// Concrete implementations +public class PresetFeatureAnalysisDesign implements FeatureAnalysisDesign {} + +public class CriteriaFeatureDesign implements FeatureAnalysisDesign { + private List criteriaList; +} +``` + +### Interface Contract + +```java +public interface FeatureAnalysis + extends CommonEntity { + + StandardFeatureAnalysisType getType(); + String getName(); + T getDesign(); + StandardFeatureAnalysisDomain getDomain(); + String getDescr(); +} +``` + +**Key requirement:** `T` must extend `FeatureAnalysisDesign`, not be a raw `String` or `List`. 
+ +--- + +## Refactoring Options + +### Option 1: Remove Generics, Push Interface to Subclasses (Recommended) + +**Rationale:** +- Eliminates type contract violation +- Maintains single table inheritance (minimal database changes) +- Each subclass implements interface with correct design type +- Clear separation: base entity handles persistence, subclasses handle domain contracts + +**Implementation:** + +```java +// Base entity - NO generics, NO interface +@Entity +@Table(name = "fe_analysis") +@Inheritance(strategy = InheritanceType.SINGLE_TABLE) +@DiscriminatorFormula( + "CASE WHEN type = 'CRITERIA_SET' THEN CONCAT(CONCAT(type,'_'),stat_type) " + + "ELSE type END" +) +public abstract class FeAnalysisEntity extends CommonEntity + implements Comparable { + + @Id + @GeneratedValue(generator = "fe_analysis_generator") + private Integer id; + + @Column + @Enumerated(EnumType.STRING) + private StandardFeatureAnalysisType type; + + @Column + private String name; + + @Column + @Enumerated(EnumType.STRING) + private StandardFeatureAnalysisDomain domain; + + @Column + private String descr; + + @Column(name = "is_locked") + private Boolean isLocked; + + @Column(name = "stat_type") + @Enumerated(value = EnumType.STRING) + private CcResultType statType; + + // Protected raw design storage for subclasses + @Lob + @JdbcTypeCode(SqlTypes.VARCHAR) + @Column(name = "design") + protected String rawDesign; + + // Getters for common fields + public Integer getId() { return id; } + public StandardFeatureAnalysisType getType() { return type; } + public String getName() { return name; } + public StandardFeatureAnalysisDomain getDomain() { return domain; } + public String getDescr() { return descr; } + + protected String getRawDesign() { return rawDesign; } + protected void setRawDesign(String rawDesign) { this.rawDesign = rawDesign; } +} +``` + +```java +// String-based designs (PRESET, CUSTOM_FE) +@Entity +@DiscriminatorValue("not null") +public class FeAnalysisWithStringEntity 
extends FeAnalysisEntity + implements FeatureAnalysis { + + private transient PresetFeatureAnalysisDesign design; + + @Override + public PresetFeatureAnalysisDesign getDesign() { + if (design == null && getRawDesign() != null) { + // Deserialize JSON string to PresetFeatureAnalysisDesign + design = JsonUtil.fromJson(getRawDesign(), PresetFeatureAnalysisDesign.class); + } + return design; + } + + public void setDesign(PresetFeatureAnalysisDesign design) { + this.design = design; + setRawDesign(JsonUtil.toJson(design)); + } + + // Delegate interface methods to base entity + // (Already inherited: getId, getType, getName, getDomain, getDescr) +} +``` + +```java +// Criteria-based designs - abstract middle layer +@Entity +public abstract class FeAnalysisWithCriteriaEntity extends FeAnalysisEntity + implements FeatureAnalysis { + + private transient CriteriaFeatureDesign design; + + // Subclasses provide concrete criteria lists + protected abstract List getCriteriaList(); + + @Override + public CriteriaFeatureDesign getDesign() { + if (design == null) { + design = new CriteriaFeatureDesign(); + design.setCriteriaList(getCriteriaList()); + } + return design; + } +} +``` + +```java +// Prevalence criteria +@Entity +@DiscriminatorValue("CRITERIA_SET_PREVALENCE") +public class FeAnalysisWithPrevalenceCriteriaEntity + extends FeAnalysisWithCriteriaEntity { + + @OneToMany(mappedBy = "featureAnalysis", cascade = CascadeType.ALL, orphanRemoval = true) + private List criteriaList = new ArrayList<>(); + + @Override + protected List getCriteriaList() { + return criteriaList; + } + + public List getCriteria() { + return criteriaList; + } + + public void setCriteria(List criteria) { + this.criteriaList = criteria; + } +} +``` + +```java +// Distribution criteria +@Entity +@DiscriminatorValue("CRITERIA_SET_DISTRIBUTION") +public class FeAnalysisWithDistributionCriteriaEntity + extends FeAnalysisWithCriteriaEntity { + + @OneToMany(mappedBy = "featureAnalysis", cascade = 
CascadeType.ALL, orphanRemoval = true) + private List criteriaList = new ArrayList<>(); + + @Override + protected List getCriteriaList() { + return criteriaList; + } + + public List getCriteria() { + return criteriaList; + } + + public void setCriteria(List criteria) { + this.criteriaList = criteria; + } +} +``` + +**Benefits:** +- ✅ Complies with StandardizedAnalysisAPI contract +- ✅ Eliminates raw type warnings +- ✅ No database schema changes required +- ✅ Each subclass has clear, typed API +- ✅ Separates persistence concerns from domain contracts + +**Drawbacks:** +- ⚠️ Interface implementation duplicated across subclasses (mitigated by inheritance) +- ⚠️ Still uses "not null" discriminator (JPA limitation) + +--- + +### Option 2: Unified Design Storage with Composition + +**Rationale:** +- Eliminate inheritance complexity +- Single consistent storage strategy +- Use composition over inheritance + +**Implementation:** + +```java +@Entity +@Table(name = "fe_analysis") +public class FeAnalysisEntity extends CommonEntity { + + @Id + @GeneratedValue(generator = "fe_analysis_generator") + private Integer id; + + @Column + @Enumerated(EnumType.STRING) + private StandardFeatureAnalysisType type; + + @Column(name = "stat_type") + @Enumerated(value = EnumType.STRING) + private CcResultType statType; + + // String-based design storage + @Lob + @JdbcTypeCode(SqlTypes.VARCHAR) + @Column(name = "design") + private String designJson; + + // Criteria-based design storage + @OneToMany(mappedBy = "featureAnalysis", cascade = CascadeType.ALL) + private List criteriaList = new ArrayList<>(); + + // Factory method returns appropriate FeatureAnalysis implementation + public FeatureAnalysis toFeatureAnalysis() { + switch (type) { + case PRESET: + case CUSTOM_FE: + return new StringBasedFeatureAnalysis(this); + case CRITERIA_SET: + if (statType == CcResultType.PREVALENCE) { + return new PrevalenceFeatureAnalysis(this); + } else { + return new DistributionFeatureAnalysis(this); + } + 
default: + throw new IllegalStateException("Unknown type: " + type); + } + } +} + +// Domain model implementations (not entities, just wrappers) +public class StringBasedFeatureAnalysis + implements FeatureAnalysis { + + private final FeAnalysisEntity entity; + private PresetFeatureAnalysisDesign design; + + public StringBasedFeatureAnalysis(FeAnalysisEntity entity) { + this.entity = entity; + } + + @Override + public PresetFeatureAnalysisDesign getDesign() { + if (design == null) { + design = JsonUtil.fromJson(entity.getDesignJson(), + PresetFeatureAnalysisDesign.class); + } + return design; + } + + @Override + public Integer getId() { return entity.getId(); } + // ... delegate other methods +} +``` + +**Benefits:** +- ✅ Single entity class - massive simplification +- ✅ No inheritance complexity +- ✅ Clear separation: entity = persistence, wrappers = domain +- ✅ No discriminator issues + +**Drawbacks:** +- ⚠️ Requires significant refactoring +- ⚠️ Database changes needed (remove discriminator column) +- ⚠️ All code using entity hierarchy must be updated +- ⚠️ Loses JPA polymorphic query capabilities + +--- + +### Option 3: Joined Table Inheritance with Explicit Subclasses + +**Rationale:** +- Clearer table structure +- Each analysis type gets its own table +- More explicit discriminators + +**Implementation:** + +```java +@Entity +@Table(name = "fe_analysis") +@Inheritance(strategy = InheritanceType.JOINED) +@DiscriminatorColumn(name = "analysis_category", discriminatorType = DiscriminatorType.STRING) +public abstract class FeAnalysisEntity extends CommonEntity { + // Common fields only +} + +@Entity +@Table(name = "fe_analysis_preset") +@DiscriminatorValue("PRESET") +public class PresetFeAnalysisEntity extends FeAnalysisEntity + implements FeatureAnalysis { + + @Lob + @Column(name = "design") + private String designJson; + + // Typed API +} + +@Entity +@Table(name = "fe_analysis_custom") +@DiscriminatorValue("CUSTOM_FE") +public class CustomFeAnalysisEntity 
extends FeAnalysisEntity + implements FeatureAnalysis { + + @Lob + @Column(name = "design") + private String designJson; + + // Typed API +} + +@Entity +@Table(name = "fe_analysis_prevalence") +@DiscriminatorValue("PREVALENCE") +public class PrevalenceFeAnalysisEntity extends FeAnalysisEntity + implements FeatureAnalysis { + + @OneToMany(mappedBy = "featureAnalysis") + private List criteriaList; + + // Typed API +} + +@Entity +@Table(name = "fe_analysis_distribution") +@DiscriminatorValue("DISTRIBUTION") +public class DistributionFeAnalysisEntity extends FeAnalysisEntity + implements FeatureAnalysis { + + @OneToMany(mappedBy = "featureAnalysis") + private List criteriaList; + + // Typed API +} +``` + +**Benefits:** +- ✅ Explicit discriminator values (no "not null") +- ✅ Clearer table structure +- ✅ Each type fully independent +- ✅ Complies with API contract + +**Drawbacks:** +- ⚠️ Requires database migration (schema changes) +- ⚠️ More tables to manage +- ⚠️ Potential performance impact (joins) +- ⚠️ PRESET and CUSTOM_FE are structurally identical (duplication) + +--- + +## Recommendation + +**Implement Option 1: Remove Generics, Push Interface to Subclasses** + +### Why This Option? + +1. **Minimal Breaking Changes** + - No database schema changes required + - Maintains single table structure + - Preserves discriminator logic + +2. **Fixes Type Safety Issues** + - Complies with StandardizedAnalysisAPI contract + - Eliminates raw type warnings + - Each subclass has proper typed API + +3. **Improves Clarity** + - Removes competing generic abstraction + - Clear separation: base = persistence, subclasses = domain contracts + - Easier to reason about + +4. **Practical Implementation Path** + - Can be done incrementally + - Update entity classes first + - Update services/converters second + - Update tests last + +### Migration Steps + +#### Phase 1: Update Entity Classes + +1. 
**Remove generic from FeAnalysisEntity** + - Remove `` parameter + - Remove `FeatureAnalysis` interface implementation + - Keep `rawDesign` field as protected + - Remove abstract `getDesign()`/`setDesign()` methods + +2. **Update FeAnalysisWithStringEntity** + - Remove `` parameter + - Add `implements FeatureAnalysis` + - Implement `getDesign()` returning `PresetFeatureAnalysisDesign` + - Add deserialization logic from `rawDesign` + +3. **Update FeAnalysisWithCriteriaEntity** + - Remove `` parameter + - Add `implements FeatureAnalysis` + - Change abstract method to return `CriteriaFeatureDesign` + +4. **Update concrete criteria entities** + - Remove generic parameters + - Keep specific typed methods for criteria lists + +#### Phase 2: Update Service Layer + +1. **Update method signatures** + ```java + // BEFORE + Optional findById(Integer id) + + // AFTER + Optional findById(Integer id) // Same, but no warning! + ``` + +2. **Update casting logic** + ```java + // BEFORE + if (entity instanceof FeAnalysisWithCriteriaEntity) { + FeAnalysisWithCriteriaEntity criteria = + (FeAnalysisWithCriteriaEntity) entity; + } + + // AFTER + if (entity instanceof FeAnalysisWithCriteriaEntity) { + FeAnalysisWithCriteriaEntity criteria = + (FeAnalysisWithCriteriaEntity) entity; + } + ``` + +#### Phase 3: Update Converters + +1. **Update converter interfaces** + ```java + // BEFORE + public class FeAnalysisDTOToFeAnalysisConverter + extends BaseConverter + + // AFTER + public class FeAnalysisDTOToFeAnalysisConverter + extends BaseConverter // No change! + ``` + +2. **Update design conversion logic** + - Handle `PresetFeatureAnalysisDesign` serialization/deserialization + - Handle `CriteriaFeatureDesign` conversion + +#### Phase 4: Testing + +1. **Unit tests** - verify type safety +2. **Integration tests** - verify persistence works +3. 
**API tests** - verify DTOs serialize correctly + +--- + +## Future Improvements + +### Address "not null" Discriminator + +While JPA doesn't support multiple discriminator values per class, consider: + +1. **Custom UserType** to map specific discriminator values +2. **Split into two classes** (PresetFeAnalysis, CustomFeAnalysis) +3. **Accept limitation** with clear documentation + +### Consider DTO Alignment + +Ensure DTOs mirror the entity structure: + +```java +public abstract class FeAnalysisDTO { + // Common fields +} + +public class PresetFeAnalysisDTO extends FeAnalysisDTO { + private PresetFeatureAnalysisDesign design; +} + +public class PrevalenceFeAnalysisDTO extends FeAnalysisDTO { + private CriteriaFeatureDesign design; +} +``` + +### Standardize Serialization + +Create consistent JSON serialization strategy: +- Entities store raw JSON in `rawDesign` +- Lazy deserialization on `getDesign()` call +- External systems work with typed design objects + +--- + +## Conclusion + +The current `FeAnalysisEntity` hierarchy suffers from over-engineering through multiple competing abstraction mechanisms. The recommended refactoring (Option 1) eliminates generic type parameters at the entity level while maintaining single table inheritance and pushing interface implementation to subclasses. This approach: + +- **Fixes type safety violations** with StandardizedAnalysisAPI +- **Eliminates raw type warnings** throughout the codebase +- **Simplifies reasoning** about the code structure +- **Requires minimal changes** to existing code and database +- **Provides clear migration path** through incremental updates + +The key insight is that **persistence entities and domain contracts are separate concerns** that should not be conflated through generic type parameters. Let inheritance handle database polymorphism and interface implementation handle domain contracts. 
diff --git a/articles/TransactionBoundaryChanges.md b/articles/TransactionBoundaryChanges.md new file mode 100644 index 0000000000..94cb21230d --- /dev/null +++ b/articles/TransactionBoundaryChanges.md @@ -0,0 +1,377 @@ +# Transaction Boundary Isolation for Concurrent Batch Jobs + +## Summary + +Fixed transaction manager conflicts that caused "transaction already open" errors during cohort characterization generation. All batch job tasklets now use `batchTransactionManager` consistently, enabling proper transaction isolation and concurrent job execution. + +## Changes Made + +### 1. Added Batch Transaction Template Beans ([JobConfig.java](src/main/java/org/ohdsi/webapi/JobConfig.java)) + +Created two new transaction template beans specifically for batch operations: +- **`batchTransactionTemplate`**: Uses `batchTransactionManager` with default propagation +- **`batchTransactionTemplateRequiresNew`**: Uses `batchTransactionManager` with `PROPAGATION_REQUIRES_NEW` for immediate commits + +These ensure all batch-related transactions use the same transaction manager as Spring Batch steps. + +### 2. Updated AbstractDaoService ([AbstractDaoService.java](src/main/java/org/ohdsi/webapi/service/AbstractDaoService.java)) + +Added autowired fields and getter methods for batch transaction templates: +- `getBatchTransactionTemplate()` +- `getBatchTransactionTemplateRequiresNew()` + +This allows all services extending AbstractDaoService to access batch transaction templates. + +### 3. Updated GenerationUtils ([GenerationUtils.java](src/main/java/org/ohdsi/webapi/common/generation/GenerationUtils.java)) + +- Injected `batchTransactionTemplate` in constructor +- Updated `buildJobForCohortBasedAnalysisTasklet()` to pass `batchTransactionTemplate` to all tasklets: + - CreateCohortTableTasklet + - GenerateLocalCohortTasklet + - TempTableCleanupManager + - DropCohortTableListener + +### 4. 
Updated CohortGenerationService ([CohortGenerationService.java](src/main/java/org/ohdsi/webapi/cohortdefinition/CohortGenerationService.java)) + +Changed `buildGenerateCohortJob()` to use batch transaction templates: +- GenerateCohortTasklet uses `getBatchTransactionTemplate()` +- TempTableCleanupManager uses `getBatchTransactionTemplate()` +- **GenerationJobExecutionListener uses `getTransactionTemplateRequiresNew()`** (jpaTransactionManager) + - Listeners run outside step execution context, need JPA for entity operations + - No transaction conflicts because they run before/after steps, not during + +### 5. Updated CohortSamplingService ([CohortSamplingService.java](src/main/java/org/ohdsi/webapi/cohortsample/CohortSamplingService.java)) + +Changed `createDeleteSamplesTasklet()` to use `getBatchTransactionTemplate()` for CleanupCohortSamplesTasklet. + +### 6. Fixed CcServiceImpl Job Launching ([CcServiceImpl.java](src/main/java/org/ohdsi/webapi/cohortcharacterization/CcServiceImpl.java)) + +- Added `@Transactional(propagation = Propagation.NOT_SUPPORTED)` to `generateCc()` method + - **Critical:** Suspends class-level transaction before launching Spring Batch job + - Without this, JobRepository detects active transaction and throws error +- Changed GenerateCohortCharacterizationTasklet to use `getBatchTransactionTemplate()` instead of `getTransactionTemplate()` + +### 7. Enhanced AnalysisTasklet ([AnalysisTasklet.java](src/main/java/org/ohdsi/webapi/common/generation/AnalysisTasklet.java)) + +Added null check when rolling back transaction in `saveInfoWithinTheSeparateTransaction()` to prevent NPE if transaction creation fails. + +## How It Works + +### Transaction Architecture + +**Two Transaction Managers:** +1. **`jpaTransactionManager`** (JpaTransactionManager) - For service layer WebAPI DB operations +2. 
**`batchTransactionManager`** (DataSourceTransactionManager) - For Spring Batch and all batch job operations + +**Both target the same WebAPI DataSource**, but using the same transaction manager throughout a batch job enables proper transaction suspension/resumption. + +### Transaction Scope Per Job + +**Before Job Starts:** +- Service layer can use `jpaTransactionManager` for pre-job setup +- Short-lived transactions commit before job launches + +**During Job Execution:** +- Each **step** has its own transaction (managed by `batchTransactionManager`) +- Step transaction commits when step completes +- JobRepository operations happen in separate transactions (automatic) + +**Job Listeners (beforeJob/afterJob):** +- Run **outside** step execution context +- Can use `jpaTransactionManager` for JPA entity operations +- No conflicts with step transactions because they run before/after, not during + +**Within Tasklets:** +- Tasklets receive `batchTransactionTemplate` using `batchTransactionManager` +- Can create nested `PROPAGATION_REQUIRES_NEW` transactions for immediate commits (e.g., cache updates) +- Proper suspend/resume because same transaction manager is used + +## Benefits + +### 1. Eliminates Transaction Conflicts +No more "transaction already open" errors when tasklets try to create nested transactions during step execution. + +### 2. Enables Concurrent Job Execution +Multiple jobs can run simultaneously without blocking each other: +- Each job updates its own `job_execution_id` rows (no row-level conflicts) +- Cache updates commit immediately and are visible to other jobs +- Connection pool properly handles concurrent transactions + +### 3. Short-Lived Transactions Per Step +- Each step's transaction commits when step completes (not held for entire job) +- Database locks released quickly +- No long-held transactions blocking WebAPI tables + +### 4. 
Proper Transaction Isolation +- Cache updates with `PROPAGATION_REQUIRES_NEW` commit immediately +- Analysis metadata saves don't roll back with step failure +- CDM queries (separate database) don't interfere with WebAPI transactions + +## Verification Steps + +To verify the changes work correctly: + +1. **Start Multiple Concurrent Jobs:** + ``` + POST /cohortcharacterization/{id1}/generation/{source1} + POST /cohortcharacterization/{id2}/generation/{source2} + ``` + +2. **Check Transaction Isolation:** + - Monitor `BATCH_STEP_EXECUTION` table - each step should commit independently + - Check cache updates are visible to concurrent jobs during execution + - Verify no long-held locks on WebAPI tables + +3. **Test Failure Scenarios:** + - If a step fails, its transaction should roll back + - But cache updates from earlier `REQUIRES_NEW` transactions should remain committed + - JobRepository metadata should persist correctly + +4. **Monitor Database Locks:** + - PostgreSQL: `SELECT * FROM pg_locks WHERE granted = true AND locktype = 'relation';` + - SQL Server: `SELECT * FROM sys.dm_tran_locks;` + - Should see short-lived locks, not locks held for entire job duration + +## Technical Details + +### When to Use Which Transaction Manager + +**Use `jpaTransactionManager` (JPA operations):** +- Service layer methods annotated with `@Transactional` +- JobExecutionListeners (beforeJob/afterJob callbacks) +- StepExecutionListeners (beforeStep/afterStep callbacks) +- Any code that needs to save/update JPA entities outside of batch steps +- Pre-job and post-job setup/cleanup + +**CRITICAL: Always use `@Transactional(propagation = Propagation.NOT_SUPPORTED)` when launching Spring Batch jobs** +- Service methods that call `jobService.runJob()` or `jobTemplate.launch()` MUST suspend active transactions +- Spring Batch JobRepository throws `IllegalStateException` if transaction is active during job launch +- Example: `CcServiceImpl.generateCc()`, 
`CohortGenerationService.generateCohortViaJob()` + +**Use `batchTransactionManager` (Batch operations):** +- Inside tasklet `execute()` or `doTask()` methods +- Nested transactions within steps (`PROPAGATION_REQUIRES_NEW`) +- Cache updates during step execution +- Any database operations within Spring Batch step context + +**Key Rule:** If code runs **inside a Spring Batch step**, use `batchTransactionManager`. If it runs **outside steps** (service layer, listeners), use `jpaTransactionManager`. When **launching a job**, ensure no transaction is active. + +### Why Same Transaction Manager Matters + +When a Spring Batch step creates a transaction with `batchTransactionManager` and a tasklet tries to create a `PROPAGATION_REQUIRES_NEW` transaction: + +**Before (Mixed Managers):** +``` +Step Transaction (batchTransactionManager) [ACTIVE] + └─> Tasklet tries REQUIRES_NEW with jpaTransactionManager + └─> CONFLICT: Both manage same DataSource + └─> Error: "transaction already open" +``` + +**After (Consistent Manager):** +``` +Step Transaction (batchTransactionManager) [ACTIVE] + └─> Tasklet creates REQUIRES_NEW with batchTransactionManager + └─> Step transaction SUSPENDED + └─> New transaction COMMITS + └─> Step transaction RESUMES +``` + +### Transaction Propagation Behaviors Used + +- **PROPAGATION_REQUIRED** (default): Join existing transaction or create new one +- **PROPAGATION_REQUIRES_NEW**: Suspend current transaction and create new independent one +- **PROPAGATION_NOT_SUPPORTED**: Execute non-transactionally, suspend any existing transaction + +## Related Files + +- Configuration: [JobConfig.java](src/main/java/org/ohdsi/webapi/JobConfig.java), [DataAccessConfig.java](src/main/java/org/ohdsi/webapi/DataAccessConfig.java) +- Base Classes: [AbstractDaoService.java](src/main/java/org/ohdsi/webapi/service/AbstractDaoService.java) +- Generation: [GenerationUtils.java](src/main/java/org/ohdsi/webapi/common/generation/GenerationUtils.java) +- Services: 
[CohortGenerationService.java](src/main/java/org/ohdsi/webapi/cohortdefinition/CohortGenerationService.java), [CcServiceImpl.java](src/main/java/org/ohdsi/webapi/cohortcharacterization/CcServiceImpl.java) +- Tasklets: [AnalysisTasklet.java](src/main/java/org/ohdsi/webapi/common/generation/AnalysisTasklet.java), [GenerateCohortCharacterizationTasklet.java](src/main/java/org/ohdsi/webapi/cohortcharacterization/GenerateCohortCharacterizationTasklet.java) + +## Spring Batch 5 Migration Issues and Fixes + +### Issue 1: JobParameter.toString() Breaking Change + +**Problem:** +Spring Batch 5.x changed the behavior of `JobParameter` objects. Calling `.toString()` directly on a `JobParameter` now returns its internal representation instead of just the value: + +```java +// Spring Batch 4.x behavior: +jobParameters.get(SOURCE_ID).toString() → "5" + +// Spring Batch 5.x behavior: +jobParameters.get(SOURCE_ID).toString() → "{value=5, type=class java.lang.String, identifying=true}" +``` + +This caused `NumberFormatException` when trying to parse integer parameters: +``` +java.lang.NumberFormatException: For input string: "{value=5, type=class java.lang.String, identifying=true}" +``` + +**Critical Distinction:** +- **`JobParameters.getParameters()`** → Returns `Map>` (wrapper objects) +- **`ChunkContext.getStepContext().getJobParameters()`** → Returns `Map` (already extracted values) + +**Solution:** + +Only use `.getValue()` when dealing with `JobParameter` objects from `JobParameters.getParameters()`: + +```java +// In JobExecutionListener (receives JobParameters): +private Object doTask(JobParameters parameters) { + final Map> jobParameters = parameters.getParameters(); + final Integer sourceId = Integer.valueOf(jobParameters.get(SOURCE_ID).getValue().toString()); + // ^^^^^^^^^^^^ CORRECT - need .getValue() +} + +// In Tasklets (receives ChunkContext): +private Integer doTask(ChunkContext chunkContext) { + Map jobParams = chunkContext.getStepContext().getJobParameters(); + 
int cohortDefinitionId = Integer.parseInt(jobParams.get(COHORT_DEFINITION_ID).toString());
+    // ^^^^^^^^^^^^ CORRECT - values already extracted, no .getValue() needed
+}
+```
+
+**Fixed File:**
+- [DropCohortTableListener.java](src/main/java/org/ohdsi/webapi/cohortcharacterization/DropCohortTableListener.java) - Line 43
+
+**Pattern Recognition:**
+- If you see `Map<String, JobParameter<?>>` → Use `.getValue().toString()`
+- If you see `Map<String, Object>` → Use `.toString()` only
+
+### Issue 2: Mixed Transaction Managers in JobExecutionListener
+
+**Problem:**
+After introducing separate transaction managers for Batch and JPA, we encountered this error in `DropCohortTableListener.afterJob()`:
+
+```
+java.lang.IllegalStateException: Already value [ConnectionHolder] for key [HikariDataSource] bound to thread
+    at org.springframework.orm.jpa.JpaTransactionManager.doBegin(JpaTransactionManager.java:442)
+```
+
+**Root Cause:**
+1. Spring Batch binds a JDBC connection to the thread during job execution
+2. The `afterJob()` listener ran on the same thread with that connection still bound
+3. Inside `afterJob()`, the code used `batchTransactionTemplate` (DataSourceTransactionManager)
+4. Within that transaction, it called `sourceService.findBySourceId()` which is `@Transactional` (JpaTransactionManager)
+5. JPA tried to bind its own connection to the same thread/DataSource → **Conflict**
+
+**Incorrect Pattern (Before Fix):**
+```java
+@Override
+public void afterJob(JobExecution jobExecution) {
+    // BAD: Using batch transaction manager
+    batchTransactionTemplate.execute(transactionStatus -> {
+        // This calls @Transactional service with JPA transaction manager
+        sourceService.findBySourceId(sourceId); // CONFLICT! 
+ return doTask(jobExecution.getJobParameters()); + }); +} +``` + +**Correct Pattern (After Fix):** +```java +@Override +public void afterJob(JobExecution jobExecution) { + // GOOD: No transaction wrapper - let JPA services manage their own transactions + doTask(jobExecution.getJobParameters()); +} + +private Object doTask(JobParameters parameters) { + // sourceService.findBySourceId() has @Transactional - creates its own JPA transaction + final Source source = sourceService.findBySourceId(sourceId); + // DROP TABLE is DDL that auto-commits anyway + jdbcTemplate.execute(dropTableSql); +} +``` + +**Solution:** +Removed the transaction wrapper entirely from `DropCohortTableListener` because: +1. `sourceService.findBySourceId()` already has `@Transactional` and manages its own JPA transaction +2. The `DROP TABLE` statement is DDL and auto-commits anyway +3. No transactional rollback protection is needed for this cleanup operation + +**Fixed Files:** +- [DropCohortTableListener.java](src/main/java/org/ohdsi/webapi/cohortcharacterization/DropCohortTableListener.java) + - Removed `TransactionTemplate` field and constructor parameter + - Changed `afterJob()` to call `doTask()` directly without any transaction wrapper +- [GenerationUtils.java](src/main/java/org/ohdsi/webapi/common/generation/GenerationUtils.java) + - Updated listener instantiation to not pass `batchTransactionTemplate` + +### Best Practices for JobExecutionListener + +**Key Rules for JobExecutionListener.afterJob() and beforeJob():** + +1. **Never nest Batch and JPA transactions on the same thread** + - Don't wrap JPA service calls in `batchTransactionTemplate` + - Let `@Transactional` services manage their own transactions + +2. **Use only one transaction manager per execution path** + - If calling JPA services, let them use `jpaTransactionManager` + - If doing JDBC operations, use appropriate transaction manager or none for DDL + +3. 
**Prefer PROPAGATION_REQUIRES_NEW for JPA operations in listeners** + - Creates clean, isolated transaction independent of batch context + - Example: `GenerationJobExecutionListener` uses `transactionTemplateRequiresNew` + +4. **Understand when transactions are needed** + - DDL operations (CREATE, DROP, ALTER) auto-commit - no transaction needed + - JPA entity operations need transactions - let `@Transactional` handle it + - Batch operations within steps need `batchTransactionManager` + +**Mental Model:** + +``` +BAD - Mixed Transaction Managers: +afterJob() + └─> Batch TransactionTemplate + └─> calls @Transactional JPA service + └─> Same DataSource + └─> CONFLICT: "Already value bound to thread" + +GOOD - Single Transaction Manager: +afterJob() + └─> calls @Transactional JPA service (with REQUIRES_NEW) + └─> Clean, independent JPA transaction + └─> No conflict +``` + +**Comparison with GenerationJobExecutionListener:** + +`GenerationJobExecutionListener` (correct pattern) uses JPA transaction template directly: +```java +public GenerationJobExecutionListener( + SourceService sourceService, + CohortDefinitionRepository cohortDefinitionRepository, + TransactionTemplate transactionTemplate, // JPA transaction template (transactionTemplateRequiresNew) + JdbcTemplate sourceTemplate +) { ... } + +@Override +public void afterJob(JobExecution je) { + // Uses PROPAGATION_REQUIRES_NEW with jpaTransactionManager + transactionTemplate.getTransactionManager().getTransaction(...); + sourceService.findBySourceId(sourceId); // @Transactional - works correctly +} +``` + +This works because it consistently uses `jpaTransactionManager` throughout the listener, never mixing it with `batchTransactionManager`. 
+ +### Summary of Transaction Manager Rules + +| Context | Transaction Manager | When to Use | +|---------|-------------------|-------------| +| **Service Layer** | `jpaTransactionManager` | Annotated with `@Transactional`, pre/post job operations | +| **Batch Steps** | `batchTransactionManager` | Inside tasklet `execute()`, nested transactions within steps | +| **Job Listeners** | `jpaTransactionManager` | If calling JPA services; let `@Transactional` manage it | +| **Job Launch** | None (`PROPAGATION_NOT_SUPPORTED`) | Must suspend active transactions before launching jobs | +| **DDL Operations** | None | DDL auto-commits, no transaction wrapper needed | + +**Critical Reminder:** When sharing a DataSource between multiple transaction managers, never nest transactions from different managers on the same thread. This is especially important in Spring Batch 5 / Spring Boot 3.x which has stricter transaction and resource binding behavior. + +## Date + +April 1, 2026 diff --git a/pom.xml b/pom.xml index 0aaea0646b..fc22270ada 100644 --- a/pom.xml +++ b/pom.xml @@ -25,16 +25,15 @@ 42.7.4 1.83 - 1.12.1 + 1.13.0 + 3.8.0 1.19.1 3.1.2 - 6.0.5 2.18.2 21 21 21 21 - 1.17.4 49 @@ -478,6 +477,11 @@ SqlRender ${SqlRender.version} + + org.ohdsi + featureExtraction + ${featureExtraction.version} + commons-io commons-io @@ -523,16 +527,7 @@ flyway-database-postgresql - - org.flywaydb - flyway-sqlserver - - - - org.flywaydb - flyway-database-oracle - - + com.cronutils cron-utils @@ -650,6 +645,29 @@ + + org.ohdsi + SkeletonCohortCharacterization + 2.0.2 + + + org.ohdsi + standardized-analysis-api + + + org.ohdsi.sql + SqlRender + + + org.ohdsi + circe + + + org.ohdsi + featureExtraction + + + com.cosium.spring.data spring-data-jpa-entity-graph diff --git a/src/main/java/org/ohdsi/webapi/Constants.java b/src/main/java/org/ohdsi/webapi/Constants.java index 38b50fdbdf..5f7f8612d5 100644 --- a/src/main/java/org/ohdsi/webapi/Constants.java +++ 
b/src/main/java/org/ohdsi/webapi/Constants.java @@ -6,8 +6,9 @@ public interface Constants { String DEFAULT_DIALECT = "sql server"; String GENERATE_COHORT = "generateCohort"; - String GENERATE_PREDICTION_ANALYSIS = "generatePredictionAnalysis"; - String GENERATE_ESTIMATION_ANALYSIS = "generateEstimationAnalysis"; + String GENERATE_COHORT_CHARACTERIZATION = "generateCohortCharacterization"; + String GENERATE_PATHWAY_ANALYSIS = "generatePathwayAnalysis"; + String GENERATE_IR_ANALYSIS = "irAnalysis"; String WARM_CACHE = "warmCache"; String USERS_IMPORT = "usersImport"; String JOB_IS_ALREADY_SCHEDULED = "Job for provider %s is already scheduled"; @@ -48,9 +49,8 @@ interface Params { String VOCABULARY_DATABASE_SCHEMA = "vocabulary_database_schema"; String COHORT_DEFINITION_ID = "cohort_definition_id"; - String PREDICTION_ANALYSIS_ID = "prediction_analysis_id"; - String PREDICTION_SKELETON_VERSION = "v0.0.1"; - String ESTIMATION_ANALYSIS_ID = "estimation_analysis_id"; + String COHORT_CHARACTERIZATION_ID = "cohort_characterization_id"; + String PATHWAY_ANALYSIS_ID = "pathway_analysis_id"; String UPDATE_PASSWORD = "update_password"; String SOURCE_ID = "source_id"; String SOURCE_KEY = "source_key"; diff --git a/src/main/java/org/ohdsi/webapi/JobConfig.java b/src/main/java/org/ohdsi/webapi/JobConfig.java index 84e68430ea..8d31f7b89c 100644 --- a/src/main/java/org/ohdsi/webapi/JobConfig.java +++ b/src/main/java/org/ohdsi/webapi/JobConfig.java @@ -32,6 +32,8 @@ import org.springframework.core.task.TaskExecutor; import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.TransactionDefinition; +import org.springframework.transaction.support.TransactionTemplate; /** * Spring Batch 5.x configuration for Java 21 / Spring Boot 3.2 @@ -129,6 +131,33 @@ public JobTemplate jobTemplate(JobLauncher jobLauncher, JobRepository jobReposit return new 
JobTemplate(jobLauncher, jobRepository, authorizationService, batchTransactionManager); } + /** + * TransactionTemplate for batch tasklets using batchTransactionManager. + * This ensures tasklets use the same transaction manager as the Spring Batch step, + * preventing conflicts when creating nested transactions. + */ + @Bean("batchTransactionTemplate") + public TransactionTemplate batchTransactionTemplate( + @Qualifier("batchTransactionManager") PlatformTransactionManager batchTransactionManager) { + TransactionTemplate template = new TransactionTemplate(); + template.setTransactionManager(batchTransactionManager); + return template; + } + + /** + * TransactionTemplate with PROPAGATION_REQUIRES_NEW for batch tasklets. + * Used when tasklets need to commit data immediately (e.g., cache updates) + * independent of the step's transaction. + */ + @Bean("batchTransactionTemplateRequiresNew") + public TransactionTemplate batchTransactionTemplateRequiresNew( + @Qualifier("batchTransactionManager") PlatformTransactionManager batchTransactionManager) { + TransactionTemplate template = new TransactionTemplate(); + template.setTransactionManager(batchTransactionManager); + template.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW); + return template; + } + @Bean public SearchableJobExecutionDao searchableJobExecutionDao(DataSource dataSource) { JdbcSearchableJobExecutionDao dao = new JdbcSearchableJobExecutionDao(); diff --git a/src/main/java/org/ohdsi/webapi/WebMvcConfig.java b/src/main/java/org/ohdsi/webapi/WebMvcConfig.java index fa98cb31d3..f056e5e480 100644 --- a/src/main/java/org/ohdsi/webapi/WebMvcConfig.java +++ b/src/main/java/org/ohdsi/webapi/WebMvcConfig.java @@ -3,6 +3,8 @@ import org.ohdsi.webapi.i18n.mvc.LocaleInterceptor; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Configuration; +import org.springframework.data.web.config.EnableSpringDataWebSupport; +import 
org.springframework.data.web.config.EnableSpringDataWebSupport.PageSerializationMode; import org.springframework.http.converter.HttpMessageConverter; import org.springframework.web.servlet.config.annotation.InterceptorRegistry; import org.springframework.web.servlet.config.annotation.PathMatchConfigurer; @@ -15,6 +17,7 @@ * Configures interceptors, message converters, and other MVC components. */ @Configuration +@EnableSpringDataWebSupport(pageSerializationMode = PageSerializationMode.VIA_DTO) public class WebMvcConfig implements WebMvcConfigurer { @Autowired(required = false) diff --git a/src/main/java/org/ohdsi/webapi/arachne/commons/types/CommonCDMVersionDTO.java b/src/main/java/org/ohdsi/webapi/arachne/commons/types/CommonCDMVersionDTO.java new file mode 100644 index 0000000000..fa40d15b91 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/arachne/commons/types/CommonCDMVersionDTO.java @@ -0,0 +1,27 @@ +/* + * + * Copyright 2018 Odysseus Data Services, inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Company: Odysseus Data Services, Inc. 
+ * Product Owner/Architecture: Gregory Klebanov + * Authors: Pavel Grafkin, Alexandr Ryabokon, Vitaly Koulakov, Anton Gackovka, Maria Pozhidaeva, Mikhail Mironov + * Created: January 13, 2017 + * + */ + +package org.ohdsi.webapi.arachne.commons.types; + +public enum CommonCDMVersionDTO { + V4_0, V5_0, V5_0_1, V5_1, V5_2, V5_3, V5_3_1, V6_0 +} diff --git a/src/main/java/org/ohdsi/webapi/arachne/commons/types/CommonConstants.java b/src/main/java/org/ohdsi/webapi/arachne/commons/types/CommonConstants.java new file mode 100644 index 0000000000..f197161b97 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/arachne/commons/types/CommonConstants.java @@ -0,0 +1,6 @@ +package org.ohdsi.webapi.arachne.commons.types; + +public class CommonConstants { + + public static final String RESULT_ARCHIVE_SUFFIX = "_result.zip"; +} diff --git a/src/main/java/org/ohdsi/webapi/arachne/commons/types/DBMSType.java b/src/main/java/org/ohdsi/webapi/arachne/commons/types/DBMSType.java new file mode 100644 index 0000000000..afd2e2e9ca --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/arachne/commons/types/DBMSType.java @@ -0,0 +1,69 @@ +/* + * + * Copyright 2018 Odysseus Data Services, inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Company: Odysseus Data Services, Inc. 
+ * Product Owner/Architecture: Gregory Klebanov + * Authors: Pavel Grafkin, Alexandr Ryabokon, Vitaly Koulakov, Anton Gackovka, Maria Pozhidaeva, Mikhail Mironov + * Created: January 31, 2017 + * + */ + +package org.ohdsi.webapi.arachne.commons.types; + +public enum DBMSType { + // Set of databases both supported by OHDSI/SqlRender and OHDSI/DatabaseConnector + POSTGRESQL("PostgreSQL", "postgresql"), + MS_SQL_SERVER("MS SQL Server", "sql server"), + PDW("SQL Server Parallel Data Warehouse", "pdw"), + REDSHIFT("Redshift", "redshift"), + ORACLE("Oracle", "oracle"), + IMPALA("Impala", "impala"), + BIGQUERY("Google BigQuery", "bigquery"), + NETEZZA("Netezza", "netezza"), + HIVE("Apache Hive", "hive"), + SPARK("Spark", "spark"), + SNOWFLAKE("Snowflake", "snowflake"), + SYNAPSE("Azure Synapse", "synapse"); + + private String label; + // For further pass into SqlRender.translateSql as "targetDialect" and DatabaseConnector as "dbms" + private String ohdsiDB; + + DBMSType(String label, String ohdsiDB) { + + this.label = label; + this.ohdsiDB = ohdsiDB; + } + + public String getValue() { + + return this.toString(); + } + + public String getLabel() { + + return label; + } + + public String getOhdsiDB() { + + return ohdsiDB; + } + + public void setOhdsiDB(String ohdsiDB) { + + this.ohdsiDB = ohdsiDB; + } +} diff --git a/src/main/java/org/ohdsi/webapi/arachne/commons/utils/CommonFilenameUtils.java b/src/main/java/org/ohdsi/webapi/arachne/commons/utils/CommonFilenameUtils.java new file mode 100644 index 0000000000..b60fab8e55 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/arachne/commons/utils/CommonFilenameUtils.java @@ -0,0 +1,128 @@ +package org.ohdsi.webapi.arachne.commons.utils; + +import java.util.regex.Pattern; + +/** + * Utility class for sanitizing filenames to ensure they are valid across different operating systems. 
+ * + * Originally from com.odysseusinc.arachne.commons.utils + */ +public class CommonFilenameUtils { + + private static final Pattern WINDOWS_INVALID_CHARS = Pattern.compile("[\\\\/:*?\"<>|]"); + private static final Pattern POSIX_INVALID_CHARS = Pattern.compile("[/\\x00]"); + private static final Pattern CONTROL_CHARS = Pattern.compile("[\\x00-\\x1F\\x7F]"); + + private CommonFilenameUtils() { + // Utility class + } + + /** + * Sanitizes a filename to make it valid for most file systems (Windows, macOS, Linux). + * Removes or replaces characters that are invalid in Windows filenames, which are the most restrictive. + * + * @param filename the filename to sanitize + * @return a sanitized filename safe for use on Windows, macOS, and Linux + */ + public static String sanitizeFilename(String filename) { + if (filename == null || filename.isEmpty()) { + return "unnamed"; + } + + // Replace Windows invalid characters with underscore + String sanitized = WINDOWS_INVALID_CHARS.matcher(filename).replaceAll("_"); + + // Remove control characters + sanitized = CONTROL_CHARS.matcher(sanitized).replaceAll(""); + + // Remove leading/trailing dots and spaces (problematic on Windows) + sanitized = sanitized.replaceAll("^[.\\s]+", ""); + sanitized = sanitized.replaceAll("[.\\s]+$", ""); + + // Handle reserved Windows filenames + if (isReservedWindowsName(sanitized)) { + sanitized = "_" + sanitized; + } + + // If the filename is empty after sanitization, use a default + if (sanitized.isEmpty()) { + sanitized = "unnamed"; + } + + // Limit filename length (255 is common max for most filesystems) + if (sanitized.length() > 255) { + sanitized = sanitized.substring(0, 255); + } + + return sanitized; + } + + /** + * Sanitizes a filename for POSIX-compliant file systems (Linux, macOS, Unix). + * POSIX systems have fewer restrictions - mainly just forward slash and null characters. 
+ * + * @param filename the filename to sanitize + * @return a sanitized filename safe for use on POSIX systems + */ + public static String sanitizeFilenamePosix(String filename) { + if (filename == null || filename.isEmpty()) { + return "unnamed"; + } + + // Replace POSIX invalid characters (forward slash and null) with underscore + String sanitized = POSIX_INVALID_CHARS.matcher(filename).replaceAll("_"); + + // Remove control characters for safety + sanitized = CONTROL_CHARS.matcher(sanitized).replaceAll(""); + + // Remove leading dots (hidden files on Unix/Linux) + sanitized = sanitized.replaceAll("^\\.", ""); + + // If the filename is empty after sanitization, use a default + if (sanitized.isEmpty()) { + sanitized = "unnamed"; + } + + // Limit filename length (255 is common for most POSIX filesystems) + if (sanitized.length() > 255) { + sanitized = sanitized.substring(0, 255); + } + + return sanitized; + } + + /** + * Checks if a filename matches a reserved Windows name. + * Reserved names: CON, PRN, AUX, NUL, COM1-9, LPT1-9 + * + * @param filename the filename to check + * @return true if the filename is a reserved Windows name + */ + private static boolean isReservedWindowsName(String filename) { + if (filename == null || filename.isEmpty()) { + return false; + } + + // Get the base name without extension + String baseName = filename; + int dotIndex = filename.lastIndexOf('.'); + if (dotIndex > 0) { + baseName = filename.substring(0, dotIndex); + } + + String upperName = baseName.toUpperCase(); + + // Check reserved names + if (upperName.equals("CON") || upperName.equals("PRN") || + upperName.equals("AUX") || upperName.equals("NUL")) { + return true; + } + + // Check COM1-9 and LPT1-9 + if (upperName.matches("^(COM|LPT)[1-9]$")) { + return true; + } + + return false; + } +} diff --git a/src/main/java/org/ohdsi/webapi/check/checker/characterization/CharacterizationChecker.java 
b/src/main/java/org/ohdsi/webapi/check/checker/characterization/CharacterizationChecker.java new file mode 100644 index 0000000000..f4a6bfb0eb --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/check/checker/characterization/CharacterizationChecker.java @@ -0,0 +1,41 @@ +package org.ohdsi.webapi.check.checker.characterization; + +import org.ohdsi.webapi.check.builder.ValidatorGroupBuilder; +import org.ohdsi.webapi.check.checker.BaseChecker; +import org.ohdsi.webapi.check.checker.tag.helper.TagHelper; +import org.ohdsi.webapi.cohortcharacterization.dto.CohortCharacterizationDTO; +import org.springframework.stereotype.Component; + +import jakarta.annotation.PostConstruct; +import java.util.Arrays; +import java.util.List; + +import static org.ohdsi.webapi.check.checker.characterization.helper.CharacterizationHelper.prepareCohortBuilder; +import static org.ohdsi.webapi.check.checker.characterization.helper.CharacterizationHelper.prepareFeatureAnalysesBuilder; +import static org.ohdsi.webapi.check.checker.characterization.helper.CharacterizationHelper.prepareStratifyRuleBuilder; + +@Component +public class CharacterizationChecker extends BaseChecker { + private final TagHelper tagHelper; + + public CharacterizationChecker(TagHelper tagHelper) { + this.tagHelper = tagHelper; + } + + @PostConstruct + public void init() { + createValidator(); + } + + @Override + protected List> getGroupBuilder() { + + return Arrays.asList( + tagHelper.prepareTagBuilder(), + prepareCohortBuilder(), + prepareFeatureAnalysesBuilder(), + prepareStratifyRuleBuilder() + ); + } + +} diff --git a/src/main/java/org/ohdsi/webapi/check/checker/characterization/helper/CharacterizationHelper.java b/src/main/java/org/ohdsi/webapi/check/checker/characterization/helper/CharacterizationHelper.java new file mode 100644 index 0000000000..15eb1e1205 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/check/checker/characterization/helper/CharacterizationHelper.java @@ -0,0 +1,44 @@ +package 
org.ohdsi.webapi.check.checker.characterization.helper; + +import org.ohdsi.webapi.check.builder.IterableForEachValidatorBuilder; +import org.ohdsi.webapi.check.builder.NotNullNotEmptyValidatorBuilder; +import org.ohdsi.webapi.check.builder.ValidatorGroupBuilder; +import org.ohdsi.webapi.cohortcharacterization.dto.BaseCcDTO; +import org.ohdsi.webapi.cohortcharacterization.dto.CcStrataDTO; +import org.ohdsi.webapi.cohortcharacterization.dto.CohortCharacterizationDTO; +import org.ohdsi.webapi.cohortdefinition.dto.CohortMetadataDTO; +import org.ohdsi.webapi.cohortdefinition.dto.CohortMetadataImplDTO; +import org.ohdsi.webapi.feanalysis.dto.FeAnalysisShortDTO; + +import java.util.Collection; + +public class CharacterizationHelper { + + public static ValidatorGroupBuilder> prepareFeatureAnalysesBuilder() { + + ValidatorGroupBuilder> builder = new ValidatorGroupBuilder>() + .attrName("feature analyses") + .valueGetter(BaseCcDTO::getFeatureAnalyses) + .validators(new NotNullNotEmptyValidatorBuilder<>()); + return builder; + } + + public static ValidatorGroupBuilder> prepareCohortBuilder() { + + ValidatorGroupBuilder> builder = new ValidatorGroupBuilder>() + .attrName("cohorts") + .valueGetter(BaseCcDTO::getCohorts) + .validators(new NotNullNotEmptyValidatorBuilder<>()); + return builder; + } + + public static ValidatorGroupBuilder> prepareStratifyRuleBuilder() { + + return new ValidatorGroupBuilder>() + .valueGetter(t -> t.getStratas()) + .validators( + new IterableForEachValidatorBuilder() + .groups(CharacterizationStrataHelper.prepareStrataBuilder()) + ); + } +} diff --git a/src/main/java/org/ohdsi/webapi/check/checker/characterization/helper/CharacterizationStrataHelper.java b/src/main/java/org/ohdsi/webapi/check/checker/characterization/helper/CharacterizationStrataHelper.java new file mode 100644 index 0000000000..85e3bb0611 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/check/checker/characterization/helper/CharacterizationStrataHelper.java @@ -0,0 +1,23 @@ 
+package org.ohdsi.webapi.check.checker.characterization.helper; + +import org.ohdsi.circe.cohortdefinition.CriteriaGroup; +import org.ohdsi.webapi.check.builder.ValidatorGroupBuilder; +import org.ohdsi.webapi.cohortcharacterization.dto.CcStrataDTO; + +import static org.ohdsi.webapi.check.checker.criteria.CorelatedCriteriaHelper.prepareCorelatedCriteriaBuilder; +import static org.ohdsi.webapi.check.checker.criteria.CriteriaGroupHelper.prepareCriteriaGroupArrayBuilder; +import static org.ohdsi.webapi.check.checker.criteria.DemographicHelper.prepareDemographicBuilder; + +public class CharacterizationStrataHelper { + public static ValidatorGroupBuilder prepareStrataBuilder() { + ValidatorGroupBuilder builder = new ValidatorGroupBuilder() + .attrName("subgroup analyses") + .valueGetter(t -> t.getCriteria()) + .groups( + prepareCriteriaGroupArrayBuilder(), + prepareDemographicBuilder(), + prepareCorelatedCriteriaBuilder() + ); + return builder; + } +} diff --git a/src/main/java/org/ohdsi/webapi/check/checker/ir/IRChecker.java b/src/main/java/org/ohdsi/webapi/check/checker/ir/IRChecker.java new file mode 100644 index 0000000000..7842258126 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/check/checker/ir/IRChecker.java @@ -0,0 +1,35 @@ +package org.ohdsi.webapi.check.checker.ir; + +import java.util.Arrays; +import java.util.List; +import jakarta.annotation.PostConstruct; +import org.ohdsi.webapi.check.builder.ValidatorGroupBuilder; +import org.ohdsi.webapi.check.checker.BaseChecker; +import org.ohdsi.webapi.check.checker.ir.helper.IRHelper; +import org.ohdsi.webapi.check.checker.tag.helper.TagHelper; +import org.ohdsi.webapi.ircalc.dto.IRAnalysisDTO; +import org.springframework.stereotype.Component; + +@Component +public class IRChecker extends BaseChecker { + private final TagHelper tagHelper; + + public IRChecker(TagHelper tagHelper) { + this.tagHelper = tagHelper; + } + + @PostConstruct + public void init() { + createValidator(); + } + + @Override + protected List> 
getGroupBuilder() { + + return Arrays.asList( + tagHelper.prepareTagBuilder(), + IRHelper.prepareAnalysisExpressionBuilder() + ); + } + +} diff --git a/src/main/java/org/ohdsi/webapi/check/checker/ir/helper/IRAnalysisExpressionHelper.java b/src/main/java/org/ohdsi/webapi/check/checker/ir/helper/IRAnalysisExpressionHelper.java new file mode 100644 index 0000000000..8a6d8f2341 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/check/checker/ir/helper/IRAnalysisExpressionHelper.java @@ -0,0 +1,59 @@ +package org.ohdsi.webapi.check.checker.ir.helper; + +import org.apache.commons.lang3.StringUtils; +import org.ohdsi.webapi.check.builder.IterableForEachValidatorBuilder; +import org.ohdsi.webapi.check.builder.NotNullNotEmptyValidatorBuilder; +import org.ohdsi.webapi.check.builder.PredicateValidatorBuilder; +import org.ohdsi.webapi.check.builder.ValidatorGroupBuilder; +import org.ohdsi.webapi.ircalc.DateRange; +import org.ohdsi.webapi.ircalc.IncidenceRateAnalysisExpression; +import org.ohdsi.webapi.ircalc.StratifyRule; + +import java.util.Collection; +import java.util.List; + +public class IRAnalysisExpressionHelper { + + + public static ValidatorGroupBuilder> prepareOutcomeCohortsBuilder() { + + ValidatorGroupBuilder> builder = new ValidatorGroupBuilder>() + .attrName("outcome cohorts") + .valueGetter(t -> t.outcomeIds) + .validators( + new NotNullNotEmptyValidatorBuilder<>() + ); + return builder; + } + + public static ValidatorGroupBuilder> prepareTargetCohortsBuilder() { + + ValidatorGroupBuilder> builder = new ValidatorGroupBuilder>() + .attrName("target cohorts") + .valueGetter(t -> t.targetIds) + .validators( + new NotNullNotEmptyValidatorBuilder<>() + ); + return builder; + } + + public static ValidatorGroupBuilder prepareStudyWindowBuilder() { + + return new ValidatorGroupBuilder() + .attrName("study window") + .valueGetter(t -> t.studyWindow) + .validators( + new PredicateValidatorBuilder().predicate(w -> StringUtils.isNotBlank(w.startDate) && 
StringUtils.isNotBlank(w.endDate)) + ); + } + + public static ValidatorGroupBuilder> prepareStratifyRuleBuilder() { + + return new ValidatorGroupBuilder>() + .valueGetter(t -> t.strata) + .validators( + new IterableForEachValidatorBuilder() + .groups(IRStrataHelper.prepareStrataBuilder()) + ); + } +} diff --git a/src/main/java/org/ohdsi/webapi/check/checker/ir/helper/IRHelper.java b/src/main/java/org/ohdsi/webapi/check/checker/ir/helper/IRHelper.java new file mode 100644 index 0000000000..f11ddb4f02 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/check/checker/ir/helper/IRHelper.java @@ -0,0 +1,31 @@ +package org.ohdsi.webapi.check.checker.ir.helper; + +import org.ohdsi.analysis.Utils; +import org.ohdsi.webapi.check.builder.NotNullNotEmptyValidatorBuilder; +import org.ohdsi.webapi.check.builder.ValidatorGroupBuilder; +import org.ohdsi.webapi.ircalc.IncidenceRateAnalysisExpression; +import org.ohdsi.webapi.ircalc.dto.IRAnalysisDTO; + +import java.util.function.Function; + +public class IRHelper { + + public static ValidatorGroupBuilder prepareAnalysisExpressionBuilder() { + + Function valueGetter = t -> Utils.deserialize(t.getExpression(), IncidenceRateAnalysisExpression.class); + + ValidatorGroupBuilder builder = new ValidatorGroupBuilder() + .valueGetter(valueGetter) + .validators( + new NotNullNotEmptyValidatorBuilder() + .attrName("expression") + ) + .groups( + IRAnalysisExpressionHelper.prepareTargetCohortsBuilder(), + IRAnalysisExpressionHelper.prepareOutcomeCohortsBuilder(), + IRAnalysisExpressionHelper.prepareStratifyRuleBuilder(), + IRAnalysisExpressionHelper.prepareStudyWindowBuilder() + ); + return builder; + } +} diff --git a/src/main/java/org/ohdsi/webapi/check/checker/ir/helper/IRStrataHelper.java b/src/main/java/org/ohdsi/webapi/check/checker/ir/helper/IRStrataHelper.java new file mode 100644 index 0000000000..23444bfdd7 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/check/checker/ir/helper/IRStrataHelper.java @@ -0,0 +1,23 @@ +package 
org.ohdsi.webapi.check.checker.ir.helper; + +import org.ohdsi.circe.cohortdefinition.CriteriaGroup; +import org.ohdsi.webapi.check.builder.ValidatorGroupBuilder; +import org.ohdsi.webapi.ircalc.StratifyRule; + +import static org.ohdsi.webapi.check.checker.criteria.CorelatedCriteriaHelper.prepareCorelatedCriteriaBuilder; +import static org.ohdsi.webapi.check.checker.criteria.CriteriaGroupHelper.prepareCriteriaGroupArrayBuilder; +import static org.ohdsi.webapi.check.checker.criteria.DemographicHelper.prepareDemographicBuilder; + +public class IRStrataHelper { + public static ValidatorGroupBuilder prepareStrataBuilder() { + ValidatorGroupBuilder builder = new ValidatorGroupBuilder() + .attrName("stratify criteria") + .valueGetter(t -> t.expression) + .groups( + prepareCriteriaGroupArrayBuilder(), + prepareDemographicBuilder(), + prepareCorelatedCriteriaBuilder() + ); + return builder; + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/CcConst.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/CcConst.java new file mode 100644 index 0000000000..e039e24405 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/CcConst.java @@ -0,0 +1,5 @@ +package org.ohdsi.webapi.cohortcharacterization; + +public interface CcConst { + String dateFormat = "yyyy-MM-dd HH:mm:ss"; +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/CcController.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/CcController.java new file mode 100644 index 0000000000..3f3cee349f --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/CcController.java @@ -0,0 +1,673 @@ +package org.ohdsi.webapi.cohortcharacterization; + +import org.ohdsi.webapi.arachne.commons.utils.CommonFilenameUtils; +import org.ohdsi.webapi.arachne.commons.utils.ConverterUtils; +import com.opencsv.CSVWriter; +import org.ohdsi.analysis.cohortcharacterization.design.StandardFeatureAnalysisType; +import 
org.ohdsi.featureExtraction.FeatureExtraction; +import org.ohdsi.webapi.Constants; +import org.ohdsi.webapi.Pagination; +import org.ohdsi.webapi.check.CheckResult; +import org.ohdsi.webapi.check.checker.characterization.CharacterizationChecker; +import org.ohdsi.webapi.cohortcharacterization.domain.CcGenerationEntity; +import org.ohdsi.webapi.cohortcharacterization.domain.CohortCharacterizationEntity; +import org.ohdsi.webapi.cohortcharacterization.dto.CcExportDTO; +import org.ohdsi.webapi.cohortcharacterization.dto.CcPrevalenceStat; +import org.ohdsi.webapi.cohortcharacterization.dto.CcResult; +import org.ohdsi.webapi.cohortcharacterization.dto.CcShortDTO; +import org.ohdsi.webapi.cohortcharacterization.dto.CcTemporalResult; +import org.ohdsi.webapi.cohortcharacterization.dto.CcVersionFullDTO; +import org.ohdsi.webapi.cohortcharacterization.dto.CohortCharacterizationDTO; +import org.ohdsi.webapi.cohortcharacterization.dto.ExportExecutionResultRequest; +import org.ohdsi.webapi.cohortcharacterization.dto.GenerationResults; +import org.ohdsi.webapi.cohortcharacterization.report.Report; +import org.ohdsi.webapi.common.SourceMapKey; +import org.ohdsi.webapi.common.generation.CommonGenerationDTO; +import org.ohdsi.webapi.common.sensitiveinfo.CommonGenerationSensitiveInfoService; +import org.ohdsi.webapi.conceptset.ConceptSetExport; +import org.ohdsi.webapi.feanalysis.FeAnalysisService; +import org.ohdsi.webapi.feanalysis.domain.FeAnalysisEntity; +import org.ohdsi.webapi.feanalysis.domain.FeAnalysisWithStringEntity; +import org.ohdsi.webapi.job.JobExecutionResource; +import org.ohdsi.webapi.security.authz.AuthorizationService; +import org.ohdsi.webapi.source.Source; +import org.ohdsi.webapi.source.SourceService; +import org.ohdsi.webapi.tag.dto.TagNameListRequestDTO; +import org.ohdsi.webapi.util.ExceptionUtils; +import org.ohdsi.webapi.util.ExportUtil; +import org.ohdsi.webapi.util.HttpUtils; +import org.ohdsi.webapi.versioning.dto.VersionDTO; +import 
org.ohdsi.webapi.versioning.dto.VersionUpdateDTO; +import org.springframework.core.convert.ConversionService; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.http.HttpHeaders; +import org.springframework.http.MediaType; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.*; +import org.springframework.web.servlet.mvc.method.annotation.StreamingResponseBody; +import org.springframework.security.access.prepost.PreAuthorize; +import org.ohdsi.webapi.security.authz.access.AccessType; +import org.ohdsi.webapi.security.authz.access.EntityType; +import org.springframework.web.server.ResponseStatusException; +import org.springframework.http.HttpStatus; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.StringWriter; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; +import java.util.zip.ZipEntry; +import java.util.zip.ZipOutputStream; + +@RestController +@RequestMapping("/cohort-characterization") +@Transactional +public class CcController { + + private CcService service; + private FeAnalysisService feAnalysisService; + private ConversionService conversionService; + private ConverterUtils converterUtils; + private final CommonGenerationSensitiveInfoService sensitiveInfoService; + private final SourceService sourceService; + private CharacterizationChecker checker; + private AuthorizationService authorizationService; + + public CcController( + final CcService service, + final FeAnalysisService feAnalysisService, + final ConversionService conversionService, + final ConverterUtils converterUtils, + CommonGenerationSensitiveInfoService sensitiveInfoService, + SourceService sourceService, CharacterizationChecker checker, + AuthorizationService authorizationService) { + 
this.service = service; + this.feAnalysisService = feAnalysisService; + this.conversionService = conversionService; + this.converterUtils = converterUtils; + this.sensitiveInfoService = sensitiveInfoService; + this.sourceService = sourceService; + this.checker = checker; + this.authorizationService = authorizationService; + FeatureExtraction.init(null); + } + + /** + * Create a new cohort characterization + * + * @param dto A cohort characterization JSON definition (name, cohorts, featureAnalyses, etc.) + * @return The cohort characterization definition passed in as input + * with additional fields (createdDate, hasWriteAccess, tags, id, hashcode). + */ + @PostMapping(produces = MediaType.APPLICATION_JSON_VALUE, consumes = MediaType.APPLICATION_JSON_VALUE) + @Transactional + @PreAuthorize("isPermitted('create:cohort-characterization')") + public CohortCharacterizationDTO create(@RequestBody final CohortCharacterizationDTO dto) { + final CohortCharacterizationEntity createdEntity = service.createCc(conversionService.convert(dto, CohortCharacterizationEntity.class)); + return conversionService.convert(createdEntity, CohortCharacterizationDTO.class); + } + + /** + * Create a copy of an existing cohort characterization + * + * @param id An existing cohort characterization id + * @return The cohort characterization definition of the newly created copy + */ + @PostMapping(value = "/{id}", produces = MediaType.APPLICATION_JSON_VALUE) + @PreAuthorize("(isOwner(#id, COHORT_CHARACTERIZATION) or isPermitted(anyOf('read:cohort-characterization','write:cohort-characterization') or hasEntityAccess(#id, COHORT_CHARACTERIZATION, READ)) and isPermitted('create:cohort-characterization')") + public CohortCharacterizationDTO copy(@PathVariable("id") final Long id) { + CohortCharacterizationDTO dto = getDesign(id); + dto.setName(service.getNameForCopy(dto.getName())); + dto.setId(null); + dto.setTags(null); + dto.getStratas().forEach(s -> s.setId(null)); + dto.getParameters().forEach(p 
-> p.setId(null)); + return create(dto); + } + + /** + * Get information about the cohort characterization analyses in WebAPI + * + * @return A json object with information about the characterization analyses in WebAPI. + */ + @GetMapping(produces = MediaType.APPLICATION_JSON_VALUE) + public Page list(@Pagination Pageable pageable) { + return service.getPage(pageable).map(entity -> { + CcShortDTO dto = convertCcToShortDto(entity); + //authorizationService.fillWriteAccess(entity, dto); + //authorizationService.fillReadAccess(entity, dto); + return dto; + }); + } + + /** + * Get the design specification for every cohort-characterization analysis in WebAPI. + * + * @return A json object with all characterization design specifications. + */ + @GetMapping(value = "/design", produces = MediaType.APPLICATION_JSON_VALUE) + public Page listDesign(@Pagination Pageable pageable) { + return service.getPageWithLinkedEntities(pageable).map(entity -> { + CohortCharacterizationDTO dto = convertCcToDto(entity); + //authorizationService.fillWriteAccess(entity, dto); + //authorizationService.fillReadAccess(entity, dto); + return dto; + }); + } + + /** + * Get metadata about a cohort characterization. + * + * @param id The id for an existing cohort characterization + * @return name, createdDate, tags, etc for a single cohort characterization. + */ + @GetMapping(value = "/{id}", produces = MediaType.APPLICATION_JSON_VALUE) + @PreAuthorize("isOwner(#id, COHORT_CHARACTERIZATION) or isPermitted('read:cohort-characterization','write:cohort-characterization') or hasEntityAccess(#id, COHORT_CHARACTERIZATION, READ)") + public CcShortDTO get(@PathVariable("id") final Long id) { + return convertCcToShortDto(service.findById(id)); + } + + /** + * Get the complete design specification for a single cohort characterization. 
+ * + * @param id The id for an existing cohort characterization + * @return JSON containing the cohort characterization specification + */ + @GetMapping(value = "/{id}/design", produces = MediaType.APPLICATION_JSON_VALUE) + @PreAuthorize("isOwner(#id, COHORT_CHARACTERIZATION) or isPermitted('read:cohort-characterization','write:cohort-characterization') or hasEntityAccess(#id, COHORT_CHARACTERIZATION, READ)") + public CohortCharacterizationDTO getDesign(@PathVariable("id") final Long id) { + CohortCharacterizationEntity cc = service.findByIdWithLinkedEntities(id); + ExceptionUtils.throwNotFoundExceptionIfNull(cc, String.format("There is no cohort characterization with id = %d.", id)); + return convertCcToDto(cc); + } + + /** + * Check if a cohort characterization with the same name exists + * + *

This endpoint is used to check that a desired name for a characterization does not already exist in WebAPI

+ * + * @param id The id for a new characterization that does not currently exist in WebAPI + * @param name The desired name for the new cohort characterization + * @return The number of existing characterizations with the same name that was passed as a query parameter + */ + @GetMapping(value = "/{id}/exists", produces = MediaType.APPLICATION_JSON_VALUE) + public int getCountCcWithSameName(@PathVariable(value = "id", required = false) final long id, @RequestParam("name") String name) { + return service.getCountCcWithSameName(id, name); + } + + /** + * Remove a characterization from WebAPI + * + * @param id The id for a characterization that currently exists in WebAPI + */ + @DeleteMapping(value = "/{id}") + @PreAuthorize("isOwner(#id, COHORT_CHARACTERIZATION) or isPermitted('write:cohort-characterization') or hasEntityAccess(#id, COHORT_CHARACTERIZATION, WRITE)") + public void deleteCc(@PathVariable("id") final Long id) { + service.deleteCc(id); + } + + private CohortCharacterizationDTO convertCcToDto(final CohortCharacterizationEntity entity) { + return conversionService.convert(entity, CohortCharacterizationDTO.class); + } + + private CcShortDTO convertCcToShortDto(final CohortCharacterizationEntity entity) { + return conversionService.convert(entity, CcShortDTO.class); + } + + @PutMapping(value = "/{id}", produces = MediaType.APPLICATION_JSON_VALUE, consumes = MediaType.APPLICATION_JSON_VALUE) + @PreAuthorize("isOwner(#id, COHORT_CHARACTERIZATION) or isPermitted('write:cohort-characterization') or hasEntityAccess(#id, COHORT_CHARACTERIZATION, WRITE)") + public CohortCharacterizationDTO update(@PathVariable("id") final Long id, @RequestBody final CohortCharacterizationDTO dto) { + service.saveVersion(dto.getId()); + final CohortCharacterizationEntity entity = conversionService.convert(dto, CohortCharacterizationEntity.class); + entity.setId(id); + final CohortCharacterizationEntity updatedEntity = service.updateCc(entity); + return convertCcToDto(updatedEntity); 
+ } + + /** + * Add a new cohort characterization analysis to WebAPI + * + * @chrisknoll this endpoint did not work when I tried it. + * + * @param dto A cohort characterization definition + * @return The same cohort characterization definition that was passed as input + */ + @PostMapping(value = "/import", produces = MediaType.APPLICATION_JSON_VALUE, consumes = MediaType.APPLICATION_JSON_VALUE) + @PreAuthorize("isPermitted('create:cohort-characterization')") + public CohortCharacterizationDTO doImport(@RequestBody final CcExportDTO dto) { + dto.setName(service.getNameWithSuffix(dto.getName())); + dto.setTags(null); + final CohortCharacterizationEntity entity = conversionService.convert(dto, CohortCharacterizationEntity.class); + return conversionService.convert(service.importCc(entity), CohortCharacterizationDTO.class); + } + + /** + * Get a cohort characterization definition + * + * @param id The id of an existing cohort characterization definition + * @return JSON containing the cohort characterization definition + */ + @GetMapping(value = "/{id}/export", produces = MediaType.APPLICATION_JSON_VALUE) + @PreAuthorize("isOwner(#id, COHORT_CHARACTERIZATION) or isPermitted('read:cohort-characterization','write:cohort-characterization') or hasEntityAccess(#id, COHORT_CHARACTERIZATION, READ)") + public String export(@PathVariable("id") final Long id) { + return service.serializeCc(id); + } + + /** + * Get csv files containing concept sets used in a characterization analysis + * @param id The id for a cohort characterization analysis + * @return A zip file containing three csv files (mappedConcepts, includedConcepts, conceptSetExpression) + */ + @GetMapping(value = "/{id}/export/conceptset", produces = MediaType.APPLICATION_OCTET_STREAM_VALUE) + @PreAuthorize("isOwner(#id, COHORT_CHARACTERIZATION) or isPermitted('read:cohort-characterization') or hasEntityAccess(#id, COHORT_CHARACTERIZATION, READ)") + public ResponseEntity exportConceptSets(@PathVariable("id") final 
Long id) { + + CohortCharacterizationEntity cc = service.findById(id); + ExceptionUtils.throwNotFoundExceptionIfNull(cc, String.format("There is no cohort characterization with id = %d.", id)); + List exportList = service.exportConceptSets(cc); + ByteArrayOutputStream stream = ExportUtil.writeConceptSetExportToCSVAndZip(exportList); + return HttpUtils.respondBinary(stream, String.format("cc_%d_export.zip", id)); + } + + /** + * Check that a cohort characterization definition is correct + * @summary Check a cohort characterization definition + * @param characterizationDTO A cohort characterization definition object + * @return A list of warnings that is possibly empty + */ + @PostMapping(value = "/check", produces = MediaType.APPLICATION_JSON_VALUE, consumes = MediaType.APPLICATION_JSON_VALUE) + public CheckResult runDiagnostics(@RequestBody CohortCharacterizationDTO characterizationDTO){ + return new CheckResult(checker.check(characterizationDTO)); + } + + /** + * Generate a cohort characterization on a single data source + * @param id The id of an existing cohort characterization in WebAPI + * @param sourceKey The identifier for the data source to generate against + * @return A json object with information about the generation job included the status and execution id. 
+ */ + @PostMapping(value = "/{id}/generation/{sourceKey}", produces = MediaType.APPLICATION_JSON_VALUE) + @PreAuthorize("(isOwner(#id, COHORT_CHARACTERIZATION) or isPermitted(anyOf('write:cohort-characterization','read:cohort-characterization')) or hasEntityAccess(#id, COHORT_CHARACTERIZATION, READ)) and (isPermitted('write:source') or hasSourceAccess(#sourceKey, WRITE))") + public JobExecutionResource generate(@PathVariable("id") final Long id, @PathVariable("sourceKey") final String sourceKey) { + CohortCharacterizationEntity cc = service.findByIdWithLinkedEntities(id); + ExceptionUtils.throwNotFoundExceptionIfNull(cc, String.format("There is no cohort characterization with id = %d.", id)); + CheckResult checkResult = runDiagnostics(convertCcToDto(cc)); + if (checkResult.hasCriticalErrors()) { + throw new RuntimeException("Cannot be generated due to critical errors in design. Call 'check' service for further details"); + } + return service.generateCc(id, sourceKey); + } + + /** + * Cancel a cohort characterization generation + * @param id The id of an existing cohort characterization + * @param sourceKey The sourceKey for the data source to generate against + * @return Status code + */ + @DeleteMapping(value = "/{id}/generation/{sourceKey}") + @PreAuthorize("(isOwner(#id, COHORT_CHARACTERIZATION) or isPermitted(anyOf('write:cohort-characterization','read:cohort-characterization')) or hasEntityAccess(#id, COHORT_CHARACTERIZATION, READ)) and (isPermitted('write:source') or hasSourceAccess(#sourceKey, WRITE))") + public ResponseEntity cancelGeneration(@PathVariable("id") final Long id, @PathVariable("sourceKey") final String sourceKey) { + service.cancelGeneration(id, sourceKey); + return ResponseEntity.ok().build(); + } + + /** + * Get all generations for a cohort characterization + * @param id The id for an existing cohort characterization + * @return An array of all generations that includes the generation id, sourceKey, start and end times + */ + 
@GetMapping(value = "/{id}/generation", produces = MediaType.APPLICATION_JSON_VALUE) + @PreAuthorize("isOwner(#id, COHORT_CHARACTERIZATION) or isPermitted(anyOf('read:cohort-characterization','write:cohort-characterization')) or hasEntityAccess(#id, COHORT_CHARACTERIZATION, READ)") + public List getGenerationList(@PathVariable("id") final Long id) { + + Map sourcesMap = sourceService.getSourcesMap(SourceMapKey.BY_SOURCE_KEY); + return sensitiveInfoService.filterSensitiveInfo(converterUtils.convertList(service.findGenerationsByCcId(id), CommonGenerationDTO.class), + info -> Collections.singletonMap(Constants.Variables.SOURCE, sourcesMap.get(info.getSourceKey()))); + } + + /** + * Get generation information by generation id + * @param generationId The generation id to look up + * @return Data about the generation including the generation id, sourceKey, hashcode, start and end times + */ + @GetMapping(value = "/generation/{generationId}", produces = MediaType.APPLICATION_JSON_VALUE) + public CommonGenerationDTO getGeneration(@PathVariable("generationId") final Long generationId) { + checkGenerationReadAccess(generationId); + + CcGenerationEntity generationEntity = service.findGenerationById(generationId); + return sensitiveInfoService.filterSensitiveInfo(conversionService.convert(generationEntity, CommonGenerationDTO.class), + Collections.singletonMap(Constants.Variables.SOURCE, generationEntity.getSource())); + } + + /** + * Delete a cohort characterization generation + * @param generationId + */ + @DeleteMapping(value = "/generation/{generationId}") + public void deleteGeneration(@PathVariable("generationId") final Long generationId) { + checkGenerationWriteAccess(generationId); + service.deleteCcGeneration(generationId); + } + + /** + * Get the definition of a cohort characterization for a given generation id + * @param generationId + * @return A cohort characterization definition + */ + @GetMapping(value = "/generation/{generationId}/design", produces = 
MediaType.APPLICATION_JSON_VALUE)
    public CcExportDTO getGenerationDesign(
            @PathVariable("generationId") final Long generationId) {
        checkGenerationReadAccess(generationId);
        return conversionService.convert(service.findDesignByGenerationId(generationId), CcExportDTO.class);
    }

    /**
     * Get the total number of analyses in a cohort characterization.
     *
     * @param generationId The generation id to look up
     * @return The total number of analyses in the given cohort characterization
     */
    @GetMapping(value = "/generation/{generationId}/result/count", produces = MediaType.APPLICATION_JSON_VALUE)
    public Long getGenerationsResultsCount(@PathVariable("generationId") final Long generationId) {
        checkGenerationReadAccess(generationId);
        return service.getCCResultsTotalCount(generationId);
    }

    /**
     * Get cohort characterization results.
     *
     * @param generationId id for generation
     * @param thresholdLevel The max prevalence for a covariate. Covariates that occur in less than
     *                       {thresholdLevel}% of the cohort will not be returned. Default is 0.01 = 1%
     * @return The complete set of characterization analyses filtered by the thresholdLevel parameter
     */
    @GetMapping(value = "/generation/{generationId}/result", produces = MediaType.APPLICATION_JSON_VALUE)
    public List<CcResult> getGenerationsResults(
            @PathVariable("generationId") final Long generationId,
            @RequestParam(value = "thresholdLevel", defaultValue = "0.01") final float thresholdLevel) {
        checkGenerationReadAccess(generationId);
        return service.findResultAsList(generationId, thresholdLevel);
    }

    @GetMapping(value = "/generation/{generationId}/temporalresult", produces = MediaType.APPLICATION_JSON_VALUE)
    public List<CcTemporalResult> getGenerationTemporalResults(@PathVariable("generationId") final Long generationId) {
        checkGenerationReadAccess(generationId);
        return service.findTemporalResultAsList(generationId);
    }

    @PostMapping(value = "/generation/{generationId}/result", produces = MediaType.APPLICATION_JSON_VALUE)
    public GenerationResults getGenerationsResults(
            @PathVariable("generationId") final Long generationId, @RequestBody ExportExecutionResultRequest params) {
        checkGenerationReadAccess(generationId);
        return service.findData(generationId, params);
    }

    @PostMapping(value = "/generation/{generationId}/result/export", produces = MediaType.APPLICATION_OCTET_STREAM_VALUE, consumes = MediaType.APPLICATION_JSON_VALUE)
    public ResponseEntity exportGenerationsResults(
            @PathVariable("generationId") final Long generationId, @RequestBody ExportExecutionResultRequest params) {
        checkGenerationReadAccess(generationId);
        GenerationResults res = service.exportExecutionResult(generationId, params);
        return prepareExecutionResultResponse(res.getReports());
    }

    /**
     * Builds a zip archive containing one CSV file per report and returns it as a
     * binary "reports.zip" attachment.
     */
    private ResponseEntity prepareExecutionResultResponse(List<Report> reports) {
        try (ByteArrayOutputStream baos = new ByteArrayOutputStream();
             ZipOutputStream zos = new ZipOutputStream(baos)) {

            for (Report report : reports) {
                createZipEntry(zos, report);
            }
            // FIX: finish() writes the ZIP central directory. Previously
            // baos.toByteArray() was taken BEFORE the ZipOutputStream was
            // finished/closed, so the returned bytes formed a corrupt archive.
            zos.finish();
            baos.flush();

            return ResponseEntity
                    .ok()
                    .contentType(MediaType.APPLICATION_OCTET_STREAM)
                    .header(HttpHeaders.CONTENT_DISPOSITION, String.format("attachment; filename=\"%s\"", "reports.zip"))
                    .body(baos.toByteArray());
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    /**
     * Writes a single report as a CSV entry into the given zip stream.
     */
    private void createZipEntry(ZipOutputStream zos, Report report) throws IOException {
        StringWriter sw = new StringWriter();
        CSVWriter csvWriter = new CSVWriter(sw, ',', CSVWriter.DEFAULT_QUOTE_CHARACTER, CSVWriter.DEFAULT_ESCAPE_CHARACTER);
        csvWriter.writeAll(report.header);
        csvWriter.writeAll(report.getResultArray());
        csvWriter.flush();

        String filename = report.analysisName;
        if (report.isComparative) {
            filename = "Export comparison (" + filename + ")";
        } else {
            filename = "Export (" + filename + ")";
        }
        // trim the name so it can be opened by archiver,
        // -1 is for dot character
        if (filename.length() >= 64) {
            filename = filename.substring(0, 63);
        }
        filename = CommonFilenameUtils.sanitizeFilename(filename);
        ZipEntry resultsEntry = new ZipEntry(filename + ".csv");
        zos.putNextEntry(resultsEntry);
        // FIX: encode with an explicit charset — the platform default is environment-dependent.
        zos.write(sw.getBuffer().toString().getBytes(java.nio.charset.StandardCharsets.UTF_8));
        // Close each entry as it is written (previously a single closeEntry() ran after the loop).
        zos.closeEntry();
    }

    @GetMapping(value = "/generation/{generationId}/explore/prevalence/{analysisId}/{cohortId}/{covariateId}", produces = MediaType.APPLICATION_JSON_VALUE)
    public List<CcPrevalenceStat> getPrevalenceStat(@PathVariable("generationId") Long generationId,
                                                    @PathVariable("analysisId") Long analysisId,
                                                    @PathVariable("cohortId") Long cohortId,
                                                    @PathVariable("covariateId") Long covariateId) {
        checkGenerationReadAccess(generationId);

        // Preset analyses are addressed by their system id; translate before querying.
        Integer presetId = convertPresetAnalysisIdToSystem(Math.toIntExact(analysisId));
        List<CcPrevalenceStat> stats = service.getPrevalenceStatsByGenerationId(generationId, Long.valueOf(presetId), cohortId, covariateId);
        convertPresetAnalysesToLocal(stats);
        return stats;
    }

    /**
     * Assign tag to Cohort Characterization
     *
     * @param id
     * @param tagId
     */
@PostMapping(value = "/{id}/tag/", produces = MediaType.APPLICATION_JSON_VALUE) + @Transactional + @PreAuthorize("isOwner(#id, COHORT_CHARACTERIZATION) or isPermitted('admin:tags') or isPermitted('write:cohort-characterization') or hasEntityAccess(#id, COHORT_CHARACTERIZATION, WRITE)") + public void assignTag(@PathVariable("id") final long id, @RequestBody final int tagId) { + service.assignTag(id, tagId); + } + + /** + * Unassign tag from Cohort Characterization + * + * @param id + * @param tagId + */ + @DeleteMapping(value = "/{id}/tag/{tagId}") + @Transactional + @PreAuthorize("isOwner(#id, COHORT_CHARACTERIZATION) or isPermitted('admin:tags') or isPermitted('write:cohort-characterization') or hasEntityAccess(#id, COHORT_CHARACTERIZATION, WRITE)") + public void unassignTag(@PathVariable("id") final long id, @PathVariable("tagId") final int tagId) { + service.unassignTag(id, tagId); + } + + /** + * Assign protected tag to Cohort Characterization + * + * @param id + * @param tagId + */ + @PostMapping(value = "/{id}/protectedtag/") + @Transactional + @PreAuthorize("isOwner(#id, COHORT_CHARACTERIZATION) or isPermitted('admin:tags') or isPermitted('write:cohort-characterization') or hasEntityAccess(#id, COHORT_CHARACTERIZATION, WRITE)") + public void assignPermissionProtectedTag(@PathVariable("id") final long id, @RequestBody final int tagId) { + service.assignTag(id, tagId); + } + + /** + * Unassign protected tag from Cohort Characterization + * + * @param id + * @param tagId + */ + @DeleteMapping(value = "/{id}/protectedtag/{tagId}") + @Transactional + @PreAuthorize("isOwner(#id, COHORT_CHARACTERIZATION) or isPermitted('admin:tags') or isPermitted('write:cohort-characterization') or hasEntityAccess(#id, COHORT_CHARACTERIZATION, WRITE)") + public void unassignPermissionProtectedTag(@PathVariable("id") final long id, @PathVariable("tagId") final int tagId) { + service.unassignTag(id, tagId); + } + + /** + * Get list of versions of Cohort Characterization + * + * 
@param id + * @return + */ + @GetMapping(value = "/{id}/version/", produces = MediaType.APPLICATION_JSON_VALUE) + @PreAuthorize("isOwner(#id, COHORT_CHARACTERIZATION) or isPermitted(anyOf('read:cohort-characterization','write:cohort-characterization')) or hasEntityAccess(#id, COHORT_CHARACTERIZATION, READ)") + public List getVersions(@PathVariable("id") final long id) { + return service.getVersions(id); + } + + /** + * Get version of Cohort Characterization + * + * @param id + * @param version + * @return + */ + @GetMapping(value = "/{id}/version/{version}", produces = MediaType.APPLICATION_JSON_VALUE) + @PreAuthorize("isOwner(#id, COHORT_CHARACTERIZATION) or isPermitted(anyOf('read:cohort-characterization','write:cohort-characterization')) or hasEntityAccess(#id, COHORT_CHARACTERIZATION, READ)") + public CcVersionFullDTO getVersion(@PathVariable("id") final long id, @PathVariable("version") final int version) { + return service.getVersion(id, version); + } + + /** + * Update version of Cohort Characterization + * + * @param id + * @param version + * @param updateDTO + * @return + */ + @PutMapping(value = "/{id}/version/{version}", produces = MediaType.APPLICATION_JSON_VALUE, consumes = MediaType.APPLICATION_JSON_VALUE) + @PreAuthorize("isOwner(#id, COHORT_CHARACTERIZATION) or isPermitted('write:cohort-characterization') or hasEntityAccess(#id, COHORT_CHARACTERIZATION, WRITE)") + public VersionDTO updateVersion(@PathVariable("id") final long id, @PathVariable("version") final int version, + @RequestBody VersionUpdateDTO updateDTO) { + return service.updateVersion(id, version, updateDTO); + } + + /** + * Delete version of Cohort Characterization + * + * @param id + * @param version + */ + @DeleteMapping(value = "/{id}/version/{version}", produces = MediaType.APPLICATION_JSON_VALUE) + @PreAuthorize("isOwner(#id, COHORT_CHARACTERIZATION) or isPermitted('write:cohort-characterization') or hasEntityAccess(#id, COHORT_CHARACTERIZATION, WRITE)") + public void 
deleteVersion(@PathVariable("id") final long id, @PathVariable("version") final int version) { + service.deleteVersion(id, version); + } + + /** + * Create a new asset form version of Cohort Characterization + * + * @param id + * @param version + * @return + */ + @PutMapping(value = "/{id}/version/{version}/createAsset", produces = MediaType.APPLICATION_JSON_VALUE) + @PreAuthorize("isOwner(#id, COHORT_CHARACTERIZATION) or isPermitted('write:cohort-characterization') or hasEntityAccess(#id, COHORT_CHARACTERIZATION, WRITE)") + public CohortCharacterizationDTO copyAssetFromVersion(@PathVariable("id") final long id, + @PathVariable("version") final int version) { + return service.copyAssetFromVersion(id, version); + } + + /** + * Get list of cohort characterizations with assigned tags + * + * @param requestDTO + * @return + */ + @PostMapping(value = "/byTags", produces = MediaType.APPLICATION_JSON_VALUE, consumes = MediaType.APPLICATION_JSON_VALUE) + public List listByTags(@RequestBody TagNameListRequestDTO requestDTO) { + if (requestDTO == null || requestDTO.getNames() == null || requestDTO.getNames().isEmpty()) { + return Collections.emptyList(); + } + return service.listByTags(requestDTO); + } + + // --- generation-level authorization helpers (generationId-only endpoints) + private void checkGenerationReadAccess(Long generationId) { + CcGenerationEntity generationEntity = service.findGenerationById(generationId); + ExceptionUtils.throwNotFoundExceptionIfNull(generationEntity, String.format("There is no generation with id = %d.", generationId)); + CohortCharacterizationEntity cc = generationEntity.getCohortCharacterization(); + if (cc == null) { + throw new ResponseStatusException(HttpStatus.NOT_FOUND, "Associated cohort characterization not found"); + } + String sourceKey = generationEntity.getSource() != null ? 
generationEntity.getSource().getSourceKey() : null; + + boolean ccAllowed = authorizationService.isOwner(cc.getId(), EntityType.COHORT_CHARACTERIZATION) + || authorizationService.isPermitted("read:cohort-characterization") + || authorizationService.isPermitted("write:cohort-characterization") + || authorizationService.hasEntityAccess(cc.getId(), EntityType.COHORT_CHARACTERIZATION, AccessType.READ); + + boolean sourceAllowed = sourceKey != null && (authorizationService.isPermitted("read:source") + || authorizationService.isPermitted("write:source") + || authorizationService.hasSourceAccess(sourceKey, AccessType.READ)); + + if (!ccAllowed || !sourceAllowed) { + throw new ResponseStatusException(HttpStatus.FORBIDDEN, "Access denied"); + } + } + + private void checkGenerationWriteAccess(Long generationId) { + CcGenerationEntity generationEntity = service.findGenerationById(generationId); + ExceptionUtils.throwNotFoundExceptionIfNull(generationEntity, String.format("There is no generation with id = %d.", generationId)); + CohortCharacterizationEntity cc = generationEntity.getCohortCharacterization(); + if (cc == null) { + throw new ResponseStatusException(HttpStatus.NOT_FOUND, "Associated cohort characterization not found"); + } + String sourceKey = generationEntity.getSource() != null ? 
generationEntity.getSource().getSourceKey() : null; + + boolean ccAllowed = authorizationService.isOwner(cc.getId(), EntityType.COHORT_CHARACTERIZATION) + || authorizationService.isPermitted("read:cohort-characterization") + || authorizationService.isPermitted("write:cohort-characterization") + || authorizationService.hasEntityAccess(cc.getId(), EntityType.COHORT_CHARACTERIZATION, AccessType.READ); + + boolean sourceAllowed = sourceKey != null && (authorizationService.isPermitted("write:source") + || authorizationService.hasSourceAccess(sourceKey, AccessType.WRITE)); + + if (!ccAllowed || !sourceAllowed) { + throw new ResponseStatusException(HttpStatus.FORBIDDEN, "Access denied"); + } + } + + private void convertPresetAnalysesToLocal(List ccResults) { + + List presetFeAnalyses = feAnalysisService.findPresetAnalysesBySystemNames(ccResults.stream().map(CcResult::getAnalysisName).distinct().collect(Collectors.toList())); + ccResults.stream().filter(res -> Objects.equals(res.getFaType(), StandardFeatureAnalysisType.PRESET.name())) + .forEach(res -> { + presetFeAnalyses.stream().filter(fa -> fa.getDesign().equals(res.getAnalysisName())).findFirst().ifPresent(fa -> { + res.setAnalysisId(fa.getId()); + res.setAnalysisName(fa.getName()); + }); + }); + } + + private Integer convertPresetAnalysisIdToSystem(Integer analysisId) { + + FeAnalysisEntity fe = feAnalysisService.findById(analysisId).orElse(null); + if (fe instanceof FeAnalysisWithStringEntity && fe.isPreset()) { + FeatureExtraction.PrespecAnalysis prespecAnalysis = FeatureExtraction.getNameToPrespecAnalysis().get(((FeAnalysisWithStringEntity) fe).getDesign()); + return prespecAnalysis.analysisId; + } + return analysisId; + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/CcImportEvent.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/CcImportEvent.java new file mode 100644 index 0000000000..7565c09f55 --- /dev/null +++ 
/**
 * Application event published after a cohort characterization import completes,
 * carrying the ids of the feature analyses that were actually saved.
 * Listeners create permissions only for these ids, which prevents duplicated
 * permission entries for analyses that already existed.
 */
public class CcImportEvent {

    // Ids of analyses persisted during the import.
    // NOTE(review): the element type was stripped during extraction (raw List in
    // the recovered text); presumably analysis ids — confirm against the publisher.
    private final List savedAnalysesIds;

    public CcImportEvent(List savedAnalysesIds) {
        this.savedAnalysesIds = savedAnalysesIds;
    }

    public List getSavedAnalysesIds() {
        return savedAnalysesIds;
    }
}
org.ohdsi.webapi.tag.domain.HasTags;
import org.ohdsi.webapi.tag.dto.TagNameListRequestDTO;
import org.ohdsi.webapi.versioning.domain.CharacterizationVersion;
import org.ohdsi.webapi.versioning.dto.VersionDTO;
import org.ohdsi.webapi.versioning.dto.VersionUpdateDTO;
import org.springframework.context.event.EventListener;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;

import java.io.OutputStream;
import java.util.List;

/**
 * Service facade for cohort characterizations: CRUD, import/export,
 * generation lifecycle, results retrieval, versioning and tagging.
 * NOTE(review): several signatures appear with raw types (List, Page) —
 * generic parameters were likely stripped during extraction; confirm upstream.
 */
public interface CcService extends HasTags {
    // --- CRUD ---
    CohortCharacterizationEntity createCc(CohortCharacterizationEntity entity);

    CohortCharacterizationEntity updateCc(CohortCharacterizationEntity entity);

    // Count of other characterizations sharing this name (used for uniqueness checks).
    int getCountCcWithSameName(Long id, String name);

    void deleteCc(Long ccId);

    // --- Import / naming / serialization ---
    CohortCharacterizationEntity importCc(CohortCharacterizationEntity entity);

    String getNameForCopy(String dtoName);

    String getNameWithSuffix(String dtoName);

    String serializeCc(Long id);

    String serializeCc(CohortCharacterizationEntity cohortCharacterizationEntity);

    // --- Lookup ---
    CohortCharacterizationEntity findById(Long id);

    CohortCharacterizationEntity findByIdWithLinkedEntities(Long id);

    // Design snapshot captured at generation time (not the current design).
    CohortCharacterization findDesignByGenerationId(final Long id);

    Page getPageWithLinkedEntities(Pageable pageable);

    Page getPage(Pageable pageable);

    // --- Generation lifecycle ---
    JobExecutionResource generateCc(Long id, final String sourceKey);

    List findGenerationsByCcId(Long id);

    CcGenerationEntity findGenerationById(final Long id);

    List findGenerationsByCcIdAndSource(Long id, String sourceKey);

    // --- Results ---
    List findTemporalResultAsList(Long generationId);

    GenerationResults findResult(Long generationId, ExecutionResultRequest params);

    List findResultAsList(Long generationId, float thresholdLevel);

    List getPrevalenceStatsByGenerationId(final Long id, Long analysisId, final Long cohortId, final Long covariateId);

    void deleteCcGeneration(Long generationId);

    void cancelGeneration(Long id, String sourceKey);

    Long getCCResultsTotalCount(Long id);

    List exportConceptSets(CohortCharacterization cohortCharacterization);

    GenerationResults exportExecutionResult(Long generationId, ExportExecutionResultRequest params);

    GenerationResults findData(final Long generationId, ExecutionResultRequest params);

    // --- Event listeners: re-save dependents when linked entities change ---
    @EventListener
    void onCohortDefinitionChanged(CohortDefinitionChangedEvent event);

    @EventListener
    void onFeAnalysisChanged(FeAnalysisChangedEvent event);

    // --- Versioning ---
    List getVersions(long id);

    CcVersionFullDTO getVersion(long id, int version);

    VersionDTO updateVersion(long id, int version, VersionUpdateDTO updateDTO);

    void deleteVersion(long id, int version);

    CohortCharacterizationDTO copyAssetFromVersion(long id, int version);

    CharacterizationVersion saveVersion(long id);

    // --- Tagging ---
    List listByTags(TagNameListRequestDTO requestDTO);
}

package org.ohdsi.webapi.cohortcharacterization;

import static java.util.stream.Collectors.groupingBy;
import static org.ohdsi.analysis.cohortcharacterization.design.CcResultType.DISTRIBUTION;
import static org.ohdsi.analysis.cohortcharacterization.design.CcResultType.PREVALENCE;
import static org.ohdsi.webapi.Constants.GENERATE_COHORT_CHARACTERIZATION;
import static org.ohdsi.webapi.Constants.Params.COHORT_CHARACTERIZATION_ID;
import static org.ohdsi.webapi.Constants.Params.JOB_AUTHOR;
import static org.ohdsi.webapi.Constants.Params.JOB_NAME;
import static org.ohdsi.webapi.Constants.Params.SOURCE_ID;

import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import
java.util.Date;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.function.Supplier;
import java.util.stream.Collectors;

import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.ohdsi.analysis.Utils;
import org.ohdsi.analysis.WithId;
import org.ohdsi.analysis.cohortcharacterization.design.CohortCharacterization;
import org.ohdsi.analysis.cohortcharacterization.design.StandardFeatureAnalysisType;
import org.ohdsi.analysis.cohortincidence.design.CohortDefinition;
import org.ohdsi.circe.helper.ResourceHelper;
import org.ohdsi.featureExtraction.FeatureExtraction;
import org.ohdsi.sql.SqlSplit;
import org.ohdsi.sql.SqlTranslate;
import org.ohdsi.webapi.Constants;
import org.ohdsi.webapi.JobInvalidator;
import org.ohdsi.webapi.cohortcharacterization.converter.SerializedCcToCcConverter;
import org.ohdsi.webapi.cohortcharacterization.domain.CcFeAnalysisEntity;
import org.ohdsi.webapi.cohortcharacterization.domain.CcGenerationEntity;
import org.ohdsi.webapi.cohortcharacterization.domain.CcParamEntity;
import org.ohdsi.webapi.cohortcharacterization.domain.CcStrataConceptSetEntity;
import org.ohdsi.webapi.cohortcharacterization.domain.CcStrataEntity;
import org.ohdsi.webapi.cohortcharacterization.domain.CohortCharacterizationEntity;
import org.ohdsi.webapi.cohortcharacterization.dto.AbstractTemporalResult;
import org.ohdsi.webapi.cohortcharacterization.dto.CcDistributionStat;
import org.ohdsi.webapi.cohortcharacterization.dto.CcExportDTO;
import org.ohdsi.webapi.cohortcharacterization.dto.CcPrevalenceStat;
import
org.ohdsi.webapi.cohortcharacterization.dto.CcResult; +import org.ohdsi.webapi.cohortcharacterization.dto.CcShortDTO; +import org.ohdsi.webapi.cohortcharacterization.dto.CcTemporalAnnualResult; +import org.ohdsi.webapi.cohortcharacterization.dto.CcTemporalResult; +import org.ohdsi.webapi.cohortcharacterization.dto.CcVersionFullDTO; +import org.ohdsi.webapi.cohortcharacterization.dto.CohortCharacterizationDTO; +import org.ohdsi.webapi.cohortcharacterization.dto.ExecutionResultRequest; +import org.ohdsi.webapi.cohortcharacterization.dto.ExportExecutionResultRequest; +import org.ohdsi.webapi.cohortcharacterization.dto.GenerationResults; +import org.ohdsi.webapi.cohortcharacterization.report.AnalysisItem; +import org.ohdsi.webapi.cohortcharacterization.report.AnalysisResultItem; +import org.ohdsi.webapi.cohortcharacterization.report.ExportItem; +import org.ohdsi.webapi.cohortcharacterization.report.PrevalenceItem; +import org.ohdsi.webapi.cohortcharacterization.report.Report; +import org.ohdsi.webapi.cohortcharacterization.report.TemporalAnnualItem; +import org.ohdsi.webapi.cohortcharacterization.report.TemporalItem; +import org.ohdsi.webapi.cohortcharacterization.repository.AnalysisGenerationInfoEntityRepository; +import org.ohdsi.webapi.cohortcharacterization.repository.CcConceptSetRepository; +import org.ohdsi.webapi.cohortcharacterization.repository.CcFeAnalysisRepository; +import org.ohdsi.webapi.cohortcharacterization.repository.CcGenerationEntityRepository; +import org.ohdsi.webapi.cohortcharacterization.repository.CcParamRepository; +import org.ohdsi.webapi.cohortcharacterization.repository.CcRepository; +import org.ohdsi.webapi.cohortcharacterization.repository.CcStrataRepository; +import org.ohdsi.webapi.cohortcharacterization.specification.CohortCharacterizationImpl; +import org.ohdsi.webapi.cohortdefinition.CohortDefinitionEntity; +import org.ohdsi.webapi.cohortdefinition.event.CohortDefinitionChangedEvent; +import 
org.ohdsi.webapi.common.DesignImportService; +import org.ohdsi.webapi.common.generation.AnalysisGenerationInfoEntity; +import org.ohdsi.webapi.common.generation.GenerationUtils; +import org.ohdsi.webapi.conceptset.ConceptSetExport; +import org.ohdsi.webapi.feanalysis.FeAnalysisService; +import org.ohdsi.webapi.feanalysis.domain.FeAnalysisCriteriaEntity; +import org.ohdsi.webapi.feanalysis.domain.FeAnalysisEntity; +import org.ohdsi.webapi.feanalysis.domain.FeAnalysisWithCriteriaEntity; +import org.ohdsi.webapi.feanalysis.domain.FeAnalysisWithStringEntity; +import org.ohdsi.webapi.feanalysis.event.FeAnalysisChangedEvent; +import org.ohdsi.webapi.job.GeneratesNotification; +import org.ohdsi.webapi.job.JobExecutionResource; +import org.ohdsi.webapi.security.authz.AuthorizationService; +import org.ohdsi.webapi.security.authz.UserEntity; +import org.ohdsi.webapi.service.AbstractDaoService; +import org.ohdsi.webapi.service.FeatureExtractionService; +import org.ohdsi.webapi.service.JobService; +import org.ohdsi.webapi.service.VocabularyService; +import org.ohdsi.webapi.source.Source; +import org.ohdsi.webapi.source.SourceInfo; +import org.ohdsi.webapi.source.SourceService; +import org.ohdsi.webapi.sqlrender.SourceAwareSqlRender; +import org.ohdsi.webapi.tag.dto.TagNameListRequestDTO; +import org.ohdsi.webapi.util.CancelableJdbcTemplate; +import org.ohdsi.webapi.util.EntityUtils; +import org.ohdsi.webapi.util.ExceptionUtils; +import org.ohdsi.webapi.util.ExportUtil; +import org.ohdsi.webapi.util.NameUtils; +import org.ohdsi.webapi.util.SessionUtils; +import org.ohdsi.webapi.util.SourceUtils; +import org.ohdsi.webapi.util.TempFileUtils; +import org.ohdsi.webapi.versioning.domain.CharacterizationVersion; +import org.ohdsi.webapi.versioning.domain.Version; +import org.ohdsi.webapi.versioning.domain.VersionBase; +import org.ohdsi.webapi.versioning.domain.VersionType; +import org.ohdsi.webapi.versioning.dto.VersionDTO; +import org.ohdsi.webapi.versioning.dto.VersionUpdateDTO; 
+import org.ohdsi.webapi.versioning.service.VersionService; +import org.springframework.batch.core.BatchStatus; +import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.JobParametersBuilder; +import org.springframework.batch.core.job.builder.SimpleJobBuilder; +import org.springframework.batch.core.repository.JobRepository; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.context.annotation.DependsOn; +import org.springframework.context.event.EventListener; +import org.springframework.core.convert.ConversionService; +import org.springframework.core.convert.support.GenericConversionService; +import org.springframework.core.env.Environment; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageImpl; +import org.springframework.data.domain.Pageable; +import org.springframework.http.HttpStatus; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Propagation; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.web.server.ResponseStatusException; + +import com.cosium.spring.data.jpa.entity.graph.domain2.EntityGraph; +import com.fasterxml.jackson.core.type.TypeReference; +import com.google.common.collect.ImmutableList; + +import jakarta.annotation.PostConstruct; +import jakarta.persistence.EntityManager; + +@Service +@Transactional +@DependsOn({"ccExportDTOToCcEntityConverter", "cohortDTOToCohortDefinitionConverter", "feAnalysisDTOToFeAnalysisConverter"}) +public class CcServiceImpl extends AbstractDaoService implements CcService, GeneratesNotification { + + private static final String GENERATION_NOT_FOUND_ERROR = "generation cannot be found by id %d"; 
+ private static final String[] PARAMETERS_RESULTS = {"cohort_characterization_generation_id", "threshold_level", "vocabulary_schema"}; + private static final String[] PARAMETERS_RESULTS_FILTERED = {"cohort_characterization_generation_id", "threshold_level", + "analysis_ids", "cohort_ids", "vocabulary_schema"}; + private static final String[] PARAMETERS_COUNT = {"cohort_characterization_generation_id", "vocabulary_schema"}; + private static final String[] PREVALENCE_STATS_PARAMS = {"cdm_database_schema", "cdm_results_schema", "cc_generation_id", "analysis_id", "cohort_id", "covariate_id"}; + private final String QUERY_RESULTS = ResourceHelper.GetResourceAsString("/resources/cohortcharacterizations/sql/queryResults.sql"); + private final String QUERY_TEMPORAL_RESULTS = ResourceHelper.GetResourceAsString("/resources/cohortcharacterizations/sql/queryTemporalResults.sql"); + private final String QUERY_TEMPORAL_ANNUAL_RESULTS = ResourceHelper.GetResourceAsString("/resources/cohortcharacterizations/sql/queryTemporalAnnualResults.sql"); + private final String QUERY_COUNT = ResourceHelper.GetResourceAsString("/resources/cohortcharacterizations/sql/queryCountWithoutThreshold.sql"); + private final String DELETE_RESULTS = ResourceHelper.GetResourceAsString("/resources/cohortcharacterizations/sql/deleteResults.sql"); + private final String DELETE_EXECUTION = ResourceHelper.GetResourceAsString("/resources/cohortcharacterizations/sql/deleteExecution.sql"); + private final String QUERY_PREVALENCE_STATS = ResourceHelper.GetResourceAsString("/resources/cohortcharacterizations/sql/queryCovariateStatsVocab.sql"); + + private final String HYDRA_PACKAGE = "/resources/cohortcharacterizations/hydra/CohortCharacterization_v0.0.1.zip"; + + private final static List INCOMPLETE_STATUSES = ImmutableList.of(BatchStatus.STARTED, BatchStatus.STARTING, BatchStatus.STOPPING, BatchStatus.UNKNOWN) + .stream().map(BatchStatus::name).collect(Collectors.toList()); + + private Map prespecAnalysisMap = 
FeatureExtraction.getNameToPrespecAnalysis(); + + private final EntityGraph defaultEntityGraph = EntityUtils.fromAttributePaths( + "cohortDefinitions", + "featureAnalyses", + "stratas", + "parameters", + "createdBy", + "modifiedBy" + ); + + private final List executionPrevalenceHeaderLines = new ArrayList() {{ + add(new String[]{"Analysis ID", "Analysis name", "Strata ID", + "Strata name", "Cohort ID", "Cohort name", "Covariate ID", "Covariate name", "Covariate short name", + "Count", "Percent"}); + }}; + + private final List executionDistributionHeaderLines = new ArrayList() {{ + add(new String[]{"Analysis ID", "Analysis name", "Strata ID", + "Strata name", "Cohort ID", "Cohort name", "Covariate ID", "Covariate name", "Covariate short name", "Value field","Missing Means Zero", + "Count", "Avg", "StdDev", "Min", "P10", "P25", "Median", "P75", "P90", "Max"}); + }}; + + private final List executionComparativeHeaderLines = new ArrayList() {{ + add(new String[]{"Analysis ID", "Analysis name", "Strata ID", + "Strata name", "Target cohort ID", "Target cohort name", "Comparator cohort ID", "Comparator cohort name", + "Covariate ID", "Covariate name", "Covariate short name", "Target count", "Target percent", + "Comparator count", "Comparator percent", "Std. 
Diff Of Mean"}); + }}; + + private final CcRepository repository; + private final CcParamRepository paramRepository; + private final CcStrataRepository strataRepository; + private final CcConceptSetRepository conceptSetRepository; + private final FeAnalysisService analysisService; + private final CcGenerationEntityRepository ccGenerationRepository; + private final FeatureExtractionService featureExtractionService; + private final DesignImportService designImportService; + private final AnalysisGenerationInfoEntityRepository analysisGenerationInfoEntityRepository; + private final SourceService sourceService; + private final GenerationUtils generationUtils; + private final EntityManager entityManager; + private final ApplicationEventPublisher eventPublisher; + + private final JobRepository jobRepository; + private final SourceAwareSqlRender sourceAwareSqlRender; + private final JobService jobService; + private final JobInvalidator jobInvalidator; + private final GenericConversionService genericConversionService; + private final VocabularyService vocabularyService; + private final CcFeAnalysisRepository ccFeAnalysisRepository; + private VersionService versionService; + + private AuthorizationService authorizationService; + + @Value("${security.defaultGlobalReadPermissions}") + private boolean defaultGlobalReadPermissions; + + private final Environment env; + + public CcServiceImpl( + final CcRepository ccRepository, + final CcParamRepository paramRepository, + final CcStrataRepository strataRepository, + final CcConceptSetRepository conceptSetRepository, + final FeAnalysisService analysisService, + final CcGenerationEntityRepository ccGenerationRepository, + final FeatureExtractionService featureExtractionService, + final ConversionService conversionService, + final DesignImportService designImportService, + final JobRepository jobRepository, + final AnalysisGenerationInfoEntityRepository analysisGenerationInfoEntityRepository, + final SourceService sourceService, + 
            final GenerationUtils generationUtils,
            final SourceAwareSqlRender sourceAwareSqlRender,
            final EntityManager entityManager,
            final JobService jobService,
            final ApplicationEventPublisher eventPublisher,
            final JobInvalidator jobInvalidator,
            final VocabularyService vocabularyService,
            final CcFeAnalysisRepository ccFeAnalysisRepository,
            final VersionService versionService,
            final AuthorizationService authorizationService,
            @Qualifier("conversionService") final GenericConversionService genericConversionService,
            Environment env) {
        this.repository = ccRepository;
        this.paramRepository = paramRepository;
        this.strataRepository = strataRepository;
        this.conceptSetRepository = conceptSetRepository;
        this.analysisService = analysisService;
        this.ccGenerationRepository = ccGenerationRepository;
        this.featureExtractionService = featureExtractionService;
        this.designImportService = designImportService;
        this.jobRepository = jobRepository;
        this.analysisGenerationInfoEntityRepository = analysisGenerationInfoEntityRepository;
        this.sourceService = sourceService;
        this.generationUtils = generationUtils;
        this.sourceAwareSqlRender = sourceAwareSqlRender;
        this.entityManager = entityManager;
        this.jobService = jobService;
        this.eventPublisher = eventPublisher;
        this.jobInvalidator = jobInvalidator;
        this.vocabularyService = vocabularyService;
        this.ccFeAnalysisRepository = ccFeAnalysisRepository;
        this.authorizationService = authorizationService;
        this.genericConversionService = genericConversionService;
        this.versionService = versionService;
        this.env = env;
        // Static injection: the serialized-CC converter needs the conversion service.
        SerializedCcToCcConverter.setConversionService(conversionService);
    }

    /**
     * Persists a new characterization, stamping the current user and time.
     */
    @Override
    public CohortCharacterizationEntity createCc(final CohortCharacterizationEntity entity) {
        entity.setCreatedBy(getCurrentUser());
        entity.setCreatedDate(new Date());
        return saveCc(entity);
    }

    /**
     * Persists the entity together with its linked stratas, parameters and
     * feature-analysis links, then recomputes and stores the design hash.
     * The statement order here matters (save links -> flush/refresh -> reload
     * with links -> hash) — do not reorder.
     */
    private CohortCharacterizationEntity saveCc(final CohortCharacterizationEntity entity) {
        CohortCharacterizationEntity savedEntity = repository.save(entity);

        // Re-parent and persist each owned child against the saved parent.
        for (CcStrataEntity strata : entity.getStratas()) {
            strata.setCohortCharacterization(savedEntity);
            strataRepository.save(strata);
        }

        for (CcParamEntity param : entity.getParameters()) {
            param.setCohortCharacterization(savedEntity);
            paramRepository.save(param);
        }

        for (CcFeAnalysisEntity analysis : entity.getCcFeatureAnalyses()) {
            analysis.setCohortCharacterization(savedEntity);
            ccFeAnalysisRepository.save(analysis);
        }

        entityManager.flush();
        entityManager.refresh(savedEntity);

        savedEntity = findByIdWithLinkedEntities(savedEntity.getId());

        // Temporarily clear modifiedDate so it doesn't affect the serialized form
        // the hash is computed from, then restore it (hash is content-based).
        Date modifiedDate = savedEntity.getModifiedDate();
        savedEntity.setModifiedDate(null);
        final String serialized = this.serializeCc(savedEntity);
        savedEntity.setHashCode(serialized.hashCode());
        savedEntity.setModifiedDate(modifiedDate);

        return repository.save(savedEntity);
    }

    /**
     * Re-saves (and thus re-hashes) every characterization that links the
     * changed cohort definition.
     */
    @EventListener
    @Transactional
    @Override
    public void onCohortDefinitionChanged(CohortDefinitionChangedEvent event) {

        List ccList = repository.findByCohortDefinition(event.getCohortDefinition());
        ccList.forEach(this::saveCc);
    }

    /**
     * Re-saves every characterization that links the changed feature analysis.
     */
    @EventListener
    @Transactional
    public void onFeAnalysisChanged(FeAnalysisChangedEvent event) {

        List ccList = repository.findByFeatureAnalysis(event.getFeAnalysis());
        ccList.forEach(this::saveCc);
    }

    @Override
    @Transactional
    public void assignTag(Long id, int tagId) {
        CohortCharacterizationEntity entity = findById(id);
        assignTag(entity, tagId);
    }

    @Override
    @Transactional
    public void unassignTag(Long id, int tagId) {
        CohortCharacterizationEntity entity = findById(id);
        unassignTag(entity, tagId);
    }

    @Override
    public int getCountCcWithSameName(Long id, String name) {
        return repository.getCountCcWithSameName(id, name);
    }

    @Override
    public void deleteCc(Long ccId) {
        repository.deleteById(ccId);
    }

    @Override
    public CohortCharacterizationEntity
updateCc(final CohortCharacterizationEntity entity) {
        // Bug fix: the original format string had no placeholder, so the entity id
        // was silently dropped from the message; also removed a stray ';;'.
        final CohortCharacterizationEntity foundEntity = repository.findById(entity.getId())
                .orElseThrow(() -> new ResponseStatusException(HttpStatus.NOT_FOUND,
                        String.format("CC entity with id = %d isn't found", entity.getId())));

        updateLinkedFields(entity, foundEntity);

        // Name is only overwritten when a non-empty one is supplied.
        if (StringUtils.isNotEmpty(entity.getName())) {
            foundEntity.setName(entity.getName());
        }
        foundEntity.setDescription(entity.getDescription());
        foundEntity.setStratifiedBy(entity.getStratifiedBy());
        if (Objects.nonNull(entity.getStrataOnly())) {
            foundEntity.setStrataOnly(entity.getStrataOnly());
        }

        foundEntity.setModifiedDate(new Date());
        foundEntity.setModifiedBy(getCurrentUser());

        return saveCc(foundEntity);
    }

    /** Synchronizes all owned/linked collections from the incoming entity. */
    private void updateLinkedFields(final CohortCharacterizationEntity entity, final CohortCharacterizationEntity foundEntity) {
        updateConceptSet(entity, foundEntity);
        updateParams(entity, foundEntity);
        updateAnalyses(entity, foundEntity);
        updateCohorts(entity, foundEntity);
        updateStratas(entity, foundEntity);
    }

    /** Copies, creates or clears the strata concept-set expression. */
    private void updateConceptSet(CohortCharacterizationEntity entity, CohortCharacterizationEntity foundEntity) {
        if (Objects.nonNull(foundEntity.getConceptSetEntity()) && Objects.nonNull(entity.getConceptSetEntity())) {
            foundEntity.getConceptSetEntity().setRawExpression(entity.getConceptSetEntity().getRawExpression());
        } else if (Objects.nonNull(entity.getConceptSetEntity())) {
            CcStrataConceptSetEntity cse = new CcStrataConceptSetEntity();
            cse.setCohortCharacterization(foundEntity);
            cse.setRawExpression(entity.getConceptSetEntity().getRawExpression());
            foundEntity.setConceptSetEntity(cse);
        } else {
            foundEntity.setConceptSetEntity(null);
        }
    }

    /**
     * Deletes stratas no longer referenced, updates matching ones in place,
     * and saves any new ones.
     */
    private void updateStratas(CohortCharacterizationEntity entity, CohortCharacterizationEntity foundEntity) {
        final List<CcStrataEntity> stratasToDelete = getLinksToDelete(foundEntity,
                existingLink -> entity.getStratas().stream().noneMatch(newLink -> Objects.equals(newLink.getId(), existingLink.getId())),
                CohortCharacterizationEntity::getStratas);
        foundEntity.getStratas().removeAll(stratasToDelete);
        strataRepository.deleteAll(stratasToDelete);
        Map<Long, CcStrataEntity> strataEntityMap = foundEntity.getStratas().stream()
                .collect(Collectors.toMap(CcStrataEntity::getId, s -> s));

        List<CcStrataEntity> updatedStratas = entity.getStratas().stream().map(updated -> {
            updated.setCohortCharacterization(foundEntity);
            if (Objects.nonNull(updated.getId())) {
                CcStrataEntity strata = strataEntityMap.get(updated.getId());
                // strata will be null in case of importing new characterization
                if (strata == null) {
                    return updated;
                }
                if (StringUtils.isNotBlank(updated.getName())) {
                    strata.setName(updated.getName());
                }
                strata.setExpressionString(updated.getExpressionString());
                return strata;
            } else {
                return updated;
            }
        }).collect(Collectors.toList());
        entity.setStratas(new HashSet<>(strataRepository.saveAll(updatedStratas)));
    }

    /** Replaces the linked cohort definitions wholesale. */
    private void updateCohorts(final CohortCharacterizationEntity entity, final CohortCharacterizationEntity foundEntity) {
        foundEntity.getCohortDefinitions().clear();
        foundEntity.getCohortDefinitions().addAll(entity.getCohortDefinitions());
    }

    /** Replaces the feature-analysis links wholesale (delete-then-save). */
    private void updateAnalyses(final CohortCharacterizationEntity entity, final CohortCharacterizationEntity foundEntity) {
        ccFeAnalysisRepository.deleteAll(foundEntity.getCcFeatureAnalyses());
        foundEntity.getCcFeatureAnalyses().clear();
        foundEntity.getCcFeatureAnalyses().addAll(entity.getCcFeatureAnalyses());
        ccFeAnalysisRepository.saveAll(foundEntity.getCcFeatureAnalyses());
    }

    /**
     * Returns the existing links (selected by {@code getter}) that match
     * {@code filterPredicate} and should therefore be removed.
     * NOTE(review): the generic signature was mangled during extraction and has
     * been reconstructed from the call sites — confirm against the repository.
     */
    private <T> List<T> getLinksToDelete(final CohortCharacterizationEntity foundEntity,
                                         Predicate<T> filterPredicate,
                                         Function<CohortCharacterizationEntity, ? extends Collection<T>> getter) {
        return getter.apply(foundEntity)
                .stream()
                .filter(filterPredicate)
                .collect(Collectors.toList());
    }

    /** Synchronizes parameter entities: create/update first, then delete stale ones. */
    private void updateParams(final CohortCharacterizationEntity entity, final CohortCharacterizationEntity foundEntity) {
updateOrCreateParams(entity, foundEntity); + deleteParams(entity, foundEntity); + } + + private void deleteParams(final CohortCharacterizationEntity entity, final CohortCharacterizationEntity foundEntity) { + final Map nameToParamFromInputMap = buildParamNameToParamMap(entity); + List paramsForDelete = getLinksToDelete(foundEntity, + parameter -> !nameToParamFromInputMap.containsKey(parameter.getName()), + CohortCharacterizationEntity::getParameters); + foundEntity.getParameters().removeAll(paramsForDelete); + paramRepository.deleteAll(paramsForDelete); + } + + private void updateOrCreateParams(final CohortCharacterizationEntity entity, final CohortCharacterizationEntity foundEntity) { + final Map nameToParamFromDbMap = buildParamNameToParamMap(foundEntity); + final List paramsForCreateOrUpdate = new ArrayList<>(); + for (final CcParamEntity parameter : entity.getParameters()) { + final CcParamEntity entityFromMap = nameToParamFromDbMap.get(parameter.getName()); + parameter.setCohortCharacterization(foundEntity); + if (entityFromMap == null) { + paramsForCreateOrUpdate.add(parameter); + } else if (!StringUtils.equals(entityFromMap.getValue(), parameter.getValue())) { + entityFromMap.setValue(parameter.getValue()); + paramsForCreateOrUpdate.add(entityFromMap); + } + } + paramRepository.saveAll(paramsForCreateOrUpdate); + } + + @Override + public CohortCharacterizationEntity importCc(final CohortCharacterizationEntity entity) { + cleanIds(entity); + + final CohortCharacterizationEntity newCohortCharacterization = new CohortCharacterizationEntity(); + newCohortCharacterization.setName(entity.getName()); + final CohortCharacterizationEntity persistedCohortCharacterization = this.createCc(newCohortCharacterization); + + updateParams(entity, persistedCohortCharacterization); + updateStratas(entity, persistedCohortCharacterization); + updateConceptSet(entity, persistedCohortCharacterization); + + importCohorts(entity, persistedCohortCharacterization); + + final 
CohortCharacterizationEntity savedEntity = saveCc(persistedCohortCharacterization); + List savedAnalysesIds = importAnalyses(entity, savedEntity); + + eventPublisher.publishEvent(new CcImportEvent(savedAnalysesIds)); + + return savedEntity; + } + + @Override + public String getNameForCopy(String dtoName) { + return NameUtils.getNameForCopy(dtoName, this::getNamesLike, repository.findByName(dtoName)); + } + + @Override + public String getNameWithSuffix(String dtoName) { + return NameUtils.getNameWithSuffix(dtoName, this::getNamesLike); + } + + @Override + public String serializeCc(final Long id) { + + return Utils.serialize(exportCc(id), true); + } + + @Override + public String serializeCc(final CohortCharacterizationEntity cohortCharacterizationEntity) { + return new SerializedCcToCcConverter().convertToDatabaseColumn(cohortCharacterizationEntity); + } + + private CohortCharacterizationImpl exportCc(final Long id) { + final CohortCharacterizationEntity cohortCharacterizationEntity = repository.findById(id) + .orElseThrow(() -> new IllegalArgumentException("Cohort characterization cannot be found by id: " + id)); + CohortCharacterizationImpl cc = genericConversionService.convert(cohortCharacterizationEntity, CohortCharacterizationImpl.class); + ExportUtil.clearCreateAndUpdateInfo(cc); + cc.getFeatureAnalyses().forEach(ExportUtil::clearCreateAndUpdateInfo); + cc.getCohorts().forEach(ExportUtil::clearCreateAndUpdateInfo); + cc.setOrganizationName(env.getRequiredProperty("organization.name")); + return cc; + } + + @Override + public CohortCharacterizationEntity findById(final Long id) { + return repository.findById(id) + .orElseThrow(() -> new IllegalArgumentException("Cohort characterization with id: " + id + " cannot be found")); + } + + @Override + public CohortCharacterizationEntity findByIdWithLinkedEntities(final Long id) { + return repository.findById(id, defaultEntityGraph).orElseThrow(() -> new IllegalArgumentException("Characterizaiton with id: " + id + " 
cannot be found")); + } + + @Override + public CohortCharacterization findDesignByGenerationId(final Long id) { + final AnalysisGenerationInfoEntity entity = analysisGenerationInfoEntityRepository.findById(id) + .orElseThrow(() -> new IllegalArgumentException("Analysis with id: " + id + " cannot be found")); + return genericConversionService.convert(Utils.deserialize(entity.getDesign(), + new TypeReference() {}), CohortCharacterizationEntity.class); + } + + @Override + public Page getPageWithLinkedEntities(final Pageable pageable) { + return repository.findAll(pageable, defaultEntityGraph); + + } + + @Override + public Page getPage(final Pageable pageable) { + List ccList = repository.findAll() + .stream() + // TODO apply global read only filtering + //.filter(!defaultGlobalReadPermissions ? entity -> authorizationService.hasReadAccess(entity) : entity -> true) + .collect(Collectors.toList()); + return getPageFromResults(pageable, ccList); + } + + private Page getPageFromResults(Pageable pageable, List results) { + // Calculate the start and end indices for the current page + int startIndex = pageable.getPageNumber() * pageable.getPageSize(); + int endIndex = Math.min(startIndex + pageable.getPageSize(), results.size()); + + return new PageImpl<>(results.subList(startIndex, endIndex), pageable, results.size()); + } + + @Override + @Transactional(propagation = Propagation.NOT_SUPPORTED) + public JobExecutionResource generateCc(final Long id, final String sourceKey) { + + CcService ccService = this; + Source source = getSourceRepository().findBySourceKey(sourceKey); + + JobParametersBuilder builder = new JobParametersBuilder(); + + builder.addString(JOB_NAME, String.format("Generating cohort characterization %d : %s (%s)", id, source.getSourceName(), source.getSourceKey())); + builder.addString(COHORT_CHARACTERIZATION_ID, String.valueOf(id)); + builder.addString(SOURCE_ID, String.valueOf(source.getSourceId())); + builder.addString(JOB_AUTHOR, 
authorizationService.getCurrentUser().login()); + + CancelableJdbcTemplate jdbcTemplate = getSourceJdbcTemplate(source); + + SimpleJobBuilder generateCohortJob = generationUtils.buildJobForCohortBasedAnalysisTasklet( + GENERATE_COHORT_CHARACTERIZATION, + source, + builder, + jdbcTemplate, + chunkContext -> { + Long ccId = Long.valueOf(chunkContext.getStepContext().getJobParameters().get(COHORT_CHARACTERIZATION_ID).toString()); + return ccService.findById(ccId).getCohortDefinitions(); + }, + new GenerateCohortCharacterizationTasklet( + jdbcTemplate, + getBatchTransactionTemplate(), + ccService, + analysisGenerationInfoEntityRepository, + sourceService, + userRepository + ) + ); + + final JobParameters jobParameters = builder.toJobParameters(); + + return jobService.runJob(generateCohortJob.build(), jobParameters); + } + + @Override + public List findGenerationsByCcId(final Long id) { + return ccGenerationRepository.findByCohortCharacterizationIdOrderByIdDesc(id, EntityUtils.fromAttributePaths("source")); + } + + @Override + public CcGenerationEntity findGenerationById(final Long id) { + return ccGenerationRepository.findById(id, EntityUtils.fromAttributePaths("source")).orElseThrow(); + } + + @Override + public List findGenerationsByCcIdAndSource(final Long id, final String sourceKey) { + return ccGenerationRepository.findByCohortCharacterizationIdAndSourceSourceKeyOrderByIdDesc(id, sourceKey, EntityUtils.fromAttributePaths("source")); + } + + public List findAllIncompleteGenerations() { + return ccGenerationRepository.findByStatusIn(INCOMPLETE_STATUSES); + } + + protected List findResults(final Long generationId, ExecutionResultRequest params) { + return executeFindResults(generationId, params, QUERY_RESULTS, getGenerationResults()); + } + + private List findTemporalResults(final Long generationId, ExecutionResultRequest params) { + return executeFindResults(generationId, params, QUERY_TEMPORAL_RESULTS, getGenerationTemporalResult()); + } + + private List 
findTemporalAnnualResults(final Long generationId, ExecutionResultRequest params) { + return executeFindResults(generationId, params, QUERY_TEMPORAL_ANNUAL_RESULTS, getGenerationTemporalAnnualResult()); + } + + private List executeFindResults(final Long generationId, ExecutionResultRequest params, String query, RowMapper rowMapper) { + final CcGenerationEntity generationEntity = ccGenerationRepository.findById(generationId) + .orElseThrow(() -> new IllegalArgumentException(String.format(GENERATION_NOT_FOUND_ERROR, generationId))); + final Source source = generationEntity.getSource(); + String analysis = params.getAnalysisIds().stream().map(String::valueOf).collect(Collectors.joining(",")); + String cohorts = params.getCohortIds().stream().map(String::valueOf).collect(Collectors.joining(",")); + String generationResults = sourceAwareSqlRender.renderSql(source.getSourceId(), query, PARAMETERS_RESULTS_FILTERED, + new String[]{String.valueOf(generationId), String.valueOf(params.getThresholdValuePct()), + analysis, cohorts, SourceUtils.getVocabularyQualifier(source)}); + final String tempSchema = SourceUtils.getTempQualifier(source); + String translatedSql = SqlTranslate.translateSql(generationResults, source.getSourceDialect(), SessionUtils.sessionId(), tempSchema); + return this.getSourceJdbcTemplate(source).query(translatedSql, rowMapper); + } + + @Override + public Long getCCResultsTotalCount(final Long generationId) { + final CcGenerationEntity generationEntity = ccGenerationRepository.findById(generationId) + .orElseThrow(() -> new IllegalArgumentException(String.format(GENERATION_NOT_FOUND_ERROR, generationId))); + final Source source = generationEntity.getSource(); + String countReq = sourceAwareSqlRender.renderSql(source.getSourceId(), QUERY_COUNT, PARAMETERS_COUNT, + new String[]{String.valueOf(generationId), SourceUtils.getVocabularyQualifier(source)}); + final String tempSchema = SourceUtils.getTempQualifier(source); + String translatedSql = 
SqlTranslate.translateSql(countReq, source.getSourceDialect(), SessionUtils.sessionId(), tempSchema); + return this.getSourceJdbcTemplate(source).queryForObject(translatedSql, Long.class); + } + + @Override + public GenerationResults exportExecutionResult(final Long generationId, ExportExecutionResultRequest params) { + GenerationResults res = findResult(generationId, params); + + if (params.isFilterUsed()) { + res.setReports(res.getReports().stream() + .filter(r -> params.isComparative() == null || params.isComparative() == r.isComparative) + .filter(r -> params.isSummary() == null || params.isSummary() == r.isSummary) + .collect(Collectors.toList())); + } + + return res; + } + + @Override + public GenerationResults findData(final Long generationId, ExecutionResultRequest params) { + if (params.getShowEmptyResults()) { + params.setThresholdValuePct(Constants.DEFAULT_THRESHOLD); //Don't cut threshold results when all results requested + } + GenerationResults res = findResult(generationId, params); + boolean hasComparativeReports = res.getReports().stream() + .anyMatch(report -> report.isComparative); + if (hasComparativeReports) { + // if there're comparative reports - return only them as simple reports won't be shown on ui + res.setReports(res.getReports().stream() + .filter(report -> report.isComparative) + .collect(Collectors.toList())); + } + res.setPrevalenceThreshold(params.getThresholdValuePct()); + return res; + } + + @Override + public List findResultAsList(final Long generationId, float thresholdLevel) { + ExecutionResultRequest params = new ExecutionResultRequest(); + CcGenerationEntity generationEntity = ccGenerationRepository.findById(generationId) + .orElseThrow(() -> new IllegalArgumentException(String.format(GENERATION_NOT_FOUND_ERROR, generationId))); + CohortCharacterizationEntity characterization = generationEntity.getCohortCharacterization(); + params.setThresholdValuePct(thresholdLevel); + 
params.setCohortIds(characterization.getCohortDefinitions().stream() + .map(CohortDefinitionEntity::getId).collect(Collectors.toList())); + params.setAnalysisIds(characterization.getFeatureAnalyses().stream() + .map(this::mapFeatureAnalysisId).collect(Collectors.toList())); + params.setDomainIds(generationEntity.getCohortCharacterization().getFeatureAnalyses().stream() + .map(fa -> fa.getDomain().toString()).distinct().collect(Collectors.toList())); + return findResults(generationId, params); + } + + @Override + public List findTemporalResultAsList(final Long generationId) { + ExecutionResultRequest params = getExecutionResultRequest(generationId); + return findTemporalResults(generationId, params); + } + + public List findTemporalAnnualResultAsList(final Long generationId) { + ExecutionResultRequest params = getExecutionResultRequest(generationId); + return findTemporalAnnualResults(generationId, params); + } + + private ExecutionResultRequest getExecutionResultRequest(Long generationId) { + ExecutionResultRequest params = new ExecutionResultRequest(); + CcGenerationEntity generationEntity = ccGenerationRepository.findById(generationId) + .orElseThrow(() -> new IllegalArgumentException(String.format(GENERATION_NOT_FOUND_ERROR, generationId))); + CohortCharacterizationEntity characterization = generationEntity.getCohortCharacterization(); + params.setCohortIds(characterization.getCohortDefinitions().stream() + .map(CohortDefinitionEntity::getId).collect(Collectors.toList())); + params.setAnalysisIds(characterization.getFeatureAnalyses().stream() + .map(this::mapFeatureAnalysisId).collect(Collectors.toList())); + params.setDomainIds(generationEntity.getCohortCharacterization().getFeatureAnalyses().stream() + .map(fa -> fa.getDomain().toString()).distinct().collect(Collectors.toList())); + return params; + } + + @Override + public GenerationResults findResult(final Long generationId, ExecutionResultRequest params) { + CcGenerationEntity generationEntity = 
ccGenerationRepository.findById(generationId) + .orElseThrow(() -> new IllegalArgumentException(String.format(GENERATION_NOT_FOUND_ERROR, generationId))); + + CohortCharacterizationEntity characterization = generationEntity.getCohortCharacterization(); + Set cohortDefs = characterization.getCohorts(); + Set featureAnalyses = characterization.getFeatureAnalyses(); + + // if filter is not used then it must be initialized first + if (!params.isFilterUsed()) { + params.setCohortIds(characterization.getCohortDefinitions().stream() + .map(CohortDefinitionEntity::getId).collect(Collectors.toList())); + params.setAnalysisIds(featureAnalyses.stream().map(this::mapFeatureAnalysisId).collect(Collectors.toList())); + params.setDomainIds(generationEntity.getCohortCharacterization().getFeatureAnalyses().stream() + .map(fa -> fa.getDomain().toString()).distinct().collect(Collectors.toList())); + } else { + List analysisIds = params.getAnalysisIds().stream().map(analysisId -> { + FeAnalysisEntity fe = featureAnalyses.stream() + .filter(fa -> Objects.equals(fa.getId(), analysisId)) + .findFirst() + .orElseThrow(() -> new IllegalArgumentException(String.format("Feature with id=%s not found in analysis", analysisId))); + return mapFeatureAnalysisId(fe); + }).collect(Collectors.toList()); + params.setAnalysisIds(analysisIds); + } + // remove domains which cannot be used as corresponding analyses are not selected + params.getDomainIds().removeIf(s -> + featureAnalyses.stream() + .noneMatch(fe -> fe.getDomain().toString().equals(s) && params.getAnalysisIds().contains(mapFeatureAnalysisId(fe)))); + // remove analyses which cannot be used as corresponding domains are not selected + params.getAnalysisIds().removeIf(s -> + featureAnalyses.stream() + .noneMatch(fe -> mapFeatureAnalysisId(fe).equals(s) && params.getDomainIds().contains(fe.getDomain().toString()))); + + List ccResults = findResults(generationId, params); + List ccTemporalResults = findTemporalResults(generationId, params); + + 
// create initial structure and fill with results + Map analysisMap = new HashMap<>(); + ccResults + .stream() + .peek(cc -> { + if (StandardFeatureAnalysisType.PRESET.toString().equals(cc.getFaType())) { + featureAnalyses.stream() + .filter(fa -> Objects.equals(fa.getDesign(), cc.getAnalysisName())) + .findFirst() + .ifPresent(v -> cc.setAnalysisId(v.getId())); + } + }) + .forEach(ccResult -> { + if (ccResult instanceof CcPrevalenceStat) { + analysisMap.putIfAbsent(ccResult.getAnalysisId(), new AnalysisItem()); + AnalysisItem analysisItem = analysisMap.get(ccResult.getAnalysisId()); + analysisItem.setType(ccResult.getResultType()); + analysisItem.setName(ccResult.getAnalysisName()); + analysisItem.setFaType(ccResult.getFaType()); + List results = analysisItem.getOrCreateCovariateItem( + ((CcPrevalenceStat) ccResult).getCovariateId(), ccResult.getStrataId()); + results.add(ccResult); + } + }); + + cohortDefs = cohortDefs + .stream() + .filter(def -> params.getCohortIds().contains(def.getId())) + .collect(Collectors.toSet()); + + //Temporal + Map prespecAnalysisIdMap = FeatureExtraction.getNameToPrespecAnalysis().values() + .stream().collect(Collectors.toMap(a -> a.analysisId, a -> a)); + List temporalResult = findTemporalResultAsList(generationId); + List mappedResult = temporalResult.stream().map(tr -> mapTemporalResult(featureAnalyses, prespecAnalysisIdMap, tr, CcTemporalResult::new, + (source, target) -> { + target.setStartDay(source.getStartDay()); + target.setEndDay(source.getEndDay()); + target.setTimeId(source.getTimeId()); + })) + .collect(Collectors.toList()); + Map>>> temporalByCohort = groupByResult(mappedResult); + + List temporalAnnualResult = findTemporalAnnualResultAsList(generationId); + List mappedAnnualResult = temporalAnnualResult.stream().map(tr -> mapTemporalResult(featureAnalyses, prespecAnalysisIdMap, tr, CcTemporalAnnualResult::new, + (source, target) -> { + target.setYear(source.getYear()); + })) + .collect(Collectors.toList()); + Map>>> 
annualByCohort = groupByResult(mappedAnnualResult); + + List reports = prepareReportData(analysisMap, cohortDefs, featureAnalyses, params); + reports.forEach(r -> { + r.items.stream() + .filter(i -> Objects.equals(i.getFaType(), StandardFeatureAnalysisType.PRESET.toString())) + .filter(o -> !r.isComparative) + .map(this::toPrevalenceItem) + .forEach(item -> { + setTemporal(temporalByCohort, item, cov -> { + List temporalItems = cov.stream().map(temp -> { + TemporalItem ti = new TemporalItem(); + ti.setAvg(temp.getAvg()); + ti.setCount(temp.getCount()); + ti.setStartDay(temp.getStartDay()); + ti.setEndDay(temp.getEndDay()); + return ti; + }).collect(Collectors.toList()); + item.setTemporal(temporalItems); + }); + setTemporal(annualByCohort, item, cov -> { + List temporalAnnualItems = cov.stream().map(temp -> { + TemporalAnnualItem tai = new TemporalAnnualItem(); + tai.setYear(temp.getYear()); + tai.setAvg(temp.getAvg()); + tai.setCount(temp.getCount()); + return tai; + }).collect(Collectors.toList()); + item.setTemporalAnnual(temporalAnnualItems); + }); + }); + }); + + GenerationResults res = new GenerationResults(); + res.setReports(reports); + res.setCount(ccResults.size()); + return res; + } + + private PrevalenceItem toPrevalenceItem(ExportItem exportItem){ + return PrevalenceItem.class.cast(exportItem); + } + + private static void setTemporal(Map>>> temporalByCohort, PrevalenceItem item, Consumer> setter) { + Optional.ofNullable(temporalByCohort.get(item.getCohortId())) + .flatMap(cr -> Optional.ofNullable(cr.get(item.getAnalysisId())) + .flatMap(ar -> Optional.ofNullable(ar.get(item.getCovariateId())))) + .ifPresent(setter); + } + + private static Map>>> groupByResult(List mappedAnnualResult) { + return mappedAnnualResult.stream() + .collect(groupingBy(T::getCohortId, groupingBy(T::getAnalysisId, groupingBy(T::getCovariateId)))); + } + + private static T mapTemporalResult( + Set featureAnalyses, + Map prespecAnalysisIdMap, + T source, + Supplier constructor, + 
BiConsumer initializer + ) { + T result = constructor.get(); + String analysisName = prespecAnalysisIdMap.get(source.getAnalysisId()).analysisName; + Integer analysisId = featureAnalyses.stream().filter(fa -> Objects.equals(fa.getRawDesign(), analysisName)) + .findFirst() + .map(FeAnalysisEntity::getId) + .orElseThrow(() -> new IllegalArgumentException(String.format("Preset analysis [%s} is not mapped to feature", analysisName))); + result.setAnalysisId(analysisId); + result.setCovariateId(source.getCovariateId()); + result.setAvg(source.getAvg()); + result.setCount(source.getCount()); + result.setAnalysisName(source.getAnalysisName()); + result.setCovariateName(source.getCovariateName()); + result.setStrataId(source.getStrataId()); + result.setCohortId(source.getCohortId()); + result.setStrataName(source.getStrataName()); + result.setConceptId(source.getConceptId()); + initializer.accept(source, result); + return result; + } + + private Integer mapFeatureAnalysisId(FeAnalysisEntity feAnalysis) { + + if (feAnalysis.isPreset()) { + return prespecAnalysisMap.values().stream().filter(p -> Objects.equals(p.analysisName, feAnalysis.getDesign())) + .findFirst() + .orElseThrow(() -> new IllegalArgumentException(String.format("Preset analysis with id=%s does not exist", feAnalysis.getId()))) + .analysisId; + } else { + return feAnalysis.getId(); + } + } + + private String mapFeatureName(FeAnalysisEntity entity) { + + if (StandardFeatureAnalysisType.PRESET == entity.getType()) { + return entity.getDesign().toString(); + } + return entity.getName(); + } + + private List prepareReportData(Map analysisMap, Set cohortDefs, + Set featureAnalyses, ExecutionResultRequest params) { + // Create map to get cohort name by its id + final Map definitionMap = cohortDefs.stream() + .collect(Collectors.toMap(CohortDefinitionEntity::getId, Function.identity())); + // Create map to get feature analyses by its name + final Map feAnalysisMap = featureAnalyses.stream() + 
.collect(Collectors.toMap(this::mapFeatureName, entity -> entity.getDomain().toString())); + + List reports = new ArrayList<>(); + try { + // list to accumulate results from simple reports + List simpleResultSummary = new ArrayList<>(); + // list to accumulate results from comparative reports + List comparativeResultSummary = new ArrayList<>(); + // do not create summary reports when only one analyses is present + boolean ignoreSummary = analysisMap.keySet().size() == 1; + for (Integer analysisId : analysisMap.keySet()) { + analysisMap.putIfAbsent(analysisId, new AnalysisItem()); + AnalysisItem analysisItem = analysisMap.get(analysisId); + AnalysisResultItem resultItem = analysisItem.getSimpleItems(definitionMap, feAnalysisMap); + Report simpleReport = new Report(analysisItem.getName(), analysisId, resultItem); + simpleReport.faType = analysisItem.getFaType(); + simpleReport.domainId = feAnalysisMap.get(analysisItem.getName()); + + if (PREVALENCE.equals(analysisItem.getType())) { + simpleReport.header = executionPrevalenceHeaderLines; + simpleReport.resultType = PREVALENCE; + // Summary comparative reports are only available for prevalence type + simpleResultSummary.add(resultItem); + } else if (DISTRIBUTION.equals(analysisItem.getType())) { + simpleReport.header = executionDistributionHeaderLines; + simpleReport.resultType = DISTRIBUTION; + } + reports.add(simpleReport); + + // comparative mode + if (definitionMap.size() == 2) { + Iterator iter = definitionMap.values().iterator(); + CohortDefinitionEntity firstCohortDef = iter.next(); + CohortDefinitionEntity secondCohortDef = iter.next(); + AnalysisResultItem comparativeResultItem = analysisItem.getComparativeItems(firstCohortDef, + secondCohortDef, feAnalysisMap); + Report comparativeReport = new Report(analysisItem.getName(), analysisId, comparativeResultItem); + comparativeReport.header = executionComparativeHeaderLines; + comparativeReport.isComparative = true; + comparativeReport.faType = 
analysisItem.getFaType(); + comparativeReport.domainId = feAnalysisMap.get(analysisItem.getName()); + if (PREVALENCE.equals(analysisItem.getType())) { + comparativeReport.resultType = PREVALENCE; + // Summary comparative reports are only available for prevalence type + comparativeResultSummary.add(comparativeResultItem); + } else if (DISTRIBUTION.equals(analysisItem.getType())) { + comparativeReport.resultType = DISTRIBUTION; + } + reports.add(comparativeReport); + } + } + if (!ignoreSummary) { + // summary comparative reports are only available for prevalence type + if (!simpleResultSummary.isEmpty()) { + Report simpleSummaryData = new Report("All prevalence covariates", simpleResultSummary); + simpleSummaryData.header = executionPrevalenceHeaderLines; + simpleSummaryData.isSummary = true; + simpleSummaryData.resultType = PREVALENCE; + reports.add(simpleSummaryData); + } + // comparative mode + if (!comparativeResultSummary.isEmpty()) { + Report comparativeSummaryData = new Report("All prevalence covariates", comparativeResultSummary); + comparativeSummaryData.header = executionComparativeHeaderLines; + comparativeSummaryData.isSummary = true; + comparativeSummaryData.isComparative = true; + comparativeSummaryData.resultType = PREVALENCE; + reports.add(comparativeSummaryData); + } + } + + return reports; + } catch (Exception ex) { + throw new RuntimeException(ex); + } + } + + @Override + public List getPrevalenceStatsByGenerationId(Long id, Long analysisId, Long cohortId, Long covariateId) { + final CcGenerationEntity generationEntity = ccGenerationRepository.findById(id) + .orElseThrow(() -> new IllegalArgumentException(String.format(GENERATION_NOT_FOUND_ERROR, id))); + final Source source = generationEntity.getSource(); + final String cdmSchema = SourceUtils.getCdmQualifier(source); + final String resultSchema = SourceUtils.getResultsQualifier(source); + final String tempSchema = SourceUtils.getTempQualifier(source); + String prevalenceStats = 
sourceAwareSqlRender.renderSql(source.getSourceId(), QUERY_PREVALENCE_STATS, PREVALENCE_STATS_PARAMS, + new String[]{ cdmSchema, resultSchema, String.valueOf(id), String.valueOf(analysisId), String.valueOf(cohortId), String.valueOf(covariateId) }); + String translatedSql = SqlTranslate.translateSql(prevalenceStats, source.getSourceDialect(), SessionUtils.sessionId(), tempSchema); + String[] stmts = SqlSplit.splitSql(translatedSql); + if (stmts.length == 1) { // Some DBMS like HIVE fails when a single statement ends with dot-comma + translatedSql = StringUtils.removeEnd(translatedSql.trim(), ";"); + } + return getSourceJdbcTemplate(source).query(translatedSql, (rs, rowNum) -> { + CcPrevalenceStat stat = new CcPrevalenceStat(); + stat.setAvg(rs.getDouble("stat_value")); + stat.setConceptId(rs.getLong("concept_id")); + stat.setConceptName(rs.getString("concept_name")); + stat.setCount(rs.getLong("count_value")); + stat.setCovariateId(rs.getLong("covariate_id")); + stat.setCovariateName(rs.getString("covariate_name")); + stat.setAnalysisId(rs.getInt("analysis_id")); + stat.setAnalysisName(rs.getString("analysis_name")); + stat.setSourceKey(source.getSourceKey()); + stat.setDistance(rs.getInt("min_levels_of_separation")); + stat.setStrataId(rs.getLong("strata_id")); + stat.setStrataName(rs.getString("strata_name")); + stat.setFaType(rs.getString("fa_type")); + return stat; + }); + } + + @Override + public void deleteCcGeneration(Long generationId) { + final CcGenerationEntity generationEntity = ccGenerationRepository.findById(generationId) + .orElseThrow(() -> new IllegalArgumentException(String.format(GENERATION_NOT_FOUND_ERROR, generationId))); + final Source source = generationEntity.getSource(); + final String sql = sourceAwareSqlRender.renderSql(source.getSourceId(), DELETE_RESULTS, PARAMETERS_RESULTS, new String[]{ String.valueOf(generationId) }); + final String tempSchema = SourceUtils.getTempQualifier(source); + final String translatedSql = 
SqlTranslate.translateSql(sql, source.getSourceDialect(), SessionUtils.sessionId(), tempSchema); + getSourceJdbcTemplate(source).execute(translatedSql); + + final String deleteJobSql = sourceAwareSqlRender.renderSql(source.getSourceId(), DELETE_EXECUTION, + new String[]{ "ohdsiSchema", "execution_id" }, + new String[]{ getOhdsiSchema(), String.valueOf(generationId) } + ); + final String translatedJobSql = SqlTranslate.translateSql(deleteJobSql, getDialect()); + getJdbcTemplate().batchUpdate(translatedJobSql.split(";")); + } + + @Override + public void cancelGeneration(Long id, String sourceKey) { + + Source source = getSourceRepository().findBySourceKey(sourceKey); + ExceptionUtils.throwNotFoundExceptionIfNull(source, String.format("Source: %s not found", sourceKey)); + + jobService.cancelJobExecution(j -> { + JobParameters jobParameters = j.getJobParameters(); + String jobName = j.getJobInstance().getJobName(); + return Objects.equals(jobParameters.getString(SOURCE_ID), Integer.toString(source.getSourceId())) + && Objects.equals(jobParameters.getString(COHORT_CHARACTERIZATION_ID), Long.toString(id)) + && Objects.equals(getJobName(), jobName); + }); + } + + public List getVersions(final long id) { + List versions = versionService.getVersions(VersionType.CHARACTERIZATION, id); + return versions.stream() + .map(v -> genericConversionService.convert(v, VersionDTO.class)) + .collect(Collectors.toList()); + } + + public CcVersionFullDTO getVersion(final long id, final int version) { + checkVersion(id, version, false); + CharacterizationVersion characterizationVersion = versionService.getById(VersionType.CHARACTERIZATION, id, version); + + return genericConversionService.convert(characterizationVersion, CcVersionFullDTO.class); + } + + public VersionDTO updateVersion(final long id, final int version, + VersionUpdateDTO updateDTO) { + checkVersion(id, version); + updateDTO.setAssetId(id); + updateDTO.setVersion(version); + CharacterizationVersion updated = 
versionService.update(VersionType.CHARACTERIZATION, updateDTO); + + return genericConversionService.convert(updated, VersionDTO.class); + } + + public void deleteVersion(final long id, final int version) { + checkVersion(id, version); + versionService.delete(VersionType.CHARACTERIZATION, id, version); + } + + public CohortCharacterizationDTO copyAssetFromVersion(final long id, final int version) { + checkVersion(id, version, false); + CharacterizationVersion characterizationVersion = versionService.getById(VersionType.CHARACTERIZATION, id, version); + + CcVersionFullDTO fullDTO = genericConversionService.convert(characterizationVersion, CcVersionFullDTO.class); + CohortCharacterizationEntity entity = + genericConversionService.convert(fullDTO.getEntityDTO(), CohortCharacterizationEntity.class); + entity.setId(null); + entity.setTags(null); + entity.setName(NameUtils.getNameForCopy(entity.getName(), this::getNamesLike, repository.findByName(entity.getName()))); + + CohortCharacterizationEntity saved = createCc(entity); + return genericConversionService.convert(saved, CohortCharacterizationDTO.class); + } + + private void checkVersion(long id, int version) { + checkVersion(id, version, true); + } + + private void checkVersion(long id, int version, boolean checkOwnerShip) { + Version characterizationVersion = versionService.getById(VersionType.CHARACTERIZATION, id, version); + ExceptionUtils.throwNotFoundExceptionIfNull(characterizationVersion, + String.format("There is no cohort characterization version with id = %d.", version)); + + CohortCharacterizationEntity entity = findById(id); + if (checkOwnerShip) { + // TODO: determine how checkOwnership should work + // checkOwnerOrAdminOrGranted(entity); + } + } + + public CharacterizationVersion saveVersion(long id) { + CohortCharacterizationEntity def = findById(id); + CharacterizationVersion version = genericConversionService.convert(def, CharacterizationVersion.class); + + UserEntity user = 
Objects.nonNull(def.getModifiedBy()) ? def.getModifiedBy() : def.getCreatedBy(); + Date versionDate = Objects.nonNull(def.getModifiedDate()) ? def.getModifiedDate() : def.getCreatedDate(); + version.setCreatedBy(user); + version.setCreatedDate(versionDate); + return versionService.create(VersionType.CHARACTERIZATION, version); + } + + private List getNamesLike(String copyName) { + + return repository.findAllByNameStartsWith(copyName).stream().map(CohortCharacterizationEntity::getName).collect(Collectors.toList()); + } + + @Override + public String getJobName() { + return GENERATE_COHORT_CHARACTERIZATION; + } + + @Override + public String getExecutionFoldingKey() { + return COHORT_CHARACTERIZATION_ID; + } + + @Override + public List exportConceptSets(CohortCharacterization cohortCharacterization) { + + SourceInfo prioritySource = new SourceInfo(vocabularyService.getPriorityVocabularySource()); + return cohortCharacterization.getStrataConceptSets().stream() + .map(cs -> vocabularyService.exportConceptSet(cs, prioritySource)) + .collect(Collectors.toList()); + } + + @Override + public List listByTags(TagNameListRequestDTO requestDTO) { + List names = requestDTO.getNames().stream() + .map(name -> name.toLowerCase(Locale.ROOT)) + .collect(Collectors.toList()); + List entities = repository.findByTags(names); + return listByTags(entities, names, CcShortDTO.class); + } + + private RowMapper getGenerationResults() { + return (rs, rowNum) -> { + final String type = rs.getString("type"); + if (StringUtils.equals(type, DISTRIBUTION.toString())) { + final CcDistributionStat distributionStat = new CcDistributionStat(); + gatherForPrevalence(distributionStat, rs); + gatherForDistribution(distributionStat, rs); + return distributionStat; + } else if (StringUtils.equals(type, PREVALENCE.toString())){ + final CcPrevalenceStat prevalenceStat = new CcPrevalenceStat(); + gatherForPrevalence(prevalenceStat, rs); + return prevalenceStat; + } + return null; + }; + } + + private RowMapper 
getGenerationTemporalResult() { + return (rs, rowNum) -> { + CcTemporalResult result = new CcTemporalResult(); + result.setAnalysisName(rs.getString("analysis_name")); + result.setAvg(rs.getDouble("avg_value")); + result.setAnalysisId(rs.getInt("analysis_id")); + result.setCohortId(rs.getInt("cohort_definition_id")); + result.setConceptId(rs.getInt("concept_id")); + result.setCount(rs.getLong("count_value")); + result.setCovariateId(rs.getLong("covariate_id")); + result.setCovariateName(rs.getString("covariate_name")); + result.setStrataId(rs.getInt("strata_id")); + result.setEndDay(rs.getInt("end_day")); + result.setStartDay(rs.getInt("start_day")); + result.setStrataName(rs.getString("strata_name")); + result.setTimeId(rs.getInt("time_id")); + return result; + }; + } + + private RowMapper getGenerationTemporalAnnualResult() { + return (rs, rowNum) -> { + CcTemporalAnnualResult result = new CcTemporalAnnualResult(); + result.setAnalysisName(rs.getString("analysis_name")); + result.setAvg(rs.getDouble("avg_value")); + result.setAnalysisId(rs.getInt("analysis_id")); + result.setCohortId(rs.getInt("cohort_definition_id")); + result.setConceptId(rs.getInt("concept_id")); + result.setCount(rs.getLong("count_value")); + result.setCovariateId(rs.getLong("covariate_id")); + result.setCovariateName(rs.getString("covariate_name")); + result.setStrataId(rs.getInt("strata_id")); + result.setStrataName(rs.getString("strata_name")); + result.setYear(rs.getInt("event_year")); + return result; + }; + } + + private void gatherForPrevalence(final CcPrevalenceStat stat, final ResultSet rs) throws SQLException { + Long generationId = rs.getLong("cc_generation_id"); + CcGenerationEntity ccGeneration = ccGenerationRepository.findById(generationId) + .orElseThrow(() -> new IllegalArgumentException("Generation with id: " + generationId + " cannot be found")); + + stat.setFaType(rs.getString("fa_type")); + stat.setSourceKey(ccGeneration.getSource().getSourceKey()); + 
        // Continuation of a prevalence-stat mapping method whose header is above this
        // excerpt: copies prevalence-result columns from the current ResultSet row
        // into the stat DTO. The time window is derived from the analysis name suffix.
        stat.setCohortId(rs.getInt("cohort_definition_id"));
        stat.setAnalysisId(rs.getInt("analysis_id"));
        stat.setAnalysisName(rs.getString("analysis_name"));
        stat.setResultType(PREVALENCE);
        stat.setCovariateId(rs.getLong("covariate_id"));
        stat.setCovariateName(rs.getString("covariate_name"));
        stat.setConceptName(rs.getString("concept_name"));
        stat.setTimeWindow(getTimeWindow(rs.getString("analysis_name")));
        stat.setConceptId(rs.getLong("concept_id"));
        stat.setAvg(rs.getDouble("avg_value"));
        stat.setCount(rs.getLong("count_value"));
        stat.setStrataId(rs.getLong("strata_id"));
        stat.setStrataName(rs.getString("strata_name"));
    }

    /**
     * Copies distribution-statistic columns (avg/stdev/min/max plus the
     * p10..p90 percentiles and aggregate metadata) from the current ResultSet
     * row into {@code stat}, marking the row as a DISTRIBUTION result.
     *
     * @throws SQLException if a column read fails
     */
    private void gatherForDistribution(final CcDistributionStat stat, final ResultSet rs) throws SQLException {
        stat.setResultType(DISTRIBUTION);
        stat.setAvg(rs.getDouble("avg_value"));
        stat.setStdDev(rs.getDouble("stdev_value"));
        stat.setMin(rs.getDouble("min_value"));
        stat.setP10(rs.getDouble("p10_value"));
        stat.setP25(rs.getDouble("p25_value"));
        stat.setMedian(rs.getDouble("median_value"));
        stat.setP75(rs.getDouble("p75_value"));
        stat.setP90(rs.getDouble("p90_value"));
        stat.setMax(rs.getDouble("max_value"));
        stat.setAggregateId(rs.getInt("aggregate_id"));
        stat.setAggregateName(rs.getString("aggregate_name"));
        // DB stores the flag as an int; 1 means "missing means zero".
        stat.setMissingMeansZero(rs.getInt("missing_means_zero") == 1);
    }

    /**
     * Derives a human-readable time-window label from the suffix of a feature
     * analysis name; returns "None" when no known suffix matches.
     * NOTE(review): throws NPE when analysisName is null (possible if the
     * analysis_name column is nullable) — confirm upstream guarantees non-null.
     */
    public String getTimeWindow(String analysisName) {
        if (analysisName.endsWith("LongTerm")) return "Long Term";
        if (analysisName.endsWith("MediumTerm")) return "Medium Term";
        if (analysisName.endsWith("ShortTerm")) return "Short Term";
        if (analysisName.endsWith("AnyTimePrior")) return "Any Time Prior";
        if (analysisName.endsWith("Overlapping")) return "Overlapping";

        return "None";
    }

    /**
     * Resolves every feature analysis referenced by an imported characterization:
     * CRITERIA_SET and CUSTOM_FE analyses are matched against existing entities
     * (by design/name) or created; PRESET analyses are looked up by system name.
     * Rebuilds the persisted entity's analysis join rows and returns the ids of
     * analyses that were newly created.
     * NOTE(review): generic type arguments were stripped from this copy
     * (raw List/Map/Set/Optional, and `this.>addAnalysis` was presumably
     * `this.<...>addAnalysis`) — restore from upstream before compiling.
     */
    private List importAnalyses(final CohortCharacterizationEntity entity, final CohortCharacterizationEntity persistedEntity) {
        List savedAnalysesIds = new ArrayList<>();
        final Map presetAnalysesMap = buildPresetAnalysisMap(entity);

        final Set analysesSet = new HashSet<>();

        for (final FeAnalysisEntity newAnalysis : entity.getFeatureAnalyses()) {
            switch (newAnalysis.getType()) {
                case CRITERIA_SET:
                    FeAnalysisWithCriteriaEntity criteriaAnalysis = (FeAnalysisWithCriteriaEntity) newAnalysis;
                    List design = criteriaAnalysis.getDesign();
                    // Match on criteria list + concept sets + domain + stat type.
                    Optional entityCriteriaSet = analysisService.findByCriteriaListAndCsAndDomainAndStat(design, criteriaAnalysis);
                    this.>addAnalysis(savedAnalysesIds, analysesSet, criteriaAnalysis, entityCriteriaSet, a -> analysisService.createCriteriaAnalysis(a));
                    break;
                case PRESET:
                    // Preset analyses are never created here — only resolved by design name.
                    analysesSet.add(presetAnalysesMap.get(newAnalysis.getDesign()));
                    break;
                case CUSTOM_FE:
                    FeAnalysisWithStringEntity withStringEntity = (FeAnalysisWithStringEntity) newAnalysis;
                    Optional curAnalysis = analysisService.findByDesignAndName(withStringEntity, withStringEntity.getName());
                    this.addAnalysis(savedAnalysesIds, analysesSet, newAnalysis, curAnalysis, a -> analysisService.createAnalysis(a));
                    break;
                default:
                    throw new IllegalArgumentException("Analysis with type: " + newAnalysis.getType() + " cannot be imported");
            }
        }

        // Replace the persisted characterization's analysis links with the resolved set.
        persistedEntity.getCcFeatureAnalyses().clear();
        Set featureAnalyses = analysesSet.stream().map(a -> {
            CcFeAnalysisEntity feAnalysisEntity = new CcFeAnalysisEntity();
            feAnalysisEntity.setFeatureAnalysis(a);
            feAnalysisEntity.setCohortCharacterization(persistedEntity);
            return feAnalysisEntity;
        }).collect(Collectors.toSet());
        ccFeAnalysisRepository.saveAll(featureAnalyses);

        persistedEntity.getCcFeatureAnalyses().addAll(featureAnalyses);
        return savedAnalysesIds;
    }

    /**
     * Adds an existing analysis to the working set, or — when absent — renames
     * the new analysis to a unique name, persists it via {@code func}, and
     * records its id in {@code savedAnalysesIds}.
     * NOTE(review): the type-parameter clause was stripped from this copy
     * (`private > void` was presumably `private <T extends ...> void`).
     */
    private > void addAnalysis(List savedAnalysesIds, Set entityAnalyses, T newAnalysis,
                               Optional curAnalysis, Function func) {
        if (curAnalysis.isPresent()) {
            entityAnalyses.add(curAnalysis.get());
        } else {
            // Avoid name collisions with analyses already in the DB.
            newAnalysis.setName(NameUtils.getNameWithSuffix(newAnalysis.getName(), this::getFeNamesLike));
            FeAnalysisEntity created = func.apply(newAnalysis);
            entityAnalyses.add(created);
            savedAnalysesIds.add(created.getId());
        }
    }

    /** Maps preset design name -> existing preset analysis entity for this import. */
    private Map buildPresetAnalysisMap(final CohortCharacterizationEntity entity) {
        return analysisService
                .findPresetAnalysesBySystemNames(gatherPresetAnalyses(entity))
                .stream()
                .collect(Collectors.toMap(FeAnalysisEntity::getDesign, Function.identity()));
    }

    /** Collects the design (system name) of every PRESET analysis in the import payload. */
    private List gatherPresetAnalyses(final CohortCharacterizationEntity entity) {
        return entity.getFeatureAnalyses()
                .stream()
                .filter(a -> StandardFeatureAnalysisType.PRESET.equals(a.getType()))
                .map(FeAnalysisEntity::getDesign)
                // Preset designs are stored as plain strings.
                .map(v -> (String) v)
                .collect(Collectors.toList());
    }


    /** Persists (or resolves to existing) every cohort of the import and links them. */
    private void importCohorts(final CohortCharacterizationEntity entity, final CohortCharacterizationEntity persistedEntity) {
        final Set cohortList = entity.getCohortDefinitions().stream()
                .map(designImportService::persistCohortOrGetExisting)
                .collect(Collectors.toSet());
        persistedEntity.setCohortDefinitions(cohortList);
    }

    /** Nulls all ids so the imported graph is persisted as new rows, not updates. */
    private void cleanIds(final CohortCharacterizationEntity entity) {
        entity.setId(null);
        entity.getParameters().forEach(v -> v.setId(null));
        entity.getCohortDefinitions().forEach(v -> v.setId(null));
        entity.getFeatureAnalyses().forEach(v -> v.setId(null));
    }

    /** Indexes an entity's parameters by name for merge-by-name lookups. */
    private Map buildParamNameToParamMap(final CohortCharacterizationEntity foundEntity) {
        return foundEntity.getParameters()
                .stream()
                .collect(Collectors.toMap(CcParamEntity::getName, Function.identity()));
    }

    /** On startup, invalidates any generations still marked incomplete. */
    @PostConstruct
    public void init() {
        invalidateGenerations();
    }

    // Runs in a fresh transaction so invalidation commits independently of startup work.
    private void invalidateGenerations() {
        getTransactionTemplateRequiresNew().execute(transactionStatus -> {
            List generations = findAllIncompleteGenerations();
            generations.forEach(gen -> {
                JobExecution job = jobService.getJobExecution(gen.getId());
                jobInvalidator.invalidationJobExecution(job);
            });
            return null;
        });
    }

    // Name-lookup hook used for unique-suffix generation in addAnalysis.
    private List getFeNamesLike(String name) {
        return analysisService.getNamesLike(name);
    }
}
analysisService.getNamesLike(name); + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/CreateCohortTableTasklet.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/CreateCohortTableTasklet.java new file mode 100644 index 0000000000..b63695c56a --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/CreateCohortTableTasklet.java @@ -0,0 +1,61 @@ +package org.ohdsi.webapi.cohortcharacterization; + +import org.ohdsi.circe.helper.ResourceHelper; +import org.ohdsi.sql.SqlSplit; +import org.ohdsi.sql.SqlTranslate; +import org.ohdsi.webapi.source.SourceService; +import org.ohdsi.webapi.source.Source; +import org.ohdsi.webapi.sqlrender.SourceAwareSqlRender; +import org.ohdsi.webapi.util.SourceUtils; +import org.springframework.batch.core.StepContribution; +import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.batch.core.step.tasklet.Tasklet; +import org.springframework.batch.repeat.RepeatStatus; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.transaction.support.TransactionTemplate; + +import java.util.Arrays; +import java.util.Map; + +import static org.ohdsi.webapi.Constants.Params.SOURCE_ID; +import static org.ohdsi.webapi.Constants.Params.TARGET_TABLE; + +public class CreateCohortTableTasklet implements Tasklet { + + private final String CREATE_COHORT_SQL = ResourceHelper.GetResourceAsString("/resources/cohortcharacterizations/sql/createCohortTable.sql"); + + private final JdbcTemplate jdbcTemplate; + private final TransactionTemplate transactionTemplate; + private final SourceService sourceService; + private final SourceAwareSqlRender sourceAwareSqlRender; + + public CreateCohortTableTasklet(JdbcTemplate jdbcTemplate, TransactionTemplate transactionTemplate, SourceService sourceService, SourceAwareSqlRender sourceAwareSqlRender) { + this.jdbcTemplate = jdbcTemplate; + this.transactionTemplate = transactionTemplate; + this.sourceService = 
sourceService; + this.sourceAwareSqlRender = sourceAwareSqlRender; + } + + @Override + public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception { + + transactionTemplate.execute(transactionStatus -> doTask(chunkContext)); + return RepeatStatus.FINISHED; + } + + private Object doTask(ChunkContext chunkContext) { + + final Map jobParameters = chunkContext.getStepContext().getJobParameters(); + final Integer sourceId = Integer.valueOf(jobParameters.get(SOURCE_ID).toString()); + final String targetTable = jobParameters.get(TARGET_TABLE).toString(); + final String sql = sourceAwareSqlRender.renderSql(sourceId, CREATE_COHORT_SQL, TARGET_TABLE, targetTable ); + + final Source source = sourceService.findBySourceId(sourceId); + final String resultsQualifier = SourceUtils.getResultsQualifier(source); + final String tempQualifier = SourceUtils.getTempQualifier(source, resultsQualifier); + final String translatedSql = SqlTranslate.translateSql(sql, source.getSourceDialect(), null, tempQualifier); + Arrays.stream(SqlSplit.splitSql(translatedSql)).forEach(jdbcTemplate::execute); + + return null; + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/DropCohortTableListener.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/DropCohortTableListener.java new file mode 100644 index 0000000000..c7c9bb985d --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/DropCohortTableListener.java @@ -0,0 +1,63 @@ +package org.ohdsi.webapi.cohortcharacterization; + +import org.ohdsi.webapi.arachne.commons.types.DBMSType; +import java.util.Objects; +import org.apache.commons.lang3.StringUtils; +import org.ohdsi.circe.helper.ResourceHelper; +import org.ohdsi.sql.SqlTranslate; +import org.ohdsi.webapi.source.SourceService; +import org.ohdsi.webapi.source.Source; +import org.ohdsi.webapi.sqlrender.SourceAwareSqlRender; +import org.ohdsi.webapi.util.SourceUtils; +import 
org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.JobParameter; +import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.listener.JobExecutionListenerSupport; +import org.springframework.jdbc.core.JdbcTemplate; + +import java.util.Map; + +import static org.ohdsi.webapi.Constants.Params.SOURCE_ID; +import static org.ohdsi.webapi.Constants.Params.TARGET_TABLE; + +public class DropCohortTableListener extends JobExecutionListenerSupport { + + private final String DROP_TABLE_SQL = ResourceHelper.GetResourceAsString("/resources/cohortcharacterizations/sql/dropCohortTable.sql"); + + private final JdbcTemplate jdbcTemplate; + private final SourceService sourceService; + private final SourceAwareSqlRender sourceAwareSqlRender; + + public DropCohortTableListener(JdbcTemplate jdbcTemplate, SourceService sourceService, SourceAwareSqlRender sourceAwareSqlRender) { + this.jdbcTemplate = jdbcTemplate; + this.sourceService = sourceService; + this.sourceAwareSqlRender = sourceAwareSqlRender; + } + + private Object doTask(JobParameters parameters) { + + final Map> jobParameters = parameters.getParameters(); + final Integer sourceId = Integer.valueOf(jobParameters.get(SOURCE_ID).getValue().toString()); + final String targetTable = jobParameters.get(TARGET_TABLE).getValue().toString(); + final String sql = sourceAwareSqlRender.renderSql(sourceId, DROP_TABLE_SQL, TARGET_TABLE, targetTable ); + + final Source source = sourceService.findBySourceId(sourceId); + final String resultsQualifier = SourceUtils.getResultsQualifier(source); + final String tempQualifier = SourceUtils.getTempQualifier(source, resultsQualifier); + String toRemove = SqlTranslate.translateSql(sql, source.getSourceDialect(), null, tempQualifier); + + if (Objects.equals(DBMSType.SPARK.getOhdsiDB(), source.getSourceDialect()) || + Objects.equals(DBMSType.HIVE.getOhdsiDB(), source.getSourceDialect())) { + toRemove = StringUtils.remove(toRemove, ';'); + 
} + jdbcTemplate.execute(toRemove); + return null; + } + + @Override + public void afterJob(JobExecution jobExecution) { + // No transaction wrapper needed - sourceService.findBySourceId() has @Transactional + // and DROP TABLE is DDL that auto-commits + doTask(jobExecution.getJobParameters()); + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/GenerateCohortCharacterizationTasklet.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/GenerateCohortCharacterizationTasklet.java new file mode 100644 index 0000000000..02f620615f --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/GenerateCohortCharacterizationTasklet.java @@ -0,0 +1,113 @@ +/* + * Copyright 2017 Observational Health Data Sciences and Informatics . + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
 */
package org.ohdsi.webapi.cohortcharacterization;

import com.google.common.collect.ImmutableList;
import org.ohdsi.webapi.arachne.commons.types.DBMSType;
import org.ohdsi.cohortcharacterization.CCQueryBuilder;
import org.ohdsi.sql.SqlSplit;
import org.ohdsi.sql.SqlTranslate;
import org.ohdsi.webapi.cohortcharacterization.converter.SerializedCcToCcConverter;
import org.ohdsi.webapi.cohortcharacterization.domain.CohortCharacterizationEntity;
import org.ohdsi.webapi.cohortcharacterization.repository.AnalysisGenerationInfoEntityRepository;
import org.ohdsi.webapi.common.generation.AnalysisTasklet;
import org.ohdsi.webapi.security.authz.UserEntity;
import org.ohdsi.webapi.security.authz.UserRepository;
import org.ohdsi.webapi.source.SourceService;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.util.CancelableJdbcTemplate;
import org.ohdsi.webapi.util.SourceUtils;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.transaction.support.TransactionTemplate;

import java.util.Map;
import java.util.Optional;

import static org.ohdsi.webapi.Constants.Params.*;

/**
 * Tasklet that builds the full characterization SQL for one job execution:
 * loads the design, snapshots it into the generation-info table, renders the
 * analysis SQL via CCQueryBuilder, applies dialect-specific temp-table
 * rewrites, translates and splits it into executable statements.
 */
public class GenerateCohortCharacterizationTasklet extends AnalysisTasklet {

    private final CcService ccService;
    private final SourceService sourceService;
    private final UserRepository userRepository;

    public GenerateCohortCharacterizationTasklet(
            final CancelableJdbcTemplate jdbcTemplate,
            final TransactionTemplate transactionTemplate,
            final CcService ccService,
            final AnalysisGenerationInfoEntityRepository analysisGenerationInfoEntityRepository,
            final SourceService sourceService,
            final UserRepository userRepository
    ) {
        super(LoggerFactory.getLogger(GenerateCohortCharacterizationTasklet.class), jdbcTemplate, transactionTemplate, analysisGenerationInfoEntityRepository);
        this.ccService = ccService;
        this.sourceService = sourceService;
        this.userRepository = userRepository;
    }

    /**
     * Prepares the per-statement SQL array for the generation step.
     * Side effect: persists a serialized snapshot of the design, keyed by job id,
     * in a separate transaction (saveInfoWithinTheSeparateTransaction).
     */
    @Override
    protected String[] prepareQueries(ChunkContext chunkContext, CancelableJdbcTemplate jdbcTemplate) {
        Map jobParams = chunkContext.getStepContext().getJobParameters();
        CohortCharacterizationEntity cohortCharacterization = ccService.findByIdWithLinkedEntities(
                Long.valueOf(jobParams.get(COHORT_CHARACTERIZATION_ID).toString())
        );
        final Long jobId = chunkContext.getStepContext().getStepExecution().getJobExecution().getId();
        // Throws if the job author no longer resolves to a user account.
        final UserEntity userEntity = userRepository.findByLogin(jobParams.get(JOB_AUTHOR).toString()).orElseThrow();
        String serializedDesign = new SerializedCcToCcConverter().convertToDatabaseColumn(cohortCharacterization);
        saveInfoWithinTheSeparateTransaction(jobId, serializedDesign, userEntity);
        final Integer sourceId = Integer.valueOf(jobParams.get(SOURCE_ID).toString());
        final Source source = sourceService.findBySourceId(sourceId);
        final String cohortTable = String.format("%s.%s", SourceUtils.getTempQualifier(source), jobParams.get(TARGET_TABLE).toString());
        final String sessionId = jobParams.get(SESSION_ID).toString();
        final String tempSchema = SourceUtils.getTempQualifier(source);
        // Annual/temporal output is enabled if ANY linked feature analysis requests it
        // (null flags are treated as false).
        boolean includeAnnual = cohortCharacterization.getCcFeatureAnalyses().stream()
                .anyMatch(fe -> Optional.ofNullable(fe.getIncludeAnnual()).orElse(false));
        boolean includeTemporal = cohortCharacterization.getCcFeatureAnalyses().stream()
                .anyMatch(fe -> Optional.ofNullable(fe.getIncludeTemporal()).orElse(false));
        CCQueryBuilder ccQueryBuilder = new CCQueryBuilder(cohortCharacterization, cohortTable, sessionId,
                SourceUtils.getCdmQualifier(source), SourceUtils.getResultsQualifier(source),
                SourceUtils.getVocabularyQualifier(source), tempSchema, jobId, includeAnnual, includeTemporal);
        String sql = ccQueryBuilder.build();

        /*
         * There is an issue with temp tables on SQL Server: temp table scope is the session or stored procedure.
         * To execute a PreparedStatement, SQL Server uses the stored procedure sp_executesql,
         * and this is the reason why multiple PreparedStatements cannot share the same local temporary table.
         *
         * On the other side, temp tables cannot be re-used in the same PreparedStatement, e.g. a temp table cannot be created, used, dropped
         * and created again in the same PreparedStatement because the SQL optimizer detects the object already exists and fails.
         * When it is required to re-use a temp table, the work has to be separated into several PreparedStatements.
         *
         * An option to use global temp tables also doesn't work since such tables can be unsupported / disabled.
         *
         * Therefore, there are two ways:
         * - either precisely group SQLs into statements so that temp tables aren't re-used in a single statement,
         * - or use 'permanent temporary tables'
         *
         * The second option looks better since such SQL could be exported and executed manually,
         * which is not the case with the first option.
         */
        if (ImmutableList.of(DBMSType.MS_SQL_SERVER.getOhdsiDB(), DBMSType.PDW.getOhdsiDB()).contains(source.getSourceDialect())) {
            // Rewrite "#temp" names to session-prefixed permanent tables in the temp schema.
            sql = sql
                    .replaceAll("#", tempSchema + "." + sessionId + "_")
                    .replaceAll("tempdb\\.\\.", "");
        }
        final String translatedSql = SqlTranslate.translateSql(sql, source.getSourceDialect(), sessionId, tempSchema);
        return SqlSplit.splitSql(translatedSql);
    }

}
/**
 * Stoppable tasklet that (re)generates the cohorts a characterization depends
 * on and copies their (possibly cached) results into the job's staging table.
 * Cohorts run either concurrently (one single-thread executor per cohort) or
 * sequentially on one background thread, per useAsyncCohortGeneration.
 */
public class GenerateLocalCohortTasklet implements StoppableTasklet {

    private static final String COPY_CACHED_RESULTS = "INSERT INTO %s.%s (cohort_definition_id, subject_id, cohort_start_date, cohort_end_date) SELECT %s as cohort_definition_id, subject_id, cohort_start_date, cohort_end_date FROM (%s) r";

    protected TransactionTemplate transactionTemplate;
    private final CancelableJdbcTemplate cancelableJdbcTemplate;
    protected final CohortGenerationService cohortGenerationService;
    protected final SourceService sourceService;
    // Supplies the cohort definitions to generate, from the chunk context.
    protected final Function> cohortGetter;
    private final GenerationCacheHelper generationCacheHelper;
    private boolean useAsyncCohortGeneration;
    // Live statement handles so stop() can cancel in-flight generation SQL.
    private Set statementCancels = ConcurrentHashMap.newKeySet();
    // volatile: written by stop() on the scheduler thread, read by worker threads.
    private volatile boolean stopped = false;

    public GenerateLocalCohortTasklet(TransactionTemplate transactionTemplate,
                                      CancelableJdbcTemplate cancelableJdbcTemplate,
                                      CohortGenerationService cohortGenerationService,
                                      SourceService sourceService,
                                      Function> cohortGetter,
                                      GenerationCacheHelper generationCacheHelper,
                                      boolean useAsyncCohortGeneration) {

        this.transactionTemplate = transactionTemplate;
        this.cancelableJdbcTemplate = cancelableJdbcTemplate;
        this.cohortGenerationService = cohortGenerationService;
        this.sourceService = sourceService;
        this.cohortGetter = cohortGetter;
        this.generationCacheHelper = generationCacheHelper;
        this.useAsyncCohortGeneration = useAsyncCohortGeneration;
    }

    /** Flags the tasklet stopped and cancels every in-flight statement. */
    @Override
    public void stop() {

        try {
            stopped = true;
            for (StatementCancel statementCancel : statementCancels) {
                statementCancel.cancel();
            }
        } catch (SQLException ignored) {
            // best-effort cancel: a failed cancel must not fail the stop request
        }
    }

    @Override
    public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) {

        Map jobParameters = chunkContext.getStepContext().getJobParameters();
        Source source = sourceService.findBySourceId(Integer.valueOf(jobParameters.get(SOURCE_ID).toString()));
        String resultSchema = SourceUtils.getResultsQualifier(source);
        String targetTable = jobParameters.get(TARGET_TABLE).toString();

        Collection cohortDefinitions = cohortGetter.apply(chunkContext);

        // NOTE(review): each Executors.newSingleThreadExecutor() below is never
        // shut down, leaking one non-daemon thread per cohort/job — confirm and
        // consider a shared, shut-down executor.
        if (useAsyncCohortGeneration) {
            // One future (and one dedicated thread) per cohort; block until all finish.
            List executions = cohortDefinitions.stream()
                    .map(cd ->
                            CompletableFuture.supplyAsync(() -> generateCohort(cd, source, resultSchema, targetTable),
                                    Executors.newSingleThreadExecutor()
                            )
                    ).collect(Collectors.toList());
            CompletableFuture.allOf(executions.toArray(new CompletableFuture[]{})).join();
        } else {
            // All cohorts sequentially on one background thread; block until done.
            CompletableFuture.runAsync(() ->
                            cohortDefinitions.stream().forEach(cd -> generateCohort(cd, source, resultSchema, targetTable)),
                    Executors.newSingleThreadExecutor()
            ).join();
        }

        return RepeatStatus.FINISHED;
    }

    /**
     * Generates one cohort (or reuses the generation cache) and copies its rows
     * into the staging table. Returns null; no-op when stop() was requested.
     */
    private Object generateCohort(CohortDefinitionEntity cd, Source source, String resultSchema, String targetTable) {
        if (stopped) {
            return null;
        }
        // Fresh session id per cohort — see the cleanup comment below for why.
        String sessionId = SessionUtils.sessionId();
        CohortGenerationRequestBuilder generationRequestBuilder = new CohortGenerationRequestBuilder(
                sessionId,
                resultSchema
        );
        CohortDefinitionDetailsEntity details = cd.getDetails();
        // Cache key: hash of the cohort's design expression.
        int designHash = this.generationCacheHelper.computeHash(details.getExpression());
        CohortGenerationUtils.insertInclusionRules(cd, source, designHash, resultSchema, sessionId, cancelableJdbcTemplate);

        try {
            StatementCancel stmtCancel = new StatementCancel();
            statementCancels.add(stmtCancel);
            GenerationCacheHelper.CacheResult res = generationCacheHelper.computeCacheIfAbsent(cd, source, generationRequestBuilder, (resId, sqls) -> {
                try {
                    generationCacheHelper.runCancelableCohortGeneration(cancelableJdbcTemplate, stmtCancel, sqls);
                } finally {
                    // Usage of the same sessionId for all cohorts would cause issues in databases w/o real temp tables support
                    // And we cannot postfix existing sessionId with some index because SqlRender requires sessionId to be only 8 symbols long
                    // So, relying on TempTableCleanupManager.removeTempTables from GenerationTaskExceptionHandler is not an option
                    // That's why explicit TempTableCleanupManager call is defined
                    TempTableCleanupManager cleanupManager = new TempTableCleanupManager(
                            cancelableJdbcTemplate,
                            transactionTemplate,
                            source.getSourceDialect(),
                            sessionId,
                            SourceUtils.getTempQualifier(source)
                    );
                    cleanupManager.cleanupTempTables();
                }
            });
            // Copy the cached/generated cohort rows into the job's staging table.
            String sql = String.format(COPY_CACHED_RESULTS, SourceUtils.getTempQualifier(source), targetTable, cd.getId(), res.getSql());
            cancelableJdbcTemplate.batchUpdate(stmtCancel, sql);
            statementCancels.remove(stmtCancel);
        } catch (StatementCancelException ignored) {
            // this exception must be caught to prevent "FAIL" status of the job
        }
        return null;
    }
}
/**
 * Base converter from a cohort-characterization DTO to its entity graph:
 * cohorts, feature-analysis links, parameters, stratas, strata concept sets
 * and tags. Subclasses fix the concrete DTO type.
 * NOTE(review): generic type arguments appear stripped from this copy
 * (e.g. the class type-parameter clause and Set element types) — restore
 * from upstream before compiling.
 */
public abstract class BaseCcDTOToCcEntityConverter>
        extends BaseConversionServiceAwareConverter {

    @Autowired
    private ConverterUtils converterUtils;

    @Override
    public CohortCharacterizationEntity convert(T source) {

        final CohortCharacterizationEntity cohortCharacterization = new CohortCharacterizationEntity();

        // Name is trimmed; description and strata flags copied verbatim.
        cohortCharacterization.setName(StringUtils.trim(source.getName()));
        cohortCharacterization.setDescription(source.getDescription());
        cohortCharacterization.setStratifiedBy(source.getStratifiedBy());
        cohortCharacterization.setStrataOnly(source.getStrataOnly());

        cohortCharacterization.setId(source.getId());

        cohortCharacterization.setCohortDefinitions(converterUtils.convertSet(source.getCohorts(), CohortDefinitionEntity.class));

        // Mutates the incoming DTOs BEFORE conversion — must stay ahead of the
        // feature-analysis mapping below.
        source.getFeatureAnalyses().forEach(fa -> {
            // Legacy Criteria Analyses didn't have statType, they were always PREVALENCE
            if (Objects.equals(fa.getType(), CRITERIA_SET) && fa.getStatType() == null) {
                fa.setStatType(CcResultType.PREVALENCE);
            }
        });
        // Wrap each converted analysis in a join entity carrying the per-link
        // annual/temporal flags and the back-reference to the characterization.
        cohortCharacterization.setFeatureAnalyses(
                source.getFeatureAnalyses().stream().map(fa -> {
                    CcFeAnalysisEntity feAnalysisEntity = new CcFeAnalysisEntity();
                    feAnalysisEntity.setFeatureAnalysis(conversionService.convert(fa, FeAnalysisEntity.class));
                    feAnalysisEntity.setCohortCharacterization(cohortCharacterization);
                    feAnalysisEntity.setIncludeAnnual(fa.getIncludeAnnual());
                    feAnalysisEntity.setIncludeTemporal(fa.getIncludeTemporal());
                    return feAnalysisEntity;
                }).collect(Collectors.toSet())
        );

        cohortCharacterization.setParameters(converterUtils.convertSet(source.getParameters(), CcParamEntity.class));
        cohortCharacterization.setStratas(converterUtils.convertSet(source.getStratas(), CcStrataEntity.class));

        // Strata concept sets are stored as a single serialized JSON expression.
        CcStrataConceptSetEntity conceptSetEntity = new CcStrataConceptSetEntity();
        conceptSetEntity.setCohortCharacterization(cohortCharacterization);
        conceptSetEntity.setRawExpression(Utils.serialize(source.getStrataConceptSets()));
        cohortCharacterization.setConceptSetEntity(conceptSetEntity);

        if (Objects.nonNull(source.getTags())) {
            Set tags = source.getTags().stream()
                    .map(tag -> conversionService.convert(tag, Tag.class)).collect(Collectors.toSet());
            cohortCharacterization.setTags(tags);
        }

        return cohortCharacterization;
    }


}
/**
 * Base entity->DTO converter adding parameters, stratas, strata flags,
 * concept sets and tags on top of the "short" conversion.
 * NOTE(review): generic type-parameter clauses appear stripped from this copy —
 * restore from upstream before compiling.
 */
public abstract class BaseCcToCcDTOConverter extends BaseCcToCcShortDTOConverter {

    @Autowired
    protected ConverterUtils converterUtils;

    @Override
    public T convert(CohortCharacterizationEntity source) {

        final T cohortCharacterizationDTO = super.convert(source);

        // TreeSet ordered by id so DTO collections have a stable, deterministic order.
        Set convertedParameters = new TreeSet<>((o1, o2) -> ObjectUtils.compare(o1.getId(), o2.getId()));
        convertedParameters.addAll(converterUtils.convertSet(source.getParameters(), CcParameterDTO.class));
        cohortCharacterizationDTO.setParameters(convertedParameters);

        Set convertedStratas = new TreeSet<>((o1, o2) -> ObjectUtils.compare(o1.getId(), o2.getId()));
        convertedStratas.addAll(converterUtils.convertSet(source.getStratas(), CcStrataDTO.class));
        cohortCharacterizationDTO.setStratas(convertedStratas);

        cohortCharacterizationDTO.setStratifiedBy(source.getStratifiedBy());
        cohortCharacterizationDTO.setStrataOnly(source.getStrataOnly());
        cohortCharacterizationDTO.setStrataConceptSets(source.getStrataConceptSets());

        if (Objects.nonNull(source.getTags())) {
            Set tags = source.getTags().stream()
                    .map(tag -> conversionService.convert(tag, TagDTO.class)).collect(Collectors.toSet());
            cohortCharacterizationDTO.setTags(tags);
        }

        return cohortCharacterizationDTO;
    }

}

/**
 * Export-flavoured conversion: additionally embeds the full cohort and
 * feature-analysis DTOs (id-ordered) so the design is self-contained.
 */
public abstract class BaseCcToCcExportDTOConverter extends BaseCcToCcDTOConverter {
    @Override
    public T convert(CohortCharacterizationEntity source) {

        final T exportDTO = super.convert(source);

        // Id-ordered sets keep exported JSON deterministic across runs.
        Set convertedCohortDTOs = new TreeSet<>((o1, o2) -> ObjectUtils.compare(o1.getId(), o2.getId()));
        convertedCohortDTOs.addAll(converterUtils.convertSet(source.getCohortDefinitions(), CohortDTO.class));
        exportDTO.setCohorts(convertedCohortDTOs);

        Set convertedFeAnalysisDTOs = new TreeSet<>((o1, o2) -> ObjectUtils.compare(o1.getId(), o2.getId()));
        convertedFeAnalysisDTOs.addAll(converterUtils.convertSet(source.getFeatureAnalyses(), FeAnalysisDTO.class));
        exportDTO.setFeatureAnalyses(convertedFeAnalysisDTOs);
        return exportDTO;
    }
}

/**
 * Minimal ("short") conversion: name (trimmed), description, id and hash code.
 * All richer converters in this hierarchy build on this base.
 */
public abstract class BaseCcToCcShortDTOConverter
        extends BaseCommonEntityExtToDTOExtConverter {

    @Autowired
    protected ConversionService conversionService;

    @Override
    public void doConvert(final CohortCharacterizationEntity source, T target) {
        target.setName(StringUtils.trim(source.getName()));
        target.setDescription(source.getDescription());
        target.setId(source.getId());
        target.setHashCode(source.getHashCode());
    }
}

/** Concrete Spring-registered converter: CohortCharacterizationDTO -> entity. */
@Component
public class CcDTOToCcEntityConverter extends BaseCcDTOToCcEntityConverter {
}

/** Concrete Spring-registered converter: CcExportDTO -> entity. */
@Component
public class CcExportDTOToCcEntityConverter extends BaseCcDTOToCcEntityConverter {
}
b/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CcParamEntityToCcParameterDTOConverter.java new file mode 100644 index 0000000000..9e31fd4cc1 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CcParamEntityToCcParameterDTOConverter.java @@ -0,0 +1,18 @@ +package org.ohdsi.webapi.cohortcharacterization.converter; + +import org.ohdsi.webapi.cohortcharacterization.domain.CcParamEntity; +import org.ohdsi.webapi.cohortcharacterization.dto.CcParameterDTO; +import org.ohdsi.webapi.converter.BaseConversionServiceAwareConverter; +import org.springframework.stereotype.Component; + +@Component +public class CcParamEntityToCcParameterDTOConverter extends BaseConversionServiceAwareConverter { + @Override + public CcParameterDTO convert(final CcParamEntity source) { + final CcParameterDTO dto = new CcParameterDTO(); + dto.setValue(source.getValue()); + dto.setName(source.getName()); + dto.setId(source.getId()); + return dto; + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CcParameterDTOToCcParamEntityConverter.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CcParameterDTOToCcParamEntityConverter.java new file mode 100644 index 0000000000..b793a5d8b1 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CcParameterDTOToCcParamEntityConverter.java @@ -0,0 +1,20 @@ +package org.ohdsi.webapi.cohortcharacterization.converter; + +import org.ohdsi.webapi.cohortcharacterization.domain.CcParamEntity; +import org.ohdsi.webapi.cohortcharacterization.dto.CcParameterDTO; +import org.ohdsi.webapi.converter.BaseConversionServiceAwareConverter; +import org.springframework.stereotype.Component; + +@Component +public class CcParameterDTOToCcParamEntityConverter extends BaseConversionServiceAwareConverter { + @Override + public CcParamEntity convert(final CcParameterDTO source) { + final CcParamEntity result = new CcParamEntity(); + + 
result.setName(source.getName()); + result.setId(source.getId()); + result.setValue(source.getValue()); + + return result; + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CcStrataDTOToCcStrataEntityConverter.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CcStrataDTOToCcStrataEntityConverter.java new file mode 100644 index 0000000000..c60d18b6b7 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CcStrataDTOToCcStrataEntityConverter.java @@ -0,0 +1,24 @@ +package org.ohdsi.webapi.cohortcharacterization.converter; + + +import org.ohdsi.analysis.Utils; +import org.ohdsi.webapi.arachne.commons.converter.BaseConvertionServiceAwareConverter; +import org.ohdsi.webapi.cohortcharacterization.domain.CcStrataEntity; +import org.ohdsi.webapi.cohortcharacterization.dto.CcStrataDTO; +import org.springframework.stereotype.Component; + +@Component +public class CcStrataDTOToCcStrataEntityConverter extends BaseConvertionServiceAwareConverter { + + @Override + protected CcStrataEntity createResultObject(CcStrataDTO source) { + return new CcStrataEntity(); + } + + @Override + protected void convert(CcStrataDTO source, CcStrataEntity entity) { + entity.setId(source.getId()); + entity.setName(source.getName()); + entity.setExpressionString(Utils.serialize(source.getCriteria())); + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CcStrataEntityToCcStrataDTOConverter.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CcStrataEntityToCcStrataDTOConverter.java new file mode 100644 index 0000000000..e7fa0dd7d8 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CcStrataEntityToCcStrataDTOConverter.java @@ -0,0 +1,22 @@ +package org.ohdsi.webapi.cohortcharacterization.converter; + +import org.ohdsi.webapi.arachne.commons.converter.BaseConvertionServiceAwareConverter; +import 
org.ohdsi.webapi.cohortcharacterization.domain.CcStrataEntity; +import org.ohdsi.webapi.cohortcharacterization.dto.CcStrataDTO; +import org.springframework.stereotype.Component; + +@Component +public class CcStrataEntityToCcStrataDTOConverter extends BaseConvertionServiceAwareConverter { + + @Override + protected CcStrataDTO createResultObject(CcStrataEntity ccStrataEntity) { + return new CcStrataDTO(); + } + + @Override + protected void convert(CcStrataEntity source, CcStrataDTO dto) { + dto.setId(source.getId()); + dto.setName(source.getName()); + dto.setCriteria(source.getCriteria()); + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CcToCcDTOConverter.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CcToCcDTOConverter.java new file mode 100644 index 0000000000..599ea6feb5 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CcToCcDTOConverter.java @@ -0,0 +1,25 @@ +package org.ohdsi.webapi.cohortcharacterization.converter; + +import org.ohdsi.webapi.cohortcharacterization.domain.CohortCharacterizationEntity; +import org.ohdsi.webapi.cohortcharacterization.dto.CohortCharacterizationDTO; +import org.ohdsi.webapi.cohortdefinition.dto.CohortMetadataImplDTO; +import org.ohdsi.webapi.feanalysis.dto.FeAnalysisShortDTO; +import org.springframework.stereotype.Component; + +@Component +public class CcToCcDTOConverter extends BaseCcToCcDTOConverter { + + @Override + public CohortCharacterizationDTO convert(final CohortCharacterizationEntity source) { + final CohortCharacterizationDTO cohortCharacterizationDTO = super.convert(source); + + cohortCharacterizationDTO.setCohorts(converterUtils.convertSet(source.getCohortDefinitions(), CohortMetadataImplDTO.class)); + cohortCharacterizationDTO.setFeatureAnalyses(converterUtils.convertSet(source.getCcFeatureAnalyses(), FeAnalysisShortDTO.class)); + return cohortCharacterizationDTO; + } + + @Override + protected CohortCharacterizationDTO 
createResultObject() { + return new CohortCharacterizationDTO(); + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CcToCcExportDTOConverter.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CcToCcExportDTOConverter.java new file mode 100644 index 0000000000..73285cde9d --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CcToCcExportDTOConverter.java @@ -0,0 +1,13 @@ +package org.ohdsi.webapi.cohortcharacterization.converter; + +import org.ohdsi.webapi.cohortcharacterization.dto.CcExportDTO; +import org.springframework.stereotype.Component; + +@Component +public class CcToCcExportDTOConverter extends BaseCcToCcExportDTOConverter { + + @Override + protected CcExportDTO createResultObject() { + return new CcExportDTO(); + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CcToCcShortDTOConverter.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CcToCcShortDTOConverter.java new file mode 100644 index 0000000000..7f819ef6cc --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CcToCcShortDTOConverter.java @@ -0,0 +1,12 @@ +package org.ohdsi.webapi.cohortcharacterization.converter; + +import org.ohdsi.webapi.cohortcharacterization.dto.CcShortDTO; +import org.springframework.stereotype.Component; + +@Component +public class CcToCcShortDTOConverter extends BaseCcToCcShortDTOConverter { + @Override + protected CcShortDTO createResultObject() { + return new CcShortDTO(); + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CcToCohortCharacterizationImplConverter.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CcToCohortCharacterizationImplConverter.java new file mode 100644 index 0000000000..88b3fd127d --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CcToCohortCharacterizationImplConverter.java @@ -0,0 +1,12 @@ +package 
org.ohdsi.webapi.cohortcharacterization.converter; + +import org.ohdsi.webapi.cohortcharacterization.specification.CohortCharacterizationImpl; +import org.springframework.stereotype.Component; + +@Component +public class CcToCohortCharacterizationImplConverter extends BaseCcToCcExportDTOConverter { + @Override + protected CohortCharacterizationImpl createResultObject() { + return new CohortCharacterizationImpl(); + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CharacterizationToCharacterizationVersionConverter.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CharacterizationToCharacterizationVersionConverter.java new file mode 100644 index 0000000000..882b67916b --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CharacterizationToCharacterizationVersionConverter.java @@ -0,0 +1,40 @@ +package org.ohdsi.webapi.cohortcharacterization.converter; + +import org.ohdsi.analysis.Utils; +import org.ohdsi.webapi.cohortcharacterization.CcService; +import org.ohdsi.webapi.cohortcharacterization.domain.CohortCharacterizationEntity; +import org.ohdsi.webapi.cohortcharacterization.specification.CohortCharacterizationImpl; +import org.ohdsi.webapi.converter.BaseConversionServiceAwareConverter; +import org.ohdsi.webapi.util.ExportUtil; +import org.ohdsi.webapi.versioning.domain.CharacterizationVersion; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.core.env.Environment; +import org.springframework.stereotype.Component; + +@Component +public class CharacterizationToCharacterizationVersionConverter + extends BaseConversionServiceAwareConverter { + @Autowired + private CcService ccService; + + @Autowired + private Environment env; + + @Override + public CharacterizationVersion convert(CohortCharacterizationEntity source) { + CohortCharacterizationImpl characterizationImpl = + conversionService.convert(source, CohortCharacterizationImpl.class); + 
ExportUtil.clearCreateAndUpdateInfo(characterizationImpl); + characterizationImpl.getFeatureAnalyses().forEach(ExportUtil::clearCreateAndUpdateInfo); + characterizationImpl.getCohorts().forEach(ExportUtil::clearCreateAndUpdateInfo); + characterizationImpl.setOrganizationName(env.getRequiredProperty("organization.name")); + + String expression = Utils.serialize(characterizationImpl, true); + + CharacterizationVersion target = new CharacterizationVersion(); + target.setAssetId(source.getId()); + target.setAssetJson(expression); + + return target; + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CharacterizationVersionToCharacterizationVersionFullDTOConverter.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CharacterizationVersionToCharacterizationVersionFullDTOConverter.java new file mode 100644 index 0000000000..0ea8c06185 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/CharacterizationVersionToCharacterizationVersionFullDTOConverter.java @@ -0,0 +1,61 @@ +package org.ohdsi.webapi.cohortcharacterization.converter; + +import org.ohdsi.analysis.Utils; +import org.ohdsi.webapi.cohortcharacterization.domain.CohortCharacterizationEntity; +import org.ohdsi.webapi.cohortcharacterization.dto.CcVersionFullDTO; +import org.ohdsi.webapi.cohortcharacterization.dto.CohortCharacterizationDTO; +import org.ohdsi.webapi.cohortcharacterization.repository.CcRepository; +import org.ohdsi.webapi.cohortcharacterization.specification.CohortCharacterizationImpl; +import org.ohdsi.webapi.cohortdefinition.CohortDefinitionEntity; +import org.ohdsi.webapi.cohortdefinition.dto.CohortMetadataDTO; +import org.ohdsi.webapi.converter.BaseConversionServiceAwareConverter; +import org.ohdsi.webapi.exception.ConversionAtlasException; +import org.ohdsi.webapi.cohortdefinition.CohortDefinitionService; +import org.ohdsi.webapi.versioning.domain.CharacterizationVersion; +import 
org.ohdsi.webapi.versioning.dto.VersionDTO; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import java.util.HashSet; +import java.util.List; +import java.util.stream.Collectors; + +@Component +public class CharacterizationVersionToCharacterizationVersionFullDTOConverter + extends BaseConversionServiceAwareConverter { + @Autowired + private CohortDefinitionService cohortService; + + @Autowired + private CcRepository ccRepository; + + @Override + public CcVersionFullDTO convert(CharacterizationVersion source) { + CohortCharacterizationEntity def = ccRepository.findById(source.getAssetId()).orElseThrow(); + CohortCharacterizationImpl characterizationImpl = + Utils.deserialize(source.getAssetJson(), CohortCharacterizationImpl.class); + CohortCharacterizationEntity entity = conversionService.convert(characterizationImpl, CohortCharacterizationEntity.class); + entity.setId(def.getId()); + entity.setTags(def.getTags()); + entity.setName(def.getName()); + entity.setCreatedBy(def.getCreatedBy()); + entity.setCreatedDate(def.getCreatedDate()); + entity.setModifiedBy(def.getModifiedBy()); + entity.setModifiedDate(def.getModifiedDate()); + + List ids = characterizationImpl.getCohorts().stream() + .map(CohortMetadataDTO::getId) + .collect(Collectors.toList()); + List cohorts = cohortService.getCohorts(ids); + if (cohorts.size() != ids.size()) { + throw new ConversionAtlasException("Could not load version because it contains deleted cohorts"); + } + entity.setCohortDefinitions(new HashSet<>(cohorts)); + + CcVersionFullDTO target = new CcVersionFullDTO(); + target.setVersionDTO(conversionService.convert(source, VersionDTO.class)); + target.setEntityDTO(conversionService.convert(entity, CohortCharacterizationDTO.class)); + + return target; + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/SerializedCcToCcConverter.java 
b/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/SerializedCcToCcConverter.java new file mode 100644 index 0000000000..10dabcc880 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/converter/SerializedCcToCcConverter.java @@ -0,0 +1,35 @@ +package org.ohdsi.webapi.cohortcharacterization.converter; + +import com.fasterxml.jackson.core.type.TypeReference; +import org.ohdsi.analysis.Utils; +import org.ohdsi.webapi.cohortcharacterization.domain.CohortCharacterizationEntity; +import org.ohdsi.webapi.cohortcharacterization.dto.CcExportDTO; +import org.springframework.core.convert.ConversionService; + +import jakarta.persistence.AttributeConverter; + +public class SerializedCcToCcConverter implements AttributeConverter { + + private static ConversionService conversionService; + + public static void setConversionService(ConversionService conversionService) { + + SerializedCcToCcConverter.conversionService = conversionService; + } + + @Override + public String convertToDatabaseColumn(CohortCharacterizationEntity data) { + + CcExportDTO cohortCharacterizationDTO = conversionService.convert(data, CcExportDTO.class); + cohortCharacterizationDTO.setModifiedDate(null); + return Utils.serialize(cohortCharacterizationDTO); + } + + @Override + public CohortCharacterizationEntity convertToEntityAttribute(String data) { + + TypeReference typeRef = new TypeReference() {}; + CcExportDTO dto = Utils.deserialize(data, typeRef); + return conversionService.convert(dto, CohortCharacterizationEntity.class); + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/domain/CcFeAnalysisEntity.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/domain/CcFeAnalysisEntity.java new file mode 100644 index 0000000000..b3325a71c3 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/domain/CcFeAnalysisEntity.java @@ -0,0 +1,115 @@ +package org.ohdsi.webapi.cohortcharacterization.domain; + +import 
org.hibernate.annotations.GenericGenerator; +import org.hibernate.annotations.Parameter; +import org.ohdsi.analysis.cohortcharacterization.design.FeatureAnalysis; +import org.ohdsi.analysis.cohortcharacterization.design.FeatureAnalysisDomain; +import org.ohdsi.analysis.cohortcharacterization.design.FeatureAnalysisType; +import org.ohdsi.webapi.feanalysis.domain.FeAnalysisEntity; + +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.Table; +import java.util.Optional; +import java.util.function.Function; +import java.util.function.Supplier; + +@Entity +@Table(name = "cc_analysis") +public class CcFeAnalysisEntity implements FeatureAnalysis { + + @Id + @GenericGenerator( + name = "cc_analysis_generator", + strategy = "org.hibernate.id.enhanced.SequenceStyleGenerator", + parameters = { + @Parameter(name = "sequence_name", value = "cc_analysis_seq"), + @Parameter(name = "increment_size", value = "1") + } + ) + @GeneratedValue(generator = "cc_analysis_generator") + private Long id; + @ManyToOne(optional = false) + @JoinColumn(name = "cohort_characterization_id") + private CohortCharacterizationEntity cohortCharacterization; + @ManyToOne(optional = false) + @JoinColumn(name = "fe_analysis_id") + private FeAnalysisEntity featureAnalysis; + @Column(name = "include_annual") + private Boolean includeAnnual; + @Column(name = "include_temporal") + private Boolean includeTemporal; + + public CohortCharacterizationEntity getCohortCharacterization() { + return cohortCharacterization; + } + + public void setCohortCharacterization(CohortCharacterizationEntity cohortCharacterization) { + this.cohortCharacterization = cohortCharacterization; + } + + public FeAnalysisEntity getFeatureAnalysis() { + return featureAnalysis; + } + + public void setFeatureAnalysis(FeAnalysisEntity 
featureAnalysis) { + this.featureAnalysis = featureAnalysis; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public Boolean getIncludeAnnual() { + return includeAnnual; + } + + public void setIncludeAnnual(Boolean includeAnnual) { + this.includeAnnual = includeAnnual; + } + + public Boolean getIncludeTemporal() { + return includeTemporal; + } + + public void setIncludeTemporal(Boolean includeTemporal) { + this.includeTemporal = includeTemporal; + } + + private T mapFeatureAnalysis(Function getter) { + return Optional.ofNullable(featureAnalysis).map(getter).orElse(null); + } + + @Override + public FeatureAnalysisType getType() { + return mapFeatureAnalysis(FeatureAnalysis::getType); + } + + @Override + public String getName() { + return mapFeatureAnalysis(FeatureAnalysis::getName); + } + + @Override + public FeatureAnalysisDomain getDomain() { + return mapFeatureAnalysis(FeatureAnalysis::getDomain); + } + + @Override + public String getDescr() { + return mapFeatureAnalysis(FeatureAnalysis::getDescr); + } + + @Override + public Object getDesign() { + return mapFeatureAnalysis(FeatureAnalysis::getDesign); + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/domain/CcGenerationEntity.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/domain/CcGenerationEntity.java new file mode 100644 index 0000000000..5805506054 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/domain/CcGenerationEntity.java @@ -0,0 +1,22 @@ +package org.ohdsi.webapi.cohortcharacterization.domain; + +import jakarta.persistence.Entity; +import jakarta.persistence.FetchType; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.Table; + +import org.ohdsi.webapi.common.generation.CommonGeneration; + +@Entity +@Table(name = "cc_generation") +public class CcGenerationEntity extends CommonGeneration { + + @ManyToOne(targetEntity = 
CohortCharacterizationEntity.class, fetch = FetchType.LAZY) + @JoinColumn(name = "cc_id") + private CohortCharacterizationEntity cohortCharacterization; + + public CohortCharacterizationEntity getCohortCharacterization() { + return cohortCharacterization; + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/domain/CcParamEntity.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/domain/CcParamEntity.java new file mode 100644 index 0000000000..dae2464b6d --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/domain/CcParamEntity.java @@ -0,0 +1,88 @@ +package org.ohdsi.webapi.cohortcharacterization.domain; + +import java.util.Objects; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.FetchType; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.Table; + +import org.hibernate.annotations.GenericGenerator; +import org.hibernate.annotations.Parameter; +import org.ohdsi.analysis.WithId; +import org.ohdsi.analysis.cohortcharacterization.design.CohortCharacterization; +import org.ohdsi.analysis.cohortcharacterization.design.CohortCharacterizationParam; + +@Entity +@Table(name = "cc_param") +public class CcParamEntity implements CohortCharacterizationParam, WithId { + + @Id + @GenericGenerator( + name = "cc_param_generator", + strategy = "org.hibernate.id.enhanced.SequenceStyleGenerator", + parameters = { + @Parameter(name = "sequence_name", value = "cc_param_sequence"), + @Parameter(name = "increment_size", value = "1") + } + ) + @GeneratedValue(generator = "cc_param_generator") + private Long id; + @ManyToOne(optional = false, targetEntity = CohortCharacterizationEntity.class, fetch = FetchType.LAZY) + @JoinColumn(name = "cohort_characterization_id") + private CohortCharacterizationEntity cohortCharacterization; + @Column + private String name; 
+ @Column + private String value; + + @Override + public String getName() { + return name; + } + + @Override + public String getValue() { + return value; + } + + public Long getId() { + return id; + } + + public void setId(final Long id) { + this.id = id; + } + + public CohortCharacterization getCohortCharacterization() { + return cohortCharacterization; + } + + public void setCohortCharacterization(final CohortCharacterizationEntity cohortCharacterization) { + this.cohortCharacterization = cohortCharacterization; + } + + public void setName(final String name) { + this.name = name; + } + + public void setValue(final String value) { + this.value = value; + } + + @Override + public boolean equals(final Object o) { + if (this == o) return true; + if (!(o instanceof CcParamEntity)) return false; + final CcParamEntity that = (CcParamEntity) o; + return Objects.equals(getId(), that.getId()); + } + + @Override + public int hashCode() { + return Objects.hash(getId(), super.hashCode()); + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/domain/CcStrataConceptSetEntity.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/domain/CcStrataConceptSetEntity.java new file mode 100644 index 0000000000..72be21a9d2 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/domain/CcStrataConceptSetEntity.java @@ -0,0 +1,48 @@ +package org.ohdsi.webapi.cohortcharacterization.domain; + +import jakarta.persistence.Entity; +import jakarta.persistence.FetchType; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.OneToOne; +import jakarta.persistence.Table; +import org.hibernate.annotations.GenericGenerator; +import org.hibernate.annotations.Parameter; +import org.ohdsi.webapi.common.CommonConceptSetEntity; + +@Entity +@Table(name = "cc_strata_conceptset") +public class CcStrataConceptSetEntity extends CommonConceptSetEntity { + @Id + @GenericGenerator( 
+ name = "cc_strata_conceptset_generator", + strategy = "org.hibernate.id.enhanced.SequenceStyleGenerator", + parameters = { + @Parameter(name = "sequence_name", value = "cc_strata_conceptset_seq"), + @Parameter(name = "increment_size", value = "1") + } + ) + @GeneratedValue(generator = "cc_strata_conceptset_generator") + private Long id; + + @OneToOne(optional = false, targetEntity = CohortCharacterizationEntity.class, fetch = FetchType.LAZY) + @JoinColumn(name = "cohort_characterization_id") + private CohortCharacterizationEntity cohortCharacterization; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public CohortCharacterizationEntity getCohortCharacterization() { + return cohortCharacterization; + } + + public void setCohortCharacterization(CohortCharacterizationEntity cohortCharacterization) { + this.cohortCharacterization = cohortCharacterization; + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/domain/CcStrataEntity.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/domain/CcStrataEntity.java new file mode 100644 index 0000000000..ddda523873 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/domain/CcStrataEntity.java @@ -0,0 +1,79 @@ +package org.ohdsi.webapi.cohortcharacterization.domain; + +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.FetchType; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.Table; + +import org.hibernate.annotations.GenericGenerator; +import org.hibernate.annotations.Parameter; +import org.ohdsi.analysis.Utils; +import org.ohdsi.analysis.WithId; +import org.ohdsi.analysis.cohortcharacterization.design.CohortCharacterizationStrata; +import org.ohdsi.circe.cohortdefinition.CriteriaGroup; + +@Entity +@Table(name = "cc_strata") +public class 
CcStrataEntity implements CohortCharacterizationStrata, WithId { + + @Id + @GenericGenerator( + name = "cc_strata_generator", + strategy = "org.hibernate.id.enhanced.SequenceStyleGenerator", + parameters = { + @Parameter(name = "sequence_name", value = "cc_strata_seq"), + @Parameter(name = "increment_size", value = "1") + } + ) + @GeneratedValue(generator = "cc_strata_generator") + private Long id; + @Column(name = "name") + private String name; + @Column(name = "expression") + private String expressionString; + @ManyToOne(optional = false, targetEntity = CohortCharacterizationEntity.class, fetch = FetchType.LAZY) + @JoinColumn(name = "cohort_characterization_id") + private CohortCharacterizationEntity cohortCharacterization; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + @Override + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public CriteriaGroup getCriteria() { + return Utils.deserialize(expressionString, CriteriaGroup.class); + } + + public String getExpressionString() { + return expressionString; + } + + public void setExpressionString(String expressionString) { + this.expressionString = expressionString; + } + + public CohortCharacterizationEntity getCohortCharacterization() { + return cohortCharacterization; + } + + public void setCohortCharacterization(CohortCharacterizationEntity cohortCharacterization) { + this.cohortCharacterization = cohortCharacterization; + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/domain/CohortCharacterizationEntity.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/domain/CohortCharacterizationEntity.java new file mode 100644 index 0000000000..91d9f98a6a --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/domain/CohortCharacterizationEntity.java @@ -0,0 +1,199 @@ +package org.ohdsi.webapi.cohortcharacterization.domain; + +import java.util.*; 
+import java.util.stream.Collectors; + +import jakarta.persistence.CascadeType; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.FetchType; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.JoinTable; +import jakarta.persistence.ManyToMany; +import jakarta.persistence.OneToMany; +import jakarta.persistence.OneToOne; +import jakarta.persistence.Table; +import org.hibernate.annotations.GenericGenerator; +import org.hibernate.annotations.Parameter; +import org.ohdsi.analysis.cohortcharacterization.design.CohortCharacterization; +import org.ohdsi.circe.cohortdefinition.ConceptSet; +import org.ohdsi.webapi.cohortdefinition.CohortDefinitionEntity; +import org.ohdsi.webapi.feanalysis.domain.FeAnalysisEntity; +import org.ohdsi.webapi.model.CommonEntity; +import org.ohdsi.webapi.model.CommonEntityExt; +import org.ohdsi.webapi.tag.domain.Tag; + +@Entity +@Table(name = "cohort_characterization") +public class CohortCharacterizationEntity extends CommonEntityExt implements CohortCharacterization { + + @Id + @GenericGenerator( + name = "cohort_characterization_generator", + strategy = "org.hibernate.id.enhanced.SequenceStyleGenerator", + parameters = { + @Parameter(name = "sequence_name", value = "cohort_characterization_seq"), + @Parameter(name = "increment_size", value = "1") + } + ) + @GeneratedValue(generator = "cohort_characterization_generator") + private Long id; + + @Column + private String name; + + @Column + private String description; + + @ManyToMany(targetEntity = CohortDefinitionEntity.class, fetch = FetchType.LAZY) + @JoinTable(name = "cc_cohort", + joinColumns = @JoinColumn(name = "cohort_characterization_id", referencedColumnName = "id"), + inverseJoinColumns = @JoinColumn(name = "cohort_id", referencedColumnName = "id")) + private Set cohortDefinitions = new HashSet<>(); + + @OneToMany(orphanRemoval = true) + 
@JoinColumn(name = "cohort_characterization_id", insertable = false, updatable = false, nullable = false) + private Set featureAnalyses = new HashSet<>(); + + @OneToMany(mappedBy = "cohortCharacterization", fetch = FetchType.LAZY, targetEntity = CcParamEntity.class) + private Set parameters = new HashSet<>(); + + @OneToMany(mappedBy = "cohortCharacterization", fetch = FetchType.LAZY, targetEntity = CcStrataEntity.class) + private Set stratas = new HashSet<>(); + + @Column(name = "stratified_by") + private String stratifiedBy; + + @Column(name = "strata_only") + private Boolean strataOnly; + + @OneToOne(mappedBy = "cohortCharacterization", cascade = CascadeType.ALL) + private CcStrataConceptSetEntity conceptSetEntity; + + @Column(name = "hash_code") + private Integer hashCode; + + @ManyToMany(targetEntity = Tag.class, fetch = FetchType.LAZY) + @JoinTable(name = "cohort_characterization_tag", + joinColumns = @JoinColumn(name = "asset_id", referencedColumnName = "id"), + inverseJoinColumns = @JoinColumn(name = "tag_id", referencedColumnName = "id")) + private Set tags; + + @Override + public Set getCohorts() { + return cohortDefinitions; + } + + @Override + public Set getFeatureAnalyses() { + return featureAnalyses != null ? 
+ featureAnalyses.stream().map(CcFeAnalysisEntity::getFeatureAnalysis).collect(Collectors.toSet()) : + Collections.emptySet(); + } + + public Set getCcFeatureAnalyses() { + return featureAnalyses; + } + + @Override + public Set getParameters() { + return parameters; + } + + public void setFeatureAnalyses(Set featureAnalyses) { + this.featureAnalyses = featureAnalyses; + } + + @Override + public Long getId() { + return id; + } + + public void setId(final Long id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(final String name) { + this.name = name; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public void setParameters(final Set parameters) { + this.parameters = parameters; + } + + public Set getCohortDefinitions() { + return cohortDefinitions; + } + + public void setCohortDefinitions(final Set cohortDefinitions) { + this.cohortDefinitions = cohortDefinitions; + } + + @Override + public Set getStratas() { + return stratas; + } + + public void setStratas(Set stratas) { + this.stratas = stratas; + } + + public String getStratifiedBy() { + return stratifiedBy; + } + + public void setStratifiedBy(String stratifiedBy) { + this.stratifiedBy = stratifiedBy; + } + + public Boolean getStrataOnly() { + return Objects.nonNull(strataOnly) ? strataOnly : false; + } + + public void setStrataOnly(Boolean strataOnly) { + this.strataOnly = strataOnly; + } + + public CcStrataConceptSetEntity getConceptSetEntity() { + return conceptSetEntity; + } + + public void setConceptSetEntity(CcStrataConceptSetEntity conceptSetEntity) { + this.conceptSetEntity = conceptSetEntity; + } + + @Override + public Collection getStrataConceptSets() { + return Objects.nonNull(this.conceptSetEntity) ? 
this.conceptSetEntity.getConceptSets() : Collections.emptyList(); + } + + public Integer getHashCode() { + return hashCode; + } + + public void setHashCode(final Integer hashCode) { + this.hashCode = hashCode; + } + + public Set getTags() { + return tags; + } + + public void setTags(Set tags) { + this.tags = tags; + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/AbstractTemporalResult.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/AbstractTemporalResult.java new file mode 100644 index 0000000000..383ee8514a --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/AbstractTemporalResult.java @@ -0,0 +1,94 @@ +package org.ohdsi.webapi.cohortcharacterization.dto; + +public abstract class AbstractTemporalResult { + protected Integer analysisId; + protected String analysisName; + protected Long covariateId; + protected String covariateName; + protected Integer strataId; + protected String strataName; + protected Integer conceptId; + protected Integer cohortId; + protected Long count; + protected Double avg; + + public Integer getAnalysisId() { + return analysisId; + } + + public void setAnalysisId(Integer analysisId) { + this.analysisId = analysisId; + } + + public String getAnalysisName() { + return analysisName; + } + + public void setAnalysisName(String analysisName) { + this.analysisName = analysisName; + } + + public Long getCovariateId() { + return covariateId; + } + + public void setCovariateId(Long covariateId) { + this.covariateId = covariateId; + } + + public String getCovariateName() { + return covariateName; + } + + public void setCovariateName(String covariateName) { + this.covariateName = covariateName; + } + + public Integer getStrataId() { + return strataId; + } + + public void setStrataId(Integer strataId) { + this.strataId = strataId; + } + + public String getStrataName() { + return strataName; + } + + public void setStrataName(String strataName) { + this.strataName = strataName; + } + 
+ public Integer getConceptId() { + return conceptId; + } + + public void setConceptId(Integer conceptId) { + this.conceptId = conceptId; + } + + public Long getCount() { + return count; + } + + public void setCount(Long count) { + this.count = count; + } + + public Double getAvg() { + return avg; + } + + public void setAvg(Double avg) { + this.avg = avg; + } + + public Integer getCohortId() { + return cohortId; + } + + public void setCohortId(Integer cohortId) { + this.cohortId = cohortId; + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/BaseCcDTO.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/BaseCcDTO.java new file mode 100644 index 0000000000..14b5a2bcb6 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/BaseCcDTO.java @@ -0,0 +1,83 @@ +package org.ohdsi.webapi.cohortcharacterization.dto; + +import com.fasterxml.jackson.annotation.JsonProperty; +import org.ohdsi.analysis.CohortMetadata; +import org.ohdsi.circe.cohortdefinition.ConceptSet; +import org.ohdsi.webapi.cohortdefinition.CohortMetadataExt; +import org.ohdsi.webapi.cohortdefinition.dto.CohortMetadataImplDTO; +import org.ohdsi.webapi.feanalysis.dto.FeAnalysisShortDTO; + +import java.util.ArrayList; +import java.util.Collection; + +public class BaseCcDTO extends CcShortDTO { + + private Collection cohorts = new ArrayList<>(); + private Collection featureAnalyses = new ArrayList<>(); + private Collection parameters = new ArrayList<>(); + @JsonProperty("stratifiedBy") + private String stratifiedBy; + @JsonProperty("strataOnly") + private Boolean strataOnly; + private Collection stratas = new ArrayList<>(); + @JsonProperty("strataConceptSets") + private Collection strataConceptSets = new ArrayList<>(); + + public Collection getCohorts() { + return cohorts; + } + + public void setCohorts(Collection cohorts) { + this.cohorts = cohorts; + } + + public Collection getParameters() { + return parameters; + } + + public void 
setParameters(final Collection parameters) { + this.parameters = parameters; + } + + public Collection getFeatureAnalyses() { + + return featureAnalyses; + } + + public void setFeatureAnalyses(final Collection featureAnalyses) { + + this.featureAnalyses = featureAnalyses; + } + + public Collection getStratas() { + return stratas; + } + + public void setStratas(Collection stratas) { + this.stratas = stratas; + } + + public String getStratifiedBy() { + return stratifiedBy; + } + + public void setStratifiedBy(String stratifiedBy) { + this.stratifiedBy = stratifiedBy; + } + + public Boolean getStrataOnly() { + return strataOnly; + } + + public void setStrataOnly(Boolean strataOnly) { + this.strataOnly = strataOnly; + } + + public Collection getStrataConceptSets() { + return strataConceptSets; + } + + public void setStrataConceptSets(Collection strataConceptSets) { + this.strataConceptSets = strataConceptSets; + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcDistributionStat.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcDistributionStat.java new file mode 100644 index 0000000000..ec57963e25 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcDistributionStat.java @@ -0,0 +1,134 @@ +package org.ohdsi.webapi.cohortcharacterization.dto; + +import org.ohdsi.analysis.cohortcharacterization.result.DistributionStat; + +public class CcDistributionStat extends CcPrevalenceStat implements DistributionStat { + + private Double avg; + private Double stdDev; + private Double min; + private Double p10; + private Double p25; + private Double median; + private Double p75; + private Double p90; + private Double max; + private Integer aggregateId; + private String aggregateName; + private Boolean missingMeansZero; + + @Override + public Double getAvg() { + return avg; + } + + @Override + public Double getStdDev() { + return stdDev; + } + + @Override + public Double getMin() { + return min; + } + + @Override + 
public Double getP10() { + return p10; + } + + @Override + public Double getP25() { + return p25; + } + + @Override + public Double getMedian() { + return median; + } + + @Override + public Double getP75() { + return p75; + } + + @Override + public Double getP90() { + return p90; + } + + @Override + public Double getMax() { + return max; + } + + public void setAvg(final Double avg) { + + this.avg = avg; + } + + public void setStdDev(final Double stdDev) { + + this.stdDev = stdDev; + } + + public void setMin(final Double min) { + + this.min = min; + } + + public void setP10(final Double p10) { + + this.p10 = p10; + } + + public void setP25(final Double p25) { + + this.p25 = p25; + } + + public void setMedian(final Double median) { + + this.median = median; + } + + public void setP75(final Double p75) { + + this.p75 = p75; + } + + public void setP90(final Double p90) { + + this.p90 = p90; + } + + public void setMax(final Double max) { + + this.max = max; + } + + public Integer getAggregateId() { + return aggregateId; + } + + public void setAggregateId(Integer aggregateId) { + this.aggregateId = aggregateId; + } + + public String getAggregateName() { + return aggregateName; + } + + public void setAggregateName(String aggregateName) { + this.aggregateName = aggregateName; + } + + @Override + public Boolean isMissingMeansZero() { + return missingMeansZero; + } + + public void setMissingMeansZero(Boolean missingMeansZero) { + this.missingMeansZero = missingMeansZero; + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcExportDTO.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcExportDTO.java new file mode 100644 index 0000000000..c7bf78f499 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcExportDTO.java @@ -0,0 +1,10 @@ +package org.ohdsi.webapi.cohortcharacterization.dto; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import 
org.ohdsi.analysis.cohortcharacterization.design.CohortCharacterization; +import org.ohdsi.webapi.cohortdefinition.dto.CohortDTO; +import org.ohdsi.webapi.feanalysis.dto.FeAnalysisDTO; + +@JsonIgnoreProperties(ignoreUnknown = true) +public class CcExportDTO extends BaseCcDTO implements CohortCharacterization { +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcParameterDTO.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcParameterDTO.java new file mode 100644 index 0000000000..1e39f2b4d4 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcParameterDTO.java @@ -0,0 +1,36 @@ +package org.ohdsi.webapi.cohortcharacterization.dto; + +import org.ohdsi.analysis.cohortcharacterization.design.CohortCharacterizationParam; + +public class CcParameterDTO implements CohortCharacterizationParam { + + private Long id; + private String name; + private String value; + + public String getName() { + return name; + } + + public void setName(final String name) { + this.name = name; + } + + public String getValue() { + return value; + } + + public void setValue(final String value) { + this.value = value; + } + + public Long getId() { + + return id; + } + + public void setId(final Long id) { + + this.id = id; + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcPrevalenceStat.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcPrevalenceStat.java new file mode 100644 index 0000000000..38fe34be58 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcPrevalenceStat.java @@ -0,0 +1,97 @@ +package org.ohdsi.webapi.cohortcharacterization.dto; + +import org.ohdsi.analysis.cohortcharacterization.result.PrevalenceStat; + +public class CcPrevalenceStat extends CcResult implements PrevalenceStat { + + private String timeWindow; + private Long value; + private Double proportion; + private Double avg; + private Long covariateId; + private String covariateName; + 
private Long conceptId; + private String conceptName; + private Long count; + private long distance = 0; + + @Override + public Double getProportion() { + return this.proportion; + } + + @Override + public Long getCovariateId() { + return covariateId; + } + + @Override + public String getCovariateName() { + return covariateName; + } + + @Override + public Long getConceptId() { + return conceptId; + } + + @Override + public Long getCount() { + return count; + } + + public void setCovariateId(final Long covariateId) { + this.covariateId = covariateId; + } + + public void setCovariateName(final String covariateName) { + this.covariateName = covariateName; + } + + public void setConceptId(final Long conceptId) { + this.conceptId = conceptId; + } + + public void setCount(final Long count) { + this.count = count; + } + + public String getTimeWindow() { + return timeWindow; + } + + public void setTimeWindow(final String timeWindow) { + this.timeWindow = timeWindow; + } + + public void setProportion(Double proportion) { + + this.proportion = proportion; + } + + public Double getAvg() { + + return avg; + } + + public void setAvg(Double avg) { + + this.avg = avg; + } + + public long getDistance() { + return distance; + } + + public void setDistance(long distance) { + this.distance = distance; + } + + public String getConceptName() { + return conceptName; + } + + public void setConceptName(String conceptName) { + this.conceptName = conceptName; + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcResult.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcResult.java new file mode 100644 index 0000000000..34c5d25878 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcResult.java @@ -0,0 +1,110 @@ +package org.ohdsi.webapi.cohortcharacterization.dto; + + +import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeInfo; +import 
org.ohdsi.analysis.cohortcharacterization.design.CcResultType; + +@JsonTypeInfo(use = JsonTypeInfo.Id.NONE) +@JsonSubTypes({ + @JsonSubTypes.Type(value = CcPrevalenceStat.class), + @JsonSubTypes.Type(value = CcDistributionStat.class) +}) +public abstract class CcResult { + + private Long id; + private String faType; + private String sourceKey; + private Integer cohortId; + private Integer analysisId; + private String analysisName; + private CcResultType resultType; + private Long strataId; + private String strataName; + + public Long getId() { + + return id; + } + + public void setId(final Long id) { + + this.id = id; + } + + public String getFaType() { + + return faType; + } + + public void setFaType(String faType) { + + this.faType = faType; + } + + public String getSourceKey() { + + return sourceKey; + } + + public void setSourceKey(String sourceKey) { + + this.sourceKey = sourceKey; + } + + public CcResultType getResultType() { + + return resultType; + } + + public void setResultType(final CcResultType resultType) { + + this.resultType = resultType; + } + + public Integer getCohortId() { + + return cohortId; + } + + public void setCohortId(Integer cohortId) { + + this.cohortId = cohortId; + } + + public Integer getAnalysisId() { + + return analysisId; + } + + public void setAnalysisId(Integer analysisId) { + + this.analysisId = analysisId; + } + + public String getAnalysisName() { + + return analysisName; + } + + public void setAnalysisName(final String analysisName) { + + this.analysisName = analysisName; + } + + public Long getStrataId() { + return strataId; + } + + public void setStrataId(Long strataId) { + this.strataId = strataId; + } + + public String getStrataName() { + return strataName; + } + + public void setStrataName(String strataName) { + this.strataName = strataName; + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcShortDTO.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcShortDTO.java new file mode 
100644
index 0000000000..ea79ef50a5
--- /dev/null
+++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcShortDTO.java
@@ -0,0 +1,43 @@
+package org.ohdsi.webapi.cohortcharacterization.dto;
+
+import org.ohdsi.webapi.service.dto.CommonEntityExtDTO;
+
+/** Lightweight characterization header: id, name, description and persisted design hash. */
+public class CcShortDTO extends CommonEntityExtDTO {
+
+    private Long id;
+    // Persisted hash of the design (see the hash_code column on the entity), not Object.hashCode().
+    private Integer hashCode;
+    private String name;
+    private String description;
+
+    public Long getId() {
+        return id;
+    }
+
+    public void setId(final Long id) {
+        this.id = id;
+    }
+
+    public Integer getHashCode() {
+        return hashCode;
+    }
+
+    public void setHashCode(final Integer hashCode) {
+        this.hashCode = hashCode;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(final String name) {
+        this.name = name;
+    }
+
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+}
diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcStrataDTO.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcStrataDTO.java
new file mode 100644
index 0000000000..30b96bba74
--- /dev/null
+++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcStrataDTO.java
@@ -0,0 +1,35 @@
+package org.ohdsi.webapi.cohortcharacterization.dto;
+
+import org.ohdsi.analysis.cohortcharacterization.design.CohortCharacterizationStrata;
+import org.ohdsi.circe.cohortdefinition.CriteriaGroup;
+
+/** Strata definition: a display name plus a circe criteria group. */
+public class CcStrataDTO implements CohortCharacterizationStrata {
+
+    private Long id;
+    private String name;
+    private CriteriaGroup criteria;
+
+    public Long getId() {
+        return id;
+    }
+
+    public void setId(Long id) {
+        this.id = id;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public CriteriaGroup getCriteria() {
+        return criteria;
+    }
+
+    public void setCriteria(CriteriaGroup criteria) {
+        this.criteria = criteria;
+    }
+}
diff --git
a/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcTemporalAnnualResult.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcTemporalAnnualResult.java new file mode 100644 index 0000000000..c40c5078ef --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcTemporalAnnualResult.java @@ -0,0 +1,13 @@ +package org.ohdsi.webapi.cohortcharacterization.dto; + +public class CcTemporalAnnualResult extends AbstractTemporalResult{ + private Integer year; + + public Integer getYear() { + return year; + } + + public void setYear(Integer year) { + this.year = year; + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcTemporalResult.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcTemporalResult.java new file mode 100644 index 0000000000..ae5bf55754 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcTemporalResult.java @@ -0,0 +1,31 @@ +package org.ohdsi.webapi.cohortcharacterization.dto; + +public class CcTemporalResult extends AbstractTemporalResult { + private Integer timeId; + private Integer startDay; + private Integer endDay; + + public Integer getTimeId() { + return timeId; + } + + public void setTimeId(Integer timeId) { + this.timeId = timeId; + } + + public Integer getStartDay() { + return startDay; + } + + public void setStartDay(Integer startDay) { + this.startDay = startDay; + } + + public Integer getEndDay() { + return endDay; + } + + public void setEndDay(Integer endDay) { + this.endDay = endDay; + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcVersionFullDTO.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcVersionFullDTO.java new file mode 100644 index 0000000000..98d4076dde --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CcVersionFullDTO.java @@ -0,0 +1,6 @@ +package org.ohdsi.webapi.cohortcharacterization.dto; + +import 
org.ohdsi.webapi.versioning.dto.VersionFullDTO; + +public class CcVersionFullDTO extends VersionFullDTO { +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CohortCharacterizationDTO.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CohortCharacterizationDTO.java new file mode 100644 index 0000000000..0384d0611e --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/CohortCharacterizationDTO.java @@ -0,0 +1,7 @@ +package org.ohdsi.webapi.cohortcharacterization.dto; + +import org.ohdsi.webapi.cohortdefinition.dto.CohortMetadataImplDTO; +import org.ohdsi.webapi.feanalysis.dto.FeAnalysisShortDTO; + +public class CohortCharacterizationDTO extends BaseCcDTO { +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/ExecutionResultRequest.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/ExecutionResultRequest.java new file mode 100644 index 0000000000..45333755e7 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/ExecutionResultRequest.java @@ -0,0 +1,91 @@ +package org.ohdsi.webapi.cohortcharacterization.dto; + +import com.fasterxml.jackson.annotation.JsonProperty; +import org.ohdsi.webapi.Constants; + +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +public class ExecutionResultRequest { + + @JsonProperty("cohortIds") + private List cohortIds; + + @JsonProperty("analysisIds") + private List analysisIds; + + @JsonProperty("domainIds") + private List domainIds; + + @JsonProperty("thresholdValuePct") + private Float thresholdValuePct; + + @JsonProperty("isSummary") + private Boolean isSummary; + + @JsonProperty("showEmptyResults") + private Boolean isShowEmptyResults = false; + + public List getCohortIds() { + if(cohortIds == null) { + return Collections.emptyList(); + } + return cohortIds; + } + + public void setCohortIds(List cohortIds) { + this.cohortIds = cohortIds; + } + + public List getAnalysisIds() { + 
if(analysisIds == null) { + return Collections.emptyList(); + } + return analysisIds; + } + + public void setAnalysisIds(List analysisIds) { + this.analysisIds = analysisIds; + } + + public List getDomainIds() { + if(domainIds == null) { + return Collections.emptyList(); + } + return domainIds; + } + + public void setDomainIds(List domainIds) { + this.domainIds = domainIds; + } + + public boolean isFilterUsed() { + return !(getAnalysisIds().isEmpty() && getDomainIds().isEmpty() && getCohortIds().isEmpty()); + } + + public Float getThresholdValuePct() { + return thresholdValuePct != null ? thresholdValuePct : Constants.DEFAULT_THRESHOLD; + } + + public void setThresholdValuePct(Float thresholdValuePct) { + this.thresholdValuePct = thresholdValuePct; + } + + public Boolean isSummary() { + return isSummary; + } + + public void setSummary(Boolean summary) { + isSummary = summary; + } + + public Boolean getShowEmptyResults() { + return Boolean.TRUE.equals(isShowEmptyResults); + } + + public void setShowEmptyResults(Boolean showEmptyResults) { + isShowEmptyResults = showEmptyResults; + } + +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/ExportExecutionResultRequest.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/ExportExecutionResultRequest.java new file mode 100644 index 0000000000..2a4fb72de6 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/ExportExecutionResultRequest.java @@ -0,0 +1,16 @@ +package org.ohdsi.webapi.cohortcharacterization.dto; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public class ExportExecutionResultRequest extends ExecutionResultRequest{ + @JsonProperty("isComparative") + private Boolean isComparative; + + public Boolean isComparative() { + return isComparative; + } + + public void setComparative(Boolean comparative) { + isComparative = comparative; + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/GenerationResults.java 
b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/GenerationResults.java new file mode 100644 index 0000000000..80aff6ec12 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/GenerationResults.java @@ -0,0 +1,46 @@ +package org.ohdsi.webapi.cohortcharacterization.dto; + +import org.ohdsi.webapi.cohortcharacterization.report.Report; + +import java.util.List; + +public class GenerationResults { + private List reports; + private Float prevalenceThreshold; + private Boolean showEmptyResults; + private int count; + + public List getReports() { + return reports; + } + + public void setReports(List reports) { + this.reports = reports; + } + + public Float getPrevalenceThreshold() { + + return prevalenceThreshold; + } + + public void setPrevalenceThreshold(Float prevalenceThreshold) { + + this.prevalenceThreshold = prevalenceThreshold; + } + + public void setCount(int count) { + this.count = count; + } + + public int getCount() { + return count; + } + + public Boolean getShowEmptyResults() { + return showEmptyResults; + } + + public void setShowEmptyResults(Boolean showEmptyResults) { + this.showEmptyResults = showEmptyResults; + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/UserDTO.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/UserDTO.java new file mode 100644 index 0000000000..495fac8114 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/dto/UserDTO.java @@ -0,0 +1,38 @@ +package org.ohdsi.webapi.cohortcharacterization.dto; + +public class UserDTO { + + private Long id; + private String name; + private String login; + + public String getName() { + + return name; + } + + public void setName(final String name) { + + this.name = name; + } + + public String getLogin() { + + return login; + } + + public void setLogin(final String login) { + + this.login = login; + } + + public Long getId() { + + return id; + } + + public void setId(final Long id) { + + this.id = id; + } 
+} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/AnalysisItem.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/AnalysisItem.java new file mode 100644 index 0000000000..a8573f5f94 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/AnalysisItem.java @@ -0,0 +1,102 @@ +package org.ohdsi.webapi.cohortcharacterization.report; + +import org.apache.commons.lang3.tuple.ImmutablePair; +import org.apache.commons.lang3.tuple.Pair; +import org.ohdsi.analysis.cohortcharacterization.design.CcResultType; +import org.ohdsi.webapi.cohortcharacterization.dto.CcResult; +import org.ohdsi.webapi.cohortdefinition.CohortDefinitionEntity; + +import java.util.*; + +public class AnalysisItem { + // Key is covariate id and strata id + private Map, List> map = new HashMap<>(); + private CcResultType type; + private String name; + private String faType; + + public List getOrCreateCovariateItem(Long covariateId, Long strataId) { + Pair key = new ImmutablePair<>(covariateId, strataId); + map.putIfAbsent(key, new ArrayList<>()); + return map.get(key); + } + + public void setType(CcResultType resultType) { + this.type = resultType; + } + + public CcResultType getType() { + return this.type; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public void setFaType(String faType) { + this.faType = faType; + } + + public String getFaType() { + return faType; + } + + public AnalysisResultItem getSimpleItems(Map definitionMap, + Map feAnalysisMap) { + Set values = new HashSet<>(); + Set domainIds = new HashSet<>(); + Set cohorts = new HashSet<>(); + ItemFactory factory = new ItemFactory(); + for (List results : map.values()) { + for (CcResult ccResult : results) { + CohortDefinitionEntity cohortDef = definitionMap.get(ccResult.getCohortId()); + ExportItem item = factory.createItem(ccResult, cohortDef.getName()); + String domainId = 
feAnalysisMap.get(ccResult.getAnalysisName()); + item.setDomainId(domainId); + domainIds.add(domainId); + cohorts.add(new Cohort(cohortDef.getId(), cohortDef.getName())); + values.add(item); + } + } + return new AnalysisResultItem(domainIds, cohorts, values); + } + + public AnalysisResultItem getComparativeItems(CohortDefinitionEntity firstCohortDef, CohortDefinitionEntity secondCohortDef, + Map feAnalysisMap) { + Set values = new HashSet<>(); + Set domainIds = new HashSet<>(); + Set cohorts = new HashSet<>(); + cohorts.add(new Cohort(firstCohortDef.getId(), firstCohortDef.getName())); + cohorts.add(new Cohort(secondCohortDef.getId(), secondCohortDef.getName())); + ItemFactory factory = new ItemFactory(); + for (List results : map.values()) { + // create default items, because we can have result for only one cohort + ExportItem first = null; + ExportItem second = null; + for (CcResult ccResult : results) { + if (Objects.equals(ccResult.getCohortId(), firstCohortDef.getId())) { + first = factory.createItem(ccResult, firstCohortDef.getName()); + } else { + second = factory.createItem(ccResult, secondCohortDef.getName()); + } + } + ExportItem comparativeItem; + if(first instanceof DistributionItem || second instanceof DistributionItem) { + comparativeItem = new ComparativeDistributionItem((DistributionItem) first, (DistributionItem) second, + firstCohortDef, secondCohortDef); + } else { + comparativeItem = new ComparativeItem((PrevalenceItem) first, (PrevalenceItem) second, + firstCohortDef, secondCohortDef); + } + String domainId = feAnalysisMap.get(comparativeItem.getAnalysisName()); + comparativeItem.setDomainId(domainId); + domainIds.add(domainId); + values.add(comparativeItem); + } + return new AnalysisResultItem(domainIds, cohorts, values); + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/AnalysisResultItem.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/AnalysisResultItem.java new file mode 100644 index 
0000000000..be228ee17a --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/AnalysisResultItem.java @@ -0,0 +1,30 @@ +package org.ohdsi.webapi.cohortcharacterization.report; + +import org.apache.commons.lang3.tuple.Pair; + +import java.util.Set; + +public class AnalysisResultItem { + private Set domainIds; + private Set cohorts; + private Set exportItems; + + public AnalysisResultItem(Set domainIds, Set cohorts, + Set exportItems) { + this.domainIds = domainIds; + this.cohorts = cohorts; + this.exportItems = exportItems; + } + + public Set getDomainIds() { + return domainIds; + } + + public Set getCohorts() { + return cohorts; + } + + public Set getExportItems() { + return exportItems; + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/Cohort.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/Cohort.java new file mode 100644 index 0000000000..d4d808c0c0 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/Cohort.java @@ -0,0 +1,41 @@ +package org.ohdsi.webapi.cohortcharacterization.report; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; + +import java.util.Objects; + +public class Cohort { + private Integer cohortId; + private String cohortName; + + @JsonCreator + public Cohort(@JsonProperty("cohortId") Integer cohortId, + @JsonProperty("cohortName") String cohortName) { + this.cohortId = cohortId; + this.cohortName = cohortName; + } + + public Integer getCohortId() { + return cohortId; + } + + public String getCohortName() { + return cohortName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof Cohort)) return false; + Cohort cohort = (Cohort) o; + return Objects.equals(cohortId, cohort.cohortId) && + Objects.equals(cohortName, cohort.cohortName); + } + + @Override + public int hashCode() { + + return Objects.hash(cohortId, cohortName); + } +} 
diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/ComparativeDistributionItem.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/ComparativeDistributionItem.java new file mode 100644 index 0000000000..6520df2364 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/ComparativeDistributionItem.java @@ -0,0 +1,165 @@ +package org.ohdsi.webapi.cohortcharacterization.report; + +import org.ohdsi.webapi.cohortdefinition.CohortDefinitionEntity; + +import java.util.Objects; +import org.ohdsi.webapi.cohortcharacterization.dto.CcDistributionStat; + +public class ComparativeDistributionItem extends ComparativeItem { + private final Double targetStdDev; + private final Double targetMin; + private final Double targetP10; + private final Double targetP25; + private final Double targetMedian; + private final Double targetP75; + private final Double targetP90; + private final Double targetMax; + private final Double targetAvg; + + private final Double comparatorStdDev; + private final Double comparatorMin; + private final Double comparatorP10; + private final Double comparatorP25; + private final Double comparatorMedian; + private final Double comparatorP75; + private final Double comparatorP90; + private final Double comparatorMax; + private final Double comparatorAvg; + + private final Integer aggregateId; + private final String aggregateName; + private final Boolean missingMeansZero; + + private static final CcDistributionStat EMPTY_ITEM; + + public ComparativeDistributionItem(DistributionItem firstItem, DistributionItem secondItem, CohortDefinitionEntity firstCohortDef, + CohortDefinitionEntity secondCohortDef) { + super(firstItem, secondItem, firstCohortDef, secondCohortDef); + + DistributionItem item = Objects.nonNull(firstItem) ? 
firstItem : secondItem; + this.aggregateId = item.getAggregateId(); + this.aggregateName = item.getAggregateName(); + this.missingMeansZero = item.isMissingMeansZero(); + + this.targetStdDev = firstItem != null ? firstItem.getStdDev() : null; + this.targetMin = firstItem != null ? firstItem.getMin() : null; + this.targetP10 = firstItem != null ? firstItem.getP10() : null; + this.targetP25 = firstItem != null ? firstItem.getP25() : null; + this.targetMedian = firstItem != null ? firstItem.getMedian() : null; + this.targetP75 = firstItem != null ? firstItem.getP75() : null; + this.targetP90 = firstItem != null ? firstItem.getP90() : null; + this.targetMax = firstItem != null ? firstItem.getMax() : null; + this.targetAvg = firstItem != null ? firstItem.avg : null; + + this.comparatorStdDev = secondItem != null ? secondItem.getStdDev() : null; + this.comparatorMin = secondItem != null ? secondItem.getMin() : null; + this.comparatorP10 = secondItem != null ? secondItem.getP10() : null; + this.comparatorP25 = secondItem != null ? secondItem.getP25() : null; + this.comparatorMedian = secondItem != null ? secondItem.getMedian() : null; + this.comparatorP75 = secondItem != null ? secondItem.getP75() : null; + this.comparatorP90 = secondItem != null ? secondItem.getP90() : null; + this.comparatorMax = secondItem != null ? secondItem.getMax() : null; + this.comparatorAvg = secondItem != null ? 
secondItem.avg : null; + } + static { + EMPTY_ITEM = new CcDistributionStat(); + EMPTY_ITEM.setAvg(0.0d); + EMPTY_ITEM.setStdDev(0.0d); + } + + @Override + protected double calcDiff(ExportItem first, ExportItem second) { + if (first == null) { + first = new DistributionItem(EMPTY_ITEM, this.getTargetCohortName()); + } + + if (second == null) { + second = new DistributionItem(EMPTY_ITEM, this.getComparatorCohortName()); + } + return first.calcDiff(second); + } + + public Double getTargetStdDev() { + return targetStdDev; + } + + public Double getTargetMin() { + return targetMin; + } + + public Double getTargetP10() { + return targetP10; + } + + public Double getTargetP25() { + return targetP25; + } + + public Double getTargetMedian() { + return targetMedian; + } + + public Double getTargetP75() { + return targetP75; + } + + public Double getTargetP90() { + return targetP90; + } + + public Double getTargetMax() { + return targetMax; + } + + public Double getTargetAvg() { + return targetAvg; + } + + public Double getComparatorStdDev() { + return comparatorStdDev; + } + + public Double getComparatorMin() { + return comparatorMin; + } + + public Double getComparatorP10() { + return comparatorP10; + } + + public Double getComparatorP25() { + return comparatorP25; + } + + public Double getComparatorMedian() { + return comparatorMedian; + } + + public Double getComparatorP75() { + return comparatorP75; + } + + public Double getComparatorP90() { + return comparatorP90; + } + + public Double getComparatorMax() { + return comparatorMax; + } + + public Double getComparatorAvg() { + return comparatorAvg; + } + + public Integer getAggregateId() { + return aggregateId; + } + + public String getAggregateName() { + return aggregateName; + } + + public Boolean isMissingMeansZero() { + return missingMeansZero; + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/ComparativeItem.java 
b/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/ComparativeItem.java new file mode 100644 index 0000000000..0d02a180c4 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/ComparativeItem.java @@ -0,0 +1,160 @@ +package org.ohdsi.webapi.cohortcharacterization.report; + +import org.ohdsi.webapi.cohortdefinition.CohortDefinitionEntity; + +import java.util.ArrayList; +import java.util.List; +import org.ohdsi.webapi.cohortcharacterization.dto.CcPrevalenceStat; + +public class ComparativeItem extends ExportItem { + + private final boolean hasFirstItem; + private final boolean hasSecondItem; + private final Integer targetCohortId; + private final String targetCohortName; + private final Long targetCount; + private final Double targetPct; + private final Integer comparatorCohortId; + private final String comparatorCohortName; + private final Long comparatorCount; + private final Double comparatorPct; + private final double diff; + private static final CcPrevalenceStat EMPTY_ITEM; + + static { + EMPTY_ITEM = new CcPrevalenceStat(); + EMPTY_ITEM.setAvg(0.0d); + EMPTY_ITEM.setProportion(0.0d); + } + + public ComparativeItem(PrevalenceItem firstItem, PrevalenceItem secondItem, CohortDefinitionEntity firstCohortDef, + CohortDefinitionEntity secondCohortDef) { + super(firstItem != null ? firstItem : secondItem); + this.hasFirstItem = firstItem != null; + this.hasSecondItem = secondItem != null; + this.targetCohortId = firstCohortDef.getId(); + this.targetCohortName = firstCohortDef.getName(); + this.targetCount = firstItem != null ? firstItem.count : null; + this.targetPct = firstItem != null ? firstItem.pct : null; + this.comparatorCohortId = secondCohortDef.getId(); + this.comparatorCohortName = secondCohortDef.getName(); + this.comparatorCount = secondItem != null ? secondItem.count : null; + this.comparatorPct = secondItem != null ? 
secondItem.pct : null; + this.diff = calcDiff(firstItem, secondItem); + } + + protected double calcDiff(ExportItem first, ExportItem second) { + if (first == null) { + first = new PrevalenceItem(EMPTY_ITEM, this.targetCohortName); + } + + if (second == null) { + second = new PrevalenceItem(EMPTY_ITEM, this.comparatorCohortName); + } + return first.calcDiff(second); + } + + @Override + protected List getValueList() { + // Do not use parent function as this report has its own order of columns + List values = new ArrayList<>(); + values.add(String.valueOf(this.getAnalysisId())); + values.add(this.getAnalysisName()); + values.add(String.valueOf(this.getStrataId())); + values.add(this.getStrataName()); + values.add(String.valueOf(targetCohortId)); + values.add(targetCohortName); + values.add(String.valueOf(comparatorCohortId)); + values.add(comparatorCohortName); + values.add(String.valueOf(this.getCovariateId())); + values.add(this.getCovariateName()); + values.add(this.getCovariateShortName()); + values.add(this.targetCount != null ? String.valueOf(this.targetCount) : "0"); + values.add(this.targetPct != null ? String.valueOf(this.targetPct) : "0"); + values.add(this.comparatorCount != null ? String.valueOf(this.comparatorCount) : "0"); + values.add(this.comparatorPct != null ? String.valueOf(this.comparatorPct) : "0"); + values.add(String.format("%.4f", this.diff)); + return values; + } + + @Override + public int compareTo(ComparativeItem that) { + int res = getAnalysisId().compareTo(that.getAnalysisId()); + if (res == 0) { + getCovariateName().compareToIgnoreCase(that.getCovariateName()); + } + return res; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (!super.equals(o)) { + return false; + } + + ComparativeItem that = (ComparativeItem) o; + + if (targetCohortId != null ? 
!targetCohortId.equals(that.targetCohortId) : that.targetCohortId != null) { + return false; + } + return comparatorCohortId != null ? comparatorCohortId.equals(that.comparatorCohortId) : that.comparatorCohortId == null; + } + + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + (targetCohortId != null ? targetCohortId.hashCode() : 0); + result = 31 * result + (comparatorCohortId != null ? comparatorCohortId.hashCode() : 0); + return result; + } + + public boolean isHasFirstItem() { + return hasFirstItem; + } + + public boolean isHasSecondItem() { + return hasSecondItem; + } + + public Integer getTargetCohortId() { + return targetCohortId; + } + + public String getTargetCohortName() { + return targetCohortName; + } + + public Long getTargetCount() { + return targetCount; + } + + public Double getTargetPct() { + return targetPct; + } + + public Integer getComparatorCohortId() { + return comparatorCohortId; + } + + public String getComparatorCohortName() { + return comparatorCohortName; + } + + public Long getComparatorCount() { + return comparatorCount; + } + + public Double getComparatorPct() { + return comparatorPct; + } + + public double getDiff() { + return diff; + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/DistributionItem.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/DistributionItem.java new file mode 100644 index 0000000000..bbf86e4ed3 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/DistributionItem.java @@ -0,0 +1,120 @@ +package org.ohdsi.webapi.cohortcharacterization.report; + +import org.ohdsi.webapi.cohortcharacterization.dto.CcDistributionStat; + +import java.util.ArrayList; +import java.util.List; + +public class DistributionItem extends PrevalenceItem { + private final Double stdDev; + private final Double min; + private final Double p10; + private final Double p25; + private final Double median; + private final Double 
p75; + private final Double p90; + private final Double max; + private final Integer aggregateId; + private final String aggregateName; + private final Boolean missingMeansZero; + + public DistributionItem(CcDistributionStat distributionStat, String cohortName) { + super(distributionStat, cohortName); + this.stdDev = distributionStat.getStdDev(); + this.min = distributionStat.getMin(); + this.p10 = distributionStat.getP10(); + this.p25 = distributionStat.getP25(); + this.median = distributionStat.getMedian(); + this.p75 = distributionStat.getP75(); + this.p90 = distributionStat.getP90(); + this.max = distributionStat.getMax(); + this.aggregateId = distributionStat.getAggregateId(); + this.aggregateName = distributionStat.getAggregateName(); + this.missingMeansZero = distributionStat.isMissingMeansZero(); + } + + @Override + protected List getValueList() { + // Do not use parent function as this report has its own order of columns + List values = new ArrayList<>(); + values.add(String.valueOf(this.getAnalysisId())); + values.add(this.getAnalysisName()); + values.add(String.valueOf(this.getStrataId())); + values.add(this.getStrataName()); + values.add(String.valueOf(this.cohortId)); + values.add(this.cohortName); + values.add(String.valueOf(this.getCovariateId())); + values.add(this.getCovariateName()); + values.add(this.getCovariateShortName()); + values.add(this.aggregateName); + values.add(String.valueOf(this.missingMeansZero)); + values.add(String.valueOf(this.count)); + values.add(String.valueOf(this.avg)); + values.add(String.valueOf(this.stdDev)); + values.add(String.valueOf(this.min)); + values.add(String.valueOf(this.p10)); + values.add(String.valueOf(this.p25)); + values.add(String.valueOf(this.median)); + values.add(String.valueOf(this.p75)); + values.add(String.valueOf(this.p90)); + values.add(String.valueOf(this.max)); + return values; + } + + @Override + protected double calcDiff(DistributionItem another) { + if (stdDev == null || another.stdDev == null 
|| avg == null || another.avg == null) { + return 0d; + } + double sd1 = stdDev; + double sd2 = another.stdDev; + + double sd = Math.sqrt((sd1 * sd1 + sd2 * sd2)/2.0); + // prevent division by zero + return sd != 0D ? (avg - another.avg) / sd : 0; + } + + public Double getStdDev() { + return stdDev; + } + + public Double getMin() { + return min; + } + + public Double getP10() { + return p10; + } + + public Double getP25() { + return p25; + } + + public Double getMedian() { + return median; + } + + public Double getP75() { + return p75; + } + + public Double getP90() { + return p90; + } + + public Double getMax() { + return max; + } + + public Integer getAggregateId() { + return aggregateId; + } + + public String getAggregateName() { + return aggregateName; + } + + public Boolean isMissingMeansZero() { + return missingMeansZero; + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/ExportItem.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/ExportItem.java new file mode 100644 index 0000000000..d43d7b61a4 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/ExportItem.java @@ -0,0 +1,178 @@ +package org.ohdsi.webapi.cohortcharacterization.report; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeInfo; +import org.apache.commons.lang3.StringUtils; +import org.ohdsi.webapi.cohortcharacterization.dto.CcPrevalenceStat; + +import java.util.ArrayList; +import java.util.List; + +@JsonTypeInfo(use = JsonTypeInfo.Id.NONE) +@JsonSubTypes({ + @JsonSubTypes.Type(value = ComparativeDistributionItem.class), + @JsonSubTypes.Type(value = ComparativeItem.class), + @JsonSubTypes.Type(value = DistributionItem.class), + @JsonSubTypes.Type(value = PrevalenceItem.class) +}) +public abstract class ExportItem implements Comparable { + private final Integer analysisId; + private final String analysisName; + 
private final Long strataId; + private final String strataName; + private final Long covariateId; + private final String covariateName; + private final String covariateShortName; + private final String faType; + private String domainId; + private final Long conceptId; + private final String conceptName; + + public ExportItem(CcPrevalenceStat ccResult) { + this.analysisId = ccResult.getAnalysisId(); + this.analysisName = ccResult.getAnalysisName(); + this.strataId = ccResult.getStrataId(); + this.strataName = getStrataNameOrDefault(ccResult.getStrataName()); + this.covariateId = ccResult.getCovariateId(); + this.covariateName = ccResult.getCovariateName(); + this.covariateShortName = extractMeaningfulCovariateName(ccResult.getCovariateName()); + this.faType = ccResult.getFaType(); + this.conceptId = ccResult.getConceptId(); + this.conceptName = ccResult.getConceptName(); + } + + public ExportItem(PrevalenceItem item) { + this.analysisId = item.getAnalysisId(); + this.analysisName = item.getAnalysisName(); + this.strataId = item.getStrataId(); + this.strataName = getStrataNameOrDefault(item.getStrataName()); + this.covariateId = item.getCovariateId(); + this.covariateName = item.getCovariateName(); + this.covariateShortName = extractMeaningfulCovariateName(item.getCovariateName()); + this.faType = item.getFaType(); + this.conceptId = item.getConceptId(); + this.conceptName = item.getConceptName(); + } + + protected List getValueList() { + List values = new ArrayList<>(); + values.add(String.valueOf(this.analysisId)); + values.add(this.analysisName); + values.add(String.valueOf(this.strataId)); + values.add(this.strataName); + values.add(String.valueOf(this.covariateId)); + values.add(this.covariateName); + values.add(this.covariateShortName); + return values; + } + + @Override + public int compareTo(ExportItem that) { + int res = analysisId.compareTo(analysisId); + if (res == 0) { + covariateName.compareToIgnoreCase(that.covariateName); + } + return res; + } + + 
@JsonIgnore + public String[] getValueArray() { + List values = getValueList(); + return values.toArray(new String[values.size()]); + } + + protected String extractMeaningfulCovariateName(String fullName) { + if (fullName == null) { + return StringUtils.EMPTY; + } + String[] nameParts = fullName.split(":"); + if (nameParts.length < 2) { + nameParts = fullName.split("="); + } + if (nameParts.length != 2) { + return fullName; + } else { + return nameParts[1]; + } + } + + protected String getStrataNameOrDefault(String value) { + return StringUtils.isNotEmpty(value) ? value : "All stratas"; + } + + protected double calcDiff(T another) { + return 0; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + ExportItem that = (ExportItem) o; + + if (analysisId != null ? !analysisId.equals(that.analysisId) : that.analysisId != null) return false; + if (strataId != null ? !strataId.equals(that.strataId) : that.strataId != null) return false; + if (covariateId != null ? !covariateId.equals(that.covariateId) : that.covariateId != null) return false; + if (conceptId != null ? !conceptId.equals(that.conceptId) : that.conceptId != null) return false; + return conceptName != null ? conceptName.equals(that.conceptName) : that.conceptName == null; + } + + @Override + public int hashCode() { + int result = analysisId != null ? analysisId.hashCode() : 0; + result = 31 * result + (strataId != null ? strataId.hashCode() : 0); + result = 31 * result + (covariateId != null ? covariateId.hashCode() : 0); + result = 31 * result + (conceptId != null ? 
conceptId.hashCode() : 0); + return result; + } + + public Integer getAnalysisId() { + return analysisId; + } + + public String getAnalysisName() { + return analysisName; + } + + public Long getStrataId() { + return strataId; + } + + public String getStrataName() { + return strataName; + } + + public Long getCovariateId() { + return covariateId; + } + + public String getCovariateName() { + return covariateName; + } + + public String getCovariateShortName() { + return covariateShortName; + } + + public String getFaType() { + return faType; + } + + public String getDomainId() { + return domainId; + } + + public void setDomainId(String domainId) { + this.domainId = domainId; + } + + public Long getConceptId() { + return conceptId; + } + + public String getConceptName() { + return conceptName; + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/ItemFactory.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/ItemFactory.java new file mode 100644 index 0000000000..c6a5dc4876 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/ItemFactory.java @@ -0,0 +1,27 @@ +package org.ohdsi.webapi.cohortcharacterization.report; + +import com.google.common.collect.ImmutableMap; +import org.ohdsi.analysis.cohortcharacterization.design.CcResultType; +import org.ohdsi.webapi.cohortcharacterization.dto.CcDistributionStat; +import org.ohdsi.webapi.cohortcharacterization.dto.CcPrevalenceStat; +import org.ohdsi.webapi.cohortcharacterization.dto.CcResult; + +import java.util.Map; +import java.util.Optional; +import java.util.function.BiFunction; + +import static org.ohdsi.analysis.cohortcharacterization.design.CcResultType.DISTRIBUTION; +import static org.ohdsi.analysis.cohortcharacterization.design.CcResultType.PREVALENCE; + +public class ItemFactory { + private static Map> itemMap = ImmutableMap.of( + PREVALENCE, (ccResult, cohortName) -> new PrevalenceItem((CcPrevalenceStat) ccResult, cohortName), + DISTRIBUTION, 
(ccResult, cohortName) -> new DistributionItem((CcDistributionStat) ccResult, cohortName) + ); + + public ExportItem createItem(CcResult ccResult, String cohortName) { + return Optional.ofNullable(itemMap.get(ccResult.getResultType())) + .orElseThrow(() -> new RuntimeException("ExportItem for result type: " + ccResult.getResultType() + " is not defined")) + .apply(ccResult, cohortName); + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/PrevalenceItem.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/PrevalenceItem.java new file mode 100644 index 0000000000..ce08d90ad7 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/PrevalenceItem.java @@ -0,0 +1,136 @@ +package org.ohdsi.webapi.cohortcharacterization.report; + +import org.ohdsi.webapi.cohortcharacterization.dto.CcPrevalenceStat; + +import java.util.ArrayList; +import java.util.List; + +public class PrevalenceItem extends ExportItem { + protected final double MAX_DIFF = 1000.0d; // we need to ensure a JSON-parsable value + protected final Integer cohortId; + protected final String cohortName; + protected final Long count; + protected final Double pct; + protected final Double avg; + private List temporal; + private List temporalAnnual; + + public PrevalenceItem(CcPrevalenceStat prevalenceStat, String cohortName) { + super(prevalenceStat); + this.cohortId = prevalenceStat.getCohortId(); + this.cohortName = cohortName; + this.count = prevalenceStat.getCount(); + this.avg = prevalenceStat.getAvg(); + this.pct = prevalenceStat.getAvg() * 100; + } + + @Override + protected List getValueList() { + List values = new ArrayList<>(); + values.add(String.valueOf(this.getAnalysisId())); + values.add(this.getAnalysisName()); + values.add(String.valueOf(this.getStrataId())); + values.add(this.getStrataName()); + values.add(String.valueOf(this.cohortId)); + values.add(this.cohortName); + values.add(String.valueOf(this.getCovariateId())); + 
values.add(this.getCovariateName()); + values.add(this.getCovariateShortName()); + values.add(String.valueOf(this.count)); + values.add(String.valueOf(this.pct)); + return values; + } + + + /** + * Calculate Standardized Mean Difference of dichotomous (binary) variable + * From https://www.ncbi.nlm.nih.gov/pmc/articles/PMC3472075/pdf/sim0028-3083.pdf + * + **/ + @Override + protected double calcDiff(PrevalenceItem another) { + double pTarget = avg == null ? 0 : avg; + double pCompare = another.avg == null ? 0 : another.avg; + + double pooledError = Math.sqrt(((pTarget * (1.0 - pTarget)) + (pCompare * (1.0 - pCompare)))/2); + if (pooledError == 0) { + // undefined case where denom = 0 + if (pTarget != pCompare) { + // return +/- INF based on if T is bigger. + return pTarget > pCompare ? Double.POSITIVE_INFINITY : Double.NEGATIVE_INFINITY; + } else { + // pTreatment and pCompare are same, so return 0 + return 0.0d; + } + } else { + // calculate the standard mean differnce + return (pTarget - pCompare) / pooledError; + } + } + + @Override + public int compareTo(PrevalenceItem that) { + int res = getAnalysisId().compareTo(that.getAnalysisId()); + if (res == 0) { + getCovariateName().compareToIgnoreCase(that.getCovariateName()); + } + if (res == 0) { + res = cohortName.compareToIgnoreCase(that.cohortName); + } + return res; + } + + public Long getCount() { + return count; + } + + public Double getPct() { + return pct; + } + + public Double getAvg() { + return avg; + } + + public Integer getCohortId() { + return cohortId; + } + + public String getCohortName() { + return cohortName; + } + + public List getTemporal() { + return temporal; + } + + public void setTemporal(List temporal) { + this.temporal = temporal; + } + + public List getTemporalAnnual() { + return temporalAnnual; + } + + public void setTemporalAnnual(List temporalAnnual) { + this.temporalAnnual = temporalAnnual; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == 
null || getClass() != o.getClass()) return false; + if (!super.equals(o)) return false; + + PrevalenceItem that = (PrevalenceItem) o; + + return cohortId != null ? cohortId.equals(that.cohortId) : that.cohortId == null; + } + + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + (cohortId != null ? cohortId.hashCode() : 0); + return result; + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/Report.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/Report.java new file mode 100644 index 0000000000..57cbb38ae1 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/report/Report.java @@ -0,0 +1,55 @@ +package org.ohdsi.webapi.cohortcharacterization.report; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import org.apache.commons.lang3.tuple.Pair; +import org.ohdsi.analysis.cohortcharacterization.design.CcResultType; + +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +public class Report { + public String analysisName; + public Integer analysisId; + public CcResultType resultType; + @JsonIgnore + public List header; + public Set domainIds; + public Set cohorts; + public Set items; + public boolean isComparative; + public boolean isSummary; + public String faType; + public String domainId; + + public Report(String analysisName, Integer analysisId, AnalysisResultItem resultItem) { + this.analysisName = analysisName; + this.analysisId = analysisId; + this.items = resultItem.getExportItems(); + cohorts = resultItem.getCohorts(); + domainIds = resultItem.getDomainIds(); + } + + public Report(String analysisName, List simpleResultSummary) { + this.analysisName = analysisName; + domainIds = new HashSet<>(); + cohorts = new HashSet<>(); + items = new HashSet<>(); + simpleResultSummary + .forEach(item -> { + domainIds.addAll(item.getDomainIds()); + 
/**
 * Per-calendar-year aggregate of a temporal covariate: a count, a mean value
 * and the year they refer to. Plain mutable bean populated by the report
 * builder and serialized as-is.
 */
public class TemporalAnnualItem {

    private Long count;
    private Double avg;
    private Integer year;

    public Long getCount() {
        return count;
    }

    public void setCount(Long count) {
        this.count = count;
    }

    public Double getAvg() {
        return avg;
    }

    public void setAvg(Double avg) {
        this.avg = avg;
    }

    public Integer getYear() {
        return year;
    }

    public void setYear(Integer year) {
        this.year = year;
    }
}
/**
 * Aggregate of a temporal covariate over a day window relative to the index
 * date: a count, a mean value and the window bounds (startDay/endDay). Plain
 * mutable bean populated by the report builder and serialized as-is.
 */
public class TemporalItem {

    private Long count;
    private Double avg;
    private Integer startDay;
    private Integer endDay;

    public Long getCount() {
        return count;
    }

    public void setCount(Long count) {
        this.count = count;
    }

    public Double getAvg() {
        return avg;
    }

    public void setAvg(Double avg) {
        this.avg = avg;
    }

    public Integer getStartDay() {
        return startDay;
    }

    public void setStartDay(Integer startDay) {
        this.startDay = startDay;
    }

    public Integer getEndDay() {
        return endDay;
    }

    public void setEndDay(Integer endDay) {
        this.endDay = endDay;
    }
}
/**
 * Spring Data repository for cohort characterization generation records
 * (CcGenerationEntity), with entity-graph-aware lookups. Query behavior is
 * derived from the method names by Spring Data JPA.
 */
public interface CcGenerationEntityRepository extends EntityGraphJpaRepository {
    /** Generations of the given characterization, newest first. */
    List findByCohortCharacterizationIdOrderByIdDesc(Long id, EntityGraph entityGraph);

    /** Generations of the given characterization restricted to one source, newest first. */
    List findByCohortCharacterizationIdAndSourceSourceKeyOrderByIdDesc(Long id, String sourceKey, EntityGraph entityGraph);

    /** Generations whose status is one of the given values. */
    List findByStatusIn(List statuses);
}
CcParamRepository extends JpaRepository { + Set findAllByCohortCharacterization(CohortCharacterizationEntity mainEntity); +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/repository/CcRepository.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/repository/CcRepository.java new file mode 100644 index 0000000000..f201faef94 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/repository/CcRepository.java @@ -0,0 +1,32 @@ +package org.ohdsi.webapi.cohortcharacterization.repository; + +import java.util.List; +import java.util.Optional; + +import com.cosium.spring.data.jpa.entity.graph.repository.EntityGraphJpaRepository; +import org.ohdsi.webapi.cohortcharacterization.domain.CohortCharacterizationEntity; +import org.ohdsi.webapi.cohortdefinition.CohortDefinitionEntity; +import org.ohdsi.webapi.feanalysis.domain.FeAnalysisEntity; +import org.springframework.data.jpa.repository.Query; +import org.springframework.data.repository.query.Param; + +public interface CcRepository extends EntityGraphJpaRepository { + Optional findById(final Long id); + + @Query("SELECT cc FROM CohortCharacterizationEntity cc WHERE cc.name LIKE ?1 ESCAPE '\\'") + List findAllByNameStartsWith(String pattern); + + Optional findByName(String name); + + @Query("SELECT COUNT(cc) FROM CohortCharacterizationEntity cc WHERE cc.name = :ccName and cc.id <> :ccId") + int getCountCcWithSameName(@Param("ccId") Long ccId, @Param("ccName") String ccName); + + @Query("SELECT cc FROM CohortCharacterizationEntity cc JOIN cc.cohortDefinitions cd WHERE cd = ?1") + List findByCohortDefinition(CohortDefinitionEntity cd); + + @Query("SELECT cc FROM CohortCharacterizationEntity cc JOIN cc.featureAnalyses fa WHERE fa.featureAnalysis = :fa") + List findByFeatureAnalysis(@Param("fa") FeAnalysisEntity feAnalysis); + + @Query("SELECT DISTINCT cc FROM CohortCharacterizationEntity cc JOIN FETCH cc.tags t WHERE lower(t.name) in :tagNames") + List findByTags(@Param("tagNames") 
List tagNames); +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/repository/CcStrataRepository.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/repository/CcStrataRepository.java new file mode 100644 index 0000000000..466c742fcd --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/repository/CcStrataRepository.java @@ -0,0 +1,7 @@ +package org.ohdsi.webapi.cohortcharacterization.repository; + +import org.ohdsi.webapi.cohortcharacterization.domain.CcStrataEntity; +import org.springframework.data.jpa.repository.JpaRepository; + +public interface CcStrataRepository extends JpaRepository { +} diff --git a/src/main/java/org/ohdsi/webapi/cohortcharacterization/specification/CohortCharacterizationImpl.java b/src/main/java/org/ohdsi/webapi/cohortcharacterization/specification/CohortCharacterizationImpl.java new file mode 100644 index 0000000000..a2b57790a0 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/cohortcharacterization/specification/CohortCharacterizationImpl.java @@ -0,0 +1,55 @@ +package org.ohdsi.webapi.cohortcharacterization.specification; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonProperty; +import org.ohdsi.analysis.cohortcharacterization.design.CohortCharacterization; +import org.ohdsi.analysis.hydra.design.SkeletonTypeEnum; +import org.ohdsi.webapi.cohortcharacterization.dto.CcExportDTO; + +@JsonIgnoreProperties(ignoreUnknown = true) +public class CohortCharacterizationImpl extends CcExportDTO implements CohortCharacterization { + + @JsonProperty + private SkeletonTypeEnum skeletonType = SkeletonTypeEnum.COHORT_CHARACTERIZATION; + + @JsonProperty + private String skeletonVersion = "v0.0.1"; + + @JsonProperty + private String packageName = null; + + @JsonProperty + private String organizationName = null; + + public SkeletonTypeEnum getSkeletonType() { + return skeletonType; + } + + public void setSkeletonType(SkeletonTypeEnum 
skeletonType) { + this.skeletonType = skeletonType; + } + + public String getSkeletonVersion() { + return skeletonVersion; + } + + public void setSkeletonVersion(String skeletonVersion) { + this.skeletonVersion = skeletonVersion; + } + + public String getPackageName() { + return packageName; + } + + public void setPackageName(String packageName) { + this.packageName = packageName; + } + + public String getOrganizationName() { + return organizationName; + } + + public void setOrganizationName(String organizationName) { + this.organizationName = organizationName; + } +} diff --git a/src/main/java/org/ohdsi/webapi/cohortdefinition/CohortDefinitionEntity.java b/src/main/java/org/ohdsi/webapi/cohortdefinition/CohortDefinitionEntity.java index 8c13fff9a8..ddb8394f41 100644 --- a/src/main/java/org/ohdsi/webapi/cohortdefinition/CohortDefinitionEntity.java +++ b/src/main/java/org/ohdsi/webapi/cohortdefinition/CohortDefinitionEntity.java @@ -22,8 +22,6 @@ import org.ohdsi.webapi.model.CommonEntityExt; import org.ohdsi.webapi.tag.domain.Tag; -import jakarta.persistence.Access; -import jakarta.persistence.AccessType; import jakarta.persistence.CascadeType; import jakarta.persistence.Column; import jakarta.persistence.Entity; @@ -62,7 +60,6 @@ public class CohortDefinitionEntity extends CommonEntityExt implements @Id @SequenceGenerator(name = "cohort_definition_generator", sequenceName = "cohort_definition_sequence", allocationSize = 1) @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "cohort_definition_generator") - @Access(AccessType.PROPERTY) private Integer id; private String name; @@ -74,7 +71,6 @@ public class CohortDefinitionEntity extends CommonEntityExt implements private ExpressionType expressionType; @OneToOne(cascade = CascadeType.ALL, fetch = FetchType.LAZY, optional = false, orphanRemoval = true, mappedBy="definition") - @JoinColumn(name="id") private CohortDefinitionDetailsEntity details; @OneToMany(fetch = FetchType.LAZY, cascade = 
CascadeType.ALL, mappedBy = "cohortDefinition") diff --git a/src/main/java/org/ohdsi/webapi/cohortdefinition/CohortDefinitionService.java b/src/main/java/org/ohdsi/webapi/cohortdefinition/CohortDefinitionService.java index f5a82b7e4b..13d582d915 100644 --- a/src/main/java/org/ohdsi/webapi/cohortdefinition/CohortDefinitionService.java +++ b/src/main/java/org/ohdsi/webapi/cohortdefinition/CohortDefinitionService.java @@ -611,7 +611,7 @@ public CohortDTO saveCohortDefinition(@PathVariable("id") final int id, @Request */ @GetMapping(value = "/{id}/generate/{sourceKey}", produces = MediaType.APPLICATION_JSON_VALUE) @PreAuthorize(""" - (isOwner(#id, COHORT_DEFINITION) or isPermitted('write:cohort-definition') or isPermitted('read:cohort-definition') or hasEntityAccess(#id, COHORT_DEFINITION, READ)) + (isOwner(#id, COHORT_DEFINITION) or isPermitted(anyOf('write:cohort-definition','read:cohort-definition')) or hasEntityAccess(#id, COHORT_DEFINITION, READ)) and (isPermitted('write:source') or hasSourceAccess(#sourceKey, WRITE)) """) public JobExecutionResource generateCohort(@PathVariable("id") final int id, @@ -668,7 +668,7 @@ public JobExecutionResource generateCohort(@PathVariable("id") final int id, @PreAuthorize(""" (isOwner(#id, COHORT_DEFINITION) or isPermitted('write:cohort-definition') or isPermitted('read:cohort-definition') or hasEntityAccess(#id, COHORT_DEFINITION, READ)) and (isPermitted('write:source') or hasSourceAccess(#sourceKey, WRITE)) - """) + """) public ResponseEntity cancelGenerateCohort(@PathVariable("id") final int id, @PathVariable("sourceKey") final String sourceKey) { final Source source = Optional.ofNullable(getSourceRepository().findBySourceKey(sourceKey)) @@ -851,7 +851,7 @@ private List getConceptSetExports(CohortDefinitionEntity def, */ @GetMapping(value = "/{id}/export/conceptset", produces = MediaType.APPLICATION_OCTET_STREAM_VALUE) @PreAuthorize("isOwner(#id, COHORT_DEFINITION) or isPermitted('read:cohort-definition') or 
isPermitted('write:cohort-definition') or hasEntityAccess(#id, COHORT_DEFINITION, READ)") - public ResponseEntity exportConceptSets(@PathVariable("id") final int id) { + public ResponseEntity exportConceptSets(@PathVariable("id") final int id) { Source source = sourceService.getPriorityVocabularySource(); if (Objects.isNull(source)) { @@ -886,7 +886,7 @@ public ResponseEntity exportConceptSets(@PathVariable("id") final int id) { @Transactional @PreAuthorize(""" (isOwner(#id, COHORT_DEFINITION) or isPermitted('read:cohort-definition') or isPermitted('write:cohort-definition') or hasEntityAccess(#id, COHORT_DEFINITION, READ)) - and (isPermitted(anyOf('read:source','write:source')) or hasSourceAccess(#sourceKey, anyOf(READ, WRITE))) + and (isPermitted(anyOf('read:source','write:source')) or hasSourceAccess(#sourceKey, READ)) """) public InclusionRuleReport getInclusionRuleReport( @PathVariable("id") final int id, @@ -1191,7 +1191,6 @@ public CohortDTO copyAssetFromVersion(@PathVariable("id") final int id, @PathVar */ @PostMapping(value = "/byTags", produces = MediaType.APPLICATION_JSON_VALUE, consumes = MediaType.APPLICATION_JSON_VALUE) @Transactional - @PreAuthorize("isPermitted('read:cohort-definition') or isPermitted('write:cohort-definition')") public List listByTags(@RequestBody TagNameListRequestDTO requestDTO) { if (requestDTO == null || requestDTO.getNames() == null || requestDTO.getNames().isEmpty()) { return Collections.emptyList(); diff --git a/src/main/java/org/ohdsi/webapi/cohortdefinition/CohortGenerationService.java b/src/main/java/org/ohdsi/webapi/cohortdefinition/CohortGenerationService.java index fb1deb9d7e..8f06b8b8c0 100644 --- a/src/main/java/org/ohdsi/webapi/cohortdefinition/CohortGenerationService.java +++ b/src/main/java/org/ohdsi/webapi/cohortdefinition/CohortGenerationService.java @@ -132,14 +132,14 @@ private Job buildGenerateCohortJob(CohortDefinitionEntity cohortDefinition, Sour GenerateCohortTasklet generateTasklet = new 
GenerateCohortTasklet( getSourceJdbcTemplate(source), - getTransactionTemplate(), + getBatchTransactionTemplate(), generationCacheHelper, cohortDefinitionRepository, sourceService ); ExceptionHandler exceptionHandler = new GenerationTaskExceptionHandler(new TempTableCleanupManager(getSourceJdbcTemplate(source), - getTransactionTemplate(), + getBatchTransactionTemplate(), source.getSourceDialect(), jobParameters.getString(SESSION_ID), SourceUtils.getTempQualifierOrNull(source) @@ -152,6 +152,7 @@ private Job buildGenerateCohortJob(CohortDefinitionEntity cohortDefinition, Sour SimpleJobBuilder generateJobBuilder = new JobBuilder(GENERATE_COHORT, jobRepository).start(generateCohortStep); + // Listener runs outside step context, needs JpaTransactionManager for entity operations generateJobBuilder.listener(new GenerationJobExecutionListener(sourceService, cohortDefinitionRepository, this.getTransactionTemplateRequiresNew(), this.getSourceJdbcTemplate(source))); diff --git a/src/main/java/org/ohdsi/webapi/cohortsample/CohortSamplingService.java b/src/main/java/org/ohdsi/webapi/cohortsample/CohortSamplingService.java index c0f2fc47df..b013ce6a7f 100644 --- a/src/main/java/org/ohdsi/webapi/cohortsample/CohortSamplingService.java +++ b/src/main/java/org/ohdsi/webapi/cohortsample/CohortSamplingService.java @@ -478,7 +478,7 @@ public void launchDeleteSamplesTasklet(int cohortDefinitionId, int sourceId) { } public CleanupCohortSamplesTasklet createDeleteSamplesTasklet() { - return new CleanupCohortSamplesTasklet(getTransactionTemplate(), getSourceRepository(), this, sampleRepository); + return new CleanupCohortSamplesTasklet(getBatchTransactionTemplate(), getSourceRepository(), this, sampleRepository); } /** Maps a SQL result to a sample element. 
*/ diff --git a/src/main/java/org/ohdsi/webapi/common/generation/AnalysisTasklet.java b/src/main/java/org/ohdsi/webapi/common/generation/AnalysisTasklet.java new file mode 100644 index 0000000000..f4942ed50d --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/common/generation/AnalysisTasklet.java @@ -0,0 +1,56 @@ +package org.ohdsi.webapi.common.generation; + +import org.ohdsi.webapi.cohortcharacterization.repository.AnalysisGenerationInfoEntityRepository; +import org.ohdsi.webapi.exception.AtlasException; +import org.ohdsi.webapi.security.authz.UserEntity; +import org.ohdsi.webapi.util.CancelableJdbcTemplate; +import org.slf4j.Logger; +import org.springframework.batch.core.step.tasklet.StoppableTasklet; +import org.springframework.transaction.TransactionDefinition; +import org.springframework.transaction.TransactionStatus; +import org.springframework.transaction.support.DefaultTransactionDefinition; +import org.springframework.transaction.support.TransactionTemplate; + +public abstract class AnalysisTasklet extends CancelableTasklet implements StoppableTasklet { + + protected final AnalysisGenerationInfoEntityRepository analysisGenerationInfoEntityRepository; + + public AnalysisTasklet(Logger log, + CancelableJdbcTemplate jdbcTemplate, + TransactionTemplate transactionTemplate, + AnalysisGenerationInfoEntityRepository analysisGenerationInfoEntityRepository) { + + super(log, jdbcTemplate, transactionTemplate); + this.analysisGenerationInfoEntityRepository = analysisGenerationInfoEntityRepository; + } + + /** + * Saves analysis generation info in a separate transaction (PROPAGATION_REQUIRES_NEW). + * This ensures metadata is committed immediately and won't roll back with step failure. + * Uses the same transaction manager as the step transaction to properly suspend it. 
+ */ + protected void saveInfoWithinTheSeparateTransaction(Long jobId, String serializedDesign, UserEntity userEntity) { + DefaultTransactionDefinition txDefinition = new DefaultTransactionDefinition(); + txDefinition.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW); + TransactionStatus infoSaveTx = null; + try { + infoSaveTx = this.transactionTemplate.getTransactionManager().getTransaction(txDefinition); + saveInfo(jobId, serializedDesign, userEntity); + this.transactionTemplate.getTransactionManager().commit(infoSaveTx); + } catch (Exception ex) { + log.error("Cannot save sourceInfo for the job: {} ", jobId, ex); + if (infoSaveTx != null) { + this.transactionTemplate.getTransactionManager().rollback(infoSaveTx); + } + throw new AtlasException(ex); + } + } + + private void saveInfo(Long jobId, String serializedDesign, UserEntity userEntity) { + AnalysisGenerationInfoEntity generationInfoEntity = new AnalysisGenerationInfoEntity(); + generationInfoEntity.setId(jobId); + generationInfoEntity.setDesign(serializedDesign); + generationInfoEntity.setCreatedBy(userEntity); + analysisGenerationInfoEntityRepository.save(generationInfoEntity); + } +} diff --git a/src/main/java/org/ohdsi/webapi/common/generation/GenerationUtils.java b/src/main/java/org/ohdsi/webapi/common/generation/GenerationUtils.java index be8d862b8a..15df2885b5 100644 --- a/src/main/java/org/ohdsi/webapi/common/generation/GenerationUtils.java +++ b/src/main/java/org/ohdsi/webapi/common/generation/GenerationUtils.java @@ -1,6 +1,15 @@ package org.ohdsi.webapi.common.generation; +import static org.ohdsi.webapi.Constants.Params.SESSION_ID; +import static org.ohdsi.webapi.Constants.Params.TARGET_TABLE; + +import java.util.Collection; +import java.util.function.Function; + import org.ohdsi.webapi.Constants; +import org.ohdsi.webapi.cohortcharacterization.CreateCohortTableTasklet; +import org.ohdsi.webapi.cohortcharacterization.DropCohortTableListener; +import 
org.ohdsi.webapi.cohortcharacterization.GenerateLocalCohortTasklet; import org.ohdsi.webapi.cohortdefinition.CohortDefinitionEntity; import org.ohdsi.webapi.cohortdefinition.CohortGenerationService; import org.ohdsi.webapi.generationcache.GenerationCacheHelper; @@ -28,12 +37,6 @@ import org.springframework.transaction.support.TransactionTemplate; import jakarta.persistence.EntityManager; -import java.util.Collection; -import java.util.List; -import java.util.function.Function; - -import static org.ohdsi.webapi.Constants.Params.SESSION_ID; -import static org.ohdsi.webapi.Constants.Params.TARGET_TABLE; @Component public class GenerationUtils extends AbstractDaoService { @@ -41,6 +44,7 @@ public class GenerationUtils extends AbstractDaoService { private JobRepository jobRepository; private PlatformTransactionManager transactionManager; private TransactionTemplate transactionTemplate; + private TransactionTemplate batchTransactionTemplate; private CohortGenerationService cohortGenerationService; private SourceService sourceService; private JobService jobService; @@ -54,6 +58,7 @@ public class GenerationUtils extends AbstractDaoService { public GenerationUtils(JobRepository jobRepository, PlatformTransactionManager transactionManager, @Qualifier("transactionTemplate") TransactionTemplate transactionTemplate, + @Qualifier("batchTransactionTemplate") TransactionTemplate batchTransactionTemplate, CohortGenerationService cohortGenerationService, SourceService sourceService, SourceAwareSqlRender sourceAwareSqlRender, @@ -64,6 +69,7 @@ public GenerationUtils(JobRepository jobRepository, this.jobRepository = jobRepository; this.transactionManager = transactionManager; this.transactionTemplate = transactionTemplate; + this.batchTransactionTemplate = batchTransactionTemplate; this.cohortGenerationService = cohortGenerationService; this.sourceService = sourceService; this.sourceAwareSqlRender = sourceAwareSqlRender; @@ -77,6 +83,63 @@ public static String 
getTempCohortTableName(String sessionId) { return Constants.TEMP_COHORT_TABLE_PREFIX + sessionId; } + public SimpleJobBuilder buildJobForCohortBasedAnalysisTasklet( + String analysisTypeName, + Source source, + JobParametersBuilder builder, + JdbcTemplate jdbcTemplate, + Function> cohortGetter, + CancelableTasklet analysisTasklet + ) { + + final String sessionId = SessionUtils.sessionId(); + addSessionParams(builder, sessionId); + + TempTableCleanupManager cleanupManager = new TempTableCleanupManager( + getSourceJdbcTemplate(source), + batchTransactionTemplate, + source.getSourceDialect(), + sessionId, + SourceUtils.getTempQualifier(source) + ); + + GenerationTaskExceptionHandler exceptionHandler = new GenerationTaskExceptionHandler(cleanupManager); + + CreateCohortTableTasklet createCohortTableTasklet = new CreateCohortTableTasklet(jdbcTemplate, batchTransactionTemplate, sourceService, sourceAwareSqlRender); + Step createCohortTableStep = new StepBuilder(analysisTypeName + ".createCohortTable", jobRepository) + .tasklet(createCohortTableTasklet, transactionManager) + .build(); + + GenerateLocalCohortTasklet generateLocalCohortTasklet = new GenerateLocalCohortTasklet( + batchTransactionTemplate, + getSourceJdbcTemplate(source), + cohortGenerationService, + sourceService, + cohortGetter, + generationCacheHelper, + useAsyncCohortGeneration + ); + Step generateLocalCohortStep = new StepBuilder(analysisTypeName + ".generateCohort", jobRepository) + .tasklet(generateLocalCohortTasklet, transactionManager) + .build(); + + Step generateAnalysisStep = new StepBuilder(analysisTypeName + ".generate", jobRepository) + .tasklet(analysisTasklet, transactionManager) + .exceptionHandler(exceptionHandler) + .build(); + + DropCohortTableListener dropCohortTableListener = new DropCohortTableListener(jdbcTemplate, sourceService, sourceAwareSqlRender); + + SimpleJobBuilder generateJobBuilder = new JobBuilder(analysisTypeName, jobRepository) + .start(createCohortTableStep) + 
.next(generateLocalCohortStep) + .next(generateAnalysisStep) + .listener(dropCohortTableListener) + .listener(new AutoremoveJobListener(jobService)); + + return generateJobBuilder; + } + protected void addSessionParams(JobParametersBuilder builder, String sessionId) { builder.addString(SESSION_ID, sessionId); builder.addString(TARGET_TABLE, GenerationUtils.getTempCohortTableName(sessionId)); diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/FeAnalysisController.java b/src/main/java/org/ohdsi/webapi/feanalysis/FeAnalysisController.java new file mode 100644 index 0000000000..d80c9da0ac --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/FeAnalysisController.java @@ -0,0 +1,271 @@ +package org.ohdsi.webapi.feanalysis; + +import org.ohdsi.analysis.cohortcharacterization.design.FeatureAnalysis; +import org.ohdsi.analysis.cohortcharacterization.design.StandardFeatureAnalysisDomain; +import org.ohdsi.webapi.Pagination; +import org.ohdsi.webapi.common.OptionDTO; +import org.ohdsi.webapi.conceptset.ConceptSetExport; +import org.ohdsi.webapi.feanalysis.domain.*; +import org.ohdsi.webapi.feanalysis.dto.FeAnalysisAggregateDTO; +import org.ohdsi.webapi.feanalysis.dto.FeAnalysisDTO; +import org.ohdsi.webapi.feanalysis.dto.FeAnalysisShortDTO; +import org.ohdsi.webapi.security.authz.AuthorizationService; +import org.springframework.security.access.prepost.PreAuthorize; +import org.ohdsi.webapi.security.authz.access.EntityType; +import org.ohdsi.webapi.security.authz.access.AccessType; +import org.ohdsi.webapi.util.ExceptionUtils; +import org.ohdsi.webapi.util.ExportUtil; +import org.ohdsi.webapi.util.HttpUtils; +import org.ohdsi.webapi.util.NameUtils; +import org.springframework.core.convert.ConversionService; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.http.MediaType; +import org.springframework.http.ResponseEntity; 
+import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.*; +import org.springframework.web.server.ResponseStatusException; +import java.io.ByteArrayOutputStream; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + +@RequestMapping("/feature-analysis") +@RestController +public class FeAnalysisController { + + private FeAnalysisService service; + private ConversionService conversionService; + private AuthorizationService authorizationService; + + FeAnalysisController( + final FeAnalysisService service, + final ConversionService conversionService, + AuthorizationService authorizationService) { + this.service = service; + this.conversionService = conversionService; + this.authorizationService = authorizationService; + } + + /** + * Get a pagable list of all feature analyses available in WebAPI + * @summary Feature analyses in WebAPI + * @param pageable + * @return + */ + @GetMapping(produces = MediaType.APPLICATION_JSON_VALUE) + public Page list(@Pagination Pageable pageable) { + return service.getPage(pageable).map(entity -> { + FeAnalysisShortDTO dto = convertFeAnaysisToShortDto(entity); + //TODO: figure out populating permissions on lists + // AuthorizationService.fillWriteAccess(entity, dto); + return dto; + }); + } + + /** + * Does a feature analysis name already exist? + * @param id The id for a new feature analysis that does not already exist + * @param name The desired name for the new feature analysis + * @return 1 if the name conflicts with an existing feature analysis name and 0 otherwise + */ + @GetMapping(value = "/{id}/exists", produces = MediaType.APPLICATION_JSON_VALUE) + public int getCountFeWithSameName(@PathVariable(value = "id", required = false) final int id, @RequestParam("name") String name) { + return service.getCountFeWithSameName(id, name); + } + + /** + * Feature analysis domains + * @return Feature analysis domains such as DRUG, DRUG_ERA, MEASUREMENT, etc. 
+ */ + @GetMapping(value = "/domains", produces = MediaType.APPLICATION_JSON_VALUE) + public List listDomains() { + + List options = new ArrayList<>(); + for(StandardFeatureAnalysisDomain enumEntry: StandardFeatureAnalysisDomain.values()) { + options.add(new OptionDTO(enumEntry.name(), enumEntry.getName())); + } + return options; + } + + /** + * Create a new feature analysis + * @param dto Feature analysis specification + * @return + */ + @PostMapping(produces = MediaType.APPLICATION_JSON_VALUE, consumes = MediaType.APPLICATION_JSON_VALUE) + @PreAuthorize("isPermitted('create:feature-analysis')") + public FeAnalysisDTO createAnalysis(@RequestBody final FeAnalysisDTO dto) { + final FeAnalysisEntity createdEntity = service.createAnalysis(conversionService.convert(dto, FeAnalysisEntity.class)); + return convertFeAnalysisToDto(createdEntity); + } + + /** + * Update an existing feature analysis + * @param feAnalysisId ID of Feature analysis to update + * @param dto Feature analysis specification + * @return + */ + @PutMapping(value = "/{id}", produces = MediaType.APPLICATION_JSON_VALUE, consumes = MediaType.APPLICATION_JSON_VALUE) + @PreAuthorize("isOwner(#feAnalysisId, FE_ANALYSIS) or isPermitted('write:feature-analysis') or hasEntityAccess(#feAnalysisId, FE_ANALYSIS, WRITE)") + public FeAnalysisDTO updateAnalysis(@PathVariable("id") final Integer feAnalysisId, @RequestBody final FeAnalysisDTO dto) { + final FeAnalysisEntity updatedEntity = service.updateAnalysis(feAnalysisId, conversionService.convert(dto, FeAnalysisEntity.class)); + return convertFeAnalysisToDto(updatedEntity); + } + + /** + * Delete a feature analysis + * @param feAnalysisId ID of feature analysis to delete + */ + @DeleteMapping(value = "/{id}", produces = MediaType.APPLICATION_JSON_VALUE) + @PreAuthorize("isOwner(#feAnalysisId, FE_ANALYSIS) or isPermitted('write:feature-analysis') or hasEntityAccess(#feAnalysisId, FE_ANALYSIS, WRITE)") + public void deleteAnalysis(@PathVariable("id") final Integer 
feAnalysisId) { + final FeAnalysisEntity entity = service.findById(feAnalysisId).orElse(null); + ExceptionUtils.throwNotFoundExceptionIfNull(entity, String.format("There is no feature analysis with id = %d.", feAnalysisId)); + service.deleteAnalysis(entity); + } + + /** + * Get data about a specific feature analysis + * @param feAnalysisId ID of feature analysis to retrieve + * @return ID, type, name domain, description, etc of feature analysis + */ + @GetMapping(value = "/{id}", produces = MediaType.APPLICATION_JSON_VALUE) + @Transactional + @PreAuthorize("isOwner(#feAnalysisId, FE_ANALYSIS) or isPermitted('read:feature-analysis') or isPermitted('write:feature-analysis') or hasEntityAccess(#feAnalysisId, FE_ANALYSIS, READ)") + public FeAnalysisDTO getFeAnalysis(@PathVariable("id") final Integer feAnalysisId) { + final FeAnalysisEntity feAnalysis = service.findById(feAnalysisId).orElse(null); + ExceptionUtils.throwNotFoundExceptionIfNull(feAnalysis, String.format("There is no feature analysis with id = %d.", feAnalysisId)); + return convertFeAnalysisToDto(feAnalysis); + } + + @GetMapping(value = "/{id}/export/conceptset", produces = MediaType.APPLICATION_OCTET_STREAM_VALUE) + @PreAuthorize("isOwner(#feAnalysisId, FE_ANALYSIS) or isPermitted('read:feature-analysis') or isPermitted('write:feature-analysis') or hasEntityAccess(#feAnalysisId, FE_ANALYSIS, READ)") + public ResponseEntity exportConceptSets(@PathVariable("id") final Integer feAnalysisId) { + + final FeAnalysisEntity feAnalysis = service.findById(feAnalysisId).orElse(null); + ExceptionUtils.throwNotFoundExceptionIfNull(feAnalysis, String.format("There is no feature analysis with id = %d.", feAnalysisId)); + if (feAnalysis instanceof FeAnalysisWithCriteriaEntity) { + List exportList = service.exportConceptSets((FeAnalysisWithCriteriaEntity) feAnalysis); + + ByteArrayOutputStream stream = ExportUtil.writeConceptSetExportToCSVAndZip(exportList); + return HttpUtils.respondBinary(stream, 
String.format("featureAnalysis_%d_export.zip", feAnalysisId)); + } else { + throw new ResponseStatusException(HttpStatus.BAD_REQUEST); + } + } + + /** + * Create a copy of a feature analysis + * @param feAnalysisId ID of feature analysis to copy + * @return The design specification of the new copy + */ + @GetMapping(value = "/{id}/copy", produces = MediaType.APPLICATION_JSON_VALUE) + @Transactional + @PreAuthorize("(isOwner(#feAnalysisId, FE_ANALYSIS) or isPermitted(anyOf('read:feature-analysis','write:feature-analysis')) or hasEntityAccess(#feAnalysisId, FE_ANALYSIS, READ)) and isPermitted('create:feature-analysis')") + public FeAnalysisDTO copy(@PathVariable("id") final Integer feAnalysisId) { + final FeAnalysisEntity feAnalysis = service.findById(feAnalysisId).orElse(null); + ExceptionUtils.throwNotFoundExceptionIfNull(feAnalysis, String.format("There is no feature analysis with id = %d.", feAnalysisId)); + final FeAnalysisEntity feAnalysisForCopy = getNewEntityForCopy(feAnalysis); + + FeAnalysisEntity saved; + switch (feAnalysis.getType()) { + case CRITERIA_SET: + saved = service.createCriteriaAnalysis((FeAnalysisWithCriteriaEntity) feAnalysisForCopy); + break; + case CUSTOM_FE: + saved = service.createAnalysis(feAnalysisForCopy); + break; + default: + throw new IllegalArgumentException("Analysis with type: " + feAnalysis.getType() + " cannot be copied"); + } + + return convertFeAnalysisToDto(saved); + } + + private FeAnalysisEntity getNewEntityForCopy(FeAnalysisEntity entity) { + FeAnalysisEntity entityForCopy; + switch (entity.getType()) { + case CRITERIA_SET: + switch (entity.getStatType()) { + case PREVALENCE: + entityForCopy = new FeAnalysisWithPrevalenceCriteriaEntity((FeAnalysisWithCriteriaEntity) entity); + break; + case DISTRIBUTION: + entityForCopy = new FeAnalysisWithDistributionCriteriaEntity((FeAnalysisWithCriteriaEntity) entity); + break; + default: + throw new IllegalArgumentException(); + } + + // deep copy of criteria list... 
+ final List criteriaList = new ArrayList<>(); + ((FeAnalysisWithCriteriaEntity) entity).getDesign().forEach(c -> { + final FeAnalysisCriteriaEntity criteria = createCriteriaEntity((FeAnalysisCriteriaEntity) c); + criteria.setName(((FeAnalysisCriteriaEntity) c).getName()); + criteria.setExpressionString(((FeAnalysisCriteriaEntity) c).getExpressionString()); + criteria.setAggregate(((FeAnalysisCriteriaEntity) c).getAggregate()); + criteriaList.add(criteria); + }); + entityForCopy.setDesign(criteriaList); + + // ...and concept sets + final FeAnalysisConcepsetEntity concepsetEntity = new FeAnalysisConcepsetEntity(); + concepsetEntity.setRawExpression(((FeAnalysisWithCriteriaEntity) entity).getConceptSetEntity().getRawExpression()); + ((FeAnalysisWithCriteriaEntity) entityForCopy).setConceptSetEntity(concepsetEntity); + break; + case CUSTOM_FE: + entityForCopy = new FeAnalysisWithStringEntity((FeAnalysisWithStringEntity) entity); + break; + default: + throw new IllegalArgumentException("Analysis with type: " + entity.getType() + " cannot be copied"); + } + entityForCopy.setId(null); + entityForCopy.setName( + NameUtils.getNameForCopy(entityForCopy.getName(), this::getNamesLike, service.findByName(entityForCopy.getName()))); + entityForCopy.setCreatedBy(null); + entityForCopy.setCreatedDate(null); + entityForCopy.setModifiedBy(null); + entityForCopy.setModifiedDate(null); + return entityForCopy; + } + + /** + * Get aggregation functions used in feature analyses + * @return + */ + @GetMapping(value = "/aggregates", produces = MediaType.APPLICATION_JSON_VALUE) + public List listAggregates() { + List result = service.findAggregates().stream() + .map(this::convertFeAnalysisAggregateToDto) + .collect(Collectors.toList()); + return result; + } + + private FeAnalysisShortDTO convertFeAnaysisToShortDto(final FeatureAnalysis entity) { + return conversionService.convert(entity, FeAnalysisShortDTO.class); + } + + private FeAnalysisDTO convertFeAnalysisToDto(final FeatureAnalysis 
entity) { + return conversionService.convert(entity, FeAnalysisDTO.class); + } + + private List getNamesLike(String copyName) { + return service.getNamesLike(copyName); + } + + private FeAnalysisAggregateDTO convertFeAnalysisAggregateToDto(final FeAnalysisAggregateEntity entity) { + return conversionService.convert(entity, FeAnalysisAggregateDTO.class); + } + + private FeAnalysisCriteriaEntity createCriteriaEntity(FeAnalysisCriteriaEntity basis) { + if (basis instanceof FeAnalysisWindowedCriteriaEntity) { + return new FeAnalysisWindowedCriteriaEntity(); + } else if (basis instanceof FeAnalysisDemographicCriteriaEntity) { + return new FeAnalysisDemographicCriteriaEntity(); + } else { + return new FeAnalysisCriteriaGroupEntity(); + } + } +} diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/FeAnalysisDeserializer.java b/src/main/java/org/ohdsi/webapi/feanalysis/FeAnalysisDeserializer.java new file mode 100644 index 0000000000..19fea295da --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/FeAnalysisDeserializer.java @@ -0,0 +1,121 @@ +package org.ohdsi.webapi.feanalysis; + +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.ObjectCodec; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +import com.fasterxml.jackson.databind.type.CollectionType; +import org.ohdsi.analysis.cohortcharacterization.design.CcResultType; +import org.ohdsi.analysis.cohortcharacterization.design.StandardFeatureAnalysisDomain; +import org.ohdsi.analysis.cohortcharacterization.design.StandardFeatureAnalysisType; +import org.ohdsi.circe.cohortdefinition.ConceptSet; +import 
org.ohdsi.webapi.feanalysis.dto.BaseFeAnalysisCriteriaDTO;
+import org.ohdsi.webapi.feanalysis.dto.FeAnalysisDTO;
+import org.ohdsi.webapi.feanalysis.dto.FeAnalysisWithConceptSetDTO;
+
+public class FeAnalysisDeserializer extends JsonDeserializer {
+
+    // need to look around and find a way to override procedure of base mapping
+    // and handle only a design field
+
+    @Override
+    public FeAnalysisDTO deserialize(final JsonParser parser, final DeserializationContext ctxt) throws IOException, JsonProcessingException {
+
+        ObjectCodec codec = parser.getCodec();
+        JsonNode node = codec.readTree(parser);
+        ObjectMapper objectMapper = (ObjectMapper) codec;
+
+        FeAnalysisDTO dto = createDto(node);
+
+        final JsonNode name = node.get("name");
+        if (name != null && !name.isNull()) {
+            dto.setName(name.textValue());
+        }
+
+        final JsonNode description = node.get("description");
+        if (description != null && !description.isNull()) {
+            dto.setDescription(description.textValue());
+        }
+
+        final JsonNode descr = node.get("descr");
+        if (descr != null && !descr.isNull()) {
+            dto.setDescription(descr.textValue());
+        }
+
+        final JsonNode id = node.get("id");
+        if (id != null && !id.isNull()) {
+            dto.setId(id.intValue());
+        }
+
+        final JsonNode domain = node.get("domain");
+        if (domain != null && !domain.isNull()) {
+            final String domainString = domain.textValue();
+            dto.setDomain(StandardFeatureAnalysisDomain.valueOf(domainString));
+        }
+
+        final StandardFeatureAnalysisType analysisType = getType(node);
+        if (analysisType != null) {
+            dto.setType(analysisType);
+
+            final JsonNode design = node.get("design");
+            if (analysisType == StandardFeatureAnalysisType.CRITERIA_SET) {
+                JsonNode statType = node.get("statType");
+                if (statType != null) {
+                    dto.setStatType(CcResultType.valueOf(statType.textValue()));
+                }
+                final List list = new ArrayList<>();
+                for (final JsonNode jsonNode : design) {
+                    list.add(convert(jsonNode, objectMapper));
+                }
+                dto.setDesign(list);
+
+                final JsonNode conceptSets =
node.get("conceptSets"); + if (Objects.nonNull(conceptSets)) { + CollectionType typeRef = objectMapper.getTypeFactory().constructCollectionType(List.class, ConceptSet.class); + List conceptSetList = objectMapper.readValue(conceptSets.traverse(), typeRef); + ((FeAnalysisWithConceptSetDTO)dto).setConceptSets(conceptSetList); + } + } else { + dto.setDesign(design.textValue()); + } + } + + return dto; + } + + private StandardFeatureAnalysisType getType(JsonNode jsonNode) { + final JsonNode type = jsonNode.get("type"); + StandardFeatureAnalysisType result = null; + if (Objects.nonNull(type) && !type.isNull()) { + result = StandardFeatureAnalysisType.valueOf(type.textValue()); + } + return result; + } + + private FeAnalysisDTO createDto(JsonNode jsonNode) { + final StandardFeatureAnalysisType type = getType(jsonNode); + FeAnalysisDTO analysisDTO; + if (Objects.equals(StandardFeatureAnalysisType.CRITERIA_SET, type)) { + analysisDTO = new FeAnalysisWithConceptSetDTO(); + } else { + analysisDTO = new FeAnalysisDTO(); + } + return analysisDTO; + } + + private BaseFeAnalysisCriteriaDTO convert(final JsonNode node, final ObjectMapper objectMapper) { + try { + return objectMapper.treeToValue(node, BaseFeAnalysisCriteriaDTO.class); + } catch (JsonProcessingException e) { + throw new IllegalArgumentException(e); + } + } +} diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/FeAnalysisService.java b/src/main/java/org/ohdsi/webapi/feanalysis/FeAnalysisService.java new file mode 100644 index 0000000000..2d4620604a --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/FeAnalysisService.java @@ -0,0 +1,53 @@ +package org.ohdsi.webapi.feanalysis; + +import org.ohdsi.webapi.cohortcharacterization.domain.CohortCharacterizationEntity; +import org.ohdsi.webapi.conceptset.ConceptSetExport; +import org.ohdsi.webapi.feanalysis.domain.FeAnalysisAggregateEntity; +import org.ohdsi.webapi.feanalysis.domain.FeAnalysisCriteriaEntity; +import 
org.ohdsi.webapi.feanalysis.domain.FeAnalysisEntity; +import org.ohdsi.webapi.feanalysis.domain.FeAnalysisWithCriteriaEntity; +import org.ohdsi.webapi.feanalysis.domain.FeAnalysisWithStringEntity; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; + +import java.util.Collection; +import java.util.List; +import java.util.Optional; +import java.util.Set; + +public interface FeAnalysisService { + + Page getPage(final Pageable pageable); + + int getCountFeWithSameName(Integer id, String name); + + List findPresetAnalysesBySystemNames(Collection names); + + FeAnalysisEntity createAnalysis(FeAnalysisEntity analysis); + + Optional findById(Integer id); + + Optional findByName(String name); + + FeAnalysisWithCriteriaEntity createCriteriaAnalysis(FeAnalysisWithCriteriaEntity analysis); + + Set findByCohortCharacterization(CohortCharacterizationEntity cohortCharacterization); + + List findAllPresetAnalyses(); + + FeAnalysisEntity updateAnalysis(Integer feAnalysisId, FeAnalysisEntity convert); + + void deleteAnalysis(FeAnalysisEntity entity); + + void deleteAnalysis(int id); + + List getNamesLike(String name); + + List exportConceptSets(FeAnalysisWithCriteriaEntity analysisEntity); + + Optional findByDesignAndName(FeAnalysisWithStringEntity withStringEntity, final String name); + + Optional findByCriteriaListAndCsAndDomainAndStat(List newCriteriaList, FeAnalysisWithCriteriaEntity feAnalysis); + + List findAggregates(); +} diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/FeAnalysisServiceImpl.java b/src/main/java/org/ohdsi/webapi/feanalysis/FeAnalysisServiceImpl.java new file mode 100644 index 0000000000..040c275182 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/FeAnalysisServiceImpl.java @@ -0,0 +1,278 @@ +package org.ohdsi.webapi.feanalysis; + +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import 
org.ohdsi.analysis.cohortcharacterization.design.CcResultType; +import org.ohdsi.analysis.cohortcharacterization.design.StandardFeatureAnalysisType; +import org.ohdsi.circe.cohortdefinition.ConceptSet; +import org.ohdsi.webapi.cohortcharacterization.domain.CohortCharacterizationEntity; +import org.ohdsi.webapi.conceptset.ConceptSetExport; +import org.ohdsi.webapi.feanalysis.domain.*; +import org.ohdsi.webapi.feanalysis.event.FeAnalysisChangedEvent; +import org.ohdsi.webapi.feanalysis.repository.FeAnalysisAggregateRepository; +import org.ohdsi.webapi.feanalysis.repository.FeAnalysisCriteriaRepository; +import org.ohdsi.webapi.feanalysis.repository.FeAnalysisEntityRepository; +import org.ohdsi.webapi.feanalysis.repository.FeAnalysisWithStringEntityRepository; +import org.ohdsi.webapi.source.SourceInfo; +import org.ohdsi.webapi.util.EntityUtils; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import org.springframework.web.server.ResponseStatusException; + +import com.cosium.spring.data.jpa.entity.graph.domain2.EntityGraph; + +import org.springframework.http.HttpStatus; +import java.util.*; +import java.util.function.Predicate; +import java.util.stream.Collectors; +import org.ohdsi.webapi.service.AbstractDaoService; +import org.ohdsi.webapi.service.VocabularyService; + +@Service +@Transactional(readOnly = true) +public class FeAnalysisServiceImpl extends AbstractDaoService implements FeAnalysisService { + + private final FeAnalysisEntityRepository analysisRepository; + private final FeAnalysisCriteriaRepository criteriaRepository; + private final FeAnalysisWithStringEntityRepository stringAnalysisRepository; + private final VocabularyService vocabularyService; + + private final ApplicationEventPublisher eventPublisher; + private 
FeAnalysisAggregateRepository aggregateRepository; + + private final EntityGraph defaultEntityGraph = EntityUtils.fromAttributePaths( + "createdBy", + "modifiedBy" + ); + + public FeAnalysisServiceImpl( + final FeAnalysisEntityRepository analysisRepository, + final FeAnalysisCriteriaRepository criteriaRepository, + final FeAnalysisWithStringEntityRepository stringAnalysisRepository, + final VocabularyService vocabularyService, + final FeAnalysisAggregateRepository aggregateRepository, + final ApplicationEventPublisher eventPublisher) { + this.analysisRepository = analysisRepository; + this.criteriaRepository = criteriaRepository; + this.stringAnalysisRepository = stringAnalysisRepository; + this.vocabularyService = vocabularyService; + this.aggregateRepository = aggregateRepository; + this.eventPublisher = eventPublisher; + } + + @Override + public Page getPage(final Pageable pageable) { + return analysisRepository.findAll(pageable, defaultEntityGraph); + } + + @Override + public int getCountFeWithSameName(Integer id, String name){ + return analysisRepository.getCountFeWithSameName(id, name); + } + + @Override + public List findPresetAnalysesBySystemNames(Collection names) { + return stringAnalysisRepository.findByDesignIn(names); + } + + @Override + @Transactional + public FeAnalysisEntity createAnalysis(final FeAnalysisEntity analysis) { + if (analysis.getStatType() == null) { + analysis.setStatType(CcResultType.PREVALENCE); + } + return saveNew(analysis); + } + + @Override + public Optional findById(Integer id) { + return analysisRepository.findById(id, defaultEntityGraph); + } + + @Override + public Optional findByName(String name) { + return analysisRepository.findByName(name); + } + + @Override + @Transactional + public FeAnalysisWithCriteriaEntity createCriteriaAnalysis(final FeAnalysisWithCriteriaEntity analysis) { + FeAnalysisWithCriteriaEntity newAnalysis = newAnalysis(analysis); + newAnalysis.setDesign(Collections.emptyList()); + final 
FeAnalysisWithCriteriaEntity entityWithMainFields = saveNew(newAnalysis); + if (createOrUpdateConceptSetEntity(entityWithMainFields, analysis.getConceptSetEntity())) { + analysisRepository.save(entityWithMainFields); + } + final List criteriaList = createCriteriaListForAnalysis(entityWithMainFields, analysis.getDesign()); + entityWithMainFields.setDesign(criteriaList); + return entityWithMainFields; + } + + private boolean createOrUpdateConceptSetEntity(FeAnalysisWithCriteriaEntity analysis, FeAnalysisConcepsetEntity modifiedConceptSet) { + + if (Objects.nonNull(modifiedConceptSet)) { + FeAnalysisConcepsetEntity concepsetEntity = Optional.ofNullable(analysis.getConceptSetEntity()) + .orElseGet(FeAnalysisConcepsetEntity::new); + concepsetEntity.setFeatureAnalysis(analysis); + concepsetEntity.setRawExpression(modifiedConceptSet.getRawExpression()); + analysis.setConceptSetEntity(concepsetEntity); + return true; + } else { + return false; + } + } + + private T saveNew(T entity) { + entity.setCreatedBy(getCurrentUser()); + entity.setCreatedDate(new Date()); + return analysisRepository.saveAndFlush(entity); + } + + private FeAnalysisWithCriteriaEntity newAnalysis(final FeAnalysisWithCriteriaEntity analysis) { + if (Objects.equals(analysis.getStatType(), CcResultType.PREVALENCE)) { + return new FeAnalysisWithPrevalenceCriteriaEntity(analysis); + } else if (Objects.equals(analysis.getStatType(), CcResultType.DISTRIBUTION)) { + return new FeAnalysisWithDistributionCriteriaEntity(analysis); + } + throw new IllegalArgumentException(); + } + + private List createCriteriaListForAnalysis(final FeAnalysisWithCriteriaEntity analysis, final List design) { + return design.stream() + .peek(criteria -> criteria.setFeatureAnalysis(analysis)) + .map(criteria -> criteriaRepository.save(criteria)) + .collect(Collectors.toList()); + } + + @Override + public Set findByCohortCharacterization(final CohortCharacterizationEntity cohortCharacterization) { + return 
analysisRepository.findAllByCohortCharacterizations(cohortCharacterization); + } + + @Override + public List findAllPresetAnalyses() { + return analysisRepository.findAllByType(StandardFeatureAnalysisType.PRESET).stream().map(a -> (FeAnalysisWithStringEntity) a).collect(Collectors.toList()); + } + + @Override + @Transactional + public FeAnalysisEntity updateAnalysis(Integer feAnalysisId, FeAnalysisEntity updatedEntity) { + + FeAnalysisEntity savedEntity = findById(feAnalysisId).orElseThrow(); + + checkEntityLocked(savedEntity); + savedEntity.setDescr(updatedEntity.getDescr()); + if (savedEntity instanceof FeAnalysisWithCriteriaEntity && updatedEntity instanceof FeAnalysisWithCriteriaEntity) { + FeAnalysisWithCriteriaEntity updatedWithCriteriaEntity = (FeAnalysisWithCriteriaEntity) updatedEntity, + savedWithCriteria = (FeAnalysisWithCriteriaEntity) savedEntity; + removeFeAnalysisCriteriaEntities(savedWithCriteria, updatedWithCriteriaEntity); + updatedWithCriteriaEntity.getDesign().forEach(criteria -> criteria.setFeatureAnalysis(savedWithCriteria)); + createOrUpdateConceptSetEntity((FeAnalysisWithCriteriaEntity) savedEntity, updatedWithCriteriaEntity.getConceptSetEntity()); + } + savedEntity.setDesign(updatedEntity.getDesign()); + if (Objects.nonNull(updatedEntity.getDomain())) { + savedEntity.setDomain(updatedEntity.getDomain()); + } + savedEntity.setLocked(updatedEntity.getLocked()); + if (StringUtils.isNotEmpty(updatedEntity.getName())) { + savedEntity.setName(updatedEntity.getName()); + } + if (updatedEntity.getStatType() != null) { + savedEntity.setStatType(updatedEntity.getStatType()); + } + if (Objects.nonNull(updatedEntity.getType())) { + savedEntity.setType(updatedEntity.getType()); + } + savedEntity.setModifiedBy(getCurrentUser()); + savedEntity.setModifiedDate(new Date()); + savedEntity = analysisRepository.save(savedEntity); + eventPublisher.publishEvent(new FeAnalysisChangedEvent(savedEntity)); + return savedEntity; + } + + private void 
removeFeAnalysisCriteriaEntities(FeAnalysisWithCriteriaEntity original, FeAnalysisWithCriteriaEntity updated) { + + List removed = original.getDesign().stream() + .filter(c -> updated.getDesign().stream().noneMatch(u -> Objects.equals(c.getId(), u.getId()))) + .collect(Collectors.toList()); + criteriaRepository.deleteAll(removed); + } + + @Override + @Transactional + public void deleteAnalysis(FeAnalysisEntity entity) { + checkEntityLocked(entity); + analysisRepository.delete(entity); + } + + @Override + @Transactional + public void deleteAnalysis(int id) { + deleteAnalysis(analysisRepository.findById(id).orElseThrow(() -> new RuntimeException("There is no Feature Analysis with id = " + id))); + } + + @Override + public List getNamesLike(String name) { + return analysisRepository.findAllByNameStartsWith(name).stream().map(FeAnalysisEntity::getName).collect(Collectors.toList()); + } + + @Override + public List exportConceptSets(FeAnalysisWithCriteriaEntity analysisEntity) { + + SourceInfo sourceInfo = new SourceInfo(vocabularyService.getPriorityVocabularySource()); + List conceptSets = analysisEntity.getConceptSets(); + return conceptSets.stream() + .map(cs -> vocabularyService.exportConceptSet(cs, sourceInfo)) + .collect(Collectors.toList()); + } + + @Override + public Optional findByDesignAndName(final FeAnalysisWithStringEntity withStringEntity, final String name) { + return this.findByDesignAndPredicate(withStringEntity.getDesign(), f -> Objects.equals(f.getName(), name)); + } + + @Override + public Optional findByCriteriaListAndCsAndDomainAndStat(List newCriteriaList, FeAnalysisWithCriteriaEntity newFeAnalysis) { + Map> feAnalysisEntityListMap = newCriteriaList.stream() + .map(c -> criteriaRepository.findAllByExpressionString(c.getExpressionString())) + .flatMap(List::stream).collect(Collectors.groupingBy(FeAnalysisCriteriaEntity::getFeatureAnalysis)); + return feAnalysisEntityListMap.entrySet().stream().filter(e -> { + FeAnalysisWithCriteriaEntity feAnalysis = 
e.getKey(); + return checkCriteriaList(e.getValue(), newCriteriaList) && + CollectionUtils.isEqualCollection(feAnalysis.getConceptSets(), newFeAnalysis.getConceptSets()) && + feAnalysis.getDomain().equals(newFeAnalysis.getDomain()) && + feAnalysis.getStatType().equals(newFeAnalysis.getStatType()); + }).findAny().map(Map.Entry::getKey); + } + + private boolean checkCriteriaList(List curCriteriaList, List newCriteriaList) { + List currentList = curCriteriaList.stream().map(FeAnalysisCriteriaEntity::getExpressionString).collect(Collectors.toList()); + List newList = newCriteriaList.stream().map(FeAnalysisCriteriaEntity::getExpressionString).collect(Collectors.toList()); + return CollectionUtils.isEqualCollection(currentList, newList); + } + + private Optional findByDesignAndPredicate(final String design, final Predicate f) { + List detailsFromDb = stringAnalysisRepository.findByDesign(design); + return detailsFromDb + .stream() + .filter(f) + .findFirst(); + } + + + private void checkEntityLocked(FeAnalysisEntity entity) { + if (entity.getLocked() == Boolean.TRUE) { + throw new IllegalArgumentException(String.format("Feature analysis %s is locked.", entity.getName())); + } + } + + @Override + public List findAggregates() { + + return aggregateRepository.findAll(); + } +} diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/converter/BaseFeAnalysisDTOToFeAnalysisConverter.java b/src/main/java/org/ohdsi/webapi/feanalysis/converter/BaseFeAnalysisDTOToFeAnalysisConverter.java new file mode 100644 index 0000000000..6063655470 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/converter/BaseFeAnalysisDTOToFeAnalysisConverter.java @@ -0,0 +1,26 @@ +package org.ohdsi.webapi.feanalysis.converter; + +import org.apache.commons.lang3.StringUtils; +import org.ohdsi.webapi.converter.BaseConversionServiceAwareConverter; +import org.ohdsi.webapi.feanalysis.domain.FeAnalysisEntity; +import org.ohdsi.webapi.feanalysis.dto.FeAnalysisShortDTO; + +public abstract class 
BaseFeAnalysisDTOToFeAnalysisConverter + extends BaseConversionServiceAwareConverter { + + @Override + public T convert(D source) { + final T result = createResultObject(source); + + result.setId(source.getId()); + result.setDescr(source.getDescription()); + result.setDomain(source.getDomain()); + result.setName(StringUtils.trim(source.getName())); + result.setType(source.getType()); + result.setStatType(source.getStatType()); + result.setSupportsAnnual(source.getSupportsAnnual()); + result.setSupportsTemporal(source.getSupportsTemporal()); + + return result; + } +} diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/converter/BaseFeAnalysisEntityToFeAnalysisDTOConverter.java b/src/main/java/org/ohdsi/webapi/feanalysis/converter/BaseFeAnalysisEntityToFeAnalysisDTOConverter.java new file mode 100644 index 0000000000..65e9144031 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/converter/BaseFeAnalysisEntityToFeAnalysisDTOConverter.java @@ -0,0 +1,21 @@ +package org.ohdsi.webapi.feanalysis.converter; + +import org.ohdsi.webapi.feanalysis.domain.FeAnalysisEntity; +import org.ohdsi.webapi.feanalysis.dto.FeAnalysisShortDTO; +import org.ohdsi.webapi.service.converters.BaseCommonEntityToDTOConverter; + +public abstract class BaseFeAnalysisEntityToFeAnalysisDTOConverter extends + BaseCommonEntityToDTOConverter, T> { + + @Override + public void doConvert(FeAnalysisEntity source, T target) { + target.setType(source.getType()); + target.setName(source.getName()); + target.setId(source.getId()); + target.setDomain(source.getDomain()); + target.setDescription(source.getDescr()); + target.setStatType(source.getStatType()); + target.setSupportsAnnual(source.getSupportsAnnual()); + target.setSupportsTemporal(source.getSupportsTemporal()); + } +} diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/converter/CcFeAnalysisEntityToFeAnalysisShortDTOConverter.java b/src/main/java/org/ohdsi/webapi/feanalysis/converter/CcFeAnalysisEntityToFeAnalysisShortDTOConverter.java 
new file mode 100644 index 0000000000..c43a985aeb --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/converter/CcFeAnalysisEntityToFeAnalysisShortDTOConverter.java @@ -0,0 +1,23 @@ +package org.ohdsi.webapi.feanalysis.converter; + +import org.ohdsi.webapi.cohortcharacterization.domain.CcFeAnalysisEntity; +import org.ohdsi.webapi.converter.BaseConversionServiceAwareConverter; +import org.ohdsi.webapi.feanalysis.dto.FeAnalysisShortDTO; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.core.convert.support.GenericConversionService; +import org.springframework.stereotype.Component; + +@Component +public class CcFeAnalysisEntityToFeAnalysisShortDTOConverter extends BaseConversionServiceAwareConverter { + + @Autowired + private GenericConversionService conversionService; + + @Override + public FeAnalysisShortDTO convert(CcFeAnalysisEntity source) { + FeAnalysisShortDTO dto = conversionService.convert(source.getFeatureAnalysis(), FeAnalysisShortDTO.class); + dto.setIncludeAnnual(source.getIncludeAnnual()); + dto.setIncludeTemporal(source.getIncludeTemporal()); + return dto; + } +} diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/converter/CriteriaColumnListConverter.java b/src/main/java/org/ohdsi/webapi/feanalysis/converter/CriteriaColumnListConverter.java new file mode 100644 index 0000000000..b9ccd89c4e --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/converter/CriteriaColumnListConverter.java @@ -0,0 +1,38 @@ +package org.ohdsi.webapi.feanalysis.converter; + +import jakarta.persistence.AttributeConverter; +import jakarta.persistence.Converter; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; + +import org.ohdsi.circe.cohortdefinition.builders.CriteriaColumn; + +@Converter(autoApply = false) +public class CriteriaColumnListConverter implements AttributeConverter, String> { + + @Override + public String 
convertToDatabaseColumn(List attribute) { + if (attribute == null || attribute.isEmpty()) { + return null; // match old behavior (likely null, not empty string) + } + + return attribute.stream() + .map(Enum::name) + .collect(Collectors.joining(",")); + } + + @Override + public List convertToEntityAttribute(String dbData) { + if (dbData == null || dbData.isBlank()) { + return Collections.emptyList(); + } + + return Arrays.stream(dbData.split(",")) + .map(String::trim) + .map(CriteriaColumn::valueOf) + .collect(Collectors.toList()); + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/converter/FeAnalysisAggregateDTOToEntityConverter.java b/src/main/java/org/ohdsi/webapi/feanalysis/converter/FeAnalysisAggregateDTOToEntityConverter.java new file mode 100644 index 0000000000..c856fa7eda --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/converter/FeAnalysisAggregateDTOToEntityConverter.java @@ -0,0 +1,34 @@ +package org.ohdsi.webapi.feanalysis.converter; + +import org.ohdsi.webapi.converter.BaseConversionServiceAwareConverter; +import org.ohdsi.webapi.feanalysis.domain.FeAnalysisAggregateEntity; +import org.ohdsi.webapi.feanalysis.dto.FeAnalysisAggregateDTO; +import org.ohdsi.webapi.feanalysis.repository.FeAnalysisAggregateRepository; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import java.util.Objects; + +@Component +public class FeAnalysisAggregateDTOToEntityConverter extends BaseConversionServiceAwareConverter { + + @Autowired + private FeAnalysisAggregateRepository aggregateRepository; + + @Override + protected FeAnalysisAggregateEntity createResultObject(FeAnalysisAggregateDTO feAnalysisAggregateDTO) { + + return new FeAnalysisAggregateEntity(); + } + + @Override + public FeAnalysisAggregateEntity convert(FeAnalysisAggregateDTO dto) { + + if (Objects.nonNull(dto.getId())) { + return aggregateRepository.getOne(dto.getId()); + } else { + return 
aggregateRepository.findDefault().orElse(null); + } + } + +} diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/converter/FeAnalysisDTOToFeAnalysisConverter.java b/src/main/java/org/ohdsi/webapi/feanalysis/converter/FeAnalysisDTOToFeAnalysisConverter.java new file mode 100644 index 0000000000..0b227488dc --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/converter/FeAnalysisDTOToFeAnalysisConverter.java @@ -0,0 +1,37 @@ +package org.ohdsi.webapi.feanalysis.converter; + +import org.ohdsi.webapi.feanalysis.domain.FeAnalysisEntity; +import org.ohdsi.webapi.feanalysis.domain.FeAnalysisWithCriteriaEntity; +import org.ohdsi.webapi.feanalysis.domain.FeAnalysisWithStringEntity; +import org.ohdsi.webapi.feanalysis.dto.FeAnalysisDTO; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.core.convert.ConversionService; +import org.springframework.stereotype.Component; + +import java.util.Objects; + +import static org.ohdsi.analysis.cohortcharacterization.design.StandardFeatureAnalysisType.CRITERIA_SET; + +@Component +public class FeAnalysisDTOToFeAnalysisConverter extends BaseFeAnalysisDTOToFeAnalysisConverter { + + @Autowired + private ConversionService conversionService; + + @Override + public FeAnalysisEntity convert(final FeAnalysisDTO source) { + return super.convert(source); + } + + @Override + protected FeAnalysisEntity createResultObject(final FeAnalysisDTO source) { + if (Objects.equals(source.getType(), CRITERIA_SET)) { + return conversionService.convert(source, FeAnalysisWithCriteriaEntity.class); + } else { + return conversionService.convert(source, FeAnalysisWithStringEntity.class); + } + } + + +} + diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/converter/FeAnalysisDTOToFeAnalysisWithCriteriasConverter.java b/src/main/java/org/ohdsi/webapi/feanalysis/converter/FeAnalysisDTOToFeAnalysisWithCriteriasConverter.java new file mode 100644 index 0000000000..d0943e6b5b --- /dev/null +++ 
b/src/main/java/org/ohdsi/webapi/feanalysis/converter/FeAnalysisDTOToFeAnalysisWithCriteriasConverter.java @@ -0,0 +1,152 @@ +package org.ohdsi.webapi.feanalysis.converter; + +import org.ohdsi.analysis.Utils; +import org.ohdsi.analysis.cohortcharacterization.design.CcResultType; +import org.ohdsi.analysis.cohortcharacterization.design.StandardFeatureAnalysisType; +import org.ohdsi.webapi.feanalysis.domain.*; +import org.ohdsi.webapi.feanalysis.dto.*; +import org.springframework.core.convert.support.GenericConversionService; +import org.springframework.stereotype.Component; + +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +@Component +public class FeAnalysisDTOToFeAnalysisWithCriteriasConverter extends BaseFeAnalysisDTOToFeAnalysisConverter { + + private static final String RESULT_TYPE_IS_NOT_SUPPORTED = "Result type of %s is not supported"; + private static final String DTO_IS_NOT_SUPPORTED = "DTO class is not supported"; + + @Override + public FeAnalysisWithCriteriaEntity convert(final FeAnalysisDTO source) { + final FeAnalysisWithCriteriaEntity baseEntity = super.convert(source); + baseEntity.setStatType(source.getStatType()); + List list = getBuilder(source.getStatType()).buildList(source.getDesign()); + baseEntity.setDesign(list); + baseEntity.getDesign().forEach(c -> c.setFeatureAnalysis(baseEntity)); + if (Objects.equals(StandardFeatureAnalysisType.CRITERIA_SET, source.getType())){ + convert(baseEntity, (FeAnalysisWithConceptSetDTO) source); + } + return baseEntity; + } + + private void convert(FeAnalysisWithCriteriaEntity baseEntity, FeAnalysisWithConceptSetDTO source) { + FeAnalysisConcepsetEntity concepsetEntity = new FeAnalysisConcepsetEntity(); + concepsetEntity.setFeatureAnalysis(baseEntity); + concepsetEntity.setRawExpression(Utils.serialize(source.getConceptSets())); + baseEntity.setConceptSetEntity(concepsetEntity); + } + + @Override + protected FeAnalysisWithCriteriaEntity createResultObject(FeAnalysisDTO source) { 
+ + return getBuilder(source.getStatType()).createFeAnalysisObject(); + } + + interface FeAnalysisBuilder { + FeAnalysisWithCriteriaEntity createFeAnalysisObject(); + List buildList(final Object design); + } + + FeAnalysisBuilder getBuilder(CcResultType statType) { + if (Objects.equals(CcResultType.PREVALENCE, statType)) { + return new FeAnalysisPrevalenceCriteriaBuilder(conversionService); + } else if (Objects.equals(CcResultType.DISTRIBUTION, statType)) { + return new FeAnalysisDistributionCriteriaBuilder(conversionService); + } + throw new IllegalArgumentException(String.format(RESULT_TYPE_IS_NOT_SUPPORTED, statType)); + } + + static abstract class FeAnalysisBuilderSupport implements FeAnalysisBuilder { + + private GenericConversionService conversionService; + + public FeAnalysisBuilderSupport(GenericConversionService conversionService) { + this.conversionService = conversionService; + } + + public List buildList(final Object design) { + List result = new ArrayList<>(); + if (!(design instanceof List)) { + throw new IllegalArgumentException("Design: " + design.toString() + " cannot be converted to Criteria List"); + } else { + for (final Object criteria : (List) design) { + if (!(criteria instanceof BaseFeAnalysisCriteriaDTO)) { + throw new IllegalArgumentException("Object " + criteria.toString() + " cannot be converted to Criteria"); + } else { + final BaseFeAnalysisCriteriaDTO typifiedCriteria = (BaseFeAnalysisCriteriaDTO) criteria; + final T criteriaEntity = newCriteriaEntity(typifiedCriteria); + criteriaEntity.setExpressionString(Utils.serialize(getExpression(typifiedCriteria))); + criteriaEntity.setId(typifiedCriteria.getId()); + criteriaEntity.setName(typifiedCriteria.getName()); + criteriaEntity.setAggregate(conversionService.convert(typifiedCriteria.getAggregate(), FeAnalysisAggregateEntity.class)); + result.add(criteriaEntity); + } + } + } + return result; + } + + protected abstract Object getExpression(BaseFeAnalysisCriteriaDTO typifiedCriteria); + + 
protected abstract T newCriteriaEntity(BaseFeAnalysisCriteriaDTO typifiedCriteria); + } + + static class FeAnalysisPrevalenceCriteriaBuilder extends FeAnalysisBuilderSupport{ + + public FeAnalysisPrevalenceCriteriaBuilder(GenericConversionService conversionService) { + super(conversionService); + } + + @Override + public FeAnalysisWithCriteriaEntity createFeAnalysisObject() { + return new FeAnalysisWithPrevalenceCriteriaEntity(); + } + + @Override + protected Object getExpression(BaseFeAnalysisCriteriaDTO typifiedCriteria) { + if (typifiedCriteria instanceof FeAnalysisCriteriaDTO) { + return ((FeAnalysisCriteriaDTO)typifiedCriteria).getExpression(); + } + return null; + } + + @Override + protected FeAnalysisCriteriaGroupEntity newCriteriaEntity(BaseFeAnalysisCriteriaDTO criteriaDTO) { + return new FeAnalysisCriteriaGroupEntity(); + } + } + + static class FeAnalysisDistributionCriteriaBuilder extends FeAnalysisBuilderSupport { + + public FeAnalysisDistributionCriteriaBuilder(GenericConversionService conversionService) { + super(conversionService); + } + + @Override + public FeAnalysisWithCriteriaEntity createFeAnalysisObject() { + return new FeAnalysisWithDistributionCriteriaEntity(); + } + + @Override + protected Object getExpression(BaseFeAnalysisCriteriaDTO typifiedCriteria) { + if (typifiedCriteria instanceof FeAnalysisWindowedCriteriaDTO) { + return ((FeAnalysisWindowedCriteriaDTO)typifiedCriteria).getExpression(); + } else if (typifiedCriteria instanceof FeAnalysisDemographicCriteriaDTO) { + return ((FeAnalysisDemographicCriteriaDTO)typifiedCriteria).getExpression(); + } + throw new IllegalArgumentException(DTO_IS_NOT_SUPPORTED); + } + + @Override + protected FeAnalysisDistributionCriteriaEntity newCriteriaEntity(BaseFeAnalysisCriteriaDTO criteriaDTO) { + if (criteriaDTO instanceof FeAnalysisWindowedCriteriaDTO) { + return new FeAnalysisWindowedCriteriaEntity(); + } else if (criteriaDTO instanceof FeAnalysisDemographicCriteriaDTO) { + return new 
FeAnalysisDemographicCriteriaEntity(); + } + throw new IllegalArgumentException(DTO_IS_NOT_SUPPORTED); + } + } +} diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/converter/FeAnalysisDTOToFeAnalysisWithStringConverter.java b/src/main/java/org/ohdsi/webapi/feanalysis/converter/FeAnalysisDTOToFeAnalysisWithStringConverter.java new file mode 100644 index 0000000000..7589893525 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/converter/FeAnalysisDTOToFeAnalysisWithStringConverter.java @@ -0,0 +1,24 @@ +package org.ohdsi.webapi.feanalysis.converter; + +import org.ohdsi.analysis.cohortcharacterization.design.StandardFeatureAnalysisType; +import org.ohdsi.webapi.feanalysis.dto.FeAnalysisDTO; +import org.ohdsi.webapi.feanalysis.domain.FeAnalysisWithStringEntity; +import org.springframework.stereotype.Component; + +@Component +public class FeAnalysisDTOToFeAnalysisWithStringConverter extends BaseFeAnalysisDTOToFeAnalysisConverter { + @Override + public FeAnalysisWithStringEntity convert(final FeAnalysisDTO source) { + if (source.getType() != StandardFeatureAnalysisType.CUSTOM_FE && source.getType() != StandardFeatureAnalysisType.PRESET) { + throw new IllegalArgumentException("Only PRESET and CUSTOM_FE analyses can have design of String type"); + } + final FeAnalysisWithStringEntity baseEntity = super.convert(source); + baseEntity.setDesign(String.valueOf(source.getDesign())); + return baseEntity; + } + + @Override + protected FeAnalysisWithStringEntity createResultObject() { + return new FeAnalysisWithStringEntity(); + } +} diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/converter/FeAnalysisEntityToFeAnalysisDTOConverter.java b/src/main/java/org/ohdsi/webapi/feanalysis/converter/FeAnalysisEntityToFeAnalysisDTOConverter.java new file mode 100644 index 0000000000..0291459ae1 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/converter/FeAnalysisEntityToFeAnalysisDTOConverter.java @@ -0,0 +1,87 @@ +package 
org.ohdsi.webapi.feanalysis.converter; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.collections4.CollectionUtils; +import org.ohdsi.webapi.feanalysis.domain.*; +import org.ohdsi.webapi.feanalysis.dto.*; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; +import org.hibernate.Hibernate; + +import java.util.Collections; +import java.util.Optional; +import java.util.stream.Collectors; + +import static org.ohdsi.analysis.cohortcharacterization.design.StandardFeatureAnalysisType.CRITERIA_SET; + +@Component +public class FeAnalysisEntityToFeAnalysisDTOConverter extends BaseFeAnalysisEntityToFeAnalysisDTOConverter { + + @Autowired + private ObjectMapper objectMapper; + + @Override + public FeAnalysisDTO convert(final FeAnalysisEntity source) { + final FeAnalysisDTO dto = super.convert(source); + dto.setDesign(convertDesignToJson(source)); + dto.setSupportsAnnual(source.getSupportsAnnual()); + dto.setSupportsTemporal(source.getSupportsTemporal()); + if (CRITERIA_SET.equals(source.getType())){ + FeAnalysisWithConceptSetDTO dtoWithConceptSet = (FeAnalysisWithConceptSetDTO) dto; + FeAnalysisWithCriteriaEntity sourceWithCriteria = (FeAnalysisWithCriteriaEntity) source; + dtoWithConceptSet.setConceptSets(sourceWithCriteria.getConceptSets()); + } + return dto; + } + + @Override + protected FeAnalysisDTO createResultObject(FeAnalysisEntity feAnalysisEntity) { + return Optional.ofNullable(feAnalysisEntity.getType()).map(type -> { + switch (type) { + case CRITERIA_SET: + return new FeAnalysisWithConceptSetDTO(); + default: + return new FeAnalysisDTO(); + } + }).orElseGet(() -> new FeAnalysisDTO()); + } + + private Object convertDesignToJson(final FeAnalysisEntity source) { + return Optional.ofNullable(source.getType()).map(type -> { + switch (type) { + case CRITERIA_SET: + FeAnalysisWithCriteriaEntity sourceWithCriteria = 
(FeAnalysisWithCriteriaEntity) source; + Hibernate.initialize(sourceWithCriteria.getDesign()); // Explicitly initialize the collection + if (CollectionUtils.isEmpty(sourceWithCriteria.getDesign())) { + return Collections.emptyList(); + } + return sourceWithCriteria.getDesign() + .stream() + .map(this::convertCriteria) + .map(c -> (JsonNode) objectMapper.valueToTree(c)) + .collect(Collectors.toList()); + default: + return source.getDesign(); + } + }).orElseGet(() -> source.getDesign()); + } + + private BaseFeAnalysisCriteriaDTO convertCriteria(FeAnalysisCriteriaEntity criteriaEntity){ + BaseFeAnalysisCriteriaDTO criteriaDTO; + if (criteriaEntity instanceof FeAnalysisCriteriaGroupEntity) { + FeAnalysisCriteriaGroupEntity groupEntity = (FeAnalysisCriteriaGroupEntity) criteriaEntity; + criteriaDTO = new FeAnalysisCriteriaDTO(groupEntity.getId(), groupEntity.getName(), groupEntity.getExpression()); + } else if (criteriaEntity instanceof FeAnalysisWindowedCriteriaEntity) { + FeAnalysisWindowedCriteriaEntity w = (FeAnalysisWindowedCriteriaEntity) criteriaEntity; + criteriaDTO = new FeAnalysisWindowedCriteriaDTO(w.getId(), w.getName(), w.getExpression()); + } else if (criteriaEntity instanceof FeAnalysisDemographicCriteriaEntity) { + FeAnalysisDemographicCriteriaEntity d = (FeAnalysisDemographicCriteriaEntity) criteriaEntity; + criteriaDTO = new FeAnalysisDemographicCriteriaDTO(d.getId(), d.getName(), d.getExpression()); + } else { + throw new IllegalArgumentException(String.format("Cannot convert criteria entity, %s is not supported", criteriaEntity)); + } + criteriaDTO.setAggregate(conversionService.convert(criteriaEntity.getAggregate(), FeAnalysisAggregateDTO.class)); + return criteriaDTO; + } +} diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/converter/FeAnalysisEntityToFeAnalysisShortDTOConverter.java b/src/main/java/org/ohdsi/webapi/feanalysis/converter/FeAnalysisEntityToFeAnalysisShortDTOConverter.java new file mode 100644 index 0000000000..8ec86b21e2 --- 
/dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/converter/FeAnalysisEntityToFeAnalysisShortDTOConverter.java @@ -0,0 +1,13 @@ +package org.ohdsi.webapi.feanalysis.converter; + +import org.ohdsi.webapi.feanalysis.dto.FeAnalysisShortDTO; +import org.springframework.stereotype.Component; + +@Component +public class FeAnalysisEntityToFeAnalysisShortDTOConverter extends BaseFeAnalysisEntityToFeAnalysisDTOConverter { + + @Override + protected FeAnalysisShortDTO createResultObject() { + return new FeAnalysisShortDTO(); + } +} diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/converter/FeAnalysisShortDTOToFeaAnalysisConverter.java b/src/main/java/org/ohdsi/webapi/feanalysis/converter/FeAnalysisShortDTOToFeaAnalysisConverter.java new file mode 100644 index 0000000000..eb9cb58d6c --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/converter/FeAnalysisShortDTOToFeaAnalysisConverter.java @@ -0,0 +1,20 @@ +package org.ohdsi.webapi.feanalysis.converter; + +import org.ohdsi.analysis.cohortcharacterization.design.CcResultType; +import org.ohdsi.analysis.cohortcharacterization.design.StandardFeatureAnalysisType; +import org.ohdsi.webapi.feanalysis.domain.*; +import org.ohdsi.webapi.feanalysis.dto.FeAnalysisShortDTO; +import org.springframework.stereotype.Component; + +import java.util.Objects; + +@Component +public class FeAnalysisShortDTOToFeaAnalysisConverter extends BaseFeAnalysisDTOToFeAnalysisConverter { + + @Override + protected FeAnalysisEntity createResultObject(FeAnalysisShortDTO dto) { + return Objects.equals(dto.getType(), StandardFeatureAnalysisType.CRITERIA_SET) ? + Objects.equals(dto.getStatType(), CcResultType.PREVALENCE) ? 
new FeAnalysisWithPrevalenceCriteriaEntity() : new FeAnalysisWithDistributionCriteriaEntity() + : new FeAnalysisWithStringEntity(); + } +} diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/converter/FeatureAnalysisAggregateToDTOConverter.java b/src/main/java/org/ohdsi/webapi/feanalysis/converter/FeatureAnalysisAggregateToDTOConverter.java new file mode 100644 index 0000000000..e0f1bfffdf --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/converter/FeatureAnalysisAggregateToDTOConverter.java @@ -0,0 +1,39 @@ +package org.ohdsi.webapi.feanalysis.converter; + + +import org.ohdsi.analysis.cohortcharacterization.design.FeatureAnalysisAggregate; +import org.ohdsi.analysis.cohortcharacterization.design.StandardFeatureAnalysisDomain; +import org.ohdsi.webapi.arachne.commons.converter.BaseConvertionServiceAwareConverter; +import org.ohdsi.webapi.feanalysis.domain.FeAnalysisAggregateEntity; +import org.ohdsi.webapi.feanalysis.dto.FeAnalysisAggregateDTO; +import org.springframework.stereotype.Component; + +@Component +public class FeatureAnalysisAggregateToDTOConverter extends BaseConvertionServiceAwareConverter { + + @Override + protected FeAnalysisAggregateDTO createResultObject(FeatureAnalysisAggregate featureAnalysisAggregate) { + + return new FeAnalysisAggregateDTO(); + } + + @Override + protected void convert(FeatureAnalysisAggregate source, FeAnalysisAggregateDTO dto) { + + if (source instanceof FeAnalysisAggregateEntity) { + dto.setId(source.getId()); + } + dto.setDomain((StandardFeatureAnalysisDomain) source.getDomain()); + dto.setName(source.getName()); + dto.setExpression(source.getExpression()); + dto.setFunction(source.getFunction()); + dto.setJoinTable(source.getJoinTable()); + dto.setJoinType(source.getJoinType()); + dto.setJoinCondition(source.getJoinCondition()); + dto.setAdditionalColumns(source.getAdditionalColumns()); + if (source instanceof FeAnalysisAggregateEntity) { + dto.setDefault(((FeAnalysisAggregateEntity) source).isDefault()); + } 
+ dto.setMissingMeansZero(source.isMissingMeansZero()); + } +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisAggregateEntity.java b/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisAggregateEntity.java new file mode 100644 index 0000000000..129b9af9a2 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisAggregateEntity.java @@ -0,0 +1,173 @@ +package org.ohdsi.webapi.feanalysis.domain; + +import org.apache.commons.lang3.StringUtils; +import org.hibernate.annotations.GenericGenerator; +import org.hibernate.annotations.Parameter; +import org.ohdsi.analysis.TableJoin; +import org.ohdsi.analysis.WithId; +import org.ohdsi.analysis.cohortcharacterization.design.AggregateFunction; +import org.ohdsi.analysis.cohortcharacterization.design.FeatureAnalysisAggregate; +import org.ohdsi.analysis.cohortcharacterization.design.StandardFeatureAnalysisDomain; +import org.ohdsi.circe.cohortdefinition.builders.CriteriaColumn; +import org.ohdsi.webapi.feanalysis.converter.CriteriaColumnListConverter; + +import jakarta.persistence.*; +import java.util.List; + +@Entity +@Table(name = "fe_analysis_aggregate") +public class FeAnalysisAggregateEntity implements FeatureAnalysisAggregate, WithId { + + @Id + @GenericGenerator( + name = "fe_aggregate_generator", + strategy = "org.hibernate.id.enhanced.SequenceStyleGenerator", + parameters = { + @Parameter(name = "sequence_name", value = "fe_aggregate_sequence"), + @Parameter(name = "increment_size", value = "1") + } + ) + @GeneratedValue(generator = "fe_aggregate_generator") + private Integer id; + + @Column + private String name; + + @Column + @Enumerated(value = EnumType.STRING) + private StandardFeatureAnalysisDomain domain; + + @Column(name = "agg_function") + @Enumerated(value = EnumType.STRING) + private AggregateFunction function; + + @Column + private String expression; + + @Column(name = "join_table") + private String joinTable; + + @Column(name = 
"join_type") + @Enumerated(EnumType.STRING) + private TableJoin joinType; + + @Column(name = "join_condition") + private String joinCondition; + + @Column(name = "is_default") + private boolean isDefault; + + @Column(name = "missing_means_zero") + private boolean isMissingMeansZero; + + @Column(name = "criteria_columns") + @Convert(converter = CriteriaColumnListConverter.class) + private List columns; + + @Override + public Integer getId() { + + return id; + } + + public void setId(Integer id) { + + this.id = id; + } + + public String getName() { + + return name; + } + + public void setName(String name) { + + this.name = name; + } + + public StandardFeatureAnalysisDomain getDomain() { + + return domain; + } + + public void setDomain(StandardFeatureAnalysisDomain domain) { + + this.domain = domain; + } + + public AggregateFunction getFunction() { + + return function; + } + + @Override + public List getAdditionalColumns() { + + return columns; + } + + public void setCriteriaColumns(List columns) { + this.columns = columns; + } + + public void setFunction(AggregateFunction function) { + + this.function = function; + } + + public String getExpression() { + + return expression; + } + + @Override + public boolean hasQuery() { + + return StringUtils.isNotBlank(this.joinTable); + } + + public void setExpression(String expression) { + + this.expression = expression; + } + + public String getJoinTable() { + return joinTable; + } + + public void setJoinTable(String joinTable) { + this.joinTable = joinTable; + } + + public TableJoin getJoinType() { + return joinType; + } + + public void setJoinType(TableJoin joinType) { + this.joinType = joinType; + } + + public String getJoinCondition() { + return joinCondition; + } + + public void setJoinCondition(String joinCondition) { + this.joinCondition = joinCondition; + } + + public boolean isDefault() { + return isDefault; + } + + public void setDefault(boolean aDefault) { + isDefault = aDefault; + } + + public boolean 
isMissingMeansZero() { + return isMissingMeansZero; + } + + public void setMissingMeansZero(boolean aDefault) { + isMissingMeansZero = aDefault; + } +} diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisConcepsetEntity.java b/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisConcepsetEntity.java new file mode 100644 index 0000000000..ab07e2ffe2 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisConcepsetEntity.java @@ -0,0 +1,51 @@ +package org.ohdsi.webapi.feanalysis.domain; + +import jakarta.persistence.Entity; +import jakarta.persistence.FetchType; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.OneToOne; +import jakarta.persistence.Table; +import org.hibernate.annotations.GenericGenerator; +import org.hibernate.annotations.Parameter; +import org.ohdsi.webapi.common.CommonConceptSetEntity; + +@Entity +@Table(name = "fe_analysis_conceptset") +public class FeAnalysisConcepsetEntity extends CommonConceptSetEntity { + @Id + @GenericGenerator( + name = "fe_conceptset_generator", + strategy = "org.hibernate.id.enhanced.SequenceStyleGenerator", + parameters = { + @Parameter(name = "sequence_name", value = "fe_conceptset_sequence"), + @Parameter(name = "increment_size", value = "1") + } + ) + @GeneratedValue(generator = "fe_conceptset_generator") + private Long id; + + @OneToOne(fetch = FetchType.LAZY) + @JoinColumn(name = "fe_analysis_id") + private FeAnalysisWithCriteriaEntity featureAnalysis; + + public FeAnalysisConcepsetEntity() { + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public FeAnalysisWithCriteriaEntity getFeatureAnalysis() { + return featureAnalysis; + } + + public void setFeatureAnalysis(FeAnalysisWithCriteriaEntity featureAnalysis) { + this.featureAnalysis = featureAnalysis; + } +} diff --git 
a/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisCriteriaEntity.java b/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisCriteriaEntity.java new file mode 100644 index 0000000000..dbaab3b7b8 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisCriteriaEntity.java @@ -0,0 +1,97 @@ +package org.ohdsi.webapi.feanalysis.domain; + +import jakarta.persistence.Column; +import jakarta.persistence.DiscriminatorColumn; +import jakarta.persistence.Entity; +import jakarta.persistence.FetchType; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.Id; +import jakarta.persistence.Inheritance; +import jakarta.persistence.InheritanceType; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.Lob; +import jakarta.persistence.ManyToOne; +import jakarta.persistence.Table; +import org.hibernate.annotations.DiscriminatorOptions; +import org.hibernate.annotations.GenericGenerator; +import org.hibernate.annotations.JdbcTypeCode; +import org.hibernate.annotations.Parameter; +import org.hibernate.type.SqlTypes; +import org.ohdsi.analysis.WithId; + +@Entity +@Table(name = "fe_analysis_criteria") +@Inheritance(strategy = InheritanceType.SINGLE_TABLE) +@DiscriminatorColumn(name = "criteria_type") +@DiscriminatorOptions(force = false) +public abstract class FeAnalysisCriteriaEntity implements WithId { + + @Id + @GenericGenerator( + name = "fe_analysis_criteria_generator", + strategy = "org.hibernate.id.enhanced.SequenceStyleGenerator", + parameters = { + @Parameter(name = "sequence_name", value = "fe_analysis_criteria_sequence"), + @Parameter(name = "increment_size", value = "1") + } + ) + @GeneratedValue(generator = "fe_analysis_criteria_generator") + private Long id; + + @Column + private String name; + + @Lob + @Column(name = "expression") + @JdbcTypeCode(SqlTypes.VARCHAR) + private String expressionString; + + @ManyToOne(fetch = FetchType.EAGER) + @JoinColumn(name = "fe_aggregate_id") + private 
FeAnalysisAggregateEntity aggregate; + + @ManyToOne(optional = false, targetEntity = FeAnalysisWithCriteriaEntity.class, fetch = FetchType.LAZY) + @JoinColumn(name = "fe_analysis_id") + private FeAnalysisWithCriteriaEntity featureAnalysis; + + public String getName() { + return name; + } + + public void setName(final String name) { + this.name = name; + } + + @Override + public Long getId() { + return id; + } + + public void setId(final Long id) { + this.id = id; + } + + public FeAnalysisWithCriteriaEntity getFeatureAnalysis() { + return featureAnalysis; + } + + public void setFeatureAnalysis(final FeAnalysisWithCriteriaEntity featureAnalysis) { + this.featureAnalysis = featureAnalysis; + } + + public String getExpressionString() { + return expressionString; + } + + public void setExpressionString(final String expressionString) { + this.expressionString = expressionString; + } + + public FeAnalysisAggregateEntity getAggregate() { + return aggregate; + } + + public void setAggregate(FeAnalysisAggregateEntity aggregate) { + this.aggregate = aggregate; + } +} diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisCriteriaGroupEntity.java b/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisCriteriaGroupEntity.java new file mode 100644 index 0000000000..82cc008139 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisCriteriaGroupEntity.java @@ -0,0 +1,23 @@ +package org.ohdsi.webapi.feanalysis.domain; + +import org.ohdsi.analysis.Utils; +import org.ohdsi.analysis.cohortcharacterization.design.CriteriaFeature; +import org.ohdsi.circe.cohortdefinition.CriteriaGroup; + +import jakarta.persistence.DiscriminatorValue; +import jakarta.persistence.Entity; + +@Entity +@DiscriminatorValue("CRITERIA_GROUP") +public class FeAnalysisCriteriaGroupEntity extends FeAnalysisCriteriaEntity implements CriteriaFeature { + + @Override + public CriteriaGroup getExpression() { + return getCriteriaGroup(); + } + + private CriteriaGroup 
getCriteriaGroup() { + return Utils.deserialize(this.getExpressionString(), CriteriaGroup.class); + } + +} diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisDemographicCriteriaEntity.java b/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisDemographicCriteriaEntity.java new file mode 100644 index 0000000000..3ad9d20209 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisDemographicCriteriaEntity.java @@ -0,0 +1,19 @@ +package org.ohdsi.webapi.feanalysis.domain; + +import org.ohdsi.analysis.Utils; +import org.ohdsi.analysis.cohortcharacterization.design.DemographicCriteriaFeature; +import org.ohdsi.circe.cohortdefinition.DemographicCriteria; + +import jakarta.persistence.DiscriminatorValue; +import jakarta.persistence.Entity; + +@Entity +@DiscriminatorValue("DEMOGRAPHIC_CRITERIA") +public class FeAnalysisDemographicCriteriaEntity extends FeAnalysisDistributionCriteriaEntity implements DemographicCriteriaFeature { + + @Override + public DemographicCriteria getExpression() { + + return Utils.deserialize(this.getExpressionString(), DemographicCriteria.class); + } +} diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisDistributionCriteriaEntity.java b/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisDistributionCriteriaEntity.java new file mode 100644 index 0000000000..0951a9c834 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisDistributionCriteriaEntity.java @@ -0,0 +1,8 @@ +package org.ohdsi.webapi.feanalysis.domain; + +import jakarta.persistence.Entity; + +@Entity +public abstract class FeAnalysisDistributionCriteriaEntity extends FeAnalysisCriteriaEntity { + public abstract T getExpression(); +} diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisEntity.java b/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisEntity.java new file mode 100644 index 0000000000..025058840e --- /dev/null +++ 
b/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisEntity.java @@ -0,0 +1,240 @@ +package org.ohdsi.webapi.feanalysis.domain; + +import java.util.HashSet; +import java.util.Objects; +import java.util.Set; +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.EnumType; +import jakarta.persistence.Enumerated; +import jakarta.persistence.FetchType; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.Id; +import jakarta.persistence.Inheritance; +import jakarta.persistence.InheritanceType; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.JoinTable; +import jakarta.persistence.Lob; +import jakarta.persistence.ManyToMany; +import jakarta.persistence.Table; + +import org.apache.commons.lang3.ObjectUtils; +import org.hibernate.annotations.DiscriminatorFormula; +import org.hibernate.annotations.GenericGenerator; +import org.hibernate.annotations.JdbcTypeCode; +import org.hibernate.annotations.Parameter; +import org.hibernate.annotations.Type; +import org.hibernate.type.SqlTypes; +import org.ohdsi.analysis.cohortcharacterization.design.CcResultType; +import org.ohdsi.analysis.cohortcharacterization.design.FeatureAnalysis; +import org.ohdsi.analysis.cohortcharacterization.design.StandardFeatureAnalysisDomain; +import org.ohdsi.analysis.cohortcharacterization.design.StandardFeatureAnalysisType; +import org.ohdsi.webapi.cohortcharacterization.domain.CohortCharacterizationEntity; +import org.ohdsi.webapi.model.CommonEntity; + +@Entity +@Table(name = "fe_analysis") +@Inheritance(strategy = InheritanceType.SINGLE_TABLE) +@DiscriminatorFormula( + "CASE WHEN type = 'CRITERIA_SET' THEN CONCAT(CONCAT(type,'_'),stat_type) " + + "ELSE type END" +) +public abstract class FeAnalysisEntity extends CommonEntity implements FeatureAnalysis, Comparable> { + + public FeAnalysisEntity() { + } + + public FeAnalysisEntity(final FeAnalysisEntity entityForCopy) { + this.id = entityForCopy.id; + this.type = 
entityForCopy.type; + this.name = entityForCopy.name; + this.setDesign(entityForCopy.getDesign()); + this.domain = entityForCopy.domain; + this.descr = entityForCopy.descr; + this.isLocked = entityForCopy.isLocked; + this.statType = entityForCopy.statType; + } + + @Id + @GenericGenerator( + name = "fe_analysis_generator", + strategy = "org.hibernate.id.enhanced.SequenceStyleGenerator", + parameters = { + @Parameter(name = "sequence_name", value = "fe_analysis_sequence"), + @Parameter(name = "increment_size", value = "1") + } + ) + @GeneratedValue(generator = "fe_analysis_generator") + private Integer id; + + @Column + @Enumerated(EnumType.STRING) + private StandardFeatureAnalysisType type; + + @Column + private String name; + + @Lob + @JdbcTypeCode(SqlTypes.VARCHAR) + @Column(name = "design", insertable = false, updatable = false) + private String rawDesign; + + @Column + @Enumerated(EnumType.STRING) + private StandardFeatureAnalysisDomain domain; + + @Column + private String descr; + + @Column(name = "is_locked") + private Boolean isLocked; + + @ManyToMany(targetEntity = CohortCharacterizationEntity.class, fetch = FetchType.LAZY) + @JoinTable(name = "cc_analysis", + joinColumns = @JoinColumn(name = "fe_analysis_id"), + inverseJoinColumns = @JoinColumn(name = "cohort_characterization_id")) + private Set cohortCharacterizations = new HashSet<>(); + + @Column(name = "stat_type") + @Enumerated(value = EnumType.STRING) + private CcResultType statType; + + @Column(name = "supports_annual", updatable = false, insertable = false) + private Boolean supportsAnnual; + + @Column(name = "supports_temporal", updatable = false, insertable = false) + private Boolean supportsTemporal; + + @Override + public Integer getId() { + return id; + } + + @Override + public StandardFeatureAnalysisType getType() { + return type; + } + + @Override + public String getName() { + return name; + } + + @Override + public StandardFeatureAnalysisDomain getDomain() { + return domain; + } + + 
@Override + public String getDescr() { + return descr; + } + + @Override + public abstract T getDesign(); + + public abstract void setDesign(T design); + + public boolean isPreset() { + return this.type == StandardFeatureAnalysisType.PRESET; + } + + public boolean isCustom() { + return this.type == StandardFeatureAnalysisType.CUSTOM_FE; + } + + public boolean isCriteria() { + return this.type == StandardFeatureAnalysisType.CRITERIA_SET; + } + + public void setId(final Integer id) { + this.id = id; + } + + public void setType(final StandardFeatureAnalysisType type) { + this.type = type; + } + + public void setName(final String name) { + this.name = name; + } + + public void setDomain(final StandardFeatureAnalysisDomain domain) { + this.domain = domain; + } + + public void setDescr(final String descr) { + this.descr = descr; + } + + public String getRawDesign() { + return rawDesign; + } + + @Override + public boolean equals(final Object o) { + if (this == o) return true; + if (!(o instanceof FeAnalysisEntity)) return false; + final FeAnalysisEntity that = (FeAnalysisEntity) o; + if (getId() != null && that.getId() != null) { + return Objects.equals(getId(), that.getId()); + } else { + return Objects.equals(getType(), that.getType()) && Objects.equals(getDesign(), that.getDesign()); + } + } + + @Override + public int hashCode() { + return Objects.hash(getId()); + } + + public Boolean getLocked() { + return isLocked; + } + + public void setLocked(final Boolean locked) { + isLocked = locked; + } + + public Set getCohortCharacterizations() { + + return cohortCharacterizations; + } + + public void setCohortCharacterizations(final Set cohortCharacterizations) { + + this.cohortCharacterizations = cohortCharacterizations; + } + + @Override + public int compareTo(final FeAnalysisEntity o) { + return ObjectUtils.compare(this.name, o.name); + } + + public CcResultType getStatType() { + + return statType; + } + + public void setStatType(final CcResultType statType) { + + 
this.statType = statType; + } + + public Boolean getSupportsAnnual() { + return supportsAnnual; + } + + public void setSupportsAnnual(Boolean supportsAnnual) { + this.supportsAnnual = supportsAnnual; + } + + public Boolean getSupportsTemporal() { + return supportsTemporal; + } + + public void setSupportsTemporal(Boolean supportsTemporal) { + this.supportsTemporal = supportsTemporal; + } +} + diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisWindowedCriteriaEntity.java b/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisWindowedCriteriaEntity.java new file mode 100644 index 0000000000..71fafce6ce --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisWindowedCriteriaEntity.java @@ -0,0 +1,20 @@ +package org.ohdsi.webapi.feanalysis.domain; + +import org.ohdsi.analysis.Utils; +import org.ohdsi.analysis.cohortcharacterization.design.WindowedCriteriaFeature; +import org.ohdsi.circe.cohortdefinition.WindowedCriteria; + +import jakarta.persistence.DiscriminatorValue; +import jakarta.persistence.Entity; + +@Entity +@DiscriminatorValue("WINDOWED_CRITERIA") +public class FeAnalysisWindowedCriteriaEntity extends FeAnalysisDistributionCriteriaEntity implements WindowedCriteriaFeature { + + @Override + public WindowedCriteria getExpression() { + + return Utils.deserialize(this.getExpressionString(), WindowedCriteria.class); + } + +} diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisWithCriteriaEntity.java b/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisWithCriteriaEntity.java new file mode 100644 index 0000000000..e744a34dc4 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisWithCriteriaEntity.java @@ -0,0 +1,51 @@ +package org.ohdsi.webapi.feanalysis.domain; + +import org.ohdsi.analysis.cohortcharacterization.design.FeatureAnalysisWithCriteria; +import org.ohdsi.circe.cohortdefinition.ConceptSet; + +import jakarta.persistence.*; +import java.util.Collections; 
+import java.util.List;
+import java.util.Objects;
+
+@Entity
+public abstract class FeAnalysisWithCriteriaEntity<T extends FeAnalysisCriteriaEntity> extends FeAnalysisEntity<List<T>> implements FeatureAnalysisWithCriteria<T, ConceptSet> {
+
+    @OneToMany(targetEntity = FeAnalysisCriteriaEntity.class, fetch = FetchType.EAGER, mappedBy = "featureAnalysis",
+            cascade = {CascadeType.MERGE, CascadeType.REMOVE, CascadeType.REFRESH, CascadeType.DETACH})
+    private List<T> design;
+
+    @OneToOne(fetch = FetchType.EAGER, mappedBy = "featureAnalysis", cascade = CascadeType.ALL)
+    private FeAnalysisConcepsetEntity conceptSetEntity;
+
+    public FeAnalysisWithCriteriaEntity() {
+        super();
+    }
+
+    public FeAnalysisWithCriteriaEntity(final FeAnalysisWithCriteriaEntity<T> analysis) {
+        super(analysis);
+    }
+
+    @Override
+    public List<T> getDesign() {
+        return design;
+    }
+
+    @Override
+    public void setDesign(List<T> design) {
+        this.design = design;
+    }
+
+    public FeAnalysisConcepsetEntity getConceptSetEntity() {
+        return conceptSetEntity;
+    }
+
+    public void setConceptSetEntity(FeAnalysisConcepsetEntity conceptSetEntity) {
+        this.conceptSetEntity = conceptSetEntity;
+    }
+
+    public List<ConceptSet> getConceptSets() {
+
+        return Objects.nonNull(this.conceptSetEntity) ?
this.conceptSetEntity.getConceptSets() : Collections.emptyList(); + } +} diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisWithDistributionCriteriaEntity.java b/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisWithDistributionCriteriaEntity.java new file mode 100644 index 0000000000..2912fe89df --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisWithDistributionCriteriaEntity.java @@ -0,0 +1,16 @@ +package org.ohdsi.webapi.feanalysis.domain; + +import jakarta.persistence.DiscriminatorValue; +import jakarta.persistence.Entity; + +@Entity +@DiscriminatorValue("CRITERIA_SET_DISTRIBUTION") +public class FeAnalysisWithDistributionCriteriaEntity extends FeAnalysisWithCriteriaEntity { + + public FeAnalysisWithDistributionCriteriaEntity() { + } + + public FeAnalysisWithDistributionCriteriaEntity(FeAnalysisWithCriteriaEntity analysis) { + super(analysis); + } +} diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisWithPrevalenceCriteriaEntity.java b/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisWithPrevalenceCriteriaEntity.java new file mode 100644 index 0000000000..3d25fead1d --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisWithPrevalenceCriteriaEntity.java @@ -0,0 +1,16 @@ +package org.ohdsi.webapi.feanalysis.domain; + +import jakarta.persistence.DiscriminatorValue; +import jakarta.persistence.Entity; + +@Entity +@DiscriminatorValue("CRITERIA_SET_PREVALENCE") +public class FeAnalysisWithPrevalenceCriteriaEntity extends FeAnalysisWithCriteriaEntity { + + public FeAnalysisWithPrevalenceCriteriaEntity() { + } + + public FeAnalysisWithPrevalenceCriteriaEntity(FeAnalysisWithCriteriaEntity analysis) { + super(analysis); + } +} diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisWithStringEntity.java b/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisWithStringEntity.java new file mode 100644 index 0000000000..043563eb7e --- 
/dev/null
+++ b/src/main/java/org/ohdsi/webapi/feanalysis/domain/FeAnalysisWithStringEntity.java
@@ -0,0 +1,35 @@
+package org.ohdsi.webapi.feanalysis.domain;
+
+import jakarta.persistence.DiscriminatorValue;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Lob;
+
+import org.hibernate.annotations.JdbcTypeCode;
+import org.hibernate.type.SqlTypes;
+
+@Entity
+@DiscriminatorValue("not null")
+public class FeAnalysisWithStringEntity extends FeAnalysisEntity<String> {
+    public FeAnalysisWithStringEntity() {
+        super();
+    }
+
+    public FeAnalysisWithStringEntity(final FeAnalysisWithStringEntity analysis) {
+        super(analysis);
+    }
+
+    @Lob
+    @JdbcTypeCode(SqlTypes.VARCHAR)
+    private String design;
+
+    @Override
+    public String getDesign() {
+
+        return design;
+    }
+
+    public void setDesign(final String design) {
+
+        this.design = design;
+    }
+}
diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/dto/BaseFeAnalysisCriteriaDTO.java b/src/main/java/org/ohdsi/webapi/feanalysis/dto/BaseFeAnalysisCriteriaDTO.java
new file mode 100644
index 0000000000..5afe6e06c7
--- /dev/null
+++ b/src/main/java/org/ohdsi/webapi/feanalysis/dto/BaseFeAnalysisCriteriaDTO.java
@@ -0,0 +1,53 @@
+package org.ohdsi.webapi.feanalysis.dto;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonSubTypes;
+import com.fasterxml.jackson.annotation.JsonTypeInfo;
+
+@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "criteriaType", defaultImpl = FeAnalysisCriteriaDTO.class)
+@JsonSubTypes({
+    @JsonSubTypes.Type(value = FeAnalysisCriteriaDTO.class, name = "CriteriaGroup"),
+    @JsonSubTypes.Type(value = FeAnalysisWindowedCriteriaDTO.class, name = "WindowedCriteria"),
+    @JsonSubTypes.Type(value = FeAnalysisDemographicCriteriaDTO.class, name = "DemographicCriteria")
+})
+public abstract class BaseFeAnalysisCriteriaDTO {
+    @JsonProperty("id")
+    private Long id;
+    @JsonProperty("name")
+    private String name;
+
+    @JsonProperty("aggregate")
+    private
FeAnalysisAggregateDTO aggregate; + + public BaseFeAnalysisCriteriaDTO() { + } + + public BaseFeAnalysisCriteriaDTO(Long id, String name) { + this.id = id; + this.name = name; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(final String name) { + this.name = name; + } + + public FeAnalysisAggregateDTO getAggregate() { + return aggregate; + } + + public void setAggregate(FeAnalysisAggregateDTO aggregate) { + this.aggregate = aggregate; + } +} diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisAggregateDTO.java b/src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisAggregateDTO.java new file mode 100644 index 0000000000..f7dccd3c6a --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisAggregateDTO.java @@ -0,0 +1,133 @@ +package org.ohdsi.webapi.feanalysis.dto; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.List; +import org.ohdsi.analysis.TableJoin; +import org.ohdsi.analysis.cohortcharacterization.design.AggregateFunction; +import org.ohdsi.analysis.cohortcharacterization.design.FeatureAnalysisAggregate; +import org.ohdsi.analysis.cohortcharacterization.design.StandardFeatureAnalysisDomain; +import org.ohdsi.circe.cohortdefinition.builders.CriteriaColumn; + +public class FeAnalysisAggregateDTO implements FeatureAnalysisAggregate { + + @JsonProperty("id") + private Integer id; + @JsonProperty("name") + private String name; + @JsonProperty("domain") + private StandardFeatureAnalysisDomain domain; + @JsonProperty("function") + private AggregateFunction function; + @JsonProperty("expression") + private String expression; + @JsonProperty("joinTable") + private String joinTable; + @JsonProperty("joinType") + private TableJoin joinType; + @JsonProperty("joinCondition") + private String joinCondition; + 
@JsonProperty("isDefault") + private boolean isDefault; + @JsonProperty("missingMeansZero") + private boolean missingMeansZero; + @JsonProperty("additionalColumns") + private List columns; + + @Override + public List getAdditionalColumns() { + + return columns; + } + + public void setAdditionalColumns(List columns) { + this.columns = columns; + } + + public Integer getId() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public StandardFeatureAnalysisDomain getDomain() { + return domain; + } + + public void setDomain(StandardFeatureAnalysisDomain domain) { + this.domain = domain; + } + + public AggregateFunction getFunction() { + return function; + } + + public void setFunction(AggregateFunction function) { + this.function = function; + } + + public String getExpression() { + return expression; + } + + public void setExpression(String expression) { + this.expression = expression; + } + + public String getJoinTable() { + return joinTable; + } + + public void setJoinTable(String joinTable) { + this.joinTable = joinTable; + } + + public boolean isDefault() { + return isDefault; + } + + public void setDefault(boolean aDefault) { + isDefault = aDefault; + } + + public TableJoin getJoinType() { + return joinType; + } + + public void setJoinType(TableJoin joinType) { + this.joinType = joinType; + } + + public String getJoinCondition() { + return joinCondition; + } + + public void setJoinCondition(String joinCondition) { + this.joinCondition = joinCondition; + } + + public boolean isMissingMeansZero() { + return missingMeansZero; + } + + public void setMissingMeansZero(boolean missingMeansZero) { + this.missingMeansZero = missingMeansZero; + } + @JsonIgnore + @Override + /* this is required by the interface, although not used anywhere */ + public boolean hasQuery() { + return false; + } + +} diff --git 
a/src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisCriteriaDTO.java b/src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisCriteriaDTO.java new file mode 100644 index 0000000000..bffccc9060 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisCriteriaDTO.java @@ -0,0 +1,40 @@ +package org.ohdsi.webapi.feanalysis.dto; + +import com.fasterxml.jackson.annotation.JsonProperty; +import org.ohdsi.circe.cohortdefinition.CriteriaGroup; + +public class FeAnalysisCriteriaDTO extends BaseFeAnalysisCriteriaDTO { + @JsonProperty("expression") + private CriteriaGroup expression; + + @JsonProperty("aggregate") + private FeAnalysisAggregateDTO aggregate; + + public FeAnalysisCriteriaDTO() { + + } + + public FeAnalysisCriteriaDTO(Long id, String name, CriteriaGroup expression) { + + super(id, name); + this.expression = expression; + } + + public CriteriaGroup getExpression() { + return expression; + } + + public void setExpression(final CriteriaGroup expression) { + this.expression = expression; + } + + @Override + public FeAnalysisAggregateDTO getAggregate() { + return aggregate; + } + + @Override + public void setAggregate(FeAnalysisAggregateDTO aggregate) { + this.aggregate = aggregate; + } +} diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisDTO.java b/src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisDTO.java new file mode 100644 index 0000000000..00cca2637a --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisDTO.java @@ -0,0 +1,41 @@ +package org.ohdsi.webapi.feanalysis.dto; + +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import org.ohdsi.analysis.cohortcharacterization.design.FeatureAnalysis; +import org.ohdsi.webapi.feanalysis.FeAnalysisDeserializer; + +@JsonDeserialize(using = FeAnalysisDeserializer.class) +public class FeAnalysisDTO extends FeAnalysisShortDTO implements FeatureAnalysis{ + + private String value; + 
@JsonProperty("design") + private Object design; + + public String getValue() { + + return value; + } + + public void setValue(final String value) { + + this.value = value; + } + + @Override + public Object getDesign() { + + return design; + } + + public void setDesign(final Object design) { + + this.design = design; + } + + @Override + public String getDescr() { + + return getDescription(); + } +} diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisDemographicCriteriaDTO.java b/src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisDemographicCriteriaDTO.java new file mode 100644 index 0000000000..84b85d2aac --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisDemographicCriteriaDTO.java @@ -0,0 +1,26 @@ +package org.ohdsi.webapi.feanalysis.dto; + +import com.fasterxml.jackson.annotation.JsonProperty; +import org.ohdsi.circe.cohortdefinition.DemographicCriteria; + +public class FeAnalysisDemographicCriteriaDTO extends BaseFeAnalysisCriteriaDTO { + + @JsonProperty("expression") + private DemographicCriteria expression; + + public FeAnalysisDemographicCriteriaDTO() { + } + + public FeAnalysisDemographicCriteriaDTO(Long id, String name, DemographicCriteria expression) { + super(id, name); + this.expression = expression; + } + + public DemographicCriteria getExpression() { + return expression; + } + + public void setExpression(DemographicCriteria expression) { + this.expression = expression; + } +} diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisShortDTO.java b/src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisShortDTO.java new file mode 100644 index 0000000000..02bedcf4ac --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisShortDTO.java @@ -0,0 +1,117 @@ +package org.ohdsi.webapi.feanalysis.dto; + +import com.fasterxml.jackson.annotation.JsonProperty; +import org.ohdsi.analysis.cohortcharacterization.design.CcResultType; +import 
org.ohdsi.analysis.cohortcharacterization.design.StandardFeatureAnalysisDomain; +import org.ohdsi.analysis.cohortcharacterization.design.StandardFeatureAnalysisType; +import org.ohdsi.webapi.service.dto.CommonEntityDTO; + +public class FeAnalysisShortDTO extends CommonEntityDTO { + + @JsonProperty("description") + protected String description; + protected Boolean supportsAnnual; + protected Boolean supportsTemporal; + @JsonProperty("id") + private Integer id; + @JsonProperty("name") + private String name; + @JsonProperty("type") + private StandardFeatureAnalysisType type; + @JsonProperty("domain") + private StandardFeatureAnalysisDomain domain; + @JsonProperty("statType") + private CcResultType statType; + @JsonProperty("includeAnnual") + private Boolean includeAnnual; + @JsonProperty("includeTemporal") + private Boolean includeTemporal; + + public Integer getId() { + + return id; + } + + public void setId(final Integer id) { + + this.id = id; + } + + public String getName() { + + return name; + } + + public void setName(final String name) { + + this.name = name; + } + + public StandardFeatureAnalysisType getType() { + return type; + } + + public void setType(final StandardFeatureAnalysisType type) { + this.type = type; + } + + public StandardFeatureAnalysisDomain getDomain() { + return domain; + } + + public void setDomain(final StandardFeatureAnalysisDomain domain) { + this.domain = domain; + } + + public String getDescription() { + + return description; + } + + public void setDescription(final String description) { + + this.description = description; + } + + public CcResultType getStatType() { + + return statType; + } + + public void setStatType(CcResultType statType) { + + this.statType = statType; + } + + public Boolean getSupportsAnnual() { + return supportsAnnual; + } + + public void setSupportsAnnual(Boolean supportsAnnual) { + this.supportsAnnual = supportsAnnual; + } + + public Boolean getSupportsTemporal() { + return supportsTemporal; + } + + public void 
setSupportsTemporal(Boolean supportsTemporal) { + this.supportsTemporal = supportsTemporal; + } + + public Boolean getIncludeAnnual() { + return includeAnnual; + } + + public void setIncludeAnnual(Boolean includeAnnual) { + this.includeAnnual = includeAnnual; + } + + public Boolean getIncludeTemporal() { + return includeTemporal; + } + + public void setIncludeTemporal(Boolean includeTemporal) { + this.includeTemporal = includeTemporal; + } +} diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisWindowedCriteriaDTO.java b/src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisWindowedCriteriaDTO.java new file mode 100644 index 0000000000..d553beaffa --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisWindowedCriteriaDTO.java @@ -0,0 +1,26 @@ +package org.ohdsi.webapi.feanalysis.dto; + +import com.fasterxml.jackson.annotation.JsonProperty; +import org.ohdsi.circe.cohortdefinition.WindowedCriteria; + +public class FeAnalysisWindowedCriteriaDTO extends BaseFeAnalysisCriteriaDTO { + + @JsonProperty("expression") + private WindowedCriteria expression; + + public FeAnalysisWindowedCriteriaDTO() { + } + + public FeAnalysisWindowedCriteriaDTO(Long id, String name, WindowedCriteria expression) { + super(id, name); + this.expression = expression; + } + + public WindowedCriteria getExpression() { + return expression; + } + + public void setExpression(WindowedCriteria expression) { + this.expression = expression; + } +} diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisWithConceptSetDTO.java b/src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisWithConceptSetDTO.java new file mode 100644 index 0000000000..409d57aaa2 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/feanalysis/dto/FeAnalysisWithConceptSetDTO.java @@ -0,0 +1,21 @@ +package org.ohdsi.webapi.feanalysis.dto; + +import com.fasterxml.jackson.annotation.JsonProperty; +import org.ohdsi.circe.cohortdefinition.ConceptSet; + +import java.util.List; + +public 
class FeAnalysisWithConceptSetDTO extends FeAnalysisDTO {
+
+    @JsonProperty("conceptSets")
+    private List<ConceptSet> conceptSets;
+
+    public List<ConceptSet> getConceptSets() {
+        return conceptSets;
+    }
+
+    public void setConceptSets(List<ConceptSet> conceptSets) {
+        this.conceptSets = conceptSets;
+    }
+
+}
diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/event/FeAnalysisChangedEvent.java b/src/main/java/org/ohdsi/webapi/feanalysis/event/FeAnalysisChangedEvent.java
new file mode 100644
index 0000000000..7bfa09c733
--- /dev/null
+++ b/src/main/java/org/ohdsi/webapi/feanalysis/event/FeAnalysisChangedEvent.java
@@ -0,0 +1,16 @@
+package org.ohdsi.webapi.feanalysis.event;
+
+import org.ohdsi.webapi.feanalysis.domain.FeAnalysisEntity;
+
+public class FeAnalysisChangedEvent {
+
+    private FeAnalysisEntity feAnalysis;
+
+    public FeAnalysisChangedEvent(FeAnalysisEntity feAnalysis) {
+        this.feAnalysis = feAnalysis;
+    }
+
+    public FeAnalysisEntity getFeAnalysis() {
+        return feAnalysis;
+    }
+}
diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/repository/BaseFeAnalysisEntityRepository.java b/src/main/java/org/ohdsi/webapi/feanalysis/repository/BaseFeAnalysisEntityRepository.java
new file mode 100644
index 0000000000..ba5b8ab503
--- /dev/null
+++ b/src/main/java/org/ohdsi/webapi/feanalysis/repository/BaseFeAnalysisEntityRepository.java
@@ -0,0 +1,18 @@
+package org.ohdsi.webapi.feanalysis.repository;
+
+import com.cosium.spring.data.jpa.entity.graph.domain2.EntityGraph;
+import com.cosium.spring.data.jpa.entity.graph.repository.EntityGraphJpaRepository;
+import java.util.List;
+import java.util.Optional;
+import java.util.Set;
+import org.ohdsi.analysis.cohortcharacterization.design.StandardFeatureAnalysisType;
+import org.ohdsi.webapi.cohortcharacterization.domain.CohortCharacterizationEntity;
+import org.ohdsi.webapi.feanalysis.domain.FeAnalysisEntity;
+import org.springframework.data.repository.NoRepositoryBean;
+
+@NoRepositoryBean
+public interface BaseFeAnalysisEntityRepository<T extends FeAnalysisEntity> extends EntityGraphJpaRepository<T, Integer> {
+    Set<T> findAllByCohortCharacterizations(CohortCharacterizationEntity cohortCharacterization);
+    List<T> findAllByType(StandardFeatureAnalysisType preset);
+    Optional<T> findByName(String name);
+}
diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/repository/FeAnalysisAggregateRepository.java b/src/main/java/org/ohdsi/webapi/feanalysis/repository/FeAnalysisAggregateRepository.java
new file mode 100644
index 0000000000..1f56802ca3
--- /dev/null
+++ b/src/main/java/org/ohdsi/webapi/feanalysis/repository/FeAnalysisAggregateRepository.java
@@ -0,0 +1,16 @@
+package org.ohdsi.webapi.feanalysis.repository;
+
+import org.ohdsi.analysis.cohortcharacterization.design.StandardFeatureAnalysisDomain;
+import org.ohdsi.webapi.feanalysis.domain.FeAnalysisAggregateEntity;
+import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.data.jpa.repository.Query;
+
+import java.util.List;
+import java.util.Optional;
+
+public interface FeAnalysisAggregateRepository extends JpaRepository<FeAnalysisAggregateEntity, Integer> {
+
+    List<FeAnalysisAggregateEntity> findByDomain(StandardFeatureAnalysisDomain domain);
+    @Query("select fa from FeAnalysisAggregateEntity fa where fa.isDefault = true")
+    Optional<FeAnalysisAggregateEntity> findDefault();
+}
diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/repository/FeAnalysisCriteriaRepository.java b/src/main/java/org/ohdsi/webapi/feanalysis/repository/FeAnalysisCriteriaRepository.java
new file mode 100644
index 0000000000..dc1982a5c6
--- /dev/null
+++ b/src/main/java/org/ohdsi/webapi/feanalysis/repository/FeAnalysisCriteriaRepository.java
@@ -0,0 +1,13 @@
+package org.ohdsi.webapi.feanalysis.repository;
+
+import org.ohdsi.webapi.feanalysis.domain.FeAnalysisCriteriaEntity;
+import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.data.jpa.repository.Query;
+
+import java.util.List;
+
+public interface FeAnalysisCriteriaRepository extends JpaRepository<FeAnalysisCriteriaEntity, Long> {
+    List<FeAnalysisCriteriaEntity> findAllByFeatureAnalysisId(Integer id);
+    @Query("select fa from
FeAnalysisCriteriaEntity AS fa JOIN FETCH fa.featureAnalysis where fa.expressionString = ?1")
+    List<FeAnalysisCriteriaEntity> findAllByExpressionString(String expression);
+}
\ No newline at end of file
diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/repository/FeAnalysisEntityRepository.java b/src/main/java/org/ohdsi/webapi/feanalysis/repository/FeAnalysisEntityRepository.java
new file mode 100644
index 0000000000..97d4551b8c
--- /dev/null
+++ b/src/main/java/org/ohdsi/webapi/feanalysis/repository/FeAnalysisEntityRepository.java
@@ -0,0 +1,19 @@
+package org.ohdsi.webapi.feanalysis.repository;
+
+import org.ohdsi.webapi.feanalysis.domain.FeAnalysisEntity;
+import org.springframework.data.jpa.repository.Query;
+import org.springframework.data.repository.query.Param;
+
+import java.util.List;
+import java.util.Set;
+
+public interface FeAnalysisEntityRepository extends BaseFeAnalysisEntityRepository<FeAnalysisEntity> {
+    @Query("Select fe FROM FeAnalysisEntity fe WHERE fe.name LIKE ?1 ESCAPE '\\'")
+    List<FeAnalysisEntity> findAllByNameStartsWith(String pattern);
+
+    @Query("SELECT COUNT(fe) FROM FeAnalysisEntity fe WHERE fe.name = :name and fe.id <> :id")
+    int getCountFeWithSameName(@Param("id") Integer id, @Param("name") String name);
+
+    @Query("SELECT fe FROM FeAnalysisEntity fe WHERE fe.id IN :ids")
+    Set<FeAnalysisEntity> findByListIds(@Param("ids") List<Integer> ids);
+}
diff --git a/src/main/java/org/ohdsi/webapi/feanalysis/repository/FeAnalysisWithCriteriaEntityRepository.java b/src/main/java/org/ohdsi/webapi/feanalysis/repository/FeAnalysisWithCriteriaEntityRepository.java
new file mode 100644
index 0000000000..75da909aa6
--- /dev/null
+++ b/src/main/java/org/ohdsi/webapi/feanalysis/repository/FeAnalysisWithCriteriaEntityRepository.java
@@ -0,0 +1,6 @@
+package org.ohdsi.webapi.feanalysis.repository;
+
+import org.ohdsi.webapi.feanalysis.domain.FeAnalysisWithCriteriaEntity;
+
+public interface FeAnalysisWithCriteriaEntityRepository extends BaseFeAnalysisEntityRepository<FeAnalysisWithCriteriaEntity> {
+}
diff --git
a/src/main/java/org/ohdsi/webapi/feanalysis/repository/FeAnalysisWithStringEntityRepository.java b/src/main/java/org/ohdsi/webapi/feanalysis/repository/FeAnalysisWithStringEntityRepository.java
new file mode 100644
index 0000000000..4c28a5316c
--- /dev/null
+++ b/src/main/java/org/ohdsi/webapi/feanalysis/repository/FeAnalysisWithStringEntityRepository.java
@@ -0,0 +1,12 @@
+package org.ohdsi.webapi.feanalysis.repository;
+
+import org.ohdsi.webapi.feanalysis.domain.FeAnalysisWithStringEntity;
+
+import java.util.Collection;
+import java.util.List;
+
+public interface FeAnalysisWithStringEntityRepository extends BaseFeAnalysisEntityRepository<FeAnalysisWithStringEntity> {
+    List<FeAnalysisWithStringEntity> findByDesignIn(Collection<String> names);
+
+    List<FeAnalysisWithStringEntity> findByDesign(String design);
+}
diff --git a/src/main/java/org/ohdsi/webapi/feasibility/FeasibilityStudy.java b/src/main/java/org/ohdsi/webapi/feasibility/FeasibilityStudy.java
index 9059914c19..5b3010fa86 100644
--- a/src/main/java/org/ohdsi/webapi/feasibility/FeasibilityStudy.java
+++ b/src/main/java/org/ohdsi/webapi/feasibility/FeasibilityStudy.java
@@ -21,8 +21,6 @@
 import java.util.List;
 import java.util.Set;
 
-import jakarta.persistence.Access;
-import jakarta.persistence.AccessType;
 import jakarta.persistence.CascadeType;
 import jakarta.persistence.CollectionTable;
 import jakarta.persistence.Column;
@@ -79,7 +77,6 @@ public class FeasibilityStudy {
   )
   @GeneratedValue(generator = "feasibility_study_generator")
   @Column(name="id")
-  @Access(AccessType.PROPERTY)
   private Integer id;
 
   @Column(name="name")
diff --git a/src/main/java/org/ohdsi/webapi/ircalc/AnalysisInfoDTO.java b/src/main/java/org/ohdsi/webapi/ircalc/AnalysisInfoDTO.java
new file mode 100644
index 0000000000..d081b82004
--- /dev/null
+++ b/src/main/java/org/ohdsi/webapi/ircalc/AnalysisInfoDTO.java
@@ -0,0 +1,26 @@
+package org.ohdsi.webapi.ircalc;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class AnalysisInfoDTO {
+
+    private ExecutionInfo executionInfo;
+    private List<AnalysisReport.Summary> summaryList = new
ArrayList<>(); + + public ExecutionInfo getExecutionInfo() { + return executionInfo; + } + + public void setExecutionInfo(ExecutionInfo executionInfo) { + this.executionInfo = executionInfo; + } + + public List getSummaryList() { + return summaryList; + } + + public void setSummaryList(List summaryList) { + this.summaryList = summaryList; + } +} diff --git a/src/main/java/org/ohdsi/webapi/ircalc/AnalysisReport.java b/src/main/java/org/ohdsi/webapi/ircalc/AnalysisReport.java new file mode 100644 index 0000000000..5eab6af376 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/ircalc/AnalysisReport.java @@ -0,0 +1,53 @@ +/* + * Copyright 2015 Observational Health Data Sciences and Informatics [OHDSI.org]. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ohdsi.webapi.ircalc; + +import java.util.List; +import jakarta.xml.bind.annotation.XmlRootElement; +import jakarta.xml.bind.annotation.XmlType; + +/** + * + * @author Chris Knoll + */ + +public class AnalysisReport { + + @XmlType(name="Summary", namespace="http://ohdsi.org/webapi/ircalc") + public static class Summary { + public int targetId; + public int outcomeId; + public long totalPersons; + public long timeAtRisk; + public long cases; + } + + public static class StrataStatistic + { + public int targetId; + public int outcomeId; + public int id; + public String name; + public long totalPersons; + public long cases; + public long timeAtRisk; + } + + public Summary summary; + public List stratifyStats; + public String treemapData; + +} diff --git a/src/main/java/org/ohdsi/webapi/ircalc/DateRange.java b/src/main/java/org/ohdsi/webapi/ircalc/DateRange.java new file mode 100644 index 0000000000..783ac74f80 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/ircalc/DateRange.java @@ -0,0 +1,27 @@ +/* + * Copyright 2016 Observational Health Data Sciences and Informatics [OHDSI.org]. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ohdsi.webapi.ircalc; + +import java.util.Date; + +/** + * + * @author Chris Knoll + */ +public class DateRange { + public String startDate; + public String endDate; +} diff --git a/src/main/java/org/ohdsi/webapi/ircalc/ExecutionInfo.java b/src/main/java/org/ohdsi/webapi/ircalc/ExecutionInfo.java new file mode 100644 index 0000000000..8df0138027 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/ircalc/ExecutionInfo.java @@ -0,0 +1,158 @@ +/* + * Copyright 2016 Observational Health Data Sciences and Informatics [OHDSI.org]. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package org.ohdsi.webapi.ircalc;

import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.io.Serializable;
import java.util.Date;
import jakarta.persistence.*;

import org.hibernate.annotations.NotFound;
import org.hibernate.annotations.NotFoundAction;
import org.ohdsi.webapi.GenerationStatus;
import org.ohdsi.webapi.IExecutionInfo;
import org.ohdsi.webapi.source.Source;

/**
 * Records the generation state of one incidence-rate analysis run against one
 * data source (a row in {@code ir_execution}). Identified by the composite
 * (analysis_id, source_id) key — see {@link ExecutionInfoId}.
 *
 * @author Chris Knoll
 */
@Entity(name = "IRAnalysisGenerationInfo")
@Table(name="ir_execution")
public class ExecutionInfo implements Serializable, IExecutionInfo {
  private static final long serialVersionUID = 1L;

  // Composite key; its parts are derived from the two @MapsId associations
  // below, so it stays consistent with 'analysis' and 'source'.
  @EmbeddedId
  private ExecutionInfoId id;

  // Owning analysis. Excluded from JSON to avoid serializing the full graph.
  @JsonIgnore
  @ManyToOne
  @MapsId("analysisId")
  @JoinColumn(name="analysis_id", referencedColumnName="id")
  private IncidenceRateAnalysis analysis;

  // Source the run executed against. NotFoundAction.IGNORE leaves this null
  // (instead of failing the load) when the referenced source row was deleted.
  @JsonIgnore
  @ManyToOne
  @MapsId("sourceId")
  @JoinColumn(name="source_id", referencedColumnName="source_id")
  @NotFound(action = NotFoundAction.IGNORE)
  private Source source;

  @Column(name="start_time")
  private Date startTime;

  // Elapsed time of the run; IRAnalysisInfoListener stores
  // (endTime - startTime) in milliseconds here.
  @Column(name="execution_duration")
  private Integer executionDuration;

  // Persisted by name, not ordinal, so enum reordering is safe.
  @Column(name="status")
  @Enumerated(EnumType.STRING)
  private GenerationStatus status;

  // False while a run is in flight or after a FAILED/STOPPED run.
  @Column(name="is_valid")
  private boolean isValid;

  @Column(name = "is_canceled")
  private boolean isCanceled;

  // Exit description of the batch job, truncated by the listener.
  @Column(name="message")
  private String message;

  /** No-arg constructor required by JPA. */
  public ExecutionInfo()
  {
  }

  /** Builds the composite id from the given analysis and source. */
  public ExecutionInfo(IncidenceRateAnalysis analysis, Source source)
  {
    this.id = new ExecutionInfoId(analysis.getId(), source.getSourceId());
    this.source = source;
    this.analysis = analysis;
  }

  public ExecutionInfoId getId() {
    return id;
  }

  public void setId(ExecutionInfoId id) {
    this.id = id;
  }

  public Date getStartTime() {
    return startTime;
  }

  // Setters below return 'this' to allow fluent chaining.
  public ExecutionInfo setStartTime(Date startTime) {
    this.startTime = startTime;
    return this;
  }

  public Integer getExecutionDuration() {
    return executionDuration;
  }

  public ExecutionInfo setExecutionDuration(Integer executionDuration) {
    this.executionDuration = executionDuration;
    return this;
  }

  public GenerationStatus getStatus() {
    return status;
  }

  public ExecutionInfo setStatus(GenerationStatus status) {
    this.status = status;
    return this;
  }

  public boolean getIsValid() {
    return isValid;
  }

  public ExecutionInfo setIsValid(boolean isValid) {
    this.isValid = isValid;
    return this;
  }

  // IExecutionInfo accessor; delegates to the bean-style isCanceled().
  @Override
  public boolean getIsCanceled() {
    return isCanceled();
  }

  public boolean isCanceled() {
    return isCanceled;
  }

  public void setCanceled(boolean canceled) {
    isCanceled = canceled;
  }

  public String getMessage() {
    return message;
  }

  public ExecutionInfo setMessage(String message) {
    this.message = message;
    return this;
  }

  public IncidenceRateAnalysis getAnalysis() {
    return analysis;
  }

  public Source getSource() {
    return source;
  }
}
package org.ohdsi.webapi.ircalc;

import java.io.Serializable;
import java.util.Objects;
import jakarta.persistence.Column;
import jakarta.persistence.Embeddable;

/**
 * Composite primary key of {@link ExecutionInfo}: the pair
 * (analysis_id, source_id). Must be a well-behaved value type for JPA,
 * i.e. equals/hashCode over both components.
 *
 * @author Chris Knoll
 */
@Embeddable
public class ExecutionInfoId implements Serializable {
  private static final long serialVersionUID = 1L;

  // insertable/updatable=false: the column is written through the
  // @MapsId("analysisId") association on ExecutionInfo.
  @Column(name = "analysis_id", insertable = false, updatable = false)
  private Integer analysisId;

  @Column(name = "source_id")
  private Integer sourceId;

  /** No-arg constructor required by JPA. */
  public ExecutionInfoId() {
  }

  public ExecutionInfoId(Integer analysisId, Integer sourceId) {
    this.analysisId = analysisId;
    this.sourceId = sourceId;
  }

  public Integer getAnalysisId() {
    return analysisId;
  }

  public void setAnalysisId(Integer analysisId) {
    this.analysisId = analysisId;
  }

  public Integer getSourceId() {
    return sourceId;
  }

  public void setSourceId(Integer sourceId) {
    this.sourceId = sourceId;
  }

  /**
   * Null-safe value equality over both key components.
   * The original implementation called analysisId.equals(...) directly and
   * threw NullPointerException for a partially-initialized key.
   */
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof ExecutionInfoId)) {
      return false;
    }
    ExecutionInfoId other = (ExecutionInfoId) o;
    return Objects.equals(analysisId, other.analysisId)
        && Objects.equals(sourceId, other.sourceId);
  }

  /**
   * Null-safe hash consistent with equals. Replaces the previous
   * {@code analysisId + sourceId}, which NPE'd on null components and
   * collided for any pair with the same sum.
   */
  @Override
  public int hashCode() {
    return Objects.hash(analysisId, sourceId);
  }
}
package org.ohdsi.webapi.ircalc;

import org.ohdsi.webapi.cohortdefinition.*;
import com.fasterxml.jackson.annotation.JsonProperty;

/**
 * A time-at-risk boundary definition: a day offset applied to either the
 * cohort start or cohort end date. IRAnalysisQueryBuilder turns this into a
 * {@code DATEADD(day, offset, cohort_start_date|cohort_end_date)} expression.
 *
 * @author Chris Knoll
 */
public class FieldOffset {

  /** Which cohort date the offset is anchored to. */
  public enum DateField {
    StartDate, EndDate
  }

  // Anchor date; defaults to the cohort start date.
  @JsonProperty("DateField")
  public DateField dateField = DateField.StartDate;

  // Offset in days relative to the anchor date (may be negative).
  @JsonProperty("Offset")
  public int offset = 0;

}
package org.ohdsi.webapi.ircalc;

import org.ohdsi.webapi.Constants;
import org.ohdsi.webapi.GenerationStatus;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobExecutionListener;
import org.springframework.batch.core.JobParameters;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.DefaultTransactionDefinition;
import org.springframework.transaction.support.TransactionTemplate;

import com.cosium.spring.data.jpa.entity.graph.domain2.EntityGraph;
import com.cosium.spring.data.jpa.entity.graph.domain2.NamedEntityGraph;

import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.Objects;
import java.util.Optional;

/**
 * Spring Batch listener that persists the lifecycle of an IR-analysis
 * generation into {@link ExecutionInfo}: marks the run RUNNING before the job
 * and records validity, cancellation, duration and exit message after it.
 */
public class IRAnalysisInfoListener implements JobExecutionListener {

  // ir_execution.message column width; exit descriptions are truncated to fit.
  private static final int MAX_MESSAGE_LENGTH = 2000;
  private static final EntityGraph IR_WITH_EXECUTION_INFOS_ENTITY_GRAPH =
      NamedEntityGraph.loading("IncidenceRateAnalysis.withExecutionInfoList");

  private final TransactionTemplate transactionTemplate;
  private final IncidenceRateAnalysisRepository incidenceRateAnalysisRepository;

  // Shared between beforeJob and afterJob. NOTE(review): assumes one listener
  // instance per job execution; concurrent reuse would race on this field —
  // confirm against the job configuration.
  private Date startTime;

  public IRAnalysisInfoListener(TransactionTemplate transactionTemplate,
                                IncidenceRateAnalysisRepository incidenceRateAnalysisRepository) {

    this.transactionTemplate = transactionTemplate;
    this.incidenceRateAnalysisRepository = incidenceRateAnalysisRepository;
  }

  /**
   * Marks the per-source execution record as RUNNING (and not yet valid) in a
   * dedicated REQUIRES_NEW transaction so the status is visible immediately,
   * independent of the batch job's own transaction.
   */
  @Override
  public void beforeJob(JobExecution je) {

    startTime = Calendar.getInstance().getTime();
    JobParameters jobParams = je.getJobParameters();
    Integer analysisId = Integer.valueOf(jobParams.getString("analysis_id"));
    Integer sourceId = Integer.valueOf(jobParams.getString("source_id"));

    DefaultTransactionDefinition requiresNewTx = new DefaultTransactionDefinition();
    requiresNewTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);

    TransactionStatus initStatus = this.transactionTemplate.getTransactionManager().getTransaction(requiresNewTx);
    IncidenceRateAnalysis analysis = this.incidenceRateAnalysisRepository.findOneWithExecutionsOnExistingSources(analysisId,
        IR_WITH_EXECUTION_INFOS_ENTITY_GRAPH);

    findExecutionInfoBySourceId(analysis.getExecutionInfoList(), sourceId).ifPresent(analysisInfo -> {
      analysisInfo.setIsValid(false);
      analysisInfo.setStartTime(startTime);
      analysisInfo.setStatus(GenerationStatus.RUNNING);
    });

    this.incidenceRateAnalysisRepository.save(analysis);
    this.transactionTemplate.getTransactionManager().commit(initStatus);
  }

  /**
   * Records the outcome of the run: validity (false on FAILED/STOPPED),
   * cancellation (STOPPED job or any step exiting with Constants.CANCELED),
   * duration in milliseconds, and the truncated exit description. Status is
   * set to COMPLETE regardless of outcome; failure detail lives in isValid
   * and the message.
   */
  @Override
  public void afterJob(JobExecution je) {

    boolean isValid = !(je.getStatus() == BatchStatus.FAILED || je.getStatus() == BatchStatus.STOPPED);
    // Exit descriptions are normally non-null, but guard before substring()
    // below rather than risk an NPE inside the status write.
    String statusMessage = Objects.toString(je.getExitStatus().getExitDescription(), "");

    JobParameters jobParams = je.getJobParameters();
    Integer analysisId = Integer.valueOf(jobParams.getString("analysis_id"));
    Integer sourceId = Integer.valueOf(jobParams.getString("source_id"));

    // Same REQUIRES_NEW pattern as beforeJob: persist the final status even if
    // the surrounding job transaction rolled back.
    DefaultTransactionDefinition requiresNewTx = new DefaultTransactionDefinition();
    requiresNewTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
    TransactionStatus completeStatus = this.transactionTemplate.getTransactionManager().getTransaction(requiresNewTx);
    Date endTime = Calendar.getInstance().getTime();
    IncidenceRateAnalysis analysis = this.incidenceRateAnalysisRepository.findOneWithExecutionsOnExistingSources(analysisId,
        IR_WITH_EXECUTION_INFOS_ENTITY_GRAPH);

    findExecutionInfoBySourceId(analysis.getExecutionInfoList(), sourceId).ifPresent(analysisInfo -> {
      analysisInfo.setIsValid(isValid);
      analysisInfo.setCanceled(je.getStatus() == BatchStatus.STOPPED
          || je.getStepExecutions().stream().anyMatch(se -> Objects.equals(Constants.CANCELED, se.getExitStatus().getExitCode())));
      analysisInfo.setExecutionDuration((int) (endTime.getTime() - startTime.getTime()));
      analysisInfo.setStatus(GenerationStatus.COMPLETE);
      analysisInfo.setMessage(statusMessage.substring(0, Math.min(MAX_MESSAGE_LENGTH, statusMessage.length())));
    });

    this.incidenceRateAnalysisRepository.save(analysis);
    this.transactionTemplate.getTransactionManager().commit(completeStatus);
  }

  /** Finds the execution record for the given source id, if present. */
  private Optional<ExecutionInfo> findExecutionInfoBySourceId(Collection<ExecutionInfo> infoList, Integer sourceId) {

    return infoList.stream()
        .filter(info -> Objects.equals(info.getId().getSourceId(), sourceId))
        .findFirst();
  }
}
/*
 * Copyright 2015 Observational Health Data Sciences and Informatics [OHDSI.org].
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.ohdsi.webapi.ircalc;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.lang3.StringUtils;
import org.ohdsi.circe.cohortdefinition.CohortExpressionQueryBuilder;
import org.ohdsi.circe.cohortdefinition.CriteriaGroup;
import org.ohdsi.circe.helper.ResourceHelper;
import org.ohdsi.circe.vocabulary.ConceptSetExpressionQueryBuilder;

import java.util.ArrayList;
import org.ohdsi.webapi.Constants;
import org.ohdsi.webapi.util.SqlUtils;

/**
 * Builds the SQL that performs an incidence-rate analysis by filling the
 * performAnalysis.sql / strata.sql templates with cohort ids, time-at-risk
 * adjustments, study-window filters, concept-set codesets and strata criteria.
 *
 * @author Chris Knoll
 */
public class IRAnalysisQueryBuilder {

  private final static ConceptSetExpressionQueryBuilder conceptSetQueryBuilder = new ConceptSetExpressionQueryBuilder();
  private final static CohortExpressionQueryBuilder cohortExpressionQueryBuilder = new CohortExpressionQueryBuilder();

  private final static String PERFORM_ANALYSIS_QUERY_TEMPLATE = ResourceHelper.GetResourceAsString("/resources/incidencerate/sql/performAnalysis.sql");
  private final static String STRATA_QUERY_TEMPLATE = ResourceHelper.GetResourceAsString("/resources/incidencerate/sql/strata.sql");

  /** Schema/table names substituted into the generated SQL. */
  public static class BuildExpressionQueryOptions {
    @JsonProperty("cdmSchema")
    public String cdmSchema;

    @JsonProperty("resultsSchema")
    public String resultsSchema;

    @JsonProperty("vocabularySchema")
    public String vocabularySchema;

    @JsonProperty("tempSchema")
    public String tempSchema;

    @JsonProperty("cohortTable")
    public String cohortTable;
  }

  private final ObjectMapper objectMapper;

  public IRAnalysisQueryBuilder(ObjectMapper objectMapper) {

    this.objectMapper = objectMapper;
  }

  /**
   * Renders strata.sql for one stratification rule, joining the rule's
   * criteria-group query against the analysis events.
   */
  private String getStrataQuery(CriteriaGroup strataCriteria)
  {
    String resultSql = STRATA_QUERY_TEMPLATE;
    String additionalCriteriaQuery = "\nJOIN (\n" + cohortExpressionQueryBuilder.getCriteriaGroupQuery(strataCriteria, "#analysis_events") + ") AC on AC.person_id = pe.person_id AND AC.event_id = pe.event_id";
    additionalCriteriaQuery = StringUtils.replace(additionalCriteriaQuery,"@indexId", "" + 0);
    resultSql = StringUtils.replace(resultSql, "@additionalCriteriaQuery", additionalCriteriaQuery);
    return resultSql;
  }

  /**
   * Fills the analysis template from the given expression.
   *
   * @param analysisExpression targets, outcomes, time-at-risk, study window and strata
   * @param analysisId substituted for the @analysisId token
   * @param options schema/table substitutions; when null, placeholder tokens are left intact
   * @return the complete (untranslated) analysis SQL
   */
  public String buildAnalysisQuery(IncidenceRateAnalysisExpression analysisExpression, Integer analysisId, BuildExpressionQueryOptions options) {
    String resultSql = PERFORM_ANALYSIS_QUERY_TEMPLATE;

    // target and outcome statements for analysis
    ArrayList<String> cohortIdStatements = new ArrayList<>();
    for (int targetId : analysisExpression.targetIds) {
      cohortIdStatements.add(String.format("SELECT %d as cohort_id, 0 as is_outcome", targetId));
    }

    for (int outcomeId : analysisExpression.outcomeIds) {
      cohortIdStatements.add(String.format("SELECT %d as cohort_id, 1 as is_outcome", outcomeId));
    }

    resultSql = StringUtils.replace(resultSql,"@cohortInserts", StringUtils.join(cohortIdStatements,"\nUNION\n"));

    // apply time-at-risk adjustments: offset in days from cohort start or end
    String adjustmentExpression = "DATEADD(day,%d,%s)";

    String adjustedStart = String.format(adjustmentExpression,
        analysisExpression.timeAtRisk.start.offset,
        analysisExpression.timeAtRisk.start.dateField == FieldOffset.DateField.StartDate ? "cohort_start_date" : "cohort_end_date");
    resultSql = StringUtils.replace(resultSql,"@adjustedStart", adjustedStart);

    String adjustedEnd = String.format(adjustmentExpression,
        analysisExpression.timeAtRisk.end.offset,
        analysisExpression.timeAtRisk.end.dateField == FieldOffset.DateField.StartDate ? "cohort_start_date" : "cohort_end_date");
    resultSql = StringUtils.replace(resultSql,"@adjustedEnd", adjustedEnd);

    // apply study window WHERE clauses (either bound may be absent)
    ArrayList<String> studyWindowClauses = new ArrayList<>();
    if (analysisExpression.studyWindow != null)
    {
      if (StringUtils.isNotEmpty(analysisExpression.studyWindow.startDate))
        studyWindowClauses.add(String.format("t.cohort_start_date >= %s", SqlUtils.dateStringToSql(analysisExpression.studyWindow.startDate)));
      if (StringUtils.isNotEmpty(analysisExpression.studyWindow.endDate))
        studyWindowClauses.add(String.format("t.cohort_start_date <= %s", SqlUtils.dateStringToSql(analysisExpression.studyWindow.endDate)));
    }
    if (!studyWindowClauses.isEmpty())
      resultSql = StringUtils.replace(resultSql, "@cohortDataFilter", "AND " + StringUtils.join(studyWindowClauses," AND "));
    else
      resultSql = StringUtils.replace(resultSql, "@cohortDataFilter", "");

    // add censoring end dates if a study window end is defined
    if (analysisExpression.studyWindow != null && StringUtils.isNotEmpty(analysisExpression.studyWindow.endDate))
    {
      StringBuilder endDatesQuery = new StringBuilder(
          String.format("UNION\nselect combos.target_id, combos.outcome_id, t.subject_id, t.cohort_start_date, %s as followup_end, 0 as is_case",
              SqlUtils.dateStringToSql(analysisExpression.studyWindow.endDate))
      );
      endDatesQuery.append("\nFROM #cteCohortCombos combos");
      endDatesQuery.append("\nJOIN #cteCohortData t on combos.target_id = t.target_id and combos.outcome_id = t.outcome_id");

      resultSql = StringUtils.replace(resultSql, "@EndDateUnions", endDatesQuery.toString());
    }
    else
      resultSql = StringUtils.replace(resultSql, "@EndDateUnions", "");

    String codesetQuery = cohortExpressionQueryBuilder.getCodesetQuery(analysisExpression.conceptSets);
    resultSql = StringUtils.replace(resultSql, "@codesetQuery", codesetQuery);

    // one strata insert per stratification rule, tagged with its sequence index
    ArrayList<String> strataInsert = new ArrayList<>();
    for (int i = 0; i < analysisExpression.strata.size(); i++)
    {
      CriteriaGroup cg = analysisExpression.strata.get(i).expression;
      String stratumInsert = getStrataQuery(cg);
      stratumInsert = StringUtils.replace(stratumInsert, "@strata_sequence", "" + i);
      strataInsert.add(stratumInsert);
    }

    resultSql = StringUtils.replace(resultSql,"@strataCohortInserts", StringUtils.join(strataInsert,"\n"));

    if (options != null)
    {
      // replace query parameters with tokens
      resultSql = StringUtils.replace(resultSql, Constants.SqlSchemaPlaceholders.CDM_DATABASE_SCHEMA_PLACEHOLDER, options.cdmSchema);
      resultSql = StringUtils.replace(resultSql, Constants.SqlSchemaPlaceholders.RESULTS_DATABASE_SCHEMA_PLACEHOLDER, options.resultsSchema);
      resultSql = StringUtils.replace(resultSql, Constants.SqlSchemaPlaceholders.VOCABULARY_DATABASE_SCHEMA_PLACEHOLDER, options.vocabularySchema);
      resultSql = StringUtils.replace(resultSql, Constants.SqlSchemaPlaceholders.TEMP_DATABASE_SCHEMA_PLACEHOLDER, options.tempSchema);
      resultSql = StringUtils.replace(resultSql, "@cohort_table", options.cohortTable);
    }

    resultSql = StringUtils.replace(resultSql, "@analysisId", analysisId.toString());

    return resultSql;
  }

  /**
   * Convenience overload: deserializes the persisted expression JSON of the
   * given analysis, then builds the query.
   *
   * @throws RuntimeException wrapping any parse/build failure, with the
   *         analysis id in the message (the original rethrow carried no context)
   */
  public String buildAnalysisQuery(IncidenceRateAnalysis analysis, BuildExpressionQueryOptions options) {
    try {
      IncidenceRateAnalysisExpression analysisExpression = objectMapper.readValue(analysis.getDetails().getExpression(), IncidenceRateAnalysisExpression.class);
      return buildAnalysisQuery(analysisExpression, analysis.getId(), options);
    } catch (Exception e) {
      throw new RuntimeException("Failed to build IR analysis query for analysis id " + analysis.getId(), e);
    }
  }
}
package org.ohdsi.webapi.ircalc;

import org.ohdsi.webapi.check.CheckResult;
import org.ohdsi.webapi.common.generation.GenerateSqlResult;
import org.ohdsi.webapi.ircalc.AnalysisReport;
import org.ohdsi.webapi.ircalc.dto.IRAnalysisDTO;
import org.ohdsi.webapi.ircalc.dto.IRAnalysisShortDTO;
import org.ohdsi.webapi.ircalc.dto.IRVersionFullDTO;
import org.ohdsi.webapi.job.JobExecutionResource;
import org.ohdsi.webapi.tag.domain.HasTags;
import org.ohdsi.webapi.tag.dto.TagNameListRequestDTO;
import org.ohdsi.webapi.versioning.dto.VersionDTO;
import org.ohdsi.webapi.versioning.dto.VersionUpdateDTO;
import org.springframework.web.bind.annotation.*;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;

import java.util.List;

/**
 * REST contract for incidence-rate analyses, rooted at {@code /ir}.
 * Implemented by {@code IRAnalysisService}.
 */
@RequestMapping(path = "/ir")
public interface IRAnalysisResource extends HasTags {

  /**
   * Returns all IR Analyses in a list.
   *
   * @return list of IR analyses
   */
  @GetMapping(produces = MediaType.APPLICATION_JSON_VALUE)
  List getIRAnalysisList();

  /**
   * Counts other IR analyses carrying the given name (used for
   * duplicate-name checks; the analysis with the given id is excluded).
   */
  @GetMapping(path = "/{id}/exists", produces = MediaType.APPLICATION_JSON_VALUE)
  int getCountIRWithSameName(@PathVariable("id") final int id, @RequestParam(value = "name", required = false) String name);

  /**
   * Creates the incidence rate analysis
   *
   * @param analysis The analysis to create.
   * @return the created analysis
   */
  @PostMapping(path = "/", produces = MediaType.APPLICATION_JSON_VALUE, consumes = MediaType.APPLICATION_JSON_VALUE)
  IRAnalysisDTO createAnalysis(@RequestBody IRAnalysisDTO analysis);

  /** Returns the IR analysis with the given id. */
  @GetMapping(path = "/{id}", produces = MediaType.APPLICATION_JSON_VALUE)
  IRAnalysisDTO getAnalysis(@PathVariable("id") final int id);

  /** Imports an IR analysis design, creating a new analysis. */
  @PostMapping(path = "/design", produces = MediaType.APPLICATION_JSON_VALUE, consumes = MediaType.APPLICATION_JSON_VALUE)
  IRAnalysisDTO doImport(@RequestBody final IRAnalysisDTO dto);

  /** Exports the design of the IR analysis with the given id. */
  @GetMapping(path = "/{id}/design", produces = MediaType.APPLICATION_JSON_VALUE)
  IRAnalysisDTO export(@PathVariable("id") final Integer id);

  /** Saves changes to an existing IR analysis. */
  @PutMapping(path = "/{id}", produces = MediaType.APPLICATION_JSON_VALUE)
  IRAnalysisDTO saveAnalysis(@PathVariable("id") final int id, @RequestBody IRAnalysisDTO analysis);

  /** Starts generation of the analysis against the given source. */
  @GetMapping(path = "/{analysis_id}/execute/{sourceKey}", produces = MediaType.APPLICATION_JSON_VALUE)
  JobExecutionResource performAnalysis(@PathVariable("analysis_id") final int analysisId, @PathVariable("sourceKey") final String sourceKey);

  /** Cancels a running generation of the analysis on the given source. */
  @DeleteMapping(path = "/{analysis_id}/execute/{sourceKey}")
  void cancelAnalysis(@PathVariable("analysis_id") final int analysisId, @PathVariable("sourceKey") final String sourceKey);

  /** Returns execution info for the analysis across all sources. */
  @GetMapping(path = "/{id}/info", produces = MediaType.APPLICATION_JSON_VALUE)
  List getAnalysisInfo(@PathVariable("id") final int id);

  /** Returns execution info for the analysis on one source. */
  @GetMapping(path = "/{id}/info/{sourceKey}", produces = MediaType.APPLICATION_JSON_VALUE)
  AnalysisInfoDTO getAnalysisInfo(@PathVariable("id") final int id, @PathVariable("sourceKey") final String sourceKey);

  /**
   * Deletes the execution info (generation results) of the specified IR
   * analysis on the given source.
   *
   * @param id - the IR Analysis ID
   */
  @DeleteMapping(path = "/{id}/info/{sourceKey}", produces = MediaType.APPLICATION_JSON_VALUE)
  void deleteInfo(@PathVariable("id") final int id, @PathVariable("sourceKey") final String sourceKey);

  /**
   * Deletes the specified IR analysis.
   *
   * @param id - the IR Analysis ID to delete
   */
  @DeleteMapping(path = "/{id}", produces = MediaType.APPLICATION_JSON_VALUE)
  void delete(@PathVariable("id") final int id);

  /**
   * Exports the analysis definition and results
   *
   * @param id - the IR Analysis ID to export
   * @return Response containing binary stream of zipped data
   */
  @GetMapping(path = "/{id}/export")
  ResponseEntity export(@PathVariable("id") final int id);

  /**
   * Copies the specified IR analysis.
   *
   * @param id - the IR Analysis ID to copy
   * @return the copied analysis
   */
  @GetMapping(path = "/{id}/copy", produces = MediaType.APPLICATION_JSON_VALUE)
  IRAnalysisDTO copy(@PathVariable("id") final int id);

  /** Returns the analysis report for one target/outcome pair on a source. */
  @GetMapping(path = "/{id}/report/{sourceKey}", produces = MediaType.APPLICATION_JSON_VALUE)
  AnalysisReport getAnalysisReport(@PathVariable("id") final int id, @PathVariable("sourceKey") final String sourceKey,
                                   @RequestParam("targetId") final int targetId, @RequestParam("outcomeId") final int outcomeId );

  /** Generates (without executing) the analysis SQL for the given request. */
  @PostMapping(path = "/sql", produces = MediaType.APPLICATION_JSON_VALUE, consumes = MediaType.APPLICATION_JSON_VALUE)
  public GenerateSqlResult generateSql(@RequestBody IRAnalysisService.GenerateSqlRequest request);

  /** Runs design diagnostics on the submitted analysis. */
  @PostMapping(path = "/check", produces = MediaType.APPLICATION_JSON_VALUE, consumes = MediaType.APPLICATION_JSON_VALUE)
  public CheckResult runDiagnostics(@RequestBody IRAnalysisDTO irAnalysisDTO);

  /**
   * Assign tag to IR Analysis
   *
   * @param id
   * @param tagId
   */
  @PostMapping(path = "/{id}/tag/", produces = MediaType.APPLICATION_JSON_VALUE)
  void assignTag(@PathVariable("id") final Integer id, @RequestParam("tagId") final int tagId);

  /**
   * Unassign tag from IR Analysis
   *
   * @param id
   * @param tagId
   */
  @DeleteMapping(path = "/{id}/tag/{tagId}", produces = MediaType.APPLICATION_JSON_VALUE)
  void unassignTag(@PathVariable("id") final Integer id, @PathVariable("tagId") final int tagId);

  /**
   * Assign protected tag to IR Analysis
   *
   * @param id
   * @param tagId
   */
  @PostMapping(path = "/{id}/protectedtag/", produces = MediaType.APPLICATION_JSON_VALUE)
  void assignPermissionProtectedTag(@PathVariable("id") final int id, @RequestParam("tagId") final int tagId);

  /**
   * Unassign protected tag from IR Analysis
   *
   * @param id
   * @param tagId
   */
  @DeleteMapping(path = "/{id}/protectedtag/{tagId}", produces = MediaType.APPLICATION_JSON_VALUE)
  void unassignPermissionProtectedTag(@PathVariable("id") final int id, @PathVariable("tagId") final int tagId);

  /**
   * Get list of versions of IR Analysis
   *
   * @param id
   * @return
   */
  @GetMapping(path = "/{id}/version/", produces = MediaType.APPLICATION_JSON_VALUE)
  List getVersions(@PathVariable("id") final long id);

  /**
   * Get version of IR Analysis
   *
   * @param id
   * @param version
   * @return
   */
  @GetMapping(path = "/{id}/version/{version}", produces = MediaType.APPLICATION_JSON_VALUE)
  IRVersionFullDTO getVersion(@PathVariable("id") final int id, @PathVariable("version") final int version);

  /**
   * Update version of IR Analysis
   *
   * @param id
   * @param version
   * @param updateDTO
   * @return
   */
  @PutMapping(path = "/{id}/version/{version}", produces = MediaType.APPLICATION_JSON_VALUE)
  VersionDTO updateVersion(@PathVariable("id") final int id, @PathVariable("version") final int version,
                           @RequestBody VersionUpdateDTO updateDTO);

  /**
   * Delete version of IR Analysis
   *
   * @param id
   * @param version
   */
  @DeleteMapping(path = "/{id}/version/{version}", produces = MediaType.APPLICATION_JSON_VALUE)
  void deleteVersion(@PathVariable("id") final int id, @PathVariable("version") final int version) ;

  /**
   * Create a new asset from a version of IR Analysis
   *
   * @param id
   * @param version
   * @return
   */
  @PutMapping(path = "/{id}/version/{version}/createAsset", produces = MediaType.APPLICATION_JSON_VALUE)
  IRAnalysisDTO copyAssetFromVersion(@PathVariable("id") final int id, @PathVariable("version") final int version);

  /**
   * Get list of incidence rates with assigned tags
   *
   * @param requestDTO
   * @return
   */
  @PostMapping(path = "/byTags", produces = MediaType.APPLICATION_JSON_VALUE, consumes = MediaType.APPLICATION_JSON_VALUE)
  List listByTags(@RequestBody TagNameListRequestDTO requestDTO);
}
+ */ +package org.ohdsi.webapi.ircalc; + +import com.cosium.spring.data.jpa.entity.graph.domain2.EntityGraph; +import com.cosium.spring.data.jpa.entity.graph.domain2.NamedEntityGraph; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableMap; +import com.opencsv.CSVWriter; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.collections4.IterableUtils; +import org.apache.commons.lang3.StringUtils; +import org.ohdsi.analysis.Utils; +import org.ohdsi.circe.helper.ResourceHelper; +import org.ohdsi.sql.SqlRender; +import org.ohdsi.sql.SqlTranslate; +import org.ohdsi.webapi.GenerationStatus; +import org.ohdsi.webapi.check.CheckResult; +import org.ohdsi.webapi.check.checker.ir.IRChecker; +import org.ohdsi.webapi.cohortdefinition.CohortDefinitionEntity; +import org.ohdsi.webapi.cohortdefinition.CohortDefinitionDetailsEntity; +import org.ohdsi.webapi.cohortdefinition.CohortDefinitionRepository; +import org.ohdsi.webapi.cohortdefinition.dto.CohortDTO; +import org.ohdsi.webapi.common.DesignImportService; +import org.ohdsi.webapi.common.generation.GenerateSqlResult; +import org.ohdsi.webapi.common.generation.GenerationUtils; +import org.ohdsi.webapi.ircalc.dto.IRAnalysisDTO; +import org.ohdsi.webapi.ircalc.dto.IRAnalysisShortDTO; +import org.ohdsi.webapi.ircalc.dto.IRVersionFullDTO; +import org.ohdsi.webapi.job.GeneratesNotification; +import org.ohdsi.webapi.job.JobExecutionResource; +import org.ohdsi.webapi.security.authz.AuthorizationService; +import org.ohdsi.webapi.security.authz.UserEntity; +import org.ohdsi.webapi.security.authz.UserRepository; +import org.ohdsi.webapi.service.AbstractDaoService; +import org.ohdsi.webapi.service.JobService; +import org.ohdsi.webapi.source.Source; +import org.ohdsi.webapi.source.SourceDaimon; +import org.ohdsi.webapi.source.SourceService; +import org.ohdsi.webapi.tag.dto.TagNameListRequestDTO; +import 
org.ohdsi.webapi.util.ExportUtil; +import org.ohdsi.webapi.util.ExceptionUtils; +import org.ohdsi.webapi.util.NameUtils; +import org.ohdsi.webapi.util.PreparedStatementRenderer; +import org.ohdsi.webapi.util.SessionUtils; +import org.ohdsi.webapi.versioning.domain.IRVersion; +import org.ohdsi.webapi.versioning.domain.Version; +import org.ohdsi.webapi.versioning.domain.VersionBase; +import org.ohdsi.webapi.versioning.domain.VersionType; +import org.ohdsi.webapi.versioning.dto.VersionDTO; +import org.ohdsi.webapi.versioning.dto.VersionUpdateDTO; +import org.ohdsi.webapi.versioning.service.VersionService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.JobParametersBuilder; +import org.springframework.batch.core.job.builder.SimpleJobBuilder; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.core.convert.ConversionService; +import org.springframework.jdbc.core.RowMapper; +import org.springframework.web.bind.annotation.RestController; +import org.springframework.security.access.prepost.PreAuthorize; +import org.springframework.http.ResponseEntity; +import org.springframework.http.HttpHeaders; +import org.springframework.http.MediaType; +import org.springframework.http.HttpStatus; +import org.springframework.web.server.ResponseStatusException; +import org.springframework.transaction.TransactionDefinition; +import org.springframework.transaction.TransactionStatus; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.transaction.support.DefaultTransactionDefinition; + +import jakarta.annotation.PostConstruct; +import java.io.ByteArrayOutputStream; +import java.io.StringWriter; +import java.util.ArrayList; +import java.util.Calendar; +import java.util.Collection; +import java.util.Collections; +import java.util.Date; +import 
java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import java.util.stream.StreamSupport; +import java.util.zip.ZipEntry; +import java.util.zip.ZipOutputStream; + +import static org.ohdsi.webapi.Constants.GENERATE_IR_ANALYSIS; +import static org.ohdsi.webapi.Constants.Params.ANALYSIS_ID; +import static org.ohdsi.webapi.Constants.Params.JOB_NAME; +import static org.ohdsi.webapi.Constants.Params.SOURCE_ID; +import static org.ohdsi.webapi.util.SecurityUtils.whitelist; + +/** + * + * @author Chris Knoll + */ +@RestController +public class IRAnalysisService extends AbstractDaoService implements + GeneratesNotification, IRAnalysisResource { + + private static final Logger log = LoggerFactory.getLogger(IRAnalysisService.class); + private final static String STRATA_STATS_QUERY_TEMPLATE = ResourceHelper.GetResourceAsString("/resources/incidencerate/sql/strata_stats.sql"); + private static final String NAME = "irAnalysis"; + private static final String NO_INCIDENCE_RATE_ANALYSIS_MESSAGE = "There is no incidence rate analysis with id = %d."; + private static final EntityGraph ANALYSIS_WITH_EXECUTION_INFO = NamedEntityGraph.loading("IncidenceRateAnalysis.withExecutionInfoList"); + + private final IRAnalysisQueryBuilder queryBuilder; + + @Value("${security.defaultGlobalReadPermissions}") + private boolean defaultGlobalReadPermissions; + + @Autowired + private IncidenceRateAnalysisRepository irAnalysisRepository; + + @Autowired + private IRExecutionInfoRepository irExecutionInfoRepository; + + @Autowired + private UserRepository userRepository; + + @Autowired + private JobService jobService; + + @Autowired + private SourceService sourceService; + + @Autowired + private GenerationUtils generationUtils; + + @Autowired + ConversionService conversionService; + + @Autowired + private ObjectMapper objectMapper; + + @Autowired 
+ private CohortDefinitionRepository cohortDefinitionRepository; + + @Autowired + private DesignImportService designImportService; + + @Autowired + private IRChecker checker; + + @Autowired + private AuthorizationService authorizationService; + + @Autowired + private VersionService versionService; + + public IRAnalysisService(final ObjectMapper objectMapper) { + + this.queryBuilder = new IRAnalysisQueryBuilder(objectMapper); + } + + private ExecutionInfo findExecutionInfoBySourceId(Collection infoList, Integer sourceId) { + for (ExecutionInfo info : infoList) { + if (sourceId.equals(info.getId().getSourceId())) { + return info; + } + } + return null; + } + + public static class StratifyReportItem { + public long bits; + public long totalPersons; + public long timeAtRisk; + public long cases; + } + + public static class GenerateSqlRequest { + public GenerateSqlRequest() { + } + + @JsonProperty("analysisId") + public Integer analysisId; + + @JsonProperty("expression") + public IncidenceRateAnalysisExpression expression; + + @JsonProperty("options") + public IRAnalysisQueryBuilder.BuildExpressionQueryOptions options; + + } + + private final RowMapper summaryMapper = (rs, rowNum) -> { + AnalysisReport.Summary summary = new AnalysisReport.Summary(); + summary.targetId = rs.getInt("target_id"); + summary.outcomeId = rs.getInt("outcome_id"); + summary.totalPersons = rs.getLong("person_count"); + summary.timeAtRisk = rs.getLong("time_at_risk"); + summary.cases = rs.getLong("cases"); + return summary; + }; + + private List getAnalysisSummaryList(int id, Source source) { + String tqName = "tableQualifier"; + String tqValue = source.getTableQualifier(SourceDaimon.DaimonType.Results); + String sql = "select target_id, outcome_id, sum(person_count) as person_count, sum(time_at_risk) as time_at_risk," + + " sum(cases) as cases from @tableQualifier.ir_analysis_result where analysis_id = @id GROUP BY target_id, outcome_id"; + PreparedStatementRenderer psr = new 
PreparedStatementRenderer(source, sql, tqName, tqValue, "id", whitelist(id)); + return getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), summaryMapper); + } + + private final RowMapper strataRuleStatisticMapper = (rs, rowNum) -> { + AnalysisReport.StrataStatistic statistic = new AnalysisReport.StrataStatistic(); + + statistic.id = rs.getInt("strata_sequence"); + statistic.name = rs.getString("name"); + statistic.targetId = rs.getInt("target_id"); + statistic.outcomeId = rs.getInt("outcome_id"); + + statistic.totalPersons = rs.getLong("person_count"); + statistic.timeAtRisk = rs.getLong("time_at_risk"); + statistic.cases = rs.getLong("cases"); + return statistic; + }; + + private List getStrataStatistics(int id, Source source) { + String resultsTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results); + PreparedStatementRenderer psr = new PreparedStatementRenderer(source, STRATA_STATS_QUERY_TEMPLATE, "results_database_schema", resultsTableQualifier, "analysis_id", whitelist(id)); + return getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), strataRuleStatisticMapper); + } + + private int countSetBits(long n) { + int count = 0; + while (n > 0) { + n &= (n - 1); + count++; + } + return count; + } + + private String formatBitMask(Long n, int size) { + return StringUtils.reverse(StringUtils.leftPad(Long.toBinaryString(n), size, "0")); + } + + private final RowMapper stratifyResultsMapper = (rs, rowNum) -> { + StratifyReportItem resultItem = new StratifyReportItem(); + resultItem.bits = rs.getLong("strata_mask"); + resultItem.totalPersons = rs.getLong("person_count"); + resultItem.timeAtRisk = rs.getLong("time_at_risk"); + resultItem.cases = rs.getLong("cases"); + return resultItem; + }; + + private String getStrataTreemapData(int analysisId, int targetId, int outcomeId, int inclusionRuleCount, Source source) { + String resultsTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results); + + String query = 
"select strata_mask, person_count, time_at_risk, cases from @resultsTableQualifier.ir_analysis_result where analysis_id = @analysis_id and target_id = @target_id and outcome_id = @outcome_id"; + Object[] paramValues = {analysisId, targetId, outcomeId}; + String[] params = {"analysis_id", "target_id", "outcome_id"}; + PreparedStatementRenderer psr = new PreparedStatementRenderer(source, query, "resultsTableQualifier", resultsTableQualifier, params, paramValues, SessionUtils.sessionId()); + // [0] is the inclusion rule bitmask, [1] is the count of the match + List items = getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), stratifyResultsMapper); + + Map> groups = new HashMap<>(); + for (StratifyReportItem item : items) { + int bitsSet = countSetBits(item.bits); + if (!groups.containsKey(bitsSet)) { + groups.put(bitsSet, new ArrayList<>()); + } + groups.get(bitsSet).add(item); + } + + StringBuilder treemapData = new StringBuilder("{\"name\" : \"Everyone\", \"children\" : ["); + + List groupKeys = new ArrayList<>(groups.keySet()); + Collections.sort(groupKeys); + Collections.reverse(groupKeys); + + int groupCount = 0; + // create a nested treemap data where more matches (more bits set in string) appear higher in the hierarchy) + for (Integer groupKey : groupKeys) { + if (groupCount > 0) { + treemapData.append(","); + } + + treemapData.append(String.format("{\"name\" : \"Group %d\", \"children\" : [", groupKey)); + + int groupItemCount = 0; + for (StratifyReportItem groupItem : groups.get(groupKey)) { + if (groupItemCount > 0) { + treemapData.append(","); + } + + //sb_treemap.Append("{\"name\": \"" + cohort_identifer + "\", \"size\": " + cohorts[cohort_identifer].ToString() + "}"); + treemapData.append(String.format("{\"name\": \"%s\", \"size\": %d, \"cases\": %d, \"timeAtRisk\": %d }", formatBitMask(groupItem.bits, inclusionRuleCount), groupItem.totalPersons, groupItem.cases, groupItem.timeAtRisk)); + groupItemCount++; + } + groupCount++; + } + + 
treemapData.append(StringUtils.repeat("]}", groupCount + 1)); + + return treemapData.toString(); + } + + @Override + public List getIRAnalysisList() { + return getTransactionTemplate().execute(transactionStatus -> { + Iterable analysisList = this.irAnalysisRepository.findAll(); + return StreamSupport.stream(analysisList.spliterator(), false) + //.filter(!defaultGlobalReadPermissions ? entity -> authorizationService.hasReadAccess(entity) : entity -> true) + .map(analysis -> { + IRAnalysisShortDTO dto = conversionService.convert(analysis, IRAnalysisShortDTO.class); + // authorizationService.fillWriteAccess(analysis, dto); + // authorizationService.fillReadAccess(analysis, dto); + return dto; + }) + .collect(Collectors.toList()); + }); + } + + @Override + @Transactional + public int getCountIRWithSameName(final int id, String name) { + return irAnalysisRepository.getCountIRWithSameName(id, name); + } + + @Override + @Transactional + @PreAuthorize("isPermitted('create:incidence')") + public IRAnalysisDTO createAnalysis(IRAnalysisDTO analysis) { + Date currentTime = Calendar.getInstance().getTime(); + + UserEntity user = userRepository.findByLogin(authorizationService.getCurrentUser().login()).orElseThrow(); + // it might be possible to leverage saveAnalysis() but not sure how to pull the auto ID from + // the DB to pass it into saveAnalysis (since saveAnalysis does a findOne() at the start). + // If there's a way to get the Entity into the persistence manager so findOne() returns this newly created entity + // then we could create the entity here (without persist) and then call saveAnalysis within the same Tx. 
+ IncidenceRateAnalysis newAnalysis = new IncidenceRateAnalysis(); + newAnalysis.setName(StringUtils.trim(analysis.getName())) + .setDescription(analysis.getDescription()); + newAnalysis.setCreatedBy(user); + newAnalysis.setCreatedDate(currentTime); + if (analysis.getExpression() != null) { + IncidenceRateAnalysisDetails details = new IncidenceRateAnalysisDetails(newAnalysis); + newAnalysis.setDetails(details); + details.setExpression(analysis.getExpression()); + } + else { + newAnalysis.setDetails(null); + } + IncidenceRateAnalysis createdAnalysis = this.irAnalysisRepository.save(newAnalysis); + return conversionService.convert(createdAnalysis, IRAnalysisDTO.class); + } + + @PreAuthorize("isOwner(#id, INCIDENCE_RATE) or isPermitted('read:incidence') or isPermitted('write:incidence') or hasEntityAccess(#id, INCIDENCE_RATE, READ)") + @Override + @Transactional + public IRAnalysisDTO getAnalysis(final int id) { + return getTransactionTemplate().execute(transactionStatus -> { + IncidenceRateAnalysis a = this.irAnalysisRepository.findById(id).orElseThrow(); + ExceptionUtils.throwNotFoundExceptionIfNull(a, String.format(NO_INCIDENCE_RATE_ANALYSIS_MESSAGE, id)); + return conversionService.convert(a, IRAnalysisDTO.class); + }); + } + + @PreAuthorize("isPermitted('create:incidence')") + @Override + public IRAnalysisDTO doImport(final IRAnalysisDTO dto) { + dto.setTags(null); + if (dto.getExpression() != null) { + try { + IncidenceRateAnalysisExportExpression expression = objectMapper.readValue( + dto.getExpression(), IncidenceRateAnalysisExportExpression.class); + // Create lists of ids from list of cohort definitions because we do not store + // cohort definitions in expression now + fillCohortIds(expression.targetIds, expression.targetCohorts); + fillCohortIds(expression.outcomeIds, expression.outcomeCohorts); + String strExpression = objectMapper.writeValueAsString(new IncidenceRateAnalysisExpression(expression)); + dto.setExpression(strExpression); + } catch (Exception 
e) { + log.error("Error converting expression to object", e); + throw new ResponseStatusException(HttpStatus.INTERNAL_SERVER_ERROR); + } + } + dto.setName(NameUtils.getNameWithSuffix(dto.getName(), this::getNamesLike)); + return createAnalysis(dto); + } + + + @PreAuthorize("isOwner(#id, INCIDENCE_RATE) or isPermitted('read:incidence') or isPermitted('write:incidence') or hasEntityAccess(#id, INCIDENCE_RATE, READ)") + @Override + @Transactional + public IRAnalysisDTO export(final Integer id) { + IncidenceRateAnalysis analysis = this.irAnalysisRepository.findById(id).orElseThrow(); + ExceptionUtils.throwNotFoundExceptionIfNull(analysis, String.format(NO_INCIDENCE_RATE_ANALYSIS_MESSAGE, id)); + + try { + IncidenceRateAnalysisExportExpression expression = objectMapper.readValue( + analysis.getDetails().getExpression(), IncidenceRateAnalysisExportExpression.class); + + // Cohorts are not stored in expression now - create lists of cohorts from + // lists of their ids + fillCohorts(expression.outcomeIds, expression.outcomeCohorts); + fillCohorts(expression.targetIds, expression.targetCohorts); + expression.outcomeCohorts.forEach(ExportUtil::clearCreateAndUpdateInfo); + expression.targetCohorts.forEach(ExportUtil::clearCreateAndUpdateInfo); + + String strExpression = objectMapper.writeValueAsString(expression); + analysis.getDetails().setExpression(strExpression); + } catch (Exception e) { + log.error("Error converting expression to object", e); + throw new ResponseStatusException(HttpStatus.INTERNAL_SERVER_ERROR); + } + IRAnalysisDTO irAnalysisDTO = conversionService.convert(analysis, IRAnalysisDTO.class); + ExportUtil.clearCreateAndUpdateInfo(irAnalysisDTO); + + return irAnalysisDTO; + } + + @PreAuthorize("isOwner(#id, INCIDENCE_RATE) or isPermitted('write:incidence') or hasEntityAccess(#id, INCIDENCE_RATE, WRITE)") + @Override + @Transactional + public IRAnalysisDTO saveAnalysis(final int id, IRAnalysisDTO analysis) { + Date currentTime = 
Calendar.getInstance().getTime(); + + saveVersion(id); + + UserEntity user = userRepository.findByLogin(authorizationService.getCurrentUser().login()).orElseThrow(); + IncidenceRateAnalysis updatedAnalysis = this.irAnalysisRepository.findById(id).orElseThrow(); + updatedAnalysis.setName(StringUtils.trim(analysis.getName())) + .setDescription(analysis.getDescription()); + updatedAnalysis.setModifiedBy(user); + updatedAnalysis.setModifiedDate(currentTime); + + if (analysis.getExpression() != null) { + + IncidenceRateAnalysisDetails details = updatedAnalysis.getDetails(); + if (details == null) { + details = new IncidenceRateAnalysisDetails(updatedAnalysis); + updatedAnalysis.setDetails(details); + } + details.setExpression(analysis.getExpression()); + } + else + updatedAnalysis.setDetails(null); + + this.irAnalysisRepository.save(updatedAnalysis); + return getAnalysis(id); + } + + @PreAuthorize("(isOwner(#analysisId, INCIDENCE_RATE) or isPermitted(anyOf('read:incidence','write:incidence')) or hasEntityAccess(#analysisId, INCIDENCE_RATE, READ)) and (isPermitted('write:source') or hasSourceAccess(#sourceKey, WRITE))") + @Override + public JobExecutionResource performAnalysis(final int analysisId, final String sourceKey) { + IRAnalysisDTO irAnalysisDTO = getAnalysis(analysisId); + CheckResult checkResult = runDiagnostics(irAnalysisDTO); + if (checkResult.hasCriticalErrors()) { + throw new RuntimeException("Cannot be generated due to critical errors in design. 
Call 'check' service for further details"); + } + + Date startTime = Calendar.getInstance().getTime(); + + Source source = this.getSourceRepository().findBySourceKey(sourceKey); + + ExceptionUtils.throwNotFoundExceptionIfNull(source, String.format("There is no source with sourceKey = %s", sourceKey)); + + DefaultTransactionDefinition requresNewTx = new DefaultTransactionDefinition(); + requresNewTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW); + + TransactionStatus initStatus = this.getTransactionTemplate().getTransactionManager().getTransaction(requresNewTx); + + IncidenceRateAnalysis analysis = this.irAnalysisRepository.findOneWithExecutionsOnExistingSources(analysisId, ANALYSIS_WITH_EXECUTION_INFO); + + ExecutionInfo analysisInfo = findExecutionInfoBySourceId(analysis.getExecutionInfoList(), source.getSourceId()); + if (analysisInfo != null) { + if (analysisInfo.getStatus() != GenerationStatus.COMPLETE) + return null; // Exit execution, another process has started it. 
+ } + else { + analysisInfo = new ExecutionInfo(analysis, source); + analysis.getExecutionInfoList().add(analysisInfo); + } + + analysisInfo.setStatus(GenerationStatus.PENDING) + .setStartTime(startTime) + .setExecutionDuration(null); + + this.irAnalysisRepository.save(analysis); + + this.getTransactionTemplate().getTransactionManager().commit(initStatus); + + JobParametersBuilder builder = new JobParametersBuilder(); + builder.addString(JOB_NAME, String.format("IR Analysis: %d: %s (%s)", analysis.getId(), source.getSourceName(), source.getSourceKey())); + builder.addString(ANALYSIS_ID, String.valueOf(analysisId)); + builder.addString(SOURCE_ID, String.valueOf(source.getSourceId())); + + SimpleJobBuilder generateIrJob = generationUtils.buildJobForCohortBasedAnalysisTasklet( + GENERATE_IR_ANALYSIS, + source, + builder, + getSourceJdbcTemplate(source), + chunkContext -> { + Integer irId = Integer.valueOf(chunkContext.getStepContext().getJobParameters().get(ANALYSIS_ID).toString()); + IncidenceRateAnalysis ir = this.irAnalysisRepository.findById(irId).orElseThrow(); + IncidenceRateAnalysisExpression expression = Utils.deserialize(ir.getDetails().getExpression(), IncidenceRateAnalysisExpression.class); + return Stream.concat( + expression.targetIds.stream(), + expression.outcomeIds.stream() + ).map(id -> cohortDefinitionRepository.findOneWithDetail(id)) + .collect(Collectors.toList()); + }, + new IRAnalysisTasklet(getSourceJdbcTemplate(source), getTransactionTemplate(), irAnalysisRepository, sourceService, queryBuilder, objectMapper) + ); + + generateIrJob.listener(new IRAnalysisInfoListener(getTransactionTemplate(), irAnalysisRepository)); + + final JobParameters jobParameters = builder.toJobParameters(); + + return jobService.runJob(generateIrJob.build(), jobParameters); + } + + @PreAuthorize("(isOwner(#analysisId, INCIDENCE_RATE) or isPermitted(anyOf('read:incidence','write:incidence')) or hasEntityAccess(#analysisId, INCIDENCE_RATE, READ)) and 
(isPermitted('write:source') or hasSourceAccess(#sourceKey, WRITE))") + @Override + public void cancelAnalysis(int analysisId, String sourceKey) { + + Source source = getSourceRepository().findBySourceKey(sourceKey); + jobService.cancelJobExecution(j -> { + JobParameters jobParameters = j.getJobParameters(); + String jobName = j.getJobInstance().getJobName(); + return Objects.equals(jobParameters.getString(ANALYSIS_ID), String.valueOf(analysisId)) + && Objects.equals(jobParameters.getString(SOURCE_ID), String.valueOf(source.getSourceId())) + && Objects.equals(NAME, jobName); + }); + } + + @Override + @PreAuthorize("isOwner(#id, INCIDENCE_RATE) or isPermitted('read:incidence') or isPermitted('write:incidence') or hasEntityAccess(#id, INCIDENCE_RATE, READ)") + @Transactional(readOnly = true) + public List getAnalysisInfo(final int id) { + + List executionInfoList = irExecutionInfoRepository.findByAnalysisId(id); + return executionInfoList.stream().map(ei -> { + AnalysisInfoDTO info = new AnalysisInfoDTO(); + info.setExecutionInfo(ei); + return info; + }).collect(Collectors.toList()); + } + + @Override + @PreAuthorize("(isOwner(#id, INCIDENCE_RATE) or isPermitted('read:incidence') or isPermitted('write:incidence') or hasEntityAccess(#id, INCIDENCE_RATE, READ)) and (isPermitted(anyOf('read:source','write:source')) or hasSourceAccess(#sourceKey, READ))") + @Transactional(readOnly = true) + public AnalysisInfoDTO getAnalysisInfo(int id, String sourceKey) { + + Source source = sourceService.findBySourceKey(sourceKey); + ExceptionUtils.throwNotFoundExceptionIfNull(source, String.format("There is no source with sourceKey = %s", sourceKey)); + AnalysisInfoDTO info = new AnalysisInfoDTO(); + List executionInfoList = irExecutionInfoRepository.findByAnalysisId(id); + info.setExecutionInfo(executionInfoList.stream().filter(i -> Objects.equals(i.getSource(), source)) + .findFirst().orElse(null)); + try{ + if (Objects.nonNull(info.getExecutionInfo()) && 
Objects.equals(info.getExecutionInfo().getStatus(), GenerationStatus.COMPLETE) + && info.getExecutionInfo().getIsValid()) { + info.setSummaryList(getAnalysisSummaryList(id, source)); + } + }catch (Exception e) { + log.error("Error getting IR Analysis summary list", e); + throw new ResponseStatusException(HttpStatus.INTERNAL_SERVER_ERROR, e.getMessage(), e); + } + return info; + } + + @Override + @PreAuthorize("(isOwner(#id, INCIDENCE_RATE) or isPermitted('read:incidence') or isPermitted('write:incidence') or hasEntityAccess(#id, INCIDENCE_RATE, READ)) and (isPermitted(anyOf('read:source','write:source')) or hasSourceAccess(#sourceKey, READ))") + @Transactional + public AnalysisReport getAnalysisReport(final int id, final String sourceKey, final int targetId, final int outcomeId ) { + + Source source = this.getSourceRepository().findBySourceKey(sourceKey); + + AnalysisReport.Summary summary = IterableUtils.find(getAnalysisSummaryList(id, source), summary12 -> ((summary12.targetId == targetId) && (summary12.outcomeId == outcomeId))); + + Collection strataStats = CollectionUtils.select(getStrataStatistics(id, source), + summary1 -> ((summary1.targetId == targetId) && (summary1.outcomeId == outcomeId))); + String treemapData = getStrataTreemapData(id, targetId, outcomeId, strataStats.size(), source); + + AnalysisReport report = new AnalysisReport(); + report.summary = summary; + report.stratifyStats = new ArrayList<>(strataStats); + report.treemapData = treemapData; + + return report; + } + + @Override + public GenerateSqlResult generateSql(GenerateSqlRequest request) { + IRAnalysisQueryBuilder.BuildExpressionQueryOptions options = request.options; + GenerateSqlResult result = new GenerateSqlResult(); + if (options == null) { + options = new IRAnalysisQueryBuilder.BuildExpressionQueryOptions(); + } + String expressionSql = queryBuilder.buildAnalysisQuery(request.expression, request.analysisId, options); + result.templateSql = SqlRender.renderSql(expressionSql, null, 
null); + + return result; + } + + @Override + public CheckResult runDiagnostics(IRAnalysisDTO irAnalysisDTO){ + + return new CheckResult(checker.check(irAnalysisDTO)); + } + + @Override + @Transactional + @PreAuthorize("(isOwner(#id, INCIDENCE_RATE) or isPermitted(anyOf('read:incidence','write:incidence')) or hasEntityAccess(#id, INCIDENCE_RATE, READ)) and isPermitted('create:incidence')") + public IRAnalysisDTO copy(final int id) { + IRAnalysisDTO analysis = getAnalysis(id); + analysis.setTags(null); + analysis.setId(null); // clear the ID + analysis.setName(getNameForCopy(analysis.getName())); + return createAnalysis(analysis); + } + + + @Override + @Transactional + @PreAuthorize("isOwner(#id, INCIDENCE_RATE) or isPermitted('read:incidence') or isPermitted('write:incidence') or hasEntityAccess(#id, INCIDENCE_RATE, READ)") + public ResponseEntity export(final int id) { + + Map fileList = new HashMap<>(); + Map distTypeLookup = ImmutableMap.of(1, "TAR", 2, "TTO"); + + try { + IncidenceRateAnalysis analysis = this.irAnalysisRepository.findById(id).orElseThrow(); + Set executions = analysis.getExecutionInfoList(); + + fileList.put("analysisDefinition.json", analysis.getDetails().getExpression()); + + // sequentially return results of IR calculation. In Spring 1.4.2, we can utilize @Async operations to do this in parallel. 
+ // store results in single CSV file + ArrayList summaryLines = new ArrayList<>(); + ArrayList strataLines = new ArrayList<>(); + ArrayList distLines = new ArrayList<>(); + + executions = executions.stream().filter(e -> this.isSourceAvailable(e.getSource())).collect(Collectors.toSet()); + for (ExecutionInfo execution : executions) + { + Source source = execution.getSource(); + String resultsTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results); + + // get the summary data + List summaryList = getAnalysisSummaryList(id, source); + if (summaryLines.isEmpty()) + { + summaryLines.add("db_id#targetId#outcomeId#total#timeAtRisk#cases".split("#")); + } + for (AnalysisReport.Summary summary : summaryList) + { + summaryLines.add(new String[] {source.getSourceKey(),String.valueOf(summary.targetId), String.valueOf(summary.outcomeId), String.valueOf(summary.totalPersons), String.valueOf(summary.timeAtRisk), String.valueOf(summary.cases)}); + } + + // get the strata results + List strataList = getStrataStatistics(id, source); + if (strataLines.isEmpty()) + { + strataLines.add("db_id#targetId#outcomeId#strata_id#strata_name#total#timeAtRisk#cases".split("#")); + } + for (AnalysisReport.StrataStatistic strata : strataList) + { + strataLines.add(new String[] {source.getSourceKey(),String.valueOf(strata.targetId), String.valueOf(strata.outcomeId),String.valueOf(strata.id), String.valueOf(strata.name), String.valueOf(strata.totalPersons), String.valueOf(strata.timeAtRisk), String.valueOf(strata.cases)}); + } + + // get the distribution data + String distQuery = String.format("select '%s' as db_id, target_id, outcome_id, strata_sequence, dist_type, total, avg_value, std_dev, min_value, p10_value, p25_value, median_value, p75_value, p90_value, max_value from %s.ir_analysis_dist where analysis_id = %d", source.getSourceKey(), resultsTableQualifier, id); + String translatedSql = SqlTranslate.translateSql(distQuery, source.getSourceDialect(), 
SessionUtils.sessionId(), resultsTableQualifier); + + this.getSourceJdbcTemplate(source).query(translatedSql, resultSet -> { + if (distLines.isEmpty()) { + ArrayList columnNames = new ArrayList<>(); + for(int i = 1; i <= resultSet.getMetaData().getColumnCount(); i++) { + columnNames.add(resultSet.getMetaData().getColumnName(i)); + } + distLines.add(columnNames.toArray(new String[0])); + } + ArrayList columnValues = new ArrayList<>(); + for(int i = 1; i <= resultSet.getMetaData().getColumnCount(); i++) { + switch (resultSet.getMetaData().getColumnName(i)) { + case "dist_type": + columnValues.add(distTypeLookup.get(resultSet.getInt(i))); + break; + default: + columnValues.add(resultSet.getString(i)); + break; + } + } + distLines.add(columnValues.toArray(new String[0])); + }); + } + + // Write report lines to CSV + StringWriter sw = null; + CSVWriter csvWriter = null; + + sw = new StringWriter(); + csvWriter = new CSVWriter(sw); + csvWriter.writeAll(summaryLines); + csvWriter.flush(); + fileList.put("ir_summary.csv", sw.getBuffer().toString()); + + sw = new StringWriter(); + csvWriter = new CSVWriter(sw); + csvWriter.writeAll(strataLines); + csvWriter.flush(); + fileList.put("ir_strata.csv", sw.getBuffer().toString()); + + sw = new StringWriter(); + csvWriter = new CSVWriter(sw); + csvWriter.writeAll(distLines); + csvWriter.flush(); + fileList.put("ir_dist.csv", sw.getBuffer().toString()); + + // build zip output + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + ZipOutputStream zos = new ZipOutputStream(baos); + + for(String fileName : fileList.keySet()) + { + ZipEntry resultsEntry = new ZipEntry(fileName); + zos.putNextEntry(resultsEntry); + zos.write(fileList.get(fileName).getBytes()); + } + + zos.closeEntry(); + zos.close(); + baos.flush(); + baos.close(); + + byte[] payload = baos.toByteArray(); + HttpHeaders headers = new HttpHeaders(); + headers.add(HttpHeaders.CONTENT_DISPOSITION, String.format("attachment; filename=\"%s\"", "ir_analysis_" + id + 
".zip")); + return ResponseEntity.ok().headers(headers).contentType(MediaType.APPLICATION_OCTET_STREAM).body(payload); + } catch (Exception ex) { + throw new ResponseStatusException(HttpStatus.INTERNAL_SERVER_ERROR, ex.getMessage(), ex); + } + + } + + @Override + @Transactional + @PreAuthorize("isOwner(#id, INCIDENCE_RATE) or isPermitted('write:incidence') or hasEntityAccess(#id, INCIDENCE_RATE, WRITE)") + public void delete(final int id) { + irAnalysisRepository.deleteById(id); + } + + @Override + @Transactional + @PreAuthorize("(isOwner(#id, INCIDENCE_RATE) or isPermitted(anyOf('read:incidence','write:incidence')) or hasEntityAccess(#id, INCIDENCE_RATE, READ)) and (isPermitted('write:source') or hasSourceAccess(#sourceKey, WRITE))") + public void deleteInfo(final int id, final String sourceKey) { + IncidenceRateAnalysis analysis = irAnalysisRepository.findById(id).orElseThrow(); + ExecutionInfo itemToRemove = null; + for (ExecutionInfo info : analysis.getExecutionInfoList()) + { + if (info.getSource().getSourceKey().equals(sourceKey)) + itemToRemove = info; + } + + if (itemToRemove != null) + analysis.getExecutionInfoList().remove(itemToRemove); + + irAnalysisRepository.save(analysis); + } + + @Override + @Transactional + @PreAuthorize("isOwner(#id, INCIDENCE_RATE) or isPermitted('admin:tags') or isPermitted('write:incidence') or hasEntityAccess(#id, INCIDENCE_RATE, WRITE)") + public void assignTag(final Integer id, final int tagId) { + IncidenceRateAnalysis entity = irAnalysisRepository.findById(id).orElseThrow(); + assignTag(entity, tagId); + } + + @Override + @Transactional + @PreAuthorize("isOwner(#id, INCIDENCE_RATE) or isPermitted('admin:tags') or isPermitted('write:incidence') or hasEntityAccess(#id, INCIDENCE_RATE, WRITE)") + public void unassignTag(final Integer id, final int tagId) { + IncidenceRateAnalysis entity = irAnalysisRepository.findById(id).orElseThrow(); + unassignTag(entity, tagId); + } + + @Override + @Transactional + 
@PreAuthorize("isOwner(#id, INCIDENCE_RATE) or isPermitted('admin:tags') or isPermitted('write:incidence') or hasEntityAccess(#id, INCIDENCE_RATE, WRITE)") + public void assignPermissionProtectedTag(final int id, final int tagId) { + assignTag(id, tagId); + } + + @Override + @Transactional + @PreAuthorize("isOwner(#id, INCIDENCE_RATE) or isPermitted('admin:tags') or isPermitted('write:incidence') or hasEntityAccess(#id, INCIDENCE_RATE, WRITE)") + public void unassignPermissionProtectedTag(final int id, final int tagId) { + unassignTag(id, tagId); + } + + @Override + @PreAuthorize("isOwner(#id, INCIDENCE_RATE) or isPermitted('read:incidence') or isPermitted('write:incidence') or hasEntityAccess(#id, INCIDENCE_RATE, READ)") + public List getVersions(long id) { + List versions = versionService.getVersions(VersionType.INCIDENCE_RATE, id); + return versions.stream() + .map(v -> conversionService.convert(v, VersionDTO.class)) + .collect(Collectors.toList()); + } + + @Override + @PreAuthorize("isOwner(#id, INCIDENCE_RATE) or isPermitted('read:incidence') or isPermitted('write:incidence') or hasEntityAccess(#id, INCIDENCE_RATE, READ)") + @Transactional + public IRVersionFullDTO getVersion(int id, int version) { + checkVersion(id, version, false); + IRVersion irVersion = versionService.getById(VersionType.INCIDENCE_RATE, id, version); + return conversionService.convert(irVersion, IRVersionFullDTO.class); + } + + @Override + @PreAuthorize("isOwner(#id, INCIDENCE_RATE) or isPermitted('write:incidence') or hasEntityAccess(#id, INCIDENCE_RATE, WRITE)") + @Transactional + public VersionDTO updateVersion(int id, int version, VersionUpdateDTO updateDTO) { + checkVersion(id, version); + updateDTO.setAssetId(id); + updateDTO.setVersion(version); + IRVersion updated = versionService.update(VersionType.INCIDENCE_RATE, updateDTO); + + return conversionService.convert(updated, VersionDTO.class); + } + + @Override + @PreAuthorize("isOwner(#id, INCIDENCE_RATE) or 
isPermitted('write:incidence') or hasEntityAccess(#id, INCIDENCE_RATE, WRITE)") + @Transactional + public void deleteVersion(int id, int version) { + checkVersion(id, version); + versionService.delete(VersionType.INCIDENCE_RATE, id, version); + } + + @Override + @PreAuthorize("(isOwner(#id, INCIDENCE_RATE) or isPermitted(anyOf('read:incidence','write:incidence')) or hasEntityAccess(#id, INCIDENCE_RATE, READ)) and isPermitted('create:incidence')") + @Transactional + public IRAnalysisDTO copyAssetFromVersion(int id, int version) { + checkVersion(id, version, false); + IRVersion irVersion = versionService.getById(VersionType.INCIDENCE_RATE, id, version); + IRVersionFullDTO fullDTO = conversionService.convert(irVersion, IRVersionFullDTO.class); + + IRAnalysisDTO dto = fullDTO.getEntityDTO(); + dto.setId(null); + dto.setTags(null); + dto.setName(NameUtils.getNameForCopy(dto.getName(), this::getNamesLike, + irAnalysisRepository.findByName(dto.getName()))); + return createAnalysis(dto); + } + + @Override + @Transactional + public List listByTags(TagNameListRequestDTO requestDTO) { + if (requestDTO == null || requestDTO.getNames() == null || requestDTO.getNames().isEmpty()) { + return Collections.emptyList(); + } + List names = requestDTO.getNames().stream() + .map(name -> name.toLowerCase(Locale.ROOT)) + .collect(Collectors.toList()); + List entities = irAnalysisRepository.findByTags(names); + return listByTags(entities, names, IRAnalysisDTO.class); + } + + @PostConstruct + public void init() { + + invalidateIRExecutions(); + } + + @Override + public String getJobName() { + return NAME; + } + + @Override + public String getExecutionFoldingKey() { + return ANALYSIS_ID; + } + + private void invalidateIRExecutions() { + + getTransactionTemplateRequiresNew().execute(status -> { + + List executions = irExecutionInfoRepository.findByStatusIn(INVALIDATE_STATUSES); + invalidateExecutions(executions); + irExecutionInfoRepository.saveAll(executions); + return null; + }); + } + + 
private String getNameForCopy(String dtoName) { + return NameUtils.getNameForCopy(dtoName, this::getNamesLike, irAnalysisRepository.findByName(dtoName)); + } + + private List getNamesLike(String name) { + return irAnalysisRepository.findAllByNameStartsWith(name).stream().map(IncidenceRateAnalysis::getName).collect(Collectors.toList()); + } + + private void fillCohorts(List outcomeIds, List cohortDefinitions) { + cohortDefinitions.clear(); + for (Integer cohortId : outcomeIds) { + CohortDefinitionEntity cohortDefinition = cohortDefinitionRepository.findById(cohortId).orElseThrow(); + if (Objects.isNull(cohortDefinition)) { + // Pass cohort without name to client if no cohort definition found + cohortDefinition = new CohortDefinitionEntity(); + cohortDefinition.setId(cohortId); + CohortDefinitionDetailsEntity details = new CohortDefinitionDetailsEntity(); + details.setCohortDefinition(cohortDefinition); + } + cohortDefinitions.add(conversionService.convert(cohortDefinition, CohortDTO.class)); + } + } + + private void fillCohortIds(List ids, List cohortDTOS) { + ids.clear(); + for(CohortDTO cohortDTO: cohortDTOS) { + CohortDefinitionEntity definition = conversionService.convert(cohortDTO, CohortDefinitionEntity.class); + definition = designImportService.persistCohortOrGetExisting(definition); + ids.add(definition.getId()); + } + cohortDTOS.clear(); + } + + + private boolean isSourceAvailable(Source source) { + boolean sourceAvailable = true; + try { + sourceService.checkConnection(source); + } catch (Exception e) { + log.error("cannot get connection to source with key {}", source.getSourceKey(), e); + sourceAvailable = false; + } + return sourceAvailable; + } + + private void checkVersion(int id, int version) { + checkVersion(id, version, true); + } + + private void checkVersion(int id, int version, boolean checkOwnerShip) { + Version irVersion = versionService.getById(VersionType.INCIDENCE_RATE, id, version); + ExceptionUtils.throwNotFoundExceptionIfNull(irVersion, + 
String.format("There is no incidence rates analysis version with id = %d.", version)); + + IncidenceRateAnalysis entity = this.irAnalysisRepository.findById(id).orElseThrow(); + if (checkOwnerShip) { + // TODO: Do we need check ownership under the new permission strategy + // authorizationService.isOwner(id, EntityType.INCIDENCE_RATE); + } + } + + private IRVersion saveVersion(int id) { + IncidenceRateAnalysis def = this.irAnalysisRepository.findById(id).orElseThrow(); + IRVersion version = conversionService.convert(def, IRVersion.class); + + UserEntity user = Objects.nonNull(def.getModifiedBy()) ? def.getModifiedBy() : def.getCreatedBy(); + Date versionDate = Objects.nonNull(def.getModifiedDate()) ? def.getModifiedDate() : def.getCreatedDate(); + version.setCreatedBy(user); + version.setCreatedDate(versionDate); + return versionService.create(VersionType.INCIDENCE_RATE, version); + } +} diff --git a/src/main/java/org/ohdsi/webapi/ircalc/IRAnalysisTasklet.java b/src/main/java/org/ohdsi/webapi/ircalc/IRAnalysisTasklet.java new file mode 100644 index 0000000000..628e040695 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/ircalc/IRAnalysisTasklet.java @@ -0,0 +1,109 @@ +/* + * Copyright 2015 Observational Health Data Sciences and Informatics [OHDSI.org]. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ohdsi.webapi.ircalc; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.ohdsi.sql.SqlSplit; +import org.ohdsi.sql.SqlTranslate; +import org.ohdsi.webapi.common.generation.CancelableTasklet; +import org.ohdsi.webapi.source.SourceService; +import org.ohdsi.webapi.source.Source; +import org.ohdsi.webapi.util.CancelableJdbcTemplate; +import org.ohdsi.webapi.util.PreparedStatementRenderer; +import org.ohdsi.webapi.util.SourceUtils; +import org.slf4j.LoggerFactory; +import org.springframework.batch.core.scope.context.ChunkContext; +import org.springframework.transaction.support.TransactionTemplate; + +import java.util.*; + +import static org.ohdsi.webapi.Constants.Params.*; + +/** + * + * @author Chris Knoll + */ +public class IRAnalysisTasklet extends CancelableTasklet { + + private final IRAnalysisQueryBuilder analysisQueryBuilder; + + private final IncidenceRateAnalysisRepository incidenceRateAnalysisRepository; + private final SourceService sourceService; + private final ObjectMapper objectMapper; + + public IRAnalysisTasklet( + final CancelableJdbcTemplate jdbcTemplate, + final TransactionTemplate transactionTemplate, + final IncidenceRateAnalysisRepository incidenceRateAnalysisRepository, + final SourceService sourceService, + final IRAnalysisQueryBuilder analysisQueryBuilder, + final ObjectMapper objectMapper) { + + super(LoggerFactory.getLogger(IRAnalysisTasklet.class), jdbcTemplate, transactionTemplate); + this.incidenceRateAnalysisRepository = incidenceRateAnalysisRepository; + this.sourceService = sourceService; + this.analysisQueryBuilder = analysisQueryBuilder; + this.objectMapper = objectMapper; + } + + protected String[] prepareQueries(ChunkContext chunkContext, CancelableJdbcTemplate jdbcTemplate) { + + Map jobParams = chunkContext.getStepContext().getJobParameters(); + + Integer sourceId = Integer.parseInt(jobParams.get(SOURCE_ID).toString()); + Source source = sourceService.findBySourceId(sourceId); + String 
oracleTempSchema = SourceUtils.getTempQualifier(source); + + Integer analysisId = Integer.valueOf(jobParams.get(ANALYSIS_ID).toString()); + String sessionId = jobParams.get(SESSION_ID).toString(); + try { + IncidenceRateAnalysis analysis = this.incidenceRateAnalysisRepository.findById(analysisId).orElseThrow(); + IncidenceRateAnalysisExpression expression = objectMapper.readValue(analysis.getDetails().getExpression(), IncidenceRateAnalysisExpression.class); + + IRAnalysisQueryBuilder.BuildExpressionQueryOptions options = new IRAnalysisQueryBuilder.BuildExpressionQueryOptions(); + options.cdmSchema = SourceUtils.getCdmQualifier(source); + options.resultsSchema = SourceUtils.getResultsQualifier(source); + options.vocabularySchema = SourceUtils.getVocabularyQualifier(source); + options.tempSchema = SourceUtils.getTempQualifier(source); + options.cohortTable = jobParams.get(TARGET_TABLE).toString(); + + String delete = "DELETE FROM @tableQualifier.ir_strata WHERE analysis_id = @analysis_id;"; + PreparedStatementRenderer psr = new PreparedStatementRenderer(source, delete, "tableQualifier", + options.resultsSchema, "analysis_id", analysisId); + jdbcTemplate.update(psr.getSql(), psr.getSetter()); + + String insert = "INSERT INTO @results_schema.ir_strata (analysis_id, strata_sequence, name, description) VALUES (@analysis_id,@strata_sequence,@name,@description)"; + + String [] params = {"analysis_id", "strata_sequence", "name", "description"}; + List strataRules = expression.strata; + for (int i = 0; i< strataRules.size(); i++) + { + StratifyRule r = strataRules.get(i); + psr = new PreparedStatementRenderer(source, insert, "results_schema", + options.resultsSchema, params, new Object[] { analysisId, i, r.name, r.description}); + jdbcTemplate.update(psr.getSql(), psr.getSetter()); + } + + String expressionSql = analysisQueryBuilder.buildAnalysisQuery(analysis, options); + + String translatedSql = SqlTranslate.translateSql(expressionSql, source.getSourceDialect(), sessionId, 
oracleTempSchema); + return SqlSplit.splitSql(translatedSql); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + +} \ No newline at end of file diff --git a/src/main/java/org/ohdsi/webapi/ircalc/IRExecutionInfoRepository.java b/src/main/java/org/ohdsi/webapi/ircalc/IRExecutionInfoRepository.java new file mode 100644 index 0000000000..4cfc038d08 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/ircalc/IRExecutionInfoRepository.java @@ -0,0 +1,15 @@ +package org.ohdsi.webapi.ircalc; + +import org.ohdsi.webapi.GenerationStatus; +import org.springframework.data.jpa.repository.Query; +import org.springframework.data.repository.CrudRepository; +import org.springframework.data.repository.query.Param; + +import java.util.List; + +public interface IRExecutionInfoRepository extends CrudRepository { + List findByStatus(GenerationStatus status); + List findByStatusIn(List statuses); + @Query("SELECT ei FROM IRAnalysisGenerationInfo ei JOIN Source s ON s.id = ei.source.id AND s.deletedDate IS NULL WHERE ei.analysis.id = :analysisId") + List findByAnalysisId(@Param("analysisId") Integer analysisId); +} diff --git a/src/main/java/org/ohdsi/webapi/ircalc/IncidenceRateAnalysis.java b/src/main/java/org/ohdsi/webapi/ircalc/IncidenceRateAnalysis.java new file mode 100644 index 0000000000..1b19b5f362 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/ircalc/IncidenceRateAnalysis.java @@ -0,0 +1,133 @@ +/* + * Copyright 2016 Observational Health Data Sciences and Informatics [OHDSI.org]. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ohdsi.webapi.ircalc; + +import java.io.Serializable; +import java.util.HashSet; +import java.util.Set; +import jakarta.persistence.*; + +import org.hibernate.annotations.GenericGenerator; +import org.hibernate.annotations.Parameter; +import org.ohdsi.webapi.model.CommonEntity; +import org.ohdsi.webapi.model.CommonEntityExt; +import org.ohdsi.webapi.tag.domain.Tag; + +/** + * + * @author Chris Knoll + */ + +@Entity(name = "IncidenceRateAnalysis") +@Table(name="ir_analysis") +@NamedEntityGraphs({ + @NamedEntityGraph( + name = "IncidenceRateAnalysis.withExecutionInfoList", + attributeNodes = @NamedAttributeNode("executionInfoList") + ) +}) +public class IncidenceRateAnalysis extends CommonEntityExt implements Serializable { + private static final long serialVersionUID = 1L; + + @Id + @GenericGenerator( + name = "ir_analysis_generator", + strategy = "org.hibernate.id.enhanced.SequenceStyleGenerator", + parameters = { + @Parameter(name = "sequence_name", value = "ir_analysis_sequence"), + @Parameter(name = "increment_size", value = "1") + } + ) + @GeneratedValue(generator = "ir_analysis_generator") + @Column(name="id") + @Access(AccessType.PROPERTY) + private Integer id; + + @Column(name="name") + private String name; + + @Column(name="description") + private String description; + + @OneToOne(cascade = CascadeType.ALL, fetch = FetchType.LAZY, optional=true, orphanRemoval = true, mappedBy="analysis") + @JoinColumn(name="id") + private IncidenceRateAnalysisDetails details; + + @OneToMany(fetch= FetchType.LAZY, cascade = CascadeType.ALL, mappedBy = "analysis", orphanRemoval=true) + private Set executionInfoList = new HashSet<>(); + + @ManyToMany(targetEntity = Tag.class, fetch = FetchType.LAZY) + @JoinTable(name = "ir_tag", + joinColumns = @JoinColumn(name = "asset_id", referencedColumnName = "id"), + inverseJoinColumns = @JoinColumn(name = "tag_id", 
referencedColumnName = "id")) + private Set tags; + + @Override + public Integer getId() { + return id; + } + + public IncidenceRateAnalysis setId(Integer id) { + this.id = id; + return this; + } + + public String getName() { + return name; + } + + public IncidenceRateAnalysis setName(String name) { + this.name = name; + return this; + } + + public String getDescription() { + return description; + } + + public IncidenceRateAnalysis setDescription(String description) { + this.description = description; + return this; + } + + public IncidenceRateAnalysisDetails getDetails() { + return details; + } + + public IncidenceRateAnalysis setDetails(IncidenceRateAnalysisDetails details) { + this.details = details; + return this; + } + + public Set getExecutionInfoList() { + return executionInfoList; + } + + public IncidenceRateAnalysis setExecutionInfoList(Set executionInfoList) { + this.executionInfoList = executionInfoList; + return this; + } + + @Override + public Set getTags() { + return tags; + } + + @Override + public void setTags(Set tags) { + this.tags = tags; + } +} diff --git a/src/main/java/org/ohdsi/webapi/ircalc/IncidenceRateAnalysisDetails.java b/src/main/java/org/ohdsi/webapi/ircalc/IncidenceRateAnalysisDetails.java new file mode 100644 index 0000000000..538bd817c0 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/ircalc/IncidenceRateAnalysisDetails.java @@ -0,0 +1,67 @@ +/* + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ohdsi.webapi.ircalc; + +import java.io.Serializable; +import jakarta.persistence.Entity; +import jakarta.persistence.Id; +import jakarta.persistence.JoinColumn; +import jakarta.persistence.Lob; +import jakarta.persistence.MapsId; +import jakarta.persistence.OneToOne; +import jakarta.persistence.Table; +import org.hibernate.annotations.JdbcTypeCode; +import org.hibernate.type.SqlTypes; + +/** + * + * Stores the LOB/CLOB portion of the cohort definition expression. + */ +@Entity(name = "IncidenceRateAnalysisDetails") +@Table(name="ir_analysis_details") +public class IncidenceRateAnalysisDetails implements Serializable { + + private static final long serialVersionUID = 1L; + + @Id + private Integer id; + + @MapsId + @OneToOne + @JoinColumn(name="id") + private IncidenceRateAnalysis analysis; + + @Lob + @JdbcTypeCode(SqlTypes.VARCHAR) + private String expression; + + protected IncidenceRateAnalysisDetails() {} + + public IncidenceRateAnalysisDetails(IncidenceRateAnalysis analysis) { + this.analysis = analysis; + } + + public String getExpression() { + return expression; + } + public IncidenceRateAnalysisDetails setExpression(String expression) { + this.expression = expression; + return this; + } + + public IncidenceRateAnalysis getAnalysis() { + return this.analysis; + } +} diff --git a/src/main/java/org/ohdsi/webapi/ircalc/IncidenceRateAnalysisExportExpression.java b/src/main/java/org/ohdsi/webapi/ircalc/IncidenceRateAnalysisExportExpression.java new file mode 100644 index 0000000000..1c65d7078a --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/ircalc/IncidenceRateAnalysisExportExpression.java @@ -0,0 +1,16 @@ +package org.ohdsi.webapi.ircalc; + +import com.fasterxml.jackson.annotation.JsonProperty; +import org.ohdsi.webapi.cohortdefinition.dto.CohortDTO; + +import java.util.ArrayList; +import java.util.List; + +public class IncidenceRateAnalysisExportExpression extends IncidenceRateAnalysisExpression { + + @JsonProperty("targetCohorts") + public 
List targetCohorts = new ArrayList<>(); + + @JsonProperty("outcomeCohorts") + public List outcomeCohorts = new ArrayList<>(); +} diff --git a/src/main/java/org/ohdsi/webapi/ircalc/IncidenceRateAnalysisExpression.java b/src/main/java/org/ohdsi/webapi/ircalc/IncidenceRateAnalysisExpression.java new file mode 100644 index 0000000000..12ac2e7b26 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/ircalc/IncidenceRateAnalysisExpression.java @@ -0,0 +1,63 @@ +/* + * Copyright 2016 Observational Health Data Sciences and Informatics [OHDSI.org]. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ohdsi.webapi.ircalc; + +import com.fasterxml.jackson.annotation.JsonProperty; +import org.ohdsi.circe.cohortdefinition.ConceptSet; +import org.ohdsi.webapi.cohortdefinition.dto.CohortDTO; + +import java.util.ArrayList; +import java.util.List; + +/** + * + * @author Chris Knoll + */ +public class IncidenceRateAnalysisExpression { + + @JsonProperty("ConceptSets") + public ConceptSet[] conceptSets = new ConceptSet[0]; + + @JsonProperty("targetIds") + public List targetIds = new ArrayList<>(); + + @JsonProperty("outcomeIds") + public List outcomeIds = new ArrayList<>(); + + @JsonProperty("timeAtRisk") + public TimeAtRisk timeAtRisk; + + @JsonProperty("studyWindow") + public DateRange studyWindow; + + @JsonProperty("strata") + public List strata = new ArrayList<>(); + + public IncidenceRateAnalysisExpression() { + + } + + public IncidenceRateAnalysisExpression(T source) { + + this.conceptSets = source.conceptSets; + this.targetIds = source.targetIds; + this.outcomeIds = source.outcomeIds; + this.timeAtRisk = source.timeAtRisk; + this.studyWindow = source.studyWindow; + this.strata = source.strata; + } + +} diff --git a/src/main/java/org/ohdsi/webapi/ircalc/IncidenceRateAnalysisRepository.java b/src/main/java/org/ohdsi/webapi/ircalc/IncidenceRateAnalysisRepository.java new file mode 100644 index 0000000000..9b2f89c947 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/ircalc/IncidenceRateAnalysisRepository.java @@ -0,0 +1,48 @@ +/* + * Copyright 2016 Observational Health Data Sciences and Informatics [OHDSI.org]. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ohdsi.webapi.ircalc; + +import com.cosium.spring.data.jpa.entity.graph.domain2.EntityGraph; +import com.cosium.spring.data.jpa.entity.graph.repository.EntityGraphCrudRepository; +import org.springframework.data.jpa.repository.Query; +import org.springframework.data.repository.query.Param; + +import java.util.List; +import java.util.Optional; + +/** + * + * @author Chris Knoll + */ +public interface IncidenceRateAnalysisRepository extends EntityGraphCrudRepository { + + @Query("SELECT ira FROM IncidenceRateAnalysis AS ira LEFT JOIN FETCH ira.details AS d") + Iterable findAll(); + + @Query("SELECT ira FROM IncidenceRateAnalysis AS ira LEFT JOIN ira.executionInfoList e LEFT JOIN Source s ON s.id = e.source.id AND s.deletedDate = NULL WHERE ira.id = ?1") + IncidenceRateAnalysis findOneWithExecutionsOnExistingSources(int id, EntityGraph entityGraph); + + @Query("SELECT COUNT(ira) FROM IncidenceRateAnalysis ira WHERE ira.name = :name and ira.id <> :id") + int getCountIRWithSameName(@Param("id") Integer id, @Param("name") String name); + + @Query("SELECT ira FROM IncidenceRateAnalysis ira WHERE ira.name LIKE ?1 ESCAPE '\\'") + List findAllByNameStartsWith(String pattern); + + Optional findByName(String name); + + @Query("SELECT DISTINCT ira FROM IncidenceRateAnalysis ira JOIN FETCH ira.tags t WHERE lower(t.name) in :tagNames") + List findByTags(@Param("tagNames") List tagNames); +} diff --git a/src/main/java/org/ohdsi/webapi/ircalc/StratifyRule.java b/src/main/java/org/ohdsi/webapi/ircalc/StratifyRule.java new file mode 100644 index 
0000000000..5d91832d1a --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/ircalc/StratifyRule.java @@ -0,0 +1,28 @@ +/* + * Copyright 2015 Observational Health Data Sciences and Informatics [OHDSI.org]. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.ohdsi.webapi.ircalc; + +import org.ohdsi.circe.cohortdefinition.CriteriaGroup; + +/** + * + * @author Chris Knoll + */ +public class StratifyRule { + public String name; + public String description; + public CriteriaGroup expression; +} diff --git a/src/main/java/org/ohdsi/webapi/ircalc/TimeAtRisk.java b/src/main/java/org/ohdsi/webapi/ircalc/TimeAtRisk.java new file mode 100644 index 0000000000..a8f12f52a7 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/ircalc/TimeAtRisk.java @@ -0,0 +1,27 @@ +/* + * Copyright 2016 Observational Health Data Sciences and Informatics [OHDSI.org]. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.ohdsi.webapi.ircalc; + +/** + * + * @author Chris Knoll + */ +public class TimeAtRisk { + + public FieldOffset start = new FieldOffset(); + public FieldOffset end = new FieldOffset(); + +} diff --git a/src/main/java/org/ohdsi/webapi/ircalc/converter/IRAnalysisToIRVersionConverter.java b/src/main/java/org/ohdsi/webapi/ircalc/converter/IRAnalysisToIRVersionConverter.java new file mode 100644 index 0000000000..5a502ce72f --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/ircalc/converter/IRAnalysisToIRVersionConverter.java @@ -0,0 +1,21 @@ +package org.ohdsi.webapi.ircalc.converter; + +import org.ohdsi.webapi.converter.BaseConversionServiceAwareConverter; +import org.ohdsi.webapi.ircalc.IncidenceRateAnalysis; +import org.ohdsi.webapi.versioning.domain.CohortVersion; +import org.ohdsi.webapi.versioning.domain.IRVersion; +import org.springframework.stereotype.Component; + +@Component +public class IRAnalysisToIRVersionConverter + extends BaseConversionServiceAwareConverter { + @Override + public IRVersion convert(IncidenceRateAnalysis source) { + IRVersion target = new IRVersion(); + target.setAssetId(source.getId()); + target.setDescription(source.getDescription()); + target.setAssetJson(source.getDetails().getExpression()); + + return target; + } +} diff --git a/src/main/java/org/ohdsi/webapi/ircalc/converter/IRVersionToIRAnalysisVersionFullDTOConverter.java b/src/main/java/org/ohdsi/webapi/ircalc/converter/IRVersionToIRAnalysisVersionFullDTOConverter.java new file mode 100644 index 0000000000..9f07073210 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/ircalc/converter/IRVersionToIRAnalysisVersionFullDTOConverter.java @@ -0,0 +1,77 @@ +package org.ohdsi.webapi.ircalc.converter; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.ohdsi.webapi.converter.BaseConversionServiceAwareConverter; +import org.ohdsi.webapi.exception.ConversionAtlasException; +import 
org.ohdsi.webapi.ircalc.IncidenceRateAnalysis; +import org.ohdsi.webapi.ircalc.IncidenceRateAnalysisDetails; +import org.ohdsi.webapi.ircalc.IncidenceRateAnalysisExportExpression; +import org.ohdsi.webapi.ircalc.IncidenceRateAnalysisRepository; +import org.ohdsi.webapi.ircalc.dto.IRAnalysisDTO; +import org.ohdsi.webapi.ircalc.dto.IRVersionFullDTO; +import org.ohdsi.webapi.cohortdefinition.CohortDefinitionService; +import org.ohdsi.webapi.util.ExceptionUtils; +import org.ohdsi.webapi.versioning.domain.IRVersion; +import org.ohdsi.webapi.versioning.dto.VersionDTO; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +@Component +public class IRVersionToIRAnalysisVersionFullDTOConverter + extends BaseConversionServiceAwareConverter { + private static final Logger log = LoggerFactory.getLogger(IRVersionToIRAnalysisVersionFullDTOConverter.class); + + @Autowired + private IncidenceRateAnalysisRepository analysisRepository; + + @Autowired + private ObjectMapper objectMapper; + + @Autowired + private CohortDefinitionService cohortService; + + @Override + public IRVersionFullDTO convert(IRVersion source) { + IncidenceRateAnalysis def = this.analysisRepository.findById(source.getAssetId().intValue()).orElse(null); + ExceptionUtils.throwNotFoundExceptionIfNull(def, + String.format("There is no incidence rate analysis with id = %d.", source.getAssetId())); + + IncidenceRateAnalysis entity = new IncidenceRateAnalysis(); + entity.setId(def.getId()); + entity.setTags(def.getTags()); + entity.setName(def.getName()); + entity.setDescription(source.getDescription()); + entity.setCreatedBy(def.getCreatedBy()); + entity.setCreatedDate(def.getCreatedDate()); + entity.setModifiedBy(def.getModifiedBy()); + entity.setModifiedDate(def.getModifiedDate()); + entity.setExecutionInfoList(def.getExecutionInfoList()); + + IncidenceRateAnalysisDetails details = new 
IncidenceRateAnalysisDetails(entity); + try { + IncidenceRateAnalysisExportExpression expression = objectMapper.readValue( + source.getAssetJson(), IncidenceRateAnalysisExportExpression.class); + expression.outcomeCohorts = cohortService.getCohortDTOs(expression.outcomeIds); + expression.targetCohorts = cohortService.getCohortDTOs(expression.targetIds); + if (expression.outcomeCohorts.size() != expression.outcomeIds.size() || + expression.targetCohorts.size() != expression.targetIds.size()) { + throw new ConversionAtlasException("Could not load version because it contains deleted cohorts"); + } + } catch (JsonProcessingException e) { + log.error("Error converting expression to object", e); + throw new RuntimeException(e); + } + details.setExpression(source.getAssetJson()); + + entity.setDetails(details); + + IRVersionFullDTO target = new IRVersionFullDTO(); + target.setVersionDTO(conversionService.convert(source, VersionDTO.class)); + target.setEntityDTO(conversionService.convert(entity, IRAnalysisDTO.class)); + + return target; + } +} diff --git a/src/main/java/org/ohdsi/webapi/ircalc/converter/IncidenceRateAnalysisToIRAnalysisDTOConverter.java b/src/main/java/org/ohdsi/webapi/ircalc/converter/IncidenceRateAnalysisToIRAnalysisDTOConverter.java new file mode 100644 index 0000000000..e97cd0ab40 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/ircalc/converter/IncidenceRateAnalysisToIRAnalysisDTOConverter.java @@ -0,0 +1,19 @@ +package org.ohdsi.webapi.ircalc.converter; + +import org.ohdsi.webapi.ircalc.IncidenceRateAnalysis; +import org.ohdsi.webapi.ircalc.dto.IRAnalysisDTO; +import org.springframework.stereotype.Component; + +@Component +public class IncidenceRateAnalysisToIRAnalysisDTOConverter extends IncidenceRateAnalysisToIRAnalysisShortDTOConverter { + @Override + protected IRAnalysisDTO createResultObject() { + return new IRAnalysisDTO(); + } + + @Override + protected void doConvert(IncidenceRateAnalysis source, IRAnalysisDTO target) { + 
super.doConvert(source, target); + target.setExpression(source.getDetails() != null ? source.getDetails().getExpression() : null); + } +} diff --git a/src/main/java/org/ohdsi/webapi/ircalc/converter/IncidenceRateAnalysisToIRAnalysisShortDTOConverter.java b/src/main/java/org/ohdsi/webapi/ircalc/converter/IncidenceRateAnalysisToIRAnalysisShortDTOConverter.java new file mode 100644 index 0000000000..060480ce6f --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/ircalc/converter/IncidenceRateAnalysisToIRAnalysisShortDTOConverter.java @@ -0,0 +1,22 @@ +package org.ohdsi.webapi.ircalc.converter; + +import org.apache.commons.lang3.StringUtils; +import org.ohdsi.webapi.ircalc.IncidenceRateAnalysis; +import org.ohdsi.webapi.ircalc.dto.IRAnalysisShortDTO; +import org.ohdsi.webapi.service.converters.BaseCommonEntityExtToDTOExtConverter; +import org.springframework.stereotype.Component; + +@Component +public class IncidenceRateAnalysisToIRAnalysisShortDTOConverter extends BaseCommonEntityExtToDTOExtConverter { + @Override + protected T createResultObject() { + return (T) new IRAnalysisShortDTO(); + } + + @Override + protected void doConvert(IncidenceRateAnalysis source, T target) { + target.setId(source.getId()); + target.setName(StringUtils.trim(source.getName())); + target.setDescription(source.getDescription()); + } +} diff --git a/src/main/java/org/ohdsi/webapi/service/dto/IRAnalysisDTO.java b/src/main/java/org/ohdsi/webapi/ircalc/dto/IRAnalysisDTO.java similarity index 87% rename from src/main/java/org/ohdsi/webapi/service/dto/IRAnalysisDTO.java rename to src/main/java/org/ohdsi/webapi/ircalc/dto/IRAnalysisDTO.java index 0866bdd2ca..d6134cd9d3 100644 --- a/src/main/java/org/ohdsi/webapi/service/dto/IRAnalysisDTO.java +++ b/src/main/java/org/ohdsi/webapi/ircalc/dto/IRAnalysisDTO.java @@ -1,4 +1,4 @@ -package org.ohdsi.webapi.service.dto; +package org.ohdsi.webapi.ircalc.dto; public class IRAnalysisDTO extends IRAnalysisShortDTO { diff --git 
a/src/main/java/org/ohdsi/webapi/service/dto/IRAnalysisShortDTO.java b/src/main/java/org/ohdsi/webapi/ircalc/dto/IRAnalysisShortDTO.java similarity index 85% rename from src/main/java/org/ohdsi/webapi/service/dto/IRAnalysisShortDTO.java rename to src/main/java/org/ohdsi/webapi/ircalc/dto/IRAnalysisShortDTO.java index 00883f39f1..96d0da00db 100644 --- a/src/main/java/org/ohdsi/webapi/service/dto/IRAnalysisShortDTO.java +++ b/src/main/java/org/ohdsi/webapi/ircalc/dto/IRAnalysisShortDTO.java @@ -1,6 +1,6 @@ -package org.ohdsi.webapi.service.dto; +package org.ohdsi.webapi.ircalc.dto; -import org.ohdsi.webapi.model.CommonEntityExt; +import org.ohdsi.webapi.service.dto.CommonEntityExtDTO; public class IRAnalysisShortDTO extends CommonEntityExtDTO { diff --git a/src/main/java/org/ohdsi/webapi/ircalc/dto/IRVersionFullDTO.java b/src/main/java/org/ohdsi/webapi/ircalc/dto/IRVersionFullDTO.java new file mode 100644 index 0000000000..77e42c11cc --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/ircalc/dto/IRVersionFullDTO.java @@ -0,0 +1,6 @@ +package org.ohdsi.webapi.ircalc.dto; + +import org.ohdsi.webapi.versioning.dto.VersionFullDTO; + +public class IRVersionFullDTO extends VersionFullDTO { +} diff --git a/src/main/java/org/ohdsi/webapi/mvc/GlobalExceptionHandler.java b/src/main/java/org/ohdsi/webapi/mvc/GlobalExceptionHandler.java index a24c7cbb7b..492b1b4fc9 100644 --- a/src/main/java/org/ohdsi/webapi/mvc/GlobalExceptionHandler.java +++ b/src/main/java/org/ohdsi/webapi/mvc/GlobalExceptionHandler.java @@ -23,8 +23,6 @@ import org.springframework.web.server.ResponseStatusException; -import java.io.PrintWriter; -import java.io.StringWriter; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.UndeclaredThrowableException; import java.util.Objects; @@ -207,11 +205,9 @@ private Throwable getThrowable(UndeclaredThrowableException ex) { } /** - * Log exception with full stack trace + * Log exception with stack trace (formatted by Logback) */ private void 
logException(Throwable ex) { - StringWriter errorStackTrace = new StringWriter(); - ex.printStackTrace(new PrintWriter(errorStackTrace)); - LOGGER.error(errorStackTrace.toString()); + LOGGER.error(ex.toString(), ex); } } diff --git a/src/main/java/org/ohdsi/webapi/security/authz/AuthorizationService.java b/src/main/java/org/ohdsi/webapi/security/authz/AuthorizationService.java index 4ef423ed46..7bd1a00d55 100644 --- a/src/main/java/org/ohdsi/webapi/security/authz/AuthorizationService.java +++ b/src/main/java/org/ohdsi/webapi/security/authz/AuthorizationService.java @@ -15,8 +15,7 @@ import org.ohdsi.webapi.security.authz.access.EntityGrant; import org.ohdsi.webapi.security.authz.access.EntityType; import org.ohdsi.webapi.security.authz.access.UserAuthorizations; - -import jakarta.transaction.Transactional; +import org.springframework.transaction.annotation.Transactional; /** * The AuthorizatonService is part of security.authz which orchastrates the permission assignments for users, roles, and permisisons @@ -294,6 +293,18 @@ public boolean isOwner(Long entityId, EntityType entityType) { EntityGrant grant = authz.conceptSetAccess.get(entityId); yield grant != null && grant.isOwner(); } + case COHORT_CHARACTERIZATION -> { + EntityGrant grant = authz.cohortCharacterizationAccess.get(entityId); + yield grant != null && grant.isOwner(); + } + case FE_ANALYSIS -> { + EntityGrant grant = authz.feAnalysisAccess.get(entityId); + yield grant != null && grant.isOwner(); + } + case INCIDENCE_RATE -> { + EntityGrant grant = authz.incidenceRateAccess.get(entityId); + yield grant != null && grant.isOwner(); + } case SOURCE -> false; }; } @@ -326,6 +337,18 @@ public boolean hasEntityAccess(Long entityId, EntityType entityType, AccessType EntityGrant grant = authz.conceptSetAccess.get(entityId); yield grant != null && grant.hasAccess(accessType); } + case COHORT_CHARACTERIZATION -> { + EntityGrant grant = authz.cohortCharacterizationAccess.get(entityId); + yield grant != null && 
grant.hasAccess(accessType); + } + case FE_ANALYSIS -> { + EntityGrant grant = authz.feAnalysisAccess.get(entityId); + yield grant != null && grant.hasAccess(accessType); + } + case INCIDENCE_RATE -> { + EntityGrant grant = authz.incidenceRateAccess.get(entityId); + yield grant != null && grant.hasAccess(accessType); + } // infrastructure types that don't have ownership (ie: sources, tools, etc) case SOURCE -> { Set granted = authz.sourceAccess.get(entityId); diff --git a/src/main/java/org/ohdsi/webapi/security/authz/UserEntity.java b/src/main/java/org/ohdsi/webapi/security/authz/UserEntity.java index d83df31a08..23d4c17581 100644 --- a/src/main/java/org/ohdsi/webapi/security/authz/UserEntity.java +++ b/src/main/java/org/ohdsi/webapi/security/authz/UserEntity.java @@ -18,17 +18,28 @@ public class UserEntity implements Serializable{ private static final long serialVersionUID = -2697485161468660016L; + @Id + @Column(name = "ID") + @SequenceGenerator(name = "sec_user_seq", sequenceName = "sec_user_sequence", allocationSize = 1) + @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "sec_user_seq") private Long id; + + @Column(name = "LOGIN") private String login; + + @Column(name = "NAME") private String name; + + @Column(name = "origin", nullable = false) + @Enumerated(EnumType.STRING) private UserOrigin origin = UserOrigin.SYSTEM; + + @OneToMany(mappedBy = "user", fetch = FetchType.LAZY, cascade = CascadeType.REMOVE) private Set userRoles = new LinkedHashSet<>(); + + @Column(name = "last_viewed_notifications_time") private Date lastViewedNotificationsTime; - @Id - @Column(name = "ID") - @SequenceGenerator(name = "sec_user_seq", sequenceName = "sec_user_sequence", allocationSize = 1) - @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "sec_user_seq") public Long getId() { return id; } @@ -37,7 +48,6 @@ public void setId(Long id) { this.id = id; } - @Column(name = "LOGIN") public String getLogin() { return login; } @@ -46,7 +56,6 @@ public 
void setLogin(String login) { this.login = login; } - @Column(name = "NAME") public String getName() { return name; } @@ -55,7 +64,6 @@ public void setName(String name) { this.name = name; } - @OneToMany(mappedBy = "user", fetch = FetchType.LAZY, cascade = CascadeType.REMOVE) public Set getUserRoles() { return userRoles; } @@ -64,7 +72,6 @@ public void setUserRoles(Set userRoles) { this.userRoles = userRoles; } - @Column(name = "last_viewed_notifications_time") public Date getLastViewedNotificationsTime() { return lastViewedNotificationsTime; } @@ -73,8 +80,6 @@ public void setLastViewedNotificationsTime(Date lastViewedNotificationsTime) { this.lastViewedNotificationsTime = lastViewedNotificationsTime; } - @Column(name = "origin", nullable = false) - @Enumerated(EnumType.STRING) public UserOrigin getOrigin() { return origin; } diff --git a/src/main/java/org/ohdsi/webapi/security/authz/UserRoleEntity.java b/src/main/java/org/ohdsi/webapi/security/authz/UserRoleEntity.java index 98f8ae20f8..1c78f30cb8 100644 --- a/src/main/java/org/ohdsi/webapi/security/authz/UserRoleEntity.java +++ b/src/main/java/org/ohdsi/webapi/security/authz/UserRoleEntity.java @@ -24,15 +24,24 @@ public class UserRoleEntity implements Serializable { private static final long serialVersionUID = 6257846375334314942L; + @Id + @Column(name = "ID") + @SequenceGenerator(name = "sec_user_role_seq", sequenceName = "sec_user_role_sequence", allocationSize = 1) + @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "sec_user_role_seq") private Long id; + + @ManyToOne + @JoinColumn(name="USER_ID", nullable=false) private UserEntity user; + + @ManyToOne + @JoinColumn(name="ROLE_ID", nullable=false) private RoleEntity role; + + @Column(name = "origin", nullable = false) + @Enumerated(EnumType.STRING) private UserOrigin origin = UserOrigin.SYSTEM; - @Id - @Column(name = "ID") - @SequenceGenerator(name = "sec_user_role_seq", sequenceName = "sec_user_role_sequence", allocationSize = 1) - 
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "sec_user_role_seq") public Long getId() { return id; } @@ -41,8 +50,6 @@ public void setId(Long id) { this.id = id; } - @ManyToOne - @JoinColumn(name="USER_ID", nullable=false) public UserEntity getUser() { return user; } @@ -51,8 +58,6 @@ public void setUser(UserEntity user) { this.user = user; } - @ManyToOne - @JoinColumn(name="ROLE_ID", nullable=false) public RoleEntity getRole() { return role; } @@ -61,8 +66,6 @@ public void setRole(RoleEntity role) { this.role = role; } - @Column(name = "origin", nullable = false) - @Enumerated(EnumType.STRING) public UserOrigin getOrigin() { return origin; } diff --git a/src/main/java/org/ohdsi/webapi/security/authz/access/CohortCharacterizationAccessEntity.java b/src/main/java/org/ohdsi/webapi/security/authz/access/CohortCharacterizationAccessEntity.java new file mode 100644 index 0000000000..2fcd6d61d8 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/security/authz/access/CohortCharacterizationAccessEntity.java @@ -0,0 +1,90 @@ +package org.ohdsi.webapi.security.authz.access; + +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.EnumType; +import jakarta.persistence.Enumerated; +import jakarta.persistence.Id; +import jakarta.persistence.IdClass; +import jakarta.persistence.Table; +import java.io.Serializable; +import java.util.Objects; + +/** + * JPA Entity for sec_cohort_characterization table + */ +@Entity(name="CohortCharacterizationAccess") +@Table(name = "sec_cohort_characterization") +@IdClass(CohortCharacterizationAccessEntity.CohortCharacterizationAccessId.class) +public class CohortCharacterizationAccessEntity { + + @Id + @Column(name = "role_id") + private Long roleId; + + @Id + @Column(name = "cohort_characterization_id") + private Long cohortCharacterizationId; + + @Id + @Column(name = "access_type") + @Enumerated(EnumType.STRING) + private AccessType accessType; + + public Long getRoleId() { + 
return roleId; + } + + public void setRoleId(Long userId) { + this.roleId = userId; + } + + public Long getCohortCharacterizationId() { + return cohortCharacterizationId; + } + + public void setCohortCharacterizationId(Long cohortCharacterizationId) { + this.cohortCharacterizationId = cohortCharacterizationId; + } + + public AccessType getAccessType() { + return accessType; + } + + public void setAccessType(AccessType accessType) { + this.accessType = accessType; + } + + /** + * Composite key class for CohortCharacterizationAccessEntity + */ + public static class CohortCharacterizationAccessId implements Serializable { + private Long roleId; + private Long cohortCharacterizationId; + private AccessType accessType; + + public CohortCharacterizationAccessId() { + } + + public CohortCharacterizationAccessId(Long roleId, Long cohortCharacterizationId, AccessType accessType) { + this.roleId = roleId; + this.cohortCharacterizationId = cohortCharacterizationId; + this.accessType = accessType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof CohortCharacterizationAccessId)) return false; + CohortCharacterizationAccessId that = (CohortCharacterizationAccessId) o; + return Objects.equals(roleId, that.roleId) && + Objects.equals(cohortCharacterizationId, that.cohortCharacterizationId) && + accessType == that.accessType; + } + + @Override + public int hashCode() { + return Objects.hash(roleId, cohortCharacterizationId, accessType); + } + } +} diff --git a/src/main/java/org/ohdsi/webapi/security/authz/access/CohortCharacterizationAccessRepository.java b/src/main/java/org/ohdsi/webapi/security/authz/access/CohortCharacterizationAccessRepository.java new file mode 100644 index 0000000000..2d84b397cf --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/security/authz/access/CohortCharacterizationAccessRepository.java @@ -0,0 +1,35 @@ +package org.ohdsi.webapi.security.authz.access; + +import java.util.List; + +import 
org.springframework.data.jpa.repository.JpaRepository; +import org.springframework.data.jpa.repository.Query; +import org.springframework.data.repository.query.Param; +import org.springframework.stereotype.Repository; + +/** + * Repository for sec_cohort_characterization table + */ +@Repository +public interface CohortCharacterizationAccessRepository extends JpaRepository { + + /** + * Find all cohort characterization access grants for a user via their roles. + * Joins through UserRole to resolve role membership from a userId. + * Returns (entityId, accessType) projections. + */ + @Query(""" + SELECT ca.cohortCharacterizationId as entityId, ca.accessType as accessType + FROM CohortCharacterizationAccess ca + JOIN UserRole ur ON ur.role.id = ca.roleId + WHERE ur.user.id = :userId + """) + List findAccessByUserId(@Param("userId") Long userId); + + /** + * Find all cohort characterization IDs created (owned) by this user. + * Used to merge ownership as implicit WRITE access. + */ + @Query("SELECT cc.id FROM CohortCharacterizationEntity cc WHERE cc.createdBy.id = :userId") + List findOwnedCohortCharacterizationIds(@Param("userId") Long userId); +} diff --git a/src/main/java/org/ohdsi/webapi/security/authz/access/CohortDefinitionAccessRepository.java b/src/main/java/org/ohdsi/webapi/security/authz/access/CohortDefinitionAccessRepository.java index 430e001797..6d03f59fe7 100644 --- a/src/main/java/org/ohdsi/webapi/security/authz/access/CohortDefinitionAccessRepository.java +++ b/src/main/java/org/ohdsi/webapi/security/authz/access/CohortDefinitionAccessRepository.java @@ -13,27 +13,6 @@ @Repository public interface CohortDefinitionAccessRepository extends JpaRepository { - /** - * Check if a user has specific access to a cohort definition - */ - @Query(""" - SELECT CASE WHEN COUNT(ca) > 0 THEN true ELSE false END - FROM CohortDefinitionAccess ca - JOIN UserRole ur ON ur.role.id = ca.roleId - WHERE ur.user.id = :userId - AND ca.cohortDefinitionId = :cohortDefinitionId - 
AND ca.accessType = :accessType - """) - boolean hasAccess(@Param("userId") Long userId, - @Param("cohortDefinitionId") Long cohortDefinitionId, - @Param("accessType") AccessType accessType); - - /** - * Get the owner (created_by_id) of a cohort definition - */ - @Query("SELECT cd.createdBy.id FROM CohortDefinition cd WHERE cd.id = :cohortDefinitionId") - Long getCreatedById(@Param("cohortDefinitionId") Long cohortDefinitionId); - /** * Find all cohort definition access grants for a user via their roles. * Joins through UserRole to resolve role membership from a userId. diff --git a/src/main/java/org/ohdsi/webapi/security/authz/access/EntityAccessService.java b/src/main/java/org/ohdsi/webapi/security/authz/access/EntityAccessService.java index f0f49d4c1d..69a9b84a9c 100644 --- a/src/main/java/org/ohdsi/webapi/security/authz/access/EntityAccessService.java +++ b/src/main/java/org/ohdsi/webapi/security/authz/access/EntityAccessService.java @@ -27,16 +27,25 @@ public class EntityAccessService { private final CohortDefinitionAccessRepository cohortDefAccessRepo; private final ConceptSetAccessRepository conceptSetAccessRepo; + private final CohortCharacterizationAccessRepository cohortCharAccessRepo; + private final FeAnalysisAccessRepository feAnalysisAccessRepo; private final SourceAccessRepository sourceAccessRepo; + private final IncidenceRateAccessRepository incidenceRateAccessRepo; private final PermissionRepository permissionRepository; public EntityAccessService(CohortDefinitionAccessRepository cohortDefAccessRepo, ConceptSetAccessRepository conceptSetAccessRepo, + CohortCharacterizationAccessRepository cohortCharAccessRepo, + FeAnalysisAccessRepository feAnalysisAccessRepo, SourceAccessRepository sourceAccessRepo, + IncidenceRateAccessRepository incidenceRateAccessRepo, PermissionRepository permissionRepository) { this.cohortDefAccessRepo = cohortDefAccessRepo; this.conceptSetAccessRepo = conceptSetAccessRepo; + this.cohortCharAccessRepo = cohortCharAccessRepo; 
+ this.feAnalysisAccessRepo = feAnalysisAccessRepo; this.sourceAccessRepo = sourceAccessRepo; + this.incidenceRateAccessRepo = incidenceRateAccessRepo; this.permissionRepository = permissionRepository; } @@ -65,14 +74,19 @@ public UserAuthorizations buildUserAuthorizations(Long userId) { // Per-entity access maps built from sec_* tables + ownership authz.cohortDefinitionAccess = buildCohortDefinitionAccess(userId); authz.conceptSetAccess = buildConceptSetAccess(userId); + authz.cohortCharacterizationAccess = buildCohortCharacterizationAccess(userId); + authz.feAnalysisAccess = buildFeAnalysisAccess(userId); + authz.incidenceRateAccess = buildIncidenceRateAccess(userId); authz.sourceAccess = buildSourceAccess(userId); long elapsedMs = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos); - log.debug("Built UserAuthorizations for userId={} in {}ms (permissions={}, cohortDefs={}, conceptSets={}, sources={})", + log.debug("Built UserAuthorizations for userId={} in {}ms (permissions={}, cohortDefs={}, conceptSets={}, cohortChars={}, feAnalyses={}, incidenceRates={}, sources={})", userId, elapsedMs, authz.permissions.size(), authz.cohortDefinitionAccess.size(), authz.conceptSetAccess.size(), + authz.cohortCharacterizationAccess.size(), authz.feAnalysisAccess.size(), + authz.incidenceRateAccess.size(), authz.sourceAccess.size()); return authz; @@ -161,6 +175,122 @@ public Map buildConceptSetAccess(Long userId) { return access; } + /** + * Build the cohort characterization access map for a user. + * Queries the sec_cohort_characterization table via roles assigned to the user, + * then merges owned cohort characterizations as implicit WRITE access.
+ * + * @param userId The user ID + * @return Map of cohortCharacterizationId → EntityGrant + */ + public Map buildCohortCharacterizationAccess(Long userId) { + // Collect role-based grants + Map> roleGrants = new HashMap<>(); + for (EntityAccessProjection p : cohortCharAccessRepo.findAccessByUserId(userId)) { + roleGrants.computeIfAbsent(p.getEntityId(), k -> EnumSet.noneOf(AccessType.class)) + .add(p.getAccessType()); + } + + // Collect owned entity IDs + Set ownedIds = new java.util.HashSet<>(); + for (Long ownedId : cohortCharAccessRepo.findOwnedCohortCharacterizationIds(userId)) { + ownedIds.add(ownedId); + } + + // Merge into EntityGrant map + Map access = new HashMap<>(); + + // Start with role-granted entities + for (Map.Entry> entry : roleGrants.entrySet()) { + Long entityId = entry.getKey(); + access.put(entityId, new EntityGrant(entry.getValue(), ownedIds.contains(entityId))); + } + + // Add owned entities that had no role-based grants + for (Long ownedId : ownedIds) { + access.computeIfAbsent(ownedId, k -> new EntityGrant(EnumSet.noneOf(AccessType.class), true)); + } + + return access; + } + + /** + * Build the feature analysis access map for a user. + * Queries the sec_fe_analysis table via roles assigned to the user, + * then merges owned feature analyses as implicit WRITE access. 
+ * + * @param userId The user ID + * @return Map of feAnalysisId → EntityGrant + */ + public Map buildFeAnalysisAccess(Long userId) { + // Collect role-based grants + Map> roleGrants = new HashMap<>(); + for (EntityAccessProjection p : feAnalysisAccessRepo.findAccessByUserId(userId)) { + roleGrants.computeIfAbsent(p.getEntityId(), k -> EnumSet.noneOf(AccessType.class)) + .add(p.getAccessType()); + } + + // Collect owned entity IDs + Set ownedIds = new java.util.HashSet<>(); + for (Integer ownedId : feAnalysisAccessRepo.findOwnedFeAnalysisIds(userId)) { + ownedIds.add(ownedId.longValue()); + } + + // Merge into EntityGrant map + Map access = new HashMap<>(); + + // Start with role-granted entities + for (Map.Entry> entry : roleGrants.entrySet()) { + Long entityId = entry.getKey(); + access.put(entityId, new EntityGrant(entry.getValue(), ownedIds.contains(entityId))); + } + + // Add owned entities that had no role-based grants + for (Long ownedId : ownedIds) { + access.computeIfAbsent(ownedId, k -> new EntityGrant(EnumSet.noneOf(AccessType.class), true)); + } + + return access; + } + + /** + * Build the incidence rate access map for a user. + * Queries the sec_ir_analysis table via roles assigned to the user, + * then merges owned incidence rate analyses as implicit WRITE access. 
+ * + * @param userId The user ID + * @return Map of irId → EntityGrant + */ + public Map buildIncidenceRateAccess(Long userId) { + Map> roleGrants = new HashMap<>(); + for (EntityAccessProjection p : incidenceRateAccessRepo.findAccessByUserId(userId)) { + roleGrants.computeIfAbsent(p.getEntityId(), k -> EnumSet.noneOf(AccessType.class)) + .add(p.getAccessType()); + } + + // Collect owned entity IDs + Set ownedIds = new java.util.HashSet<>(); + for (Integer ownedId : incidenceRateAccessRepo.findOwnedIncidenceRateIds(userId)) { + ownedIds.add(ownedId.longValue()); + } + + // Merge into EntityGrant map + Map access = new HashMap<>(); + + // Start with role-granted entities + for (Map.Entry> entry : roleGrants.entrySet()) { + Long entityId = entry.getKey(); + access.put(entityId, new EntityGrant(entry.getValue(), ownedIds.contains(entityId))); + } + + // Add owned entities that had no role-based grants + for (Long ownedId : ownedIds) { + access.computeIfAbsent(ownedId, k -> new EntityGrant(EnumSet.noneOf(AccessType.class), true)); + } + + return access; + } + /** * Build the source access map for a user. * Queries the sec_source table via roles assigned to the user. 
@@ -181,18 +311,4 @@ public Map> buildSourceAccess(Long userId) { return access; } - /** - * Get the owner (created_by_id) of an entity - * - * @param entityId The entity ID - * @param entityType The type of entity - * @return The user ID of the owner, or null if not found - */ - public Long getOwnerId(Long entityId, EntityType entityType) { - return switch (entityType) { - case COHORT_DEFINITION -> cohortDefAccessRepo.getCreatedById(entityId); - case CONCEPT_SET -> conceptSetAccessRepo.getCreatedById(entityId); - case SOURCE -> null; // no one owns a source - }; - } } diff --git a/src/main/java/org/ohdsi/webapi/security/authz/access/EntityType.java b/src/main/java/org/ohdsi/webapi/security/authz/access/EntityType.java index 89d65e7933..3182fe2967 100644 --- a/src/main/java/org/ohdsi/webapi/security/authz/access/EntityType.java +++ b/src/main/java/org/ohdsi/webapi/security/authz/access/EntityType.java @@ -1,29 +1,32 @@ package org.ohdsi.webapi.security.authz.access; +import org.ohdsi.webapi.cohortcharacterization.domain.CohortCharacterizationEntity; import org.ohdsi.webapi.cohortdefinition.CohortDefinitionEntity; import org.ohdsi.webapi.conceptset.ConceptSet; -import org.ohdsi.webapi.model.CommonEntity; +import org.ohdsi.webapi.feanalysis.domain.FeAnalysisEntity; +import org.ohdsi.webapi.ircalc.IncidenceRateAnalysis; import org.ohdsi.webapi.source.Source; public enum EntityType { COHORT_DEFINITION(CohortDefinitionEntity.class), CONCEPT_SET(ConceptSet.class), - SOURCE(Source.class); - // COHORT_CHARACTERIZATION(CohortCharacterizationEntity.class), + SOURCE(Source.class), + COHORT_CHARACTERIZATION(CohortCharacterizationEntity.class), + FE_ANALYSIS(FeAnalysisEntity.class), + INCIDENCE_RATE(IncidenceRateAnalysis.class); // PATHWAY_ANALYSIS(PathwayAnalysisEntity.class), // FE_ANALYSIS(FeAnalysisEntity.class), - // INCIDENCE_RATE(IncidenceRateAnalysis.class), // COHORT_SAMPLE(CohortSample.class), // TAG(Tag.class), // TOOL(Tool.class), // REUSABLE(Reusable.class); - 
private final Class> entityClass; + private final Class entityClass; - EntityType(Class> entityClass) { + EntityType(Class entityClass) { this.entityClass = entityClass; } - public Class> getEntityClass() { + public Class getEntityClass() { return entityClass; } } diff --git a/src/main/java/org/ohdsi/webapi/security/authz/access/FeAnalysisAccessEntity.java b/src/main/java/org/ohdsi/webapi/security/authz/access/FeAnalysisAccessEntity.java new file mode 100644 index 0000000000..cba913deef --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/security/authz/access/FeAnalysisAccessEntity.java @@ -0,0 +1,90 @@ +package org.ohdsi.webapi.security.authz.access; + +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.EnumType; +import jakarta.persistence.Enumerated; +import jakarta.persistence.Id; +import jakarta.persistence.IdClass; +import jakarta.persistence.Table; +import java.io.Serializable; +import java.util.Objects; + +/** + * JPA Entity for sec_fe_analysis table + */ +@Entity(name = "FeAnalysisAccess") +@Table(name = "sec_fe_analysis") +@IdClass(FeAnalysisAccessEntity.FeAnalysisAccessId.class) +public class FeAnalysisAccessEntity { + + @Id + @Column(name = "role_id") + private Long roleId; + + @Id + @Column(name = "fe_analysis_id") + private Long feAnalysisId; + + @Id + @Column(name = "access_type") + @Enumerated(EnumType.STRING) + private AccessType accessType; + + public Long getRoleId() { + return roleId; + } + + public void setRoleId(Long userId) { + this.roleId = userId; + } + + public Long getFeAnalysisId() { + return feAnalysisId; + } + + public void setFeAnalysisId(Long feAnalysisId) { + this.feAnalysisId = feAnalysisId; + } + + public AccessType getAccessType() { + return accessType; + } + + public void setAccessType(AccessType accessType) { + this.accessType = accessType; + } + + /** + * Composite key class for FeAnalysisAccessEntity + */ + public static class FeAnalysisAccessId implements Serializable { + 
private Long roleId; + private Long feAnalysisId; + private AccessType accessType; + + public FeAnalysisAccessId() { + } + + public FeAnalysisAccessId(Long roleId, Long feAnalysisId, AccessType accessType) { + this.roleId = roleId; + this.feAnalysisId = feAnalysisId; + this.accessType = accessType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof FeAnalysisAccessId)) return false; + FeAnalysisAccessId that = (FeAnalysisAccessId) o; + return Objects.equals(roleId, that.roleId) && + Objects.equals(feAnalysisId, that.feAnalysisId) && + accessType == that.accessType; + } + + @Override + public int hashCode() { + return Objects.hash(roleId, feAnalysisId, accessType); + } + } +} diff --git a/src/main/java/org/ohdsi/webapi/security/authz/access/FeAnalysisAccessRepository.java b/src/main/java/org/ohdsi/webapi/security/authz/access/FeAnalysisAccessRepository.java new file mode 100644 index 0000000000..fd995b8c97 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/security/authz/access/FeAnalysisAccessRepository.java @@ -0,0 +1,35 @@ +package org.ohdsi.webapi.security.authz.access; + +import java.util.List; + +import org.springframework.data.jpa.repository.JpaRepository; +import org.springframework.data.jpa.repository.Query; +import org.springframework.data.repository.query.Param; +import org.springframework.stereotype.Repository; + +/** + * Repository for sec_fe_analysis table + */ +@Repository +public interface FeAnalysisAccessRepository extends JpaRepository { + + /** + * Find all feature-analysis access grants for a user via their roles. + * Joins through UserRole to resolve role membership from a userId. + * Returns (entityId, accessType) projections. 
+ */ + @Query(""" + SELECT fa.feAnalysisId as entityId, fa.accessType as accessType + FROM FeAnalysisAccess fa + JOIN UserRole ur ON ur.role.id = fa.roleId + WHERE ur.user.id = :userId + """) + List findAccessByUserId(@Param("userId") Long userId); + + /** + * Find all feature analysis IDs created (owned) by this user. + * Used to merge ownership as implicit WRITE access. + */ + @Query("SELECT fa.id FROM FeAnalysisEntity fa WHERE fa.createdBy.id = :userId") + List findOwnedFeAnalysisIds(@Param("userId") Long userId); +} diff --git a/src/main/java/org/ohdsi/webapi/security/authz/access/IncidenceRateAccessEntity.java b/src/main/java/org/ohdsi/webapi/security/authz/access/IncidenceRateAccessEntity.java new file mode 100644 index 0000000000..36b4104895 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/security/authz/access/IncidenceRateAccessEntity.java @@ -0,0 +1,87 @@ +package org.ohdsi.webapi.security.authz.access; + +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.EnumType; +import jakarta.persistence.Enumerated; +import jakarta.persistence.Id; +import jakarta.persistence.IdClass; +import jakarta.persistence.Table; +import java.io.Serializable; +import java.util.Objects; + +/** + * JPA Entity for sec_ir_analysis table + */ +@Entity(name = "IncidenceRateAccess") +@Table(name = "sec_ir_analysis") +@IdClass(IncidenceRateAccessEntity.IncidenceRateAccessId.class) +public class IncidenceRateAccessEntity { + + @Id + @Column(name = "role_id") + private Long roleId; + + @Id + @Column(name = "ir_id") + private Long irId; + + @Id + @Column(name = "access_type") + @Enumerated(EnumType.STRING) + private AccessType accessType; + + public Long getRoleId() { + return roleId; + } + + public void setRoleId(Long userId) { + this.roleId = userId; + } + + public Long getIrId() { + return irId; + } + + public void setIrId(Long irId) { + this.irId = irId; + } + + public AccessType getAccessType() { + return accessType; + } + + public 
void setAccessType(AccessType accessType) { + this.accessType = accessType; + } + + public static class IncidenceRateAccessId implements Serializable { + private Long roleId; + private Long irId; + private AccessType accessType; + + public IncidenceRateAccessId() { + } + + public IncidenceRateAccessId(Long roleId, Long irId, AccessType accessType) { + this.roleId = roleId; + this.irId = irId; + this.accessType = accessType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof IncidenceRateAccessId)) return false; + IncidenceRateAccessId that = (IncidenceRateAccessId) o; + return Objects.equals(roleId, that.roleId) && + Objects.equals(irId, that.irId) && + accessType == that.accessType; + } + + @Override + public int hashCode() { + return Objects.hash(roleId, irId, accessType); + } + } +} diff --git a/src/main/java/org/ohdsi/webapi/security/authz/access/IncidenceRateAccessRepository.java b/src/main/java/org/ohdsi/webapi/security/authz/access/IncidenceRateAccessRepository.java new file mode 100644 index 0000000000..bab7007666 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/security/authz/access/IncidenceRateAccessRepository.java @@ -0,0 +1,23 @@ +package org.ohdsi.webapi.security.authz.access; + +import java.util.List; +import org.springframework.data.jpa.repository.JpaRepository; +import org.springframework.data.jpa.repository.Query; +import org.springframework.data.repository.query.Param; +import org.springframework.stereotype.Repository; + +/** Repository for sec_ir_analysis table */ +@Repository +public interface IncidenceRateAccessRepository extends JpaRepository { + + @Query(""" + SELECT ia.irId as entityId, ia.accessType as accessType + FROM IncidenceRateAccess ia + JOIN UserRole ur ON ur.role.id = ia.roleId + WHERE ur.user.id = :userId + """) + List findAccessByUserId(@Param("userId") Long userId); + + @Query("SELECT ira.id FROM IncidenceRateAnalysis ira WHERE ira.createdBy.id = :userId") + List 
findOwnedIncidenceRateIds(@Param("userId") Long userId); +} diff --git a/src/main/java/org/ohdsi/webapi/security/authz/access/UserAuthorizations.java b/src/main/java/org/ohdsi/webapi/security/authz/access/UserAuthorizations.java index 7e04ff2cc5..de52c7be9e 100644 --- a/src/main/java/org/ohdsi/webapi/security/authz/access/UserAuthorizations.java +++ b/src/main/java/org/ohdsi/webapi/security/authz/access/UserAuthorizations.java @@ -9,6 +9,9 @@ public class UserAuthorizations { /** Authored entities: access grants + ownership status */ public Map cohortDefinitionAccess = Map.of(); public Map conceptSetAccess = Map.of(); + public Map cohortCharacterizationAccess = Map.of(); + public Map feAnalysisAccess = Map.of(); + public Map incidenceRateAccess = Map.of(); /** Infrastructure entities: access grants only (no ownership semantics) */ public Map> sourceAccess = Map.of(); diff --git a/src/main/java/org/ohdsi/webapi/security/session/SessionService.java b/src/main/java/org/ohdsi/webapi/security/session/SessionService.java index 5583ef2dbc..e7eba888dd 100644 --- a/src/main/java/org/ohdsi/webapi/security/session/SessionService.java +++ b/src/main/java/org/ohdsi/webapi/security/session/SessionService.java @@ -6,8 +6,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; - -import jakarta.transaction.Transactional; +import org.springframework.transaction.annotation.Transactional; @Service @Transactional diff --git a/src/main/java/org/ohdsi/webapi/security/spring/WebApiSecurityExpressionRoot.java b/src/main/java/org/ohdsi/webapi/security/spring/WebApiSecurityExpressionRoot.java index f2a0b3a94d..7304b51685 100644 --- a/src/main/java/org/ohdsi/webapi/security/spring/WebApiSecurityExpressionRoot.java +++ b/src/main/java/org/ohdsi/webapi/security/spring/WebApiSecurityExpressionRoot.java @@ -35,6 +35,9 @@ public class WebApiSecurityExpressionRoot public final EntityType COHORT_DEFINITION = EntityType.COHORT_DEFINITION; public 
final EntityType CONCEPT_SET = EntityType.CONCEPT_SET; public final EntityType SOURCE = EntityType.SOURCE; + public final EntityType COHORT_CHARACTERIZATION = EntityType.COHORT_CHARACTERIZATION; + public final EntityType FE_ANALYSIS = EntityType.FE_ANALYSIS; + public final EntityType INCIDENCE_RATE = EntityType.INCIDENCE_RATE; public WebApiSecurityExpressionRoot( Authentication authentication, @@ -79,7 +82,7 @@ public boolean hasEntityAccess(Long entityId, EntityType entityType, AccessType * Accepts a Collection for easier SpEL usage when grouping values via * `anyOf(...)`. */ - public boolean hasEntityAccess(Long entityId, EntityType entityType, Collection accessTypes) { + public boolean hasAnyEntityAccess(Long entityId, EntityType entityType, Collection accessTypes) { if (accessTypes == null || accessTypes.isEmpty()) return false; for (AccessType at : accessTypes) { diff --git a/src/main/java/org/ohdsi/webapi/service/AbstractDaoService.java b/src/main/java/org/ohdsi/webapi/service/AbstractDaoService.java index 3e9449be03..6b6f545522 100644 --- a/src/main/java/org/ohdsi/webapi/service/AbstractDaoService.java +++ b/src/main/java/org/ohdsi/webapi/service/AbstractDaoService.java @@ -47,6 +47,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Lazy; import org.springframework.core.convert.ConversionService; @@ -123,6 +124,14 @@ public ConceptSetRepository getConceptSetRepository() { @Autowired private TransactionTemplate transactionTemplateNoTransaction; + @Autowired(required = false) + @Qualifier("batchTransactionTemplate") + private TransactionTemplate batchTransactionTemplate; + + @Autowired(required = false) + @Qualifier("batchTransactionTemplateRequiresNew") + private TransactionTemplate batchTransactionTemplateRequiresNew; + 
@Autowired private KerberosService kerberosService; @@ -289,7 +298,20 @@ public TransactionTemplate getTransactionTemplateRequiresNew() { public TransactionTemplate getTransactionTemplateNoTransaction() { return transactionTemplateNoTransaction; } - + + /** + * @return the batchTransactionTemplate for batch job tasklets + */ + public TransactionTemplate getBatchTransactionTemplate() { + return batchTransactionTemplate; + } + + /** + * @return the batchTransactionTemplateRequiresNew for batch job tasklets with REQUIRES_NEW + */ + public TransactionTemplate getBatchTransactionTemplateRequiresNew() { + return batchTransactionTemplateRequiresNew; + } /** * @return the ohdsiSchema diff --git a/src/main/java/org/ohdsi/webapi/service/FeatureExtractionService.java b/src/main/java/org/ohdsi/webapi/service/FeatureExtractionService.java new file mode 100644 index 0000000000..0c6a9f3826 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/service/FeatureExtractionService.java @@ -0,0 +1,47 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package org.ohdsi.webapi.service; + + +import org.springframework.web.bind.annotation.*; +import org.springframework.http.MediaType; +import org.ohdsi.featureExtraction.FeatureExtraction; + +/** + * + * @author asena5 + * @author alondhe2 + */ +@RestController +@RequestMapping("/featureextraction") +public class FeatureExtractionService extends AbstractDaoService { + /** + * Get default feature extraction settings + * @param temporal Use temporal covariate settings? 
true or false (default) + * @return JSON with default covariate settings object + */ + @GetMapping(value = "/defaultcovariatesettings", produces = MediaType.APPLICATION_JSON_VALUE) + public String getDefaultCovariateSettings(@RequestParam(value = "temporal", required = false) final String temporal) { + boolean getTemporal = false; + try { + if (temporal != null && !temporal.isEmpty()) { + getTemporal = Boolean.parseBoolean(temporal); + } + } catch (Exception e) { + throw new IllegalArgumentException("The parameter temporal must be a string of true or false."); + } + + FeatureExtraction.init(null); + String settings = ""; + if (getTemporal) { + settings = FeatureExtraction.getDefaultPrespecTemporalAnalyses(); + } else { + settings = FeatureExtraction.getDefaultPrespecAnalyses(); + } + + return settings; + } +} diff --git a/src/main/java/org/ohdsi/webapi/util/EntityUtils.java b/src/main/java/org/ohdsi/webapi/util/EntityUtils.java index ec8931ff34..70c0114128 100644 --- a/src/main/java/org/ohdsi/webapi/util/EntityUtils.java +++ b/src/main/java/org/ohdsi/webapi/util/EntityUtils.java @@ -10,7 +10,10 @@ private EntityUtils() { } public static EntityGraph fromAttributePaths(final String... 
strings) { - - return DynamicEntityGraph.loading().addPath(strings).build(); + DynamicEntityGraph.Builder builder = DynamicEntityGraph.loading(); + for (String path : strings) { + builder = builder.addPath(path); + } + return builder.build(); } } diff --git a/src/main/java/org/ohdsi/webapi/util/HttpUtils.java b/src/main/java/org/ohdsi/webapi/util/HttpUtils.java index d5147c7796..440b914e01 100644 --- a/src/main/java/org/ohdsi/webapi/util/HttpUtils.java +++ b/src/main/java/org/ohdsi/webapi/util/HttpUtils.java @@ -3,17 +3,23 @@ import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import org.springframework.http.HttpHeaders; -import java.io.OutputStream; +import org.springframework.web.servlet.mvc.method.annotation.StreamingResponseBody; +import java.io.ByteArrayOutputStream; public class HttpUtils { - public static ResponseEntity respondBinary(OutputStream stream, String filename) { + public static ResponseEntity respondBinary(ByteArrayOutputStream stream, String filename) { HttpHeaders headers = new HttpHeaders(); headers.setContentType(MediaType.APPLICATION_OCTET_STREAM); headers.set(HttpHeaders.CONTENT_DISPOSITION, String.format("attachment; filename=\"%s\"", filename)); + StreamingResponseBody responseBody = outputStream -> { + stream.writeTo(outputStream); + outputStream.flush(); + }; + return ResponseEntity.ok() .headers(headers) - .body(stream); + .body(responseBody); } } diff --git a/src/main/java/org/ohdsi/webapi/util/PackageFilteringThrowableProxyConverter.java b/src/main/java/org/ohdsi/webapi/util/PackageFilteringThrowableProxyConverter.java new file mode 100644 index 0000000000..b0d73a6415 --- /dev/null +++ b/src/main/java/org/ohdsi/webapi/util/PackageFilteringThrowableProxyConverter.java @@ -0,0 +1,122 @@ +package org.ohdsi.webapi.util; + +import ch.qos.logback.classic.pattern.ThrowableHandlingConverter; +import ch.qos.logback.classic.spi.ILoggingEvent; +import ch.qos.logback.classic.spi.IThrowableProxy; +import 
ch.qos.logback.classic.spi.StackTraceElementProxy; +import ch.qos.logback.core.CoreConstants; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +/** + * Custom Logback converter that filters out framework packages from stack traces + * to make them more readable by showing only application-level code. + * + * Configuration: Pass comma-separated package prefixes as option from application.yaml. + * Example in logback-spring.xml: %fex{org.springframework,org.apache,jakarta.servlet} + * + * Note: Logback's built-in %ex converter does NOT support package filtering (unlike Log4j2). + * This custom converter is required for Spring Boot 3.x / Logback 1.5.x. + * + * Extends ThrowableHandlingConverter (not ThrowableProxyConverter) to avoid the parent's + * option parsing which expects integers for stack trace depth limiting. + */ +public class PackageFilteringThrowableProxyConverter extends ThrowableHandlingConverter { + + private List ignoredPackages = Collections.emptyList(); + + @Override + public void start() { + // Read option from pattern (configured via application.yaml -> logback-spring.xml) + List options = getOptionList(); + if (options != null && !options.isEmpty()) { + ignoredPackages = options; + } + super.start(); + } + + @Override + public String convert(ILoggingEvent event) { + IThrowableProxy tp = event.getThrowableProxy(); + if (tp == null) { + return ""; + } + StringBuilder sb = new StringBuilder(2048); + recursiveAppendFiltered(sb, "", tp); + return sb.toString(); + } + + private void recursiveAppendFiltered(StringBuilder sb, String prefix, IThrowableProxy tp) { + if (tp == null) { + return; + } + + StackTraceElementProxy[] stepArray = tp.getStackTraceElementProxyArray(); + int commonFrames = tp.getCommonFrames(); + + int filteredCount = 0; + + // Print stack trace elements, filtering framework packages + for (int i = 0; i < stepArray.length - commonFrames; i++) { + StackTraceElementProxy step = stepArray[i]; + String 
className = step.getStackTraceElement().getClassName(); + + boolean shouldIgnore = ignoredPackages.stream() + .anyMatch(className::startsWith); + + if (!shouldIgnore) { + sb.append(prefix); + sb.append("\tat "); + sb.append(step.getStackTraceElement().toString()); + sb.append(CoreConstants.LINE_SEPARATOR); + } else { + filteredCount++; + } + } + + // Show how many frames were filtered (parenthetical to indicate omission, not continuation) + if (filteredCount > 0) { + sb.append(prefix); + sb.append("\t("); + sb.append(filteredCount); + sb.append(" frames filtered)"); + sb.append(CoreConstants.LINE_SEPARATOR); + } + + if (commonFrames > 0) { + sb.append(prefix); + sb.append("\t... "); + sb.append(commonFrames); + sb.append(" common frames omitted"); + sb.append(CoreConstants.LINE_SEPARATOR); + } + + // Handle cause + IThrowableProxy cause = tp.getCause(); + if (cause != null) { + sb.append(prefix); + sb.append("Caused by: "); + sb.append(cause.getClassName()); + sb.append(": "); + sb.append(cause.getMessage()); + sb.append(CoreConstants.LINE_SEPARATOR); + recursiveAppendFiltered(sb, prefix, cause); + } + + // Handle suppressed exceptions + IThrowableProxy[] suppressed = tp.getSuppressed(); + if (suppressed != null) { + for (IThrowableProxy suppressedEx : suppressed) { + sb.append(prefix); + sb.append("Suppressed: "); + sb.append(suppressedEx.getClassName()); + sb.append(": "); + sb.append(suppressedEx.getMessage()); + sb.append(CoreConstants.LINE_SEPARATOR); + recursiveAppendFiltered(sb, prefix + "\t", suppressedEx); + } + } + } +} diff --git a/src/main/java/org/ohdsi/webapi/versioning/domain/VersionType.java b/src/main/java/org/ohdsi/webapi/versioning/domain/VersionType.java index 88f11e3c80..eec2940c2a 100644 --- a/src/main/java/org/ohdsi/webapi/versioning/domain/VersionType.java +++ b/src/main/java/org/ohdsi/webapi/versioning/domain/VersionType.java @@ -1,5 +1,5 @@ package org.ohdsi.webapi.versioning.domain; public enum VersionType { - CONCEPT_SET, COHORT, 
REUSABLE + CONCEPT_SET, COHORT, CHARACTERIZATION, INCIDENCE_RATE, PATHWAY, REUSABLE } diff --git a/src/main/java/org/ohdsi/webapi/versioning/service/VersionService.java b/src/main/java/org/ohdsi/webapi/versioning/service/VersionService.java index 54edd43b1d..85b99ac19a 100644 --- a/src/main/java/org/ohdsi/webapi/versioning/service/VersionService.java +++ b/src/main/java/org/ohdsi/webapi/versioning/service/VersionService.java @@ -1,5 +1,10 @@ package org.ohdsi.webapi.versioning.service; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + import org.ohdsi.webapi.exception.AtlasException; import org.ohdsi.webapi.service.AbstractDaoService; import org.ohdsi.webapi.versioning.domain.Version; @@ -7,8 +12,10 @@ import org.ohdsi.webapi.versioning.domain.VersionPK; import org.ohdsi.webapi.versioning.domain.VersionType; import org.ohdsi.webapi.versioning.dto.VersionUpdateDTO; +import org.ohdsi.webapi.versioning.repository.CharacterizationVersionRepository; import org.ohdsi.webapi.versioning.repository.CohortVersionRepository; import org.ohdsi.webapi.versioning.repository.ConceptSetVersionRepository; +import org.ohdsi.webapi.versioning.repository.IrVersionRepository; import org.ohdsi.webapi.versioning.repository.ReusableVersionRepository; import org.ohdsi.webapi.versioning.repository.VersionRepository; import org.slf4j.Logger; @@ -16,19 +23,14 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Lazy; +import org.springframework.http.HttpStatus; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; +import org.springframework.web.server.ResponseStatusException; import jakarta.persistence.EntityManager; import jakarta.persistence.PersistenceException; -import 
org.springframework.web.server.ResponseStatusException; -import org.springframework.http.HttpStatus; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; @Service @Transactional @@ -47,15 +49,19 @@ public class VersionService extends AbstractDaoService { @Autowired public VersionService( EntityManager entityManager, - CohortVersionRepository cohortRepository, ConceptSetVersionRepository conceptSetVersionRepository, - ReusableVersionRepository reusableRepository) { + CohortVersionRepository cohortRepository, + ReusableVersionRepository reusableRepository, + CharacterizationVersionRepository characterizationRepository, + IrVersionRepository irVersionRepository) { this.entityManager = entityManager; this.repositoryMap = new HashMap<>(); - this.repositoryMap.put(VersionType.COHORT, (VersionRepository) cohortRepository); this.repositoryMap.put(VersionType.CONCEPT_SET, (VersionRepository) conceptSetVersionRepository); + this.repositoryMap.put(VersionType.COHORT, (VersionRepository) cohortRepository); this.repositoryMap.put(VersionType.REUSABLE, (VersionRepository) reusableRepository); + this.repositoryMap.put(VersionType.CHARACTERIZATION, (VersionRepository) characterizationRepository); + this.repositoryMap.put(VersionType.INCIDENCE_RATE, (VersionRepository) irVersionRepository); } private VersionRepository getRepository(VersionType type) { diff --git a/src/main/resources/application.yaml b/src/main/resources/application.yaml index 5a4ebbcef5..cf596a96d8 100644 --- a/src/main/resources/application.yaml +++ b/src/main/resources/application.yaml @@ -89,6 +89,8 @@ kerberos: kinitPath: "" timeout: 60 logging: + stacktrace: + filter-packages: org.springframework,org.apache,jakarta.servlet,org.hibernate,java.lang.reflect,jdk.internal,sun.reflect,com.sun.proxy level: org: hibernate: info diff --git a/src/main/resources/logback-spring.xml b/src/main/resources/logback-spring.xml new file mode 100644 index 
0000000000..2b3da3f574 --- /dev/null +++ b/src/main/resources/logback-spring.xml @@ -0,0 +1,55 @@ + + + + + + + + + + + + + %d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%thread] %logger{40} - %msg%n%fex{${filterPackages}} + + + + + + logs/webapi-full.log + + %d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%thread] %logger{50} - %msg%n%ex{full} + + + logs/webapi-full-%d{yyyy-MM-dd}.%i.log + 50MB + 7 + + + + + + logs/webapi-error.log + + ERROR + + + %d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%thread] %logger{50} - %msg%n%fex{${filterPackages}} + + + logs/webapi-error-%d{yyyy-MM-dd}.%i.log + 50MB + 30 + + + + + + + + + + + +