diff --git a/pom.xml b/pom.xml
index af33ef9421..ac2eefd42c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -622,6 +622,12 @@
             <artifactId>jackson-annotations</artifactId>
             <version>${jackson.version}</version>
+        <dependency>
+            <groupId>org.hibernate</groupId>
+            <artifactId>hibernate-jpamodelgen</artifactId>
+            <version>${hibernate.version}</version>
+            <scope>provided</scope>
+        </dependency>
@@ -1094,15 +1100,9 @@
-            <groupId>com.odysseusinc.arachne</groupId>
-            <artifactId>arachne-scheduler</artifactId>
-            <version>${arachne.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.hibernate</groupId>
-            <artifactId>hibernate-validator</artifactId>
-        </dependency>
-        <dependency>
+            <groupId>com.cronutils</groupId>
+            <artifactId>cron-utils</artifactId>
+            <version>9.1.6</version>
         </dependency>
         <dependency>
             <groupId>org.glassfish.jersey.media</groupId>
diff --git a/src/main/java/com/odysseusinc/scheduler/api/v1/converter/BaseArachneJobDTOToArachneJobConverter.java b/src/main/java/com/odysseusinc/scheduler/api/v1/converter/BaseArachneJobDTOToArachneJobConverter.java
new file mode 100644
index 0000000000..8cba5e913b
--- /dev/null
+++ b/src/main/java/com/odysseusinc/scheduler/api/v1/converter/BaseArachneJobDTOToArachneJobConverter.java
@@ -0,0 +1,145 @@
+package com.odysseusinc.scheduler.api.v1.converter;
+
+import com.cronutils.builder.CronBuilder;
+import com.cronutils.model.Cron;
+import com.cronutils.model.definition.CronDefinition;
+import com.cronutils.model.field.expression.FieldExpression;
+import com.cronutils.model.field.expression.FieldExpressionFactory;
+import com.cronutils.model.field.expression.On;
+import com.cronutils.model.field.value.IntegerFieldValue;
+import com.odysseusinc.arachne.commons.converter.BaseConvertionServiceAwareConverter;
+import com.odysseusinc.scheduler.api.v1.dto.ArachneJobDTO;
+import com.odysseusinc.scheduler.model.ArachneJob;
+import com.odysseusinc.scheduler.model.JobExecutingType;
+
+import java.time.DayOfWeek;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.List;
+import java.util.Optional;
+import java.util.stream.Collectors;
+
+import static com.cronutils.model.field.expression.FieldExpressionFactory.always;
+import static com.cronutils.model.field.expression.FieldExpressionFactory.every;
+import static com.cronutils.model.field.expression.FieldExpressionFactory.on;
+import static com.cronutils.model.field.expression.FieldExpressionFactory.questionMark;
+
+public abstract class BaseArachneJobDTOToArachneJobConverter<S extends ArachneJobDTO, T extends ArachneJob> extends BaseConvertionServiceAwareConverter<S, T> {
+
+ protected final CronDefinition cronDefinition;
+
+ protected BaseArachneJobDTOToArachneJobConverter(CronDefinition cronDefinition) {
+
+ this.cronDefinition = cronDefinition;
+ }
+
+ @Override
+ protected final void convert(final S source, T target) {
+ final Date startDate = source.getStartDate();
+ final JobExecutingType frequency = source.getFrequency();
+ final List<DayOfWeek> weekDays = source.getWeekDays();
+ final String cron = createCron(startDate, frequency, weekDays);
+ target.setCron(cron);
+ target.setId(source.getId());
+ target.setEnabled(source.isEnabled());
+ target.setFrequency(frequency);
+ target.setRecurringTimes(Optional.ofNullable(source.getRecurringTimes()).orElse(0));
+ target.setRecurringUntilDate(source.getRecurringUntilDate());
+ target.setStartDate(source.getStartDate());
+ target.setWeekDays(weekDays);
+ convertJob(source, target);
+ }
+
+ protected abstract void convertJob(final S source, T target);
+
+ protected String createCron(Date startDate, JobExecutingType frequency, List<DayOfWeek> weekDays) {
+
+ final Calendar calendar = Calendar.getInstance();
+ calendar.setTime(startDate);
+
+ final int second = calendar.get(Calendar.SECOND);
+ final int minute = calendar.get(Calendar.MINUTE);
+ final int hour = calendar.get(Calendar.HOUR_OF_DAY);
+ final int day = calendar.get(Calendar.DAY_OF_MONTH);
+ final int month = calendar.get(Calendar.MONTH) + 1;
+ final int year = calendar.get(Calendar.YEAR);
+
+ Cron cron;
+ switch (frequency) {
+ case ONCE:
+ cron = CronBuilder.cron(cronDefinition)
+ .withDoM(on(day))
+ .withMonth(on(month))
+ .withDoW(questionMark())
+ .withHour(on(hour))
+ .withMinute(on(minute))
+ .withSecond(on(second))
+ .instance();
+ break;
+ case HOURLY:
+ cron = CronBuilder.cron(cronDefinition)
+ .withDoM(always())
+ .withMonth(always())
+ .withDoW(questionMark())
+ .withHour(always())
+ .withMinute(on(minute))
+ .withSecond(on(second))
+ .instance();
+ break;
+ case DAILY: {
+ cron = CronBuilder.cron(cronDefinition)
+ .withDoM(every(1))
+ .withMonth(every(1))
+ .withDoW(questionMark())
+ .withHour(on(hour))
+ .withMinute(on(minute))
+ .withSecond(on(second))
+ .instance();
+ break;
+ }
+ case WEEKLY: {
+ if (weekDays == null || weekDays.isEmpty()) {
+ final String message = String.format("Execution period %s requires at least one day of week", frequency);
+ throw new IllegalArgumentException(message);
+ }
+ final List<FieldExpression> collect = weekDays.stream().map(
+ dayOfWeek -> (FieldExpression) new On(new IntegerFieldValue(dayOfWeek.getValue()))).collect(Collectors.toList());
+ cron = CronBuilder.cron(cronDefinition)
+ .withDoM(questionMark())
+ .withMonth(always())
+ .withDoW(FieldExpressionFactory.and(collect))
+ .withHour(on(hour))
+ .withMinute(on(minute))
+ .withSecond(on(second))
+ .instance();
+ break;
+ }
+ case MONTHLY: {
+ cron = CronBuilder.cron(cronDefinition)
+ .withDoM(on(day))
+ .withMonth(always())
+ .withDoW(questionMark())
+ .withHour(on(hour))
+ .withMinute(on(minute))
+ .withSecond(on(second))
+ .instance();
+ break;
+ }
+ case YEARLY: {
+ cron = CronBuilder.cron(cronDefinition)
+ .withDoM(on(day))
+ .withMonth(on(month))
+ .withDoW(questionMark())
+ .withHour(on(hour))
+ .withMinute(on(minute))
+ .withSecond(on(second))
+ .instance();
+ break;
+ }
+ default: {
+ throw new IllegalArgumentException("Unsupported period: " + frequency);
+ }
+ }
+ return cron.asString();
+ }
+}
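
Note (not part of the patch): a minimal standalone sketch of what the builder calls above produce for a WEEKLY job. The Quartz-style CronDefinition and the numeric day-of-week values are assumptions here; the real definition is whatever CronDefinition bean the converter is constructed with.

import static com.cronutils.model.field.expression.FieldExpressionFactory.*;

import java.util.Arrays;

import com.cronutils.builder.CronBuilder;
import com.cronutils.model.CronType;
import com.cronutils.model.definition.CronDefinition;
import com.cronutils.model.definition.CronDefinitionBuilder;
import com.cronutils.model.field.expression.FieldExpression;

public class WeeklyCronSketch {
    public static void main(String[] args) {
        // Assumption: a Quartz-style definition (seconds .. day-of-week, optional year).
        CronDefinition quartz = CronDefinitionBuilder.instanceDefinitionFor(CronType.QUARTZ);
        // 09:30:00 on two days of the week; the numeric values are interpreted by the definition above.
        String cron = CronBuilder.cron(quartz)
                .withDoM(questionMark())
                .withMonth(always())
                .withDoW(and(Arrays.<FieldExpression>asList(on(2), on(5))))
                .withHour(on(9))
                .withMinute(on(30))
                .withSecond(on(0))
                .instance()
                .asString();
        System.out.println(cron); // roughly: 0 30 9 ? * 2,5
    }
}
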
diff --git a/src/main/java/com/odysseusinc/scheduler/api/v1/converter/BaseArachneJobToArachneJobDTOConverter.java b/src/main/java/com/odysseusinc/scheduler/api/v1/converter/BaseArachneJobToArachneJobDTOConverter.java
new file mode 100644
index 0000000000..a9fda33d8e
--- /dev/null
+++ b/src/main/java/com/odysseusinc/scheduler/api/v1/converter/BaseArachneJobToArachneJobDTOConverter.java
@@ -0,0 +1,25 @@
+package com.odysseusinc.scheduler.api.v1.converter;
+
+import com.odysseusinc.arachne.commons.converter.BaseConvertionServiceAwareConverter;
+import com.odysseusinc.scheduler.api.v1.dto.ArachneJobDTO;
+import com.odysseusinc.scheduler.model.ArachneJob;
+
+public abstract class BaseArachneJobToArachneJobDTOConverter<S extends ArachneJob, T extends ArachneJobDTO> extends BaseConvertionServiceAwareConverter<S, T> {
+
+ protected final void convert(S s, T dto) {
+
+ dto.setId(s.getId());
+ dto.setClosed(s.getClosed());
+ dto.setEnabled(s.getEnabled());
+ dto.setFrequency(s.getFrequency());
+ dto.setRecurringTimes(s.getRecurringTimes());
+ dto.setRecurringUntilDate(s.getRecurringUntilDate());
+ dto.setStartDate(s.getStartDate());
+ dto.setWeekDays(s.getWeekDays());
+ dto.setNextExecution(s.getNextExecution());
+ convertJob(s, dto);
+ }
+
+ protected abstract void convertJob(S source, T target);
+
+}
diff --git a/src/main/java/com/odysseusinc/scheduler/api/v1/dto/ArachneJobDTO.java b/src/main/java/com/odysseusinc/scheduler/api/v1/dto/ArachneJobDTO.java
new file mode 100644
index 0000000000..9d2aed5778
--- /dev/null
+++ b/src/main/java/com/odysseusinc/scheduler/api/v1/dto/ArachneJobDTO.java
@@ -0,0 +1,116 @@
+package com.odysseusinc.scheduler.api.v1.dto;
+
+import com.fasterxml.jackson.annotation.JsonFormat;
+import com.odysseusinc.scheduler.model.JobExecutingType;
+
+import javax.validation.constraints.NotNull;
+import java.time.DayOfWeek;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+
+public class ArachneJobDTO {
+ private Long id;
+ private boolean enabled;
+ @NotNull
+ @JsonFormat(shape = JsonFormat.Shape.STRING)
+ private Date startDate;
+ @JsonFormat(shape = JsonFormat.Shape.STRING)
+ private Date nextExecution;
+ @NotNull
+ private JobExecutingType frequency;
+ private List<DayOfWeek> weekDays = new ArrayList<>();
+ private Date recurringUntilDate;
+ private Integer recurringTimes;
+ private Boolean isClosed;
+
+ public Long getId() {
+
+ return id;
+ }
+
+ public void setId(Long id) {
+
+ this.id = id;
+ }
+
+ public boolean isEnabled() {
+
+ return enabled;
+ }
+
+ public void setEnabled(boolean enabled) {
+
+ this.enabled = enabled;
+ }
+
+ public Date getStartDate() {
+
+ return startDate;
+ }
+
+ public void setStartDate(Date startDate) {
+
+ this.startDate = startDate;
+ }
+
+ public JobExecutingType getFrequency() {
+
+ return frequency;
+ }
+
+ public void setFrequency(JobExecutingType frequency) {
+
+ this.frequency = frequency;
+ }
+
+ public List<DayOfWeek> getWeekDays() {
+
+ return weekDays;
+ }
+
+ public void setWeekDays(List<DayOfWeek> weekDays) {
+
+ this.weekDays = weekDays;
+ }
+
+ public Date getRecurringUntilDate() {
+
+ return recurringUntilDate;
+ }
+
+ public void setRecurringUntilDate(Date recurringUntilDate) {
+
+ this.recurringUntilDate = recurringUntilDate;
+ }
+
+ public Integer getRecurringTimes() {
+
+ return recurringTimes;
+ }
+
+ public void setRecurringTimes(Integer recurringTimes) {
+
+ this.recurringTimes = recurringTimes;
+ }
+
+ public Boolean getClosed() {
+
+ return isClosed;
+ }
+
+ public void setClosed(Boolean closed) {
+
+ isClosed = closed;
+ }
+
+ public Date getNextExecution() {
+
+ return nextExecution;
+ }
+
+ public void setNextExecution(Date nextExecution) {
+
+ this.nextExecution = nextExecution;
+ }
+}
diff --git a/src/main/java/com/odysseusinc/scheduler/exception/JobNotFoundException.java b/src/main/java/com/odysseusinc/scheduler/exception/JobNotFoundException.java
new file mode 100644
index 0000000000..5221d16874
--- /dev/null
+++ b/src/main/java/com/odysseusinc/scheduler/exception/JobNotFoundException.java
@@ -0,0 +1,13 @@
+package com.odysseusinc.scheduler.exception;
+
+public class JobNotFoundException extends Exception {
+
+ public JobNotFoundException() {
+
+ }
+
+ public JobNotFoundException(String message) {
+
+ super(message);
+ }
+}
diff --git a/src/main/java/com/odysseusinc/scheduler/model/ArachneJob.java b/src/main/java/com/odysseusinc/scheduler/model/ArachneJob.java
new file mode 100644
index 0000000000..9e950dc6a1
--- /dev/null
+++ b/src/main/java/com/odysseusinc/scheduler/model/ArachneJob.java
@@ -0,0 +1,191 @@
+package com.odysseusinc.scheduler.model;
+
+import org.hibernate.validator.constraints.NotEmpty;
+
+import javax.persistence.Column;
+import javax.persistence.EnumType;
+import javax.persistence.Enumerated;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.MappedSuperclass;
+import javax.persistence.Temporal;
+import javax.persistence.TemporalType;
+import javax.persistence.Transient;
+import javax.validation.constraints.Min;
+import javax.validation.constraints.NotNull;
+import java.time.DayOfWeek;
+import java.util.Date;
+import java.util.List;
+
+@MappedSuperclass
+public abstract class ArachneJob {
+
+ @Id
+ @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "arachne_job_generator")
+ private Long id;
+
+ @NotNull
+ @Column(name = "is_enabled")
+ private boolean enabled = false;
+
+ @NotNull
+ @Column(name = "start_date")
+ private Date startDate;
+
+ @NotNull
+ @Column(name = "frequency")
+ @Enumerated(EnumType.STRING)
+ private JobExecutingType frequency;
+
+ @Min(0)
+ @Column(name = "executed_times")
+ private Integer executedTimes = 0;
+
+ @Column(name = "last_executed_at")
+ private Date lastExecutedAt;
+
+ @Column(name = "is_closed")
+ private Boolean isClosed;
+
+ @Min(0)
+ @Column(name = "recurring_times")
+ private Integer recurringTimes;
+
+ @Column(name = "recurring_until_date")
+ @Temporal(TemporalType.TIMESTAMP)
+ private Date recurringUntilDate;
+
+ @NotEmpty
+ @Column(name = "cron")
+ private String cron;
+
+ @Transient
+ private Date nextExecution;
+
+ public Long getId() {
+
+ return id;
+ }
+
+ public void setId(Long id) {
+
+ this.id = id;
+ }
+
+ public boolean getEnabled() {
+
+ return enabled;
+ }
+
+ public void setEnabled(boolean enabled) {
+
+ this.enabled = enabled;
+ }
+
+ public Date getStartDate() {
+
+ return startDate;
+ }
+
+ public void setStartDate(Date startDate) {
+
+ this.startDate = startDate;
+ }
+
+ public JobExecutingType getFrequency() {
+
+ return frequency;
+ }
+
+ public void setFrequency(JobExecutingType frequency) {
+
+ this.frequency = frequency;
+ }
+
+ @Transient
+ public abstract List<DayOfWeek> getWeekDays();
+
+ public abstract void setWeekDays(List<DayOfWeek> weekDays);
+
+ public Integer getExecutedTimes() {
+
+ return executedTimes;
+ }
+
+ public void setExecutedTimes(Integer executedTimes) {
+
+ this.executedTimes = executedTimes;
+ }
+
+ public Date getLastExecutedAt() {
+
+ return lastExecutedAt;
+ }
+
+ public void setLastExecutedAt(Date lastExecutedAt) {
+
+ this.lastExecutedAt = lastExecutedAt;
+ }
+
+ public Boolean getClosed() {
+
+ return isClosed;
+ }
+
+ public void setClosed(Boolean closed) {
+
+ isClosed = closed;
+ }
+
+ public Integer getRecurringTimes() {
+
+ return recurringTimes;
+ }
+
+ public void setRecurringTimes(Integer recurringTimes) {
+
+ this.recurringTimes = recurringTimes;
+ }
+
+ public Date getRecurringUntilDate() {
+
+ return recurringUntilDate;
+ }
+
+ public void setRecurringUntilDate(Date recurringUntilDate) {
+
+ this.recurringUntilDate = recurringUntilDate;
+ }
+
+ public String getCron() {
+
+ return cron;
+ }
+
+ public void setCron(String cron) {
+
+ this.cron = cron;
+ }
+
+ public Date getNextExecution() {
+
+ return nextExecution;
+ }
+
+ public void setNextExecution(Date nextExecution) {
+
+ this.nextExecution = nextExecution;
+ }
+
+ @Override
+ public String toString() {
+
+ return "ArachneJob{" +
+ "enabled=" + enabled +
+ ", startDate=" + startDate +
+ ", frequency=" + frequency +
+ ", cron='" + cron + '\'' +
+ '}';
+ }
+}
diff --git a/src/main/java/com/odysseusinc/scheduler/model/JobExecutingType.java b/src/main/java/com/odysseusinc/scheduler/model/JobExecutingType.java
new file mode 100644
index 0000000000..89d8ad58a6
--- /dev/null
+++ b/src/main/java/com/odysseusinc/scheduler/model/JobExecutingType.java
@@ -0,0 +1,22 @@
+package com.odysseusinc.scheduler.model;
+
+public enum JobExecutingType {
+ ONCE("ONCE"),
+ HOURLY("HOURLY"),
+ DAILY("DAILY"),
+ WEEKLY("WEEKLY"),
+ MONTHLY("MONTHLY"),
+ YEARLY("YEARLY");
+
+ private String title;
+
+ JobExecutingType(String title) {
+
+ this.title = title;
+ }
+
+ public String getTitle() {
+
+ return title;
+ }
+}
diff --git a/src/main/java/com/odysseusinc/scheduler/model/ScheduledTask.java b/src/main/java/com/odysseusinc/scheduler/model/ScheduledTask.java
new file mode 100644
index 0000000000..31c2bc3f5b
--- /dev/null
+++ b/src/main/java/com/odysseusinc/scheduler/model/ScheduledTask.java
@@ -0,0 +1,15 @@
+package com.odysseusinc.scheduler.model;
+
+public abstract class ScheduledTask<T extends ArachneJob> implements Runnable {
+ protected final T job;
+
+ protected ScheduledTask(T job) {
+
+ this.job = job;
+ }
+
+ public T getJob() {
+
+ return job;
+ }
+}
diff --git a/src/main/java/com/odysseusinc/scheduler/repository/ArachneJobRepository.java b/src/main/java/com/odysseusinc/scheduler/repository/ArachneJobRepository.java
new file mode 100644
index 0000000000..cc1e4ddf28
--- /dev/null
+++ b/src/main/java/com/odysseusinc/scheduler/repository/ArachneJobRepository.java
@@ -0,0 +1,16 @@
+package com.odysseusinc.scheduler.repository;
+
+import com.cosium.spring.data.jpa.entity.graph.domain.EntityGraph;
+import com.cosium.spring.data.jpa.entity.graph.repository.EntityGraphJpaRepository;
+import com.odysseusinc.scheduler.model.ArachneJob;
+import org.springframework.data.repository.NoRepositoryBean;
+
+import java.util.List;
+
+@NoRepositoryBean
+public interface ArachneJobRepository<T extends ArachneJob> extends EntityGraphJpaRepository<T, Long> {
+
+ List<T> findAllByEnabledTrueAndIsClosedFalse(EntityGraph entityGraph);
+
+ List<T> findAllByEnabledTrueAndIsClosedFalse();
+}
diff --git a/src/main/java/com/odysseusinc/scheduler/service/BaseJobService.java b/src/main/java/com/odysseusinc/scheduler/service/BaseJobService.java
new file mode 100644
index 0000000000..6e0b414a98
--- /dev/null
+++ b/src/main/java/com/odysseusinc/scheduler/service/BaseJobService.java
@@ -0,0 +1,15 @@
+package com.odysseusinc.scheduler.service;
+
+import com.odysseusinc.scheduler.exception.JobNotFoundException;
+import com.odysseusinc.scheduler.model.ArachneJob;
+
+public interface BaseJobService<T extends ArachneJob> {
+
+ T createJob(T job);
+
+ T updateJob(T job) throws JobNotFoundException;
+
+ void delete(T job);
+
+ void reassignAllJobs();
+}
diff --git a/src/main/java/com/odysseusinc/scheduler/service/BaseJobServiceImpl.java b/src/main/java/com/odysseusinc/scheduler/service/BaseJobServiceImpl.java
new file mode 100644
index 0000000000..449c387de4
--- /dev/null
+++ b/src/main/java/com/odysseusinc/scheduler/service/BaseJobServiceImpl.java
@@ -0,0 +1,253 @@
+package com.odysseusinc.scheduler.service;
+
+import com.cronutils.model.definition.CronDefinition;
+import com.cronutils.model.time.ExecutionTime;
+import com.cronutils.parser.CronParser;
+import com.odysseusinc.scheduler.exception.JobNotFoundException;
+import com.odysseusinc.scheduler.model.ArachneJob;
+import com.odysseusinc.scheduler.model.JobExecutingType;
+import com.odysseusinc.scheduler.model.ScheduledTask;
+import com.odysseusinc.scheduler.repository.ArachneJobRepository;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.BeansException;
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.ApplicationContextAware;
+import org.springframework.scheduling.TaskScheduler;
+import org.springframework.scheduling.support.CronTrigger;
+import org.springframework.transaction.annotation.Transactional;
+
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
+import java.time.temporal.ChronoUnit;
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ScheduledFuture;
+
+public abstract class BaseJobServiceImpl<T extends ArachneJob> implements BaseJobService<T>, ApplicationContextAware {
+
+ protected final Logger LOGGER = LoggerFactory.getLogger(this.getClass());
+ protected static final String ADDING_NEW_TO_SCHEDULER_LOG = "Scheduling new job";
+ protected static final String REMOVING_FROM_SCHEDULER_LOG = "Removing job with id='{}' from scheduler";
+ protected static final String NOT_EXISTS_EXCEPTION = "Job for analysis with id='%s' does not exist";
+
+ private final TaskScheduler taskScheduler;
+ private final CronDefinition cronDefinition;
+ private final ArachneJobRepository<T> jobRepository;
+ protected ApplicationContext applicationContext;
+
+ private final Map<Long, ScheduledFuture<?>> taskInWork = new ConcurrentHashMap<>();
+
+ protected BaseJobServiceImpl(TaskScheduler taskScheduler,
+ CronDefinition cronDefinition,
+ ArachneJobRepository<T> jobRepository) {
+
+ this.taskScheduler = taskScheduler;
+ this.cronDefinition = cronDefinition;
+ this.jobRepository = jobRepository;
+ }
+
+ @Override
+ public final void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
+ this.applicationContext = applicationContext;
+ }
+
+ @Override
+ @Transactional(rollbackFor = Exception.class)
+ public T createJob(T job) {
+
+ beforeCreate(job);
+ job.setId(null);
+ final boolean isClosed = isClosed(job);
+ job.setClosed(isClosed);
+ final T saved = jobRepository.save(job);
+ saveAdditionalFields(saved);
+ if (!isClosed && saved.getEnabled()) {
+ addToScheduler(saved);
+ }
+ assignNextExecution(saved);
+ afterCreate(saved);
+ return saved;
+ }
+
+ @Override
+ @Transactional
+ public T updateJob(T job) throws JobNotFoundException {
+
+ final T exists = jobRepository.getOne(job.getId());
+ if (exists == null) {
+ final String message = String.format(NOT_EXISTS_EXCEPTION, job.getId());
+ removeFromScheduler(job.getId());
+ throw new JobNotFoundException(message);
+ }
+ beforeUpdate(exists, job);
+ boolean updatedByUser = job.getExecutedTimes() == 0 && Objects.isNull(job.getLastExecutedAt());
+ if (updatedByUser) {
+ exists.setEnabled(job.getEnabled());
+ exists.setStartDate(job.getStartDate());
+ exists.setCron(job.getCron());
+ exists.setWeekDays(job.getWeekDays());
+ exists.setFrequency(job.getFrequency());
+ exists.setRecurringTimes(job.getRecurringTimes());
+ exists.setRecurringUntilDate(job.getRecurringUntilDate());
+ exists.setExecutedTimes(0);
+ exists.setLastExecutedAt(null);
+ updateAdditionalFields(exists, job);
+ } else {
+ exists.setExecutedTimes(job.getExecutedTimes());
+ exists.setLastExecutedAt(job.getLastExecutedAt());
+ }
+ final boolean isClosed = isClosed(exists);
+ exists.setClosed(isClosed);
+ // If job is finished, automatically turn it off
+ if (isClosed && !updatedByUser) {
+ exists.setEnabled(false);
+ }
+ if (isClosed || !exists.getEnabled()){
+ removeFromScheduler(job.getId());
+ } else if (updatedByUser) {
+ removeFromScheduler(job.getId());
+ addToScheduler(exists);
+ }
+ T updated = jobRepository.save(exists);
+ assignNextExecution(updated);
+ afterUpdate(updated);
+ return updated;
+ }
+
+ @Override
+ public void reassignAllJobs() {
+
+ List<T> jobs = getActiveJobs();
+ jobs.forEach(job -> {
+ removeFromScheduler(job.getId());
+ addToScheduler(job);
+ });
+ }
+
+ protected List<T> getActiveJobs() {
+
+ return jobRepository.findAllByEnabledTrueAndIsClosedFalse();
+ }
+
+ @Override
+ @Transactional
+ public void delete(T job) {
+
+ jobRepository.delete(job);
+ }
+
+ protected void beforeCreate(T job) {
+ }
+
+ protected void afterCreate(T job) {
+ }
+
+ protected void beforeUpdate(T exists, T updated) {
+ }
+
+ protected void saveAdditionalFields(T job) {
+ }
+
+ protected abstract void updateAdditionalFields(T exists, T job);
+
+ protected void afterUpdate(T job) {
+ }
+
+ protected boolean isClosed(T job) {
+
+ boolean result = false;
+ if (job.getRecurringTimes() > 0) {
+ result = job.getExecutedTimes() >= job.getRecurringTimes();
+ }
+ final ExecutionTime executionTime = getExecutionTime(job);
+ final ZonedDateTime lastExecuted = ZonedDateTime.now();
+ final Optional<ZonedDateTime> nextExecutionOptional = executionTime.nextExecution(lastExecuted);
+ if (!nextExecutionOptional.isPresent()) {
+ return true;
+ }
+ ZonedDateTime nextExecution = nextExecutionOptional.get();
+ if (JobExecutingType.ONCE.equals(job.getFrequency())) {
+ return !Objects.equals(job.getStartDate(), Date.from(nextExecution.toInstant()));
+ }
+ final Date recurringUntilDate = job.getRecurringUntilDate();
+ if (recurringUntilDate != null) {
+ final ZonedDateTime recurringUntil = getZonedDateTime(recurringUntilDate);
+ final long diff = ChronoUnit.SECONDS.between(nextExecution, recurringUntil);
+ result = result || diff <= 0;
+ }
+ return result;
+ }
+
+ protected T assignNextExecution(T job) {
+
+ Optional<ZonedDateTime> nextExecution = getNextExecution(job);
+ job.setNextExecution(nextExecution.isPresent() ? Date.from(nextExecution.get().toInstant()) : null);
+ return job;
+ }
+
+ protected Optional<ZonedDateTime> getNextExecution(T job) {
+
+ if (job.getEnabled()) {
+ if (Objects.equals(JobExecutingType.ONCE, job.getFrequency())) {
+ return Optional.of(ZonedDateTime.ofInstant(job.getStartDate().toInstant(), ZoneId.systemDefault()));
+ } else {
+ final ExecutionTime executionTime = getExecutionTime(job);
+ return executionTime.nextExecution(ZonedDateTime.now());
+ }
+ }
+ return Optional.empty();
+ }
+
+ protected final ExecutionTime getExecutionTime(T job) {
+
+ final CronParser parser = new CronParser(cronDefinition);
+ return ExecutionTime.forCron(parser.parse(job.getCron()));
+ }
+
+ private ZonedDateTime getZonedDateTime(Date date) {
+
+ return ZonedDateTime.ofInstant(date.toInstant(), ZoneId.systemDefault());
+ }
+
+ protected void addToScheduler(T job) {
+
+ LOGGER.debug(ADDING_NEW_TO_SCHEDULER_LOG);
+ removeFromScheduler(job.getId());
+ if (job.getEnabled()) {
+ final ScheduledFuture<?> runningTask;
+ ScheduledTask<T> scheduledTask = buildScheduledTask(job);
+ ScheduledTaskDelegate<T> taskDelegate = new ScheduledTaskDelegate<>(scheduledTask, getJobService());
+ if (Objects.equals(JobExecutingType.ONCE, job.getFrequency())) {
+ runningTask = taskScheduler.schedule(taskDelegate, job.getStartDate());
+ } else {
+ runningTask = taskScheduler.schedule(
+ taskDelegate,
+ new CronTrigger(job.getCron())
+ );
+ }
+ taskInWork.put(job.getId(), runningTask);
+ }
+ }
+
+ protected BaseJobService<T> getJobService() {
+
+ return applicationContext.getBean(BaseJobService.class);
+ }
+
+ protected void removeFromScheduler(Long id) {
+
+ LOGGER.debug(REMOVING_FROM_SCHEDULER_LOG, id);
+ final ScheduledFuture<?> scheduledFuture = taskInWork.remove(id);
+ if (scheduledFuture != null) {
+ scheduledFuture.cancel(false);
+ }
+ }
+
+ protected abstract ScheduledTask<T> buildScheduledTask(T job);
+
+}
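
For context on isClosed() and getNextExecution() above: cron-utils' ExecutionTime is what decides whether a stored cron can still fire. A small sketch, again assuming a Quartz-style definition for the stored expression:

import java.time.ZonedDateTime;
import java.util.Optional;

import com.cronutils.model.CronType;
import com.cronutils.model.definition.CronDefinitionBuilder;
import com.cronutils.model.time.ExecutionTime;
import com.cronutils.parser.CronParser;

public class NextExecutionSketch {
    public static void main(String[] args) {
        // Assumption: the job's cron was produced against the Quartz definition.
        CronParser parser = new CronParser(CronDefinitionBuilder.instanceDefinitionFor(CronType.QUARTZ));
        ExecutionTime executionTime = ExecutionTime.forCron(parser.parse("0 30 9 ? * 2,5"));

        // An empty Optional means the expression can never fire again,
        // which is the condition isClosed() treats as "job finished".
        Optional<ZonedDateTime> next = executionTime.nextExecution(ZonedDateTime.now());
        System.out.println(next.map(ZonedDateTime::toString).orElse("never"));
    }
}
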
diff --git a/src/main/java/com/odysseusinc/scheduler/service/ScheduledTaskDelegate.java b/src/main/java/com/odysseusinc/scheduler/service/ScheduledTaskDelegate.java
new file mode 100644
index 0000000000..70d0f77f2f
--- /dev/null
+++ b/src/main/java/com/odysseusinc/scheduler/service/ScheduledTaskDelegate.java
@@ -0,0 +1,38 @@
+package com.odysseusinc.scheduler.service;
+
+import com.odysseusinc.scheduler.exception.JobNotFoundException;
+import com.odysseusinc.scheduler.model.ArachneJob;
+import com.odysseusinc.scheduler.model.JobExecutingType;
+import com.odysseusinc.scheduler.model.ScheduledTask;
+
+import java.util.Date;
+import java.util.Objects;
+
+class ScheduledTaskDelegate<T extends ArachneJob> implements Runnable {
+
+ private final ScheduledTask<T> task;
+ private final BaseJobService<T> jobService;
+
+ ScheduledTaskDelegate(ScheduledTask<T> task, BaseJobService<T> jobService) {
+
+ this.task = task;
+ this.jobService = jobService;
+ }
+
+ @Override
+ public void run() {
+ try {
+ task.run();
+ } finally {
+ T job = task.getJob();
+ job.setLastExecutedAt(new Date());
+ if (Objects.equals(JobExecutingType.ONCE, job.getFrequency())) {
+ job.setEnabled(false);
+ }
+ try {
+ jobService.updateJob(job);
+ } catch (JobNotFoundException ignored) {
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/main/java/org/ohdsi/webapi/ClockConfig.java b/src/main/java/org/ohdsi/webapi/ClockConfig.java
new file mode 100644
index 0000000000..2e7d62629a
--- /dev/null
+++ b/src/main/java/org/ohdsi/webapi/ClockConfig.java
@@ -0,0 +1,14 @@
+package org.ohdsi.webapi;
+
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+import java.time.Clock;
+
+@Configuration
+public class ClockConfig {
+ @Bean
+ public Clock clock() {
+ return Clock.systemUTC();
+ }
+}
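
ReviewService reads timestamps from this Clock bean instead of calling Instant.now() directly, which lets tests pin time. A hypothetical test configuration (class name and fixed instant are illustrative):

import java.time.Clock;
import java.time.Instant;
import java.time.ZoneOffset;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;

@Configuration
public class FixedClockTestConfig {
    // Overrides the production clock so review_action timestamps are deterministic in tests.
    @Bean
    @Primary
    public Clock clock() {
        return Clock.fixed(Instant.parse("2025-09-09T00:00:00Z"), ZoneOffset.UTC);
    }
}
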
diff --git a/src/main/java/org/ohdsi/webapi/review/ReviewAction.java b/src/main/java/org/ohdsi/webapi/review/ReviewAction.java
new file mode 100644
index 0000000000..76e024c9e7
--- /dev/null
+++ b/src/main/java/org/ohdsi/webapi/review/ReviewAction.java
@@ -0,0 +1,150 @@
+package org.ohdsi.webapi.review;
+
+import org.ohdsi.webapi.shiro.Entities.UserEntity;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.ManyToOne;
+import javax.persistence.SequenceGenerator;
+import javax.persistence.Table;
+import java.time.Instant;
+
+@Entity
+@Table(name = "review_action")
+public class ReviewAction {
+ @Id
+ @SequenceGenerator(name = "review_action_seq_generator", sequenceName = "review_action_seq", allocationSize = 1)
+ @GeneratedValue(generator = "review_action_seq_generator")
+ @Column(name = "id")
+ private Integer id;
+
+ @Column(name = "timestamp")
+ private Instant timestamp;
+
+ @Column(name = "asset_type", nullable = false)
+ private String assetType;
+
+ @Column(name = "asset_id", nullable = false)
+ private Integer assetId;
+
+ @Column(name = "version")
+ private Integer version;
+
+ @ManyToOne
+ @JoinColumn(name = "user_id")
+ private UserEntity user;
+
+ @Column(name = "action")
+ private String action;
+
+ @Column(name = "comment")
+ private String comment;
+
+ @Column(name = "revoke_comment")
+ private String revokeComment;
+
+ @Column(name = "supporting_info")
+ private String supportingInfo;
+
+ @ManyToOne
+ @JoinColumn(name = "representative_id")
+ private UserEntity representative;
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public Instant getTimestamp() {
+ return timestamp;
+ }
+
+ public void setTimestamp(Instant timestamp) {
+ this.timestamp = timestamp;
+ }
+
+ public String getAssetType() {
+ return assetType;
+ }
+
+ public void setAssetType(String assetType) {
+ this.assetType = assetType;
+ }
+
+ public Integer getAssetId() {
+ return assetId;
+ }
+
+ public void setAssetId(Integer assetId) {
+ this.assetId = assetId;
+ }
+
+ public Integer getVersion() {
+ return version;
+ }
+
+ public void setVersion(Integer version) {
+ this.version = version;
+ }
+
+ public UserEntity getUser() {
+ return user;
+ }
+
+ public void setUser(UserEntity user) {
+ this.user = user;
+ }
+
+ public String getAction() {
+ return action;
+ }
+
+ public void setAction(String action) {
+ this.action = action;
+ }
+
+ public String getComment() {
+ return comment;
+ }
+
+ public void setComment(String comment) {
+ this.comment = comment;
+ }
+
+ public UserEntity getRepresentative() {
+ return representative;
+ }
+
+ public void setRepresentative(UserEntity representative) {
+ this.representative = representative;
+ }
+
+ public String getSupportingInfo() {
+ return supportingInfo;
+ }
+
+ public void setSupportingInfo(String supportingInfo) {
+ this.supportingInfo = supportingInfo;
+ }
+
+ public String getRevokeComment() {
+ return revokeComment;
+ }
+
+ public void setRevokeComment(String revokeComment) {
+ this.revokeComment = revokeComment;
+ }
+
+ public interface Type {
+ String CREATE = "CREATE";
+ String APPROVE = "APPROVE";
+ String REVOKE = "REVOKE";
+ }
+
+}
diff --git a/src/main/java/org/ohdsi/webapi/review/ReviewActionDTO.java b/src/main/java/org/ohdsi/webapi/review/ReviewActionDTO.java
new file mode 100644
index 0000000000..6933f87a97
--- /dev/null
+++ b/src/main/java/org/ohdsi/webapi/review/ReviewActionDTO.java
@@ -0,0 +1,91 @@
+package org.ohdsi.webapi.review;
+
+import com.fasterxml.jackson.annotation.JsonFormat;
+import org.ohdsi.webapi.user.dto.UserDTO;
+
+import java.time.Instant;
+
+public class ReviewActionDTO {
+ private Integer id;
+ @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'", timezone = "UTC")
+ private Instant timestamp;
+ private Integer version;
+ private UserDTO user;
+ private String type;
+ private String comment;
+ private String revokeComment;
+ private String supportingInfo;
+ private UserDTO representative;
+
+ public Integer getId() {
+ return id;
+ }
+
+ public void setId(Integer id) {
+ this.id = id;
+ }
+
+ public Instant getTimestamp() {
+ return timestamp;
+ }
+
+ public void setTimestamp(Instant timestamp) {
+ this.timestamp = timestamp;
+ }
+
+ public Integer getVersion() {
+ return version;
+ }
+
+ public void setVersion(Integer version) {
+ this.version = version;
+ }
+
+ public UserDTO getUser() {
+ return user;
+ }
+
+ public void setUser(UserDTO user) {
+ this.user = user;
+ }
+
+ public String getType() {
+ return type;
+ }
+
+ public void setType(String type) {
+ this.type = type;
+ }
+
+ public String getComment() {
+ return comment;
+ }
+
+ public void setComment(String comment) {
+ this.comment = comment;
+ }
+
+ public UserDTO getRepresentative() {
+ return representative;
+ }
+
+ public void setRepresentative(UserDTO representative) {
+ this.representative = representative;
+ }
+
+ public String getSupportingInfo() {
+ return supportingInfo;
+ }
+
+ public void setSupportingInfo(String supportingInfo) {
+ this.supportingInfo = supportingInfo;
+ }
+
+ public String getRevokeComment() {
+ return revokeComment;
+ }
+
+ public void setRevokeComment(String revokeComment) {
+ this.revokeComment = revokeComment;
+ }
+}
diff --git a/src/main/java/org/ohdsi/webapi/review/ReviewService.java b/src/main/java/org/ohdsi/webapi/review/ReviewService.java
new file mode 100644
index 0000000000..cf8b9c3449
--- /dev/null
+++ b/src/main/java/org/ohdsi/webapi/review/ReviewService.java
@@ -0,0 +1,319 @@
+package org.ohdsi.webapi.review;
+
+import org.apache.shiro.SecurityUtils;
+import org.jetbrains.annotations.NotNull;
+import org.ohdsi.webapi.shiro.Entities.PermissionEntity_;
+import org.ohdsi.webapi.shiro.Entities.RoleEntity;
+import org.ohdsi.webapi.shiro.Entities.RolePermissionEntity;
+import org.ohdsi.webapi.shiro.Entities.RolePermissionEntity_;
+import org.ohdsi.webapi.shiro.Entities.UserEntity;
+import org.ohdsi.webapi.shiro.Entities.UserEntity_;
+import org.ohdsi.webapi.shiro.Entities.UserRoleEntity;
+import org.ohdsi.webapi.shiro.Entities.UserRoleEntity_;
+import org.ohdsi.webapi.shiro.PermissionManager;
+import org.ohdsi.webapi.user.converter.UserEntityToUserDTOConverter;
+import org.ohdsi.webapi.user.dto.UserDTO;
+import org.ohdsi.webapi.util.jpa.JpaSugar;
+import org.ohdsi.webapi.util.jpa.JpaSugar.Filter;
+import org.ohdsi.webapi.versioning.domain.ConceptSetVersion;
+import org.ohdsi.webapi.versioning.service.VersionService;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Controller;
+import org.springframework.transaction.annotation.Transactional;
+import org.springframework.web.bind.annotation.RequestBody;
+
+import javax.persistence.EntityManager;
+import javax.persistence.PersistenceContext;
+import javax.persistence.TypedQuery;
+import javax.persistence.criteria.Root;
+import javax.ws.rs.BadRequestException;
+import javax.ws.rs.ForbiddenException;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.MediaType;
+import java.time.Clock;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.function.BinaryOperator;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+import static org.ohdsi.webapi.tag.TagSecurityUtils.CONCEPT_SET;
+import static org.ohdsi.webapi.util.jpa.JpaSugar.Condition.has;
+import static org.ohdsi.webapi.util.jpa.JpaSugar.Filter.subquery;
+
+@Path(ReviewService.REVIEW)
+@Controller
+public class ReviewService {
+ public static final String REVIEW = "review";
+ private static final String REVIEW_DELEGATE = "review:delegate";
+
+ @PersistenceContext
+ private EntityManager em;
+ @Autowired
+ private Clock clock;
+ @Autowired
+ private UserEntityToUserDTOConverter userConverter;
+ @Autowired
+ private PermissionManager permissionManager;
+
+ @POST
+ @Path("/{type}/{id}/approve")
+ @Transactional
+ public void approve(@PathParam("type") String type, @PathParam("id") Integer id, Approval approve) {
+ UserEntity approver = Optional.ofNullable(approve.approverId).map(validateApprover(type)).orElse(null);
+ Integer version = VersionService.getLatest(em, ConceptSetVersion.class, id);
+ getApproval(type, id, version).ifPresent(approval -> {
+ throw new BadRequestException("Already approved by [" + approval.getUser().getName() + "]");
+ });
+ String comment = Optional.ofNullable(approve)
+ .map(Approval::getComment)
+ .orElse(null);
+
+ UserEntity currentUser = permissionManager.getCurrentUser();
+ UserEntity formalApprover = currentUser.equals(approver) ? currentUser : approver;
+
+ ReviewAction action = create(validateType(type), id, formalApprover, ReviewAction.Type.APPROVE, version, currentUser, comment, approve.getSupportingInfo(), null);
+ em.persist(action);
+ }
+
+ @POST
+ @Path("/{type}/{id}/revoke/{versionId}")
+ @Transactional
+ public void revoke(@PathParam("type") String type, @PathParam("id") Integer id, @PathParam("versionId") Integer version, RevokeRequest request) {
+
+ TypedQuery<ReviewAction> query = JpaSugar.query(em, ReviewAction.class, (cb, cq) -> {
+ Root<ReviewAction> root = cq.from(ReviewAction.class);
+ cq.select(root);
+ cq.where(
+ cb.equal(root.get(ReviewAction_.assetType), validateType(type)),
+ cb.equal(root.get(ReviewAction_.assetId), id),
+ cb.equal(root.get(ReviewAction_.version), version)
+ );
+ cq.orderBy(cb.desc(root.get(ReviewAction_.timestamp)));
+ return em.createQuery(cq);
+ });
+ query.setMaxResults(1);
+ ReviewAction actionToRevoke = query.getResultStream().findFirst().orElse(null);
+ if (actionToRevoke != null) {
+ ReviewAction revokeAction = create(validateType(type), id, permissionManager.getCurrentUser(), ReviewAction.Type.REVOKE, actionToRevoke.getVersion(), null, actionToRevoke.getComment(), actionToRevoke.getSupportingInfo(), request.getComment());
+ em.persist(revokeAction);
+ }
+ }
+
+ public static class RevokeRequest {
+ private String comment;
+
+ public String getComment() {
+ return comment;
+ }
+
+ public void setComment(String comment) {
+ this.comment = comment;
+ }
+ }
+
+ @GET
+ @Path("/{type}/{id}")
+ @Produces(MediaType.APPLICATION_JSON)
+ @Transactional
+ public Map<Integer, ReviewActionDTO> listByObjectId(@PathParam("type") String type, @PathParam("id") Integer id) {
+ TypedQuery<ReviewAction> query = JpaSugar.query(em, ReviewAction.class, (cb, cq) -> {
+ Root<ReviewAction> root = cq.from(ReviewAction.class);
+ cq.select(root);
+ cq.where(
+ cb.equal(root.get(ReviewAction_.assetType), validateType(type)),
+ cb.equal(root.get(ReviewAction_.assetId), id)
+ );
+ cq.orderBy(cb.desc(root.get(ReviewAction_.timestamp)));
+ return em.createQuery(cq);
+ });
+ Map<Integer, ReviewActionDTO> reviewActionsByVersion = query.getResultStream().collect(Collectors.toMap(ReviewAction::getVersion, this::toDto, toLatest()));
+ return reviewActionsByVersion.entrySet().stream()
+ .collect(Collectors.toMap(
+ Map.Entry::getKey,
+ Map.Entry::getValue
+ ));
+ }
+
+ @POST
+ @Path("/{type}/approved")
+ @Produces(MediaType.APPLICATION_JSON)
+ @Transactional
+ public Map<Integer, Map<Integer, ReviewActionDTO>> listByObjectIds(
+ @PathParam("type") String type,
+ @RequestBody List<Integer> ids
+ ) {
+ if (ids == null || ids.isEmpty()) {
+ return new HashMap<>();
+ }
+ String validatedType = validateType(type);
+
+ // Get ALL review actions for the requested assets
+ List<ReviewAction> allActions = JpaSugar.select(em, ReviewAction.class)
+ .where(has(ReviewAction_.assetType, validatedType))
+ .getResultStream()
+ .filter(action -> ids.contains(action.getAssetId()))
+ .collect(Collectors.toList());
+
+ // Group by assetId, then by version, keeping only the latest action per version
+ return allActions.stream()
+ .collect(Collectors.groupingBy(
+ ReviewAction::getAssetId,
+ Collectors.groupingBy(
+ ReviewAction::getVersion,
+ Collectors.collectingAndThen(
+ Collectors.maxBy(Comparator.comparing(ReviewAction::getTimestamp)),
+ opt -> opt.map(this::toDto).orElse(null)
+ )
+ )
+ ))
+ .entrySet().stream()
+ .collect(Collectors.toMap(
+ Map.Entry::getKey,
+ entry -> entry.getValue().entrySet().stream()
+ .filter(versionEntry -> versionEntry.getValue() != null)
+ .collect(Collectors.toMap(
+ Map.Entry::getKey,
+ Map.Entry::getValue
+ ))
+ ))
+ .entrySet().stream()
+ .filter(entry -> !entry.getValue().isEmpty())
+ .filter(entry -> {
+ Integer assetId = entry.getKey();
+ Integer maxReviewVersion = entry.getValue().keySet().stream().max(Comparator.comparingInt(value -> value)).orElse(0);
+ return hasNoLaterVersions(assetId, maxReviewVersion);
+ })
+ .collect(Collectors.toMap(
+ Map.Entry::getKey,
+ Map.Entry::getValue
+ ));
+ }
+
+ private boolean hasNoLaterVersions(Integer assetId, Integer approvedVersion) {
+ Integer nextVersion = VersionService.getLatest(em, ConceptSetVersion.class, assetId);
+ return !(nextVersion > approvedVersion);
+ }
+
+ @GET
+ @Path("/{type}/approvers")
+ @Produces(MediaType.APPLICATION_JSON)
+ @Transactional
+ public List<UserDTO> getApprovers(@PathParam("type") String type) {
+ return JpaSugar.select(em, UserEntity.class).where(
+ hasRoleWithPermission(REVIEW + ":" + type)
+ ).getResultStream().map(userConverter::convert).collect(Collectors.toList());
+ }
+
+ @NotNull
+ public Optional<ReviewActionDTO> getApproval(String type, Integer id, Integer version) {
+ return JpaSugar.select(em, ReviewAction.class).where(
+ has(ReviewAction_.assetType, validateType(type)),
+ has(ReviewAction_.assetId, id),
+ has(ReviewAction_.version, version)
+ ).getResultStream().map(this::toDto).reduce(toLatest()).filter(action1 ->
+ Objects.equals(action1.getType(), ReviewAction.Type.APPROVE)
+ );
+ }
+
+ private ReviewAction create(String type, Integer assetId, UserEntity user, String actionType, Integer version, UserEntity representative, String comment, String supportingInfo, String revokeComment) {
+ ReviewAction action = new ReviewAction();
+ action.setUser(user);
+ action.setAssetType(type);
+ action.setAssetId(assetId);
+ action.setAction(actionType);
+ action.setTimestamp(clock.instant());
+ action.setVersion(version);
+ action.setRepresentative(representative);
+ action.setComment(comment);
+ action.setRevokeComment(revokeComment);
+ action.setSupportingInfo(supportingInfo);
+ return action;
+ }
+
+ private ReviewActionDTO toDto(ReviewAction entity) {
+ ReviewActionDTO dto = new ReviewActionDTO();
+ dto.setId(entity.getId());
+ dto.setTimestamp(entity.getTimestamp());
+ dto.setVersion(entity.getVersion());
+ dto.setUser(userConverter.convert(entity.getUser()));
+ dto.setType(entity.getAction());
+ dto.setComment(entity.getComment());
+ dto.setRevokeComment(entity.getRevokeComment());
+ dto.setSupportingInfo(entity.getSupportingInfo());
+ dto.setRepresentative(userConverter.convert(entity.getRepresentative()));
+ return dto;
+ }
+
+ private Function<Long, UserEntity> validateApprover(String type) {
+ return approverId -> {
+ if (SecurityUtils.getSubject().isPermitted(REVIEW_DELEGATE)) {
+ return JpaSugar.select(em, UserEntity.class).where(
+ hasRoleWithPermission(REVIEW + ":" + type), has(UserEntity_.id, approverId)
+ ).getResultStream().findFirst().orElseThrow(() ->
+ new BadRequestException("Not an id that refers to user with valid approval permission: " + approverId)
+ );
+ } else {
+ throw new ForbiddenException("User not authorized to delegate the approval");
+ }
+ };
+ }
+
+ private static BinaryOperator<ReviewActionDTO> toLatest() {
+ return (a, b) -> a.getTimestamp().compareTo(b.getTimestamp()) > 0 ? a : b;
+ }
+
+ private static String validateType(String type) {
+ if (Objects.equals(type, CONCEPT_SET)) {
+ return type;
+ } else {
+ throw new BadRequestException("Reviews are not supported for [" + type + "]");
+ }
+ }
+
+ private static Filter<UserEntity> hasRoleWithPermission(String permission) {
+ return subquery(UserEntity.class, UserRoleEntity.class, UserRoleEntity_.user).where(
+ subquery(RoleEntity.class, RolePermissionEntity.class, RolePermissionEntity_.role).where(
+ has(RolePermissionEntity_.permission, PermissionEntity_.value, permission)
+ ).on(UserRoleEntity_.role)
+ );
+ }
+
+ public static class Approval {
+ private String comment;
+ private Long approverId;
+ private String supportingInfo;
+
+ public String getComment() {
+ return comment;
+ }
+
+ public void setComment(String comment) {
+ this.comment = comment;
+ }
+
+ public Long getApproverId() {
+ return approverId;
+ }
+
+ public void setApproverId(Long approverId) {
+ this.approverId = approverId;
+ }
+
+ public String getSupportingInfo() {
+ return supportingInfo;
+ }
+
+ public void setSupportingInfo(String supportingInfo) {
+ this.supportingInfo = supportingInfo;
+ }
+ }
+}
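
For reference, a hypothetical client-side call to the approve endpoint above. The base URL, payload values, and identifiers are illustrative; a JSON provider such as Jackson is assumed to be registered on the client, and the caller must be authenticated with the review:conceptset permission.

import java.util.HashMap;
import java.util.Map;

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

public class ReviewClientSketch {
    public static void main(String[] args) {
        Client client = ClientBuilder.newClient();
        // Fields mirror ReviewService.Approval: comment, approverId (optional, needs review:delegate), supportingInfo.
        Map<String, Object> approval = new HashMap<>();
        approval.put("comment", "Looks good");
        approval.put("supportingInfo", "Ticket ABC-123");

        Response response = client.target("http://localhost:8080/WebAPI")
                .path("review/conceptset/42/approve")
                .request(MediaType.APPLICATION_JSON)
                .post(Entity.json(approval));
        System.out.println(response.getStatus());
        client.close();
    }
}
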
diff --git a/src/main/java/org/ohdsi/webapi/service/ConceptSetService.java b/src/main/java/org/ohdsi/webapi/service/ConceptSetService.java
index 84059bc4a6..88a16ebdd5 100644
--- a/src/main/java/org/ohdsi/webapi/service/ConceptSetService.java
+++ b/src/main/java/org/ohdsi/webapi/service/ConceptSetService.java
@@ -17,6 +17,7 @@
import java.io.ByteArrayOutputStream;
import java.util.*;
+import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
@@ -164,7 +165,9 @@ public void customize(CacheManager cacheManager) {
public ConceptSetDTO getConceptSet(@PathParam("id") final int id) {
ConceptSet conceptSet = getConceptSetRepository().findById(id);
ExceptionUtils.throwNotFoundExceptionIfNull(conceptSet, String.format("There is no concept set with id = %d.", id));
- return conversionService.convert(conceptSet, ConceptSetDTO.class);
+ ConceptSetDTO dto = conversionService.convert(conceptSet, ConceptSetDTO.class);
+ dto.setVersion(versionService.getLatest(ConceptSetVersion.class, id));
+ return dto;
}
/**
diff --git a/src/main/java/org/ohdsi/webapi/service/dto/ConceptSetDTO.java b/src/main/java/org/ohdsi/webapi/service/dto/ConceptSetDTO.java
index 1323d338ed..d2e3ef8661 100644
--- a/src/main/java/org/ohdsi/webapi/service/dto/ConceptSetDTO.java
+++ b/src/main/java/org/ohdsi/webapi/service/dto/ConceptSetDTO.java
@@ -5,6 +5,7 @@ public class ConceptSetDTO extends CommonEntityExtDTO {
private Integer id;
private String name;
private String description;
+ private Integer version;
public Integer getId() {
return id;
@@ -30,4 +31,12 @@ public void setDescription(String description) {
this.description = description;
}
+ public Integer getVersion() {
+ return version;
+ }
+
+ public void setVersion(Integer version) {
+ this.version = version;
+ }
+
}
diff --git a/src/main/java/org/ohdsi/webapi/util/jpa/JpaSugar.java b/src/main/java/org/ohdsi/webapi/util/jpa/JpaSugar.java
new file mode 100644
index 0000000000..ae447bbdd3
--- /dev/null
+++ b/src/main/java/org/ohdsi/webapi/util/jpa/JpaSugar.java
@@ -0,0 +1,199 @@
+package org.ohdsi.webapi.util.jpa;
+
+
+import javax.persistence.EntityManager;
+import javax.persistence.TypedQuery;
+import javax.persistence.criteria.AbstractQuery;
+import javax.persistence.criteria.CriteriaBuilder;
+import javax.persistence.criteria.CriteriaQuery;
+import javax.persistence.criteria.CriteriaUpdate;
+import javax.persistence.criteria.Path;
+import javax.persistence.criteria.Predicate;
+import javax.persistence.criteria.Root;
+import javax.persistence.criteria.Subquery;
+import javax.persistence.metamodel.SingularAttribute;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import java.util.function.BiFunction;
+import java.util.function.Function;
+import java.util.stream.Stream;
+
+/**
+ * Syntactic sugar to get more expressive semantics on the JPA operations.
+ */
+public interface JpaSugar {
+
+ /**
+ * Creates a simple select query, with no ordering but straightforward, SQL-like semantics
+ *
+ * @param em entity manager to use
+ * @param clazz Root class to use in FROM query section
+ * @param <T> root entity type
+ */
+ static <T> Where<T, TypedQuery<T>> select(EntityManager em, Class<T> clazz) {
+ return conditions -> {
+ CriteriaQuery<T> criteriaQuery = query(em, clazz, (cb, cq) -> {
+ Root<T> root = cq.from(clazz);
+ CriteriaQuery<T> query = cq.select(root);
+ return query.where(Filter.and(conditions).apply(cb, query).apply(root));
+ });
+ return em.createQuery(criteriaQuery);
+ };
+ }
+
+
+ /**
+ * The most basic syntactic sugar function that saves the caller the need to write
+ * EntityManager.getCriteriaBuilder() and CriteriaQuery.createQuery() calls
+ * @param em entity manager to use
+ * @param clazz query return class
+ * @param query query building function. Takes criteria builder, criteria query, root path and produces complete query
+ * @param <T> query return type
+ * @param <V> method return type. Since this function does not perform a call to em.createQuery() itself,
+ * this allows for flexible return type, so that the caller can do it both inside the query function
+ * or as part of processing return value from this method
+ */
+ static <T, V> V query(EntityManager em, Class<T> clazz, BiFunction<CriteriaBuilder, CriteriaQuery<T>, V> query) {
+ CriteriaBuilder cb = em.getCriteriaBuilder();
+ return query.apply(cb, cb.createQuery(clazz));
+ }
+
+ static <T> int update(
+ EntityManager em, Class<T> clazz,
+ BiFunction<CriteriaBuilder, CriteriaUpdate<T>, Function<Root<T>, CriteriaUpdate<T>>> query
+ ) {
+ CriteriaBuilder cb = em.getCriteriaBuilder();
+ CriteriaUpdate<T> q = cb.createCriteriaUpdate(clazz);
+ return em.createQuery(query.apply(cb, q).apply(q.from(clazz))).executeUpdate();
+ }
+
+ @FunctionalInterface
+ interface Where<T, R> {
+ R where(Filter<T>... biFunctions);
+ }
+
+ @FunctionalInterface
+ interface Filter<E> {
+ Function<Path<E>, Predicate> apply(CriteriaBuilder criteriaBuilder, AbstractQuery<?> query);
+
+ @SafeVarargs
+ static <T> Filter<T> or(Filter<T>... fns) {
+ return or(Arrays.asList(fns));
+ }
+
+ static <T> Filter<T> or(List<Filter<T>> list) {
+ return (cb, query) -> root ->
+ list.stream().map(item ->
+ item.apply(cb, query).apply(root)
+ ).reduce(cb::or).orElseGet(cb::disjunction);
+ }
+
+ @SafeVarargs
+ static <T> Filter<T> and(Filter<T>... fns) {
+ return and(Arrays.asList(fns));
+ }
+
+ static <T> Filter<T> and(List<Filter<T>> list) {
+ return (cb, query) -> root ->
+ list.stream().map(item ->
+ item.apply(cb, query).apply(root)
+ ).reduce(cb::and).orElseGet(cb::conjunction);
+ }
+
+ static <T, L> Where<L, Filter<T>> subquery(Class<T> entityClass, Class<L> linkClass, SingularAttribute<? super L, T> attribute) {
+ return filter -> (cb, query) -> path -> {
+ Subquery<T> sq = query.subquery(entityClass);
+ Root<L> root = sq.from(linkClass);
+ return path.in(sq.select(root.get(attribute)).where(and(filter).apply(cb, sq).apply(root)));
+ };
+ }
+
+ default <V> Filter<V> on(SingularAttribute<? super V, E> attribute) {
+ return (cb, query) -> path -> apply(cb, query).apply(path.get(attribute));
+ }
+
+ default <T> Filter<T> on(Class<T> entityClass, Class<E> linkClass, SingularAttribute<? super E, T> attribute) {
+ return (cb, query) -> path -> {
+ Subquery<T> sq = query.subquery(entityClass);
+ Root<E> root = sq.from(linkClass);
+ return path.in(sq.select(root.get(attribute)).where(apply(cb, sq).apply(root)));
+ };
+ }
+
+ }
+
+ /**
+ * A basic predicate-holding function.
+ * Normally encapsulates predicate functions and one or more values required to apply it.
+ *
+ * @param <E> type of value on which predicates operate
+ */
+ @FunctionalInterface
+ interface Condition<E> extends BiFunction<CriteriaBuilder, Path<E>, Predicate>, Filter<E> {
+ @Override
+ default Function<Path<E>, Predicate> apply(CriteriaBuilder cb, AbstractQuery<?> query) {
+ return root -> apply(cb, root);
+ };
+
+ /**
+ * A go-to where function for trivial queries selecting by a single attribute.
+ * Not incredibly well-thought in terms of composition potential, but we'll need more active use cases
+ * to sort that out.
+ *
+ * @param attribute attribute metamodel reference
+ * @param value value to match against using equals
+ * @param <E> Entity type
+ * @param <V> Attribute value type
+ */
+ static <E, V> Condition<E> has(SingularAttribute<? super E, V> attribute, V value) {
+ return (cb, path) -> cb.equal(path.get(attribute), value);
+ }
+
+ static <E, U, V> Condition<E> has(SingularAttribute<? super E, U> attribute1, SingularAttribute<? super U, V> attribute2, V value) {
+ return (cb, path) -> cb.equal(path.get(attribute1).get(attribute2), value);
+ }
+
+ static <V> Condition<V> in(Collection<V> values) {
+ return values.isEmpty() ? (cb, path) -> cb.disjunction() : (cb, path) -> path.in(values);
+ }
+
+ /**
+ * Merges multiple conditions operating on the same entity into a single one, using AND and merging operation.
+ *
+ * @param conditions conditions to merge
+ * @param <T> entity or attribute type of all conditions
+ */
+ @SafeVarargs
+ static <T> Condition<T> and(Condition<T>... conditions) {
+ return (cb, path) -> Stream.of(conditions).map(
+ condition -> condition.apply(cb, path)
+ ).reduce(cb::and).orElseGet(cb::conjunction);
+ }
+
+ /**
+ * Merges multiple conditions operating on the same entity into a single one, using OR operation.
+ *
+ * @param conditions conditions to merge
+ * @param <T> entity or attribute type of all conditions
+ */
+ @SafeVarargs
+ static <T> Condition<T> or(Condition<T>... conditions) {
+ return (cb, path) -> Stream.of(conditions).map(
+ condition -> condition.apply(cb, path)
+ ).reduce(cb::or).orElseGet(cb::disjunction);
+ }
+
+
+ /**
+ * A functional composition method that applies current condition after extracting the provided attribute.
+ * Purely for convenience purposes for cases when we have no need to define a variable to hold the transition path separately
+ *
+ * @param attribute attribute.
+ * @param <T> entity type to read attribute from
+ */
+ default <T> Condition<T> on(SingularAttribute<? super T, E> attribute) {
+ return (cb, path) -> apply(cb, path.get(attribute));
+ }
+ }
+}
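
A minimal usage sketch of the helpers above, mirroring how ReviewService composes them. The EntityManager is assumed to be injected, and ReviewAction_ is the metamodel generated by the hibernate-jpamodelgen dependency added in this patch.

import static org.ohdsi.webapi.util.jpa.JpaSugar.Condition.has;

import java.util.List;

import javax.persistence.EntityManager;

import org.ohdsi.webapi.review.ReviewAction;
import org.ohdsi.webapi.review.ReviewAction_;
import org.ohdsi.webapi.util.jpa.JpaSugar;

public class JpaSugarSketch {

    // Equivalent to: SELECT ra FROM ReviewAction ra WHERE ra.assetType = :type AND ra.assetId = :id
    static List<ReviewAction> actionsFor(EntityManager em, String type, Integer assetId) {
        return JpaSugar.select(em, ReviewAction.class)
                .where(
                        has(ReviewAction_.assetType, type),
                        has(ReviewAction_.assetId, assetId)
                )
                .getResultList();
    }
}
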
diff --git a/src/main/java/org/ohdsi/webapi/versioning/service/VersionService.java b/src/main/java/org/ohdsi/webapi/versioning/service/VersionService.java
index 66966fd4d5..8cda98bcf5 100644
--- a/src/main/java/org/ohdsi/webapi/versioning/service/VersionService.java
+++ b/src/main/java/org/ohdsi/webapi/versioning/service/VersionService.java
@@ -2,10 +2,13 @@
import org.ohdsi.webapi.exception.AtlasException;
import org.ohdsi.webapi.service.AbstractDaoService;
+import org.ohdsi.webapi.util.jpa.JpaSugar;
import org.ohdsi.webapi.versioning.domain.Version;
import org.ohdsi.webapi.versioning.domain.VersionBase;
import org.ohdsi.webapi.versioning.domain.VersionPK;
+import org.ohdsi.webapi.versioning.domain.VersionPK_;
import org.ohdsi.webapi.versioning.domain.VersionType;
+import org.ohdsi.webapi.versioning.domain.Version_;
import org.ohdsi.webapi.versioning.dto.VersionUpdateDTO;
import org.ohdsi.webapi.versioning.repository.CharacterizationVersionRepository;
import org.ohdsi.webapi.versioning.repository.CohortVersionRepository;
@@ -24,11 +27,15 @@
import javax.persistence.EntityManager;
import javax.persistence.PersistenceException;
+import javax.persistence.criteria.CriteriaQuery;
+import javax.persistence.criteria.Path;
+import javax.persistence.criteria.Predicate;
import javax.ws.rs.NotFoundException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
+import java.util.Optional;
@Service
@Transactional
@@ -63,6 +70,19 @@ public VersionService(
this.repositoryMap.put(VersionType.REUSABLE, (VersionRepository) reusableRepository);
}
+ public Integer getLatest(Class<? extends Version> clazz, long id) {
+ return getLatest(entityManager, clazz, id);
+ }
+
+ public static Integer getLatest(EntityManager em, Class<? extends Version> clazz, long id) {
+ CriteriaQuery<Integer> criteriaQuery = JpaSugar.query(em, Integer.class, (cb, cq) -> {
+ Path<VersionPK> pk = cq.from(clazz).get(Version_.pk);
+ Predicate hasId = cb.equal(pk.get(VersionPK_.assetId), id);
+ return cq.select(cb.max(pk.get(VersionPK_.version))).where(hasId);
+ });
+ return Optional.ofNullable(em.createQuery(criteriaQuery).getSingleResult()).orElse(0) + 1;
+ }
+
private VersionRepository getRepository(VersionType type) {
return repositoryMap.get(type);
}
diff --git a/src/main/resources/db/migration/postgresql/V2.16.0.20250909000000__reviews.sql b/src/main/resources/db/migration/postgresql/V2.16.0.20250909000000__reviews.sql
new file mode 100644
index 0000000000..7881a4a78c
--- /dev/null
+++ b/src/main/resources/db/migration/postgresql/V2.16.0.20250909000000__reviews.sql
@@ -0,0 +1,17 @@
+CREATE SEQUENCE ${ohdsiSchema}.review_action_seq START WITH 1;
+CREATE TABLE ${ohdsiSchema}.review_action (
+ id BIGINT PRIMARY KEY DEFAULT nextval('${ohdsiSchema}.review_action_seq'),
+ timestamp TIMESTAMP NOT NULL,
+ asset_type VARCHAR(50) NOT NULL,
+ asset_id BIGINT NOT NULL,
+ version INT4,
+ user_id INTEGER,
+ action VARCHAR NOT NULL,
+ comment VARCHAR(2000),
+ revoke_comment VARCHAR(2000),
+ supporting_info VARCHAR(4000),
+ representative_id INTEGER,
+
+ CONSTRAINT fk_review_action__user FOREIGN KEY (user_id) REFERENCES ${ohdsiSchema}.sec_user (id),
+ CONSTRAINT fk_review_action__representative FOREIGN KEY (representative_id) REFERENCES ${ohdsiSchema}.sec_user (id)
+);
diff --git a/src/main/resources/db/migration/postgresql/V2.16.0.20250909000002__review_permissions.sql b/src/main/resources/db/migration/postgresql/V2.16.0.20250909000002__review_permissions.sql
new file mode 100644
index 0000000000..01826eb55f
--- /dev/null
+++ b/src/main/resources/db/migration/postgresql/V2.16.0.20250909000002__review_permissions.sql
@@ -0,0 +1,17 @@
+INSERT INTO ${ohdsiSchema}.sec_permission (id, value, description) VALUES
+(nextval('${ohdsiSchema}.sec_permission_id_seq'), 'review:delegate', 'Delegate approval'),
+(nextval('${ohdsiSchema}.sec_permission_id_seq'), 'review:conceptset', 'Approve (conceptset)');
+
+INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id)
+SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id
+FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr
+WHERE sp.value IN ('review:delegate', 'review:conceptset') AND sr.name IN ('Moderator');
+
+INSERT INTO ${ohdsiSchema}.sec_permission (id, value, description) VALUES
+(nextval('${ohdsiSchema}.sec_permission_id_seq'), 'review:conceptset:approved:post', 'View concept sets approval info'),
+(nextval('${ohdsiSchema}.sec_permission_id_seq'), 'review:conceptset:*:get', 'Get concept set approval info');
+
+INSERT INTO ${ohdsiSchema}.sec_role_permission(id, role_id, permission_id)
+SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sr.id, sp.id
+FROM ${ohdsiSchema}.sec_permission SP, ${ohdsiSchema}.sec_role sr
+WHERE sp.value IN ('review:conceptset:approved:post', 'review:conceptset:*:get') AND sr.name IN ('Atlas users');