Commit 0a0c5b8d authored by Matija Obreza

Merge branch 'smarter-JSON-serialization-for-ui' into 'master'

Smarter JSON serialization for UI

See merge request genesys-pgr/genesys-server!421
parents f24473b6 bee90848
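
The change applied across the domain model below is twofold: each @JsonIdentityInfo gains an explicit scope, so a Jackson object id (the uuid or id property) only has to be unique within its own entity class rather than globally, and the accession field of AccessionAlias, AccessionCollect, AccessionGeo and AccessionRemark gains @JsonBackReference, so serializing an accession graph no longer recurses back through the child side. A minimal, self-contained sketch of the two annotations working together (class and field names are illustrative, not taken from this code base):

import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

import com.fasterxml.jackson.annotation.JsonBackReference;
import com.fasterxml.jackson.annotation.JsonIdentityInfo;
import com.fasterxml.jackson.annotation.JsonManagedReference;
import com.fasterxml.jackson.annotation.ObjectIdGenerators;
import com.fasterxml.jackson.databind.ObjectMapper;

public class SerializationSketch {

    // scope keys the object id to this class, so the uuid only has to be unique among Parent instances
    @JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "uuid", scope = Parent.class)
    static class Parent {
        public UUID uuid = UUID.randomUUID();
        @JsonManagedReference
        public List<Child> children = new ArrayList<>();
    }

    static class Child {
        public String name;
        @JsonBackReference // omitted from the JSON output, so the Parent <-> Child cycle cannot recurse
        public Parent parent;
    }

    public static void main(String[] args) throws Exception {
        Parent parent = new Parent();
        Child child = new Child();
        child.name = "alias-1";
        child.parent = parent;
        parent.children.add(child);
        // Children appear without their "parent" field; the parent's uuid doubles as its Jackson object id.
        System.out.println(new ObjectMapper().writeValueAsString(parent));
    }
}
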
......@@ -46,7 +46,7 @@ import java.util.Set;
@Table(name = "partner")
@Cacheable
@Document(indexName = "partner")
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "uuid")
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "uuid", scope = Partner.class)
public class Partner extends UuidModel implements SelfCleaning, AclAwareModel {
private static final ClassAclOid<Partner> PARENT_OID = ClassAclOid.forClass(Partner.class);
......
......@@ -81,7 +81,7 @@ import com.fasterxml.jackson.annotation.ObjectIdGenerators;
@Cacheable
@Audited
@Document(indexName = "dataset")
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "uuid")
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "uuid", scope = Dataset.class)
public class Dataset extends UuidModel implements Publishable, SelfCleaning, AclAwareModel {
/** The Constant serialVersionUID. */
......
......@@ -47,7 +47,7 @@ import java.util.List;
@Table(name = "descriptor")
@Audited
@Document(indexName = "descriptor")
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "uuid")
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "uuid", scope = Descriptor.class)
public class Descriptor extends UuidModel implements SelfCleaning, Publishable, Copyable<Descriptor>, AclAwareModel {
/** The Constant serialVersionUID. */
......
......@@ -48,7 +48,7 @@ import java.util.Map;
@Audited
@Cacheable
@Document(indexName = "descriptorlist")
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "uuid")
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "uuid", scope = DescriptorList.class)
public class DescriptorList extends UuidModel implements Publishable, SelfCleaning, AclAwareModel {
/** The Constant serialVersionUID. */
......
......@@ -43,7 +43,7 @@ import java.util.List;
*/
@Entity
@Table(name = "vocabulary")
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "uuid")
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "uuid", scope = ControlledVocabulary.class)
@Document(indexName = "controlledvocabulary")
public class ControlledVocabulary extends UuidModel implements Publishable, SelfCleaning, AclAwareModel {
......
......@@ -36,6 +36,8 @@ import org.genesys.blocks.auditlog.annotations.Audited;
import org.genesys.blocks.model.BasicModel;
import org.genesys.blocks.model.SelfCleaning;
import com.fasterxml.jackson.annotation.JsonBackReference;
/**
* Accession "alias"
*/
......@@ -85,6 +87,7 @@ public class AccessionAlias extends BasicModel implements AccessionRelated, Self
@ManyToOne(optional = false, fetch = FetchType.LAZY, cascade = {})
@JoinColumn(name = "accessionId", nullable = false, updatable = false)
@JsonBackReference
private AccessionId accession;
@Size(max = 150)
......
......@@ -42,6 +42,8 @@ import org.genesys.blocks.model.BasicModel;
import org.genesys.blocks.model.SelfCleaning;
import org.hibernate.annotations.Type;
import com.fasterxml.jackson.annotation.JsonBackReference;
/**
* Collecting data
*/
......@@ -56,6 +58,7 @@ public class AccessionCollect extends BasicModel implements AccessionRelated, Se
private long version = 0;
@OneToOne(mappedBy = "coll", optional = false, fetch = FetchType.LAZY, cascade = {})
@JsonBackReference
private AccessionId accession;
@Size(max = 8)
......
......@@ -73,6 +73,7 @@ public abstract class AccessionData extends AuditedVersionedModel implements IdU
@OneToOne(cascade = { CascadeType.PERSIST, CascadeType.MERGE }, fetch = FetchType.EAGER, optional = false, orphanRemoval = false)
@JoinColumn(name = "id")
@JsonUnwrapped
@JsonIgnoreProperties({ "id", "version", "active", "createdBy", "createdDate", "lastModifiedBy", "lastModifiedDate", "_class" })
@Field(type=FieldType.Auto)
@QueryInit({ "geo.*", "coll.*", "pdci.*", "lists.*" })
private AccessionId accessionId;
......
......@@ -29,7 +29,6 @@ import javax.persistence.PreUpdate;
import javax.persistence.Table;
import javax.persistence.Version;
import org.apache.commons.lang3.StringUtils;
import org.genesys.blocks.auditlog.annotations.Audited;
import org.genesys.blocks.model.BasicModel;
import org.genesys.blocks.model.JsonViews;
......@@ -40,6 +39,7 @@ import org.genesys2.server.model.impl.TileClimate;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldType;
import com.fasterxml.jackson.annotation.JsonBackReference;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
......@@ -56,6 +56,7 @@ public class AccessionGeo extends BasicModel implements GeoReferencedEntity, Acc
private long version = 0;
@OneToOne(mappedBy = "geo", optional = false, fetch = FetchType.LAZY, cascade = {})
@JsonBackReference
private AccessionId accession;
@Column(name = "longitude")
......
......@@ -54,12 +54,10 @@ import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldIndex;
import org.springframework.data.elasticsearch.annotations.FieldType;
import com.fasterxml.jackson.annotation.JsonIdentityInfo;
import com.fasterxml.jackson.annotation.JsonIdentityReference;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonView;
import com.fasterxml.jackson.annotation.ObjectIdGenerators;
import cz.jirutka.validator.collection.constraints.EachPattern;
import cz.jirutka.validator.collection.constraints.EachSize;
......@@ -91,7 +89,6 @@ public class AccessionId extends AuditedVersionedModel implements IdUUID {
@ManyToMany(cascade = {}, fetch = FetchType.LAZY)
@JoinTable(name = "accession_listitem", joinColumns = @JoinColumn(name = "acceid"), inverseJoinColumns = @JoinColumn(name = "listid"))
@JsonView({ JsonViews.Indexed.class })
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "uuid")
@JsonIdentityReference(alwaysAsId = true)
@Field(index = FieldIndex.not_analyzed, type = FieldType.String)
private Set<AccessionList> lists;
......
......@@ -35,8 +35,12 @@ import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldIndex;
import org.springframework.data.elasticsearch.annotations.FieldType;
import com.fasterxml.jackson.annotation.JsonIdentityInfo;
import com.fasterxml.jackson.annotation.ObjectIdGenerators;
@Entity
@Table(name = "accession_list")
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "uuid", scope = AccessionList.class)
public class AccessionList extends AuditedVersionedModel implements AclAwareModel {
private static final long serialVersionUID = 991886970995006680L;
......
......@@ -36,6 +36,8 @@ import org.genesys.blocks.model.BasicModel;
import org.genesys.blocks.model.SelfCleaning;
import org.hibernate.annotations.Type;
import com.fasterxml.jackson.annotation.JsonBackReference;
/**
* Accession remark
*/
......@@ -55,6 +57,7 @@ public class AccessionRemark extends BasicModel implements AccessionRelated, Sel
@ManyToOne(optional = false, fetch = FetchType.LAZY, cascade = {})
@JoinColumn(name = "accessionId", nullable = false, updatable = false)
@JsonBackReference
private AccessionId accession;
@Lob
......
......@@ -50,7 +50,7 @@ import com.fasterxml.jackson.annotation.ObjectIdGenerators;
// Add index on all fields
@Table(name = "taxonomy2", uniqueConstraints = { @UniqueConstraint(name="UK_taxonomy2", columnNames = { "genus", "species", "spAuthor", "subtaxa", "subtAuthor" }) })
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "id")
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "id", scope = Taxonomy2.class)
public class Taxonomy2 extends GlobalVersionedAuditedModel {
private static final long serialVersionUID = 8881324404490162933L;
......
......@@ -54,7 +54,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
@Entity
@Table(name = "country")
@Document(indexName = "country")
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "id")
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "id", scope = Country.class)
public class Country extends BasicModel {
private static final long serialVersionUID = -1688723909298769804L;
......
......@@ -65,7 +65,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
@Entity
@Table(name = "crop")
@Document(indexName = "crop")
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "id")
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "id", scope = Crop.class)
public class Crop extends GlobalVersionedAuditedModel implements SelfCleaning, AclAwareModel {
private static final long serialVersionUID = -2686341831839109257L;
......
......@@ -66,7 +66,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
@Entity
@Table(name = "faoinstitute", uniqueConstraints = @UniqueConstraint(columnNames = { "code" }), indexes = { @Index(columnList = "code", name = "code_FAOINSTITUTE") })
@Document(indexName = "faoinstitute")
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "id")
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "id", scope = FaoInstitute.class)
public class FaoInstitute extends BasicModel implements GeoReferencedEntity, AclAwareModel, EntityId {
private static final ClassAclOid<FaoInstitute> DEFAULT_PARENT_OID = ClassAclOid.forClass(FaoInstitute.class);
......
......@@ -26,15 +26,19 @@ import org.hibernate.annotations.Type;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldIndex;
import com.fasterxml.jackson.annotation.JsonIdentityInfo;
import com.fasterxml.jackson.annotation.JsonIdentityReference;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonView;
import com.fasterxml.jackson.annotation.ObjectIdGenerators;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
@Entity
@Table(name = "georegion")
@Cacheable
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "id", scope = GeoRegion.class)
public class GeoRegion extends VersionedModel {
private static final long serialVersionUID = -1L;
......@@ -53,7 +57,7 @@ public class GeoRegion extends VersionedModel {
@ManyToOne()
@JoinColumn(name = "parentId")
@JsonView({ JsonViews.Public.class })
@JsonIdentityReference(alwaysAsId = true)
@JsonIgnoreProperties({ "parentRegion" })
@IgnoreField
private GeoRegion parentRegion;
......
......@@ -69,7 +69,7 @@ import com.fasterxml.jackson.annotation.ObjectIdGenerators;
@Table(name = "subset", uniqueConstraints = @UniqueConstraint(name = "UQ_current_subs_version", columnNames={"versionsId", "current"}))
@Audited
@Document(indexName = "subset")
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "uuid")
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "uuid", scope = Subset.class)
public class Subset extends UuidModel implements AclAwareModel, SelfCleaning {
/** The Constant serialVersionUID. */
......
/*
* Copyright 2018 Global Crop Diversity Trust
* Copyright 2019 Global Crop Diversity Trust
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
......@@ -44,6 +44,15 @@ public interface ElasticsearchService {
*/
<R extends BasicModel> void indexEntity(Class<R> clazz);
/**
* Index entity using a custom reindex batch size.
*
* @param <R> the generic type
* @param clazz the model class
* @param reindexBatchSize custom batch size
*/
<R extends BasicModel> void indexEntity(Class<R> clazz, int reindexBatchSize);
/**
* Reindex.
*
......@@ -241,4 +250,16 @@ public interface ElasticsearchService {
* @throws InterruptedException
*/
void waitForCount(Class<? extends BasicModel> clazz, BasicModelFilter<?, ?> filter, int mustHaveCount) throws InterruptedException;
/**
* Sets the batch size used when reindexing. Some entities serialize to very large JSON
* documents, and we want to reindex those in much smaller batches.
*
* @param <R> the BasicModel type
* @param model the document model
* @param batchSize the batch size
* @return the previously configured batch size, or {@code null} if none was set
*/
<R extends BasicModel> Integer setReindexBatchSize(Class<R> model, Integer batchSize);
}
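
The two methods added to this interface are wired up in ElasticsearchConfig later in this diff. A hedged sketch of how a caller might use them, as a fragment rather than a full class (the field injection and the chosen entities are assumptions for illustration, not an excerpt of the repository; the default ID-scan batch size of 1000 comes from ElasticsearchServiceImpl below):

// Illustrative fragment; ElasticsearchService is the interface shown above,
// Accession and Dataset are entity classes touched elsewhere in this commit.
@Autowired
private ElasticsearchService elasticsearchService;

void registerIndexedEntities() {
    // Small documents: keep the default ID-scan batch size (1000).
    elasticsearchService.indexEntity(Accession.class);

    // Datasets serialize to very large JSON, so reindex them one at a time.
    elasticsearchService.indexEntity(Dataset.class, 1);

    // The batch size can also be tuned later; the previous value (or null) is returned.
    Integer previous = elasticsearchService.setReindexBatchSize(Dataset.class, 10);
}
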
/*
* Copyright 2018 Global Crop Diversity Trust
* Copyright 2019 Global Crop Diversity Trust
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
......@@ -37,7 +37,6 @@ import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;
import com.querydsl.core.BooleanBuilder;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.collections4.ListUtils;
import org.apache.commons.collections4.MapUtils;
......@@ -95,6 +94,7 @@ import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.core.task.TaskExecutor;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
......@@ -119,6 +119,7 @@ import com.fasterxml.jackson.module.jsonSchema.JsonSchema;
import com.fasterxml.jackson.module.jsonSchema.JsonSchemaGenerator;
import com.fasterxml.jackson.module.jsonSchema.types.ObjectSchema;
import com.google.common.collect.Sets;
import com.querydsl.core.BooleanBuilder;
import com.querydsl.core.types.EntityPath;
import com.querydsl.core.types.Predicate;
import com.querydsl.core.types.dsl.PathBuilder;
......@@ -149,7 +150,7 @@ public class ElasticsearchServiceImpl implements ElasticsearchService, Initializ
protected static final SourceFilter DEFAULT_SOURCE_FILTER = new FetchSourceFilter(new String[] { "id", "_class", "title", "code", "description" }, new String[] {});
@Autowired
private TaskScheduler taskScheduler;
private TaskExecutor taskExecutor;
@Autowired
private EntityManager em;
......@@ -166,6 +167,7 @@ public class ElasticsearchServiceImpl implements ElasticsearchService, Initializ
private final Set<Class<? extends BasicModel>> indexedEntities = Collections.synchronizedSet(new HashSet<>());
private final Map<String, Class<BasicModel>> namesToClasses = Collections.synchronizedMap(new HashMap<>());
private final Map<Class<? extends BasicModel>, Set<String>> jsonSchemas = new HashMap<>();
private final Map<Class<? extends BasicModel>, Integer> reindexBatchSize = new HashMap<>();
/// Size of database batch scan for IDs
private int batchSize = 1000;
......@@ -288,6 +290,25 @@ public class ElasticsearchServiceImpl implements ElasticsearchService, Initializ
indexedEntities.add(clazz);
namesToClasses.put(clazz.getSimpleName(), (Class<BasicModel>) clazz);
}
@Override
public <R extends BasicModel> void indexEntity(Class<R> clazz, int reindexBatchSize) {
indexEntity(clazz);
this.reindexBatchSize.put(clazz, reindexBatchSize);
}
/**
* Sets the reindex batch size.
*
* @param <R> the generic type
* @param model the model
* @param batchSize the batch size
* @return the previously configured batch size, or {@code null} if none was set
*/
@Override
public <R extends BasicModel> Integer setReindexBatchSize(Class<R> model, Integer batchSize) {
return this.reindexBatchSize.put(model, batchSize);
}
@Override
public List<Class<? extends BasicModel>> getIndexedEntities() {
......@@ -378,18 +399,21 @@ public class ElasticsearchServiceImpl implements ElasticsearchService, Initializ
query.where(filter.buildPredicate());
}
Integer scanSize = reindexBatchSize.get(clazz);
final int customBatchSize = scanSize == null ? batchSize : scanSize.intValue();
int startPosition = 0;
query.offset(startPosition);
query.limit(batchSize);
query.limit(customBatchSize);
StopWatch stopWatch = new StopWatch();
stopWatch.start();
List<Long> results;
do {
stopWatch.split();
LOG.debug("Reading IDs stopwatch={}s {} {}+{}", stopWatch.getSplitTime() / 1000, clazz.getName(), startPosition, batchSize);
if (startPosition > 10 * batchSize && startPosition / (10 * batchSize) == 0) {
LOG.info("Reading IDs stopwatch={}s {} {}+{}", stopWatch.getSplitTime() / 1000, clazz.getName(), startPosition, batchSize);
LOG.debug("Reading IDs stopwatch={}s {} {}+{}", stopWatch.getSplitTime() / 1000, clazz.getName(), startPosition, customBatchSize);
if (startPosition > 10 * customBatchSize && startPosition % (10 * customBatchSize) == 0) {
LOG.info("Reading IDs stopwatch={}s {} {}+{}", stopWatch.getSplitTime() / 1000, clazz.getName(), startPosition, customBatchSize);
}
results = query.fetch();
asyncUpdate(clazz, results);
......@@ -422,14 +446,14 @@ public class ElasticsearchServiceImpl implements ElasticsearchService, Initializ
ensureWriteAlias(clazz);
taskScheduler.schedule(() -> {
taskExecutor.execute(() -> {
LOG.debug("Running scheduled reindex of {} size={}", clazz.getName(), copy.size());
try {
_self.update(clazz, copy);
} catch (Throwable e) {
LOG.error(e.getMessage(), e);
}
}, new Date(System.currentTimeMillis() + 5000));
});
}
/**
......
......@@ -145,19 +145,19 @@ public class ElasticsearchConfig {
es.indexEntity(Partner.class);
es.indexEntity(Crop.class);
es.indexEntity(ControlledVocabulary.class);
es.indexEntity(ControlledVocabulary.class, 20); // big JSON, small batch
es.indexEntity(VocabularyTerm.class);
es.indexEntity(Descriptor.class);
es.indexEntity(DescriptorList.class);
es.indexEntity(Dataset.class);
es.indexEntity(Subset.class);
es.indexEntity(DescriptorList.class, 100); // big JSON, small batch
es.indexEntity(Dataset.class, 1); // big JSON, small batch
es.indexEntity(Subset.class, 10); // smaller batch
es.indexEntity(Accession.class);
es.indexEntity(FaoInstitute.class);
es.indexEntity(Country.class);
es.indexEntity(Article.class);
es.indexEntity(ActivityPost.class);
return es;
}
......
......@@ -47,6 +47,7 @@ public class SchedulerConfig implements SchedulingConfigurer, AsyncConfigurer {
pool.setCorePoolSize(4);
pool.setMaxPoolSize(16);
pool.setQueueCapacity(100);
pool.setThreadPriority(Thread.NORM_PRIORITY - 2);
pool.setThreadNamePrefix("genesys-background-");
pool.setRejectedExecutionHandler(new ThreadPoolExecutor.CallerRunsPolicy());
return pool;
......
......@@ -50,7 +50,6 @@ import org.genesys2.server.model.genesys.Accession;
import org.genesys2.server.model.genesys.AccessionRef;
import org.junit.Test;
import org.springframework.dao.ConcurrencyFailureException;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.dao.InvalidDataAccessApiUsageException;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
......@@ -168,8 +167,7 @@ public class DatasetServiceTest extends AbstractDatasetServiceTest {
assertThat(input, not(nullValue()));
assertThat(input.getAccessionCount(), is(1));
datasetService.rematchDatasetAccessions(input);
Thread.sleep(2000);
Thread.sleep(500);
final AccessionRef datasetAccessionRef = datasetService.listAccessions(input, new PageRequest(0, 10)).getContent().get(0);
assertThat(datasetAccessionRef, notNullValue());
......@@ -194,8 +192,7 @@ public class DatasetServiceTest extends AbstractDatasetServiceTest {
assertThat(input.getAccessionCount(), is(3));
assertThat(input.getAccessionCount(), is(datasetAccessionRefs.size()));
datasetService.rematchDatasetAccessions(input);
Thread.sleep(5500);
Thread.sleep(500);
datasetService.listAccessions(input, new PageRequest(0, 10)).forEach(aid -> {
assertThat(aid.getAccession(), notNullValue());
......@@ -211,6 +208,7 @@ public class DatasetServiceTest extends AbstractDatasetServiceTest {
input = datasetService.setAccessionRefs(input, accessionIdentifiers2);
datasetService.rematchDatasetAccessions(input);
assertThat(input, not(nullValue()));
Thread.sleep(500);
// test if dataset has a new set of accessions
assertThat(datasetService.listAccessions(input, new PageRequest(0, 10)).getTotalElements(), is(2l));
......@@ -222,7 +220,7 @@ public class DatasetServiceTest extends AbstractDatasetServiceTest {
}
@Test
public void testListDatasetsByAccession() {
public void testListDatasetsByAccession() throws InterruptedException {
final Accession accession = upsertAccession(TEST_INSTCODE, TEST_ACCNUM, TEST_GEN);
Dataset input = buildDataset(DATASET_TITLE_1, DATASET_DESCRIPTION_1, partner, PublishState.DRAFT);
......@@ -234,6 +232,7 @@ public class DatasetServiceTest extends AbstractDatasetServiceTest {
input = datasetService.setAccessionRefs(input, datasetAccessionRefs);
assertThat(input.getAccessionCount(), is(1));
datasetService.rematchDatasetAccessions(input);
Thread.sleep(500);
input = datasetService.approveDataset(datasetService.reviewDataset(input));
// make sure that the accession from Genesys is also added
......@@ -848,8 +847,7 @@ public class DatasetServiceTest extends AbstractDatasetServiceTest {
final Accession accession1 = upsertAccession(TEST_INSTCODE, "A2", "Musa");
datasetService.rematchDatasetAccessions(dataset1);
datasetService.rematchDatasetAccessions(dataset2);
Thread.sleep(5500);
Thread.sleep(500);
List<Dataset> datasets1 = datasetService.listByAccession(accession1);
assertThat(datasets1, not(nullValue()));
......@@ -859,8 +857,7 @@ public class DatasetServiceTest extends AbstractDatasetServiceTest {
final Accession accession2 = upsertAccession(TEST_INSTCODE, "A8", "Musa");
datasetService.rematchDatasetAccessions(dataset1);
Thread.sleep(5500);
Thread.sleep(500);
datasets1 = datasetService.listByAccession(accession2);
assertThat(datasets1.size(), is(1));
......@@ -869,7 +866,7 @@ public class DatasetServiceTest extends AbstractDatasetServiceTest {
}
@Test
public void testAccessionDeleted() {
public void testAccessionDeleted() throws InterruptedException {
upsertAccession(TEST_INSTCODE, "A2", "Musa");
upsertAccession(TEST_INSTCODE, "A8", "Musa");
......@@ -881,6 +878,7 @@ public class DatasetServiceTest extends AbstractDatasetServiceTest {
dataset1 = datasetService.setAccessionRefs(dataset1, ids1);
assertThat(dataset1.getAccessionCount(), is(3));
datasetService.rematchDatasetAccessions(dataset1);
Thread.sleep(500);
final Set<AccessionRef> ids2 = new HashSet<>();
ids2.add(makeAccessionIdentifier(TEST_INSTCODE, "A1", "Musa", null));
......@@ -890,6 +888,7 @@ public class DatasetServiceTest extends AbstractDatasetServiceTest {
dataset2 = datasetService.setAccessionRefs(dataset2, ids2);
assertThat(dataset2.getAccessionCount(), is(3));
datasetService.rematchDatasetAccessions(dataset2);
Thread.sleep(500);
dataset1 = datasetService.loadDataset(dataset1.getUuid());
dataset2 = datasetService.loadDataset(dataset2.getUuid());
......