Commit f8668409 authored by Maxym Borodenko's avatar Maxym Borodenko

Merge branch '390-multiple-matching-descriptors' into 'master'

Resolve "Multiple matching descriptors"

Closes #390

See merge request genesys-pgr/genesys-server!307
parents 94a1b99c 1b9960dc
......@@ -49,7 +49,7 @@ public interface DescriptorService {
* @param descriptor the descriptor
* @return matching descriptor
*/
Descriptor searchMatchingDescriptor(Descriptor descriptor);
List<Descriptor> searchMatchingDescriptor(Descriptor descriptor);
/**
* Update descriptor.
......
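The change above relaxes the one-match assumption: searchMatchingDescriptor now returns every descriptor that matches the template, so callers must decide what to do when the list is empty or holds more than one entry. A minimal caller-side sketch, reusing the project's Descriptor and DescriptorService types from the hunk above; the resolveDescriptor helper and its "lowest UUID" tie-break rule are illustrative only and not part of this merge request:

import java.util.Comparator;
import java.util.List;

// Hypothetical caller-side handling of the new multi-match contract (sketch only).
class DescriptorResolver {
    Descriptor resolveDescriptor(final DescriptorService descriptorService, final Descriptor template) {
        final List<Descriptor> matches = descriptorService.searchMatchingDescriptor(template);
        // Illustrative tie-break: deterministically pick the match with the lowest UUID,
        // or null when nothing matched (or nothing is visible to the current user).
        return matches.stream()
            .min(Comparator.comparing(Descriptor::getUuid))
            .orElse(null);
    }
}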
......@@ -207,10 +207,9 @@ public class DatasetServiceImpl implements DatasetService {
public Dataset updateDataset(@Valid final Dataset input) {
LOG.info("Update Dataset. Input data {}", input);
final Dataset dataset = loadDataset(input);
if (dataset.isPublished()) {
LOG.warn("Refusing to update a published Dataset");
throw new DataIntegrityViolationException("Published dataset can't be updated");
throw new InvalidApiUsageException("Cannot modify a published Dataset.");
}
if (input.getOwner() != null && !dataset.getOwner().equals(input.getOwner())) {
......@@ -232,6 +231,10 @@ public class DatasetServiceImpl implements DatasetService {
public Dataset updateAccessionRefs(Dataset dataset, @Valid final Set<AccessionRef> accessionRefs) {
LOG.info("Update AccessionIdentifiers. Input dataset: {}, input accessionRefs {}", dataset, accessionRefs);
dataset = loadDataset(dataset);
if (dataset.isPublished()) {
throw new InvalidApiUsageException("Cannot modify a published Dataset.");
}
if (dataset.getAccessionRefs() != null && !dataset.getAccessionRefs().isEmpty()) {
dataset.getAccessionRefs().clear();
}
......@@ -248,6 +251,9 @@ public class DatasetServiceImpl implements DatasetService {
public Dataset addDescriptors(Dataset dataset, final Descriptor... descriptors) {
LOG.info("Add descriptor to Dataset {}. Input descriptors {}", dataset, descriptors);
dataset = loadDataset(dataset);
if (dataset.isPublished()) {
throw new InvalidApiUsageException("Cannot modify a published Dataset.");
}
final Set<UUID> descriptorUuids = dataset.getDescriptors().stream().map(descriptor -> descriptor.getUuid()).collect(Collectors.toSet());
......@@ -257,8 +263,6 @@ public class DatasetServiceImpl implements DatasetService {
} else {
dataset.getDescriptors().add(inputDescriptor);
descriptorUuids.add(inputDescriptor.getUuid());
// We will change its PublishState only if we add at least one new Descriptor
dataset.setState(PublishState.DRAFT);
}
}
......@@ -274,13 +278,15 @@ public class DatasetServiceImpl implements DatasetService {
public Dataset removeDescriptors(Dataset dataset, final Descriptor... descriptors) {
LOG.info("Remove descriptors {} of dataset {}.", descriptors, dataset);
dataset = loadDataset(dataset);
if (dataset.isPublished()) {
throw new InvalidApiUsageException("Cannot modify a published Dataset.");
}
// Which UUIDs to remove?
final Set<UUID> descriptorUuids = Arrays.stream(descriptors).map(descriptor -> descriptor.getUuid()).collect(Collectors.toSet());
// Keep descriptors that are not in the list
dataset.setDescriptors(dataset.getDescriptors().stream().filter(descriptor -> !descriptorUuids.contains(descriptor.getUuid())).collect(Collectors.toList()));
dataset.setState(PublishState.DRAFT);
return lazyLoad(datasetRepository.save(dataset));
}
......@@ -294,6 +300,9 @@ public class DatasetServiceImpl implements DatasetService {
public Dataset updateDescriptors(Dataset dataset, final List<Descriptor> descriptors) {
LOG.info("Update descriptors for Dataset {}. Input descriptors {}", dataset, descriptors);
dataset = loadDataset(dataset);
if (dataset.isPublished()) {
throw new InvalidApiUsageException("Cannot modify a published Dataset.");
}
dataset.getDescriptors().clear();
copyDescriptors(dataset, descriptors);
......@@ -423,6 +432,9 @@ public class DatasetServiceImpl implements DatasetService {
public Dataset addDatasetFile(Dataset dataset, final MultipartFile file) throws NotFoundElement, IOException, InvalidRepositoryPathException,
InvalidRepositoryFileDataException {
dataset = datasetRepository.findByUuidAndVersion(dataset.getUuid(), dataset.getVersion());
if (dataset.isPublished()) {
throw new InvalidApiUsageException("Cannot modify a published Dataset.");
}
final RepositoryFile repositoryFile = repositoryService.addFile(getDatasetRepositoryFolder(dataset), file.getOriginalFilename(), file.getContentType(), file.getBytes(),
null);
......@@ -443,6 +455,10 @@ public class DatasetServiceImpl implements DatasetService {
@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')")
public Dataset updateDatasetFile(Dataset dataset, final RepositoryFile metadata) throws NoSuchRepositoryFileException {
dataset = datasetRepository.findByUuidAndVersion(dataset.getUuid(), dataset.getVersion());
if (dataset.isPublished()) {
throw new InvalidApiUsageException("Cannot modify a published Dataset.");
}
final RepositoryFile datasetFile = dataset.getRepositoryFiles().stream().filter(df -> df.getUuid().equals(metadata.getUuid()) && df.getVersion().equals(metadata
.getVersion())).findFirst().orElse(null);
if (datasetFile == null) {
......@@ -462,6 +478,9 @@ public class DatasetServiceImpl implements DatasetService {
@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')")
public Dataset removeDatasetFile(Dataset dataset, final UUID fileUuid) throws NotFoundElement, NoSuchRepositoryFileException, IOException {
dataset = datasetRepository.findByUuidAndVersion(dataset.getUuid(), dataset.getVersion());
if (dataset.isPublished()) {
throw new InvalidApiUsageException("Cannot modify a published Dataset.");
}
final RepositoryFile repositoryFile = repositoryService.getFile(fileUuid);
dataset.getRepositoryFiles().remove(repositoryFile);
......@@ -550,7 +569,7 @@ public class DatasetServiceImpl implements DatasetService {
throw new NotFoundElement("Dataset doesn't exist");
}
if (loadedDataset.isPublished()) {
throw new InvalidApiUsageException("Dataset is published, no edits allowed.");
throw new InvalidApiUsageException("Cannot modify a published Dataset.");
}
LOG.warn("Matching " + accessionRefs.size() + " with Accessions");
......
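The same guard now opens every mutating method of DatasetServiceImpl shown above (updateDataset, updateAccessionRefs, addDescriptors, removeDescriptors, updateDescriptors and the dataset-file methods). A minimal sketch of that repeated check written as a private helper inside the same class; the ensureNotPublished name is hypothetical and the extraction is not part of this merge request:

// Hypothetical consolidation of the guard repeated in the hunks above;
// ensureNotPublished is illustrative and does not exist in this merge request.
private void ensureNotPublished(final Dataset dataset) {
    if (dataset.isPublished()) {
        throw new InvalidApiUsageException("Cannot modify a published Dataset.");
    }
}

// Example usage at the top of any mutating method shown above:
// dataset = loadDataset(dataset);
// ensureNotPublished(dataset);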
......@@ -123,10 +123,13 @@ public class DescriptorServiceImpl implements DescriptorService {
* {@inheritDoc}
*/
@Override
public Descriptor searchMatchingDescriptor(final Descriptor input) {
@PostFilter("hasRole('ADMINISTRATOR') or filterObject.published or hasPermission(filterObject, 'READ')")
public List<Descriptor> searchMatchingDescriptor(final Descriptor input) {
final Predicate predicate = descriptor.category.in(input.getCategory()).and(descriptor.dataType.in(input.getDataType())).and(descriptor.crop.eq(input.getCrop()))
.and(descriptor.versionTag.eq(input.getVersionTag())).and(descriptor.title.eq(input.getTitle())).and(descriptor.key.eq(input.isKey()));
return lazyLoad(descriptorRepository.findOne(predicate));
List<Descriptor> matches = new ArrayList<>();
descriptorRepository.findAll(predicate).forEach(match -> matches.add(lazyLoad(match)));
return matches;
}
/**
......
......@@ -22,6 +22,7 @@ import java.io.IOException;
import java.io.OutputStream;
import java.util.List;
import java.util.UUID;
import java.util.stream.Collectors;
import com.fasterxml.jackson.annotation.JsonView;
import io.swagger.annotations.Api;
......@@ -305,14 +306,14 @@ public class DescriptorController extends ApiBaseController {
}
/**
* Search matching descriptor.
* Search matching descriptors.
*
* @param source the source
* @return matching descriptor
* @return matching descriptors
*/
@RequestMapping(value = "/search-matching", method = RequestMethod.POST)
public Descriptor searchMatchingDescriptor(@RequestBody final Descriptor source) {
return descriptorService.searchMatchingDescriptor(source);
public List<Descriptor> searchMatchingDescriptor(@RequestBody final Descriptor source) {
return descriptorService.searchMatchingDescriptor(source).stream().filter(d -> d != null).collect(Collectors.toList());
}
/**
......
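Because the endpoint now returns a JSON array instead of a single object, clients and tests should expect a list in the response body. A minimal MockMvc sketch; the /api/v1/descriptors prefix and the request payload are assumptions, since the class-level mapping of DescriptorController is not visible in this diff:

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

import org.springframework.http.MediaType;
import org.springframework.test.web.servlet.MockMvc;

class SearchMatchingEndpointSketch {
    // Checks only that the response is now an array; URL prefix and payload
    // below are illustrative assumptions, not taken from this merge request.
    void searchMatchingReturnsArray(final MockMvc mockMvc) throws Exception {
        mockMvc.perform(post("/api/v1/descriptors/search-matching")
            .contentType(MediaType.APPLICATION_JSON)
            .content("{\"title\":\"Plant height\",\"key\":false}"))
            .andExpect(status().isOk())
            .andExpect(jsonPath("$").isArray());
    }
}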
......@@ -219,7 +219,7 @@ public class DatasetServiceTest extends AbstractDatasetServiceTest {
public void testListDatasetsByAccession() {
final Accession accession = upsertAccession(TEST_INSTCODE, TEST_ACCNUM, TEST_GEN);
Dataset input = buildDataset(DATASET_TITLE_1, DATASET_DESCRIPTION_1, partner, PublishState.PUBLISHED);
Dataset input = buildDataset(DATASET_TITLE_1, DATASET_DESCRIPTION_1, partner, PublishState.DRAFT);
input = datasetService.createDataset(input);
// add DatasetAccessionRef to dataset
......@@ -227,6 +227,7 @@ public class DatasetServiceTest extends AbstractDatasetServiceTest {
datasetAccessionRefs.add(makeAccessionIdentifier(TEST_INSTCODE, TEST_ACCNUM, TEST_GEN, null));
input = datasetService.updateAccessionRefs(input, datasetAccessionRefs);
assertThat(input.getAccessionRefs().size(), is(1));
datasetService.approveDataset(datasetService.reviewDataset(input));
// make sure that the accession from Genesys is also added
input.getAccessionRefs().forEach(aid -> {
......@@ -243,9 +244,9 @@ public class DatasetServiceTest extends AbstractDatasetServiceTest {
/**
* Fail to update Dataset when isPublished() = true
*/
@Test(expected = DataIntegrityViolationException.class)
@Test(expected = InvalidApiUsageException.class)
public void testFailUpdatePublishedDataset() {
Dataset input = buildAndSaveDataset(DATASET_TITLE_1, DATASET_DESCRIPTION_1, partner, PublishState.DRAFT);
Dataset input = buildAndSaveDataset(DATASET_TITLE_1, DATASET_DESCRIPTION_1, partner, PublishState.PUBLISHED);
input = datasetService.reviewDataset(input);
input = datasetService.approveDataset(input);
input.setTitle(DATASET_TITLE_2);
......@@ -607,17 +608,19 @@ public class DatasetServiceTest extends AbstractDatasetServiceTest {
ids1.add(makeAccessionIdentifier(TEST_INSTCODE, "A1", "Musa", null));
ids1.add(makeAccessionIdentifier(TEST_INSTCODE, "A2", "Musa", null));
ids1.add(makeAccessionIdentifier(TEST_INSTCODE, "A8", "Musa", null));
Dataset dataset1 = datasetService.createDataset(buildDataset(DATASET_TITLE_1, DATASET_DESCRIPTION_1, partner, PublishState.PUBLISHED));
Dataset dataset1 = datasetService.createDataset(buildDataset(DATASET_TITLE_1, DATASET_DESCRIPTION_1, partner, PublishState.DRAFT));
dataset1 = datasetService.updateAccessionRefs(dataset1, ids1);
assertThat(dataset1.getAccessionCount(), is(3));
datasetService.approveDataset(datasetService.reviewDataset(dataset1));
final Set<AccessionRef> ids2 = new HashSet<>();
ids2.add(makeAccessionIdentifier(TEST_INSTCODE, "A1", "Musa", null));
ids2.add(makeAccessionIdentifier(TEST_INSTCODE, "A2", "Musa", null));
ids2.add(makeAccessionIdentifier(TEST_INSTCODE, "A3", "Manihot", null));
Dataset dataset2 = datasetService.createDataset(buildDataset(DATASET_TITLE_1, DATASET_DESCRIPTION_1, partner, PublishState.PUBLISHED));
Dataset dataset2 = datasetService.createDataset(buildDataset(DATASET_TITLE_1, DATASET_DESCRIPTION_1, partner, PublishState.DRAFT));
dataset2 = datasetService.updateAccessionRefs(dataset2, ids2);
assertThat(dataset2.getAccessionCount(), is(3));
datasetService.approveDataset(datasetService.reviewDataset(dataset2));
final DatasetFilter filter = new DatasetFilter();
filter.accessionRef = new AccessionRefFilter();
......@@ -818,17 +821,19 @@ public class DatasetServiceTest extends AbstractDatasetServiceTest {
ids1.add(makeAccessionIdentifier(TEST_INSTCODE, "A1", "Musa", null));
ids1.add(makeAccessionIdentifier(TEST_INSTCODE, "A2", "Musa", null));
ids1.add(makeAccessionIdentifier(TEST_INSTCODE, "A8", "Musa", null));
Dataset dataset1 = datasetService.createDataset(buildDataset(DATASET_TITLE_1, DATASET_DESCRIPTION_1, partner, PublishState.PUBLISHED));
Dataset dataset1 = datasetService.createDataset(buildDataset(DATASET_TITLE_1, DATASET_DESCRIPTION_1, partner, PublishState.DRAFT));
dataset1 = datasetService.updateAccessionRefs(dataset1, ids1);
assertThat(dataset1.getAccessionCount(), is(3));
datasetService.approveDataset(datasetService.reviewDataset(dataset1));
final Set<AccessionRef> ids2 = new HashSet<>();
ids2.add(makeAccessionIdentifier(TEST_INSTCODE, "A1", "Musa", null));
ids2.add(makeAccessionIdentifier(TEST_INSTCODE, "A2", "Musa", null));
ids2.add(makeAccessionIdentifier(TEST_INSTCODE, "A3", "Manihot", null));
Dataset dataset2 = datasetService.createDataset(buildDataset(DATASET_TITLE_1, DATASET_DESCRIPTION_1, partner, PublishState.PUBLISHED));
Dataset dataset2 = datasetService.createDataset(buildDataset(DATASET_TITLE_1, DATASET_DESCRIPTION_1, partner, PublishState.DRAFT));
dataset2 = datasetService.updateAccessionRefs(dataset2, ids2);
assertThat(dataset2.getAccessionCount(), is(3));
datasetService.approveDataset(datasetService.reviewDataset(dataset2));
final Accession accession1 = upsertAccession(TEST_INSTCODE, "A2", "Musa");
datasetService.rematchDatasetAccessions(dataset1);
......@@ -858,7 +863,7 @@ public class DatasetServiceTest extends AbstractDatasetServiceTest {
ids1.add(makeAccessionIdentifier(TEST_INSTCODE, "A1", "Musa", null));
ids1.add(makeAccessionIdentifier(TEST_INSTCODE, "A2", "Musa", null));
ids1.add(makeAccessionIdentifier(TEST_INSTCODE, "A8", "Musa", null));
Dataset dataset1 = datasetService.createDataset(buildDataset(DATASET_TITLE_1, DATASET_DESCRIPTION_1, partner, PublishState.PUBLISHED));
Dataset dataset1 = datasetService.createDataset(buildDataset(DATASET_TITLE_1, DATASET_DESCRIPTION_1, partner, PublishState.DRAFT));
dataset1 = datasetService.updateAccessionRefs(dataset1, ids1);
assertThat(dataset1.getAccessionCount(), is(3));
......@@ -866,7 +871,7 @@ public class DatasetServiceTest extends AbstractDatasetServiceTest {
ids2.add(makeAccessionIdentifier(TEST_INSTCODE, "A1", "Musa", null));
ids2.add(makeAccessionIdentifier(TEST_INSTCODE, "A2", "Musa", null));
ids2.add(makeAccessionIdentifier(TEST_INSTCODE, "A3", "Manihot", null));
Dataset dataset2 = datasetService.createDataset(buildDataset(DATASET_TITLE_1, DATASET_DESCRIPTION_1, partner, PublishState.PUBLISHED));
Dataset dataset2 = datasetService.createDataset(buildDataset(DATASET_TITLE_1, DATASET_DESCRIPTION_1, partner, PublishState.DRAFT));
dataset2 = datasetService.updateAccessionRefs(dataset2, ids2);
assertThat(dataset2.getAccessionCount(), is(3));
......