Commit 33fb990e authored by Matija Obreza's avatar Matija Obreza

Merge branch '425-versioned-datasets-update' into 'master'

Resolve "Versioned Datasets: update"

Closes #425

See merge request genesys-pgr/genesys-server!376
parents 84694442 4a52ffab
/* /*
* Copyright 2018 Global Crop Diversity Trust * Copyright 2019 Global Crop Diversity Trust
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. * you may not use this file except in compliance with the License.
...@@ -17,6 +17,7 @@ package org.genesys.catalog.model.dataset; ...@@ -17,6 +17,7 @@ package org.genesys.catalog.model.dataset;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import java.util.UUID;
import javax.persistence.Cacheable; import javax.persistence.Cacheable;
import javax.persistence.CascadeType; import javax.persistence.CascadeType;
...@@ -35,12 +36,16 @@ import javax.persistence.ManyToMany; ...@@ -35,12 +36,16 @@ import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne; import javax.persistence.ManyToOne;
import javax.persistence.OneToMany; import javax.persistence.OneToMany;
import javax.persistence.OrderColumn; import javax.persistence.OrderColumn;
import javax.persistence.PostLoad;
import javax.persistence.PrePersist; import javax.persistence.PrePersist;
import javax.persistence.PreUpdate; import javax.persistence.PreUpdate;
import javax.persistence.Table; import javax.persistence.Table;
import javax.persistence.Transient;
import javax.persistence.UniqueConstraint;
import javax.validation.constraints.NotNull; import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size; import javax.validation.constraints.Size;
import com.fasterxml.jackson.annotation.JsonGetter;
import org.genesys.blocks.auditlog.annotations.Audited; import org.genesys.blocks.auditlog.annotations.Audited;
import org.genesys.blocks.model.JsonViews; import org.genesys.blocks.model.JsonViews;
import org.genesys.blocks.model.Publishable; import org.genesys.blocks.model.Publishable;
...@@ -66,10 +71,10 @@ import com.fasterxml.jackson.annotation.JsonView; ...@@ -66,10 +71,10 @@ import com.fasterxml.jackson.annotation.JsonView;
* *
* @author Matija Obreza * @author Matija Obreza
* @author Andrey Lugovskoy * @author Andrey Lugovskoy
* @author Maxim Borodenko * @author Maxym Borodenko
*/ */
@Entity @Entity
@Table(name = "dataset") @Table(name = "dataset", uniqueConstraints = @UniqueConstraint(name = "UQ_current_dataset_version", columnNames={"versionsId", "current"}))
@Cacheable @Cacheable
@Audited @Audited
@Document(indexName = "dataset") @Document(indexName = "dataset")
...@@ -79,11 +84,13 @@ public class Dataset extends UuidModel implements Publishable, SelfCleaning, Acl ...@@ -79,11 +84,13 @@ public class Dataset extends UuidModel implements Publishable, SelfCleaning, Acl
private static final long serialVersionUID = -4601980446454791177L; private static final long serialVersionUID = -4601980446454791177L;
/** The versions. */ /** The versions. */
@ManyToOne(cascade = CascadeType.ALL, optional = false) @ManyToOne(cascade = { CascadeType.MERGE, CascadeType.REFRESH }, optional = false)
@JoinColumn(name = "versionsId", updatable = false) @JoinColumn(name = "versionsId", updatable = false)
@JsonIgnore @JsonIgnore
private DatasetVersions versions; private DatasetVersions versions;
private Boolean current;
/** The owner. */ /** The owner. */
@ManyToOne(cascade = {}, optional = false) @ManyToOne(cascade = {}, optional = false)
@JoinColumn(name = "partnerId", updatable = false) @JoinColumn(name = "partnerId", updatable = false)
...@@ -195,6 +202,16 @@ public class Dataset extends UuidModel implements Publishable, SelfCleaning, Acl ...@@ -195,6 +202,16 @@ public class Dataset extends UuidModel implements Publishable, SelfCleaning, Acl
@Enumerated(EnumType.ORDINAL) @Enumerated(EnumType.ORDINAL)
private PublishState state = PublishState.DRAFT; private PublishState state = PublishState.DRAFT;
@Transient
private UUID currentVersion;
/**
 * JPA lifecycle callback: after loading, remember the UUID of the version group's
 * current dataset when this entity is not itself the current version.
 */
@PostLoad
protected void postLoad() {
    if (this.versions == null || this.versions.getCurrentVersion() == null) {
        return;
    }
    final UUID currentUuid = this.versions.getCurrentVersion().getUuid();
    if (!this.uuid.equals(currentUuid)) {
        this.currentVersion = currentUuid;
    }
}
/** /**
* Preupdate. * Preupdate.
*/ */
...@@ -215,6 +232,43 @@ public class Dataset extends UuidModel implements Publishable, SelfCleaning, Acl ...@@ -215,6 +232,43 @@ public class Dataset extends UuidModel implements Publishable, SelfCleaning, Acl
trimStringsToNull(); trimStringsToNull();
} }
/**
 * Whether this dataset is the current version of its version group
 * ({@code null} when it is not flagged as current).
 *
 * @return the current flag, possibly {@code null}
 */
public Boolean getCurrent() {
    return this.current;
}
/**
 * Updates the current-version flag of this dataset.
 *
 * @param current the new flag value; {@code null} clears the flag
 */
public void setCurrent(final Boolean current) {
    this.current = current;
}
/**
 * UUID of the current dataset version in this dataset's version group.
 * Populated on load when this entity is not itself the current version;
 * exposed in JSON via {@code @JsonGetter}.
 *
 * @return UUID of the current dataset version, or {@code null}
 */
@JsonGetter
public UUID getCurrentVersion() {
    return this.currentVersion;
}
/**
 * Sets the (transient) UUID of the current dataset version.
 *
 * @param currentVersion UUID of the current dataset version
 */
public void setCurrentVersion(final UUID currentVersion) {
    this.currentVersion = currentVersion;
}
/** /**
* Owner is the ACL parent object for the dataset * Owner is the ACL parent object for the dataset
*/ */
......
...@@ -463,4 +463,12 @@ public interface DatasetService { ...@@ -463,4 +463,12 @@ public interface DatasetService {
*/ */
void writeXlsxMCPD(Dataset dataset, OutputStream outputStream) throws IOException; void writeXlsxMCPD(Dataset dataset, OutputStream outputStream) throws IOException;
/**
* Creates a new draft version of a Dataset based on an existing published Dataset.
* The new version is placed in the same version group as the source and copies its
* descriptors, accession refs, creators and locations.
*
* @param source the published dataset to base the new version on
* @return the newly created draft Dataset, saved in the database
*/
Dataset createNewVersion(@Valid Dataset source);
} }
...@@ -26,9 +26,11 @@ import java.nio.file.Paths; ...@@ -26,9 +26,11 @@ import java.nio.file.Paths;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collection; import java.util.Collection;
import java.util.Comparator;
import java.util.Date; import java.util.Date;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Objects;
import java.util.Set; import java.util.Set;
import java.util.UUID; import java.util.UUID;
import java.util.stream.Collectors; import java.util.stream.Collectors;
...@@ -55,6 +57,7 @@ import org.genesys.catalog.persistence.dataset.DatasetRepository; ...@@ -55,6 +57,7 @@ import org.genesys.catalog.persistence.dataset.DatasetRepository;
import org.genesys.catalog.persistence.dataset.DatasetVersionsRepository; import org.genesys.catalog.persistence.dataset.DatasetVersionsRepository;
import org.genesys.catalog.service.DatasetService; import org.genesys.catalog.service.DatasetService;
import org.genesys.catalog.service.DescriptorService; import org.genesys.catalog.service.DescriptorService;
import org.genesys.catalog.service.VersionManager;
import org.genesys.filerepository.FolderNotEmptyException; import org.genesys.filerepository.FolderNotEmptyException;
import org.genesys.filerepository.InvalidRepositoryFileDataException; import org.genesys.filerepository.InvalidRepositoryFileDataException;
import org.genesys.filerepository.InvalidRepositoryPathException; import org.genesys.filerepository.InvalidRepositoryPathException;
...@@ -101,6 +104,7 @@ import com.querydsl.core.types.dsl.PathBuilder; ...@@ -101,6 +104,7 @@ import com.querydsl.core.types.dsl.PathBuilder;
import com.querydsl.core.types.dsl.PathBuilderFactory; import com.querydsl.core.types.dsl.PathBuilderFactory;
import com.querydsl.jpa.JPAExpressions; import com.querydsl.jpa.JPAExpressions;
import com.querydsl.jpa.JPQLQuery; import com.querydsl.jpa.JPQLQuery;
import com.vdurmont.semver4j.SemverException;
/** /**
* The Class DatasetServiceImpl. * The Class DatasetServiceImpl.
...@@ -165,6 +169,9 @@ public class DatasetServiceImpl implements DatasetService { ...@@ -165,6 +169,9 @@ public class DatasetServiceImpl implements DatasetService {
@PersistenceContext @PersistenceContext
private EntityManager entityManager; private EntityManager entityManager;
@Autowired
private VersionManager versionManager;
/** /**
* {@inheritDoc} * {@inheritDoc}
*/ */
...@@ -174,12 +181,15 @@ public class DatasetServiceImpl implements DatasetService { ...@@ -174,12 +181,15 @@ public class DatasetServiceImpl implements DatasetService {
public Dataset createDataset(@Valid final Dataset source) { public Dataset createDataset(@Valid final Dataset source) {
LOG.info("Create Dataset. Input data {}", source); LOG.info("Create Dataset. Input data {}", source);
final DatasetVersions datasetVersions = new DatasetVersions(); final DatasetVersions datasetVersions = new DatasetVersions();
datasetVersions.setCurrentVersion(null);
datasetVersionsRepository.save(datasetVersions); datasetVersionsRepository.save(datasetVersions);
Dataset dataset = new Dataset(); Dataset dataset = new Dataset();
dataset.setUuid(source.getUuid()); dataset.setUuid(source.getUuid());
dataset.setVersions(datasetVersions); dataset.setVersions(datasetVersions);
dataset.setCurrent(null);
copyValues(dataset, source); copyValues(dataset, source);
dataset.setState(PublishState.DRAFT);
dataset.setOwner(source.getOwner()); dataset.setOwner(source.getOwner());
dataset.setVersionTag(source.getVersionTag()); dataset.setVersionTag(source.getVersionTag());
dataset.setDescriptors(new ArrayList<>()); dataset.setDescriptors(new ArrayList<>());
...@@ -195,22 +205,8 @@ public class DatasetServiceImpl implements DatasetService { ...@@ -195,22 +205,8 @@ public class DatasetServiceImpl implements DatasetService {
// Make dataset publicly not-readable // Make dataset publicly not-readable
aclService.makePubliclyReadable(dataset, false); aclService.makePubliclyReadable(dataset, false);
try { // Make dataset folder
final Path datasetPath = Paths.get(datasetRepositoryPath, dataset.getUuid().toString()); makeDatasetFolder(dataset.getUuid());
final Partner partner = dataset.getOwner();
// Ensure folder ownership
asAdminInvoker.invoke(() -> {
// Ensure target folder exists for the Dataset
return repositoryService.ensureFolder(datasetPath, partner);
});
} catch (Exception e) {
LOG.warn("Could not update Folder properties: {}", e.getMessage());
}
datasetVersions.setCurrentVersion(dataset);
datasetVersionsRepository.save(datasetVersions);
return dataset; return dataset;
} }
...@@ -247,7 +243,7 @@ public class DatasetServiceImpl implements DatasetService { ...@@ -247,7 +243,7 @@ public class DatasetServiceImpl implements DatasetService {
@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')") @PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')")
public Dataset setAccessionRefs(Dataset dataset, @Valid final Set<AccessionRef> accessionRefs) { public Dataset setAccessionRefs(Dataset dataset, @Valid final Set<AccessionRef> accessionRefs) {
LOG.info("Update AccessionIdentifiers. Input dataset: {}, input accessionRefs {}", dataset, accessionRefs); LOG.info("Update AccessionIdentifiers. Input dataset: {}, input accessionRefs {}", dataset, accessionRefs);
dataset = reloadFromDatabase(dataset); dataset = getDataset(dataset);
if (dataset.isPublished()) { if (dataset.isPublished()) {
throw new InvalidApiUsageException("Cannot modify a published Dataset."); throw new InvalidApiUsageException("Cannot modify a published Dataset.");
} }
...@@ -265,7 +261,7 @@ public class DatasetServiceImpl implements DatasetService { ...@@ -265,7 +261,7 @@ public class DatasetServiceImpl implements DatasetService {
@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')") @PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')")
public Dataset addDescriptors(Dataset dataset, final Descriptor... descriptors) { public Dataset addDescriptors(Dataset dataset, final Descriptor... descriptors) {
LOG.info("Add descriptor to Dataset {}. Input descriptors {}", dataset, descriptors); LOG.info("Add descriptor to Dataset {}. Input descriptors {}", dataset, descriptors);
dataset = reloadFromDatabase(dataset); dataset = getDataset(dataset);
if (dataset.isPublished()) { if (dataset.isPublished()) {
throw new InvalidApiUsageException("Cannot modify a published Dataset."); throw new InvalidApiUsageException("Cannot modify a published Dataset.");
} }
...@@ -296,7 +292,7 @@ public class DatasetServiceImpl implements DatasetService { ...@@ -296,7 +292,7 @@ public class DatasetServiceImpl implements DatasetService {
@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')") @PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')")
public Dataset removeDescriptors(Dataset dataset, final Descriptor... descriptors) { public Dataset removeDescriptors(Dataset dataset, final Descriptor... descriptors) {
LOG.info("Remove descriptors {} of dataset {}.", descriptors, dataset); LOG.info("Remove descriptors {} of dataset {}.", descriptors, dataset);
dataset = reloadFromDatabase(dataset); dataset = getDataset(dataset);
if (dataset.isPublished()) { if (dataset.isPublished()) {
throw new InvalidApiUsageException("Cannot modify a published Dataset."); throw new InvalidApiUsageException("Cannot modify a published Dataset.");
} }
...@@ -318,7 +314,7 @@ public class DatasetServiceImpl implements DatasetService { ...@@ -318,7 +314,7 @@ public class DatasetServiceImpl implements DatasetService {
@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')") @PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')")
public Dataset updateDescriptors(Dataset dataset, final List<Descriptor> descriptors) { public Dataset updateDescriptors(Dataset dataset, final List<Descriptor> descriptors) {
LOG.info("Update descriptors for Dataset {}. Input descriptors {}", dataset, descriptors); LOG.info("Update descriptors for Dataset {}. Input descriptors {}", dataset, descriptors);
dataset = reloadFromDatabase(dataset); dataset = getDataset(dataset);
if (dataset.isPublished()) { if (dataset.isPublished()) {
throw new InvalidApiUsageException("Cannot modify a published Dataset."); throw new InvalidApiUsageException("Cannot modify a published Dataset.");
} }
...@@ -364,17 +360,155 @@ public class DatasetServiceImpl implements DatasetService { ...@@ -364,17 +360,155 @@ public class DatasetServiceImpl implements DatasetService {
downloadService.writeXlsxMCPD(queryAccessionId, outputStream, "", "/datasets/" + dataset.getUuid()); downloadService.writeXlsxMCPD(queryAccessionId, outputStream, "", "/datasets/" + dataset.getUuid());
} }
/**
* {@inheritDoc}
*
* Builds a new DRAFT Dataset in the same DatasetVersions group as the published
* source: bumps the version tag, copies descriptors, accession refs, creators and
* locations, makes the new draft not publicly readable and prepares its folder.
*/
@Override
@Transactional
@PreAuthorize("(hasRole('ADMINISTRATOR') || hasPermission(#source, 'write')) && #source.published")
public Dataset createNewVersion(@Valid Dataset source) {
// Reload the managed entity so we work with current database state
source = getDataset(source);
final Dataset dataset = new Dataset();
copyValues(dataset, source);
try {
// Next major version of the source tag (semver)
dataset.setVersionTag(versionManager.nextMajor(source.getVersionTag()));
} catch (SemverException e) {
// Source tag is not valid semver: fall back to appending ".1"
dataset.setVersionTag(source.getVersionTag() + ".1");
}
dataset.setState(PublishState.DRAFT);
// null (not false) — presumably so the UQ_current_dataset_version constraint on
// (versionsId, current) only bites for the single current row; TODO confirm
dataset.setCurrent(null);
// Clear UUID so a fresh one is assigned on persist — assumes UuidModel does this; verify
dataset.setUuid(null);
// Keep the new draft in the same version group as the source
dataset.setVersions(source.getVersions());
// Copy descriptors
dataset.setDescriptors(new ArrayList<>());
copyDescriptors(dataset, source.getDescriptors());
Dataset saved = datasetRepository.save(dataset);
// copy accessionRefs
saved = copyAccessionRefs(saved, source.getAccessionRefs());
// Copy creators
copyCreators(saved, source.getCreators());
// Copy locations
copyLocations(saved, source.getLocations());
// Transient pointer to the UUID of the version this draft was created from
saved.setCurrentVersion(source.getUuid());
// Make dataset publicly not-readable
aclService.makePubliclyReadable(saved, false);
// Make dataset folder
makeDatasetFolder(saved.getUuid());
return saved;
}
/**
 * Duplicates the given accession refs onto the target dataset, persists them and
 * refreshes the target's accession count.
 *
 * @param target the dataset receiving the copies
 * @param accessionRefs the accession refs to duplicate (may be null or empty)
 * @return the re-saved target dataset
 */
private Dataset copyAccessionRefs(final Dataset target, final List<DatasetAccessionRef> accessionRefs) {
    if (accessionRefs == null || accessionRefs.isEmpty()) {
        return target;
    }
    final Dataset loadedDataset = getDataset(target);
    final List<DatasetAccessionRef> copies = new ArrayList<>(accessionRefs.size());
    for (final DatasetAccessionRef ref : accessionRefs) {
        final DatasetAccessionRef copy = new DatasetAccessionRef();
        copyAccessionRef(copy, ref);
        copy.setDataset(loadedDataset);
        copies.add(copy);
    }
    accessionRefRepository.save(copies);
    loadedDataset.setAccessionCount((int) accessionRefRepository.countByDataset(loadedDataset));
    LOG.info("Done saving {} accession refs, have {} in dataset", accessionRefs.size(), loadedDataset.getAccessionCount());
    return datasetRepository.save(loadedDataset);
}
/**
 * Duplicates the given dataset locations onto the target dataset and persists them.
 *
 * @param target the dataset receiving the copies
 * @param locations the locations to duplicate (may be null or empty)
 */
private void copyLocations(final Dataset target, final List<DatasetLocation> locations) {
    if (locations == null || locations.isEmpty()) {
        return;
    }
    final List<DatasetLocation> copies = new ArrayList<>(locations.size());
    for (final DatasetLocation location : locations) {
        final DatasetLocation copy = new DatasetLocation();
        copyLocation(copy, location);
        copy.setDataset(target);
        copies.add(copy);
    }
    target.setLocations(locationRepository.save(copies));
}
/**
 * Duplicates the given dataset creators onto the target dataset and persists them.
 *
 * @param target the dataset receiving the copies
 * @param creators the creators to duplicate (may be null or empty)
 */
private void copyCreators(final Dataset target, final List<DatasetCreator> creators) {
    if (creators == null || creators.isEmpty()) {
        return;
    }
    final List<DatasetCreator> copies = new ArrayList<>(creators.size());
    for (final DatasetCreator creator : creators) {
        final DatasetCreator copy = new DatasetCreator();
        copyCreator(copy, creator);
        copy.setDataset(target);
        copies.add(copy);
    }
    target.setCreators(datasetCreatorRepository.save(copies));
}
/**
 * Ensures the file-repository folder for the dataset with the given UUID exists
 * and is owned by the dataset's partner. Folder creation is best-effort: failures
 * are logged but never propagated to the caller.
 *
 * @param uuid UUID of the dataset whose folder should be created
 */
private void makeDatasetFolder(final UUID uuid) {
    final Dataset dataset = datasetRepository.findByUuid(uuid);
    if (dataset == null) {
        // SLF4J formats the UUID itself; no explicit toString() needed
        LOG.warn("Folder wasn't created. No such dataset with UUID={}", uuid);
        return;
    }
    try {
        final Path datasetPath = Paths.get(datasetRepositoryPath, dataset.getUuid().toString());
        final Partner partner = dataset.getOwner();
        // Ensure folder ownership
        asAdminInvoker.invoke(() -> {
            // Ensure target folder exists for the Dataset
            return repositoryService.ensureFolder(datasetPath, partner);
        });
    } catch (Exception e) {
        // Pass the exception as the last argument so the stack trace is preserved
        // instead of logging only the message text
        LOG.warn("Could not update Folder properties: {}", e.getMessage(), e);
    }
}
/** /**
* {@inheritDoc} * {@inheritDoc}
*/ */
@Override @Override
@PostAuthorize("hasRole('ADMINISTRATOR') || returnObject==null || returnObject.isPublished() || hasPermission(returnObject, 'read')") @PostAuthorize("hasRole('ADMINISTRATOR') || returnObject==null || returnObject.isPublished() || hasPermission(returnObject, 'read')")
public Dataset loadDataset(final Dataset input) { public Dataset loadDataset(final Dataset input) {
final Dataset dataset = reloadFromDatabase(input); final Dataset dataset = getDataset(input);
return lazyLoad(dataset); return lazyLoad(dataset);
} }
private Dataset reloadFromDatabase(final Dataset input) { private Dataset getDataset(final Dataset input) {
LOG.debug("Load Dataset. Input data {}", input); LOG.debug("Load Dataset. Input data {}", input);
final Dataset dataset = datasetRepository.findOne(input.getId()); final Dataset dataset = datasetRepository.findOne(input.getId());
...@@ -394,9 +528,11 @@ public class DatasetServiceImpl implements DatasetService { ...@@ -394,9 +528,11 @@ public class DatasetServiceImpl implements DatasetService {
*/ */
@Override @Override
public Page<Dataset> listDatasets(final DatasetFilter filter, final Pageable page) { public Page<Dataset> listDatasets(final DatasetFilter filter, final Pageable page) {
Pageable markdownSortPageRequest = JPAUtils.toMarkdownSort(page, "title"); final Pageable markdownSortPageRequest = JPAUtils.toMarkdownSort(page, "title");
Page<Dataset> res = datasetRepository.findAll(new BooleanBuilder().and(filter.buildPredicate()).and(QDataset.dataset.state.in(PublishState.PUBLISHED)), final BooleanBuilder published = new BooleanBuilder();
markdownSortPageRequest); published.and(QDataset.dataset.state.eq(PublishState.PUBLISHED).and(QDataset.dataset.current.isTrue()));
published.and(filter.buildPredicate());
final Page<Dataset> res = datasetRepository.findAll(published, markdownSortPageRequest);
return new PageImpl<>(res.getContent(), page, res.getTotalElements()); return new PageImpl<>(res.getContent(), page, res.getTotalElements());
} }
...@@ -459,7 +595,7 @@ public class DatasetServiceImpl implements DatasetService { ...@@ -459,7 +595,7 @@ public class DatasetServiceImpl implements DatasetService {
@Override @Override
@PreAuthorize("hasRole('ADMINISTRATOR') || #dataset.published || hasPermission(#dataset, 'read')") @PreAuthorize("hasRole('ADMINISTRATOR') || #dataset.published || hasPermission(#dataset, 'read')")
public Page<? extends AccessionRef> listAccessions(Dataset dataset, final Pageable page) { public Page<? extends AccessionRef> listAccessions(Dataset dataset, final Pageable page) {
dataset = reloadFromDatabase(dataset); dataset = getDataset(dataset);
return accessionRefRepository.findAll(dataset, page); return accessionRefRepository.findAll(dataset, page);
} }
...@@ -576,6 +712,9 @@ public class DatasetServiceImpl implements DatasetService { ...@@ -576,6 +712,9 @@ public class DatasetServiceImpl implements DatasetService {
if (dataset.getCrops() != null) { if (dataset.getCrops() != null) {
dataset.getCrops().size(); dataset.getCrops().size();
} }
if (dataset.getVersions() != null && dataset.getVersions().getAllVersions() != null) {
dataset.getVersions().getAllVersions().size();
}
return dataset; return dataset;
} }
...@@ -586,7 +725,7 @@ public class DatasetServiceImpl implements DatasetService { ...@@ -586,7 +725,7 @@ public class DatasetServiceImpl implements DatasetService {
@Override @Override
@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'delete')") @PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'delete')")
public Dataset removeDataset(final Dataset dataset) { public Dataset removeDataset(final Dataset dataset) {
final Dataset loadedDataset = reloadFromDatabase(dataset); final Dataset loadedDataset = getDataset(dataset);
for (RepositoryFile repositoryFile : loadedDataset.getRepositoryFiles()) { for (RepositoryFile repositoryFile : loadedDataset.getRepositoryFiles()) {
try { try {
...@@ -614,7 +753,7 @@ public class DatasetServiceImpl implements DatasetService { ...@@ -614,7 +753,7 @@ public class DatasetServiceImpl implements DatasetService {
@Override @Override
@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')") @PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')")
public Dataset addAccessionRefs(final Dataset dataset, @Valid final Set<AccessionRef> accessionRefs) throws NotFoundElement { public Dataset addAccessionRefs(final Dataset dataset, @Valid final Set<AccessionRef> accessionRefs) throws NotFoundElement {
final Dataset loadedDataset = reloadFromDatabase(dataset); final Dataset loadedDataset = getDataset(dataset);
if (loadedDataset == null) { if (loadedDataset == null) {
throw new NotFoundElement("Dataset doesn't exist"); throw new NotFoundElement("Dataset doesn't exist");
} }
...@@ -646,7 +785,7 @@ public class DatasetServiceImpl implements DatasetService { ...@@ -646,7 +785,7 @@ public class DatasetServiceImpl implements DatasetService {
@Transactional @Transactional
@PreAuthorize("hasRole('ADMINISTRATOR')") @PreAuthorize("hasRole('ADMINISTRATOR')")
public Dataset approveDataset(final Dataset dataset) { public Dataset approveDataset(final Dataset dataset) {
Dataset loaded = reloadFromDatabase(dataset); Dataset loaded = getDataset(dataset);
if (loaded == null) { if (loaded == null) {
throw new NotFoundElement("No dataset with specified uuid and version"); throw new NotFoundElement("No dataset with specified uuid and version");
} }
...@@ -705,6 +844,16 @@ public class DatasetServiceImpl implements DatasetService { ...@@ -705,6 +844,16 @@ public class DatasetServiceImpl implements DatasetService {
// Make dataset publicly readable // Make dataset publicly readable
aclService.makePubliclyReadable(loaded, true); aclService.makePubliclyReadable(loaded, true);
final DatasetVersions datasetVersions = loaded.getVersions();
final Dataset oldCurrentDataset = datasetVersions.getAllVersions().stream().filter(s -> Objects.equals(s.getCurrent(), Boolean.TRUE)).findFirst().orElse(null);
if (oldCurrentDataset != null) {
oldCurrentDataset.setCurrent(null);