Commit 67396eeb authored by Matija Obreza

Added Dataset Descriptors and updated SGSV upload

parent 18aa8c47
@@ -18,11 +18,14 @@ package org.crophub.rest.common.model.impl;
import java.text.MessageFormat;
import java.util.Date;
import java.util.List;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Lob;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.OrderBy;
import javax.persistence.Table;
import org.crophub.rest.common.model.BusinessModel;
@@ -49,8 +52,13 @@ public class Dataset extends BusinessModel {
@Lob
private String mapping;
private Date uploadDate;
@OrderBy("orderIndex")
@OneToMany(cascade={}, mappedBy="dataset")
private List<DatasetDescriptor> datasetDescriptors;
public License getLicense() {
return license;
}
@@ -116,6 +124,15 @@ public class Dataset extends BusinessModel {
public void setMapping(final String mapping) {
this.mapping = mapping;
}
public List<DatasetDescriptor> getDatasetDescriptors() {
return datasetDescriptors;
}
public void setDatasetDescriptors(List<DatasetDescriptor> datasetDescriptors) {
this.datasetDescriptors = datasetDescriptors;
}
@Override
public String toString() {
...
/**
* Copyright 2013 Global Crop Diversity Trust
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.crophub.rest.common.model.impl;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
import javax.persistence.UniqueConstraint;
import org.crophub.rest.common.model.BusinessModel;
@Entity
@Table(name = "datasetdescriptor", uniqueConstraints = { @UniqueConstraint(columnNames = { "datasetId", "descriptorId" }) })
public class DatasetDescriptor extends BusinessModel {
private static final long serialVersionUID = 2413430585742976014L;
@ManyToOne(optional = false)
@JoinColumn(name = "datasetId")
private Dataset dataset;
@Column(nullable = false)
private int orderIndex;
@ManyToOne(optional = false)
@JoinColumn(name = "descriptorId")
private Descriptor descriptor;
public Dataset getDataset() {
return dataset;
}
public void setDataset(Dataset dataset) {
this.dataset = dataset;
}
public int getOrderIndex() {
return orderIndex;
}
public void setOrderIndex(int orderIndex) {
this.orderIndex = orderIndex;
}
public Descriptor getDescriptor() {
return descriptor;
}
public void setDescriptor(Descriptor descriptor) {
this.descriptor = descriptor;
}
}
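A minimal sketch of how this join entity might be populated when a dataset's columns are mapped to descriptors; the DatasetDescriptorExample class and its linkDescriptors helper are illustrative assumptions, not part of this commit.

import java.util.ArrayList;
import java.util.List;
import org.crophub.rest.common.model.impl.Dataset;
import org.crophub.rest.common.model.impl.DatasetDescriptor;
import org.crophub.rest.common.model.impl.Descriptor;

public class DatasetDescriptorExample {
	// Builds one DatasetDescriptor per column, preserving the column order via orderIndex.
	static List<DatasetDescriptor> linkDescriptors(Dataset dataset, Descriptor[] columns) {
		List<DatasetDescriptor> links = new ArrayList<DatasetDescriptor>(columns.length);
		for (int i = 0; i < columns.length; i++) {
			DatasetDescriptor dd = new DatasetDescriptor();
			dd.setDataset(dataset);
			dd.setDescriptor(columns[i]);
			dd.setOrderIndex(i);
			links.add(dd);
		}
		return links;
	}
}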
@@ -34,10 +34,10 @@ import org.hibernate.annotations.Index;
@Entity
@Table(name = "sparsedata")
@org.hibernate.annotations.Table(appliesTo = "sparsedata", indexes = {
@Index(columnNames = { "descriptorId", "stringId" }, name = "descriptorstring_SPARSEDATA"), @Index(columnNames = { "datasetDescriptorId", "stringId" }, name = "descriptorstring_SPARSEDATA"),
@Index(columnNames = { "sparseEntryId" }, name = "entry_SPARSEDATA"), @Index(columnNames = { "sparseEntryId" }, name = "entry_SPARSEDATA"),
@Index(columnNames = { "datasetId", "descriptorId" }, name = "datasetdescriptor_SPARSEDATA"), @Index(columnNames = { "datasetId", "datasetDescriptorId" }, name = "datasetdescriptor_SPARSEDATA"),
@Index(columnNames = { "descriptorId" }, name = "descriptor_SPARSEDATA") }) @Index(columnNames = { "datasetDescriptorId" }, name = "descriptor_SPARSEDATA") })
public class SparseData extends BusinessModel {
private static final long serialVersionUID = -2142036544458439223L;
@@ -48,7 +48,7 @@ public class SparseData extends BusinessModel {
private long sparseEntryId;
@OrderColumn
-private long descriptorId;
+private long datasetDescriptorId;
@Column(nullable = false, length = 500)
private String value;
@@ -73,12 +73,12 @@ public class SparseData extends BusinessModel {
this.sparseEntryId = sparseEntryId;
}
-public long getDescriptorId() {
-return descriptorId;
+public long getDatasetDescriptorId() {
+return datasetDescriptorId;
}
-public void setDescriptorId(final long descriptorId) {
-this.descriptorId = descriptorId;
+public void setDatasetDescriptorId(final long datasetDescriptorId) {
+this.datasetDescriptorId = datasetDescriptorId;
}
public String getValue() {
...
/**
* Copyright 2013 Global Crop Diversity Trust
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.crophub.rest.common.persistence.domain;
import java.util.List;
import org.crophub.rest.common.model.impl.Dataset;
import org.crophub.rest.common.model.impl.DatasetDescriptor;
import org.crophub.rest.common.model.impl.Descriptor;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
public interface DatasetDescriptorRepository extends JpaRepository<DatasetDescriptor, Long> {
@Query("select dsd.descriptor from DatasetDescriptor dsd where dsd.dataset=?1 order by dsd.orderIndex")
List<Descriptor> findDescriptorsByDataset(Dataset dataset);
List<DatasetDescriptor> findByDataset(Dataset dataset);
DatasetDescriptor findByDatasetAndDescriptor(Dataset dataset, Descriptor descriptor);
}
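A rough usage sketch for this repository, assuming it is injected into some Spring-managed service; the DatasetColumnLookup class below is hypothetical and only exercises the methods declared above.

import java.util.List;
import org.crophub.rest.common.model.impl.Dataset;
import org.crophub.rest.common.model.impl.DatasetDescriptor;
import org.crophub.rest.common.model.impl.Descriptor;
import org.crophub.rest.common.persistence.domain.DatasetDescriptorRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

@Service
public class DatasetColumnLookup {
	@Autowired
	private DatasetDescriptorRepository datasetDescriptorRepository;

	// Descriptors of a dataset, ordered by orderIndex (delegates to the @Query above).
	public List<Descriptor> columnsOf(Dataset dataset) {
		return datasetDescriptorRepository.findDescriptorsByDataset(dataset);
	}

	// The join row for one dataset/descriptor pair; Spring Data returns null when none exists.
	public DatasetDescriptor linkFor(Dataset dataset, Descriptor descriptor) {
		return datasetDescriptorRepository.findByDatasetAndDescriptor(dataset, descriptor);
	}
}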
@@ -19,7 +19,6 @@ package org.crophub.rest.common.persistence.domain;
import java.util.List;
import org.crophub.rest.common.model.impl.SparseData;
import org.crophub.rest.common.model.impl.SparseString;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
@@ -31,26 +30,26 @@ public interface SparseDataRepository extends JpaRepository<SparseData, Long> {
@Cacheable(value="sparsedata", key="#root.methodName+#a0")
long countByDatasetId(long datasetId);
@Query("select distinct sd.descriptorId from SparseData sd where sd.datasetId=?1") // @Query("select distinct sd.descriptorId from SparseData sd where sd.datasetId=?1")
@Cacheable(value="sparsedata", key="#root.methodName+#a0") // @Cacheable(value="sparsedata", key="#root.methodName+#a0")
List<Long> getDescriptorIdsByDatasetId(long datasetId); // List<Long> getDescriptorIdsByDatasetId(long datasetId);
@Query("select count(sd) from SparseData sd where sd.sparseEntryId=?1") @Query("select count(sd) from SparseData sd where sd.sparseEntryId=?1")
long countBySparseEntryId(long sparseEntryId); long countBySparseEntryId(long sparseEntryId);
-List<SparseData> findByDescriptorId(long descriptorId);
-@Query("select count(sd) from SparseData sd where sd.descriptorId=?1")
-long countByDescriptorId(long descriptorId);
-List<SparseData> findByDescriptorIdAndValue(long descriptorId, SparseString value);
-@Query("select count(sd) from SparseData sd where sd.descriptorId=?1 and sd.value=?2")
-long countByDescriptorIdAndValue(long descriptorId, SparseString value);
-List<SparseData> findByDescriptorIdAndSparseString(long descriptorId,
-SparseString string);
-@Query("select count(sd) from SparseData sd where sd.descriptorId=?1 and sd.sparseString=?2")
-long countByDescriptorIdAndSparseString(long descriptorId, SparseString string);
+//List<SparseData> findByDescriptorId(long descriptorId);
+//@Query("select count(sd) from SparseData sd where sd.descriptorId=?1")
+//long countByDescriptorId(long descriptorId);
+//List<SparseData> findByDescriptorIdAndValue(long descriptorId, SparseString value);
+//@Query("select count(sd) from SparseData sd where sd.descriptorId=?1 and sd.value=?2")
+//long countByDescriptorIdAndValue(long descriptorId, SparseString value);
+//List<SparseData> findByDescriptorIdAndSparseString(long descriptorId,
+// SparseString string);
+//@Query("select count(sd) from SparseData sd where sd.descriptorId=?1 and sd.sparseString=?2")
+//long countByDescriptorIdAndSparseString(long descriptorId, SparseString string);
}
@@ -19,10 +19,15 @@ package org.crophub.rest.common.service;
import java.util.List;
import org.crophub.rest.common.model.impl.Dataset;
import org.crophub.rest.common.model.impl.DatasetDescriptor;
import org.crophub.rest.common.model.impl.Descriptor;
public interface DescriptorService {
List<Descriptor> list();
DatasetDescriptor[] getDatasetDescriptors(Dataset dataset, Descriptor[] descriptors);
DatasetDescriptor[] ensureDatasetDescriptors(Dataset dataset, Descriptor[] descriptors);
}
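A hedged sketch of how an upload path might combine the two new methods: ensureDatasetDescriptors creates any missing dataset/descriptor links before rows are written (it runs in its own transaction), while getDatasetDescriptors is the read-only lookup used during the write itself. The UploadExample class and its wiring are assumptions, not code from this commit.

import java.util.List;
import org.crophub.rest.common.model.impl.Dataset;
import org.crophub.rest.common.model.impl.Descriptor;
import org.crophub.rest.common.service.DataService;
import org.crophub.rest.common.service.DescriptorService;
import org.springframework.beans.factory.annotation.Autowired;

public class UploadExample {
	@Autowired
	private DescriptorService descriptorService;
	@Autowired
	private DataService dataService;

	void upload(Dataset dataset, Descriptor[] columns, List<String[]> rows) {
		// Make sure every column has a persistent DatasetDescriptor before writing rows.
		descriptorService.ensureDatasetDescriptors(dataset, columns);
		// writeEntries resolves the same links again via getDatasetDescriptors.
		dataService.writeEntries(dataset, columns, rows);
	}
}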
@@ -7,54 +7,57 @@ import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.crophub.rest.common.model.impl.Dataset;
import org.crophub.rest.common.model.impl.DatasetDescriptor;
import org.crophub.rest.common.model.impl.Descriptor;
import org.crophub.rest.common.model.impl.SparseData;
import org.crophub.rest.common.model.impl.SparseEntry;
import org.crophub.rest.common.persistence.domain.DatasetDescriptorRepository;
import org.crophub.rest.common.persistence.domain.DatasetRepository;
import org.crophub.rest.common.persistence.domain.DescriptorRepository;
import org.crophub.rest.common.persistence.domain.SparseDataRepository;
import org.crophub.rest.common.persistence.domain.SparseEntryRepository;
import org.crophub.rest.common.service.DataService;
import org.crophub.rest.common.service.DescriptorService;
import org.crophub.rest.common.service.SparseStringService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
@Service
@Transactional(readOnly = true)
public class DataServiceImpl implements DataService {
-public static final Log LOG = LogFactory
-		.getLog(DataServiceImpl.class);
+public static final Log LOG = LogFactory.getLog(DataServiceImpl.class);
@Autowired
SparseEntryRepository entryRepository;
@Autowired
SparseDataRepository dataRepository;
@Autowired
DatasetDescriptorRepository datasetDescriptorRepository;
@Autowired
-DescriptorRepository descriptorRepository;
+DescriptorService descriptorService;
@Autowired
SparseStringService stringService;
@Autowired
private DatasetRepository datasetRepository;
@Override
public List<Dataset> list() {
return datasetRepository.findAll();
}
@Override
public Dataset getDataset(long datasetId) {
return datasetRepository.findOne(datasetId);
}
@Override
-@Transactional(readOnly=false)
+@Transactional(readOnly = false)
public void save(Dataset dataset) {
datasetRepository.save(dataset);
}
@@ -71,20 +74,19 @@ public class DataServiceImpl implements DataService {
@Override
public Iterable<Descriptor> getDescriptors(Dataset dataset) {
-return descriptorRepository.findAll(dataRepository.getDescriptorIdsByDatasetId(dataset.getId()));
+return datasetDescriptorRepository.findDescriptorsByDataset(dataset);
}
// FIXME This code does not handle column grouping
@Override
-@Transactional(readOnly = false)
-@CacheEvict(value="methods", allEntries=true)
-public void writeEntries(final Dataset dataset, final Descriptor[] descriptors,
-		final List<String[]> datas) {
-	final List<SparseEntry> sparseEntries = new ArrayList<SparseEntry>(
-			datas.size());
-	final List<SparseData> sparseDatas = new ArrayList<SparseData>(
-			descriptors.length);
+@Transactional(propagation = Propagation.REQUIRED, readOnly = false)
+public void writeEntries(final Dataset dataset, final Descriptor[] descriptors, final List<String[]> datas) {
+	LOG.debug("Writing batch of " + datas.size());
+	final DatasetDescriptor[] datasetDescriptors = descriptorService.getDatasetDescriptors(dataset, descriptors);
+	final List<SparseEntry> sparseEntries = new ArrayList<SparseEntry>(datas.size());
+	final List<SparseData> sparseDatas = new ArrayList<SparseData>(descriptors.length);
for (int j = 0; j < datas.size(); j++) {
// New entry
@@ -95,7 +97,10 @@ public class DataServiceImpl implements DataService {
// save the lot
entryRepository.save(sparseEntries);
LOG.debug("sparseEntries.size=" + sparseEntries.size());
long datasetId = dataset.getId();
for (int j = 0; j < datas.size(); j++) {
final String[] data = datas.get(j);
final long sparseEntryId = sparseEntries.get(j).getId();
@@ -111,15 +116,19 @@ public class DataServiceImpl implements DataService {
valueCount++;
final SparseData sparseData = new SparseData();
sparseData.setSparseEntryId(sparseEntryId);
-sparseData.setDescriptorId(descriptors[i].getId());
+// Can throw NPE! Cool :-)
sparseData.setDatasetDescriptorId(datasetDescriptors[i].getId());
sparseData.setDatasetId(datasetId);
sparseData.setValue(data[i]);
sparseDatas.add(sparseData);
}
}
if (valueCount == 0) {
-entryRepository.delete(sparseEntries.remove(j));
+LOG.warn("No data, removing entry");
entryRepository.delete(sparseEntries.get(j));
}
}
dataRepository.save(sparseDatas);
}
}
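The "Can throw NPE" comment above refers to datasetDescriptors[i] being null when a column has no dataset/descriptor link (getDatasetDescriptors skips null descriptors). A possible null-safe variant of that inner step, shown only as a sketch; the SparseDataRows class and addValue method are hypothetical and not what the commit does.

import java.util.List;
import org.crophub.rest.common.model.impl.DatasetDescriptor;
import org.crophub.rest.common.model.impl.SparseData;

public class SparseDataRows {
	// Adds one SparseData value; returns false (and skips) when the column has no link or the value is blank.
	static boolean addValue(List<SparseData> sparseDatas, long sparseEntryId, long datasetId,
			DatasetDescriptor link, String value) {
		if (link == null || value == null || value.isEmpty()) {
			return false;
		}
		SparseData sparseData = new SparseData();
		sparseData.setSparseEntryId(sparseEntryId);
		sparseData.setDatasetDescriptorId(link.getId());
		sparseData.setDatasetId(datasetId);
		sparseData.setValue(value);
		sparseDatas.add(sparseData);
		return true;
	}
}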
@@ -2,22 +2,86 @@ package org.crophub.rest.common.service.impl;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.crophub.rest.common.model.impl.Dataset;
import org.crophub.rest.common.model.impl.DatasetDescriptor;
import org.crophub.rest.common.model.impl.Descriptor;
import org.crophub.rest.common.persistence.domain.DatasetDescriptorRepository;
import org.crophub.rest.common.persistence.domain.DescriptorRepository;
import org.crophub.rest.common.service.DescriptorService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
@Service
@Transactional(readOnly = true)
public class DescriptorServiceImpl implements DescriptorService {
public static final Log LOG = LogFactory.getLog(DescriptorServiceImpl.class);
@Autowired
private DescriptorRepository descriptorRepository;
@Autowired
private DatasetDescriptorRepository datasetDescriptorRepository;
@Override
public List<Descriptor> list() {
return descriptorRepository.findAll();
}
/**
* Fetch existing {@link DatasetDescriptor}s for the given dataset, one per descriptor.
*
* @param dataset the dataset whose descriptor links are looked up
* @param descriptors the descriptors, in column order
* @return matching {@link DatasetDescriptor}s; an entry is null where no link exists
*/
@Override
@Transactional
public DatasetDescriptor[] getDatasetDescriptors(final Dataset dataset, final Descriptor[] descriptors) {
final DatasetDescriptor[] datasetDescriptors = new DatasetDescriptor[descriptors.length];
for (int i = 0; i < descriptors.length; i++) {
if (descriptors[i] == null) {
// Skip null descriptor
continue;
}
datasetDescriptors[i] = datasetDescriptorRepository.findByDatasetAndDescriptor(dataset, descriptors[i]);
}
return datasetDescriptors;
}
/**
* Fetch {@link DatasetDescriptor}s, creating any that do not yet exist.
*
* @param dataset the dataset whose descriptor links are ensured
* @param descriptors the descriptors, in column order
* @return the existing or newly created {@link DatasetDescriptor}s
*/
@Override
@Transactional(propagation = Propagation.REQUIRES_NEW, readOnly = false)
public DatasetDescriptor[] ensureDatasetDescriptors(final Dataset dataset, final Descriptor[] descriptors) {
final DatasetDescriptor[] datasetDescriptors = new DatasetDescriptor[descriptors.length];
for (int i = 0; i < descriptors.length; i++) {
if (descriptors[i] == null) {