Commit d868f741 authored by Steve Fisher

Restructure to facilitate integration tests and simplify the Storage Interface

parent bf8fe93c
......@@ -8,18 +8,18 @@
<fileSets>
<fileSet>
<outputDirectory>ids</outputDirectory>
<outputDirectory>ids.server</outputDirectory>
<directory>${basedir}/src/main/scripts</directory>
<excludes>
<exclude>*.dump</exclude>
</excludes>
</fileSet>
<fileSet>
<outputDirectory>ids</outputDirectory>
<outputDirectory>ids.server</outputDirectory>
<directory>${basedir}/src/main/config</directory>
</fileSet>
<fileSet>
<outputDirectory>ids</outputDirectory>
<outputDirectory>ids.server</outputDirectory>
<directory>${basedir}</directory>
<filtered>true</filtered>
<includes>
......@@ -31,15 +31,15 @@
<dependencySets>
<dependencySet>
<outputDirectory>ids</outputDirectory>
<outputDirectory>ids.server</outputDirectory>
<useTransitiveDependencies>false</useTransitiveDependencies>
<includes>
<include>org.icatproject:ids</include>
<include>org.icatproject:ids.server</include>
</includes>
</dependencySet>
<dependencySet>
<outputDirectory>ids</outputDirectory>
<outputDirectory>ids.server</outputDirectory>
<useTransitiveDependencies>false</useTransitiveDependencies>
<unpack>true</unpack>
<unpackOptions>
......
#!/bin/bash
mvn clean install -DskipTests
asadmin undeploy ids-1.0.0-SNAPSHOT
asadmin deploy --contextroot ids target/ids-1.0.0-SNAPSHOT.war | grep -v PER0
......@@ -2,26 +2,28 @@ package org.icatproject.ids.storage;
import java.io.IOException;
import java.io.InputStream;
import org.icatproject.Datafile;
import org.icatproject.Dataset;
public interface StorageInterface {
public InputStream getDataset(Dataset dataset) throws IOException;
public void putDataset(Dataset dataset, InputStream is) throws IOException;
public void deleteDataset(Dataset dataset) throws IOException;
public void deleteDataset(String location) throws IOException;
public boolean datasetExists(Dataset dataset) throws IOException;
public boolean datafileExists(String location) throws IOException;
public boolean datasetExists(String location) throws IOException;
public InputStream getDatafile(Datafile datafile) throws IOException;
/*
* putDatafile methods (both of them) don't close the InputStream!
*/
public long putDatafile(Datafile datafile, InputStream is) throws IOException;
public long putDatafile(String relativeLocation, InputStream is) throws IOException;
public void deleteDatafile(Datafile datafile) throws IOException;
public boolean datafileExists(Datafile datafile) throws IOException;
public void deleteDatafile(String location) throws IOException;
public void deleteDataset(String location) throws IOException;
public InputStream getDatafile(String location) throws IOException;
public InputStream getDataset(String location) throws IOException;
public InputStream getPreparedZip(String zipName, long offset) throws IOException;
/** Write to datafile file at location and leave the input stream open */
public long putDatafile(String location, InputStream is) throws IOException;
/** Write to dataset file at location and close the input stream */
public void putDataset(String location, InputStream is) throws IOException;
/** Write to zip file at location and close the input stream */
public void putPreparedZip(String zipName, InputStream is) throws IOException;
}
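
The commit message describes simplifying the Storage Interface: the entity-based methods taking Datafile/Dataset objects are replaced by overloads that take plain location strings. A minimal caller-side sketch of the new contract follows; the class and method names (StorageUsageSketch, copyDataset) are illustrative and not part of this commit.

package org.icatproject.ids.storage;

import java.io.IOException;
import java.io.InputStream;

/*
 * Illustrative sketch only: drives the simplified, location-based
 * StorageInterface. Names here are hypothetical, not from this commit.
 */
public class StorageUsageSketch {

    /** Copy the zipped dataset at 'location' from slow to fast storage if missing. */
    public static void copyDataset(StorageInterface slow, StorageInterface fast,
            String location) throws IOException {
        if (fast.datasetExists(location)) {
            return; // already cached on fast storage
        }
        InputStream is = slow.getDataset(location);
        try {
            // putDataset is documented to close the input stream itself
            fast.putDataset(location, is);
        } finally {
            is.close(); // harmless if already closed
        }
    }
}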
......@@ -10,8 +10,6 @@ import java.io.IOException;
import java.io.InputStream;
import org.apache.commons.io.IOUtils;
import org.icatproject.Datafile;
import org.icatproject.Dataset;
import org.icatproject.ids.storage.StorageInterface;
import org.icatproject.ids.storage.StoragePropertyHandler;
import org.icatproject.ids.storage.StorageType;
......@@ -44,11 +42,12 @@ public class LocalFileStorage implements StorageInterface {
}
@Override
public InputStream getDataset(Dataset dataset) throws IOException {
public InputStream getDataset(String location) throws IOException {
if (STORAGE_ZIP_DIR == null) {
throw new UnsupportedOperationException(String.format("Storage %s doesn't support Datasets", storageType));
throw new UnsupportedOperationException(String.format(
"Storage %s doesn't support Datasets", storageType));
}
File zippedDs = new File(new File(STORAGE_ZIP_DIR, dataset.getLocation()), "files.zip");
File zippedDs = new File(new File(STORAGE_ZIP_DIR, location), "files.zip");
if (!zippedDs.exists()) {
throw new FileNotFoundException(zippedDs.getAbsolutePath());
}
......@@ -56,11 +55,12 @@ public class LocalFileStorage implements StorageInterface {
}
@Override
public void putDataset(Dataset dataset, InputStream is) throws IOException {
public void putDataset(String location, InputStream is) throws IOException {
if (STORAGE_ZIP_DIR == null) {
throw new UnsupportedOperationException(String.format("Storage %s doesn't support Datasets", storageType));
throw new UnsupportedOperationException(String.format(
"Storage %s doesn't support Datasets", storageType));
}
File zippedDs = new File(new File(STORAGE_ZIP_DIR, dataset.getLocation()), "files.zip");
File zippedDs = new File(new File(STORAGE_ZIP_DIR, location), "files.zip");
File zippedDsDir = zippedDs.getParentFile();
zippedDsDir.mkdirs();
zippedDs.createNewFile();
......@@ -68,49 +68,33 @@ public class LocalFileStorage implements StorageInterface {
is.close();
}
@Override
public void deleteDataset(Dataset dataset) throws IOException {
if (STORAGE_ZIP_DIR == null) {
throw new UnsupportedOperationException(String.format("Storage %s doesn't support Datasets", storageType));
}
File zippedDs = new File(new File(STORAGE_ZIP_DIR, dataset.getLocation()), "files.zip");
zippedDs.delete();
}
@Override
public void deleteDataset(String location) throws IOException {
if (STORAGE_ZIP_DIR == null) {
throw new UnsupportedOperationException(String.format("Storage %s doesn't support Datasets", storageType));
throw new UnsupportedOperationException(String.format(
"Storage %s doesn't support Datasets", storageType));
}
File zippedDs = new File(new File(STORAGE_ZIP_DIR, location), "files.zip");
zippedDs.delete();
}
@Override
public boolean datasetExists(Dataset dataset) throws IOException {
if (STORAGE_ZIP_DIR == null) {
throw new UnsupportedOperationException(String.format("Storage %s doesn't support Datasets", storageType));
}
File zippedDs = new File(new File(STORAGE_ZIP_DIR, dataset.getLocation()), "files.zip");
return zippedDs.exists();
}
@Override
public boolean datasetExists(String location) throws IOException {
if (STORAGE_ZIP_DIR == null) {
throw new UnsupportedOperationException(String.format("Storage %s doesn't support Datasets", storageType));
throw new UnsupportedOperationException(String.format(
"Storage %s doesn't support Datasets", storageType));
}
File zippedDs = new File(new File(STORAGE_ZIP_DIR, location), "files.zip");
return zippedDs.exists();
}
@Override
public InputStream getDatafile(Datafile datafile) throws FileNotFoundException {
public InputStream getDatafile(String location) throws FileNotFoundException {
if (STORAGE_DIR == null) {
throw new UnsupportedOperationException(String.format("Storage %s doesn't support single Datafiles",
storageType));
throw new UnsupportedOperationException(String.format(
"Storage %s doesn't support single Datafiles", storageType));
}
File file = new File(STORAGE_DIR, datafile.getLocation());
File file = new File(STORAGE_DIR, location);
if (!file.exists()) {
throw new FileNotFoundException(file.getAbsolutePath());
}
......@@ -118,44 +102,33 @@ public class LocalFileStorage implements StorageInterface {
}
@Override
public long putDatafile(Datafile datafile, InputStream is) throws IOException {
public long putDatafile(String location, InputStream is) throws IOException {
if (STORAGE_DIR == null) {
throw new UnsupportedOperationException(String.format("Storage %s doesn't support single Datafiles",
storageType));
}
File file = new File(new File(STORAGE_DIR, datafile.getDataset().getLocation()), datafile.getName());
writeInputStreamToFile(file, is);
return file.length();
};
@Override
public long putDatafile(String relativeLocation, InputStream is) throws IOException {
if (STORAGE_DIR == null) {
throw new UnsupportedOperationException(String.format("Storage %s doesn't support single Datafiles",
storageType));
throw new UnsupportedOperationException(String.format(
"Storage %s doesn't support single Datafiles", storageType));
}
File file = new File(STORAGE_DIR, relativeLocation);
File file = new File(STORAGE_DIR, location);
writeInputStreamToFile(file, is);
return file.length();
}
@Override
public void deleteDatafile(Datafile datafile) throws IOException {
public void deleteDatafile(String location) throws IOException {
if (STORAGE_DIR == null) {
throw new UnsupportedOperationException(String.format("Storage %s doesn't support single Datafiles",
storageType));
throw new UnsupportedOperationException(String.format(
"Storage %s doesn't support single Datafiles", storageType));
}
File file = new File(STORAGE_DIR, datafile.getLocation());
File file = new File(STORAGE_DIR, location);
file.delete();
}
@Override
public boolean datafileExists(Datafile datafile) throws IOException {
public boolean datafileExists(String location) throws IOException {
if (STORAGE_DIR == null) {
throw new UnsupportedOperationException(String.format("Storage %s doesn't support single Datafiles",
storageType));
throw new UnsupportedOperationException(String.format(
"Storage %s doesn't support single Datafiles", storageType));
}
File file = new File(STORAGE_DIR, datafile.getLocation());
File file = new File(STORAGE_DIR, location);
return file.exists();
}
......@@ -170,8 +143,8 @@ public class LocalFileStorage implements StorageInterface {
throw new FileNotFoundException(preparedZip.getAbsolutePath());
}
if (offset >= preparedZip.length()) {
throw new IllegalArgumentException("Offset (" + offset + " bytes) is larger than file size ("
+ preparedZip.length() + " bytes)");
throw new IllegalArgumentException("Offset (" + offset
+ " bytes) is larger than file size (" + preparedZip.length() + " bytes)");
}
InputStream res = new BufferedInputStream(new FileInputStream(preparedZip));
IOUtils.skip(res, offset);
......
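
The getPreparedZip hunk above keeps the same resume-by-offset pattern: validate the offset against the file length, then skip that many bytes before handing the stream back. A standalone sketch of that pattern, with an illustrative class and method name:

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;

import org.apache.commons.io.IOUtils;

public class OffsetReadSketch {
    /** Open 'zip' for reading, starting 'offset' bytes in (illustrative only). */
    public static InputStream openAtOffset(File zip, long offset) throws IOException {
        if (!zip.exists()) {
            throw new FileNotFoundException(zip.getAbsolutePath());
        }
        if (offset >= zip.length()) {
            throw new IllegalArgumentException("Offset (" + offset
                    + " bytes) is larger than file size (" + zip.length() + " bytes)");
        }
        InputStream res = new BufferedInputStream(new FileInputStream(zip));
        IOUtils.skip(res, offset); // discard the first 'offset' bytes
        return res;
    }
}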
......@@ -30,20 +30,21 @@ public class Archiver implements Runnable {
this.requestQueues = RequestQueues.getInstance();
this.requestHelper = requestHelper;
}
@Override
public void run() {
logger.info("starting Archiver");
Map<IdsDataEntity, RequestedState> deferredOpsQueue = requestQueues.getDeferredOpsQueue();
Set<Dataset> changing = requestQueues.getChanging();
StorageInterface fastStorageInterface = StorageFactory.getInstance().createFastStorageInterface();
StorageInterface fastStorageInterface = StorageFactory.getInstance()
.createFastStorageInterface();
StatusInfo resultingStatus = StatusInfo.COMPLETED; // assuming that everything will go OK
Dataset ds = de.getIcatDataset();
try {
fastStorageInterface.deleteDataset(ds);
fastStorageInterface.deleteDataset(ds.getLocation());
for (Datafile df : ds.getDatafiles()) {
fastStorageInterface.deleteDatafile(df);
fastStorageInterface.deleteDatafile(df.getLocation());
}
logger.info("Archive of " + ds.getLocation() + " succesful");
} catch (Exception e) {
......
......@@ -37,55 +37,64 @@ public class Preparer implements Runnable {
this.requestQueues = RequestQueues.getInstance();
this.requestHelper = requestHelper;
}
@Override
public void run() {
logger.info("starting preparer");
Map<IdsDataEntity, RequestedState> deferredOpsQueue = requestQueues.getDeferredOpsQueue();
Set<Dataset> changing = requestQueues.getChanging();
StorageInterface fastStorageInterface = StorageFactory.getInstance().createFastStorageInterface();
StorageInterface slowStorageInterface = StorageFactory.getInstance().createSlowStorageInterface();
// if one of the previous DataEntities of the Request failed, there's no point continuing with this one
// if (de.getRequest().getStatus() == StatusInfo.INCOMPLETE) {
// synchronized (deferredOpsQueue) {
// requestHelper.setDataEntityStatus(de, StatusInfo.INCOMPLETE);
// }
// }
// if this is the first DE of the Request being processed, set the Request status to RETRIVING
StorageInterface fastStorageInterface = StorageFactory.getInstance()
.createFastStorageInterface();
StorageInterface slowStorageInterface = StorageFactory.getInstance()
.createSlowStorageInterface();
// if one of the previous DataEntities of the Request failed, there's no point continuing
// with this one
// if (de.getRequest().getStatus() == StatusInfo.INCOMPLETE) {
// synchronized (deferredOpsQueue) {
// requestHelper.setDataEntityStatus(de, StatusInfo.INCOMPLETE);
// }
// }
// if this is the first DE of the Request being processed, set the Request status to
// RETRIVING
if (de.getRequest().getStatus() == StatusInfo.SUBMITTED) {
synchronized (deferredOpsQueue) {
requestHelper.setRequestStatus(de.getRequest(), StatusInfo.RETRIVING);
}
}
StatusInfo resultingStatus = StatusInfo.COMPLETED; // let's assume that everything will go OK
StatusInfo resultingStatus = StatusInfo.COMPLETED; // let's assume that everything will go
// OK
// restore the dataset if needed
InputStream slowIS = null;
ZipInputStream fastIS = null;
try {
if (!fastStorageInterface.datasetExists(de.getIcatDataset())) {
if (!fastStorageInterface.datasetExists(de.getIcatDataset().getLocation())) {
if (slowStorageInterface == null) {
logger.error("Preparer can't perform because there's no slow storage");
resultingStatus = StatusInfo.ERROR;
} else {
slowIS = slowStorageInterface.getDataset(de.getIcatDataset());
fastStorageInterface.putDataset(de.getIcatDataset(), slowIS);
fastIS = new ZipInputStream(fastStorageInterface.getDataset(de.getIcatDataset()));
slowIS = slowStorageInterface.getDataset(de.getIcatDataset().getLocation());
fastStorageInterface.putDataset(de.getIcatDataset().getLocation(), slowIS);
fastIS = new ZipInputStream(fastStorageInterface.getDataset(de.getIcatDataset()
.getLocation()));
ZipEntry entry;
while ((entry = fastIS.getNextEntry()) != null) {
if (entry.isDirectory()) {
continue;
}
String datafileLocation = new File(de.getIcatDataset().getLocation(), entry.getName()).getPath();
String datafileLocation = new File(de.getIcatDataset().getLocation(),
entry.getName()).getPath();
fastStorageInterface.putDatafile(datafileLocation, fastIS);
}
}
}
} catch (FileNotFoundException e) {
logger.warn("Could not restore " + de.getIcatDataset() + " (file doesn't exist): " + e.getMessage());
logger.warn("Could not restore " + de.getIcatDataset() + " (file doesn't exist): "
+ e.getMessage());
resultingStatus = StatusInfo.NOT_FOUND;
} catch (Exception e) {
logger.warn("Could not restore " + de.getIcatDataset() + " (reason uknonwn): " + e.getMessage());
logger.warn("Could not restore " + de.getIcatDataset() + " (reason uknonwn): "
+ e.getMessage());
resultingStatus = StatusInfo.ERROR;
} finally {
if (slowIS != null) {
......@@ -103,17 +112,18 @@ public class Preparer implements Runnable {
}
}
}
synchronized (deferredOpsQueue) {
logger.info(String.format("Changing status of %s to %s", de, resultingStatus));
requestHelper.setDataEntityStatus(de, resultingStatus);
changing.remove(de.getIcatDataset());
}
// if it's the last DataEntity of the Request and all of them were successful
if (de.getRequest().getStatus() == StatusInfo.COMPLETED) {
try {
InputStream is = ZipHelper.prepareZipForUserRequest(de.getRequest(), fastStorageInterface);
InputStream is = ZipHelper.prepareZipForUserRequest(de.getRequest(),
fastStorageInterface);
fastStorageInterface.putPreparedZip(de.getRequest().getPreparedId() + ".zip", is);
} catch (Exception e) {
logger.warn(String.format("Could not prepare the zip. Reason: " + e.getMessage()));
......
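
Both Preparer above and Restorer below share the same restore sequence against the new interface: copy the zipped dataset from slow to fast storage, then unpack each zip entry onto fast storage as an individual datafile. A condensed sketch of that sequence, under the assumption of a dataset location string (the class and method names are illustrative):

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

import org.icatproject.ids.storage.StorageInterface;

public class RestoreSketch {
    /** Restore the dataset at 'datasetLocation' from slow to fast storage (sketch only). */
    public static void restore(StorageInterface slow, StorageInterface fast,
            String datasetLocation) throws IOException {
        InputStream slowIs = slow.getDataset(datasetLocation);
        fast.putDataset(datasetLocation, slowIs); // putDataset closes slowIs
        ZipInputStream fastIs = new ZipInputStream(fast.getDataset(datasetLocation));
        try {
            ZipEntry entry;
            while ((entry = fastIs.getNextEntry()) != null) {
                if (entry.isDirectory()) {
                    continue;
                }
                String datafileLocation = new File(datasetLocation, entry.getName()).getPath();
                fast.putDatafile(datafileLocation, fastIs); // leaves the zip stream open
            }
        } finally {
            fastIs.close();
        }
    }
}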
......@@ -36,40 +36,46 @@ public class Restorer implements Runnable {
this.requestQueues = RequestQueues.getInstance();
this.requestHelper = requestHelper;
}
@Override
public void run() {
logger.info("starting restorer");
StorageInterface slowStorageInterface = StorageFactory.getInstance().createSlowStorageInterface();
StorageInterface fastStorageInterface = StorageFactory.getInstance().createFastStorageInterface();
StorageInterface slowStorageInterface = StorageFactory.getInstance()
.createSlowStorageInterface();
StorageInterface fastStorageInterface = StorageFactory.getInstance()
.createFastStorageInterface();
StatusInfo resultingStatus = StatusInfo.COMPLETED; // assuming that everything will go OK
InputStream slowIS = null;
ZipInputStream fastIS = null;
try {
if (!fastStorageInterface.datasetExists(de.getIcatDataset())) {
if (!fastStorageInterface.datasetExists(de.getIcatDataset().getLocation())) {
if (slowStorageInterface == null) {
logger.error("Restorer can't perform because there's no slow storage");
resultingStatus = StatusInfo.ERROR;
} else {
slowIS = slowStorageInterface.getDataset(de.getIcatDataset());
fastStorageInterface.putDataset(de.getIcatDataset(), slowIS);
fastIS = new ZipInputStream(fastStorageInterface.getDataset(de.getIcatDataset()));
slowIS = slowStorageInterface.getDataset(de.getIcatDataset().getLocation());
fastStorageInterface.putDataset(de.getIcatDataset().getLocation(), slowIS);
fastIS = new ZipInputStream(fastStorageInterface.getDataset(de.getIcatDataset()
.getLocation()));
ZipEntry entry;
while ((entry = fastIS.getNextEntry()) != null) {
if (entry.isDirectory()) {
continue;
}
String datafileLocation = new File(de.getIcatDataset().getLocation(), entry.getName()).getPath();
String datafileLocation = new File(de.getIcatDataset().getLocation(),
entry.getName()).getPath();
fastStorageInterface.putDatafile(datafileLocation, fastIS);
}
}
}
} catch (FileNotFoundException e) {
logger.warn("Could not restore " + de.getIcatDataset() + " (file doesn't exist): " + e.getMessage());
logger.warn("Could not restore " + de.getIcatDataset() + " (file doesn't exist): "
+ e.getMessage());
resultingStatus = StatusInfo.NOT_FOUND;
} catch (Exception e) {
logger.warn("Could not restore " + de.getIcatDataset() + " (reason unknonwn): " + e.getMessage());
logger.warn("Could not restore " + de.getIcatDataset() + " (reason unknonwn): "
+ e.getMessage());
resultingStatus = StatusInfo.ERROR;
} finally {
if (slowIS != null) {
......@@ -86,7 +92,7 @@ public class Restorer implements Runnable {
logger.warn("Couldn't close an input stream from the fast storage");
}
}
}
}
Map<IdsDataEntity, RequestedState> deferredOpsQueue = requestQueues.getDeferredOpsQueue();
Set<Dataset> changing = requestQueues.getChanging();
......@@ -94,7 +100,7 @@ public class Restorer implements Runnable {
logger.info(String.format("Changing status of %s to %s", de, resultingStatus));
requestHelper.setDataEntityStatus(de, resultingStatus);
changing.remove(de.getIcatDataset());
}
}
}
}
\ No newline at end of file
......@@ -21,7 +21,7 @@ import org.slf4j.LoggerFactory;
* removes the files on the fast storage
*/
public class WriteThenArchiver implements Runnable {
private final static Logger logger = LoggerFactory.getLogger(WriteThenArchiver.class);
private IdsDataEntity de;
......@@ -33,34 +33,37 @@ public class WriteThenArchiver implements Runnable {
this.requestQueues = RequestQueues.getInstance();
this.requestHelper = requestHelper;
}
@Override
public void run() {
logger.info("starting WriteThenArchiver");
Map<IdsDataEntity, RequestedState> deferredOpsQueue = requestQueues.getDeferredOpsQueue();
Set<Dataset> changing = requestQueues.getChanging();
StorageInterface slowStorageInterface = StorageFactory.getInstance().createSlowStorageInterface();
StorageInterface fastStorageInterface = StorageFactory.getInstance().createFastStorageInterface();
StorageInterface slowStorageInterface = StorageFactory.getInstance()
.createSlowStorageInterface();
StorageInterface fastStorageInterface = StorageFactory.getInstance()
.createFastStorageInterface();
StatusInfo resultingStatus = StatusInfo.COMPLETED; // assuming that everything will go OK
Dataset ds = de.getIcatDataset();
Dataset ds = de.getIcatDataset();
try {
if (slowStorageInterface == null) {
logger.error("WriteThenArchiver can't perform because there's no slow storage");
resultingStatus = StatusInfo.ERROR;
return;
}
if (fastStorageInterface.datasetExists(ds)) {
InputStream is = fastStorageInterface.getDataset(ds);
slowStorageInterface.putDataset(ds, is);
fastStorageInterface.deleteDataset(ds);
if (fastStorageInterface.datasetExists(ds.getLocation())) {
InputStream is = fastStorageInterface.getDataset(ds.getLocation());
slowStorageInterface.putDataset(ds.getLocation(), is);
fastStorageInterface.deleteDataset(ds.getLocation());
for (Datafile df : ds.getDatafiles()) {
fastStorageInterface.deleteDatafile(df);
fastStorageInterface.deleteDatafile(df.getLocation());
}
}
logger.info("WriteThenArchive of " + ds.getLocation() + " succesful");
} catch (Exception e) {
logger.error("WriteThenArchive of " + ds.getLocation() + " failed due to " + e.getMessage());
logger.error("WriteThenArchive of " + ds.getLocation() + " failed due to "
+ e.getMessage());
resultingStatus = StatusInfo.INCOMPLETE;
} finally {
synchronized (deferredOpsQueue) {
......
......@@ -21,7 +21,7 @@ import org.slf4j.LoggerFactory;
* Copies datasets across storages (fast to slow)
*/
public class Writer implements Runnable {
private final static Logger logger = LoggerFactory.getLogger(Writer.class);
private IdsDataEntity de;
......@@ -33,19 +33,22 @@ public class Writer implements Runnable {
this.requestQueues = RequestQueues.getInstance();
this.requestHelper = requestHelper;
}
@Override
public void run() {
logger.info("starting Writer");
Map<IdsDataEntity, RequestedState> deferredOpsQueue = requestQueues.getDeferredOpsQueue();
Set<Dataset> changing = requestQueues.getChanging();
StorageInterface slowStorageInterface = StorageFactory.getInstance().createSlowStorageInterface();
StorageInterface fastStorageInterface = StorageFactory.getInstance().createFastStorageInterface();
StorageInterface slowStorageInterface = StorageFactory.getInstance()
.createSlowStorageInterface();
StorageInterface fastStorageInterface = StorageFactory.getInstance()
.createFastStorageInterface();
StatusInfo resultingStatus = StatusInfo.COMPLETED; // assuming that everything will go OK
Dataset ds = null;
try {
if (de instanceof IdsDatasetEntity && !fastStorageInterface.datasetExists(de.getLocation())) {
if (de instanceof IdsDatasetEntity
&& !fastStorageInterface.datasetExists(de.getLocation())) {
if (slowStorageInterface != null) {
slowStorageInterface.deleteDataset(de.getLocation());
}
......@@ -53,16 +56,16 @@ public class Writer implements Runnable {
}
ds = de.getIcatDataset();
InputStream zipIs = ZipHelper.zipDataset(ds, false, fastStorageInterface);
fastStorageInterface.putDataset(ds, zipIs);
fastStorageInterface.putDataset(ds.getLocation(), zipIs);
if (slowStorageInterface == null) {
logger.error("Writer can't perform because there's no slow storage");
resultingStatus = StatusInfo.ERROR;
return;
}
if (fastStorageInterface.datasetExists(ds)) {
InputStream is = fastStorageInterface.getDataset(ds);
slowStorageInterface.putDataset(ds, is);
if (fastStorageInterface.datasetExists(ds.getLocation())) {
InputStream is = fastStorageInterface.getDataset(ds.getLocation());
slowStorageInterface.putDataset(ds.getLocation(), is);
}
logger.info("Write of " + ds.getLocation() + " succesful");
} catch (Exception e) {
......
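
WriteThenArchiver and Writer above follow the same location-based flow: read the zipped dataset from fast storage, push it to slow storage, and, in the archiving case, remove the fast-storage copies. A condensed sketch of the archive step, with illustrative class, method, and parameter names:

import java.io.IOException;
import java.io.InputStream;
import java.util.List;

import org.icatproject.ids.storage.StorageInterface;

public class ArchiveSketch {
    /** Move the dataset at 'datasetLocation' to slow storage and clear the fast copies (sketch only). */
    public static void archive(StorageInterface fast, StorageInterface slow,
            String datasetLocation, List<String> datafileLocations) throws IOException {
        if (!fast.datasetExists(datasetLocation)) {
            return; // nothing cached on fast storage
        }
        InputStream is = fast.getDataset(datasetLocation);
        slow.putDataset(datasetLocation, is); // putDataset closes the stream
        fast.deleteDataset(datasetLocation);
        for (String dfLocation : datafileLocations) {
            fast.deleteDatafile(dfLocation);
        }
    }
}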
......@@ -22,10 +22,11 @@ public class ZipHelper {
private final static Logger logger = LoggerFactory.getLogger(ZipHelper.class);
public static InputStream zipDataset(Dataset dataset, boolean compress, StorageInterface storageInterface)
throws IOException {
File tmpZipFile = new File(PropertyHandler.getInstance().getTmpDir(),
new Long(System.currentTimeMillis()).toString() + ".zip");
public static InputStream zipDataset(Dataset dataset, boolean compress,
StorageInterface storageInterface) throws IOException {
File tmpZipFile = new File(PropertyHandler.getInstance().getTmpDir(), new Long(
System.currentTimeMillis()).toString()
+ ".zip");
if (dataset.getDatafiles().isEmpty()) {
// Create empty file
tmpZipFile.createNewFile();
......@@ -48,7 +49,7 @@ public class ZipHelper {
}
for (Datafile df : dataset.getDatafiles()) {
logger.info("Adding file " + df.getName() + " to zip");
addToZip(df.getName(), zos, storageInterface.getDatafile(df));
addToZip(df.getName(), zos, storageInterface.getDatafile(df.getLocation()));
}
} finally {
if (zos != null) {
......@@ -62,14 +63,15 @@ public class ZipHelper {
return new FileInputStream(tmpZipFile);
}
public static InputStream prepareZipForUserRequest(IdsRequestEntity request, StorageInterface storageInterface)
throws IOException {
public static InputStream prepareZipForUserRequest(IdsRequestEntity request,
StorageInterface storageInterface) throws IOException {
return prepareTemporaryZip(request.getPreparedId() + ".zip", request.getIcatDatasets(),
request.getIcatDatafiles(), request.isCompress(), storageInterface);
}
public static InputStream prepareTemporaryZip(String zipName, Collection<Dataset> datasets,
Collection<Datafile> datafiles, boolean compress, StorageInterface storageInterface) throws IOException {
Collection<Datafile> datafiles, boolean compress, StorageInterface storageInterface)
throws IOException {
File tmpZipFile = new File(PropertyHandler.getInstance().getTmpDir(), zipName);
ZipOutputStream zos = null;
try {
......@@ -87,12 +89,13 @@ public class ZipHelper {
zos.setLevel(0);
}
for (Datafile df : datafiles) {
addToZip("Datafile-" + df.getId(), zos, storageInterface.getDatafile(df));
addToZip("Datafile-" + df.getId(), zos,
storageInterface.getDatafile(df.getLocation()));
}
for (Dataset ds : datasets) {
for (Datafile df : ds.getDatafiles()) {
addToZip(String.format("Dataset-%s/Datafile-%s", ds.getId(), df.getId()), zos,
storageInterface.getDatafile(df));
storageInterface.getDatafile(df.getLocation()));
}
}
} finally {
......
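
ZipHelper delegates the per-file work to an addToZip helper that is not shown in this diff. A plausible sketch of such a helper, inferred from the calls above; the body is an assumption, not the actual implementation:

import java.io.IOException;
import java.io.InputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

import org.apache.commons.io.IOUtils;

public class AddToZipSketch {
    /** Copy 'in' into 'zos' as a new entry named 'entryName' (assumed behaviour). */
    public static void addToZip(String entryName, ZipOutputStream zos, InputStream in)
            throws IOException {
        try {
            zos.putNextEntry(new ZipEntry(entryName));
            IOUtils.copy(in, zos); // stream the datafile into the zip entry
            zos.closeEntry();
        } finally {
            in.close();
        }
    }
}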
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE glassfish-web-app PUBLIC "-//GlassFish.org//DTD GlassFish Application Server 3.1 Servlet 3.0//EN" "http://glassfish.org/dtds/glassfish-web-app_3_0-1.dtd">
<glassfish-web-app>
<context-root>ids</context-root>
</glassfish-web-app>
<?xml version="1.0" encoding="UTF-8"?>
<web-app version="3.0" xmlns="http://java.sun.com/xml/ns/javaee" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_3_0.xsd">
<web-app version="3.0" xmlns="http://java.sun.com/xml/ns/javaee" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_3_0.xsd">
<servlet>
<servlet-name>ServletAdaptor</servlet-name>
<servlet-class>com.sun.jersey.spi.container.servlet.ServletContainer</servlet-class>
......@@ -10,8 +11,18 @@
<url-pattern>/*</url-pattern>
</servlet-mapping>
<session-config>
<session-timeout>
30
</session-timeout>
<session-timeout>30</session-timeout>
</session-config>
<security-constraint>
<web-resource-collection>
<web-resource-name>secure</web-resource-name>
<url-pattern>/*</url-pattern>
</web-resource-collection>