67 changes: 33 additions & 34 deletions components/blitz/src/ome/formats/OMEROMetadataStoreClient.java
Expand Up @@ -46,7 +46,6 @@
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.ResourceBundle;
import java.util.Set;
import java.util.TreeMap;
import java.util.UUID;
Expand Down Expand Up @@ -74,8 +73,11 @@
import ome.formats.model.ShapeProcessor;
import ome.formats.model.TargetProcessor;
import ome.formats.model.WellProcessor;
import ome.system.UpgradeCheck;
import ome.util.LSID;
import ome.util.checksum.ChecksumProvider;
import ome.util.checksum.ChecksumProviderFactory;
import ome.util.checksum.ChecksumProviderFactoryImpl;
import ome.util.checksum.ChecksumType;
import ome.xml.model.AffineTransform;
import ome.xml.model.enums.FillRule;
import ome.xml.model.enums.FontFamily;
Expand Down Expand Up @@ -205,6 +207,7 @@
import omero.sys.ParametersI;
import omero.util.TempFileManager;

import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

Expand All @@ -222,6 +225,9 @@
public class OMEROMetadataStoreClient
implements MetadataStore, IMinMaxStore, IObjectContainerStore
{
/* checksum provider factory for verifying file integrity in upload */
private static final ChecksumProviderFactory checksumProviderFactory = new ChecksumProviderFactoryImpl();

/** Logger for this class */
private Log log = LogFactory.getLog(OMEROMetadataStoreClient.class);

Expand Down Expand Up @@ -1540,9 +1546,10 @@ public List<File> setArchiveScreeningDomain()
int originalFileIndex = 0;
for (String usedFilename : usedFiles)
{
if (!companionFiles.contains(usedFilename)) {
continue;
}
File usedFile = new File(usedFilename);
boolean isCompanionFile = companionFiles == null? false :
companionFiles.contains(usedFilename);
LinkedHashMap<Index, Integer> indexes =
new LinkedHashMap<Index, Integer>();
indexes.put(Index.ORIGINAL_FILE_INDEX, originalFileIndex);
Expand Down Expand Up @@ -1630,7 +1637,7 @@ public List<File> setArchive(boolean archive, boolean useMetadataFile)
}

// Create all original file objects for later population based on
// the existence or abscence of companion files and the archive
// the existence or absence of companion files and the archive
// flag. This increments the original file count by the number of
// files to actually be created.
for (int i = 0; i < usedFiles.length; i++)
Expand Down Expand Up @@ -1702,12 +1709,10 @@ else if (isCompanionFile)
* @param files An array of the files to filter.
* @return A collection of the filtered files.
*/
private List<String> filterFilenames(String[] files)
{
if (files == null)
{
return null;
}
private List<String> filterFilenames(String[] files) {
if (ArrayUtils.isEmpty(files)) {
return Collections.emptyList();
}
List<String> filteredFiles = new ArrayList<String>();
for (String file : files)
{
Expand Down Expand Up @@ -1810,10 +1815,11 @@ private OriginalFile byUUID(
* @param files Files to populate against an original file list.
* @param originalFileMap Map of absolute path against original file
* objects that we are to populate.
* @throws IOException in the event of an IO error during upload
* @throws ServerError propagated up from the raw file store proxy to which upload occurs
*/
public void writeFilesToFileStore(
List<File> files, Map<String, OriginalFile> originalFileMap)
{
public void writeFilesToFileStore(List<File> files, Map<String, OriginalFile> originalFileMap)
throws IOException, ServerError {
// Lookup each source file in our hash map and write it to the
// correct original file object server side.
byte[] buf = new byte[1048576]; // 1 MB buffer
Expand All @@ -1833,8 +1839,8 @@ public void writeFilesToFileStore(
}

FileInputStream stream = null;
try
{
try {
final ChecksumProvider hasher = checksumProviderFactory.getProvider(ChecksumType.SHA1);
stream = new FileInputStream(file);
rawFileStore.setFileId(originalFile.getId().getValue());
int rlen = 0;
Expand All @@ -1844,25 +1850,18 @@ public void writeFilesToFileStore(
rlen = stream.read(buf);
rawFileStore.write(buf, offset, rlen);
offset += rlen;
hasher.putBytes(buf, 0, rlen);
}
}
catch (Exception e)
{
log.error("I/O or server error populating file store.", e);
break;
}
finally
{
if (stream != null)
{
try
{
originalFile = rawFileStore.save();
final String clientHash = hasher.checksumAsString();
final String serverHash = originalFile.getSha1().getValue();
if (!clientHash.equals(serverHash)) {
throw new IOException("file checksum mismatch on upload: " + path +
" (client has " + clientHash + ", server has " + serverHash + ")");
}
} finally {
if (stream != null) {
stream.close();
}
catch (Exception e)
{
log.error("I/O error closing stream.", e);
}
}
}
}
Expand Down Expand Up @@ -2004,7 +2003,7 @@ public String getDefaultGroupName() throws ServerError
}

/**
* Retrieve teh default groups permission 'level'.
* Retrieve the default group's permission 'level'.
*
* @return ImportEvent's group level
* @throws ServerError
Expand Down
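For context, the OMEROMetadataStoreClient changes above all follow one pattern: hash each chunk on the client as it is written to the server, then compare the client-side digest with the SHA-1 the server reports after save(). The sketch below is a minimal, self-contained illustration of that pattern, not the actual OMERO client code. The ome.util.checksum types are the ones used in the diff; the RemoteFileStore interface is a hypothetical stand-in for the OMERO raw file store proxy.

```java
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;

import ome.util.checksum.ChecksumProvider;
import ome.util.checksum.ChecksumProviderFactory;
import ome.util.checksum.ChecksumProviderFactoryImpl;
import ome.util.checksum.ChecksumType;

/** Minimal sketch of a chunked upload with client-side SHA-1 verification. */
public class ChecksummedUploadSketch {

    /** Hypothetical stand-in for the server-side raw file store proxy. */
    public interface RemoteFileStore {
        void write(byte[] buf, long offset, int length) throws IOException;
        /** Finalizes the upload and returns the SHA-1 the server recorded. */
        String saveAndGetSha1() throws IOException;
    }

    private static final ChecksumProviderFactory FACTORY =
            new ChecksumProviderFactoryImpl();

    public static void upload(File file, RemoteFileStore store) throws IOException {
        byte[] buf = new byte[1048576]; // 1 MB buffer, as in the patch
        ChecksumProvider hasher = FACTORY.getProvider(ChecksumType.SHA1);
        FileInputStream stream = new FileInputStream(file);
        try {
            long offset = 0;
            int rlen;
            while ((rlen = stream.read(buf)) > 0) {
                store.write(buf, offset, rlen);
                hasher.putBytes(buf, 0, rlen); // hash exactly the bytes sent
                offset += rlen;
            }
            String clientHash = hasher.checksumAsString();
            String serverHash = store.saveAndGetSha1();
            if (!clientHash.equals(serverHash)) {
                throw new IOException("file checksum mismatch on upload: " + file
                        + " (client has " + clientHash
                        + ", server has " + serverHash + ")");
            }
        } finally {
            stream.close();
        }
    }
}
```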
Original file line number Diff line number Diff line change
Expand Up @@ -1118,7 +1118,7 @@ private OriginalFile createOriginalFile(File f, omero.RString mimetype) {
private Image createImage(String imageName, Pixels pix) {
Image image = new ImageI();
image.setName(rstring(imageName));
// Property must be not-null but will be overwritten on metadat import.
// Property must be not-null but will be overwritten on metadata import.
image.setAcquisitionDate(rtime(java.lang.System.currentTimeMillis()));
image.addPixels(pix);
return image;
Expand Down
2 changes: 2 additions & 0 deletions components/common/src/ome/util/checksum/ChecksumProvider.java
Expand Up @@ -61,6 +61,8 @@ public interface ChecksumProvider {
* empty array despite its lack of content.
*
* @param byteArray The input byte array.
* @param offset The offset in the byte array at which to start putting bytes.
* @param length The number of bytes to put, starting from the offset.
* @return ChecksumProvider
*/
ChecksumProvider putBytes(byte[] byteArray, int offset, int length);
Expand Down
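The putBytes(byte[], int, int) overload documented above matters when a read buffer is reused and only partially filled on each read. A small illustrative sketch (not taken from the OMERO code base), assuming only that the overload hashes exactly the given slice:

```java
import java.io.IOException;
import java.io.InputStream;

import ome.util.checksum.ChecksumProvider;
import ome.util.checksum.ChecksumProviderFactoryImpl;
import ome.util.checksum.ChecksumType;

/** Hashes a stream chunk by chunk, feeding only the valid bytes of each read. */
public final class StreamHashSketch {
    public static String sha1Of(InputStream in) throws IOException {
        ChecksumProvider sha1 =
                new ChecksumProviderFactoryImpl().getProvider(ChecksumType.SHA1);
        byte[] buf = new byte[8192];
        int rlen;
        while ((rlen = in.read(buf)) > 0) {
            // The reused buffer may be only partially filled; the offset/length
            // overload lets us hash exactly the bytes that were read.
            sha1.putBytes(buf, 0, rlen);
        }
        return sha1.checksumAsString();
    }
}
```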
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,10 @@
import ome.formats.importer.ImportLibrary;
import ome.formats.importer.OMEROWrapper;
import ome.system.UpgradeCheck;
import ome.util.checksum.ChecksumProvider;
import ome.util.checksum.ChecksumProviderFactory;
import ome.util.checksum.ChecksumProviderFactoryImpl;
import ome.util.checksum.ChecksumType;
import omero.ApiUsageException;
import omero.AuthenticationException;
import omero.ConcurrencyException;
Expand Down Expand Up @@ -314,6 +318,9 @@ class OMEROGateway
/** The collection of scripts that have a UI available. */
private static final List<String> SCRIPTS_NOT_AVAILABLE_TO_USER;

/* checksum provider factory for verifying file integrity in upload */
private static final ChecksumProviderFactory checksumProviderFactory = new ChecksumProviderFactoryImpl();

static {
SUPPORTED_SPECIAL_CHAR = new ArrayList<Character>();
SUPPORTED_SPECIAL_CHAR.add(Character.valueOf('-'));
Expand Down Expand Up @@ -4249,6 +4256,7 @@ private synchronized OriginalFile upload(SecurityContext ctx, File file,
}
byte[] buf = new byte[INC];
FileInputStream stream = null;
final ChecksumProvider hasher = checksumProviderFactory.getProvider(ChecksumType.SHA1);
try {
stream = new FileInputStream(file);
long pos = 0;
Expand All @@ -4259,11 +4267,18 @@ private synchronized OriginalFile upload(SecurityContext ctx, File file,
pos += rlen;
bbuf = ByteBuffer.wrap(buf);
bbuf.limit(rlen);
hasher.putBytes(bbuf);
}
stream.close();
OriginalFile f = store.save();
closeService(ctx, store);
if (f != null) save = f;
final String clientHash = hasher.checksumAsString();
final String serverHash = save.getSha1().getValue();
if (!clientHash.equals(serverHash)) {
throw new ImportException("file checksum mismatch on upload: " + file +
" (client has " + clientHash + ", server has " + serverHash + ")");
}
} catch (Exception e) {
try {
if (fileCreated) deleteObject(ctx, save);
Expand Down
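The OMEROGateway change above uses the ByteBuffer form of putBytes rather than the byte[]/offset/length form: the read buffer is wrapped and its limit set to the number of bytes actually read, so only the valid portion is hashed. A brief illustrative sketch under that assumption (the class name and sample data are made up for the example):

```java
import java.nio.ByteBuffer;

import ome.util.checksum.ChecksumProvider;
import ome.util.checksum.ChecksumProviderFactoryImpl;
import ome.util.checksum.ChecksumType;

/** Sketch of ByteBuffer-based hashing as used in the OMEROGateway upload path. */
public final class ByteBufferHashSketch {
    public static void main(String[] args) {
        ChecksumProvider sha1 =
                new ChecksumProviderFactoryImpl().getProvider(ChecksumType.SHA1);
        byte[] buf = "example chunk".getBytes();
        int rlen = buf.length; // in the real code, the number of bytes just read
        ByteBuffer bbuf = ByteBuffer.wrap(buf);
        bbuf.limit(rlen); // hash only the valid portion of the buffer
        sha1.putBytes(bbuf);
        System.out.println(sha1.checksumAsString());
    }
}
```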
1 change: 1 addition & 0 deletions components/insight/ivy.xml
Expand Up @@ -47,6 +47,7 @@
<dependency org="insight" name="ehcache" rev="${versions.ehcache}"/>
<dependency org="insight" name="gicentreUtils" rev="${versions.gicentreUtils}"/>
<dependency org="insight" name="gluegen-rt" rev="${versions.jogl}"/>
<dependency org="insight" name="guava-jdk5" rev="${versions.guava}"/>
<dependency org="insight" name="ij" rev="${versions.ij}"/>
<dependency org="insight" name="jaxrpc" rev="${versions.jaxrpc}"/>
<dependency org="insight" name="jcommon" rev="${versions.jcommon}"/>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -457,6 +457,7 @@ def write_to_omero(self):
log.info('Table initialized with %d columns.' % (len(self.columns)))
table.addData(self.columns)
log.info('Added data column data.')
table.close()
file_annotation = FileAnnotationI()
file_annotation.ns = \
rstring('openmicroscopy.org/omero/bulk_annotations')
Expand Down
1 change: 1 addition & 0 deletions components/tools/OmeroPy/src/omero/util/populate_roi.py
Expand Up @@ -632,6 +632,7 @@ def update_table(self, columns):
column_report[column.name] = len(column.values)
log.debug("Column report: %r" % column_report)
self.table.addData(columns)
self.table.close()
Member Author commented: causes SHA1 to be written for the table file

log.info("Table update took %sms" % (int(time.time() * 1000) - t0))

def create_file_annotation(self, set_of_columns):
Expand Down