Skip to content
Merged
Show file tree
Hide file tree
Changes from 6 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,14 @@
PROPS_FOR_PD_CHARACTER = ["FrequencyLogNames", "WaveLengthLogNames"]


def determineCompression(filename, compression, chunking, absorption):
    """Decide whether events should be compressed while loading *filename*.

    Compression is only worthwhile when the whole file is read at once and no
    absorption correction workspace is in play, so any of the following
    disables it:

    * ``compression == 0.0`` — the feature is turned off by the caller
    * ``chunking > 0.0`` — the file will be read in chunks
    * ``absorption`` is truthy — an absorption workspace is supplied

    Otherwise compress when the file size (in GiB) exceeds the
    ``compression`` threshold.

    :param filename: path to the event file on disk
    :param compression: minimum file size in GiB to trigger compression; 0 disables
    :param chunking: maximum chunk size in GiB; >0 means chunked reading
    :param absorption: absorption workspace (or None/empty when unused)
    :return: True if the loader should compress events, False otherwise
    """
    if compression == 0.0 or chunking > 0.0 or absorption:
        return False
    sizeGiB = os.path.getsize(filename) / 1024.0 / 1024.0 / 1024.0
    # bug fix: the original fell through and implicitly returned None when the
    # file was at or below the threshold; always return an explicit bool
    return sizeGiB > compression


def determineChunking(filename, chunkSize):
# chunkSize=0 signifies that the user wants to read the whole file
if chunkSize == 0.0:
Expand Down Expand Up @@ -147,6 +155,7 @@ def summary(self):
def PyInit(self):
self.declareProperty(MultipleFileProperty(name="Filename", extensions=EXTENSIONS_NXS), "Files to combine in reduction")
self.declareProperty("MaxChunkSize", 0.0, "Specify maximum Gbytes of file to read in one chunk. Default is whole file.")
self.declareProperty("MinSizeCompressOnLoad", 0.0, "Specify the file size in GB to use compression")
self.declareProperty("FilterBadPulses", 0.0, doc="Filter out events measured while proton charge is more than 5% below average")

self.declareProperty(
Expand Down Expand Up @@ -213,6 +222,11 @@ def __createLoader(self, filename, wkspname, progstart=None, progstop=None, skip
loader.setPropertyValue("Filename", filename)
loader.setPropertyValue("OutputWorkspace", wkspname)

if self.do_compression:
self.kwargs = self.__getAlignAndFocusArgs()
loader.setPropertyValue("CompressTolerance", str(self.kwargs["CompressTolerance"]))
loader.setPropertyValue("CompressBinningMode", self.kwargs["CompressBinningMode"])

if skipLoadingLogs:
if self.__loaderName != "LoadEventNexus":
raise RuntimeError("Cannot set LoadLogs=False in {}".format(self.__loaderName))
Expand Down Expand Up @@ -664,6 +678,7 @@ def PyExec(self):
self.__loaderName = "Load" # set the loader to be generic on first load
self.filterBadPulses = self.getProperty("FilterBadPulses").value
self.chunkSize = self.getProperty("MaxChunkSize").value
self.compression_threshold = self.getProperty("MinSizeCompressOnLoad").value
self.absorption = self.getProperty("AbsorptionWorkspace").value
self.charac = self.getProperty("Characterizations").value
self.useCaching = len(self.getProperty("CacheDir").value) > 0
Expand All @@ -677,6 +692,10 @@ def PyExec(self):
# empty string means it is not used
finalunfocusname = self.getPropertyValue("UnfocussedWorkspace")

        # determine whether compress-on-load should be used
self.do_compression = determineCompression(
filename=self._filenames[0], compression=self.compression_threshold, chunking=self.chunkSize, absorption=self.absorption
)
if self.useCaching:
# unfocus check only matters if caching is requested
if finalunfocusname != "":
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -213,6 +213,32 @@ def validate(self):
return ("with_chunks", "no_chunks")


class CompressedCompare(systemtesting.MantidSystemTest):
    """Exercise compress-on-load in AlignAndFocusPowderFromFiles.

    Closely mirrors SNAPRedux.Simple, but sets a tiny MinSizeCompressOnLoad
    so that even the small SNAP_45874 file triggers compression on load.
    """

    def requiredMemoryMB(self):
        # 24 GB expressed in MB
        return 24 * 1024

    def runTest(self):
        grouping_wksp = "SNAP_compress_params"

        # build a two-spectrum grouping from the instrument definition
        CreateGroupingWorkspace(
            InstrumentFilename="SNAP_Definition.xml", GroupDetectorsBy="Group", OutputWorkspace=grouping_wksp
        )

        # 11MB file; the tiny threshold below forces compress-on-load
        common_args = {"Filename": "SNAP_45874", "Params": (0.5, -0.004, 7), "GroupingWorkspace": grouping_wksp}
        AlignAndFocusPowderFromFiles(
            OutputWorkspace="compress1", MaxChunkSize=0.0, CompressTolerance=1e-2, MinSizeCompressOnLoad=1e-14, **common_args
        )

    def validateMethod(self):
        # no workspace comparison is performed for this test
        return None

    def validate(self):
        return None


class UseCache(systemtesting.MantidSystemTest):
cal_file = "PG3_FERNS_d4832_2011_08_24.cal"
char_file = "PG3_characterization_2012_02_23-HR-ILL.txt"
Expand Down