diff --git a/Framework/Algorithms/inc/MantidAlgorithms/GeneratePythonFitScript.h b/Framework/Algorithms/inc/MantidAlgorithms/GeneratePythonFitScript.h index f11625562000..c55139fffc16 100644 --- a/Framework/Algorithms/inc/MantidAlgorithms/GeneratePythonFitScript.h +++ b/Framework/Algorithms/inc/MantidAlgorithms/GeneratePythonFitScript.h @@ -35,7 +35,7 @@ class MANTID_ALGORITHMS_DLL GeneratePythonFitScript : public API::Algorithm { std::size_t getNumberOfDomainsInFunction(Mantid::API::IFunction_sptr const &function) const; std::string generateFitScript(std::string const &fittingType) const; - std::string generateVariableSetupCode() const; + std::string generateVariableSetupCode(std::string const &filename) const; std::string generateSimultaneousFitCode() const; std::string generateFunctionString() const; diff --git a/Framework/Algorithms/src/GeneratePythonFitScript.cpp b/Framework/Algorithms/src/GeneratePythonFitScript.cpp index af098eb0cc5d..d18af4294b43 100644 --- a/Framework/Algorithms/src/GeneratePythonFitScript.cpp +++ b/Framework/Algorithms/src/GeneratePythonFitScript.cpp @@ -218,12 +218,15 @@ std::size_t GeneratePythonFitScript::getNumberOfDomainsInFunction(IFunction_sptr std::string GeneratePythonFitScript::generateFitScript(std::string const &fittingType) const { std::string generatedScript; - generatedScript += generateVariableSetupCode(); - generatedScript += "\n"; - if (fittingType == "Sequential") + if (fittingType == "Sequential") { + generatedScript += generateVariableSetupCode("GeneratePythonFitScript_SequentialVariableSetup.py.in"); + generatedScript += "\n"; generatedScript += getFileContents("GeneratePythonFitScript_SequentialFit.py.in"); - else if (fittingType == "Simultaneous") + } else if (fittingType == "Simultaneous") { + generatedScript += generateVariableSetupCode("GeneratePythonFitScript_SimultaneousVariableSetup.py.in"); + generatedScript += "\n"; generatedScript += generateSimultaneousFitCode(); + } bool plotOutput = 
getProperty("PlotOutput"); if (plotOutput) { @@ -239,8 +242,8 @@ std::string GeneratePythonFitScript::generateFitScript(std::string const &fittin return generatedScript; } -std::string GeneratePythonFitScript::generateVariableSetupCode() const { - std::string code = getFileContents("GeneratePythonFitScript_VariableSetup.py.in"); +std::string GeneratePythonFitScript::generateVariableSetupCode(std::string const &filename) const { + std::string code = getFileContents(filename); std::vector const inputWorkspaces = getProperty("InputWorkspaces"); std::vector const workspaceIndices = getProperty("WorkspaceIndices"); diff --git a/Framework/Algorithms/test/NormaliseByDetectorTest.h b/Framework/Algorithms/test/NormaliseByDetectorTest.h index b502b83d2dbd..f430588f0c7f 100644 --- a/Framework/Algorithms/test/NormaliseByDetectorTest.h +++ b/Framework/Algorithms/test/NormaliseByDetectorTest.h @@ -20,6 +20,8 @@ #include "MantidFrameworkTestHelpers/ScopedFileHelper.h" #include "MantidFrameworkTestHelpers/WorkspaceCreationHelper.h" +#include + using namespace Mantid; using namespace Mantid::Algorithms; using namespace Mantid::API; @@ -552,22 +554,24 @@ class NormaliseByDetectorTestPerformance : public CxxTest::TestSuite { // Create a parameter file, with a root equation that will apply to all // detectors. - const std::string parameterFileContents = boost::str(boost::format("\n\ - \n\ - \n\ - \n\ - \n\ - \n\ - \n\ - \n\ - \n\ - \n\ - \n\ - \n\ - \n") % instrumentName); + std::ostringstream parameterFileStream; + parameterFileStream << "\n" + << "\n" + << " \n" + << " \n" + << " \n" + << " \n" + << " \n" + << " \n" + << " \n" + << " \n" + << " \n" + << " \n" + << "\n"; // Create a temporary Instrument Parameter file. - ScopedFile file(parameterFileContents, instrumentName + "_Parameters.xml"); + ScopedFile file(parameterFileStream.str(), instrumentName + "_Parameters.xml"); // Apply parameter file to workspace. 
apply_instrument_parameter_file_to_workspace(ws, file); diff --git a/Framework/Algorithms/test/NormaliseToUnityTest.py b/Framework/Algorithms/test/NormaliseToUnityTest.py index 13ec247eb1ef..8eb02fa22814 100644 --- a/Framework/Algorithms/test/NormaliseToUnityTest.py +++ b/Framework/Algorithms/test/NormaliseToUnityTest.py @@ -5,7 +5,8 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + import unittest -from mantid.simpleapi import * +from mantid.api import mtd +from mantid.simpleapi import CreateWorkspace, DeleteWorkspace, NormaliseToUnity class NormaliseToUnityTest(unittest.TestCase): diff --git a/Framework/DataObjects/scripts/analysis.py b/Framework/DataObjects/scripts/analysis.py index ae8484a75eb7..be82548621bc 100644 --- a/Framework/DataObjects/scripts/analysis.py +++ b/Framework/DataObjects/scripts/analysis.py @@ -12,7 +12,7 @@ # This line has to be first for some reason. # from enthought.mayavi import mlab -from pylab import * +from pylab import figure, legend, plot, title, savefig, show, xlabel, ylabel import pickle from scipy import stats diff --git a/Framework/PythonInterface/plugins/algorithms/AngularAutoCorrelationsSingleAxis.py b/Framework/PythonInterface/plugins/algorithms/AngularAutoCorrelationsSingleAxis.py index e3e81cb526c7..9a5fc6713bbb 100644 --- a/Framework/PythonInterface/plugins/algorithms/AngularAutoCorrelationsSingleAxis.py +++ b/Framework/PythonInterface/plugins/algorithms/AngularAutoCorrelationsSingleAxis.py @@ -5,9 +5,9 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=too-many-branches,too-many-locals, invalid-name -from mantid.simpleapi import * -from mantid.kernel import * -from mantid.api import * +from mantid.api import AlgorithmFactory, FileAction, FileProperty, PythonAlgorithm, WorkspaceProperty +from mantid.kernel import Direction +from mantid.simpleapi import logger, 
CreateWorkspace from scipy.io import netcdf import numpy as np diff --git a/Framework/PythonInterface/plugins/algorithms/AngularAutoCorrelationsTwoAxes.py b/Framework/PythonInterface/plugins/algorithms/AngularAutoCorrelationsTwoAxes.py index fc2f771ca0f9..048b483cbb6d 100644 --- a/Framework/PythonInterface/plugins/algorithms/AngularAutoCorrelationsTwoAxes.py +++ b/Framework/PythonInterface/plugins/algorithms/AngularAutoCorrelationsTwoAxes.py @@ -5,9 +5,9 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=too-many-branches,too-many-locals, invalid-name -from mantid.simpleapi import * -from mantid.kernel import * -from mantid.api import * +from mantid.api import AlgorithmFactory, FileAction, FileProperty, PythonAlgorithm, WorkspaceProperty +from mantid.kernel import Direction +from mantid.simpleapi import logger, CreateWorkspace from scipy.io import netcdf import numpy as np diff --git a/Framework/PythonInterface/plugins/algorithms/ApplyNegMuCorrection.py b/Framework/PythonInterface/plugins/algorithms/ApplyNegMuCorrection.py index 3112569c2ebe..37f48bc62b88 100644 --- a/Framework/PythonInterface/plugins/algorithms/ApplyNegMuCorrection.py +++ b/Framework/PythonInterface/plugins/algorithms/ApplyNegMuCorrection.py @@ -4,9 +4,8 @@ # NScD Oak Ridge National Laboratory, European Spallation Source, # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + -from mantid.api import * # PythonAlgorithm, registerAlgorithm, WorkspaceProperty -from mantid.simpleapi import * -from mantid.kernel import * +from mantid.api import AlgorithmFactory, FileAction, FileProperty, PythonAlgorithm +from mantid.simpleapi import CreateWorkspace, DeleteWorkspace, Plus, Rebin, RenameWorkspaces, Load # pylint: disable=no-init, too-many-arguments diff --git a/Framework/PythonInterface/plugins/algorithms/CalculateFlux.py 
b/Framework/PythonInterface/plugins/algorithms/CalculateFlux.py index 5c8d57e064f1..375e95eb251f 100644 --- a/Framework/PythonInterface/plugins/algorithms/CalculateFlux.py +++ b/Framework/PythonInterface/plugins/algorithms/CalculateFlux.py @@ -4,9 +4,9 @@ # NScD Oak Ridge National Laboratory, European Spallation Source, # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + -from mantid.api import PythonAlgorithm, MatrixWorkspaceProperty, InstrumentValidator +from mantid.api import AlgorithmFactory, PythonAlgorithm, MatrixWorkspaceProperty, InstrumentValidator from mantid.kernel import Direction, FloatBoundedValidator -from mantid.simpleapi import * +from mantid.simpleapi import CloneWorkspace, FindDetectorsInShape, GroupDetectors, Rebin import numpy as np diff --git a/Framework/PythonInterface/plugins/algorithms/CalculateSampleTransmission.py b/Framework/PythonInterface/plugins/algorithms/CalculateSampleTransmission.py index f9cb8615fa1d..de506a7653d8 100644 --- a/Framework/PythonInterface/plugins/algorithms/CalculateSampleTransmission.py +++ b/Framework/PythonInterface/plugins/algorithms/CalculateSampleTransmission.py @@ -5,9 +5,9 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=no-init,invalid-name -from mantid.simpleapi import * -from mantid.api import * -from mantid.kernel import * +from mantid.api import mtd, AlgorithmFactory, MatrixWorkspaceProperty, PythonAlgorithm +from mantid.kernel import Direction, MaterialBuilder, StringListValidator, StringMandatoryValidator +from mantid.simpleapi import ConvertToPointData, CreateWorkspace, Rebin, SetSampleMaterial import math import numpy as np diff --git a/Framework/PythonInterface/plugins/algorithms/CleanFileCache.py b/Framework/PythonInterface/plugins/algorithms/CleanFileCache.py index a6dc35c97493..26e6752d0e89 100644 --- 
a/Framework/PythonInterface/plugins/algorithms/CleanFileCache.py +++ b/Framework/PythonInterface/plugins/algorithms/CleanFileCache.py @@ -5,8 +5,8 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=no-init,invalid-name,bare-except,too-many-arguments,multiple-statements -from mantid.api import * -from mantid.kernel import * +from mantid.api import AlgorithmFactory, PythonAlgorithm +from mantid.kernel import ConfigService, Direction import os diff --git a/Framework/PythonInterface/plugins/algorithms/ClipPeaks.py b/Framework/PythonInterface/plugins/algorithms/ClipPeaks.py index c22c13fc2c3c..b49c9c5cedb3 100644 --- a/Framework/PythonInterface/plugins/algorithms/ClipPeaks.py +++ b/Framework/PythonInterface/plugins/algorithms/ClipPeaks.py @@ -4,8 +4,8 @@ # NScD Oak Ridge National Laboratory, European Spallation Source # & Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + -from mantid.api import PythonAlgorithm, WorkspaceProperty -from mantid.simpleapi import * +from mantid.api import AlgorithmFactory, PythonAlgorithm, WorkspaceProperty +from mantid.simpleapi import CloneWorkspace from mantid.kernel import Direction, IntBoundedValidator import numpy as np diff --git a/Framework/PythonInterface/plugins/algorithms/CollectHB3AExperimentInfo.py b/Framework/PythonInterface/plugins/algorithms/CollectHB3AExperimentInfo.py index f73617cff4e0..e36f28bf98df 100644 --- a/Framework/PythonInterface/plugins/algorithms/CollectHB3AExperimentInfo.py +++ b/Framework/PythonInterface/plugins/algorithms/CollectHB3AExperimentInfo.py @@ -7,8 +7,7 @@ # pylint: disable=no-init,invalid-name,too-many-instance-attributes import mantid import mantid.simpleapi as api -from mantid.api import * -from mantid.kernel import * +from mantid.api import AlgorithmFactory, FileAction, FileProperty, PythonAlgorithm import os diff --git 
a/Framework/PythonInterface/plugins/algorithms/ComputeIncoherentDOS.py b/Framework/PythonInterface/plugins/algorithms/ComputeIncoherentDOS.py index 6e6c7e3af526..3b00e1f27fc2 100644 --- a/Framework/PythonInterface/plugins/algorithms/ComputeIncoherentDOS.py +++ b/Framework/PythonInterface/plugins/algorithms/ComputeIncoherentDOS.py @@ -8,7 +8,7 @@ from scipy import constants from mantid.kernel import CompositeValidator, Direction, FloatBoundedValidator from mantid.api import AlgorithmFactory, CommonBinsValidator, HistogramValidator, MatrixWorkspaceProperty, PythonAlgorithm -from mantid.simpleapi import * +from mantid.simpleapi import CloneWorkspace, Rebin, Rebin2D, ScaleX, Transpose class UnitError(ValueError): diff --git a/Framework/PythonInterface/plugins/algorithms/ConjoinFiles.py b/Framework/PythonInterface/plugins/algorithms/ConjoinFiles.py index 243acaa3eae6..b0517c5ca191 100644 --- a/Framework/PythonInterface/plugins/algorithms/ConjoinFiles.py +++ b/Framework/PythonInterface/plugins/algorithms/ConjoinFiles.py @@ -5,9 +5,9 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=no-init,invalid-name -from mantid.api import * -from mantid.kernel import * -from mantid.simpleapi import * +from mantid.api import mtd, AlgorithmFactory, FileAction, FileProperty, PythonAlgorithm, WorkspaceProperty +from mantid.kernel import config, logger, Direction, IntArrayBoundedValidator, IntArrayProperty +from mantid.simpleapi import ConjoinWorkspaces, DeleteWorkspace, LoadGSS import os diff --git a/Framework/PythonInterface/plugins/algorithms/ConjoinSpectra.py b/Framework/PythonInterface/plugins/algorithms/ConjoinSpectra.py index c11164a2a23d..e1255b12d7f2 100644 --- a/Framework/PythonInterface/plugins/algorithms/ConjoinSpectra.py +++ b/Framework/PythonInterface/plugins/algorithms/ConjoinSpectra.py @@ -5,9 +5,9 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - 
License - Identifier: GPL - 3.0 + # pylint: disable=no-init,invalid-name -from mantid.api import * -from mantid.kernel import * -from mantid.simpleapi import * +from mantid.api import mtd, AlgorithmFactory, PythonAlgorithm, TextAxis, WorkspaceGroup, WorkspaceProperty +from mantid.kernel import logger, Direction, StringListValidator, StringMandatoryValidator +from mantid.simpleapi import ConjoinWorkspaces, DeleteWorkspace, ExtractSingleSpectrum, RenameWorkspace class ConjoinSpectra(PythonAlgorithm): diff --git a/Framework/PythonInterface/plugins/algorithms/CreateCacheFilename.py b/Framework/PythonInterface/plugins/algorithms/CreateCacheFilename.py index 5d55987785fc..fcc66855cfed 100644 --- a/Framework/PythonInterface/plugins/algorithms/CreateCacheFilename.py +++ b/Framework/PythonInterface/plugins/algorithms/CreateCacheFilename.py @@ -5,8 +5,8 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=no-init,invalid-name,bare-except,too-many-arguments -from mantid.api import * -from mantid.kernel import * +from mantid.api import AlgorithmFactory, FileAction, FileProperty, PythonAlgorithm +from mantid.kernel import ConfigService, Direction, StringArrayProperty import mantid import os diff --git a/Framework/PythonInterface/plugins/algorithms/CreateLeBailFitInput.py b/Framework/PythonInterface/plugins/algorithms/CreateLeBailFitInput.py index 79afd524ef7c..0142904a4be5 100644 --- a/Framework/PythonInterface/plugins/algorithms/CreateLeBailFitInput.py +++ b/Framework/PythonInterface/plugins/algorithms/CreateLeBailFitInput.py @@ -6,24 +6,28 @@ # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=no-init,invalid-name import mantid.simpleapi as api -from mantid.api import * -from mantid.kernel import * - -_OUTPUTLEVEL = "NOOUTPUT" +from mantid.api import ( + AlgorithmFactory, + AnalysisDataService, + FileAction, + FileProperty, + ITableWorkspaceProperty, + PythonAlgorithm, + 
WorkspaceFactory, +) +from mantid.kernel import Direction, FloatBoundedValidator, IntArrayBoundedValidator, IntArrayProperty class CreateLeBailFitInput(PythonAlgorithm): """Create the input TableWorkspaces for LeBail Fitting""" def category(self): - """ """ return "Diffraction\\Fitting;Utility\\Workspaces" def seeAlso(self): return ["LeBailFit"] def name(self): - """ """ return "CreateLeBailFitInput" def summary(self): diff --git a/Framework/PythonInterface/plugins/algorithms/CylinderPaalmanPingsCorrection2.py b/Framework/PythonInterface/plugins/algorithms/CylinderPaalmanPingsCorrection2.py index 839eb36aba37..59c8be1b7a93 100644 --- a/Framework/PythonInterface/plugins/algorithms/CylinderPaalmanPingsCorrection2.py +++ b/Framework/PythonInterface/plugins/algorithms/CylinderPaalmanPingsCorrection2.py @@ -7,8 +7,16 @@ # pylint: disable=no-init,too-many-locals,too-many-instance-attributes,too-many-arguments,invalid-name import math import numpy as np -from mantid.simpleapi import * +from mantid.simpleapi import ( + AddSampleLogMultiple, + CreateWorkspace, + DeleteWorkspace, + ExtractSingleSpectrum, + GroupWorkspaces, + SplineInterpolation, +) from mantid.api import ( + mtd, PythonAlgorithm, AlgorithmFactory, PropertyMode, diff --git a/Framework/PythonInterface/plugins/algorithms/EnggCalibrate.py b/Framework/PythonInterface/plugins/algorithms/EnggCalibrate.py index 9f61a94dd69f..7ee14eb93595 100644 --- a/Framework/PythonInterface/plugins/algorithms/EnggCalibrate.py +++ b/Framework/PythonInterface/plugins/algorithms/EnggCalibrate.py @@ -4,8 +4,17 @@ # NScD Oak Ridge National Laboratory, European Spallation Source, # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + -from mantid.kernel import * -from mantid.api import * +from mantid.api import ( + AlgorithmFactory, + FileAction, + FileProperty, + ITableWorkspaceProperty, + MatrixWorkspaceProperty, + Progress, + PropertyMode, + PythonAlgorithm, +) +from 
mantid.kernel import Direction, FloatArrayProperty, StringListValidator import mantid.simpleapi as mantid diff --git a/Framework/PythonInterface/plugins/algorithms/EnggCalibrateFull.py b/Framework/PythonInterface/plugins/algorithms/EnggCalibrateFull.py index 681ea0febe25..40b5615d7887 100644 --- a/Framework/PythonInterface/plugins/algorithms/EnggCalibrateFull.py +++ b/Framework/PythonInterface/plugins/algorithms/EnggCalibrateFull.py @@ -6,8 +6,17 @@ # SPDX - License - Identifier: GPL - 3.0 + import math -from mantid.kernel import * -from mantid.api import * +from mantid.api import ( + AlgorithmFactory, + FileAction, + FileProperty, + ITableWorkspaceProperty, + MatrixWorkspaceProperty, + Progress, + PropertyMode, + PythonAlgorithm, +) +from mantid.kernel import Direction, FloatArrayProperty, StringListValidator, V3D from mantid.simpleapi import SaveAscii, logger import EnggUtils diff --git a/Framework/PythonInterface/plugins/algorithms/EnggFitPeaks.py b/Framework/PythonInterface/plugins/algorithms/EnggFitPeaks.py index 694f16a0871b..71768085f9c8 100644 --- a/Framework/PythonInterface/plugins/algorithms/EnggFitPeaks.py +++ b/Framework/PythonInterface/plugins/algorithms/EnggFitPeaks.py @@ -6,9 +6,18 @@ # SPDX - License - Identifier: GPL - 3.0 + import math -from mantid.kernel import * -from mantid.api import * -from mantid.simpleapi import * +from mantid.api import ( + AlgorithmFactory, + FileAction, + FileProperty, + FunctionFactory, + ITableWorkspaceProperty, + MatrixWorkspaceProperty, + Progress, + PythonAlgorithm, +) +from mantid.kernel import Direction, FloatArrayProperty +from mantid.simpleapi import ConvertUnits, CreateEmptyTableWorkspace, CreateWorkspace, FindPeaks, Fit class EnggFitPeaks(PythonAlgorithm): @@ -536,7 +545,7 @@ def _find_peak_center_in_params(self, fitted_params): return fitted_params["X0"] else: raise ValueError( - "Inconsistency found. 
I do not know how to deal with centers of peaks " "of types other than {0}".format(PEAK_TYPE) + "Inconsistency found. I do not know how to deal with centers of peaks " "of types other than {0}".format(self.PEAK_TYPE) ) def _find_peak_intensity_in_params(self, fitted_params): @@ -550,7 +559,7 @@ def _find_peak_intensity_in_params(self, fitted_params): return fitted_params["I"] else: raise ValueError( - "Inconsistency found. I do not know how to deal with intensities of " "peaks of types other than {0}".format(PEAK_TYPE) + "Inconsistency found. I do not know how to deal with intensities of " "peaks of types other than {0}".format(self.PEAK_TYPE) ) def _b2bexp_is_acceptable(self, fitted_params): diff --git a/Framework/PythonInterface/plugins/algorithms/EnggFitTOFFromPeaks.py b/Framework/PythonInterface/plugins/algorithms/EnggFitTOFFromPeaks.py index 3b9726b0b9fe..3455fccd631c 100644 --- a/Framework/PythonInterface/plugins/algorithms/EnggFitTOFFromPeaks.py +++ b/Framework/PythonInterface/plugins/algorithms/EnggFitTOFFromPeaks.py @@ -4,9 +4,9 @@ # NScD Oak Ridge National Laboratory, European Spallation Source, # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + -from mantid.kernel import * -from mantid.api import * -from mantid.simpleapi import * +from mantid.api import AlgorithmFactory, ITableWorkspaceProperty, PythonAlgorithm +from mantid.kernel import Direction +from mantid.simpleapi import ConvertTableToMatrixWorkspace, Fit class EnggFitTOFFromPeaks(PythonAlgorithm): diff --git a/Framework/PythonInterface/plugins/algorithms/EnggFocus.py b/Framework/PythonInterface/plugins/algorithms/EnggFocus.py index 6291c07aa4f3..0bce9a734d12 100644 --- a/Framework/PythonInterface/plugins/algorithms/EnggFocus.py +++ b/Framework/PythonInterface/plugins/algorithms/EnggFocus.py @@ -4,8 +4,8 @@ # NScD Oak Ridge National Laboratory, European Spallation Source, # Institut Laue - Langevin & CSNS, Institute of High Energy 
Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + -from mantid.kernel import * -from mantid.api import * +from mantid.api import AlgorithmFactory, ITableWorkspaceProperty, MatrixWorkspaceProperty, Progress, PropertyMode, PythonAlgorithm +from mantid.kernel import logger, Direction, FloatArrayProperty, StringListValidator import EnggUtils diff --git a/Framework/PythonInterface/plugins/algorithms/EnggSaveGSASIIFitResultsToHDF5.py b/Framework/PythonInterface/plugins/algorithms/EnggSaveGSASIIFitResultsToHDF5.py index b25ddd831448..522b360af061 100644 --- a/Framework/PythonInterface/plugins/algorithms/EnggSaveGSASIIFitResultsToHDF5.py +++ b/Framework/PythonInterface/plugins/algorithms/EnggSaveGSASIIFitResultsToHDF5.py @@ -4,8 +4,8 @@ # NScD Oak Ridge National Laboratory, European Spallation Source, # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + -from mantid.api import * -from mantid.kernel import * +from mantid.api import mtd, AlgorithmFactory, FileAction, FileProperty, PythonAlgorithm +from mantid.kernel import FloatArrayProperty, IntArrayProperty, StringArrayProperty, StringListValidator import h5py import numpy diff --git a/Framework/PythonInterface/plugins/algorithms/EnggSaveSinglePeakFitResultsToHDF5.py b/Framework/PythonInterface/plugins/algorithms/EnggSaveSinglePeakFitResultsToHDF5.py index d7852aa97347..f45f543c6244 100644 --- a/Framework/PythonInterface/plugins/algorithms/EnggSaveSinglePeakFitResultsToHDF5.py +++ b/Framework/PythonInterface/plugins/algorithms/EnggSaveSinglePeakFitResultsToHDF5.py @@ -4,8 +4,8 @@ # NScD Oak Ridge National Laboratory, European Spallation Source, # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + -from mantid.api import * -from mantid.kernel import * +from mantid.api import mtd, AlgorithmFactory, FileAction, FileProperty, PythonAlgorithm +from mantid.kernel import IntArrayProperty, 
StringArrayProperty import h5py diff --git a/Framework/PythonInterface/plugins/algorithms/EnggVanadiumCorrections.py b/Framework/PythonInterface/plugins/algorithms/EnggVanadiumCorrections.py index 3fffac566586..ed7a42a024a7 100644 --- a/Framework/PythonInterface/plugins/algorithms/EnggVanadiumCorrections.py +++ b/Framework/PythonInterface/plugins/algorithms/EnggVanadiumCorrections.py @@ -4,8 +4,8 @@ # NScD Oak Ridge National Laboratory, European Spallation Source, # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + -from mantid.kernel import * -from mantid.api import * +from mantid.api import mtd, AlgorithmFactory, ITableWorkspaceProperty, MatrixWorkspaceProperty, Progress, PropertyMode, PythonAlgorithm +from mantid.kernel import Direction, IntBoundedValidator import mantid.simpleapi as mantid import numpy as np diff --git a/Framework/PythonInterface/plugins/algorithms/ExaminePowderDiffProfile.py b/Framework/PythonInterface/plugins/algorithms/ExaminePowderDiffProfile.py index 8afdfb66a1e0..ce7d92e9e4b0 100644 --- a/Framework/PythonInterface/plugins/algorithms/ExaminePowderDiffProfile.py +++ b/Framework/PythonInterface/plugins/algorithms/ExaminePowderDiffProfile.py @@ -6,10 +6,16 @@ # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=no-init, too-many-instance-attributes import mantid.simpleapi as api -from mantid.api import * -from mantid.kernel import * - -_OUTPUTLEVEL = "NOOUTPUT" +from mantid.api import ( + AlgorithmFactory, + FileAction, + FileProperty, + ITableWorkspaceProperty, + MatrixWorkspaceProperty, + PropertyMode, + PythonAlgorithm, +) +from mantid.kernel import Direction, FloatArrayBoundedValidator, FloatArrayProperty, StringListValidator class ExaminePowderDiffProfile(PythonAlgorithm): diff --git a/Framework/PythonInterface/plugins/algorithms/Examples/ExampleSaveAscii.py b/Framework/PythonInterface/plugins/algorithms/Examples/ExampleSaveAscii.py index f1c37795da35..d05da1cdf10d 
100644 --- a/Framework/PythonInterface/plugins/algorithms/Examples/ExampleSaveAscii.py +++ b/Framework/PythonInterface/plugins/algorithms/Examples/ExampleSaveAscii.py @@ -11,9 +11,8 @@ Note that the SaveAscii algorithm should be used instead in most cases. """ -# This __future__ import is for Python 2/3 compatibility -from mantid.kernel import * -from mantid.api import * +from mantid.api import AlgorithmFactory, FileAction, FileProperty, PythonAlgorithm, WorkspaceProperty +from mantid.kernel import Direction class ExampleSaveAscii(PythonAlgorithm): diff --git a/Framework/PythonInterface/plugins/algorithms/Examples/Squares.py b/Framework/PythonInterface/plugins/algorithms/Examples/Squares.py index c13efab7f297..9cd915b7b201 100644 --- a/Framework/PythonInterface/plugins/algorithms/Examples/Squares.py +++ b/Framework/PythonInterface/plugins/algorithms/Examples/Squares.py @@ -5,15 +5,8 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=no-init -# This __future__ import is for Python 2/3 compatibility -from mantid.api import * -from mantid.kernel import * - -# -# The following points are recommendations for writing Python algorithms: -# - The class name should match the file name; -# - Each file should contain exactly one algorithm. 
-# +from mantid.api import AlgorithmFactory, FileAction, FileProperty, MatrixWorkspaceProperty, Progress, PythonAlgorithm, WorkspaceFactory +from mantid.kernel import Direction, IntBoundedValidator, StringMandatoryValidator class Squares(PythonAlgorithm): @@ -66,7 +59,4 @@ def PyExec(self): sumfile.close() -############################################################################################# - - AlgorithmFactory.subscribe(Squares) diff --git a/Framework/PythonInterface/plugins/algorithms/ExportExperimentLog.py b/Framework/PythonInterface/plugins/algorithms/ExportExperimentLog.py index 45e6b67970af..036894a5d070 100644 --- a/Framework/PythonInterface/plugins/algorithms/ExportExperimentLog.py +++ b/Framework/PythonInterface/plugins/algorithms/ExportExperimentLog.py @@ -6,8 +6,8 @@ # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=no-init,invalid-name import mantid -from mantid.api import * -from mantid.kernel import * +from mantid.api import AlgorithmFactory, FileAction, FileProperty, MatrixWorkspaceProperty, PythonAlgorithm +from mantid.kernel import Direction, StringArrayProperty, StringListValidator import datetime import time import os diff --git a/Framework/PythonInterface/plugins/algorithms/ExportSampleLogsToCSVFile.py b/Framework/PythonInterface/plugins/algorithms/ExportSampleLogsToCSVFile.py index fe3df84e41bd..686fc1dfa52d 100644 --- a/Framework/PythonInterface/plugins/algorithms/ExportSampleLogsToCSVFile.py +++ b/Framework/PythonInterface/plugins/algorithms/ExportSampleLogsToCSVFile.py @@ -5,8 +5,8 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=no-init,invalid-name,too-many-instance-attributes -from mantid.api import * -from mantid.kernel import * +from mantid.api import AlgorithmFactory, FileAction, FileProperty, MatrixWorkspaceProperty, PythonAlgorithm +from mantid.kernel import DateAndTime, Direction, StringArrayProperty, StringListValidator 
import numpy as np import os diff --git a/Framework/PythonInterface/plugins/algorithms/ExportSampleLogsToHDF5.py b/Framework/PythonInterface/plugins/algorithms/ExportSampleLogsToHDF5.py index c836a63534c4..25d4eef99b7a 100644 --- a/Framework/PythonInterface/plugins/algorithms/ExportSampleLogsToHDF5.py +++ b/Framework/PythonInterface/plugins/algorithms/ExportSampleLogsToHDF5.py @@ -4,8 +4,18 @@ # NScD Oak Ridge National Laboratory, European Spallation Source, # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + -from mantid.api import * -from mantid.kernel import * +from mantid.api import AlgorithmFactory, FileAction, FileProperty, MatrixWorkspaceProperty, PythonAlgorithm +from mantid.kernel import ( + Direction, + FloatArrayProperty, + FloatTimeSeriesProperty, + Int32TimeSeriesProperty, + Int64TimeSeriesProperty, + BoolTimeSeriesProperty, + StringPropertyWithValue, + StringTimeSeriesProperty, + StringArrayProperty, +) import h5py diff --git a/Framework/PythonInterface/plugins/algorithms/ExtractMonitors.py b/Framework/PythonInterface/plugins/algorithms/ExtractMonitors.py index f2478b16cde6..65c6be844492 100644 --- a/Framework/PythonInterface/plugins/algorithms/ExtractMonitors.py +++ b/Framework/PythonInterface/plugins/algorithms/ExtractMonitors.py @@ -4,9 +4,9 @@ # NScD Oak Ridge National Laboratory, European Spallation Source, # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + -from mantid.simpleapi import * -from mantid.kernel import * -from mantid.api import MatrixWorkspaceProperty, DataProcessorAlgorithm, PropertyMode +from mantid.simpleapi import ExtractSpectra +from mantid.kernel import Direction +from mantid.api import AlgorithmFactory, MatrixWorkspaceProperty, DataProcessorAlgorithm, PropertyMode class ExtractMonitors(DataProcessorAlgorithm): diff --git a/Framework/PythonInterface/plugins/algorithms/FilterLogByTime.py 
b/Framework/PythonInterface/plugins/algorithms/FilterLogByTime.py index a24e5629a47c..8466c4f67fc3 100644 --- a/Framework/PythonInterface/plugins/algorithms/FilterLogByTime.py +++ b/Framework/PythonInterface/plugins/algorithms/FilterLogByTime.py @@ -5,9 +5,8 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=no-init,invalid-name -from mantid.simpleapi import * -from mantid.api import * -from mantid.kernel import * +from mantid.api import AlgorithmFactory, PythonAlgorithm, WorkspaceProperty +from mantid.kernel import Direction, FloatArrayProperty, FloatBoundedValidator, StringListValidator, StringMandatoryValidator import numpy import sys diff --git a/Framework/PythonInterface/plugins/algorithms/FindSatellitePeaks.py b/Framework/PythonInterface/plugins/algorithms/FindSatellitePeaks.py index 10b081450b1a..4873cb2b9323 100644 --- a/Framework/PythonInterface/plugins/algorithms/FindSatellitePeaks.py +++ b/Framework/PythonInterface/plugins/algorithms/FindSatellitePeaks.py @@ -4,10 +4,10 @@ # NScD Oak Ridge National Laboratory, European Spallation Source, # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + -from mantid.kernel import * +from mantid.api import AlgorithmFactory, DataProcessorAlgorithm, MDFrameValidator, WorkspaceProperty from mantid.dataobjects import PeaksWorkspaceProperty -from mantid.api import * -from mantid.simpleapi import * +from mantid.kernel import Direction +from mantid.simpleapi import CentroidPeaksMD, CloneWorkspace, CombinePeaksWorkspaces, FilterPeaks, IntegratePeaksMD, PredictFractionalPeaks import fractional_indexing as indexing diff --git a/Framework/PythonInterface/plugins/algorithms/GSASIIRefineFitPeaks.py b/Framework/PythonInterface/plugins/algorithms/GSASIIRefineFitPeaks.py index 322694456663..d6abcfcd1e39 100644 --- a/Framework/PythonInterface/plugins/algorithms/GSASIIRefineFitPeaks.py +++ 
b/Framework/PythonInterface/plugins/algorithms/GSASIIRefineFitPeaks.py @@ -10,8 +10,17 @@ import sys import tempfile -from mantid.kernel import * -from mantid.api import * +from mantid.api import ( + AlgorithmFactory, + FileAction, + FileProperty, + ITableWorkspaceProperty, + MultipleFileProperty, + Progress, + PythonAlgorithm, + WorkspaceProperty, +) +from mantid.kernel import logger, Direction, StringListValidator import mantid.simpleapi as mantid diff --git a/Framework/PythonInterface/plugins/algorithms/GenerateLogbook.py b/Framework/PythonInterface/plugins/algorithms/GenerateLogbook.py index c01067e1463c..a797366844f2 100644 --- a/Framework/PythonInterface/plugins/algorithms/GenerateLogbook.py +++ b/Framework/PythonInterface/plugins/algorithms/GenerateLogbook.py @@ -6,9 +6,16 @@ # SPDX - License - Identifier: GPL - 3.0 + from mantid import config -from mantid.api import AlgorithmFactory, FileAction, FileProperty, ITableWorkspaceProperty, Progress, PythonAlgorithm +from mantid.api import mtd, AlgorithmFactory, FileAction, FileProperty, ITableWorkspaceProperty, Progress, PythonAlgorithm from mantid.kernel import Direction, IntArrayBoundedValidator, StringListValidator, StringMandatoryValidator -from mantid.simpleapi import * +from mantid.simpleapi import ( + CreateEmptyTableWorkspace, + CreateSingleValuedWorkspace, + DeleteWorkspace, + LoadEmptyInstrument, + LoadParameterFile, + SaveAscii, +) import fnmatch import h5py diff --git a/Framework/PythonInterface/plugins/algorithms/GetLiveInstrumentValue.py b/Framework/PythonInterface/plugins/algorithms/GetLiveInstrumentValue.py index c6f4381dc68b..e16a5897f53b 100644 --- a/Framework/PythonInterface/plugins/algorithms/GetLiveInstrumentValue.py +++ b/Framework/PythonInterface/plugins/algorithms/GetLiveInstrumentValue.py @@ -4,9 +4,8 @@ # NScD Oak Ridge National Laboratory, European Spallation Source, # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + -from 
mantid.simpleapi import * -from mantid.kernel import * -from mantid.api import * +from mantid.api import AlgorithmFactory, DataProcessorAlgorithm +from mantid.kernel import config, Direction, StringListValidator, StringMandatoryValidator class GetLiveInstrumentValue(DataProcessorAlgorithm): diff --git a/Framework/PythonInterface/plugins/algorithms/IndirectTransmission.py b/Framework/PythonInterface/plugins/algorithms/IndirectTransmission.py index a74d0cb7dc75..cfb136c8b4d5 100644 --- a/Framework/PythonInterface/plugins/algorithms/IndirectTransmission.py +++ b/Framework/PythonInterface/plugins/algorithms/IndirectTransmission.py @@ -5,9 +5,16 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=no-init -from mantid.simpleapi import * -from mantid.api import * -from mantid.kernel import * +from mantid.api import mtd, AlgorithmFactory, PythonAlgorithm, WorkspaceProperty +from mantid.kernel import logger, Direction, StringListValidator, StringMandatoryValidator +from mantid.simpleapi import ( + CreateEmptyTableWorkspace, + CreateSimulationWorkspace, + DeleteWorkspace, + GroupDetectors, + LoadParameterFile, + SetSampleMaterial, +) from mantid import config import math diff --git a/Framework/PythonInterface/plugins/algorithms/IntegratePeaksProfileFitting.py b/Framework/PythonInterface/plugins/algorithms/IntegratePeaksProfileFitting.py index e9d804e784a3..aec46c827d71 100644 --- a/Framework/PythonInterface/plugins/algorithms/IntegratePeaksProfileFitting.py +++ b/Framework/PythonInterface/plugins/algorithms/IntegratePeaksProfileFitting.py @@ -10,9 +10,9 @@ fitting for integrating peaks. 
""" -from mantid.kernel import * -from mantid.api import * -from mantid.simpleapi import * +from mantid.api import mtd, AlgorithmFactory, FileAction, FileProperty, Progress, PythonAlgorithm, WorkspaceProperty +from mantid.kernel import logger, Direction, FloatBoundedValidator, V3D +from mantid.simpleapi import CreateEmptyTableWorkspace, CreateWorkspace, Fit import numpy as np import warnings diff --git a/Framework/PythonInterface/plugins/algorithms/LRAutoReduction.py b/Framework/PythonInterface/plugins/algorithms/LRAutoReduction.py index 0ad9980e0e15..4ff5cc0ed72b 100644 --- a/Framework/PythonInterface/plugins/algorithms/LRAutoReduction.py +++ b/Framework/PythonInterface/plugins/algorithms/LRAutoReduction.py @@ -13,11 +13,20 @@ import math import re import platform +import os import time import mantid -from mantid.api import * -from mantid.simpleapi import * -from mantid.kernel import * +from mantid.api import AlgorithmFactory, AnalysisDataService, FileAction, FileProperty, PropertyMode, PythonAlgorithm, WorkspaceProperty +from mantid.kernel import logger, Direction, IntArrayProperty +from mantid.simpleapi import ( + LiquidsReflectometryReduction, + LoadEventNexus, + LoadNexus, + LRDirectBeamSort, + LRReductionWithReference, + LRReflectivityOutput, + SaveNexus, +) from reduction_gui.reduction.reflectometer.refl_data_series import DataSeries diff --git a/Framework/PythonInterface/plugins/algorithms/LRDirectBeamSort.py b/Framework/PythonInterface/plugins/algorithms/LRDirectBeamSort.py index a33bb93d10de..4782eb25911c 100644 --- a/Framework/PythonInterface/plugins/algorithms/LRDirectBeamSort.py +++ b/Framework/PythonInterface/plugins/algorithms/LRDirectBeamSort.py @@ -5,9 +5,9 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=no-init,invalid-name -from mantid.api import * -from mantid.simpleapi import * -from mantid.kernel import * +from mantid.api import mtd, AlgorithmFactory, 
FileAction, FileProperty, PythonAlgorithm +from mantid.kernel import logger, Direction, IntArrayProperty, StringArrayProperty +from mantid.simpleapi import CreateWorkspace, DeleteWorkspace, FitPeaks, LoadEventNexus, LRScalingFactors import functools import numpy as np from typing import List, Tuple diff --git a/Framework/PythonInterface/plugins/algorithms/LRPeakSelection.py b/Framework/PythonInterface/plugins/algorithms/LRPeakSelection.py index c0bded4bad21..923187caa409 100644 --- a/Framework/PythonInterface/plugins/algorithms/LRPeakSelection.py +++ b/Framework/PythonInterface/plugins/algorithms/LRPeakSelection.py @@ -8,9 +8,8 @@ import math import numpy as np -from mantid.api import * -from mantid.simpleapi import * -from mantid.kernel import * +from mantid.api import AlgorithmFactory, PythonAlgorithm, WorkspaceProperty +from mantid.kernel import logger, Direction, IntArrayProperty class PeakFinderDerivation(object): diff --git a/Framework/PythonInterface/plugins/algorithms/LRPrimaryFraction.py b/Framework/PythonInterface/plugins/algorithms/LRPrimaryFraction.py index b79190a0ba4a..d6c5f03e2c21 100644 --- a/Framework/PythonInterface/plugins/algorithms/LRPrimaryFraction.py +++ b/Framework/PythonInterface/plugins/algorithms/LRPrimaryFraction.py @@ -6,9 +6,9 @@ # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=no-init,invalid-name import math -from mantid.api import * -from mantid.simpleapi import * -from mantid.kernel import * +from mantid.api import AlgorithmFactory, PythonAlgorithm, WorkspaceProperty +from mantid.kernel import logger, Direction, FloatArrayProperty, IntArrayProperty, IntArrayLengthValidator +from mantid.simpleapi import Integration, RefRoi, Transpose class LRPrimaryFraction(PythonAlgorithm): diff --git a/Framework/PythonInterface/plugins/algorithms/LRReflectivityOutput.py b/Framework/PythonInterface/plugins/algorithms/LRReflectivityOutput.py index 8b0eb76a6fc2..45c7b9260f20 100644 --- 
a/Framework/PythonInterface/plugins/algorithms/LRReflectivityOutput.py +++ b/Framework/PythonInterface/plugins/algorithms/LRReflectivityOutput.py @@ -8,9 +8,9 @@ import math import time import mantid -from mantid.api import * -from mantid.simpleapi import * -from mantid.kernel import * +from mantid.api import mtd, AnalysisDataService, AlgorithmFactory, FileAction, FileProperty, PythonAlgorithm +from mantid.kernel import logger, Direction, FloatArrayProperty, StringArrayProperty +from mantid.simpleapi import Rebin, Scale class LRReflectivityOutput(PythonAlgorithm): diff --git a/Framework/PythonInterface/plugins/algorithms/LRScalingFactors.py b/Framework/PythonInterface/plugins/algorithms/LRScalingFactors.py index 1a2e44177382..e5ab05fa9316 100644 --- a/Framework/PythonInterface/plugins/algorithms/LRScalingFactors.py +++ b/Framework/PythonInterface/plugins/algorithms/LRScalingFactors.py @@ -7,9 +7,21 @@ # pylint: disable=invalid-name, no-init import os import re -from mantid.api import * -from mantid.simpleapi import * -from mantid.kernel import * +from mantid.api import mtd, AlgorithmFactory, FileAction, FileProperty, PythonAlgorithm +from mantid.kernel import logger, Direction, FloatArrayProperty, FloatArrayLengthValidator, IntArrayProperty, Property +from mantid.simpleapi import ( + CreateWorkspace, + CropWorkspace, + Divide, + Fit, + LoadEventNexus, + LRSubtractAverageBackground, + Multiply, + NormaliseByCurrent, + Rebin, + ReplaceSpecialValues, + SumSpectra, +) class LRScalingFactors(PythonAlgorithm): diff --git a/Framework/PythonInterface/plugins/algorithms/LRSubtractAverageBackground.py b/Framework/PythonInterface/plugins/algorithms/LRSubtractAverageBackground.py index 7bd93295d4cd..f7de507e2ad5 100644 --- a/Framework/PythonInterface/plugins/algorithms/LRSubtractAverageBackground.py +++ b/Framework/PythonInterface/plugins/algorithms/LRSubtractAverageBackground.py @@ -5,9 +5,9 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - 
License - Identifier: GPL - 3.0 + # pylint: disable=no-init,invalid-name -from mantid.api import * -from mantid.simpleapi import * -from mantid.kernel import * +from mantid.api import AlgorithmFactory, AnalysisDataService, PythonAlgorithm, WorkspaceProperty +from mantid.kernel import Direction, IntArrayLengthValidator, IntArrayProperty, StringListValidator +from mantid.simpleapi import Minus, RefRoi class LRSubtractAverageBackground(PythonAlgorithm): diff --git a/Framework/PythonInterface/plugins/algorithms/LiquidsReflectometryReduction.py b/Framework/PythonInterface/plugins/algorithms/LiquidsReflectometryReduction.py index a90b97b48023..811ce8dda4df 100644 --- a/Framework/PythonInterface/plugins/algorithms/LiquidsReflectometryReduction.py +++ b/Framework/PythonInterface/plugins/algorithms/LiquidsReflectometryReduction.py @@ -17,9 +17,45 @@ import time import math import os -from mantid.api import * -from mantid.simpleapi import * -from mantid.kernel import * +from mantid.api import ( + mtd, + AlgorithmFactory, + AnalysisDataService, + FileFinder, + MatrixWorkspaceProperty, + PropertyMode, + PythonAlgorithm, + WorkspaceProperty, +) +from mantid.kernel import ( + logger, + Direction, + FloatArrayLengthValidator, + FloatArrayProperty, + IntArrayLengthValidator, + IntArrayProperty, + StringArrayProperty, +) +from mantid.simpleapi import ( + AddSampleLog, + ConvertToPointData, + CreateSingleValuedWorkspace, + CropWorkspace, + Divide, + ExtractSingleSpectrum, + Load, + LoadEventNexus, + LRPrimaryFraction, + LRSubtractAverageBackground, + Multiply, + NormaliseByCurrent, + Rebin, + RebinToWorkspace, + RefRoi, + ReplaceSpecialValues, + SortXAxis, + SumSpectra, +) from functools import reduce # pylint: disable=redefined-builtin diff --git a/Framework/PythonInterface/plugins/algorithms/LoadCIF.py b/Framework/PythonInterface/plugins/algorithms/LoadCIF.py index dacaf80bc629..44244277f7a0 100644 --- a/Framework/PythonInterface/plugins/algorithms/LoadCIF.py +++ 
b/Framework/PythonInterface/plugins/algorithms/LoadCIF.py @@ -5,9 +5,8 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=no-init,too-few-public-methods -from mantid.kernel import * -from mantid.simpleapi import * -from mantid.api import * +from mantid.api import AlgorithmFactory, FileAction, FileProperty, PythonAlgorithm, WorkspaceProperty +from mantid.kernel import Direction from mantid.geometry import SpaceGroupFactory, CrystalStructure, UnitCell import re diff --git a/Framework/PythonInterface/plugins/algorithms/LoadEmptyVesuvio.py b/Framework/PythonInterface/plugins/algorithms/LoadEmptyVesuvio.py index e0eda308696f..f10b04a55b86 100644 --- a/Framework/PythonInterface/plugins/algorithms/LoadEmptyVesuvio.py +++ b/Framework/PythonInterface/plugins/algorithms/LoadEmptyVesuvio.py @@ -5,8 +5,8 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=no-init -from mantid.kernel import * -from mantid.api import * +from mantid.api import AlgorithmFactory, FileAction, FileProperty, PythonAlgorithm, WorkspaceProperty +from mantid.kernel import config, Direction import os diff --git a/Framework/PythonInterface/plugins/algorithms/LoadLogPropertyTable.py b/Framework/PythonInterface/plugins/algorithms/LoadLogPropertyTable.py index cba56cf7380f..b2026c665518 100644 --- a/Framework/PythonInterface/plugins/algorithms/LoadLogPropertyTable.py +++ b/Framework/PythonInterface/plugins/algorithms/LoadLogPropertyTable.py @@ -10,9 +10,8 @@ import numbers import bisect import numpy -from mantid.api import * # PythonAlgorithm, AlgorithmFactory, WorkspaceProperty -from mantid.kernel import * # StringArrayProperty -from mantid.simpleapi import * # needed for Load +from mantid.api import AlgorithmFactory, FileAction, FileProperty, PythonAlgorithm, WorkspaceFactory, WorkspaceProperty +from mantid.kernel import Direction, 
StringArrayProperty class LoadLogPropertyTable(PythonAlgorithm): diff --git a/Framework/PythonInterface/plugins/algorithms/LoadMultipleGSS.py b/Framework/PythonInterface/plugins/algorithms/LoadMultipleGSS.py index c32cf31546d1..d6e543062790 100644 --- a/Framework/PythonInterface/plugins/algorithms/LoadMultipleGSS.py +++ b/Framework/PythonInterface/plugins/algorithms/LoadMultipleGSS.py @@ -5,9 +5,9 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=no-init,invalid-name -from mantid.api import * -from mantid.simpleapi import * -from mantid.kernel import * +from mantid.api import AlgorithmFactory, FileAction, FileProperty, PythonAlgorithm +from mantid.kernel import IntArrayBoundedValidator, IntArrayProperty +from mantid.simpleapi import ConvertUnits, LoadGSS import os diff --git a/Framework/PythonInterface/plugins/algorithms/LoadNMoldyn4Ascii.py b/Framework/PythonInterface/plugins/algorithms/LoadNMoldyn4Ascii.py index 92d429aa74c6..e44deefd6b3d 100644 --- a/Framework/PythonInterface/plugins/algorithms/LoadNMoldyn4Ascii.py +++ b/Framework/PythonInterface/plugins/algorithms/LoadNMoldyn4Ascii.py @@ -5,10 +5,9 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=no-init - -from mantid.simpleapi import * -from mantid.kernel import * -from mantid.api import * +from mantid.api import AlgorithmFactory, AlgorithmManager, FileAction, FileProperty, PythonAlgorithm, WorkspaceProperty +from mantid.kernel import logger, Direction, StringArrayProperty +from mantid.simpleapi import GroupWorkspaces import numpy as np import scipy.constants as sc diff --git a/Framework/PythonInterface/plugins/algorithms/LoadNMoldyn4Ascii1D.py b/Framework/PythonInterface/plugins/algorithms/LoadNMoldyn4Ascii1D.py index 2175c6d3254a..0cd687e6a410 100644 --- a/Framework/PythonInterface/plugins/algorithms/LoadNMoldyn4Ascii1D.py +++ 
b/Framework/PythonInterface/plugins/algorithms/LoadNMoldyn4Ascii1D.py @@ -4,9 +4,9 @@ # NScD Oak Ridge National Laboratory, European Spallation Source, # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + +from mantid.api import AlgorithmFactory, FileAction, FileProperty, Progress, PythonAlgorithm, WorkspaceProperty +from mantid.kernel import logger, Direction, StringArrayProperty, StringListValidator from mantid.simpleapi import CreateWorkspace, GroupWorkspaces -from mantid.kernel import * -from mantid.api import * import numpy as np import scipy.constants as sc diff --git a/Framework/PythonInterface/plugins/algorithms/LoadPreNexusLive.py b/Framework/PythonInterface/plugins/algorithms/LoadPreNexusLive.py index fedc09ee775d..c97ac4701b46 100644 --- a/Framework/PythonInterface/plugins/algorithms/LoadPreNexusLive.py +++ b/Framework/PythonInterface/plugins/algorithms/LoadPreNexusLive.py @@ -7,7 +7,7 @@ from mantid import mtd from mantid.api import AlgorithmFactory, DataProcessorAlgorithm, FileAction, FileProperty, WorkspaceProperty from mantid.kernel import Direction, EnabledWhenProperty, IntBoundedValidator, Property, PropertyCriterion, StringListValidator -from mantid.simpleapi import * +from mantid.simpleapi import FilterByXValue, GetIPTS, LoadEventPreNexus, LoadInstrument, LoadNexusLogs, NormaliseByCurrent import os diff --git a/Framework/PythonInterface/plugins/algorithms/LoadVesuvio.py b/Framework/PythonInterface/plugins/algorithms/LoadVesuvio.py index d09d8193a2ed..4e39ca1130ad 100644 --- a/Framework/PythonInterface/plugins/algorithms/LoadVesuvio.py +++ b/Framework/PythonInterface/plugins/algorithms/LoadVesuvio.py @@ -4,8 +4,8 @@ # NScD Oak Ridge National Laboratory, European Spallation Source, # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + -from mantid.kernel import * -from mantid.api import * +from mantid.api import mtd, 
AlgorithmFactory, FileAction, FileProperty, WorkspaceFactory, WorkspaceProperty +from mantid.kernel import config, logger, Direction, IntArrayProperty, StringListValidator, StringMandatoryValidator import mantid.simpleapi as ms from LoadEmptyVesuvio import LoadEmptyVesuvio diff --git a/Framework/PythonInterface/plugins/algorithms/LoadVisionElasticBS.py b/Framework/PythonInterface/plugins/algorithms/LoadVisionElasticBS.py index ff0c118bccc1..d10d9020dcc4 100644 --- a/Framework/PythonInterface/plugins/algorithms/LoadVisionElasticBS.py +++ b/Framework/PythonInterface/plugins/algorithms/LoadVisionElasticBS.py @@ -5,11 +5,8 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=no-init,invalid-name -# from mantid.api import AlgorithmFactory -# from mantid.simpleapi import PythonAlgorithm, WorkspaceProperty -# from mantid.kernel import Direction -from mantid.api import * -from mantid.kernel import * +from mantid.api import AlgorithmFactory, ExperimentInfo, FileAction, FileProperty, PythonAlgorithm, WorkspaceProperty +from mantid.kernel import config, Direction import mantid.simpleapi import os diff --git a/Framework/PythonInterface/plugins/algorithms/LoadVisionElasticEQ.py b/Framework/PythonInterface/plugins/algorithms/LoadVisionElasticEQ.py index c718ba7264d2..8be23c8e22e9 100644 --- a/Framework/PythonInterface/plugins/algorithms/LoadVisionElasticEQ.py +++ b/Framework/PythonInterface/plugins/algorithms/LoadVisionElasticEQ.py @@ -5,11 +5,8 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=no-init,invalid-name -# from mantid.api import AlgorithmFactory -# from mantid.simpleapi import PythonAlgorithm, WorkspaceProperty -# from mantid.kernel import Direction -from mantid.api import * -from mantid.kernel import * +from mantid.api import AlgorithmFactory, ExperimentInfo, FileAction, FileProperty, 
PythonAlgorithm, WorkspaceProperty +from mantid.kernel import config, Direction import mantid.simpleapi import os diff --git a/Framework/PythonInterface/plugins/algorithms/LoadVisionInelastic.py b/Framework/PythonInterface/plugins/algorithms/LoadVisionInelastic.py index 1979c53f15ec..a0ea2732cec6 100644 --- a/Framework/PythonInterface/plugins/algorithms/LoadVisionInelastic.py +++ b/Framework/PythonInterface/plugins/algorithms/LoadVisionInelastic.py @@ -5,11 +5,8 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=no-init,invalid-name -# from mantid.api import AlgorithmFactory -# from mantid.simpleapi import PythonAlgorithm, WorkspaceProperty -# from mantid.kernel import Direction -from mantid.api import * -from mantid.kernel import * +from mantid.api import AlgorithmFactory, FileAction, FileProperty, PythonAlgorithm, WorkspaceProperty +from mantid.kernel import Direction import mantid.simpleapi diff --git a/Framework/PythonInterface/plugins/algorithms/MRFilterCrossSections.py b/Framework/PythonInterface/plugins/algorithms/MRFilterCrossSections.py index fe5042c6beb8..f941222c8a5c 100644 --- a/Framework/PythonInterface/plugins/algorithms/MRFilterCrossSections.py +++ b/Framework/PythonInterface/plugins/algorithms/MRFilterCrossSections.py @@ -7,8 +7,16 @@ # pylint: disable=no-init,invalid-name import os from operator import itemgetter -from mantid.api import * -from mantid.kernel import * +from mantid.api import ( + AnalysisDataService, + FileAction, + FileProperty, + PropertyMode, + PythonAlgorithm, + WorkspaceGroupProperty, + WorkspaceProperty, +) +from mantid.kernel import Direction import mantid.simpleapi as api diff --git a/Framework/PythonInterface/plugins/algorithms/MRInspectData.py b/Framework/PythonInterface/plugins/algorithms/MRInspectData.py index acb4b171d2f8..1f117aad52d4 100644 --- a/Framework/PythonInterface/plugins/algorithms/MRInspectData.py +++ 
b/Framework/PythonInterface/plugins/algorithms/MRInspectData.py @@ -6,8 +6,8 @@ # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=bare-except,no-init,invalid-name,dangerous-default-value import sys -from mantid.api import * -from mantid.kernel import * +from mantid.api import AlgorithmFactory, PythonAlgorithm, WorkspaceProperty +from mantid.kernel import logger, Direction, IntArrayLengthValidator, IntArrayProperty, Property import mantid.simpleapi import math import copy diff --git a/Framework/PythonInterface/plugins/algorithms/MagnetismReflectometryReduction.py b/Framework/PythonInterface/plugins/algorithms/MagnetismReflectometryReduction.py index 58e3163876b9..38ef68f17ac9 100644 --- a/Framework/PythonInterface/plugins/algorithms/MagnetismReflectometryReduction.py +++ b/Framework/PythonInterface/plugins/algorithms/MagnetismReflectometryReduction.py @@ -12,9 +12,44 @@ import sys import math import numpy as np -from mantid.api import * -from mantid.simpleapi import * -from mantid.kernel import * +from mantid.api import ( + mtd, + AlgorithmFactory, + AnalysisDataService, + PropertyMode, + PythonAlgorithm, + WorkspaceGroup, + WorkspaceProperty, +) +from mantid.kernel import ( + logger, + Direction, + FloatArrayLengthValidator, + FloatArrayProperty, + IntArrayLengthValidator, + IntArrayProperty, + Property, +) +from mantid.simpleapi import ( + AddSampleLog, + ConvertToPointData, + ConvertUnits, + CreateWorkspace, + CropWorkspace, + Divide, + GroupWorkspaces, + LoadEventNexus, + Minus, + MRGetTheta, + NormaliseByCurrent, + Rebin, + RebinToWorkspace, + RefRoi, + RenameWorkspace, + ReplaceSpecialValues, + SortXAxis, + SumSpectra, +) INSTRUMENT_NAME = "REF_M" diff --git a/Framework/PythonInterface/plugins/algorithms/MaskWorkspaceToCalFile.py b/Framework/PythonInterface/plugins/algorithms/MaskWorkspaceToCalFile.py index 527b957de8bd..ae86ae3665b8 100644 --- a/Framework/PythonInterface/plugins/algorithms/MaskWorkspaceToCalFile.py +++ 
b/Framework/PythonInterface/plugins/algorithms/MaskWorkspaceToCalFile.py @@ -5,9 +5,8 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=invalid-name, no-init -from mantid.kernel import * -from mantid.api import * -from mantid.simpleapi import * +from mantid.api import mtd, AlgorithmFactory, FileAction, FileProperty, PythonAlgorithm, MatrixWorkspaceProperty +from mantid.kernel import Direction # pylint: disable=too-few-public-methods diff --git a/Framework/PythonInterface/plugins/algorithms/MatchPeaks.py b/Framework/PythonInterface/plugins/algorithms/MatchPeaks.py index 38146fbb0017..dd16f501556a 100644 --- a/Framework/PythonInterface/plugins/algorithms/MatchPeaks.py +++ b/Framework/PythonInterface/plugins/algorithms/MatchPeaks.py @@ -5,9 +5,18 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=too-many-branches -from mantid.api import PythonAlgorithm, MatrixWorkspaceProperty, ITableWorkspaceProperty, PropertyMode, MatrixWorkspace -from mantid.simpleapi import * -from mantid.kernel import Direction +from mantid.api import ( + mtd, + AlgorithmFactory, + PythonAlgorithm, + MatrixWorkspaceProperty, + ITableWorkspace, + ITableWorkspaceProperty, + PropertyMode, + MatrixWorkspace, +) +from mantid.kernel import logger, Direction +from mantid.simpleapi import CloneWorkspace, CreateEmptyTableWorkspace, DeleteWorkspace, FindEPP, MaskBins, ReplaceSpecialValues import numpy as np diff --git a/Framework/PythonInterface/plugins/algorithms/Mean.py b/Framework/PythonInterface/plugins/algorithms/Mean.py index d6303092282d..e094265ff993 100644 --- a/Framework/PythonInterface/plugins/algorithms/Mean.py +++ b/Framework/PythonInterface/plugins/algorithms/Mean.py @@ -7,9 +7,9 @@ # pylint: disable=no-init,invalid-name import numpy -from mantid.simpleapi import * -from mantid.api import * -from mantid.kernel import * 
+from mantid.api import mtd, AlgorithmFactory, PythonAlgorithm, MatrixWorkspace, MatrixWorkspaceProperty +from mantid.kernel import Direction, StringMandatoryValidator +from mantid.simpleapi import CloneWorkspace class Mean(PythonAlgorithm): diff --git a/Framework/PythonInterface/plugins/algorithms/MergeCalFiles.py b/Framework/PythonInterface/plugins/algorithms/MergeCalFiles.py index 1b37f6a84f3b..8e57d985cd1e 100644 --- a/Framework/PythonInterface/plugins/algorithms/MergeCalFiles.py +++ b/Framework/PythonInterface/plugins/algorithms/MergeCalFiles.py @@ -5,8 +5,7 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=no-init -from mantid.api import * -from mantid.kernel import * +from mantid.api import AlgorithmFactory, FileAction, FileProperty, PythonAlgorithm class MergeCalFiles(PythonAlgorithm): diff --git a/Framework/PythonInterface/plugins/algorithms/MuonMaxent.py b/Framework/PythonInterface/plugins/algorithms/MuonMaxent.py index 00a7c1183381..dfdb2d5ffd60 100644 --- a/Framework/PythonInterface/plugins/algorithms/MuonMaxent.py +++ b/Framework/PythonInterface/plugins/algorithms/MuonMaxent.py @@ -8,9 +8,17 @@ import math from Muon.MaxentTools.multimaxalpha import MULTIMAX from Muon.MaxentTools.dead_detector_handler import removeDeadDetectors -from mantid.api import * -from mantid.kernel import * -from mantid.simpleapi import * +from mantid.api import ( + AlgorithmFactory, + ITableWorkspaceProperty, + Progress, + PropertyMode, + PythonAlgorithm, + RawCountValidator, + WorkspaceFactory, + WorkspaceProperty, +) +from mantid.kernel import Direction, IntListValidator """ diff --git a/buildconfig/CMake/Packaging/AddPythonPath.py.in b/buildconfig/CMake/Packaging/AddPythonPath.py.in index 7a56dd579341..6ba987abd084 100644 --- a/buildconfig/CMake/Packaging/AddPythonPath.py.in +++ b/buildconfig/CMake/Packaging/AddPythonPath.py.in @@ -13,27 +13,31 @@ import traceback mantidpath = 
"@CMAKE_RUNTIME_OUTPUT_DIRECTORY@" # get a list of all *.egg-link files -eggfiles = [os.path.join(mantidpath, item) - for item in os.listdir(mantidpath) - # skip mantidplot looking files - if item.endswith('.egg-link') and 'plot' not in item.lower()] +eggfiles = [ + os.path.join(mantidpath, item) + for item in os.listdir(mantidpath) + # skip mantidplot looking files + if item.endswith(".egg-link") and "plot" not in item.lower() +] # directories to add are what are in those files pathdirs = [] for filename in eggfiles: with open(filename) as handle: for line in handle.readlines(): line = line.strip() - if (not line) or (line == '.'): # don't add current directory + if (not line) or (line == "."): # don't add current directory continue pathdirs.append(line) pathdirs = list(set(pathdirs)) # get unique directories + def die(msg=None): if msg: print(msg) traceback.print_exc() exit(1) + # modify the running path and check mantid can be loaded in this python pathdirs.insert(0, mantidpath) for directory in pathdirs: @@ -43,21 +47,20 @@ try: import mantid # noqa except ImportError as e: # check for an error that appears to be python2 only - if 'No module named DLFCN' == str(e): + if "No module named DLFCN" == str(e): print('Looks like "/usr/lib64/python2.7/plat-####/" is missing from sys.path') # can find platform path by comparing to (in vanilla python) # python -c "import sys, pprint; pprint.pprint(sys.path)" found_dlfcn = False - for platform_path in ['/usr/lib64/python2.7/plat-linux2/', - '/usr/lib/python2.7/plat-x86_64-linux-gnu']: + for platform_path in ["/usr/lib64/python2.7/plat-linux2/", "/usr/lib/python2.7/plat-x86_64-linux-gnu"]: if os.path.exists(platform_path): print('found "{}" ... adding to system path'.format(platform_path)) sys.path.append(platform_path) try: import DLFCN # noqa except ImportError: - die('Did not fix import error') - print(' {} ... adding to mantid.pth'.format(' ' * len(platform_path))) + die("Did not fix import error") + print(" {} ... 
adding to mantid.pth".format(" " * len(platform_path))) pathdirs.append(platform_path) found_dlfcn = True if not found_dlfcn: # missing path wasn't found @@ -66,16 +69,16 @@ except ImportError as e: die("Can't import mantid: {}".format(e)) # where path file should go -pathfile = os.path.join(sc.get_python_lib(plat_specific=True), 'mantid.pth') +pathfile = os.path.join(sc.get_python_lib(plat_specific=True), "mantid.pth") if os.path.exists(pathfile): - print('over-writing', pathfile, 'with', pathdirs) + print("over-writing", pathfile, "with", pathdirs) else: - print('writing', pathdirs, 'to', pathfile) -with open(pathfile, 'w') as f: + print("writing", pathdirs, "to", pathfile) +with open(pathfile, "w") as f: for directory in pathdirs: # check that trailing `/` is there if not directory.endswith(os.sep): directory += os.sep f.write(directory) - f.write('\n') \ No newline at end of file + f.write("\n") diff --git a/docs/source/algorithms/GeneratePythonFitScript-v1.rst b/docs/source/algorithms/GeneratePythonFitScript-v1.rst index 86022b4bf516..c7d913f08e20 100644 --- a/docs/source/algorithms/GeneratePythonFitScript-v1.rst +++ b/docs/source/algorithms/GeneratePythonFitScript-v1.rst @@ -39,8 +39,9 @@ Output: .. testoutput:: ExGeneratePythonSequentialFitScript :options: +ELLIPSIS - # A python script generated to perform a sequential or simultaneous fit - from mantid.simpleapi import * + # A python script generated to perform a sequential fit + from mantid.api import AnalysisDataService + from mantid.simpleapi import Fit, GroupWorkspaces import matplotlib.pyplot as plt # List of tuples [ (workspace_name, workspace_index, start_x, end_x) ] @@ -136,8 +137,9 @@ Output: .. 
testoutput:: ExGeneratePythonSimultaneousFitScript :options: +ELLIPSIS - # A python script generated to perform a sequential or simultaneous fit - from mantid.simpleapi import * + # A python script generated to perform a simultaneous fit + from mantid.api import AnalysisDataService + from mantid.simpleapi import Fit import matplotlib.pyplot as plt # List of tuples [ (workspace_name, workspace_index, start_x, end_x) ] diff --git a/scripts/SCD_Reduction/BVGFitTools.py b/scripts/SCD_Reduction/BVGFitTools.py index fdd7e8c0cb1d..4ae434296206 100644 --- a/scripts/SCD_Reduction/BVGFitTools.py +++ b/scripts/SCD_Reduction/BVGFitTools.py @@ -7,7 +7,9 @@ import numpy as np import matplotlib.pyplot as plt import ICCFitTools as ICCFT -from mantid.simpleapi import * +from mantid.api import mtd +from mantid.simpleapi import CreateWorkspace, Fit, Polynomial +from mantid.kernel import logger, V3D from scipy.interpolate import interp1d from scipy.ndimage.filters import convolve import ICConvoluted as ICC @@ -334,9 +336,9 @@ def getXTOF(box, peak): QX, QY, QZ = ICCFT.getQXQYQZ(box) origQS = peak.getQSampleFrame() tList = np.zeros_like(QX) - for i in xrange(QX.shape[0]): - for j in xrange(QX.shape[1]): - for k in xrange(QX.shape[2]): + for i in range(QX.shape[0]): + for j in range(QX.shape[1]): + for k in range(QX.shape[2]): newQ = V3D(QX[i, j, k], QY[i, j, k], QZ[i, j, k]) peak.setQSampleFrame(newQ) flightPath = peak.getL1() + peak.getL2() @@ -747,5 +749,5 @@ def bvg(A, mu, sigma, x, y, bg): ) return A * f + bg else: - system.information(" BVGFT:bvg:not PSD Matrix") + logger.information(" BVGFT:bvg:not PSD Matrix") return 0.0 * np.ones_like(x) diff --git a/scripts/SCD_Reduction/ICCFitTools.py b/scripts/SCD_Reduction/ICCFitTools.py index 071b33f38147..9ca97ab8fa6f 100644 --- a/scripts/SCD_Reduction/ICCFitTools.py +++ b/scripts/SCD_Reduction/ICCFitTools.py @@ -9,8 +9,9 @@ import sys from scipy.special import factorial from scipy.optimize import curve_fit -from mantid.simpleapi
import * -from mantid.kernel import V3D +from mantid.api import mtd +from mantid.simpleapi import BinMD, ConvertToMD, CreateWorkspace, Fit, FunctionWrapper, Load, LoadIsawDetCal, Polynomial +from mantid.kernel import logger, V3D import ICConvoluted as ICC import itertools from functools import reduce diff --git a/scripts/SCD_Reduction/ReduceSCD_OneRun.py b/scripts/SCD_Reduction/ReduceSCD_OneRun.py index d6acb4b650f0..c2575599a6eb 100644 --- a/scripts/SCD_Reduction/ReduceSCD_OneRun.py +++ b/scripts/SCD_Reduction/ReduceSCD_OneRun.py @@ -49,8 +49,29 @@ sys.path.append("/opt/mantidnightly/bin") # sys.path.append("/opt/Mantid/bin") -from mantid.simpleapi import * -from mantid.api import * +from mantid.simpleapi import ( + ConvertToMD, + FindPeaksMD, + FindUBUsingFFT, + FindUBUsingLatticeParameters, + IndexPeaks, + IntegrateEllipsoids, + IntegratePeaksMD, + Integration, + LoadEventNexus, + LoadIsawDetCal, + SaveIsawPeaks, + SelectCellOfType, + LoadIsawUB, + LoadNexusMonitors, + PeakIntegration, + PredictPeaks, + SaveNexus, + SaveIsawUB, + Rebin, +) +from mantid.api import AnalysisDataService +from mantid import apiVersion, FileFinder print("API Version") print(apiVersion()) diff --git a/scripts/SCD_Reduction/ReduceSCD_Parallel.py b/scripts/SCD_Reduction/ReduceSCD_Parallel.py index cdab794a3dc5..4fc3caee5d3f 100644 --- a/scripts/SCD_Reduction/ReduceSCD_Parallel.py +++ b/scripts/SCD_Reduction/ReduceSCD_Parallel.py @@ -49,7 +49,22 @@ sys.path.append("/opt/mantidnightly/bin") # sys.path.append("/opt/Mantid/bin") -from mantid.simpleapi import * +from mantid import apiVersion, FileFinder +from mantid.simpleapi import ( + CombinePeaksWorkspaces, + CreatePeaksWorkspace, + FindUBUsingFFT, + FindUBUsingLatticeParameters, + IndexPeaks, + Load, + LoadEventNexus, + LoadIsawPeaks, + LoadIsawUB, + SaveNexus, + SaveIsawPeaks, + SaveIsawUB, + SelectCellOfType, +) print("API Version") print(apiVersion()) diff --git a/scripts/SCD_Reduction/SCDCalibratePanelsResults.py 
b/scripts/SCD_Reduction/SCDCalibratePanelsResults.py index fb14739a3cfb..bc1e3682b8c7 100755 --- a/scripts/SCD_Reduction/SCDCalibratePanelsResults.py +++ b/scripts/SCD_Reduction/SCDCalibratePanelsResults.py @@ -17,7 +17,7 @@ import numpy as np sys.path.append("/opt/mantidnightly/bin") -from mantid.simpleapi import * +from mantid.simpleapi import Load # Make a ./plots subdirectory for the plot files. if not os.path.exists("./plots"): diff --git a/scripts/reduction/find_data.py b/scripts/reduction/find_data.py index 83c4aa67ce80..3877f4ff2467 100644 --- a/scripts/reduction/find_data.py +++ b/scripts/reduction/find_data.py @@ -7,7 +7,7 @@ # pylint: disable=invalid-name,redefined-builtin import os import mantid.api as api -from mantid.simpleapi import * +from mantid.kernel import logger def find_file(filename=None, startswith=None, data_dir=None): @@ -80,7 +80,7 @@ def find_data(file, instrument="", allow_multiple=False): n_files = len(toks) instrument = str(instrument) - file_path = FileFinder.getFullPath(file) + file_path = api.FileFinder.getFullPath(file) if os.path.isfile(file_path): return file_path diff --git a/scripts/reduction/instrument.py b/scripts/reduction/instrument.py index dc9209d206a1..5e10f9b00e7a 100644 --- a/scripts/reduction/instrument.py +++ b/scripts/reduction/instrument.py @@ -4,9 +4,10 @@ # NScD Oak Ridge National Laboratory, European Spallation Source, # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + +import sys import mantid.simpleapi as api -from mantid.kernel import * -from mantid.api import * +from mantid.kernel import ConfigService +from mantid.api import AnalysisDataService def instrument_factory(name): diff --git a/scripts/reduction/instruments/example/ExampleRedStep.py b/scripts/reduction/instruments/example/ExampleRedStep.py index 30362359f0cb..8bdede7ba8d3 100644 --- a/scripts/reduction/instruments/example/ExampleRedStep.py +++ 
b/scripts/reduction/instruments/example/ExampleRedStep.py @@ -5,9 +5,9 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=no-init -from mantid.kernel import * -from mantid.api import * -from mantid.simpleapi import * +from mantid.kernel import Direction +from mantid.api import AlgorithmProperty, MatrixWorkspaceProperty, PythonAlgorithm +from mantid.simpleapi import LoadAscii class ExampleRedStep(PythonAlgorithm): diff --git a/scripts/reduction/instruments/example/example_reducer.py b/scripts/reduction/instruments/example/example_reducer.py index 23b90b4bfe9a..8bbbc105b69a 100644 --- a/scripts/reduction/instruments/example/example_reducer.py +++ b/scripts/reduction/instruments/example/example_reducer.py @@ -16,7 +16,7 @@ # It also does minimal type checking to ensure that the object that is passed is valid from reduction import validate_step, validate_loader import mantid -from mantid.simpleapi import * +from mantid.simpleapi import LoadAscii class ExampleReducer(Reducer): diff --git a/scripts/reduction/instruments/inelastic/direct_command_interface.py b/scripts/reduction/instruments/inelastic/direct_command_interface.py index cb210cf8ab06..431b1fa955d3 100644 --- a/scripts/reduction/instruments/inelastic/direct_command_interface.py +++ b/scripts/reduction/instruments/inelastic/direct_command_interface.py @@ -11,7 +11,7 @@ # Import the specific commands that we need from mantid.api import AlgorithmManager -from reduction.command_interface import * +from reduction.command_interface import Clear, ReductionSingleton from inelastic_reducer import InelasticReducer diff --git a/scripts/reduction/instruments/sans/sans_reduction_steps.py b/scripts/reduction/instruments/sans/sans_reduction_steps.py index e8f845be3d44..f78fe90eed74 100644 --- a/scripts/reduction/instruments/sans/sans_reduction_steps.py +++ b/scripts/reduction/instruments/sans/sans_reduction_steps.py @@ -16,12 +16,26 @@ # 
Mantid imports import mantid -from mantid.simpleapi import * +from mantid.simpleapi import ( + ApplyTransmissionCorrection, + CorrectToFile, + CreateSingleValuedWorkspace, + CropWorkspace, + DeleteWorkspace, + Divide, + ExtractMask, + MaskDetectors, + ReplaceSpecialValues, + SANSBeamFinder, + Scale, + Q1D, + Qxy, +) # Define a SANS specific logger from mantid.kernel import Logger import mantid.simpleapi as api -from mantid.api import AnalysisDataService +from mantid.api import AnalysisDataService, mtd sanslog = Logger("SANS") @@ -978,4 +992,4 @@ def execute(self, reducer, workspace): startX = x_vals[start] # Make sure we're inside the bin that we want to crop endX = 1.001 * x_vals[stop + 1] - api.CropWorkspace(InputWorkspace=workspace, OutputWorkspace=workspace, XMin=startX, XMax=endX) + CropWorkspace(InputWorkspace=workspace, OutputWorkspace=workspace, XMin=startX, XMax=endX) diff --git a/scripts/templates/GeneratePythonFitScript_SequentialVariableSetup.py.in b/scripts/templates/GeneratePythonFitScript_SequentialVariableSetup.py.in new file mode 100644 index 000000000000..a88650bd6b37 --- /dev/null +++ b/scripts/templates/GeneratePythonFitScript_SequentialVariableSetup.py.in @@ -0,0 +1,17 @@ +# A python script generated to perform a sequential fit +from mantid.api import AnalysisDataService +from mantid.simpleapi import Fit, GroupWorkspaces +import matplotlib.pyplot as plt + +# List of tuples [ (workspace_name, workspace_index, start_x, end_x) ] +input_data = {{input_list}} + +# Fit function as a string +function = {{function_string}} + +# Fitting options +max_iterations = {{max_iterations}} +minimizer = "{{minimizer}}" +cost_function = "{{cost_function}}" +evaluation_type = "{{evaluation_type}}" +output_base_name = "{{output_base_name}}" diff --git a/scripts/templates/GeneratePythonFitScript_VariableSetup.py.in b/scripts/templates/GeneratePythonFitScript_SimultaneousVariableSetup.py.in similarity index 74% rename from 
scripts/templates/GeneratePythonFitScript_VariableSetup.py.in rename to scripts/templates/GeneratePythonFitScript_SimultaneousVariableSetup.py.in index 8015815d58be..fb79ab1dbed8 100644 --- a/scripts/templates/GeneratePythonFitScript_VariableSetup.py.in +++ b/scripts/templates/GeneratePythonFitScript_SimultaneousVariableSetup.py.in @@ -1,5 +1,6 @@ -# A python script generated to perform a sequential or simultaneous fit -from mantid.simpleapi import * +# A python script generated to perform a simultaneous fit +from mantid.api import AnalysisDataService +from mantid.simpleapi import Fit import matplotlib.pyplot as plt # List of tuples [ (workspace_name, workspace_index, start_x, end_x) ] diff --git a/scripts/templates/reference/generate_sequential_fit_script_reference_file.py b/scripts/templates/reference/generate_sequential_fit_script_reference_file.py index 53857428da63..326fdab7f0c3 100644 --- a/scripts/templates/reference/generate_sequential_fit_script_reference_file.py +++ b/scripts/templates/reference/generate_sequential_fit_script_reference_file.py @@ -1,5 +1,6 @@ -# A python script generated to perform a sequential or simultaneous fit -from mantid.simpleapi import * +# A python script generated to perform a sequential fit +from mantid.api import AnalysisDataService +from mantid.simpleapi import Fit, GroupWorkspaces import matplotlib.pyplot as plt # List of tuples [ (workspace_name, workspace_index, start_x, end_x) ] diff --git a/scripts/templates/reference/generate_simultaneous_fit_script_reference_file.py b/scripts/templates/reference/generate_simultaneous_fit_script_reference_file.py index 85bf965f9b21..cdc89f816e82 100644 --- a/scripts/templates/reference/generate_simultaneous_fit_script_reference_file.py +++ b/scripts/templates/reference/generate_simultaneous_fit_script_reference_file.py @@ -1,5 +1,6 @@ -# A python script generated to perform a sequential or simultaneous fit -from mantid.simpleapi import * +# A python script generated to perform a 
simultaneous fit +from mantid.api import AnalysisDataService +from mantid.simpleapi import Fit import matplotlib.pyplot as plt # List of tuples [ (workspace_name, workspace_index, start_x, end_x) ] diff --git a/scripts/test/ConvertToWavelengthTest.py b/scripts/test/ConvertToWavelengthTest.py index 87b9d3c8104b..344c635a2fc9 100644 --- a/scripts/test/ConvertToWavelengthTest.py +++ b/scripts/test/ConvertToWavelengthTest.py @@ -5,7 +5,7 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + import unittest -from mantid.simpleapi import * +from mantid.simpleapi import CloneWorkspace, CreateWorkspace, DeleteWorkspace, Load from isis_reflectometry.convert_to_wavelength import ConvertToWavelength diff --git a/scripts/test/DirectEnergyConversionTest.py b/scripts/test/DirectEnergyConversionTest.py index 8534c3b37a00..3e043edbcaea 100644 --- a/scripts/test/DirectEnergyConversionTest.py +++ b/scripts/test/DirectEnergyConversionTest.py @@ -5,13 +5,31 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + import unittest +import os import Direct.dgreduce as dgreduce from Direct.DirectEnergyConversion import DirectEnergyConversion from Direct.PropertyManager import PropertyManager -from mantid import api -from mantid.simpleapi import * +from mantid import api, FileFinder, mtd +from mantid.simpleapi import ( + AddSampleLog, + CloneWorkspace, + ConvertUnits, + CompareWorkspaces, + CreateSampleWorkspace, + CreateWorkspace, + ExtractSingleSpectrum, + GetEi, + Load, + LoadInstrument, + MoveInstrumentComponent, + NormaliseByCurrent, + Rebin, + RenameWorkspace, + ScaleX, + SetInstrumentParameter, +) # ----------------------------------------------------------------------------------------------------------------------------------------- diff --git a/scripts/test/DirectPropertyManagerTest.py b/scripts/test/DirectPropertyManagerTest.py index 
d919eb55c72e..709ceec4ad14 100644 --- a/scripts/test/DirectPropertyManagerTest.py +++ b/scripts/test/DirectPropertyManagerTest.py @@ -7,13 +7,24 @@ import unittest from sys import platform import numpy as np -from Direct.AbsorptionShapes import * +import os +from Direct.AbsorptionShapes import Cylinder from Direct.PropertyManager import PropertyManager from Direct.RunDescriptor import RunDescriptor from mantid import api -from mantid.simpleapi import * +from mantid.api import mtd +from mantid.simpleapi import ( + config, + AddSampleLog, + AddTimeSeriesLog, + CloneWorkspace, + ConvertUnits, + CreateSampleWorkspace, + LoadEmptyInstrument, + SetInstrumentParameter, +) # ----------------------------------------------------------------------------------------------------------------------------------------- diff --git a/scripts/test/DirectReductionHelpersTest.py b/scripts/test/DirectReductionHelpersTest.py index d894cfaaa0cf..4541b39415f3 100644 --- a/scripts/test/DirectReductionHelpersTest.py +++ b/scripts/test/DirectReductionHelpersTest.py @@ -9,7 +9,8 @@ import Direct.ReductionHelpers as helpers from mantid import api -from mantid.simpleapi import * +from mantid.api import mtd +from mantid.simpleapi import LoadEmptyInstrument class SomeDescriptor(object): diff --git a/scripts/test/MariReduction.py b/scripts/test/MariReduction.py index 26e745fa5184..e273d1ec647e 100644 --- a/scripts/test/MariReduction.py +++ b/scripts/test/MariReduction.py @@ -6,9 +6,12 @@ # SPDX - License - Identifier: GPL - 3.0 + """Sample MARI reduction scrip used in testing ReductionWrapper""" -from Direct.AbsorptionShapes import * +from Direct.AbsorptionShapes import Cylinder +from Direct.DirectEnergyConversion import DirectEnergyConversion +from Direct.PropertyManager import PropertyManager +from Direct.ReductionWrapper import AdvancedProperties, iliad, MainProperties, ReductionWrapper -from Direct.ReductionWrapper import * +from types import MethodType try: import reduce_vars as web_var diff 
--git a/scripts/test/ReductionSettingsTest.py b/scripts/test/ReductionSettingsTest.py index d572a5d440d7..8aa8b2c62524 100644 --- a/scripts/test/ReductionSettingsTest.py +++ b/scripts/test/ReductionSettingsTest.py @@ -5,8 +5,8 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + import unittest -from mantid.simpleapi import * -from reduction_settings import * +from mantid.kernel import PropertyManagerDataService +from reduction_settings import get_settings_object class BasicSettingsObjectUsageTest(unittest.TestCase): diff --git a/scripts/test/ReductionWrapperTest.py b/scripts/test/ReductionWrapperTest.py index a0e409e13bdb..7e9bfbef219d 100644 --- a/scripts/test/ReductionWrapperTest.py +++ b/scripts/test/ReductionWrapperTest.py @@ -10,10 +10,12 @@ import unittest import importlib as imp -from mantid.simpleapi import * +from mantid.api import mtd +from mantid.simpleapi import AddSampleLog, CreateSampleWorkspace from mantid import api, config -from Direct.ReductionWrapper import * +from Direct.PropertyManager import PropertyManager +from Direct.ReductionWrapper import iliad, ReductionWrapper import MariReduction as mr diff --git a/scripts/test/ReflectometryQuickAuxiliaryTest.py b/scripts/test/ReflectometryQuickAuxiliaryTest.py index 12692a42def9..f83fad2728ae 100644 --- a/scripts/test/ReflectometryQuickAuxiliaryTest.py +++ b/scripts/test/ReflectometryQuickAuxiliaryTest.py @@ -8,7 +8,8 @@ from isis_reflectometry import quick -from mantid.simpleapi import * +from mantid.api import mtd +from mantid.simpleapi import CreateSingleValuedWorkspace, CreateWorkspace, DeleteWorkspace, LoadISISNexus from mantid.kernel import ConfigService diff --git a/scripts/test/RunDescriptorTest.py b/scripts/test/RunDescriptorTest.py index 38780c096e12..27e9c5e28b72 100644 --- a/scripts/test/RunDescriptorTest.py +++ b/scripts/test/RunDescriptorTest.py @@ -7,10 +7,24 @@ import unittest from Direct.PropertyManager import 
PropertyManager -from Direct.RunDescriptor import * +from Direct.RunDescriptor import RunDescriptor +import os from mantid import api -from mantid.simpleapi import * +from mantid.api import mtd +from mantid.simpleapi import ( + config, + AddSampleLog, + CloneWorkspace, + CompareWorkspaces, + ConvertToEventWorkspace, + CreateSampleWorkspace, + DeleteWorkspace, + ExtractMonitors, + LoadEmptyInstrument, + Rebin, + RenameWorkspace, +) # ----------------------------------------------------------------------------------------------------------------------------------------- diff --git a/scripts/test/SANS/SANSBatchModeTest.py b/scripts/test/SANS/SANSBatchModeTest.py index 0f1b17e33fce..0eef800ae8f6 100644 --- a/scripts/test/SANS/SANSBatchModeTest.py +++ b/scripts/test/SANS/SANSBatchModeTest.py @@ -4,8 +4,11 @@ # NScD Oak Ridge National Laboratory, European Spallation Source, # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + +import copy import unittest -from ISISCommandInterface import * +import os +from mantid import config +from ISISCommandInterface import Clean, LOQ, ReductionSingleton from isis_reduction_steps import UserFile import SANSBatchMode as bm diff --git a/scripts/test/SANS/SANSCentreFinderTest.py b/scripts/test/SANS/SANSCentreFinderTest.py index 37c6988db155..761cb078296a 100644 --- a/scripts/test/SANS/SANSCentreFinderTest.py +++ b/scripts/test/SANS/SANSCentreFinderTest.py @@ -10,7 +10,8 @@ import ISISCommandInterface as command_iface import centre_finder as cf -from mantid.simpleapi import * +from mantid.api import mtd +from mantid.simpleapi import AddSampleLog, CreateSampleWorkspace class SANSBeamCentrePositionUpdater(unittest.TestCase): diff --git a/scripts/test/SANS/SANSCommandInterfaceTest.py b/scripts/test/SANS/SANSCommandInterfaceTest.py index 450b399af39a..f3b69cb0786b 100644 --- a/scripts/test/SANS/SANSCommandInterfaceTest.py +++ b/scripts/test/SANS/SANSCommandInterfaceTest.py @@ -5,6 
+5,7 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + import math +import os import random import unittest @@ -12,8 +13,8 @@ import ISISCommandInterface as command_iface import isis_reduction_steps as reduction_steps -from mantid.kernel import DateAndTime -from mantid.simpleapi import * +from mantid.kernel import config, DateAndTime +from mantid.simpleapi import AddTimeSeriesLog, CreateSampleWorkspace, DeleteWorkspace, SaveNexus class SANSCommandInterfaceGetAndSetTransmissionSettings(unittest.TestCase): diff --git a/scripts/test/SANS/SansIsisGuiSettings.py b/scripts/test/SANS/SansIsisGuiSettings.py index 7c776442f9cc..3f729b138b0d 100644 --- a/scripts/test/SANS/SansIsisGuiSettings.py +++ b/scripts/test/SANS/SansIsisGuiSettings.py @@ -5,7 +5,7 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + import unittest -from mantid.simpleapi import * +from mantid.simpleapi import config import ISISCommandInterface as i MASKFILE = "MaskSANS2D.txt" diff --git a/scripts/test/SANSDarkRunCorrectionTest.py b/scripts/test/SANSDarkRunCorrectionTest.py index 581070d92cb4..d4578c41f6c6 100644 --- a/scripts/test/SANSDarkRunCorrectionTest.py +++ b/scripts/test/SANSDarkRunCorrectionTest.py @@ -5,7 +5,8 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + import unittest -from mantid.simpleapi import * +from mantid.api import AlgorithmManager, mtd +from mantid.simpleapi import CreateSampleWorkspace from mantid.kernel import DateAndTime import DarkRunCorrection as dc diff --git a/scripts/test/SANSIsisInstrumentTest.py b/scripts/test/SANSIsisInstrumentTest.py index 286d564166d5..fe9c1c11262f 100644 --- a/scripts/test/SANSIsisInstrumentTest.py +++ b/scripts/test/SANSIsisInstrumentTest.py @@ -7,7 +7,8 @@ import unittest import isis_instrument as instruments -from mantid.simpleapi import * 
+from mantid.api import mtd +from mantid.simpleapi import CreateSampleWorkspace, DeleteWorkspace, SetInstrumentParameter class SANSIsisInstrumentTest(unittest.TestCase): diff --git a/scripts/test/SANSUtilityTest.py b/scripts/test/SANSUtilityTest.py index c53c82c01b39..fefba15b5526 100644 --- a/scripts/test/SANSUtilityTest.py +++ b/scripts/test/SANSUtilityTest.py @@ -25,7 +25,18 @@ ) # Need to import mantid before we import SANSUtility -from mantid.simpleapi import * +from mantid.simpleapi import ( + config, + AddTimeSeriesLog, + CloneWorkspace, + CreateSampleWorkspace, + CreateWorkspace, + DeleteWorkspace, + GroupWorkspaces, + Load, + MaskDetectors, + SaveNexusProcessed, +) TEST_STRING_DATA = "SANS2D0003434-add" + su.ADDED_EVENT_DATA_TAG TEST_STRING_MON = "SANS2D0003434-add_monitors" + su.ADDED_EVENT_DATA_TAG diff --git a/scripts/test/SettingsTest.py b/scripts/test/SettingsTest.py index f594acb978bc..42723f8d34fc 100644 --- a/scripts/test/SettingsTest.py +++ b/scripts/test/SettingsTest.py @@ -8,8 +8,6 @@ from isis_reflectometry import settings -from mantid.simpleapi import * - """ RAII Test helper class. Equivalent to the ScopedFileHelper. 
diff --git a/scripts/test/StitchingTest.py b/scripts/test/StitchingTest.py index cf19d951dfdb..634b293e09b0 100644 --- a/scripts/test/StitchingTest.py +++ b/scripts/test/StitchingTest.py @@ -5,7 +5,9 @@ # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + import unittest -from mantid.simpleapi import * +from mantid.api import mtd +from mantid.kernel import PropertyManagerDataService +from mantid.simpleapi import CreateWorkspace from LargeScaleStructures import data_stitching import numpy as np diff --git a/tools/CodeAnalysis/gcovr b/tools/CodeAnalysis/gcovr index 0e40c6e113ea..1dfe69a149ad 100644 --- a/tools/CodeAnalysis/gcovr +++ b/tools/CodeAnalysis/gcovr @@ -30,7 +30,6 @@ # import copy -import glob import os import re import subprocess @@ -39,7 +38,6 @@ import time import xml.dom.minidom from optparse import OptionParser -from string import Template from os.path import normpath __version__ = "2.4" @@ -52,26 +50,26 @@ source_re = re.compile("cannot open (source|graph) file") def version_str(): ans = __version__ - m = re.match('\$Revision:\s*(\S+)\s*\$', src_revision) + m = re.match(r"\$Revision:\s*(\S+)\s*\$", src_revision) if m: ans = ans + " (r%s)" % (m.group(1)) return ans + # # Container object for coverage statistics # class CoverageData(object): - def __init__(self, fname, uncovered, covered, branches, noncode): - self.fname=fname + self.fname = fname # Shallow copies are cheap & "safe" because the caller will # throw away their copies of covered & uncovered after calling # us exactly *once* self.uncovered = copy.copy(uncovered) - self.covered = copy.copy(covered) - self.noncode = copy.copy(noncode) + self.covered = copy.copy(covered) + self.noncode = copy.copy(noncode) # But, a deep copy is required here self.all_lines = copy.deepcopy(uncovered) self.all_lines.update(covered.keys()) @@ -83,7 +81,7 @@ class CoverageData(object): self.uncovered.update(uncovered) 
self.noncode.intersection_update(noncode) for k in covered.keys(): - self.covered[k] = self.covered.get(k,0) + covered[k] + self.covered[k] = self.covered.get(k, 0) + covered[k] for k in branches.keys(): for b in branches[k]: d = self.branches.setdefault(k, {}) @@ -110,33 +108,32 @@ class CoverageData(object): tmp.sort() first = None last = None - ranges=[] + ranges = [] for item in tmp: if last is None: - first=item - last=item - elif item == (last+1): - last=item + first = item + last = item + elif item == (last + 1): + last = item else: - if len(self.noncode.intersection(range(last+1,item))) \ - == item - last - 1: + if len(self.noncode.intersection(range(last + 1, item))) == item - last - 1: last = item continue - if first==last: + if first == last: ranges.append(str(first)) else: - ranges.append(str(first)+"-"+str(last)) - first=item - last=item - if first==last: + ranges.append(str(first) + "-" + str(last)) + first = item + last = item + if first == last: ranges.append(str(first)) else: - ranges.append(str(first)+"-"+str(last)) + ranges.append(str(first) + "-" + str(last)) return ",".join(ranges) def coverage(self): - if ( options.show_branch ): + if options.show_branch: total = 0 cover = 0 for line in self.branches.keys(): @@ -147,30 +144,28 @@ class CoverageData(object): total = len(self.all_lines) cover = len(self.covered) - percent = total and str(int(100.0*cover/total)) or "--" + percent = total and str(int(100.0 * cover / total)) or "--" return (total, cover, percent) def summary(self): - tmp = options.filter.sub('',self.fname) + tmp = options.filter.sub("", self.fname) if not self.fname.endswith(tmp): # Do no truncation if the filter does not start matching at # the beginning of the string tmp = self.fname tmp = tmp.ljust(40) if len(tmp) > 40: - tmp=tmp+"\n"+" "*40 + tmp = tmp + "\n" + " " * 40 (total, cover, percent) = self.coverage() - return ( total, cover, - tmp + str(total).rjust(8) + str(cover).rjust(8) + - percent.rjust(6) + "% " + 
self.uncovered_str() ) + return (total, cover, tmp + str(total).rjust(8) + str(cover).rjust(8) + percent.rjust(6) + "% " + self.uncovered_str()) def resolve_symlinks(orig_path): """ Return the normalized absolute path name with all symbolic links resolved """ - drive,tmp = os.path.splitdrive(os.path.abspath(orig_path)) + drive, tmp = os.path.splitdrive(os.path.abspath(orig_path)) if not drive: drive = os.path.sep parts = tmp.split(os.path.sep) @@ -180,8 +175,7 @@ def resolve_symlinks(orig_path): if not os.path.islink(os.path.join(*actual_path)): continue actual_path[-1] = os.readlink(os.path.join(*actual_path)) - tmp_drive, tmp_path = os.path.splitdrive( - resolve_symlinks(os.path.join(*actual_path)) ) + tmp_drive, tmp_path = os.path.splitdrive(resolve_symlinks(os.path.join(*actual_path))) if tmp_drive: drive = tmp_drive actual_path = [drive] + tmp_path.split(os.path.sep) @@ -189,8 +183,7 @@ def resolve_symlinks(orig_path): def path_startswith(path, base): - return path.startswith(base) and ( - len(base) == len(path) or path[len(base)] == os.path.sep ) + return path.startswith(base) and (len(base) == len(path) or path[len(base)] == os.path.sep) class PathAliaser(object): @@ -204,7 +197,7 @@ class PathAliaser(object): while True: for base, alias in self.aliases.items(): if path_startswith(path, base): - path = alias + path[len(base):] + path = alias + path[len(base) :] match_found = True break for master_base in self.master_targets: @@ -214,7 +207,8 @@ class PathAliaser(object): sys.stderr.write( "(ERROR) violating fundamental assumption while walking " "directory tree.\n\tPlease report this to the gcovr " - "developers.\n" ) + "developers.\n" + ) return path, None, match_found def unalias_path(self, path): @@ -224,7 +218,7 @@ class PathAliaser(object): return path # Try and resolve the preferred name for this location if master_base in self.preferred_name: - return self.preferred_name[master_base] + path[len(master_base):] + return self.preferred_name[master_base] 
+ path[len(master_base) :] return path def add_master_target(self, master): @@ -236,6 +230,7 @@ class PathAliaser(object): def set_preferred(self, master, preferred): self.preferred_name[master] = preferred + aliases = PathAliaser() # This is UGLY. Here's why: UNIX resolves symbolic links by walking the @@ -276,25 +271,18 @@ def link_walker(path): while targets: target_dir = targets.pop(0) actual_dir = resolve_symlinks(target_dir) - #print "target dir: %s (%s)" % (target_dir, actual_dir) master_name, master_base, visited = aliases.master_path(actual_dir) if visited: - #print " ...root already visited as %s" % master_name aliases.add_alias(target_dir, master_name) continue if master_name != target_dir: aliases.set_preferred(master_name, target_dir) aliases.add_alias(target_dir, master_name) aliases.add_master_target(master_name) - #print " ...master name = %s" % master_name - #print " ...walking %s" % target_dir for root, dirs, files in os.walk(target_dir, topdown=True): - #print " ...reading %s" % root for d in dirs: tmp = os.path.abspath(os.path.join(root, d)) - #print " ...checking %s" % tmp if os.path.islink(tmp): - #print " ...buffering link %s" % tmp targets.append(tmp) yield root, dirs, files @@ -309,15 +297,15 @@ def search_file(expr, path): if path is None or path == ".": path = os.getcwd() elif not os.path.exists(path): - raise IOError("Unknown directory '"+path+"'") + raise IOError("Unknown directory '" + path + "'") for root, dirs, files in link_walker(path): for name in files: if pattern.match(name): - name = os.path.join(root,name) + name = os.path.join(root, name) if os.path.islink(name): - ans.append( os.path.abspath(os.readlink(name)) ) + ans.append(os.path.abspath(os.readlink(name))) else: - ans.append( os.path.abspath(name) ) + ans.append(os.path.abspath(name)) return ans @@ -325,38 +313,34 @@ def search_file(expr, path): # Get the list of datafiles in the directories specified by the user # def get_datafiles(flist, options): - allfiles=[] + 
allfiles = [] for dir in flist: if options.verbose: - sys.stdout.write( "Scanning directory %s for gcda/gcno files...\n" - % (dir, ) ) - files = search_file(".*\.gc(da|no)$", dir) + sys.stdout.write("Scanning directory %s for gcda/gcno files...\n" % (dir,)) + files = search_file(r".*\.gc(da|no)$", dir) # gcno files will *only* produce uncovered results; however, # that is useful information for the case where a compilation # unit is never actually exercised by the test code. So, we # will process gcno files, but ONLY if there is no corresponding # gcda file. - gcda_files = [file for file in files if file.endswith('gcda')] + gcda_files = [file for file in files if file.endswith("gcda")] tmp = set(gcda_files) - gcno_files = [ file for file in files if - file.endswith('gcno') and file[:-2]+'da' not in tmp ] + gcno_files = [file for file in files if file.endswith("gcno") and file[:-2] + "da" not in tmp] if options.verbose: - sys.stdout.write( - "Found %d files (and will process %d)\n" % - ( len(files), len(gcda_files) + len(gcno_files) ) ) + sys.stdout.write("Found %d files (and will process %d)\n" % (len(files), len(gcda_files) + len(gcno_files))) allfiles.extend(gcda_files) allfiles.extend(gcno_files) return allfiles -def process_gcov_data(file, covdata, options): - INPUT = open(file,"r") +def process_gcov_data(file, covdata, options): # noqa: C901 + INPUT = open(file, "r") # # Get the filename # line = INPUT.readline() - segments=line.split(':',3) - if len(segments) != 4 or not segments[2].lower().strip().endswith('source'): + segments = line.split(":", 3) + if len(segments) != 4 or not segments[2].lower().strip().endswith("source"): raise RuntimeError('Fatal error parsing gcov file, line 1: \n\t"%s"' % line.rstrip()) fname = aliases.unalias_path(os.path.abspath((segments[-1]).strip())) if options.verbose: @@ -371,10 +355,12 @@ def process_gcov_data(file, covdata, options): # # Return if the filename matches the exclude pattern # - for i in 
range(0,len(options.exclude)): - if options.exclude[i].match(options.filter.sub('',fname)) or \ - options.exclude[i].match(fname) or \ - options.exclude[i].match(os.path.abspath(fname)): + for i in range(0, len(options.exclude)): + if ( + options.exclude[i].match(options.filter.sub("", fname)) + or options.exclude[i].match(fname) + or options.exclude[i].match(os.path.abspath(fname)) + ): if options.verbose: sys.stdout.write(" Excluding coverage data for file %s\n" % fname) return @@ -382,32 +368,30 @@ def process_gcov_data(file, covdata, options): # Parse each line, and record the lines # that are uncovered # - noncode = set() + noncode = set() uncovered = set() - covered = {} - branches = {} - #first_record=True + covered = {} + branches = {} lineno = 0 for line in INPUT: - segments=line.split(":",2) + segments = line.split(":", 2) tmp = segments[0].strip() if len(segments) > 1: try: lineno = int(segments[1].strip()) except: - pass # keep previous line number! + pass # keep previous line number! 
- if tmp[0] == '#': - uncovered.add( lineno ) + if tmp[0] == "#": + uncovered.add(lineno) elif tmp[0] in "0123456789": covered[lineno] = int(segments[0].strip()) - elif tmp[0] == '-': + elif tmp[0] == "-": # remember certain non-executed lines code = segments[2].strip() - if len(code) == 0 or code == "{" or code == "}" or \ - code.startswith("//") or code == 'else': - noncode.add( lineno ) - elif tmp.startswith('branch'): + if len(code) == 0 or code == "{" or code == "}" or code.startswith("//") or code == "else": + noncode.add(lineno) + elif tmp.startswith("branch"): fields = line.split() try: count = int(fields[3]) @@ -415,37 +399,30 @@ def process_gcov_data(file, covdata, options): except: # We ignore branches that were "never executed" pass - elif tmp.startswith('call'): + elif tmp.startswith("call"): pass - elif tmp.startswith('function'): + elif tmp.startswith("function"): pass - elif tmp[0] == 'f': + elif tmp[0] == "f": pass - #if first_record: - #first_record=False - #uncovered.add(prev) - #if prev in uncovered: - #tokens=re.split('[ \t]+',tmp) - #if tokens[3] != "0": - #uncovered.remove(prev) - #prev = int(segments[1].strip()) - #first_record=True else: sys.stderr.write( "(WARNING) Unrecognized GCOV output: '%s'\n" "\tThis is indicitive of a gcov output parse error.\n" - "\tPlease report this to the gcovr developers." % tmp ) + "\tPlease report this to the gcovr developers." % tmp + ) # # If the file is already in covdata, then we # remove lines that are covered here. 
Otherwise, # initialize covdata # if fname not in covdata: - covdata[fname] = CoverageData(fname,uncovered,covered,branches,noncode) + covdata[fname] = CoverageData(fname, uncovered, covered, branches, noncode) else: - covdata[fname].update(uncovered,covered,branches,noncode) + covdata[fname].update(uncovered, covered, branches, noncode) INPUT.close() + # # Process a datafile (generated by running the instrumented application) # and run gcov with the corresponding arguments @@ -473,17 +450,16 @@ def process_gcov_data(file, covdata, options): # -def process_datafile(filename, covdata, options): +def process_datafile(filename, covdata, options): # noqa: C901 # # Launch gcov # abs_filename = os.path.abspath(filename) - (dirname,fname) = os.path.split(abs_filename) - #(name,ext) = os.path.splitext(base) + (dirname, fname) = os.path.split(abs_filename) potential_wd = [] starting_dir = os.getcwd() - errors=[] + errors = [] Done = False if options.objdir: @@ -493,40 +469,34 @@ def process_datafile(filename, covdata, options): while idx <= len(components): if idx > len(src_components): break - if components[-1*idx] != src_components[-1*idx]: + if components[-1 * idx] != src_components[-1 * idx]: break idx += 1 if idx > len(components): - pass # a parent dir; the normal process will find it - elif components[-1*idx] == '..': - dirs = [ os.path.join(src_components[:len(src_components)-idx+1]) ] - while idx <= len(components) and components[-1*idx] == '..': + pass # a parent dir; the normal process will find it + elif components[-1 * idx] == "..": + dirs = [os.path.join(src_components[: len(src_components) - idx + 1])] + while idx <= len(components) and components[-1 * idx] == "..": tmp = [] for d in dirs: for f in os.listdir(d): - x = os.path.join(d,f) + x = os.path.join(d, f) if os.path.isdir(x): tmp.append(x) dirs = tmp idx += 1 potential_wd = dirs else: - if components[0] == '': + if components[0] == "": # absolute path - tmp = [ options.objdir ] + tmp = [options.objdir] 
else: # relative path: check relative to both the cwd and the # gcda file - tmp = [ os.path.join(x, options.objdir) for x in - [os.path.dirname(abs_filename), os.getcwd()] ] - potential_wd = [ testdir for testdir in tmp - if os.path.isdir(testdir) ] + tmp = [os.path.join(x, options.objdir) for x in [os.path.dirname(abs_filename), os.getcwd()]] + potential_wd = [testdir for testdir in tmp if os.path.isdir(testdir)] if len(potential_wd) == 0: - errors.append("ERROR: cannot identify the location where GCC " - "was run using --object-directory=%s\n" % - options.objdir) - # Revert to the normal - #sys.exit(1) + errors.append("ERROR: cannot identify the location where GCC " "was run using --object-directory=%s\n" % options.objdir) # no objdir was specified (or it was a parent dir); walk up the dir tree if len(potential_wd) == 0: @@ -537,13 +507,11 @@ def process_datafile(filename, covdata, options): if wd == potential_wd[-1]: break - cmd = [ gcov_cmd, abs_filename, - "--branch-counts", "--branch-probabilities", "--preserve-paths", - '--object-directory', dirname ] + cmd = [gcov_cmd, abs_filename, "--branch-counts", "--branch-probabilities", "--preserve-paths", "--object-directory", dirname] # NB: We are lazy English speakers, so we will only parse English output env = dict(os.environ) - env['LC_ALL'] = 'en_US' + env["LC_ALL"] = "en_US" while len(potential_wd) > 0 and not Done: # NB: either len(potential_wd) == 1, or all entires are absolute @@ -551,19 +519,14 @@ def process_datafile(filename, covdata, options): # iteration. 
os.chdir(potential_wd.pop(0)) - #if options.objdir: - # cmd.extend(["--object-directory", Template(options.objdir).substitute(filename=filename, head=dirname, tail=base, root=name, ext=ext)]) - if options.verbose: - sys.stdout.write("Running gcov: '%s' in '%s'\n" % ( ' '.join(cmd), os.getcwd() )) - (out, err) = subprocess.Popen( cmd, env=env, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE ).communicate() - out=out.decode('utf-8') - err=err.decode('utf-8') + sys.stdout.write("Running gcov: '%s' in '%s'\n" % (" ".join(cmd), os.getcwd())) + (out, err) = subprocess.Popen(cmd, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate() + out = out.decode("utf-8") + err = err.decode("utf-8") # find the files that gcov created - gcov_files = {'active':[], 'filter':[], 'exclude':[]} + gcov_files = {"active": [], "filter": [], "exclude": []} for line in out.splitlines(): found = output_re.search(line.strip()) if found is not None: @@ -571,27 +534,29 @@ def process_datafile(filename, covdata, options): if not options.gcov_filter.match(fname): if options.verbose: sys.stdout.write("Filtering gcov file %s\n" % fname) - gcov_files['filter'].append(fname) + gcov_files["filter"].append(fname) continue - exclude=False - for i in range(0,len(options.gcov_exclude)): - if options.gcov_exclude[i].match(options.gcov_filter.sub('',fname)) or \ - options.gcov_exclude[i].match(fname) or \ - options.gcov_exclude[i].match(os.path.abspath(fname)): - exclude=True + exclude = False + for i in range(0, len(options.gcov_exclude)): + if ( + options.gcov_exclude[i].match(options.gcov_filter.sub("", fname)) + or options.gcov_exclude[i].match(fname) + or options.gcov_exclude[i].match(os.path.abspath(fname)) + ): + exclude = True break if not exclude: - gcov_files['active'].append(fname) + gcov_files["active"].append(fname) elif options.verbose: sys.stdout.write("Excluding gcov file %s\n" % fname) - gcov_files['exclude'].append(fname) + gcov_files["exclude"].append(fname) if 
source_re.search(err): # gcov tossed errors: try the next potential_wd errors.append(err) else: # Process *.gcov files - for fname in gcov_files['active']: + for fname in gcov_files["active"]: process_gcov_data(fname, covdata, options) Done = True @@ -602,15 +567,16 @@ def process_datafile(filename, covdata, options): os.chdir(starting_dir) if options.delete: - if not abs_filename.endswith('gcno'): + if not abs_filename.endswith("gcno"): os.remove(abs_filename) if not Done: sys.stderr.write( "(WARNING) GCOV produced the following errors processing %s:\n" "\t %s" - "\t(gcovr could not infer a working directory that resolved it.)\n" - % ( filename, "\t ".join(errors) ) ) + "\t(gcovr could not infer a working directory that resolved it.)\n" % (filename, "\t ".join(errors)) + ) + # # Produce the classic gcovr text report @@ -625,7 +591,7 @@ def print_text_report(covdata): def _percent_uncovered(key): (total, covered, percent) = covdata[key].coverage() if covered: - return -1.0*covered/total + return -1.0 * covered / total else: return total or 1e6 @@ -633,45 +599,44 @@ def print_text_report(covdata): return key if options.output: - OUTPUT = open(options.output,'w') + OUTPUT = open(options.output, "w") else: OUTPUT = sys.stdout - total_lines=0 - total_covered=0 + total_lines = 0 + total_covered = 0 # Header - OUTPUT.write("-"*78 + '\n') + OUTPUT.write("-" * 78 + "\n") a = options.show_branch and "Branch" or "Lines" b = options.show_branch and "Taken" or "Exec" - OUTPUT.write("File".ljust(40) + a.rjust(8) + b.rjust(8)+ " Cover Missing\n") - OUTPUT.write("-"*78 + '\n') + OUTPUT.write("File".ljust(40) + a.rjust(8) + b.rjust(8) + " Cover Missing\n") + OUTPUT.write("-" * 78 + "\n") # Data keys = list(covdata.keys()) - keys.sort(key=options.sort_uncovered and _num_uncovered or - options.sort_percent and _percent_uncovered or _alpha) + keys.sort(key=options.sort_uncovered and _num_uncovered or options.sort_percent and _percent_uncovered or _alpha) for key in keys: (t, n, txt) 
= covdata[key].summary() total_lines += t total_covered += n - OUTPUT.write(txt + '\n') + OUTPUT.write(txt + "\n") # Footer & summary - OUTPUT.write("-"*78 + '\n') - percent = total_lines and str(int(100.0*total_covered/total_lines)) or "--" - OUTPUT.write("TOTAL".ljust(40) + str(total_lines).rjust(8) + - str(total_covered).rjust(8) + str(percent).rjust(6)+"%" + '\n') - OUTPUT.write("-"*78 + '\n') + OUTPUT.write("-" * 78 + "\n") + percent = total_lines and str(int(100.0 * total_covered / total_lines)) or "--" + OUTPUT.write("TOTAL".ljust(40) + str(total_lines).rjust(8) + str(total_covered).rjust(8) + str(percent).rjust(6) + "%" + "\n") + OUTPUT.write("-" * 78 + "\n") # Close logfile if options.output: OUTPUT.close() + # # Produce an XML report in the Cobertura format # -def print_xml_report(covdata): +def print_xml_report(covdata): # noqa: C901 branchTotal = 0 branchCovered = 0 lineTotal = 0 @@ -690,17 +655,13 @@ def print_xml_report(covdata): lineCovered += covered impl = xml.dom.minidom.getDOMImplementation() - docType = impl.createDocumentType( - "coverage", None, - "http://cobertura.sourceforge.net/xml/coverage-03.dtd" ) + docType = impl.createDocumentType("coverage", None, "http://cobertura.sourceforge.net/xml/coverage-03.dtd") doc = impl.createDocument(None, "coverage", docType) root = doc.documentElement - root.setAttribute( "line-rate", lineTotal == 0 and '0.0' or - str(float(lineCovered) / lineTotal) ) - root.setAttribute( "branch-rate", branchTotal == 0 and '0.0' or - str(float(branchCovered) / branchTotal) ) - root.setAttribute( "timestamp", str(int(time.time())) ) - root.setAttribute( "version", "gcovr %s" % (version_str(),) ) + root.setAttribute("line-rate", lineTotal == 0 and "0.0" or str(float(lineCovered) / lineTotal)) + root.setAttribute("branch-rate", branchTotal == 0 and "0.0" or str(float(branchCovered) / branchTotal)) + root.setAttribute("timestamp", str(int(time.time()))) + root.setAttribute("version", "gcovr %s" % (version_str(),)) # Generate 
the element: this is either the root directory # (specified by --root), or the CWD. @@ -716,13 +677,13 @@ def print_xml_report(covdata): keys = sorted(covdata.keys()) for f in keys: data = covdata[f] - dir = options.filter.sub('',f) + dir = options.filter.sub("", f) if f.endswith(dir): - src_path = f[:-1*len(dir)] + src_path = f[: -1 * len(dir)] if len(src_path) > 0: while dir.startswith(os.path.sep): src_path += os.path.sep - dir = dir[len(os.path.sep):] + dir = dir[len(os.path.sep) :] source_dirs.add(src_path) else: # Do no truncation if the filter does not start matching at @@ -730,9 +691,7 @@ def print_xml_report(covdata): dir = f (dir, fname) = os.path.split(dir) - package = packages.setdefault( - dir, [ doc.createElement("package"), {}, - 0, 0, 0, 0 ] ) + package = packages.setdefault(dir, [doc.createElement("package"), {}, 0, 0, 0, 0]) c = doc.createElement("class") lines = doc.createElement("lines") c.appendChild(lines) @@ -757,15 +716,13 @@ def print_xml_report(covdata): for v in branches.values(): if v > 0: b_hits += 1 - coverage = 100*b_hits/len(branches) + coverage = 100 * b_hits / len(branches) l.setAttribute("branch", "true") - l.setAttribute( "condition-coverage", - "%i%% (%i/%i)" % - (coverage, b_hits, len(branches)) ) - cond = doc.createElement('condition') + l.setAttribute("condition-coverage", "%i%% (%i/%i)" % (coverage, b_hits, len(branches))) + cond = doc.createElement("condition") cond.setAttribute("number", "0") cond.setAttribute("type", "jump") - cond.setAttribute("coverage", "%i%%" % ( coverage ) ) + cond.setAttribute("coverage", "%i%%" % (coverage)) class_branch_hits += b_hits class_branches += float(len(branches)) conditions = doc.createElement("conditions") @@ -774,12 +731,11 @@ def print_xml_report(covdata): lines.appendChild(l) - className = fname.replace('.', '_') + className = fname.replace(".", "_") c.setAttribute("name", className) c.setAttribute("filename", os.path.join(dir, fname)) - c.setAttribute("line-rate", str(class_hits / 
(1.0*class_lines or 1.0))) - c.setAttribute( "branch-rate", - str(class_branch_hits / (1.0*class_branches or 1.0)) ) + c.setAttribute("line-rate", str(class_hits / (1.0 * class_lines or 1.0))) + c.setAttribute("branch-rate", str(class_branch_hits / (1.0 * class_branches or 1.0))) c.setAttribute("complexity", "0.0") package[1][className] = c @@ -796,9 +752,9 @@ def print_xml_report(covdata): classNames = sorted(packageData[1].keys()) for className in classNames: classes.appendChild(packageData[1][className]) - package.setAttribute("name", packageName.replace(os.sep, '.')) - package.setAttribute("line-rate", str(packageData[2]/(1.0*packageData[3] or 1.0))) - package.setAttribute( "branch-rate", str(packageData[4] / (1.0*packageData[5] or 1.0) )) + package.setAttribute("name", packageName.replace(os.sep, ".")) + package.setAttribute("line-rate", str(packageData[2] / (1.0 * packageData[3] or 1.0))) + package.setAttribute("branch-rate", str(packageData[4] / (1.0 * packageData[5] or 1.0))) package.setAttribute("complexity", "0.0") # Populate the element: this is either the root directory @@ -813,29 +769,28 @@ def print_xml_report(covdata): for d in source_dirs: source = doc.createElement("source") if d.startswith(cwd): - reldir = d[len(cwd):].lstrip(os.path.sep) + reldir = d[len(cwd) :].lstrip(os.path.sep) elif cwd.startswith(d): i = 1 - while normpath(d) != \ - normpath(os.path.join(*tuple([cwd]+['..']*i))): + while normpath(d) != normpath(os.path.join(*tuple([cwd] + [".."] * i))): i += 1 - reldir = os.path.join(*tuple(['..']*i)) + reldir = os.path.join(*tuple([".."] * i)) else: reldir = d source.appendChild(doc.createTextNode(reldir)) sources.appendChild(source) else: source = doc.createElement("source") - source.appendChild(doc.createTextNode('.')) + source.appendChild(doc.createTextNode(".")) sources.appendChild(source) xmlString = doc.toprettyxml() - #xml.dom.ext.PrettyPrint(doc) + # xml.dom.ext.PrettyPrint(doc) if options.output is None: - 
sys.stdout.write(xmlString+'\n') + sys.stdout.write(xmlString + "\n") else: - OUTPUT = open(options.output, 'w') - OUTPUT.write(xmlString +'\n') + OUTPUT = open(options.output, "w") + OUTPUT.write(xmlString + "\n") OUTPUT.close() @@ -847,83 +802,70 @@ def print_xml_report(covdata): # Create option parser # parser = OptionParser() -parser.add_option("--version", - help="Print the version number, then exit", - action="store_true", - dest="version", - default=False) -parser.add_option("-v","--verbose", - help="Print progress messages", - action="store_true", - dest="verbose", - default=False) -parser.add_option('--object-directory', - help="Specify the directory that contains the gcov data files. gcovr must be able to identify the path between the *.gcda files and the directory where gcc was originally run. Normally, gcovr can guess correctly. This option overrides gcovr's normal path detection and can specify either the path from gcc to the gcda file (i.e. what was passed to gcc's '-o' option), or the path from the gcda file to gcc's original working directory.", - action="store", - dest="objdir", - default=None) -parser.add_option("-o","--output", - help="Print output to this filename", - action="store", - dest="output", - default=None) -parser.add_option("-k","--keep", - help="Keep temporary gcov files", - action="store_true", - dest="keep", - default=False) -parser.add_option("-d","--delete", - help="Delete the coverage files after they are processed", - action="store_true", - dest="delete", - default=False) -parser.add_option("-f","--filter", - help="Keep only the data files that match this regular expression", - action="store", - dest="filter", - default=None) -parser.add_option("-e","--exclude", - help="Exclude data files that match this regular expression", - action="append", - dest="exclude", - default=[]) -parser.add_option("--gcov-filter", - help="Keep only gcov data files that match this regular expression", - action="store", - dest="gcov_filter", - 
default=None) -parser.add_option("--gcov-exclude", - help="Exclude gcov data files that match this regular expression", - action="append", - dest="gcov_exclude", - default=[]) -parser.add_option("-r","--root", - help="Defines the root directory. This is used to filter the files, and to standardize the output.", - action="store", - dest="root", - default=None) -parser.add_option("-x","--xml", - help="Generate XML instead of the normal tabular output.", - action="store_true", - dest="xml", - default=None) -parser.add_option("-b","--branches", - help="Tabulate the branch coverage instead of the line coverage.", - action="store_true", - dest="show_branch", - default=None) -parser.add_option("-u","--sort-uncovered", - help="Sort entries by increasing number of uncovered lines.", - action="store_true", - dest="sort_uncovered", - default=None) -parser.add_option("-p","--sort-percentage", - help="Sort entries by decreasing percentage of covered lines.", - action="store_true", - dest="sort_percent", - default=None) -parser.usage="gcovr [options]" -parser.description="A utility to run gcov and generate a simple report that summarizes the coverage" +parser.add_option("--version", help="Print the version number, then exit", action="store_true", dest="version", default=False) +parser.add_option("-v", "--verbose", help="Print progress messages", action="store_true", dest="verbose", default=False) +parser.add_option( + "--object-directory", + help="Specify the directory that contains the gcov data files. gcovr must be able to identify the path between the *.gcda " + + "files and the directory where gcc was originally run. Normally, gcovr can guess correctly. This option overrides gcovr's " + + "normal path detection and can specify either the path from gcc to the gcda file (i.e. 
what was passed to gcc's '-o' option), " + + "or the path from the gcda file to gcc's original working directory.", + action="store", + dest="objdir", + default=None, +) +parser.add_option("-o", "--output", help="Print output to this filename", action="store", dest="output", default=None) +parser.add_option("-k", "--keep", help="Keep temporary gcov files", action="store_true", dest="keep", default=False) +parser.add_option( + "-d", "--delete", help="Delete the coverage files after they are processed", action="store_true", dest="delete", default=False +) +parser.add_option( + "-f", "--filter", help="Keep only the data files that match this regular expression", action="store", dest="filter", default=None +) +parser.add_option( + "-e", "--exclude", help="Exclude data files that match this regular expression", action="append", dest="exclude", default=[] +) +parser.add_option( + "--gcov-filter", help="Keep only gcov data files that match this regular expression", action="store", dest="gcov_filter", default=None +) +parser.add_option( + "--gcov-exclude", help="Exclude gcov data files that match this regular expression", action="append", dest="gcov_exclude", default=[] +) +parser.add_option( + "-r", + "--root", + help="Defines the root directory. 
This is used to filter the files, and to standardize the output.", + action="store", + dest="root", + default=None, +) +parser.add_option("-x", "--xml", help="Generate XML instead of the normal tabular output.", action="store_true", dest="xml", default=None) +parser.add_option( + "-b", + "--branches", + help="Tabulate the branch coverage instead of the line coverage.", + action="store_true", + dest="show_branch", + default=None, +) +parser.add_option( + "-u", + "--sort-uncovered", + help="Sort entries by increasing number of uncovered lines.", + action="store_true", + dest="sort_uncovered", + default=None, +) +parser.add_option( + "-p", + "--sort-percentage", + help="Sort entries by decreasing percentage of covered lines.", + action="store_true", + dest="sort_percent", + default=None, +) +parser.usage = "gcovr [options]" +parser.description = "A utility to run gcov and generate a simple report that summarizes the coverage" # # Process options # @@ -934,22 +876,23 @@ if options.version: "\n" "Copyright (2008) Sandia Corporation. Under the terms of Contract\n" "DE-AC04-94AL85000 with Sandia Corporation, the U.S. 
Government\n" - "retains certain rights in this software.\n" - % (version_str(),) ) + "retains certain rights in this software.\n" % (version_str(),) + ) sys.exit(0) if options.objdir: - tmp = options.objdir.replace('/',os.sep).replace('\\',os.sep) - while os.sep+os.sep in tmp: - tmp = tmp.replace(os.sep+os.sep, os.sep) + tmp = options.objdir.replace("/", os.sep).replace("\\", os.sep) + while os.sep + os.sep in tmp: + tmp = tmp.replace(os.sep + os.sep, os.sep) if normpath(options.objdir) != tmp: sys.stderr.write( "(WARNING) relative referencing in --object-directory.\n" "\tthis could cause strange errors when gcovr attempts to\n" - "\tidentify the original gcc working directory.\n") + "\tidentify the original gcc working directory.\n" + ) # # Setup filters # -for i in range(0,len(options.exclude)): +for i in range(0, len(options.exclude)): options.exclude[i] = re.compile(options.exclude[i]) if options.filter is not None: options.filter = re.compile(options.filter) @@ -958,18 +901,19 @@ elif options.root is not None: sys.stderr.write( "(ERROR) empty --root option.\n" "\tRoot specifies the path to the root directory of your project\n" - "\tand cannot be an empty string.\n") + "\tand cannot be an empty string.\n" + ) sys.exit(1) - options.filter = re.compile(re.escape(os.path.abspath(options.root)+os.sep)) + options.filter = re.compile(re.escape(os.path.abspath(options.root) + os.sep)) if options.filter is None: - options.filter = re.compile('') + options.filter = re.compile("") # -for i in range(0,len(options.gcov_exclude)): +for i in range(0, len(options.gcov_exclude)): options.gcov_exclude[i] = re.compile(options.gcov_exclude[i]) if options.gcov_filter is not None: options.gcov_filter = re.compile(options.gcov_filter) else: - options.gcov_filter = re.compile('') + options.gcov_filter = re.compile("") # # Get data files # @@ -982,9 +926,9 @@ else: # covdata = {} for file in datafiles: - process_datafile(file,covdata,options) + process_datafile(file, covdata, 
options) if options.verbose: - sys.stdout.write("Gathered coveraged data for "+str(len(covdata))+" files\n") + sys.stdout.write("Gathered coveraged data for " + str(len(covdata)) + " files\n") # # Print report #