
Commit bb66ce8

Rename to validRatio

1 parent 040decb · commit bb66ce8

2 files changed: +9 -9 lines changed


dataloader/dataloader.hpp

Lines changed: 4 additions & 4 deletions
@@ -63,14 +63,14 @@ class DataLoader
    *
    * @param datasetPath Path or name of dataset.
    * @param shuffle whether or not to shuffle the data.
-   * @param testRatio Ratio of dataset to be used for validation set.
+   * @param validRatio Ratio of dataset to be used for validation set.
    * @param useScaler Use feature scaler for pre-processing the dataset.
    * @param augmentation Adds augmentation to training data only.
    * @param augmentationProbability Probability of applying augmentation on dataset.
    */
   DataLoader(const std::string& dataset,
              const bool shuffle,
-             const double testRatio = 0.25,
+             const double validRatio = 0.25,
              const bool useScaler = true,
              const std::vector<std::string> augmentation =
                  std::vector<std::string>(),
@@ -85,7 +85,7 @@ class DataLoader
    * Note: This option augmentation to NULL, set ratio to 1 and
    * scaler will be used to only transform the test data.
    * @param shuffle Boolean to determine whether or not to shuffle the data.
-   * @param testRatio Ratio of dataset to be used for validation set.
+   * @param validRatio Ratio of dataset to be used for validation set.
    * @param useScaler Fits the scaler on training data and transforms dataset.
    * @param dropHeader Drops the first row from CSV.
    * @param startInputFeatures First Index which will be fed into the model as input.
@@ -106,7 +106,7 @@ class DataLoader
   void LoadCSV(const std::string& datasetPath,
                const bool loadTrainData = true,
                const bool shuffle = true,
-               const double testRatio = 0.25,
+               const double validRatio = 0.25,
                const bool useScaler = false,
                const bool dropHeader = false,
                const int startInputFeatures = -1,
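
For reference, a minimal usage sketch of the constructor declared above, with the renamed validRatio parameter. The "mnist" dataset name, the default template arguments, and the accessor names in the comment are illustrative assumptions, not part of this commit.

#include <dataloader/dataloader.hpp>

int main()
{
  // Shuffle, then hold out 20% of the training data as the validation set.
  // "mnist" stands in for any dataset name registered in datasetMap.
  DataLoader<> loader("mnist", true /* shuffle */, 0.2 /* validRatio */);

  // The split matrices would then be reachable through accessors such as
  // loader.TrainFeatures() / loader.ValidFeatures() (names assumed here).
  return 0;
}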

dataloader/dataloader_impl.hpp

Lines changed: 5 additions & 5 deletions
@@ -36,7 +36,7 @@ template<
     DatasetX, DatasetY, ScalerType
 >::DataLoader(const std::string& dataset,
               const bool shuffle,
-              const double testRatio,
+              const double validRatio,
               const bool useScaler,
               const std::vector<std::string> augmentation,
               const double augmentationProbability)
@@ -49,14 +49,14 @@ template<

   if (datasetMap[dataset].loadCSV)
   {
-    LoadCSV(datasetMap[dataset].trainPath, true, shuffle, testRatio,
+    LoadCSV(datasetMap[dataset].trainPath, true, shuffle, validRatio,
             useScaler, datasetMap[dataset].dropHeader,
             datasetMap[dataset].startTrainingInputFeatures,
             datasetMap[dataset].endTrainingInputFeatures,
             datasetMap[dataset].endTrainingPredictionFeatures,
             datasetMap[dataset].endTrainingPredictionFeatures);

-    LoadCSV(datasetMap[dataset].testPath, false, false, testRatio, useScaler,
+    LoadCSV(datasetMap[dataset].testPath, false, false, validRatio, useScaler,
             datasetMap[dataset].dropHeader,
             datasetMap[dataset].startTestingInputFeatures,
             datasetMap[dataset].endTestingInputFeatures);
@@ -85,7 +85,7 @@ template<
 >::LoadCSV(const std::string& datasetPath,
            const bool loadTrainData,
            const bool shuffle,
-           const double testRatio,
+           const double validRatio,
            const bool useScaler,
            const bool dropHeader,
            const int startInputFeatures,
@@ -104,7 +104,7 @@ template<
   if (loadTrainData)
   {
     arma::mat trainDataset, validDataset;
-    data::Split(dataset, trainDataset, validDataset, testRatio, shuffle);
+    data::Split(dataset, trainDataset, validDataset, validRatio, shuffle);

     trainFeatures = trainDataset.rows(WrapIndex(startInputFeatures,
         trainDataset.n_rows), WrapIndex(endInputFeatures,
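
The renamed argument is passed straight through to data::Split above; with mlpack's split semantics, it is the fraction of points moved into the second (validation) matrix. A self-contained sketch of that call, mirroring the 0.25 default:

#include <mlpack/core.hpp>

void SplitSketch(const arma::mat& dataset)
{
  arma::mat trainDataset, validDataset;
  // About 25% of the columns (points) end up in validDataset and the rest in
  // trainDataset; the final argument shuffles the points before splitting.
  mlpack::data::Split(dataset, trainDataset, validDataset, 0.25, true);
}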
