This example shows how to perform online classification using sparse grids and the averaged stochastic gradient descent (ASGD) method.
It creates an instance of LearnerSGD and runs its train() function, in which the main functionality is implemented.
Currently, only binary classification with class labels -1 and 1 is possible.
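For reference, averaged SGD keeps a running average of the plain SGD iterates; this is the standard formulation of the method, not code taken from the LearnerSGD source:
\[ w_{t+1} = w_t - \gamma \, \nabla_w \ell(w_t; x_t, y_t), \qquad \bar{w}_T = \frac{1}{T} \sum_{t=1}^{T} w_t \]
Here \(\ell\) denotes the pointwise loss and \(\gamma\) the learning rate; the sparse-grid classifier then assigns the label \(\operatorname{sign}(f(x))\) with \(f(x) = \sum_i \bar{w}_i \varphi_i(x)\) for hierarchical basis functions \(\varphi_i\).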
The example provides the option to execute several runs over differently ordered data and to perform a 5-fold cross-validation within each run. For this, the data must already be randomly ordered and partitioned. Averaging the results of several runs can be more reliable in an online-learning scenario, because the order in which the learner sees the data points can affect the result.
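The code below relies roughly on the following headers; the exact include paths are assumptions based on SG++'s module layout and may differ between versions.
#include <sgpp/base/datatypes/DataMatrix.hpp>
#include <sgpp/base/datatypes/DataVector.hpp>
#include <sgpp/datadriven/application/LearnerSGD.hpp>
#include <sgpp/datadriven/tools/ARFFTools.hpp>
#include <sgpp/datadriven/tools/Dataset.hpp>

#include <iostream>
#include <string>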
Specify the number of runs to perform. To execute only a single run, set totalSets to 1.
size_t totalSets = 1;        // number of runs over differently ordered data
size_t totalFolds = 1;       // set to 5 to perform 5-fold cross-validation
double avgError = 0.0;       // classification error averaged over all runs
double avgErrorFolds = 0.0;  // classification error averaged over the folds of one run
for (size_t numSets = 0; numSets < totalSets; numSets++) {
A vector for accumulating the average classification error throughout the learning process. The length of the vector determines the total number of error observations.
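The declaration itself does not appear in the snippet; a minimal sketch, assuming sgpp::base::DataVector (the length of 51 observations is an assumed value):
// accumulates the error observations over all folds; add() and mult() are applied to it below
sgpp::base::DataVector avgErrorsFolds(51, 0.0);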
for (size_t numFolds = 0; numFolds < totalFolds; numFolds++) {
Get the training and test data; a separate validation set is optional and not used in this example.
std::string filename = "../../datasets/ripley/ripleyGarcke.train.arff";
std::cout << "# loading file: " << filename << std::endl;
filename = "../../datasets/ripley/ripleyGarcke.test.arff";
std::cout << "# loading file: " << filename << std::endl;
bool useValidData = false;
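No separate validation set is loaded here; the corresponding pointers (their declaration is not shown in the snippet) remain null:
sgpp::base::DataMatrix* validData = nullptr;
sgpp::base::DataVector* validLabels = nullptr;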
The grid configuration.
std::cout << "# creating grid config" << std::endl;
Configure the adaptive refinement. As refinement monitor, either the periodic monitor or the convergence monitor can be chosen. Possible refinement indicators are predictive refinement and impurity-based refinement.
std::cout << "# create adaptive refinement config" << std::endl;
std::string refMonitor = "periodic";    // alternative: "convergence"
size_t refPeriod = 40;                  // refinement period of the periodic monitor
double errorDeclineThreshold = 0.0005;  // convergence monitor: minimum error decline
size_t errorDeclineBufferSize = 100;    // convergence monitor: number of buffered error values
size_t minRefInterval = 0;              // convergence monitor: minimum iterations between refinements
std::cout << "Refinement monitor: " << refMonitor << std::endl;
std::string refType = "impurity";  // alternative: "predictive"
std::cout << "Refinement type: " << refType << std::endl;
Specify the number of refinement steps and the maximum number of grid points to refine in each step.
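The corresponding configuration is not shown; a sketch with assumed values (the struct name follows SG++'s historical spelling, AdaptivityConfiguration in newer versions):
sgpp::base::AdpativityConfiguration adaptConfig;
adaptConfig.numRefinements_ = 2;  // number of refinement steps (assumed value)
adaptConfig.noPoints_ = 7;        // max grid points to refine per step (assumed value)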
// SGD hyperparameters
size_t maxDataPasses = 2;  // maximum number of passes over the training data
double lambda = 0.01;      // regularization parameter
double gamma = 0.25;       // (initial) learning rate
size_t batchSize = 50;     // number of samples used to estimate the current error
if (validData != nullptr) {
  // a fixed validation set would be prepared here; not used in this example
}
Create the learner, passing the grid and refinement configuration together with the data sets and the SGD parameters.
std::cout << "# creating the learner" << std::endl;
sgpp::datadriven::LearnerSGD learner(gridConfig, adaptConfig, trainData, trainLabels,
                                     testData, testLabels, validData, validLabels, lambda,
                                     gamma, batchSize, useValidData);
learner.initialize();
Learn the data.
std::cout << "# start to train the learner" << std::endl;
learner.train(maxDataPasses, refType, refMonitor, refPeriod, errorDeclineThreshold,
errorDeclineBufferSize, minRefInterval);
Compute the accuracy on the current training data and on the test data; the third argument of getAccuracy() is the classification threshold.
double accTrain = learner.getAccuracy(trainData, trainLabels, 0.0);
std::cout << "Acc (train): " << accTrain << std::endl;
double accTest = learner.getAccuracy(testData, testLabels, 0.0);
std::cout << "Acc (test): " << accTest << std::endl;
avgErrorFolds += learner.error;
avgErrorsFolds.add(learner.avgErrors);
}
avgErrorFolds = avgErrorFolds / static_cast<double>(totalFolds);
if ((totalSets > 1) && (totalFolds > 1)) {
Average accuracy on the test data regarding the 5-fold cross-validation.
std::cout << "Average accuracy on test data (set " + std::to_string(numSets + 1) + "): "
<< (1.0 - avgErrorFolds) << std::endl;
}
avgError += avgErrorFolds;
avgErrorFolds = 0.0;
avgErrorsFolds.mult(1.0 / static_cast<double>(totalFolds));
}
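Finally, the avgError accumulated above is presumably averaged over all runs; a sketch of this last step:
avgError /= static_cast<double>(totalSets);  // average classification error over all runs
std::cout << "Average accuracy on test data: " << (1.0 - avgError) << std::endl;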