C++ API Reference for Intel® Data Analytics Acceleration Library 2018 Update 1

ridge_reg_norm_eq_dense_online.cpp

/* file: ridge_reg_norm_eq_dense_online.cpp */
/*******************************************************************************
* Copyright 2014-2017 Intel Corporation
* All Rights Reserved.
*
* If this software was obtained under the Intel Simplified Software License,
* the following terms apply:
*
* The source code, information and material ("Material") contained herein is
* owned by Intel Corporation or its suppliers or licensors, and title to such
* Material remains with Intel Corporation or its suppliers or licensors. The
* Material contains proprietary information of Intel or its suppliers and
* licensors. The Material is protected by worldwide copyright laws and treaty
* provisions. No part of the Material may be used, copied, reproduced,
* modified, published, uploaded, posted, transmitted, distributed or disclosed
* in any way without Intel's prior express written permission. No license under
* any patent, copyright or other intellectual property rights in the Material
* is granted to or conferred upon you, either expressly, by implication,
* inducement, estoppel or otherwise. Any license under such intellectual
* property rights must be express and approved by Intel in writing.
*
* Unless otherwise agreed by Intel in writing, you may not remove or alter this
* notice or any other notice embedded in Materials by Intel or Intel's
* suppliers or licensors in any way.
*
*
* If this software was obtained under the Apache License, Version 2.0 (the
* "License"), the following terms apply:
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
/*
!  Content:
!    C++ example of ridge regression in the online processing mode.
!
!    The program trains the ridge regression model on a training data set with
!    the normal equations method and computes regression for the test data.
!******************************************************************************/
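/*
!  Note (reference sketch, not part of the computation): with the normal
!  equations method, each compute() call below only accumulates the partial
!  cross-products X^T*X and X^T*Y for the current data block;
!  finalizeCompute() then solves
!      (X^T*X + lambda*I) * beta = X^T*Y
!  for the regression coefficients, where lambda denotes the ridge (L2) penalty.
!******************************************************************************/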
#include "daal.h"
#include "service.h"
using namespace std;
using namespace daal;
using namespace daal::algorithms::ridge_regression;
/* Input data set parameters */
string trainDatasetFileName = "../data/online/linear_regression_train.csv";
string testDatasetFileName = "../data/online/linear_regression_test.csv";
const size_t nTrainVectorsInBlock = 250;
const size_t nFeatures = 10; /* Number of features in training and testing data sets */
const size_t nDependentVariables = 2; /* Number of dependent variables that correspond to each observation */
void trainModel();
void testModel();
training::ResultPtr trainingResult;
prediction::ResultPtr predictionResult;
int main(int argc, char *argv[])
{
    checkArguments(argc, argv, 2, &trainDatasetFileName, &testDatasetFileName);

    trainModel();
    testModel();

    return 0;
}
void trainModel()
{
    /* Initialize FileDataSource<CSVFeatureManager> to retrieve the input data from a .csv file */
    FileDataSource<CSVFeatureManager> trainDataSource(trainDatasetFileName,
                                                      DataSource::notAllocateNumericTable,
                                                      DataSource::doDictionaryFromContext);

    /* Create Numeric Tables for training data and dependent variables */
    NumericTablePtr trainData(new HomogenNumericTable<>(nFeatures, 0, NumericTable::doNotAllocate));
    NumericTablePtr trainDependentVariables(new HomogenNumericTable<>(nDependentVariables, 0, NumericTable::doNotAllocate));
    NumericTablePtr mergedData(new MergedNumericTable(trainData, trainDependentVariables));

    /* Create an algorithm object to train the ridge regression model */
    training::Online<> algorithm;

    while(trainDataSource.loadDataBlock(nTrainVectorsInBlock, mergedData.get()) == nTrainVectorsInBlock)
    {
        /* Pass a training data set and dependent values to the algorithm */
        algorithm.input.set(training::data, trainData);
        algorithm.input.set(training::dependentVariables, trainDependentVariables);

        /* Update the ridge regression model */
        algorithm.compute();
    }

    /* Finalize the ridge regression model */
    algorithm.finalizeCompute();

    /* Retrieve the algorithm results */
    trainingResult = algorithm.getResult();
    printNumericTable(trainingResult->get(training::model)->getBeta(), "Ridge Regression coefficients:");
}
void testModel()
{
    /* Initialize FileDataSource<CSVFeatureManager> to retrieve the input data from a .csv file */
    FileDataSource<CSVFeatureManager> testDataSource(testDatasetFileName,
                                                     DataSource::doAllocateNumericTable,
                                                     DataSource::doDictionaryFromContext);

    /* Create Numeric Tables for testing data and ground truth values */
    NumericTablePtr testData(new HomogenNumericTable<>(nFeatures, 0, NumericTable::doNotAllocate));
    NumericTablePtr testGroundTruth(new HomogenNumericTable<>(nDependentVariables, 0, NumericTable::doNotAllocate));
    NumericTablePtr mergedData(new MergedNumericTable(testData, testGroundTruth));

    /* Retrieve the data from the input file */
    testDataSource.loadDataBlock(mergedData.get());

    /* Create an algorithm object to predict values of ridge regression */
    prediction::Batch<> algorithm;

    /* Pass a testing data set and the trained model to the algorithm */
    algorithm.input.set(prediction::model, trainingResult->get(training::model));
    algorithm.input.set(prediction::data, testData);

    /* Predict values of ridge regression */
    algorithm.compute();

    /* Retrieve the algorithm results */
    predictionResult = algorithm.getResult();
    printNumericTable(predictionResult->get(prediction::prediction),
                      "Ridge Regression prediction results: (first 10 rows):", 10);
    printNumericTable(testGroundTruth, "Ground truth (first 10 rows):", 10);
}
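
The example trains with the library's default ridge penalty. As a minimal sketch, assuming training::Online<> exposes the TrainParameter with its ridgeParameters numeric table declared in the DAAL ridge regression headers (an assumption to verify against your DAAL version), the penalty could be overridden right after the algorithm object is constructed in trainModel():

#include "daal.h"

using namespace daal;
using namespace daal::algorithms::ridge_regression;
using namespace daal::data_management;

/* Sketch: override the default ridge penalty before the first call to compute().
   Assumes training::Online<> carries a TrainParameter with a ridgeParameters
   numeric table; check the member against the headers shipped with your release. */
void setRidgePenalty(training::Online<> &algorithm, double lambda)
{
    /* A 1x1 table applies the same penalty lambda to every dependent variable */
    algorithm.parameter.ridgeParameters = NumericTablePtr(
        new HomogenNumericTable<>(1, 1, NumericTable::doAllocate, lambda));
}

In trainModel() above, a call such as setRidgePenalty(algorithm, 0.5) would go between the construction of the training::Online<> object and the data-block loop, so that every partial compute() and the final solve use the chosen penalty.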
