Python* API Reference for Intel® Data Analytics Acceleration Library 2019 Update 4

gbt_cls_dense_batch.py

# file: gbt_cls_dense_batch.py
#===============================================================================
# Copyright 2014-2019 Intel Corporation.
#
# This software and the related documents are Intel copyrighted materials, and
# your use of them is governed by the express license under which they were
# provided to you (License). Unless the License provides otherwise, you may not
# use, modify, copy, publish, distribute, disclose or transmit this software or
# the related documents without Intel's prior written permission.
#
# This software and the related documents are provided as is, with no express
# or implied warranties, other than those that are expressly stated in the
# License.
#===============================================================================

import os
import sys

from daal.algorithms import gbt
from daal.algorithms.gbt.classification import prediction, training
from daal.algorithms import classifier
from daal.data_management import (
    FileDataSource, DataSourceIface, NumericTableIface, HomogenNumericTable,
    MergedNumericTable, features
)

utils_folder = os.path.realpath(os.path.abspath(os.path.dirname(os.path.dirname(__file__))))
if utils_folder not in sys.path:
    sys.path.insert(0, utils_folder)
from utils import printNumericTable, printNumericTables

DAAL_PREFIX = os.path.join('..', 'data')

# Input data set parameters
trainDatasetFileName = os.path.join(DAAL_PREFIX, 'batch', 'df_classification_train.csv')
testDatasetFileName = os.path.join(DAAL_PREFIX, 'batch', 'df_classification_test.csv')

nFeatures = 3
nClasses = 5

# Gradient boosted trees parameters
maxIterations = 40
minObservationsInLeafNode = 8

# Model object for the gradient boosted trees classification algorithm
model = None
predictionResult = None
testGroundTruth = None


def trainModel():
    global model

    # Initialize FileDataSource<CSVFeatureManager> to retrieve the input data from a .csv file
    trainDataSource = FileDataSource(
        trainDatasetFileName,
        DataSourceIface.notAllocateNumericTable,
        DataSourceIface.doDictionaryFromContext
    )

    # Create Numeric Tables for training data and labels
    trainData = HomogenNumericTable(nFeatures, 0, NumericTableIface.notAllocate)
    trainGroundTruth = HomogenNumericTable(1, 0, NumericTableIface.notAllocate)
    mergedData = MergedNumericTable(trainData, trainGroundTruth)

    # Retrieve the data from the input file
    trainDataSource.loadDataBlock(mergedData)

    # Get the dictionary and update it with additional information about the data
    dict = trainData.getDictionary()

    # Add a feature type to the dictionary
    dict[0].featureType = features.DAAL_CONTINUOUS
    dict[1].featureType = features.DAAL_CONTINUOUS
    dict[2].featureType = features.DAAL_CATEGORICAL

    # Create an algorithm object to train the gradient boosted trees classification model
    algorithm = training.Batch(nClasses)
    algorithm.parameter().maxIterations = maxIterations
    algorithm.parameter().minObservationsInLeafNode = minObservationsInLeafNode
    algorithm.parameter().featuresPerNode = nFeatures

    # Pass the training data set and dependent values to the algorithm
    algorithm.input.set(classifier.training.data, trainData)
    algorithm.input.set(classifier.training.labels, trainGroundTruth)

    # Train the gradient boosted trees classification model and retrieve the results of the training algorithm
    trainingResult = algorithm.compute()
    model = trainingResult.get(classifier.training.model)


def testModel():
    global testGroundTruth, predictionResult

    # Initialize FileDataSource<CSVFeatureManager> to retrieve the test data from a .csv file
    testDataSource = FileDataSource(
        testDatasetFileName,
        DataSourceIface.notAllocateNumericTable,
        DataSourceIface.doDictionaryFromContext
    )

    # Create Numeric Tables for testing data and labels
    testData = HomogenNumericTable(nFeatures, 0, NumericTableIface.notAllocate)
    testGroundTruth = HomogenNumericTable(1, 0, NumericTableIface.notAllocate)
    mergedData = MergedNumericTable(testData, testGroundTruth)

    # Retrieve the data from the input file
    testDataSource.loadDataBlock(mergedData)

    # Get the dictionary and update it with additional information about the data
    dict = testData.getDictionary()

    # Add a feature type to the dictionary
    dict[0].featureType = features.DAAL_CONTINUOUS
    dict[1].featureType = features.DAAL_CONTINUOUS
    dict[2].featureType = features.DAAL_CATEGORICAL

    # Create an algorithm object for gradient boosted trees classification prediction with the default method
    algorithm = prediction.Batch(nClasses)

    # Pass the testing data set and trained model to the algorithm
    algorithm.input.setTable(classifier.prediction.data, testData)
    algorithm.input.setModel(classifier.prediction.model, model)

    # Compute prediction results and retrieve algorithm results
    # (Result class from classifier.prediction)
    predictionResult = algorithm.compute()


def printResults():

    printNumericTable(predictionResult.get(classifier.prediction.prediction),
                      "Gradient boosted trees prediction results (first 10 rows):", 10)
    printNumericTable(testGroundTruth, "Ground truth (first 10 rows):", 10)


if __name__ == "__main__":

    trainModel()
    testModel()
    printResults()
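The example prints only the first ten predicted labels next to the ground truth. If a quick accuracy figure is also wanted, both numeric tables can be copied into NumPy arrays with the usual pydaal block-access pattern. The sketch below assumes BlockDescriptor, readOnly, getBlockOfRows, and releaseBlockOfRows are available from daal.data_management in your build; the helpers tableToArray and printAccuracy are illustrative additions, not part of the sample.

# Sketch: read the prediction and ground-truth tables into NumPy arrays
# and report a simple accuracy figure (assumes the BlockDescriptor API).
import numpy as np
from daal.data_management import BlockDescriptor, readOnly

def tableToArray(table):
    # Copy all rows of a NumericTable into a NumPy array
    nRows = table.getNumberOfRows()
    block = BlockDescriptor()
    table.getBlockOfRows(0, nRows, readOnly, block)
    array = np.array(block.getArray(), copy=True)
    table.releaseBlockOfRows(block)
    return array

def printAccuracy():
    # Compare every predicted class label with the corresponding ground-truth value
    predicted = tableToArray(predictionResult.get(classifier.prediction.prediction)).ravel()
    expected = tableToArray(testGroundTruth).ravel()
    accuracy = float(np.sum(predicted == expected)) / expected.size
    print("Accuracy on the test set: {:.3f}".format(accuracy))

Called after testModel(), printAccuracy() evaluates the whole test set rather than just the ten rows printed by printResults().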
