Python* API Reference for Intel® Data Analytics Acceleration Library 2018 Update 2

gbt_cls_dense_batch.py

# file: gbt_cls_dense_batch.py
#===============================================================================
# Copyright 2014-2018 Intel Corporation
# All Rights Reserved.
#
# If this software was obtained under the Intel Simplified Software License,
# the following terms apply:
#
# The source code, information and material ("Material") contained herein is
# owned by Intel Corporation or its suppliers or licensors, and title to such
# Material remains with Intel Corporation or its suppliers or licensors. The
# Material contains proprietary information of Intel or its suppliers and
# licensors. The Material is protected by worldwide copyright laws and treaty
# provisions. No part of the Material may be used, copied, reproduced,
# modified, published, uploaded, posted, transmitted, distributed or disclosed
# in any way without Intel's prior express written permission. No license under
# any patent, copyright or other intellectual property rights in the Material
# is granted to or conferred upon you, either expressly, by implication,
# inducement, estoppel or otherwise. Any license under such intellectual
# property rights must be express and approved by Intel in writing.
#
# Unless otherwise agreed by Intel in writing, you may not remove or alter this
# notice or any other notice embedded in Materials by Intel or Intel's
# suppliers or licensors in any way.
#
#
# If this software was obtained under the Apache License, Version 2.0 (the
# "License"), the following terms apply:
#
# You may not use this file except in compliance with the License. You may
# obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#===============================================================================


import os
import sys

from daal.algorithms import gbt
from daal.algorithms.gbt.classification import prediction, training
from daal.algorithms import classifier
from daal.data_management import (
    FileDataSource, DataSourceIface, NumericTableIface, HomogenNumericTable,
    MergedNumericTable, data_feature_utils
)

utils_folder = os.path.realpath(os.path.abspath(os.path.dirname(os.path.dirname(__file__))))
if utils_folder not in sys.path:
    sys.path.insert(0, utils_folder)
from utils import printNumericTable, printNumericTables

DAAL_PREFIX = os.path.join('..', 'data')

# Input data set parameters
trainDatasetFileName = os.path.join(DAAL_PREFIX, 'batch', 'df_classification_train.csv')
testDatasetFileName = os.path.join(DAAL_PREFIX, 'batch', 'df_classification_test.csv')

nFeatures = 3
nClasses = 5

# Gradient boosted trees parameters
maxIterations = 40
minObservationsInLeafNode = 8

# Model object for the gradient boosted trees classification algorithm
model = None
predictionResult = None
testGroundTruth = None


def trainModel():
    global model

    # Initialize FileDataSource<CSVFeatureManager> to retrieve the input data from a .csv file
    trainDataSource = FileDataSource(
        trainDatasetFileName,
        DataSourceIface.notAllocateNumericTable,
        DataSourceIface.doDictionaryFromContext
    )

    # Create Numeric Tables for training data and labels
    trainData = HomogenNumericTable(nFeatures, 0, NumericTableIface.notAllocate)
    trainGroundTruth = HomogenNumericTable(1, 0, NumericTableIface.notAllocate)
    mergedData = MergedNumericTable(trainData, trainGroundTruth)

    # Retrieve the data from the input file
    trainDataSource.loadDataBlock(mergedData)

    # Get the dictionary and update it with additional information about data
    dict = trainData.getDictionary()

    # Add a feature type to the dictionary
    dict[0].featureType = data_feature_utils.DAAL_CONTINUOUS
    dict[1].featureType = data_feature_utils.DAAL_CONTINUOUS
    dict[2].featureType = data_feature_utils.DAAL_CATEGORICAL

    # Create an algorithm object to train the gradient boosted trees classification model
    algorithm = training.Batch(nClasses)
    algorithm.parameter().maxIterations = maxIterations
    algorithm.parameter().minObservationsInLeafNode = minObservationsInLeafNode
    algorithm.parameter().featuresPerNode = nFeatures

    # Pass the training data set and dependent values to the algorithm
    algorithm.input.set(classifier.training.data, trainData)
    algorithm.input.set(classifier.training.labels, trainGroundTruth)

    # Train the gradient boosted trees classification model and retrieve the results of the training algorithm
    trainingResult = algorithm.compute()
    model = trainingResult.get(classifier.training.model)

def testModel():
    global testGroundTruth, predictionResult

    # Initialize FileDataSource<CSVFeatureManager> to retrieve the test data from a .csv file
    testDataSource = FileDataSource(
        testDatasetFileName,
        DataSourceIface.notAllocateNumericTable,
        DataSourceIface.doDictionaryFromContext
    )

    # Create Numeric Tables for testing data and labels
    testData = HomogenNumericTable(nFeatures, 0, NumericTableIface.notAllocate)
    testGroundTruth = HomogenNumericTable(1, 0, NumericTableIface.notAllocate)
    mergedData = MergedNumericTable(testData, testGroundTruth)

    # Retrieve the data from the input file
    testDataSource.loadDataBlock(mergedData)

    # Get the dictionary and update it with additional information about data
    dict = testData.getDictionary()

    # Add a feature type to the dictionary
    dict[0].featureType = data_feature_utils.DAAL_CONTINUOUS
    dict[1].featureType = data_feature_utils.DAAL_CONTINUOUS
    dict[2].featureType = data_feature_utils.DAAL_CATEGORICAL

    # Create algorithm objects for gradient boosted trees classification prediction with the default method
    algorithm = prediction.Batch(nClasses)

    # Pass the testing data set and trained model to the algorithm
    algorithm.input.setTable(classifier.prediction.data, testData)
    algorithm.input.setModel(classifier.prediction.model, model)

    # Compute prediction results and retrieve algorithm results
    # (Result class from classifier.prediction)
    predictionResult = algorithm.compute()


def printResults():

    printNumericTable(predictionResult.get(classifier.prediction.prediction), "Gradient boosted trees prediction results (first 10 rows):", 10)
    printNumericTable(testGroundTruth, "Ground truth (first 10 rows):", 10)


if __name__ == "__main__":

    trainModel()
    testModel()
    printResults()
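The example prints only the first 10 predicted labels next to the ground truth. As a minimal sketch, and not part of the shipped example, the helper below reads the full prediction and ground-truth tables into NumPy arrays through the block-descriptor access path (assuming BlockDescriptor and readOnly from daal.data_management) and reports the fraction of correctly classified rows. The names tableToArray and computeAccuracy are illustrative only, and the functions reuse predictionResult, testGroundTruth, and classifier as defined in the example above.

# Illustrative sketch, not part of the original example.
# Assumes the BlockDescriptor/readOnly block-access API from daal.data_management
# and reuses predictionResult, testGroundTruth and classifier defined above.
import numpy as np
from daal.data_management import BlockDescriptor, readOnly

def tableToArray(table):
    # Copy every row of a numeric table into a NumPy array
    nRows = table.getNumberOfRows()
    block = BlockDescriptor()
    table.getBlockOfRows(0, nRows, readOnly, block)
    array = np.array(block.getArray(), copy=True)
    table.releaseBlockOfRows(block)
    return array

def computeAccuracy():
    # Fraction of test rows whose predicted label matches the ground truth
    predicted = tableToArray(predictionResult.get(classifier.prediction.prediction))
    expected = tableToArray(testGroundTruth)
    return float(np.mean(predicted.ravel() == expected.ravel()))

Called after testModel(), computeAccuracy() would return a value between 0 and 1 for the test data set loaded from df_classification_test.csv.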
