Python* API Reference for Intel® Data Analytics Acceleration Library 2018 Update 1

gbt_cls_dense_batch.py

1 #===============================================================================
2 # Copyright 2014-2017 Intel Corporation
3 # All Rights Reserved.
4 #
5 # If this software was obtained under the Intel Simplified Software License,
6 # the following terms apply:
7 #
8 # The source code, information and material ("Material") contained herein is
9 # owned by Intel Corporation or its suppliers or licensors, and title to such
10 # Material remains with Intel Corporation or its suppliers or licensors. The
11 # Material contains proprietary information of Intel or its suppliers and
12 # licensors. The Material is protected by worldwide copyright laws and treaty
13 # provisions. No part of the Material may be used, copied, reproduced,
14 # modified, published, uploaded, posted, transmitted, distributed or disclosed
15 # in any way without Intel's prior express written permission. No license under
16 # any patent, copyright or other intellectual property rights in the Material
17 # is granted to or conferred upon you, either expressly, by implication,
18 # inducement, estoppel or otherwise. Any license under such intellectual
19 # property rights must be express and approved by Intel in writing.
20 #
21 # Unless otherwise agreed by Intel in writing, you may not remove or alter this
22 # notice or any other notice embedded in Materials by Intel or Intel's
23 # suppliers or licensors in any way.
24 #
25 #
26 # If this software was obtained under the Apache License, Version 2.0 (the
27 # "License"), the following terms apply:
28 #
29 # You may not use this file except in compliance with the License. You may
30 # obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
31 #
32 #
33 # Unless required by applicable law or agreed to in writing, software
34 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
35 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
36 #
37 # See the License for the specific language governing permissions and
38 # limitations under the License.
39 #===============================================================================
40 
41 
42 
43 
44 import os
45 import sys
46 
47 from daal.algorithms import gbt
48 from daal.algorithms.gbt.classification import prediction, training
49 from daal.algorithms import classifier
50 from daal.data_management import (
51  FileDataSource, DataSourceIface, NumericTableIface, HomogenNumericTable,
52  MergedNumericTable, data_feature_utils
53 )
54 
55 utils_folder = os.path.realpath(os.path.abspath(os.path.dirname(os.path.dirname(__file__))))
56 if utils_folder not in sys.path:
57  sys.path.insert(0, utils_folder)
58 from utils import printNumericTable, printNumericTables
59 
# Location of the example datasets relative to the working directory.
DAAL_PREFIX = os.path.join('..', 'data')

# Input data set parameters
trainDatasetFileName = os.path.join(DAAL_PREFIX, 'batch', 'df_classification_train.csv')
testDatasetFileName = os.path.join(DAAL_PREFIX, 'batch', 'df_classification_test.csv')

nFeatures = 3
nClasses = 5

# Gradient boosted trees parameters
maxIterations = 40
minObservationsInLeafNode = 8

# Shared state produced by trainModel()/testModel() and consumed by
# printResults(); populated lazily, hence initialized to None here.
model = None
predictionResult = None
testGroundTruth = None
78 
def trainModel():
    """Train the gradient boosted trees classification model.

    Reads the training data set from ``trainDatasetFileName``, marks the
    feature types in the data dictionary, runs the batch training
    algorithm, and stores the trained model in the module-level ``model``.
    """
    global model

    # Initialize FileDataSource<CSVFeatureManager> to retrieve the input
    # data from a .csv file
    trainDataSource = FileDataSource(
        trainDatasetFileName,
        DataSourceIface.notAllocateNumericTable,
        DataSourceIface.doDictionaryFromContext
    )

    # Create Numeric Tables for training data and labels
    trainData = HomogenNumericTable(nFeatures, 0, NumericTableIface.notAllocate)
    trainGroundTruth = HomogenNumericTable(1, 0, NumericTableIface.notAllocate)
    mergedData = MergedNumericTable(trainData, trainGroundTruth)

    # Retrieve the data from the input file
    trainDataSource.loadDataBlock(mergedData)

    # Get the dictionary and update it with additional information about
    # the data (renamed from `dict` to avoid shadowing the builtin).
    featureDict = trainData.getDictionary()

    # Add a feature type to the dictionary: first two features are
    # continuous, the third is categorical.
    featureDict[0].featureType = data_feature_utils.DAAL_CONTINUOUS
    featureDict[1].featureType = data_feature_utils.DAAL_CONTINUOUS
    featureDict[2].featureType = data_feature_utils.DAAL_CATEGORICAL

    # Create an algorithm object to train the gradient boosted trees
    # classification model
    algorithm = training.Batch(nClasses)
    algorithm.parameter().maxIterations = maxIterations
    algorithm.parameter().minObservationsInLeafNode = minObservationsInLeafNode
    algorithm.parameter().featuresPerNode = nFeatures

    # Pass the training data set and dependent values to the algorithm
    algorithm.input.set(classifier.training.data, trainData)
    algorithm.input.set(classifier.training.labels, trainGroundTruth)

    # Train the model and retrieve the results of the training algorithm
    trainingResult = algorithm.compute()
    model = trainingResult.get(classifier.training.model)
118 
def testModel():
    """Run prediction with the trained model on the test data set.

    Reads the test data set from ``testDatasetFileName``, marks the
    feature types in the data dictionary, and stores the test labels and
    prediction results in the module-level ``testGroundTruth`` and
    ``predictionResult``.  Assumes trainModel() has already populated
    ``model``.
    """
    global testGroundTruth, predictionResult

    # Initialize FileDataSource<CSVFeatureManager> to retrieve the test
    # data from a .csv file
    testDataSource = FileDataSource(
        testDatasetFileName,
        DataSourceIface.notAllocateNumericTable,
        DataSourceIface.doDictionaryFromContext
    )

    # Create Numeric Tables for testing data and labels
    testData = HomogenNumericTable(nFeatures, 0, NumericTableIface.notAllocate)
    testGroundTruth = HomogenNumericTable(1, 0, NumericTableIface.notAllocate)
    mergedData = MergedNumericTable(testData, testGroundTruth)

    # Retrieve the data from the input file
    testDataSource.loadDataBlock(mergedData)

    # Get the dictionary and update it with additional information about
    # the data (renamed from `dict` to avoid shadowing the builtin).
    featureDict = testData.getDictionary()

    # Add a feature type to the dictionary: must match the layout used
    # during training (two continuous features, one categorical).
    featureDict[0].featureType = data_feature_utils.DAAL_CONTINUOUS
    featureDict[1].featureType = data_feature_utils.DAAL_CONTINUOUS
    featureDict[2].featureType = data_feature_utils.DAAL_CATEGORICAL

    # Create algorithm objects for gradient boosted trees classification
    # prediction with the default method
    algorithm = prediction.Batch(nClasses)

    # Pass the testing data set and trained model to the algorithm
    algorithm.input.setTable(classifier.prediction.data, testData)
    algorithm.input.setModel(classifier.prediction.model, model)

    # Compute prediction results and retrieve algorithm results
    # (Result class from classifier.prediction)
    predictionResult = algorithm.compute()
155 
156 
def printResults():
    """Print ground-truth labels side by side with the predicted labels.

    Relies on testModel() having populated the module-level
    ``testGroundTruth`` and ``predictionResult``.
    """
    predictedLabels = predictionResult.get(classifier.prediction.prediction)
    printNumericTables(
        testGroundTruth,
        predictedLabels,
        "Ground truth", "Classification results",
        "gradient boosted trees classification results (first 20 observations):", 20
    )
165 
if __name__ == "__main__":
    # Run the full example end to end: train, predict, then report.
    trainModel()
    testModel()
    printResults()

For more complete information about compiler optimizations, see our Optimization Notice.