Python* API Reference for Intel® Data Analytics Acceleration Library 2018 Update 2

df_reg_traverse_model.py

# file: df_reg_traverse_model.py
#===============================================================================
# Copyright 2014-2018 Intel Corporation
# All Rights Reserved.
#
# If this software was obtained under the Intel Simplified Software License,
# the following terms apply:
#
# The source code, information and material ("Material") contained herein is
# owned by Intel Corporation or its suppliers or licensors, and title to such
# Material remains with Intel Corporation or its suppliers or licensors. The
# Material contains proprietary information of Intel or its suppliers and
# licensors. The Material is protected by worldwide copyright laws and treaty
# provisions. No part of the Material may be used, copied, reproduced,
# modified, published, uploaded, posted, transmitted, distributed or disclosed
# in any way without Intel's prior express written permission. No license under
# any patent, copyright or other intellectual property rights in the Material
# is granted to or conferred upon you, either expressly, by implication,
# inducement, estoppel or otherwise. Any license under such intellectual
# property rights must be express and approved by Intel in writing.
#
# Unless otherwise agreed by Intel in writing, you may not remove or alter this
# notice or any other notice embedded in Materials by Intel or Intel's
# suppliers or licensors in any way.
#
#
# If this software was obtained under the Apache License, Version 2.0 (the
# "License"), the following terms apply:
#
# You may not use this file except in compliance with the License. You may
# obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#===============================================================================

#
# ! Content:
# ! Python example of decision forest regression model traversal.
# !
# ! The program trains a decision forest regression model on a training
# ! data set and prints the trained model by traversing it depth-first.
# !*****************************************************************************

from __future__ import print_function

from daal import algorithms
from daal.algorithms import decision_forest
import daal.algorithms.decision_forest.regression
import daal.algorithms.decision_forest.regression.training

from daal.data_management import (
    FileDataSource, DataSourceIface, NumericTableIface, HomogenNumericTable, MergedNumericTable, data_feature_utils
)

# Input data set parameters
trainDatasetFileName = "../data/batch/df_regression_train.csv"
categoricalFeaturesIndices = [3]
nFeatures = 13  # Number of features in training and testing data sets

# Decision forest parameters
nTrees = 2


def trainModel():

    # Create Numeric Tables for training data and dependent variables
    trainData, trainDependentVariable = loadData(trainDatasetFileName)

    # Create an algorithm object to train the decision forest regression model with the default method
    algorithm = decision_forest.regression.training.Batch()

    # Pass a training data set and dependent values to the algorithm
    algorithm.input.set(decision_forest.regression.training.data, trainData)
    algorithm.input.set(decision_forest.regression.training.dependentVariable, trainDependentVariable)

    # Set the number of trees in the forest
    algorithm.parameter.nTrees = nTrees

    # Build the decision forest regression model and return the result
    return algorithm.compute()


def loadData(fileName):

    # Initialize FileDataSource<CSVFeatureManager> to retrieve the input data from a .csv file
    trainDataSource = FileDataSource(
        fileName, DataSourceIface.notAllocateNumericTable, DataSourceIface.doDictionaryFromContext
    )

    # Create Numeric Tables for training data and dependent variables
    data = HomogenNumericTable(nFeatures, 0, NumericTableIface.notAllocate)
    dependentVar = HomogenNumericTable(1, 0, NumericTableIface.notAllocate)
    mergedData = MergedNumericTable(data, dependentVar)

    # Retrieve the data from the input file
    trainDataSource.loadDataBlock(mergedData)

    # Mark the categorical features in the data dictionary
    dictionary = data.getDictionary()
    for i in range(len(categoricalFeaturesIndices)):
        dictionary[categoricalFeaturesIndices[i]].featureType = data_feature_utils.DAAL_CATEGORICAL

    return data, dependentVar


# Visitor class implementing the NodeVisitor interface; prints out the tree nodes of the model
# when it is called back by the model traversal method
class PrintNodeVisitor(algorithms.regression.TreeNodeVisitor):

    def __init__(self):
        super(PrintNodeVisitor, self).__init__()

    def onLeafNode(self, level, response):

        for i in range(level):
            print(" ", end='')
        print("Level {}, leaf node. Response value = {:.4g}".format(level, response))
        # Return True to continue the traversal; returning False would stop it
        return True

    def onSplitNode(self, level, featureIndex, featureValue):

        for i in range(level):
            print(" ", end='')
        print("Level {}, split node. Feature index = {}, feature value = {:.4g}".format(level, featureIndex, featureValue))
        # Return True to continue the traversal; returning False would stop it
        return True


def printModel(m):
    visitor = PrintNodeVisitor()
    print("Number of trees: {}".format(m.numberOfTrees()))
    for i in range(m.numberOfTrees()):
        print("Tree #{}".format(i))
        m.traverseDF(i, visitor)


if __name__ == "__main__":

    trainingResult = trainModel()
    printModel(trainingResult.get(decision_forest.regression.training.model))
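
The example above prints every node it visits. Because PrintNodeVisitor only reacts to the onLeafNode and onSplitNode callbacks, the same traversal mechanism can also be used to collect information about the trained trees instead of printing them. The following is a minimal sketch, not part of the original example: it reuses only the classes and methods shown above (algorithms.regression.TreeNodeVisitor, traverseDF, numberOfTrees, and the imports from the example), and the names MaxDepthVisitor and printTreeStats are hypothetical, introduced here only for illustration.

# A sketch (not part of the original example): a visitor that records the maximum
# depth and the node counts of a tree instead of printing its nodes. It assumes
# the imports and the trained model from the example above.
class MaxDepthVisitor(algorithms.regression.TreeNodeVisitor):

    def __init__(self):
        super(MaxDepthVisitor, self).__init__()
        self.maxLevel = 0  # deepest level seen so far
        self.nLeaves = 0   # number of leaf nodes visited
        self.nSplits = 0   # number of split nodes visited

    def onLeafNode(self, level, response):
        self.maxLevel = max(self.maxLevel, level)
        self.nLeaves += 1
        return True  # continue the traversal

    def onSplitNode(self, level, featureIndex, featureValue):
        self.maxLevel = max(self.maxLevel, level)
        self.nSplits += 1
        return True  # continue the traversal


def printTreeStats(m):
    # Traverse each tree with a fresh visitor and report the collected statistics
    for i in range(m.numberOfTrees()):
        visitor = MaxDepthVisitor()
        m.traverseDF(i, visitor)
        print("Tree #{}: depth = {}, split nodes = {}, leaf nodes = {}".format(
            i, visitor.maxLevel, visitor.nSplits, visitor.nLeaves))

Called as printTreeStats(trainingResult.get(decision_forest.regression.training.model)), this sketch would report one summary line per tree instead of the full node listing produced by printModel.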
