Python* API Reference for Intel® Data Analytics Acceleration Library 2018 Update 1

kdtree_knn_dense_batch.py

1 #===============================================================================
2 # Copyright 2014-2017 Intel Corporation
3 # All Rights Reserved.
4 #
5 # If this software was obtained under the Intel Simplified Software License,
6 # the following terms apply:
7 #
8 # The source code, information and material ("Material") contained herein is
9 # owned by Intel Corporation or its suppliers or licensors, and title to such
10 # Material remains with Intel Corporation or its suppliers or licensors. The
11 # Material contains proprietary information of Intel or its suppliers and
12 # licensors. The Material is protected by worldwide copyright laws and treaty
13 # provisions. No part of the Material may be used, copied, reproduced,
14 # modified, published, uploaded, posted, transmitted, distributed or disclosed
15 # in any way without Intel's prior express written permission. No license under
16 # any patent, copyright or other intellectual property rights in the Material
17 # is granted to or conferred upon you, either expressly, by implication,
18 # inducement, estoppel or otherwise. Any license under such intellectual
19 # property rights must be express and approved by Intel in writing.
20 #
21 # Unless otherwise agreed by Intel in writing, you may not remove or alter this
22 # notice or any other notice embedded in Materials by Intel or Intel's
23 # suppliers or licensors in any way.
24 #
25 #
26 # If this software was obtained under the Apache License, Version 2.0 (the
27 # "License"), the following terms apply:
28 #
29 # You may not use this file except in compliance with the License. You may
30 # obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
31 #
32 #
33 # Unless required by applicable law or agreed to in writing, software
34 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
35 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
36 #
37 # See the License for the specific language governing permissions and
38 # limitations under the License.
39 #===============================================================================
40 
## <a name="DAAL-EXAMPLE-PY-KDTREE_KNN_DENSE_BATCH"></a>
## \example kdtree_knn_dense_batch.py
43 
44 import os
45 import sys
46 
47 from daal.algorithms.kdtree_knn_classification import training, prediction
48 from daal.algorithms import classifier
49 from daal.data_management import (
50  DataSourceIface, FileDataSource, HomogenNumericTable, MergedNumericTable, NumericTableIface
51 )
52 
53 utils_folder = os.path.realpath(os.path.abspath(os.path.dirname(os.path.dirname(__file__))))
54 if utils_folder not in sys.path:
55  sys.path.insert(0, utils_folder)
56 from utils import printNumericTables
57 
# Root of the example datasets, relative to this script's working directory.
DAAL_PREFIX = os.path.join('..', 'data')

# Input data set parameters
trainDatasetFileName = os.path.join(DAAL_PREFIX, 'batch', 'k_nearest_neighbors_train.csv')
testDatasetFileName = os.path.join(DAAL_PREFIX, 'batch', 'k_nearest_neighbors_test.csv')

# Number of feature columns in each CSV file (one extra column holds the label).
nFeatures = 5

# Filled in by trainModel() and testModel() respectively.
trainingResult = None
predictionResult = None
68 
69 
def trainModel():
    """Train the KD-tree based kNN model and store it in `trainingResult`."""
    global trainingResult

    # Data source for the training .csv file; the numeric table is supplied
    # explicitly below, so the source is told not to allocate its own.
    dataSource = FileDataSource(
        trainDatasetFileName, DataSourceIface.notAllocateNumericTable,
        DataSourceIface.doDictionaryFromContext
    )

    # One table for the feature columns, one for the class labels,
    # merged so that a single load fills both.
    features = HomogenNumericTable(nFeatures, 0, NumericTableIface.doNotAllocate)
    labels = HomogenNumericTable(1, 0, NumericTableIface.doNotAllocate)
    merged = MergedNumericTable(features, labels)

    # Pull the whole file into the merged table.
    dataSource.loadDataBlock(merged)

    # Set up the KD-tree based kNN training algorithm and feed it the data.
    algorithm = training.Batch()
    algorithm.input.set(classifier.training.data, features)
    algorithm.input.set(classifier.training.labels, labels)

    # Build the model; kept in a module-level global for testModel() to use.
    trainingResult = algorithm.compute()
96 
97 
def testModel():
    """Predict labels for the test set with the trained model and print them
    next to the ground-truth labels.

    Reads the module-level `trainingResult` produced by trainModel() and
    stores the algorithm output in the module-level `predictionResult`.
    """
    global trainingResult, predictionResult

    # Initialize FileDataSource<CSVFeatureManager> to retrieve the test data
    # from a .csv file. Use notAllocateNumericTable — as trainModel() does —
    # because the table is supplied explicitly via loadDataBlock(mergedData);
    # doAllocateNumericTable would allocate an internal table that is never used.
    testDataSource = FileDataSource(
        testDatasetFileName, DataSourceIface.notAllocateNumericTable,
        DataSourceIface.doDictionaryFromContext
    )

    # Numeric tables for the test features and the expected labels,
    # merged so that a single load fills both.
    testData = HomogenNumericTable(nFeatures, 0, NumericTableIface.doNotAllocate)
    testGroundTruth = HomogenNumericTable(1, 0, NumericTableIface.doNotAllocate)
    mergedData = MergedNumericTable(testData, testGroundTruth)

    # Load the data from the data file.
    testDataSource.loadDataBlock(mergedData)

    # Create an algorithm object for KD-tree based kNN prediction (default method).
    algorithm = prediction.Batch()

    # Pass the testing data set and the model produced by trainModel().
    algorithm.input.setTable(classifier.prediction.data, testData)
    algorithm.input.setModel(classifier.prediction.model,
                             trainingResult.get(classifier.training.model))

    # Compute prediction results and print them against the ground truth.
    predictionResult = algorithm.compute()
    printNumericTables(
        testGroundTruth, predictionResult.get(classifier.prediction.prediction),
        "Ground truth", "Classification results",
        "KD-tree based kNN classification results (first 20 observations):", 20,
        flt64=False
    )
129 
130 if __name__ == "__main__":
131 
132  trainModel()
133  testModel()

For more complete information about compiler optimizations, see our Optimization Notice.