Python* API Reference for Intel® Data Analytics Acceleration Library 2019 Update 5

sgd_mini_dense_batch.py

Deprecation Notice: With the introduction of daal4py, a package that supersedes PyDAAL, Intel is deprecating PyDAAL and will discontinue support starting with Intel® DAAL 2021 and Intel® Distribution for Python 2021. Until then, Intel will continue to provide compatible PyDAAL pip and conda packages for newer releases of Intel DAAL and make them available in open source. However, Intel will not add new Intel DAAL features to PyDAAL. Intel recommends that developers switch to daal4py.

Note: To find daal4py examples, refer to the daal4py documentation or browse the GitHub repository. A daal4py sketch of this example is shown after the listing below.

# file: sgd_mini_dense_batch.py
#===============================================================================
# Copyright 2014-2019 Intel Corporation.
#
# This software and the related documents are Intel copyrighted materials, and
# your use of them is governed by the express license under which they were
# provided to you (License). Unless the License provides otherwise, you may not
# use, modify, copy, publish, distribute, disclose or transmit this software or
# the related documents without Intel's prior written permission.
#
# This software and the related documents are provided as is, with no express
# or implied warranties, other than those that are expressly stated in the
# License.
#===============================================================================

#
# !  Content:
# !    Python example of the Stochastic gradient descent algorithm
# !*****************************************************************************

#
## <a name="DAAL-EXAMPLE-PY-SGD_MINI_BATCH"></a>
## \example sgd_mini_dense_batch.py
#

import os
import sys

import numpy as np

import daal.algorithms.optimization_solver as optimization_solver
import daal.algorithms.optimization_solver.mse
import daal.algorithms.optimization_solver.sgd
import daal.algorithms.optimization_solver.iterative_solver

from daal.data_management import (
    DataSourceIface, FileDataSource, HomogenNumericTable, MergedNumericTable, NumericTableIface
)

utils_folder = os.path.realpath(os.path.abspath(os.path.dirname(os.path.dirname(__file__))))
if utils_folder not in sys.path:
    sys.path.insert(0, utils_folder)
from utils import printNumericTable

datasetFileName = os.path.join('..', 'data', 'batch', 'mse.csv')

nFeatures = 3
accuracyThreshold = 0.0000001
nIterations = 1000
batchSize = 4
learningRate = 0.5
initialPoint = np.array([[8], [2], [1], [4]], dtype=np.float64)
if __name__ == "__main__":

    # Initialize FileDataSource<CSVFeatureManager> to retrieve the input data from a .csv file
    dataSource = FileDataSource(datasetFileName,
                                DataSourceIface.notAllocateNumericTable,
                                DataSourceIface.doDictionaryFromContext)

    # Create Numeric Tables for data and values for dependent variable
    data = HomogenNumericTable(nFeatures, 0, NumericTableIface.doNotAllocate)
    dependentVariables = HomogenNumericTable(1, 0, NumericTableIface.doNotAllocate)
    mergedData = MergedNumericTable(data, dependentVariables)

    # Retrieve the data from the input file
    dataSource.loadDataBlock(mergedData)

    nVectors = data.getNumberOfRows()

    # Create the MSE objective function to be minimized and set its input data
    mseObjectiveFunction = optimization_solver.mse.Batch(nVectors)
    mseObjectiveFunction.input.set(optimization_solver.mse.data, data)
    mseObjectiveFunction.input.set(optimization_solver.mse.dependentVariables, dependentVariables)

    # Create objects to compute the Stochastic gradient descent result using the mini-batch method
    sgdMiniBatchAlgorithm = optimization_solver.sgd.Batch(mseObjectiveFunction, method=optimization_solver.sgd.miniBatch)
    # Set input objects for the Stochastic gradient descent algorithm
    sgdMiniBatchAlgorithm.input.setInput(optimization_solver.iterative_solver.inputArgument,
                                         HomogenNumericTable(initialPoint))
    sgdMiniBatchAlgorithm.parameter.learningRateSequence = HomogenNumericTable(1, 1, NumericTableIface.doAllocate,
                                                                               learningRate)
    sgdMiniBatchAlgorithm.parameter.nIterations = nIterations
    sgdMiniBatchAlgorithm.parameter.batchSize = batchSize
    sgdMiniBatchAlgorithm.parameter.accuracyThreshold = accuracyThreshold

    # Compute the Stochastic gradient descent result
    # (the result is the Result class from daal.algorithms.optimization_solver.iterative_solver)
    res = sgdMiniBatchAlgorithm.compute()

    # Print the computed Stochastic gradient descent result
    printNumericTable(res.getResult(optimization_solver.iterative_solver.minimum), "Minimum")
    printNumericTable(res.getResult(optimization_solver.iterative_solver.nIterations), "Number of iterations performed:")
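
For comparison, the following is a minimal daal4py sketch of the same mini-batch SGD workflow mentioned in the deprecation notice above. It is not part of the PyDAAL example: it assumes daal4py is installed, that mse.csv holds three feature columns followed by one dependent-variable column, and that optimization_solver_sgd accepts the method='miniBatch' and batchSize keyword arguments; verify the exact constructor signatures against the daal4py documentation.

import daal4py as d4p
import numpy as np

# Load the dataset: first three columns are features, the last one is the dependent variable
dataset = np.loadtxt('../data/batch/mse.csv', delimiter=',', ndmin=2)
data = dataset[:, :3]
dependentVariables = dataset[:, 3:4]

# Configure the MSE objective function and attach the data to it
mse_algo = d4p.optimization_solver_mse(data.shape[0])
mse_algo.setup(data, dependentVariables)

# Configure mini-batch SGD; keyword names mirror the PyDAAL parameters above
# (assumption: method='miniBatch' and batchSize are accepted as shown)
sgd_algo = d4p.optimization_solver_sgd(mse_algo,
                                       method='miniBatch',
                                       batchSize=4,
                                       learningRateSequence=np.array([[0.5]], dtype=np.float64),
                                       nIterations=1000,
                                       accuracyThreshold=0.0000001)

# Run the solver from the same initial point as the PyDAAL example and print the results
initialPoint = np.array([[8], [2], [1], [4]], dtype=np.float64)
res = sgd_algo.compute(initialPoint)
print("Minimum:\n", res.minimum)
print("Number of iterations performed:\n", res.nIterations)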
