Python* API Reference for Intel® Data Analytics Acceleration Library 2019 Update 4

prelu_layer_dense_batch.py

Deprecation Notice: With the introduction of daal4py, a package that supersedes PyDAAL, Intel is deprecating PyDAAL and will discontinue support starting with Intel® DAAL 2021 and Intel® Distribution for Python 2021. Until then, Intel will continue to provide compatible PyDAAL pip and conda packages for newer releases of Intel DAAL and make them available in open source. However, Intel will not add new features of Intel DAAL to PyDAAL. Intel recommends that developers switch to daal4py.

Note: To find daal4py examples, refer to the daal4py documentation or browse the GitHub repository.
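For orientation, the snippet below sketches the daal4py calling style on a K-Means clustering run (a minimal illustration, not part of this example; daal4py focuses on the classic machine learning algorithms, and the neural network layer API used in this file has no direct daal4py counterpart):

import numpy as np
import daal4py as d4p

# daal4py accepts NumPy arrays directly; no HomogenTensor wrapper is needed
data = np.random.rand(1000, 3)

# Choose initial centroids, then run K-Means clustering
init = d4p.kmeans_init(nClusters=5, method="plusPlusDense").compute(data)
result = d4p.kmeans(nClusters=5, maxIterations=100).compute(data, init.centroids)

print(result.centroids)

An algorithm object is configured once and then applied to data with compute(), which is the pattern daal4py uses across its algorithms.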

# file: prelu_layer_dense_batch.py
#===============================================================================
# Copyright 2014-2019 Intel Corporation.
#
# This software and the related documents are Intel copyrighted materials, and
# your use of them is governed by the express license under which they were
# provided to you (License). Unless the License provides otherwise, you may not
# use, modify, copy, publish, distribute, disclose or transmit this software or
# the related documents without Intel's prior written permission.
#
# This software and the related documents are provided as is, with no express
# or implied warranties, other than those that are expressly stated in the
# License.
#===============================================================================

#
# ! Content:
# ! Python example of forward and backward parametric rectified linear unit (prelu) layer usage
# !
# !*****************************************************************************

#
## <a name="DAAL-EXAMPLE-PY-PRELU_LAYER_BATCH"></a>
## \example prelu_layer_dense_batch.py
#
import os
import sys

from daal.algorithms.neural_networks import layers
from daal.data_management import HomogenTensor, Tensor

utils_folder = os.path.realpath(os.path.abspath(os.path.dirname(os.path.dirname(__file__))))
if utils_folder not in sys.path:
    sys.path.insert(0, utils_folder)
from utils import printTensor, readTensorFromCSV

# Input data set parameters
datasetName = os.path.join("..", "data", "batch", "layer.csv")
weightsName = os.path.join("..", "data", "batch", "layer.csv")

# Starting index of the data dimension to which the weights are applied
dataDimension = 0
# Number of dimensions in the weights tensor
weightsDimension = 2
if __name__ == "__main__":

    # Read input data and weights from CSV files and create tensors to store them
    tensorData = readTensorFromCSV(datasetName)
    tensorWeights = readTensorFromCSV(weightsName)

    # Create an algorithm to compute forward prelu layer results using default method
    forwardPreluLayer = layers.prelu.forward.Batch()
    forwardPreluLayer.parameter.dataDimension = dataDimension
    forwardPreluLayer.parameter.weightsDimension = weightsDimension
    # Weights are supplied explicitly below, so mark them as already initialized
    forwardPreluLayer.parameter.weightsAndBiasesInitialized = True

    # Set input objects for the forward prelu layer
    forwardPreluLayer.input.setInput(layers.forward.data, tensorData)
    forwardPreluLayer.input.setInput(layers.forward.weights, tensorWeights)

    # Compute forward prelu layer results
    forwardResult = forwardPreluLayer.compute()

    # Print the results of the forward prelu layer
    printTensor(forwardResult.getResult(layers.forward.value), "Forward prelu layer result (first 5 rows):", 5)

    # Get the size of the forward prelu layer output and create an input gradient tensor of that size
    gDims = forwardResult.getResult(layers.forward.value).getDimensions()
    tensorDataBack = HomogenTensor(gDims, Tensor.doAllocate, 0.01)

    # Create an algorithm to compute backward prelu layer results using default method
    backwardPreluLayer = layers.prelu.backward.Batch()
    backwardPreluLayer.parameter.dataDimension = dataDimension
    backwardPreluLayer.parameter.weightsDimension = weightsDimension

    # Set input objects for the backward prelu layer
    backwardPreluLayer.input.setInput(layers.backward.inputGradient, tensorDataBack)
    backwardPreluLayer.input.setInputLayerData(layers.backward.inputFromForward, forwardResult.getResultLayerData(layers.forward.resultForBackward))

    # Compute backward prelu layer results
    backwardResult = backwardPreluLayer.compute()

    # Print the results of the backward prelu layer
    printTensor(backwardResult.getResult(layers.backward.gradient), "Backward prelu layer result (first 5 rows):", 5)
    printTensor(backwardResult.getResult(layers.backward.weightDerivatives), "Weights derivative (first 5 rows):", 5)
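For reference, the arithmetic the prelu layer performs can be sketched in plain NumPy (a simplified illustration that broadcasts the weights directly against the data and glosses over the dataDimension/weightsDimension slicing handled by the DAAL layer):

import numpy as np

def prelu_forward(x, w):
    # f(x) = x where x > 0, and w * x elsewhere
    return np.where(x > 0, x, w * x)

def prelu_backward(grad, x, w):
    # Gradient w.r.t. the data: pass grad through where x > 0, scale by w elsewhere
    dx = np.where(x > 0, grad, w * grad)
    # Gradient w.r.t. the weights: accumulate grad * x over the negative part
    dw = np.where(x > 0, 0.0, grad * x).sum(axis=0)
    return dx, dw

This mirrors what the forward value, backward gradient, and weightDerivatives results above contain.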
