Python* API Reference for Intel® Data Analytics Acceleration Library 2019 Update 4

relu_layer_dense_batch.py

# file: relu_layer_dense_batch.py
#===============================================================================
# Copyright 2014-2019 Intel Corporation.
#
# This software and the related documents are Intel copyrighted materials, and
# your use of them is governed by the express license under which they were
# provided to you (License). Unless the License provides otherwise, you may not
# use, modify, copy, publish, distribute, disclose or transmit this software or
# the related documents without Intel's prior written permission.
#
# This software and the related documents are provided as is, with no express
# or implied warranties, other than those that are expressly stated in the
# License.
#===============================================================================

#
# ! Content:
# !   Python example of forward and backward rectified linear unit (relu) layer usage
# !
# !*****************************************************************************

import os
import sys

from daal.algorithms.neural_networks import layers
from daal.data_management import HomogenTensor, Tensor

utils_folder = os.path.realpath(os.path.abspath(os.path.dirname(os.path.dirname(__file__))))
if utils_folder not in sys.path:
    sys.path.insert(0, utils_folder)
from utils import printTensor, readTensorFromCSV

# Input data set parameters
datasetName = os.path.join("..", "data", "batch", "layer.csv")

if __name__ == "__main__":

    # Read the data set from the file and create a tensor to store the input data
    tensorData = readTensorFromCSV(datasetName)

    # Create an algorithm to compute forward relu layer results using the default method
    reluLayerForward = layers.relu.forward.Batch()

    # Set the input object for the forward relu layer
    reluLayerForward.input.setInput(layers.forward.data, tensorData)

    # Compute forward relu layer results
    forwardResult = reluLayerForward.compute()
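
    # forwardResult holds the elementwise output, value[i] = max(0, data[i]),
    # together with the data saved for the backward stage (resultForBackward)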

    # Print the results of the forward relu layer
    printTensor(forwardResult.getResult(layers.forward.value), "Forward relu layer result (first 5 rows):", 5)
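
    # The backward stage needs an input gradient of the same shape as the
    # forward output; here a tensor filled with the constant 0.01 stands in
    # for the gradient that would normally come from the next layer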

    # Get the size of the forward relu layer output
    gDims = forwardResult.getResult(layers.forward.value).getDimensions()
    tensorDataBack = HomogenTensor(gDims, Tensor.doAllocate, 0.01)

    # Create an algorithm to compute backward relu layer results using the default method
    reluLayerBackward = layers.relu.backward.Batch()

    # Set input objects for the backward relu layer
    reluLayerBackward.input.setInput(layers.backward.inputGradient, tensorDataBack)
    reluLayerBackward.input.setInputLayerData(layers.backward.inputFromForward, forwardResult.getResultLayerData(layers.forward.resultForBackward))
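
    # Backward relu masks the incoming gradient: gradient[i] equals
    # inputGradient[i] where the forward input was positive, and 0 elsewhere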

    # Compute backward relu layer results
    backwardResult = reluLayerBackward.compute()

    # Print the results of the backward relu layer
    printTensor(backwardResult.getResult(layers.backward.gradient), "Backward relu layer result (first 5 rows):", 5)
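
For reference, the two stages in the example reduce to simple elementwise arithmetic. The sketch below reproduces the same computation in plain NumPy; it is only an illustration of what the layer computes, not part of the DAAL API, and the input values are made up.

import numpy as np

# Made-up stand-in for the tensor read from layer.csv
x = np.array([[-1.5, 0.0, 2.0],
              [ 3.0, -0.5, 1.0]], dtype=np.float32)

# Forward relu: value[i] = max(0, data[i])
y = np.maximum(0.0, x)

# Backward relu: the incoming gradient (a constant 0.01, as in the example)
# is passed through only where the forward input was positive
input_gradient = np.full_like(x, 0.01)
gradient = np.where(x > 0, input_gradient, 0.0)

print(y)         # [[0. 0. 2.] [3. 0. 1.]]
print(gradient)  # [[0. 0. 0.01] [0.01 0. 0.01]]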
