Python* API Reference for Intel® Data Analytics Acceleration Library 2018 Update 3

spat_ave_pool2d_layer_dense_batch.py

# file: spat_ave_pool2d_layer_dense_batch.py
#===============================================================================
# Copyright 2014-2018 Intel Corporation.
#
# This software and the related documents are Intel copyrighted materials, and
# your use of them is governed by the express license under which they were
# provided to you (License). Unless the License provides otherwise, you may not
# use, modify, copy, publish, distribute, disclose or transmit this software or
# the related documents without Intel's prior written permission.
#
# This software and the related documents are provided as is, with no express
# or implied warranties, other than those that are expressly stated in the
# License.
#===============================================================================

#
# ! Content:
# !    Python example of neural network forward and backward two-dimensional
# !    spatial pyramid average pooling layers usage
# !
# !*****************************************************************************

#
## <a name="DAAL-EXAMPLE-PY-SPAT_AVE_POOL2D_LAYER_DENSE_BATCH"></a>
## \example spat_ave_pool2d_layer_dense_batch.py
#
import os
import sys

import numpy as np

from daal.algorithms.neural_networks import layers
from daal.data_management import HomogenTensor

# Make the examples' shared "utils" module (one directory up from this
# script) importable regardless of the current working directory.
utils_folder = os.path.realpath(os.path.abspath(os.path.dirname(os.path.dirname(__file__))))
if utils_folder not in sys.path:
    sys.path.insert(0, utils_folder)
from utils import printTensor, printNumericTable

# Rank of the input tensor expected by the pooling layer.
nDim = 4
# Dimensions of the input data: 2 samples x 3 channels x 2 rows x 4 columns.
# (Kept for reference; the shape is carried by dataArray itself.)
dims = [2, 3, 2, 4]
# Input tensor: the second sample is the elementwise negation of the first,
# so the whole array sums to zero.
dataArray = np.array([[[[  2,   4,   6,   8],
                        [ 10,  12,  14,  16]],
                       [[ 18,  20,  22,  24],
                        [ 26,  28,  30,  32]],
                       [[ 34,  36,  38,  40],
                        [ 42,  44,  46,  48]]],
                      [[[ -2,  -4,  -6,  -8],
                        [-10, -12, -14, -16]],
                       [[-18, -20, -22, -24],
                        [-26, -28, -30, -32]],
                       [[-34, -36, -38, -40],
                        [-42, -44, -46, -48]]]],
                     dtype=np.float64)

if __name__ == "__main__":
    # Wrap the NumPy array in a DAAL tensor to use it as layer input.
    data = HomogenTensor(dataArray)

    # Print the tensor that will be fed to the forward layer.
    printTensor(data, "Forward two-dimensional spatial pyramid average pooling layer input (first 10 rows):", 10)

    # Create an algorithm to compute forward two-dimensional spatial pyramid
    # average pooling layer results using the default method.
    # NOTE(review): the first constructor argument (2) is presumably the
    # pyramid height; nDim is the rank of the input tensor — confirm against
    # the DAAL spatial pooling API docs.
    forwardLayer = layers.spatial_average_pooling2d.forward.Batch(2, nDim)
    forwardLayer.input.setInput(layers.forward.data, data)

    # Compute forward layer results and return them
    # (Result class from layers.spatial_average_pooling2d.forward).
    forwardResult = forwardLayer.compute()

    printTensor(forwardResult.getResult(layers.forward.value),
                "Forward two-dimensional spatial pyramid average pooling layer result (first 5 rows):",
                5)
    printNumericTable(forwardResult.getLayerData(layers.spatial_average_pooling2d.auxInputDimensions),
                      "Forward two-dimensional spatial pyramid average pooling layer input dimensions:")

    # Create an algorithm to compute backward two-dimensional spatial pyramid
    # average pooling layer results using the default method.
    backwardLayer = layers.spatial_average_pooling2d.backward.Batch(2, nDim)
    # Use the forward output as the incoming gradient, and pass along the
    # auxiliary data the forward pass saved for the backward pass.
    backwardLayer.input.setInput(layers.backward.inputGradient, forwardResult.getResult(layers.forward.value))
    backwardLayer.input.setInputLayerData(layers.backward.inputFromForward, forwardResult.getResultLayerData(layers.forward.resultForBackward))

    # Compute backward layer results
    # (Result class from layers.spatial_average_pooling2d.backward).
    backwardResult = backwardLayer.compute()

    printTensor(backwardResult.getResult(layers.backward.gradient),
                "Backward two-dimensional spatial pyramid average pooling layer result (first 10 rows):",
                10)

For more complete information about compiler optimizations, see our Optimization Notice.