C++ API Reference for Intel® Data Analytics Acceleration Library 2018 Update 3

eltwise_sum_layer_backward_types.h
1 /* file: eltwise_sum_layer_backward_types.h */
2 /*******************************************************************************
3 * Copyright 2014-2018 Intel Corporation.
4 *
5 * This software and the related documents are Intel copyrighted materials, and
6 * your use of them is governed by the express license under which they were
7 * provided to you (License). Unless the License provides otherwise, you may not
8 * use, modify, copy, publish, distribute, disclose or transmit this software or
9 * the related documents without Intel's prior written permission.
10 *
11 * This software and the related documents are provided as is, with no express
12 * or implied warranties, other than those that are expressly stated in the
13 * License.
14 *******************************************************************************/
15 
16 /*
17 //++
18 // Implementation of backward element-wise sum layer.
19 //--
20 */
21 
22 #ifndef __ELTWISE_SUM_LAYER_BACKWARD_TYPES_H__
23 #define __ELTWISE_SUM_LAYER_BACKWARD_TYPES_H__
24 
25 #include "algorithms/algorithm.h"
26 #include "services/daal_defines.h"
27 
28 #include "data_management/data/tensor.h"
29 
30 #include "algorithms/neural_networks/layers/layer_backward_types.h"
31 #include "algorithms/neural_networks/layers/eltwise_sum/eltwise_sum_layer_types.h"
32 
33 namespace daal
34 {
35 namespace algorithms
36 {
37 namespace neural_networks
38 {
39 namespace layers
40 {
41 namespace eltwise_sum
42 {
52 namespace backward
53 {
57 namespace interface1
58 {
59 
64 class DAAL_EXPORT Input : public layers::backward::Input
65 {
66 public:
67  typedef layers::backward::Input super;
71  Input();
72 
74  Input(const Input& other);
75 
76  virtual ~Input() {}
77 
81  using layers::backward::Input::get;
82 
86  using layers::backward::Input::set;
87 
93  data_management::TensorPtr get(LayerDataId id) const;
94 
100  data_management::NumericTablePtr get(LayerDataNumericTableId id) const;
101 
107  void set(LayerDataId id, const data_management::TensorPtr &value);
108 
114  void set(LayerDataNumericTableId id, const data_management::NumericTablePtr &value);
115 
123  services::Status check(const daal::algorithms::Parameter *par, int method) const DAAL_C11_OVERRIDE;
124 
130  size_t getNumberOfCoefficients() const;
131 
132 private:
133  size_t getNumberOfAuxCoefficientsFromTable() const;
134 
135  services::Status checkInputGradient() const;
136  services::Status checkAuxCoefficients() const;
137  services::Status checkAuxNumberOfCoefficients() const;
138 };
139 
144 class DAAL_EXPORT Result : public layers::backward::Result
145 {
146 public:
147  DECLARE_SERIALIZABLE_CAST(Result);
148 
152  Result();
153 
154  virtual ~Result() {}
155 
159  using layers::backward::Result::get;
160 
164  using layers::backward::Result::set;
165 
172  data_management::TensorPtr get(layers::backward::ResultLayerDataId id, size_t index) const;
173 
180  void set(layers::backward::ResultLayerDataId id, const data_management::TensorPtr &value, size_t index);
181 
187  virtual data_management::TensorPtr getGradient(size_t index) const DAAL_C11_OVERRIDE;
188 
197  template <typename algorithmFPType>
198  DAAL_EXPORT services::Status allocate(const daal::algorithms::Input *input,
199  const daal::algorithms::Parameter *parameter, const int method);
200 
209  services::Status check(const daal::algorithms::Input *input,
210  const daal::algorithms::Parameter *par, int method) const DAAL_C11_OVERRIDE;
211 
216  virtual LayerResultLayout getLayout() const DAAL_C11_OVERRIDE;
217 
218 protected:
220  template<typename Archive, bool onDeserialize>
221  services::Status serialImpl(Archive *arch)
222  {
223  return daal::algorithms::Result::serialImpl<Archive, onDeserialize>(arch);
224  }
225 
226 private:
227  template<typename algorithmFPType>
228  services::Status allocateNewOutputTensors(const data_management::TensorPtr &inputGradient, size_t nOutputs);
229 
230  LayerDataPtr getResultLayerDataAllocateIfEmpty();
231 
232  void useInputGradientTensorAsOutput(const data_management::TensorPtr &inputGradient, size_t nOutputs);
233 
234  services::Status checkResultLayerData(const Input *input) const;
235  services::Status checkOutputGradients(const Input *input) const;
236 };
237 
238 typedef services::SharedPtr<Result> ResultPtr;
239 } // namespace interface1
240 
241 using interface1::Input;
242 using interface1::Result;
243 using interface1::ResultPtr;
244 
245 } // namespace backward
247 } // namespace eltwise_sum
248 } // namespace layers
249 } // namespace neural_networks
250 } // namespace algorithms
251 } // namespace daal
252 #endif
daal
Definition: algorithm_base_common.h:31
daal::algorithms::neural_networks::layers::eltwise_sum::backward::interface1::Result
Results obtained with the compute() method of the backward element-wise sum layer.
Definition: eltwise_sum_layer_backward_types.h:144
daal_defines.h
daal::algorithms::neural_networks::layers::eltwise_sum::LayerDataNumericTableId
LayerDataNumericTableId
Available identifiers of input numeric tables for the backward element-wise sum layer and identifiers of result numeric tables for the forward element-wise sum layer
Definition: eltwise_sum_layer_types.h:72
daal::algorithms::neural_networks::layers::backward::ResultLayerDataId
ResultLayerDataId
Definition: layer_backward_types.h:94
daal::algorithms::neural_networks::layers::backward::inputGradient
Definition: layer_backward_types.h:64
daal::algorithms::neural_networks::layers::LayerResultLayout
LayerResultLayout
Definition: layer_types.h:68
daal::algorithms::math::abs::value
Definition: abs_types.h:86
daal::algorithms::neural_networks::layers::eltwise_sum::LayerDataId
LayerDataId
Available identifiers of input tensors for the backward element-wise sum layer and identifiers of result tensors for the forward element-wise sum layer
Definition: eltwise_sum_layer_types.h:61
daal::algorithms::neural_networks::layers::eltwise_sum::backward::interface1::Input
Input objects for the backward element-wise sum layer
Definition: eltwise_sum_layer_backward_types.h:64

For more complete information about compiler optimizations, see our Optimization Notice.