C++ API Reference for Intel® Data Analytics Acceleration Library 2019

tanh_layer_backward_types.h
/* file: tanh_layer_backward_types.h */
/*******************************************************************************
* Copyright 2014-2018 Intel Corporation.
*
* This software and the related documents are Intel copyrighted materials, and
* your use of them is governed by the express license under which they were
* provided to you (License). Unless the License provides otherwise, you may not
* use, modify, copy, publish, distribute, disclose or transmit this software or
* the related documents without Intel's prior written permission.
*
* This software and the related documents are provided as is, with no express
* or implied warranties, other than those that are expressly stated in the
* License.
*******************************************************************************/

/*
//++
//  Implementation of the backward hyperbolic tangent layer.
//--
*/

#ifndef __TANH_LAYER_BACKWARD_TYPES_H__
#define __TANH_LAYER_BACKWARD_TYPES_H__

#include "algorithms/algorithm.h"
#include "data_management/data/tensor.h"
#include "algorithms/neural_networks/layers/layer_backward_types.h"
#include "algorithms/neural_networks/layers/tanh/tanh_layer_types.h"
#include "data_management/data/homogen_tensor.h"
#include "services/daal_defines.h"

namespace daal
{
namespace algorithms
{
namespace neural_networks
{
namespace layers
{
namespace tanh
{
namespace backward
{
namespace interface1
{
/**
 * Input objects for the backward hyperbolic tangent layer
 */
class DAAL_EXPORT Input : public layers::backward::Input
{
public:
    typedef layers::backward::Input super;

    /** Default constructor */
    Input();

    /** Copy constructor */
    Input(const Input& other);

    virtual ~Input() {}

    using layers::backward::Input::get;
    using layers::backward::Input::set;

    /**
     * Returns an input tensor of the backward hyperbolic tangent layer
     * \param[in] id    Identifier of the input tensor
     */
    data_management::TensorPtr get(LayerDataId id) const;

    /**
     * Sets an input tensor of the backward hyperbolic tangent layer
     * \param[in] id     Identifier of the input tensor
     * \param[in] value  Pointer to the tensor
     */
    void set(LayerDataId id, const data_management::TensorPtr &value);

    /**
     * Checks an input object of the backward hyperbolic tangent layer
     */
    services::Status check(const daal::algorithms::Parameter *par, int method) const DAAL_C11_OVERRIDE;
};
/**
 * Provides methods to access the result obtained with the compute() method
 * of the backward hyperbolic tangent layer
 */
class DAAL_EXPORT Result : public layers::backward::Result
{
public:
    DECLARE_SERIALIZABLE_CAST(Result);

    /** Default constructor */
    Result();

    virtual ~Result() {}

    using layers::backward::Result::get;
    using layers::backward::Result::set;

    /**
     * Checks the result of the backward hyperbolic tangent layer
     */
    services::Status check(const daal::algorithms::Input *input, const daal::algorithms::Parameter *par, int method) const DAAL_C11_OVERRIDE;

    /**
     * Allocates memory to store the result of the backward hyperbolic tangent layer
     */
    template <typename algorithmFPType>
    DAAL_EXPORT services::Status allocate(const daal::algorithms::Input *input, const daal::algorithms::Parameter *par, const int method);

protected:
    template<typename Archive, bool onDeserialize>
    services::Status serialImpl(Archive *arch)
    {
        return daal::algorithms::Result::serialImpl<Archive, onDeserialize>(arch);
    }
};
typedef services::SharedPtr<Result> ResultPtr;
} // namespace interface1
using interface1::Input;
using interface1::Result;
using interface1::ResultPtr;
} // namespace backward
} // namespace tanh
} // namespace layers
} // namespace neural_networks
} // namespace algorithms
} // namespace daal
#endif
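
Usage note (not part of the header): the Input and Result classes above are normally exercised through the layer's batch processing interface rather than constructed directly. The sketch below is a minimal, hedged example assuming the umbrella "daal.h" header and the forward/backward Batch classes from the companion tanh layer headers; the tensor dimensions and fill values are arbitrary placeholders.

#include "daal.h"

using namespace daal;
using namespace daal::algorithms::neural_networks;
using namespace daal::data_management;

int main()
{
    /* Placeholder dimensions for the data tensor */
    services::Collection<size_t> dims;
    dims.push_back(2);
    dims.push_back(3);

    /* Run the forward hyperbolic tangent layer first so that the data
       required by the backward step is produced */
    layers::tanh::forward::Batch<> tanhLayerForward;
    tanhLayerForward.input.set(layers::forward::data,
        TensorPtr(new HomogenTensor<float>(dims, Tensor::doAllocate, 0.5f)));
    tanhLayerForward.compute();
    layers::forward::ResultPtr forwardResult = tanhLayerForward.getResult();

    /* Backward layer: the set() calls below go through the Input class
       declared in this header */
    layers::tanh::backward::Batch<> tanhLayerBackward;
    tanhLayerBackward.input.set(layers::backward::inputGradient,
        TensorPtr(new HomogenTensor<float>(dims, Tensor::doAllocate, 1.0f)));
    tanhLayerBackward.input.set(layers::backward::inputFromForward,
        forwardResult->get(layers::forward::resultForBackward));

    /* The tanh-specific get(LayerDataId) overload retrieves the value
       saved by the forward layer (tanh::auxValue from tanh_layer_types.h) */
    TensorPtr auxValue = tanhLayerBackward.input.get(layers::tanh::auxValue);

    tanhLayerBackward.compute();

    /* The Result class declared in this header gives access to the gradient */
    layers::backward::ResultPtr backwardResult = tanhLayerBackward.getResult();
    TensorPtr gradient = backwardResult->get(layers::backward::gradient);

    return 0;
}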