C++ API Reference for Intel® Data Analytics Acceleration Library 2018 Update 1

neural_networks_training_model.h
/* file: neural_networks_training_model.h */
/*******************************************************************************
* Copyright 2014-2017 Intel Corporation
* All Rights Reserved.
*
* If this software was obtained under the Intel Simplified Software License,
* the following terms apply:
*
* The source code, information and material ("Material") contained herein is
* owned by Intel Corporation or its suppliers or licensors, and title to such
* Material remains with Intel Corporation or its suppliers or licensors. The
* Material contains proprietary information of Intel or its suppliers and
* licensors. The Material is protected by worldwide copyright laws and treaty
* provisions. No part of the Material may be used, copied, reproduced,
* modified, published, uploaded, posted, transmitted, distributed or disclosed
* in any way without Intel's prior express written permission. No license under
* any patent, copyright or other intellectual property rights in the Material
* is granted to or conferred upon you, either expressly, by implication,
* inducement, estoppel or otherwise. Any license under such intellectual
* property rights must be express and approved by Intel in writing.
*
* Unless otherwise agreed by Intel in writing, you may not remove or alter this
* notice or any other notice embedded in Materials by Intel or Intel's
* suppliers or licensors in any way.
*
*
* If this software was obtained under the Apache License, Version 2.0 (the
* "License"), the following terms apply:
*
* You may not use this file except in compliance with the License. You may
* obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/

/*
//++
// Implementation of the neural network training model.
//--
*/

#ifndef __NEURAL_NETWORK_TRAINING_MODEL_H__
#define __NEURAL_NETWORK_TRAINING_MODEL_H__

#include "services/daal_defines.h"
#include "data_management/data/tensor.h"
#include "data_management/data/numeric_table.h"
#include "services/daal_memory.h"
#include "algorithms/neural_networks/layers/layer.h"
#include "algorithms/neural_networks/layers/layer_types.h"
#include "algorithms/neural_networks/layers/loss/loss_layer_forward.h"
#include "algorithms/neural_networks/layers/split/split_layer_forward.h"
#include "algorithms/neural_networks/neural_networks_prediction_model.h"
#include "algorithms/neural_networks/neural_networks_training_topology.h"

#include "algorithms/optimization_solver/iterative_solver/iterative_solver_batch.h"

namespace daal
{
namespace algorithms
{
namespace neural_networks
{
namespace training
{
namespace interface1
{
/** Class representing the parameters of neural network training */
class Parameter : public daal::algorithms::Parameter
{
public:
    Parameter(const services::SharedPtr<optimization_solver::iterative_solver::Batch> &optimizationSolver_ =
                  services::SharedPtr<optimization_solver::iterative_solver::Batch>(),
              engines::EnginePtr engine_ = engines::mt19937::Batch<DAAL_ALGORITHM_FP_TYPE>::create()) :
        optimizationSolver(optimizationSolver_),
        engine(engine_) {}

    services::SharedPtr<optimization_solver::iterative_solver::Batch> optimizationSolver; /*!< Optimization solver used at the training stage */
    engines::EnginePtr engine;                                                            /*!< Engine used by the weights and biases initializers */
};
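
/*
 * Usage sketch (illustrative addition, not part of the original header):
 * constructing a training Parameter with a concrete iterative solver. The SGD
 * solver type optimization_solver::sgd::Batch<float> is assumed to be available
 * from its own header; any subclass of
 * optimization_solver::iterative_solver::Batch can be supplied.
 *
 * \code
 * using namespace daal::algorithms;
 *
 * services::SharedPtr<optimization_solver::sgd::Batch<float> >
 *     sgdSolver(new optimization_solver::sgd::Batch<float>());
 * neural_networks::training::Parameter trainingParameter(sgdSolver);
 * \endcode
 */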

/** Class representing the model of a neural network */
class DAAL_EXPORT Model : public neural_networks::ModelImpl
{
public:
    DECLARE_SERIALIZABLE_CAST(Model);

    using neural_networks::ModelImpl::getWeightsAndBiases;
    using neural_networks::ModelImpl::setWeightsAndBiases;

    /** Default constructor */
    Model();

    /** Constructs a neural network training model; the optional \p stat argument receives the construction status */
    static services::SharedPtr<Model> create(services::Status *stat = NULL);

    /** Copy constructor */
    Model(const Model &model) :
        ModelImpl(model),
        _backwardLayers(model.getBackwardLayers()),
        _storeWeightDerivativesInTable(model._storeWeightDerivativesInTable)
    {}

    /** Destructor */
    virtual ~Model() {}
    /**
     * Initializes the neural network for the training stage: creates forward and
     * backward layers from the topology and connects them
     * \param[in] sampleSize  Dimensions of the input data tensor
     * \param[in] topology    Collection of layer descriptors of every inserted layer
     * \param[in] parameter   Parameters of the training
     */
    template<typename modelFPType>
    services::Status initialize(const services::Collection<size_t> &sampleSize, const Topology &topology,
                                const Parameter &parameter = Parameter())
    {
        using namespace layers;
        using namespace services;

        size_t nLayers = topology.size();
        Status st;
        _backwardNextLayers = SharedPtr<Collection<NextLayers> >(new Collection<NextLayers>(nLayers));
        if (!_backwardNextLayers)
        {
            st.add(services::ErrorMemoryAllocationFailed);
            return st;
        }

        for (size_t i = 0; i < nLayers; i++)
        {
            insertLayer(topology[i]);
        }

        /* Build the list of next layers for each backward layer by reversing the forward connections */
        for (int i = (int)nLayers - 1; i >= 0; i--)
        {
            size_t layerId = topology[i].index();
            const NextLayers &next = topology[i].nextLayers();
            for (size_t j = 0; j < next.size(); j++)
            {
                (*_backwardNextLayers)[next[j]].push_back(layerId);
            }
        }

        /* Disable in-place computation in the layers that follow a split layer */
        for (int i = (int)nLayers - 1; i >= 0; i--)
        {
            layers::forward::LayerIfacePtr layer = getForwardLayer(i);
            SharedPtr<split::forward::Batch<float> >  splitLayerFloat  = dynamicPointerCast<split::forward::Batch<float>,  forward::LayerIface>(layer);
            SharedPtr<split::forward::Batch<double> > splitLayerDouble = dynamicPointerCast<split::forward::Batch<double>, forward::LayerIface>(layer);
            if (splitLayerFloat.get() || splitLayerDouble.get())
            {
                const NextLayers &next = topology[i].nextLayers();
                for (size_t j = 0; j < next.size(); j++)
                {
                    layers::forward::LayerIfacePtr nextLayer = getForwardLayer(next[j]);
                    nextLayer->getLayerParameter()->allowInplaceComputation = false;
                }
            }
        }

        st |= allocate<modelFPType>(sampleSize, parameter);
        if (!st) return st;

        for (size_t i = 0; i < nLayers; i++)
        {
            getForwardLayer(i)->enableResetOnCompute(false);
            getBackwardLayer(i)->enableResetOnCompute(false);
        }
        return st;
    }
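
    /*
     * Usage sketch (illustrative addition, not part of the original header):
     * building a minimal two-layer topology and initializing a training model
     * from it. The fullyconnected and loss::softmax_cross layer types come from
     * their own headers; Topology::push_back() is declared in
     * neural_networks_training_topology.h, and Topology::get()/
     * LayerDescriptor::addNext() are assumed for wiring the connection.
     *
     * \code
     * using namespace daal::algorithms::neural_networks;
     * using namespace daal::services;
     *
     * SharedPtr<layers::fullyconnected::Batch<> > fc(new layers::fullyconnected::Batch<>(2));
     * SharedPtr<layers::loss::softmax_cross::Batch<> > loss(new layers::loss::softmax_cross::Batch<>());
     *
     * training::Topology topology;
     * const size_t fcId   = topology.push_back(fc);
     * const size_t lossId = topology.push_back(loss);
     * topology.get(fcId).addNext(lossId);   // connect fc -> loss
     *
     * Collection<size_t> sampleSize;
     * sampleSize.push_back(10);   // batch size
     * sampleSize.push_back(20);   // feature dimension
     *
     * training::ModelPtr model = training::Model::create();
     * model->initialize<float>(sampleSize, topology);
     * \endcode
     */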

    /** Returns the collection of forward layers */
    const ForwardLayersPtr getForwardLayers() const
    {
        return _forwardLayers;
    }

    /** Returns the forward layer with the given index */
    const layers::forward::LayerIfacePtr getForwardLayer(size_t index) const
    {
        return _forwardLayers->get(index);
    }

    /** Returns the collection of backward layers */
    const BackwardLayersPtr getBackwardLayers() const
    {
        return _backwardLayers;
    }

    /** Returns the backward layer with the given index */
    const layers::backward::LayerIfacePtr getBackwardLayer(size_t index) const
    {
        return _backwardLayers->get(index);
    }
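
    /*
     * Usage sketch (illustrative addition): iterating over the layers of an
     * initialized model, e.g. to inspect or adjust per-layer parameters before
     * training starts.
     *
     * \code
     * size_t nLayers = model->getForwardLayers()->size();
     * for (size_t i = 0; i < nLayers; i++)
     * {
     *     layers::forward::LayerIfacePtr fwd = model->getForwardLayer(i);
     *     fwd->getLayerParameter()->allowInplaceComputation = false; // hypothetical tweak
     * }
     * \endcode
     */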

    /** Returns the prediction model built from the forward layers of the training model */
    template<typename modelFPType>
    const prediction::ModelPtr getPredictionModel()
    {
        using namespace services;
        using namespace data_management;
        using namespace layers;

        size_t nLayers = _forwardLayers->size();

        /* Copy the forward layers and their connections for the prediction stage */
        ForwardLayersPtr _predictionForwardLayers(new ForwardLayers(nLayers));
        SharedPtr<Collection<NextLayers> > _predictionNextLayers(new Collection<NextLayers>(nLayers));
        for (size_t i = 0; i < nLayers; i++)
        {
            (*_predictionNextLayers)[i] = _nextLayers->get(i);
            (*_predictionForwardLayers)[i] = ((*_forwardLayers)[i])->getLayerForPrediction();
            (*_predictionForwardLayers)[i]->getLayerParameter()->predictionStage = true;
        }

        bool storeWeightsInTable = true;
        prediction::ModelPtr predictionModel(new prediction::Model(
            _predictionForwardLayers, _predictionNextLayers, (modelFPType)0.0, storeWeightsInTable));

        predictionModel->setWeightsAndBiases(getWeightsAndBiases());
        return predictionModel;
    }
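
    /*
     * Usage sketch (illustrative addition): once training has finished, deriving
     * the prediction model that shares the trained weights and biases.
     *
     * \code
     * prediction::ModelPtr predictionModel = trainedModel->getPredictionModel<float>();
     * \endcode
     */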

    /** Returns true if the weights and biases of all layers are stored in one numeric table */
    bool getWeightsAndBiasesStorageStatus() const
    {
        return _storeWeightsInTable;
    }

    /** Sets the weights and biases of the layer with the given index from a numeric table */
    services::Status setWeightsAndBiases(size_t idx, const data_management::NumericTablePtr &table);

    /** Returns the weights and biases of the layer with the given index as a numeric table */
    data_management::NumericTablePtr getWeightsAndBiases(size_t idx) const;

    /** Returns the derivatives of the weights and biases of all layers as a numeric table */
    data_management::NumericTablePtr getWeightsAndBiasesDerivatives() const;

    /** Returns the derivatives of the weights and biases of the layer with the given index */
    data_management::NumericTablePtr getWeightsAndBiasesDerivatives(size_t idx) const;
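
    /*
     * Usage sketch (illustrative addition): reading the weights and biases of a
     * single layer as a numeric table, e.g. for inspection or checkpointing, and
     * writing them back.
     *
     * \code
     * const size_t layerId = 0;
     * data_management::NumericTablePtr wb = model->getWeightsAndBiases(layerId);
     * model->setWeightsAndBiases(layerId, wb); // restore them unchanged
     * \endcode
     */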

    /** Deprecated. Sets the error collection of the model; kept for interface compatibility and has no effect */
    DAAL_DEPRECATED services::Status setErrors(services::ErrorCollection &errors)
    {
        return services::Status();
    }

    /** Deprecated. Returns the error collection of the model */
    DAAL_DEPRECATED const services::ErrorCollection &getErrors() const { return _errors; }

    /**
     * Allocates the buffers needed for training: the input tensor of the first
     * forward layer, the results of all layers, and the tables for the weights,
     * biases, and their derivatives
     * \param[in] sampleSize  Dimensions of the input data tensor
     * \param[in] parameter   Parameters of the training
     */
    template<typename modelFPType>
    services::Status allocate(const services::Collection<size_t> &sampleSize, const Parameter &parameter = Parameter())
    {
        using namespace services;
        using namespace data_management;
        using namespace layers;

        services::Status s;

        if (_sampleSize.size() > 0) { _sampleSize.clear(); }
        _sampleSize = sampleSize;

        /* Attach an input tensor of the given dimensions to the first forward layer */
        _forwardLayers->get(0)->getLayerInput()->set(forward::data,
            TensorPtr(new HomogenTensor<modelFPType>(_sampleSize, Tensor::doAllocate)));

        size_t nLayers = _forwardLayers->size();

        for (size_t i = 0; i < nLayers; i++)
        {
            layers::Parameter *lParameter = _forwardLayers->get(i)->getLayerParameter();
            initializers::Parameter *wParameter = lParameter->weightsInitializer->getParameter();
            initializers::Parameter *bParameter = lParameter->biasesInitializer->getParameter();

            s |= connectForwardLayers(i);

            /* Propagate the engine of the training parameter to the initializers
               that do not have an engine of their own */
            if (!wParameter->engine)
            {
                wParameter->engine = parameter.engine;
            }
            if (!bParameter->engine)
            {
                bParameter->engine = parameter.engine;
            }
        }

        bool checkWeightsAndBiasesAlloc = true;
        s |= createWeightsAndBiases<modelFPType>(checkWeightsAndBiasesAlloc);
        s |= enableConditionalGradientPropagation();
        if (!s) return s;

        for (size_t i = 0; i < nLayers; i++)
        {
            forward::LayerIfacePtr forwardLayer = _forwardLayers->get(i);
            forward::Input *forwardInput = forwardLayer->getLayerInput();

            forwardLayer->getLayerResult()->setResultForBackward(forwardInput);
        }

        /* Check weights and biases derivatives allocation status before allocating the results of backward layers */
        s |= checkWeightsAndBiasesDerivativesAllocation();

        for (int i = (int)nLayers - 1; i >= 0; i--)
        {
            s |= connectBackwardLayers(i);
        }

        s |= createWeightsAndBiasesDerivatives<modelFPType>();
        if (_solverOptionalArgumentCollection.size() == 0)
        {
            if (_storeWeightsInTable) { _solverOptionalArgumentCollection = DataCollection(1); }
            else                      { _solverOptionalArgumentCollection = DataCollection(nLayers); }
        }
        return s;
    }

protected:
    /** Constructs a neural network training model and reports the construction status */
    Model(services::Status &st);

    template<typename Archive, bool onDeserialize>
    services::Status serialImpl(Archive *arch)
    {
        return services::Status();
    }

    /** Inserts the forward and backward layers and the layer connections from the descriptor into the model */
    void insertLayer(const layers::LayerDescriptor &layerDescriptor)
    {
        _forwardLayers->insert(layerDescriptor.index(), layerDescriptor.layer()->forwardLayer->clone());
        _backwardLayers->insert(layerDescriptor.index(), layerDescriptor.layer()->backwardLayer->clone());
        _nextLayers->insert(layerDescriptor.index(), layerDescriptor.nextLayers());
    }

    services::Status enableConditionalGradientPropagation()
    {
        using namespace services;
        using namespace layers;

        services::Status s;

        size_t nLayers = _forwardLayers->size();

        /* Array of visit flags for the neural network layers */
        bool *flags = (bool *)daal_malloc(nLayers * sizeof(bool));
        if (!flags) { return services::Status(services::ErrorMemoryAllocationFailed); }

        /* Perform a depth-first search to disable gradient propagation in the starting
           forward layers with weights and in all layers that precede them */
        s |= disableGradientPropagationInStartingLayers(nLayers, flags);

        /* Perform a depth-first search to enable gradient propagation in the layers
           that follow forward layers with weights */
        s |= enableGradientPropagation(nLayers, flags);

        daal_free(flags);
        return s;
    }

    services::Status disableGradientPropagationInStartingLayers(size_t nLayers, bool *visited)
    {
        using namespace services;
        using namespace layers;

        for (size_t i = 0; i < nLayers; i++)
        {
            visited[i] = false;
        }

        Collection<size_t> stack;
        stack.push_back(0 /* ID of the first forward layer */);
        while (stack.size() > 0)
        {
            size_t layerId = stack[stack.size() - 1];
            stack.erase(stack.size() - 1);
            if (!visited[layerId])
            {
                visited[layerId] = true;

                forward::LayerIfacePtr forwardLayer = _forwardLayers->get(layerId);
                forward::Input *forwardInput = forwardLayer->getLayerInput();
                layers::Parameter *forwardParameter = forwardLayer->getLayerParameter();
                layers::Parameter *backwardParameter = _backwardLayers->get(layerId)->getLayerParameter();

                backwardParameter->propagateGradient = false;

                if (forwardInput->getWeightsSizes(forwardParameter).size() +
                    forwardInput->getBiasesSizes(forwardParameter).size() == 0)
                {
                    /* Continue the depth-first search from layers that have no weights and biases */
                    const NextLayers &next = _nextLayers->get(layerId);
                    for (size_t i = 0; i < next.size(); i++)
                    {
                        stack.push_back(next[i]);
                    }
                }
            }
        }
        return services::Status();
    }

    services::Status enableGradientPropagationInSubsequentLayers(size_t startLayerId, size_t nLayers, bool *enabledPropagation)
    {
        using namespace services;
        using namespace layers;
        Collection<size_t> stack;
        const NextLayers &next = _nextLayers->get(startLayerId);
        for (size_t i = 0; i < next.size(); i++)
        {
            stack.push_back(next[i]);
        }
        while (stack.size() > 0)
        {
            size_t layerId = stack[stack.size() - 1];
            stack.erase(stack.size() - 1);
            if (!enabledPropagation[layerId])
            {
                enabledPropagation[layerId] = true;
                backward::LayerIfacePtr backwardLayer = _backwardLayers->get(layerId);
                backwardLayer->getLayerParameter()->propagateGradient = true;
                const NextLayers &nextLayers = _nextLayers->get(layerId);
                for (size_t i = 0; i < nextLayers.size(); i++)
                {
                    stack.push_back(nextLayers[i]);
                }
            }
        }
        return services::Status();
    }

    services::Status enableGradientPropagation(size_t nLayers, bool *enabledPropagation)
    {
        using namespace services;
        using namespace layers;

        services::Status s;
        Collection<size_t> stack;
        stack.push_back(0 /* ID of the first forward layer */);

        for (size_t i = 0; i < nLayers; i++)
        {
            enabledPropagation[i] = false;
        }

        while (stack.size() > 0)
        {
            size_t layerId = stack[stack.size() - 1];
            stack.erase(stack.size() - 1);
            if (!enabledPropagation[layerId])
            {
                forward::LayerIfacePtr forwardLayer = _forwardLayers->get(layerId);
                forward::Input *forwardInput = forwardLayer->getLayerInput();
                layers::Parameter *forwardParameter = forwardLayer->getLayerParameter();
                layers::Parameter *backwardParameter = _backwardLayers->get(layerId)->getLayerParameter();

                if (!backwardParameter->propagateGradient &&
                    (forwardInput->getWeightsSizes(forwardParameter).size() +
                     forwardInput->getBiasesSizes(forwardParameter).size()) > 0)
                {
                    /* First layer with weights on this path: enable gradient propagation in all subsequent layers */
                    s |= enableGradientPropagationInSubsequentLayers(layerId, nLayers, enabledPropagation);
                }
                else
                {
                    const NextLayers &next = _nextLayers->get(layerId);
                    for (size_t i = 0; i < next.size(); i++)
                    {
                        stack.push_back(next[i]);
                    }
                }
            }
        }
        return s;
    }

    services::Status checkWeightsAndBiasesDerivativesAllocation()
    {
        using namespace services;
        using namespace layers;

        _storeWeightDerivativesInTable = true;
        size_t nLayers = _backwardLayers->size();
        for (size_t i = 0; i < nLayers; i++)
        {
            backward::LayerIfacePtr &backwardLayer = _backwardLayers->get(i);
            if (!backwardLayer) { continue; }
            backward::ResultPtr backwardResult = backwardLayer->getLayerResult();
            /* If the user has already allocated the weight or bias derivatives of any layer,
               the derivatives cannot be stored in a single table */
            if (backwardResult->get(backward::weightDerivatives) || backwardResult->get(backward::biasDerivatives))
            {
                _storeWeightDerivativesInTable = false;
                break;
            }
        }
        return services::Status();
    }

    services::Status connectBackwardLayers(size_t layerId)
    {
        using namespace services;
        using namespace data_management;
        using namespace layers;

        forward::LayerIfacePtr &forwardLayer = _forwardLayers->get(layerId);
        backward::LayerIfacePtr &backwardLayer = _backwardLayers->get(layerId);

        if (!forwardLayer || !backwardLayer) { return services::Status(); }

        services::Status s;
        backward::Input *backwardInput = backwardLayer->getLayerInput();
        forward::ResultPtr forwardResult = forwardLayer->getLayerResult();

        backwardInput->setInputFromForward(forwardResult);
        s |= backwardLayer->allocateResult();
        if (!s) return s;

        /* Do not connect the backward layer to the next backward layers
           if the layer does not propagate the gradient */
        if (!backwardLayer->getLayerParameter()->propagateGradient) { return s; }

        backward::ResultPtr backwardResult = backwardLayer->getLayerResult();

        const NextLayers &next = _backwardNextLayers->get(layerId);
        const size_t nextLayersSize = next.size();
        for (size_t j = 0; j < nextLayersSize; j++)
        {
            size_t inputIndex = nextLayersSize - j - 1;
            _backwardLayers->get(next[j])->addInput(backwardResult, inputIndex, 0 /* index in the input object of the next[j] backward layer */);
        }
        return s;
    }

    template<typename modelFPType>
    DAAL_EXPORT services::Status createWeightsAndBiasesDerivatives();

public:
    /** Returns the optional argument of the optimization solver with the given index */
    algorithms::OptionalArgumentPtr getSolverOptionalArgument(size_t index)
    {
        return services::dynamicPointerCast<algorithms::OptionalArgument, data_management::SerializationIface>(_solverOptionalArgumentCollection[index]);
    }

    /** Sets the optional argument of the optimization solver at the given index */
    services::Status setSolverOptionalArgument(const algorithms::OptionalArgumentPtr &solverOptionalArgument, size_t index)
    {
        _solverOptionalArgumentCollection[index] = solverOptionalArgument;
        return services::Status();
    }

    /** Returns the collection of optional arguments of the optimization solver */
    data_management::DataCollection getSolverOptionalArgumentCollection()
    {
        return _solverOptionalArgumentCollection;
    }

    /** Sets the collection of optional arguments of the optimization solver */
    services::Status setSolverOptionalArgumentCollection(const data_management::DataCollection &solverOptionalArgumentCollection)
    {
        _solverOptionalArgumentCollection = solverOptionalArgumentCollection;
        return services::Status();
    }

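    /*
     * Usage sketch (illustrative addition): persisting the state of the
     * optimization solver between training runs through the optional-argument
     * collection.
     *
     * \code
     * algorithms::OptionalArgumentPtr solverState = model->getSolverOptionalArgument(0);
     * // ... later, restore the state into a model with the same layer layout
     * model->setSolverOptionalArgument(solverState, 0);
     * \endcode
     */
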
private:
    data_management::DataCollection _solverOptionalArgumentCollection;
    services::Collection<size_t> _sampleSize;
    BackwardLayersPtr _backwardLayers;
    services::SharedPtr<services::Collection<layers::NextLayers> > _backwardNextLayers;
    mutable services::ErrorCollection _errors;
    bool _storeWeightDerivativesInTable;
    LearnableParametersIfacePtr _weightsAndBiasesDerivatives;
};

typedef services::SharedPtr<Model> ModelPtr;
} // namespace interface1
using interface1::Parameter;
using interface1::Model;
using interface1::ModelPtr;

} // namespace training
} // namespace neural_networks
} // namespace algorithms
} // namespace daal
#endif