C++ API Reference for Intel® Data Analytics Acceleration Library 2019 Update 4

classifier_predict.h
/* file: classifier_predict.h */
/*******************************************************************************
* Copyright 2014-2019 Intel Corporation.
*
* This software and the related documents are Intel copyrighted materials, and
* your use of them is governed by the express license under which they were
* provided to you (License). Unless the License provides otherwise, you may not
* use, modify, copy, publish, distribute, disclose or transmit this software or
* the related documents without Intel's prior written permission.
*
* This software and the related documents are provided as is, with no express
* or implied warranties, other than those that are expressly stated in the
* License.
*******************************************************************************/

/*
//++
// Implementation of the prediction stage of the classification algorithm interface.
//--
*/

#ifndef __CLASSIFIER_PREDICT_H__
#define __CLASSIFIER_PREDICT_H__

#include "algorithms/algorithm.h"
#include "algorithms/classifier/classifier_predict_types.h"

namespace daal
{
namespace algorithms
{
namespace classifier
{
namespace prediction
{

namespace interface1
{
/**
 * Base class for making predictions based on the model of the classification algorithms
 */
class Batch : public daal::algorithms::Prediction
{
public:
    typedef algorithms::classifier::prediction::Input  InputType;
    typedef algorithms::classifier::Parameter          ParameterType;
    typedef algorithms::classifier::prediction::Result ResultType;

    /** Default constructor */
    Batch()
    {
        initialize();
    }

    /**
     * Constructs an algorithm from another classifier prediction algorithm;
     * the base class only allocates a new result structure, copying of input
     * objects and parameters is handled by the derived classes
     * \param[in] other Algorithm to use as the source for construction
     */
    Batch(const Batch &other)
    {
        initialize();
    }

    virtual ~Batch() {}

    /**
     * Returns the input objects of the classification algorithm
     * \return Input objects of the classification algorithm
     */
    virtual InputType * getInput() = 0;

    /**
     * Returns the structure that contains the prediction results
     * \return Structure that contains the prediction results
     */
    ResultPtr getResult()
    {
        return _result;
    }

    /**
     * Registers user-allocated memory for storing the prediction results
     * \param[in] result Structure for storing the prediction results
     * \return Status of the operation
     */
    services::Status setResult(const ResultPtr &result)
    {
        DAAL_CHECK(result, services::ErrorNullResult)
        _result = result;
        _res = _result.get();
        return services::Status();
    }

    /**
     * Returns a pointer to a newly allocated copy of this classifier prediction
     * algorithm (the copy is created by cloneImpl() of the derived class)
     * \return Pointer to the newly allocated algorithm
     */
    services::SharedPtr<Batch> clone() const
    {
        return services::SharedPtr<Batch>(cloneImpl());
    }

protected:
    void initialize()
    {
        _result.reset(new ResultType());
    }
    virtual Batch * cloneImpl() const DAAL_C11_OVERRIDE = 0;
    ResultPtr _result;
};
} // namespace interface1
using interface1::Batch;

} // namespace prediction
} // namespace classifier
} // namespace algorithms
} // namespace daal
#endif
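
Batch is abstract (getInput() and cloneImpl() are pure virtual), so it is used through the prediction Batch classes of concrete classifiers, which expose a public input member and reuse the getResult()/setResult()/clone() interface defined above. The following is a minimal usage sketch, not part of this header: multinomial Naive Bayes is chosen only as one example of a derived prediction class, and testData, trainedModel, nClasses and the helper predictLabels are assumed to be provided by the caller (for example, from a matching training run).

#include "daal.h"

using namespace daal;
using namespace daal::algorithms;
using namespace daal::data_management;

/* Sketch: testData, trainedModel and nClasses are assumed to come from
   data preparation and a matching classifier training algorithm. */
void predictLabels(const NumericTablePtr &testData,
                   const classifier::ModelPtr &trainedModel,
                   size_t nClasses)
{
    /* Concrete prediction algorithm derived from classifier::prediction::Batch */
    multinomial_naive_bayes::prediction::Batch<> algorithm(nClasses);

    /* Fill the input exposed by the derived class (see getInput() above) */
    algorithm.input.set(classifier::prediction::data,  testData);
    algorithm.input.set(classifier::prediction::model, trainedModel);

    /* Run prediction; the result allocated in initialize() (or registered via
       setResult()) is filled during compute() */
    algorithm.compute();

    /* Retrieve the predicted labels through the base-class getResult() */
    classifier::prediction::ResultPtr result = algorithm.getResult();
    NumericTablePtr predictedLabels = result->get(classifier::prediction::prediction);
}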
