#include "daal.h"
#include "service.h"
using namespace std;
using namespace daal;
using namespace daal::algorithms;
using namespace daal::algorithms::neural_networks::layers;
using namespace daal::data_management;
using namespace daal::services;
string datasetName = "../data/batch/layer.csv";
int main()
{
size_t m = 5;
TensorPtr tensorData = readTensorFromCSV(datasetName);
fullyconnected::forward::Batch<> fullyconnectedLayerForward(m);
fullyconnectedLayerForward.input.set(forward::data, tensorData);
fullyconnectedLayerForward.compute();
fullyconnected::forward::ResultPtr forwardResult = fullyconnectedLayerForward.getResult();
printTensor(forwardResult->get(forward::value), "Forward fully-connected layer result (first 5 rows):", 5);
printTensor(forwardResult->get(fullyconnected::auxWeights), "Forward fully-connected layer weights (first 5 rows):", 5);
const Collection<size_t> &gDims = forwardResult->get(forward::value)->getDimensions();
TensorPtr tensorDataBack = TensorPtr(new HomogenTensor<>(gDims, Tensor::doAllocate, 0.01f));
fullyconnected::backward::Batch<> fullyconnectedLayerBackward(m);
fullyconnectedLayerBackward.input.set(backward::inputGradient, tensorDataBack);
fullyconnectedLayerBackward.input.set(backward::inputFromForward, forwardResult->get(forward::resultForBackward));
fullyconnectedLayerBackward.compute();
backward::ResultPtr backwardResult = fullyconnectedLayerBackward.getResult();
printTensor(backwardResult->get(backward::gradient),
"Backward fully-connected layer gradient result (first 5 rows):", 5);
printTensor(backwardResult->get(backward::weightDerivatives),
"Backward fully-connected layer weightDerivative result (first 5 rows):", 5);
printTensor(backwardResult->get(backward::biasDerivatives),
"Backward fully-connected layer biasDerivative result (first 5 rows):", 5);
return 0;
}