package com.intel.daal.examples.neural_networks;

import com.intel.daal.algorithms.neural_networks.layers.relu.*;
import com.intel.daal.algorithms.neural_networks.layers.ForwardResultId;
import com.intel.daal.algorithms.neural_networks.layers.ForwardResultLayerDataId;
import com.intel.daal.algorithms.neural_networks.layers.ForwardInputId;
import com.intel.daal.algorithms.neural_networks.layers.BackwardResultId;
import com.intel.daal.algorithms.neural_networks.layers.BackwardInputId;
import com.intel.daal.algorithms.neural_networks.layers.BackwardInputLayerDataId;
import com.intel.daal.data_management.data.Tensor;
import com.intel.daal.data_management.data.HomogenTensor;
import com.intel.daal.examples.utils.Service;
import com.intel.daal.services.DaalContext;

/* Example of computing forward and backward ReLU layer results in batch processing mode */
class ReLULayerDenseBatch {
    private static final String datasetFileName = "../data/batch/layer.csv";
    private static DaalContext context = new DaalContext();

    public static void main(String[] args) throws java.io.FileNotFoundException, java.io.IOException {
        /* Read the data set from a file and create a tensor to store forward input data */
        Tensor tensorData = Service.readTensorFromCSV(context, datasetFileName);

        /* Create an algorithm to compute forward ReLU layer results using the default method */
        ReluForwardBatch reluLayerForward = new ReluForwardBatch(context, Float.class, ReluMethod.defaultDense);

        /* Set the input data for the forward ReLU layer */
        reluLayerForward.input.set(ForwardInputId.data, tensorData);

        /* Compute forward ReLU layer results */
        ReluForwardResult forwardResult = reluLayerForward.compute();

        /* Print the results of the forward ReLU layer */
        Service.printTensor("Forward relu layer result (first 5 rows):", forwardResult.get(ForwardResultId.value), 5, 0);
        /* Get the size and dimensions of the forward ReLU layer output */
        int nSize = (int)forwardResult.get(ForwardResultId.value).getSize();
        long[] dims = forwardResult.get(ForwardResultId.value).getDimensions();

        /* Create a tensor of the same shape to serve as the input gradient for the backward ReLU layer */
        double[] data = new double[nSize];
        Tensor tensorDataBack = new HomogenTensor(context, dims, data, 0.01);
        /* Create an algorithm to compute backward ReLU layer results using the default method */
        ReluBackwardBatch reluLayerBackward = new ReluBackwardBatch(context, Float.class, ReluMethod.defaultDense);

        /* Set input objects for the backward ReLU layer: the input gradient and the data saved by the forward layer */
        reluLayerBackward.input.set(BackwardInputId.inputGradient, tensorDataBack);
        reluLayerBackward.input.set(BackwardInputLayerDataId.inputFromForward,
                                    forwardResult.get(ForwardResultLayerDataId.resultForBackward));

        /* Compute backward ReLU layer results */
        ReluBackwardResult backwardResult = reluLayerBackward.compute();

        /* Print the results of the backward ReLU layer */
        Service.printTensor("Backward relu layer result (first 5 rows):", backwardResult.get(BackwardResultId.gradient), 5, 0);

        context.dispose();
    }
}