package com.intel.daal.examples.quality_metrics;
import java.nio.DoubleBuffer;
import com.intel.daal.algorithms.classifier.prediction.ModelInputId;
import com.intel.daal.algorithms.classifier.prediction.NumericTableInputId;
import com.intel.daal.algorithms.classifier.prediction.PredictionResult;
import com.intel.daal.algorithms.classifier.prediction.PredictionResultId;
import com.intel.daal.algorithms.classifier.quality_metric.binary_confusion_matrix.*;
import com.intel.daal.algorithms.classifier.training.InputId;
import com.intel.daal.algorithms.classifier.training.TrainingResultId;
import com.intel.daal.algorithms.svm.Model;
import com.intel.daal.algorithms.svm.prediction.PredictionBatch;
import com.intel.daal.algorithms.svm.prediction.PredictionMethod;
import com.intel.daal.algorithms.svm.quality_metric_set.*;
import com.intel.daal.algorithms.svm.training.*;
import com.intel.daal.data_management.data.NumericTable;
import com.intel.daal.data_management.data.HomogenNumericTable;
import com.intel.daal.data_management.data.MergedNumericTable;
import com.intel.daal.data_management.data_source.DataSource;
import com.intel.daal.data_management.data_source.FileDataSource;
import com.intel.daal.examples.utils.Service;
import com.intel.daal.services.DaalContext;
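/**
 * Example of two-class support vector machine (SVM) classification with
 * quality metrics computation in the batch processing mode.
 *
 * The example trains an SVM model on a dense training data set, predicts
 * labels for a test data set, and computes the binary confusion matrix
 * together with accuracy, precision, recall, F-score, specificity, and AUC.
 */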
class SVMTwoClassQualityMetricSetBatchExample {
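    /* Input data set parameters */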
private static final String trainDatasetFileName = "../data/batch/svm_two_class_train_dense.csv";
private static final String testDatasetFileName = "../data/batch/svm_two_class_test_dense.csv";
private static final int nFeatures = 20;
private static TrainingResult trainingResult;
private static PredictionResult predictionResult;
private static ResultCollection qualityMetricSetResult;
private static NumericTable groundTruthLabels;
private static NumericTable predictedLabels;
private static DaalContext context = new DaalContext();
    public static void main(String[] args) throws java.io.IOException {
trainModel();
testModel();
testModelQuality();
printResults();
context.dispose();
}
private static void trainModel() {
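        /* Initialize a FileDataSource to retrieve the training data from a .csv file */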
FileDataSource trainDataSource = new FileDataSource(context, trainDatasetFileName,
DataSource.DictionaryCreationFlag.DoDictionaryFromContext,
DataSource.NumericTableAllocationFlag.NotAllocateNumericTable);
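        /* Create Numeric Tables for the training data and ground truth labels */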
NumericTable trainData = new HomogenNumericTable(context, Double.class, nFeatures, 0, NumericTable.AllocationFlag.NotAllocate);
NumericTable trainGroundTruth = new HomogenNumericTable(context, Double.class, 1, 0, NumericTable.AllocationFlag.NotAllocate);
MergedNumericTable mergedData = new MergedNumericTable(context);
mergedData.addNumericTable(trainData);
mergedData.addNumericTable(trainGroundTruth);
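        /* Load the training data and labels from the data source into the merged table */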
trainDataSource.loadDataBlock(mergedData);
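        /* Create an algorithm object to train the two-class SVM model with the Boser method */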
TrainingBatch algorithm = new TrainingBatch(context, Double.class, TrainingMethod.boser);
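        /* Set the kernel-matrix cache size (in bytes) and the linear kernel function */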
algorithm.parameter.setCacheSize(40000000);
algorithm.parameter
.setKernel(new com.intel.daal.algorithms.kernel_function.linear.Batch(context, Double.class));
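        /* Pass the training data set and labels to the algorithm */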
algorithm.input.set(InputId.data, trainData);
algorithm.input.set(InputId.labels, trainGroundTruth);
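        /* Train the SVM model */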
trainingResult = algorithm.compute();
}
private static void testModel() {
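        /* Initialize a FileDataSource to retrieve the test data from a .csv file */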
FileDataSource testDataSource = new FileDataSource(context, testDatasetFileName,
DataSource.DictionaryCreationFlag.DoDictionaryFromContext,
DataSource.NumericTableAllocationFlag.NotAllocateNumericTable);
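        /* Create Numeric Tables for the test data and ground truth labels */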
NumericTable testData = new HomogenNumericTable(context, Double.class, nFeatures, 0, NumericTable.AllocationFlag.NotAllocate);
groundTruthLabels = new HomogenNumericTable(context, Double.class, 1, 0, NumericTable.AllocationFlag.NotAllocate);
MergedNumericTable mergedData = new MergedNumericTable(context);
mergedData.addNumericTable(testData);
mergedData.addNumericTable(groundTruthLabels);
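        /* Load the test data and labels from the data source into the merged table */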
testDataSource.loadDataBlock(mergedData);
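        /* Create an algorithm object to predict two-class SVM labels, using the same linear kernel */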
PredictionBatch algorithm = new PredictionBatch(context, Double.class, PredictionMethod.defaultDense);
algorithm.parameter
.setKernel(new com.intel.daal.algorithms.kernel_function.linear.Batch(context, Double.class));
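        /* Pass the test data and the trained model to the prediction algorithm */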
Model model = trainingResult.get(TrainingResultId.model);
algorithm.input.set(NumericTableInputId.data, testData);
algorithm.input.set(ModelInputId.model, model);
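        /* Compute the prediction results */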
predictionResult = algorithm.compute();
}
private static void testModelQuality() {
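        /* Retrieve the labels predicted for the test data */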
predictedLabels = predictionResult.get(PredictionResultId.prediction);
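        /* Create a quality metric set object to compute the binary confusion matrix */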
        QualityMetricSetBatch qualityMetricSet = new QualityMetricSetBatch(context);
        BinaryConfusionMatrixInput input = qualityMetricSet.getInputDataCollection()
                .getInput(QualityMetricId.confusionMatrix);
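        /* Set the predicted and ground truth labels as input for the confusion matrix */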
input.set(BinaryConfusionMatrixInputId.predictedLabels, predictedLabels);
input.set(BinaryConfusionMatrixInputId.groundTruthLabels, groundTruthLabels);
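        /* Compute the quality metrics */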
        qualityMetricSetResult = qualityMetricSet.compute();
}
private static void printResults() {
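        /* Print the ground truth and predicted labels for the first 20 observations */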
Service.printClassificationResult(groundTruthLabels, predictedLabels, "Ground truth", "Classification results",
"SVM classification results (first 20 observations):", 20);
BinaryConfusionMatrixResult qualityMetricResult = qualityMetricSetResult
.getResult(QualityMetricId.confusionMatrix);
NumericTable confusionMatrix = qualityMetricResult.get(BinaryConfusionMatrixResultId.confusionMatrix);
NumericTable binaryMetrics = qualityMetricResult.get(BinaryConfusionMatrixResultId.binaryMetrics);
Service.printNumericTable("Confusion matrix:", confusionMatrix);
DoubleBuffer qualityMetricsData = DoubleBuffer
.allocate((int) (binaryMetrics.getNumberOfColumns() * binaryMetrics.getNumberOfRows()));
qualityMetricsData = binaryMetrics.getBlockOfRows(0, binaryMetrics.getNumberOfRows(), qualityMetricsData);
System.out.println("Accuracy: " + qualityMetricsData.get(BinaryMetricId.accuracy.getValue()));
System.out.println("Precision: " + qualityMetricsData.get(BinaryMetricId.precision.getValue()));
System.out.println("Recall: " + qualityMetricsData.get(BinaryMetricId.recall.getValue()));
System.out.println("F-score: " + qualityMetricsData.get(BinaryMetricId.fscore.getValue()));
System.out.println("Specificity: " + qualityMetricsData.get(BinaryMetricId.specificity.getValue()));
System.out.println("AUC: " + qualityMetricsData.get(BinaryMetricId.AUC.getValue()));
}
}