This example is identical to Example 4 except that it also shows how to apply case weights to the training data: the first training observation is given a weight of 10, while all others keep a weight of 1. Compare the fitted values between the two examples to see the effect of the weights (the predicted values for the test data do not change).
import com.imsl.datamining.supportvectormachine.*; public class SupportVectorMachineEx5 { public static void main(String[] args) throws Exception { SVRegression.VariableType[] ex4DataType = { SVRegression.VariableType.CATEGORICAL, SVRegression.VariableType.QUANTITATIVE_CONTINUOUS, SVRegression.VariableType.QUANTITATIVE_CONTINUOUS}; String dashes = "--------------------------------------------------------------"; double C = 50., nu = .01; double[][] xyTrain = { {1, 0.19, 0.61}, {1, 0.156, 0.564}, {1, 0.224, 0.528}, {1, 0.178, 0.51}, {1, 0.234, 0.578}, {2, 0.394, 0.296}, {2, 0.478, 0.254}, {2, 0.454, 0.294}, {2, 0.48, 0.358}, {2, 0.398, 0.336} }; double[][] xyTest = { {1, 0.316, 0.556}, {1, 0.278, 0.622}, {2, 0.562, 0.336}, {2, 0.522, 0.412} }; double[] trainingWts = {10, 1, 1, 1, 1, 1, 1, 1, 1, 1}; /* Construct a Support Vector Machine. */ SVRegression svm1 = new SVRegression(xyTrain, 0, ex4DataType); svm1.setNuFormulation(true); svm1.setNuParameter(nu); svm1.setRegularizationParameter(C); svm1.setWeights(trainingWts); svm1.fitModel(); double[] fittedValues = svm1.predict(); System.out.println("\n" + dashes); System.out.println(" NU SVR: Training data predicted (fitted) values"); System.out.println(" Actual Fitted value | Difference"); for (int i = 0; i < fittedValues.length; i++) { System.out.printf(" %2.1f %5.4f %5.4f\n", xyTrain[i][0], fittedValues[i], (fittedValues[i] - xyTrain[i][0])); } System.out.println("\n" + dashes); double[] testPredictedValues = svm1.predict(xyTest); System.out.println("\n NU SVR: Test data predictions"); System.out.println(" Actual Prediction | Difference"); for (int i = 0; i < testPredictedValues.length; i++) { System.out.printf(" %2.1f %5.4f %5.4f\n", xyTest[i][0], testPredictedValues[i], (testPredictedValues[i] - xyTest[i][0])); } /* Now use the categorical version and compare results. 
*/ SVClassification svm2 = new SVClassification(xyTrain, 0, ex4DataType); svm2.setNuFormulation(true); svm2.setNuParameter(nu); svm2.setRegularizationParameter(C); svm2.setWeights(trainingWts); svm2.fitModel(); fittedValues = svm2.predict(); System.out.println("\n" + dashes); System.out.println(" NU SVC: Training data predicted (fitted) values"); System.out.println(" Actual Fitted value | Difference"); for (int i = 0; i < fittedValues.length; i++) { System.out.printf(" %2.1f %5.4f %5.4f\n", xyTrain[i][0], fittedValues[i], (fittedValues[i] - xyTrain[i][0])); } System.out.println("\n" + dashes); testPredictedValues = svm2.predict(xyTest); System.out.println("\n NU SVC: Test data predictions"); System.out.println(" Actual Prediction | Difference"); for (int i = 0; i < testPredictedValues.length; i++) { System.out.printf(" %2.1f %5.4f %5.4f\n", xyTest[i][0], testPredictedValues[i], (testPredictedValues[i] - xyTest[i][0])); } } }
Output

--------------------------------------------------------------
 NU SVR: Training data predicted (fitted) values
 Actual Fitted value | Difference
 1.0 1.2458 0.2458
 1.0 1.2586 0.2586
 1.0 1.3432 0.3432
 1.0 1.3226 0.3226
 1.0 1.3075 0.3075
 2.0 1.6886 -0.3114
 2.0 1.7892 -0.2108
 2.0 1.7373 -0.2627
 2.0 1.7020 -0.2980
 2.0 1.6568 -0.3432

--------------------------------------------------------------

 NU SVR: Test data predictions
 Actual Prediction | Difference
 1.0 1.3928 0.3928
 1.0 1.3048 0.3048
 2.0 1.7822 -0.2178
 2.0 1.6868 -0.3132

--------------------------------------------------------------
 NU SVC: Training data predicted (fitted) values
 Actual Fitted value | Difference
 1.0 1.0000 0.0000
 1.0 1.0000 0.0000
 1.0 1.0000 0.0000
 1.0 1.0000 0.0000
 1.0 1.0000 0.0000
 2.0 2.0000 0.0000
 2.0 2.0000 0.0000
 2.0 2.0000 0.0000
 2.0 2.0000 0.0000
 2.0 2.0000 0.0000

--------------------------------------------------------------

 NU SVC: Test data predictions
 Actual Prediction | Difference
 1.0 1.0000 0.0000
 1.0 1.0000 0.0000
 2.0 2.0000 0.0000
 2.0 2.0000 0.0000

Link to Java source.