diff --git a/examples/estimator/classifier/SVC/c/basics.ipynb b/examples/estimator/classifier/SVC/c/basics.ipynb index 365c92d2..a824f11e 100644 --- a/examples/estimator/classifier/SVC/c/basics.ipynb +++ b/examples/estimator/classifier/SVC/c/basics.ipynb @@ -29,7 +29,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "(150, 4) (150,)\n" + "((150, 4), (150,))\n" ] } ], @@ -37,9 +37,10 @@ "from sklearn.datasets import load_iris\n", "\n", "iris_data = load_iris()\n", - "X, y = iris_data.data, iris_data.target\n", + "X = iris_data.data\n", + "y = iris_data.target\n", "\n", - "print X.shape, y.shape" + "print(X.shape, y.shape)" ] }, { @@ -58,7 +59,7 @@ "data": { "text/plain": [ "SVC(C=1.0, cache_size=200, class_weight=None, coef0=0.0,\n", - " decision_function_shape=None, degree=3, gamma=0.001, kernel='rbf',\n", + " decision_function_shape='ovr', degree=3, gamma=0.001, kernel='rbf',\n", " max_iter=-1, probability=False, random_state=0, shrinking=True,\n", " tol=0.001, verbose=False)" ] @@ -97,93 +98,169 @@ "#include \n", "#include \n", "\n", - "int predict(float atts[]) {\n", + "#define N_FEATURES 4\n", + "#define N_CLASSES 3\n", + "#define N_VECTORS 150\n", + "#define N_ROWS 3\n", + "#define N_COEFFICIENTS 2\n", + "#define N_INTERCEPTS 3\n", + "#define KERNEL_TYPE 'r'\n", + "#define KERNEL_GAMMA 0.001\n", + "#define KERNEL_COEF 0.0\n", + "#define KERNEL_DEGREE 3\n", + "\n", + "double vectors[150][4] = {{5.0999999999999996, 3.5, 1.3999999999999999, 0.20000000000000001}, {4.9000000000000004, 3.0, 1.3999999999999999, 0.20000000000000001}, {4.7000000000000002, 3.2000000000000002, 1.3, 0.20000000000000001}, {4.5999999999999996, 3.1000000000000001, 1.5, 0.20000000000000001}, {5.0, 3.6000000000000001, 1.3999999999999999, 0.20000000000000001}, {5.4000000000000004, 3.8999999999999999, 1.7, 0.40000000000000002}, {4.5999999999999996, 3.3999999999999999, 1.3999999999999999, 0.29999999999999999}, {5.0, 3.3999999999999999, 1.5, 0.20000000000000001}, {4.4000000000000004, 2.8999999999999999, 1.3999999999999999, 0.20000000000000001}, {4.9000000000000004, 3.1000000000000001, 1.5, 0.10000000000000001}, {5.4000000000000004, 3.7000000000000002, 1.5, 0.20000000000000001}, {4.7999999999999998, 3.3999999999999999, 1.6000000000000001, 0.20000000000000001}, {4.7999999999999998, 3.0, 1.3999999999999999, 0.10000000000000001}, {4.2999999999999998, 3.0, 1.1000000000000001, 0.10000000000000001}, {5.7999999999999998, 4.0, 1.2, 0.20000000000000001}, {5.7000000000000002, 4.4000000000000004, 1.5, 0.40000000000000002}, {5.4000000000000004, 3.8999999999999999, 1.3, 0.40000000000000002}, {5.0999999999999996, 3.5, 1.3999999999999999, 0.29999999999999999}, {5.7000000000000002, 3.7999999999999998, 1.7, 0.29999999999999999}, {5.0999999999999996, 3.7999999999999998, 1.5, 0.29999999999999999}, {5.4000000000000004, 3.3999999999999999, 1.7, 0.20000000000000001}, {5.0999999999999996, 3.7000000000000002, 1.5, 0.40000000000000002}, {4.5999999999999996, 3.6000000000000001, 1.0, 0.20000000000000001}, {5.0999999999999996, 3.2999999999999998, 1.7, 0.5}, {4.7999999999999998, 3.3999999999999999, 1.8999999999999999, 0.20000000000000001}, {5.0, 3.0, 1.6000000000000001, 0.20000000000000001}, {5.0, 3.3999999999999999, 1.6000000000000001, 0.40000000000000002}, {5.2000000000000002, 3.5, 1.5, 0.20000000000000001}, {5.2000000000000002, 3.3999999999999999, 1.3999999999999999, 0.20000000000000001}, {4.7000000000000002, 3.2000000000000002, 1.6000000000000001, 0.20000000000000001}, {4.7999999999999998, 3.1000000000000001, 1.6000000000000001, 
0.20000000000000001}, {5.4000000000000004, 3.3999999999999999, 1.5, 0.40000000000000002}, {5.2000000000000002, 4.0999999999999996, 1.5, 0.10000000000000001}, {5.5, 4.2000000000000002, 1.3999999999999999, 0.20000000000000001}, {4.9000000000000004, 3.1000000000000001, 1.5, 0.10000000000000001}, {5.0, 3.2000000000000002, 1.2, 0.20000000000000001}, {5.5, 3.5, 1.3, 0.20000000000000001}, {4.9000000000000004, 3.1000000000000001, 1.5, 0.10000000000000001}, {4.4000000000000004, 3.0, 1.3, 0.20000000000000001}, {5.0999999999999996, 3.3999999999999999, 1.5, 0.20000000000000001}, {5.0, 3.5, 1.3, 0.29999999999999999}, {4.5, 2.2999999999999998, 1.3, 0.29999999999999999}, {4.4000000000000004, 3.2000000000000002, 1.3, 0.20000000000000001}, {5.0, 3.5, 1.6000000000000001, 0.59999999999999998}, {5.0999999999999996, 3.7999999999999998, 1.8999999999999999, 0.40000000000000002}, {4.7999999999999998, 3.0, 1.3999999999999999, 0.29999999999999999}, {5.0999999999999996, 3.7999999999999998, 1.6000000000000001, 0.20000000000000001}, {4.5999999999999996, 3.2000000000000002, 1.3999999999999999, 0.20000000000000001}, {5.2999999999999998, 3.7000000000000002, 1.5, 0.20000000000000001}, {5.0, 3.2999999999999998, 1.3999999999999999, 0.20000000000000001}, {7.0, 3.2000000000000002, 4.7000000000000002, 1.3999999999999999}, {6.4000000000000004, 3.2000000000000002, 4.5, 1.5}, {6.9000000000000004, 3.1000000000000001, 4.9000000000000004, 1.5}, {5.5, 2.2999999999999998, 4.0, 1.3}, {6.5, 2.7999999999999998, 4.5999999999999996, 1.5}, {5.7000000000000002, 2.7999999999999998, 4.5, 1.3}, {6.2999999999999998, 3.2999999999999998, 4.7000000000000002, 1.6000000000000001}, {4.9000000000000004, 2.3999999999999999, 3.2999999999999998, 1.0}, {6.5999999999999996, 2.8999999999999999, 4.5999999999999996, 1.3}, {5.2000000000000002, 2.7000000000000002, 3.8999999999999999, 1.3999999999999999}, {5.0, 2.0, 3.5, 1.0}, {5.9000000000000004, 3.0, 4.2000000000000002, 1.5}, {6.0, 2.2000000000000002, 4.0, 1.0}, {6.0999999999999996, 2.8999999999999999, 4.7000000000000002, 1.3999999999999999}, {5.5999999999999996, 2.8999999999999999, 3.6000000000000001, 1.3}, {6.7000000000000002, 3.1000000000000001, 4.4000000000000004, 1.3999999999999999}, {5.5999999999999996, 3.0, 4.5, 1.5}, {5.7999999999999998, 2.7000000000000002, 4.0999999999999996, 1.0}, {6.2000000000000002, 2.2000000000000002, 4.5, 1.5}, {5.5999999999999996, 2.5, 3.8999999999999999, 1.1000000000000001}, {5.9000000000000004, 3.2000000000000002, 4.7999999999999998, 1.8}, {6.0999999999999996, 2.7999999999999998, 4.0, 1.3}, {6.2999999999999998, 2.5, 4.9000000000000004, 1.5}, {6.0999999999999996, 2.7999999999999998, 4.7000000000000002, 1.2}, {6.4000000000000004, 2.8999999999999999, 4.2999999999999998, 1.3}, {6.5999999999999996, 3.0, 4.4000000000000004, 1.3999999999999999}, {6.7999999999999998, 2.7999999999999998, 4.7999999999999998, 1.3999999999999999}, {6.7000000000000002, 3.0, 5.0, 1.7}, {6.0, 2.8999999999999999, 4.5, 1.5}, {5.7000000000000002, 2.6000000000000001, 3.5, 1.0}, {5.5, 2.3999999999999999, 3.7999999999999998, 1.1000000000000001}, {5.5, 2.3999999999999999, 3.7000000000000002, 1.0}, {5.7999999999999998, 2.7000000000000002, 3.8999999999999999, 1.2}, {6.0, 2.7000000000000002, 5.0999999999999996, 1.6000000000000001}, {5.4000000000000004, 3.0, 4.5, 1.5}, {6.0, 3.3999999999999999, 4.5, 1.6000000000000001}, {6.7000000000000002, 3.1000000000000001, 4.7000000000000002, 1.5}, {6.2999999999999998, 2.2999999999999998, 4.4000000000000004, 1.3}, {5.5999999999999996, 3.0, 4.0999999999999996, 1.3}, {5.5, 2.5, 4.0, 
1.3}, {5.5, 2.6000000000000001, 4.4000000000000004, 1.2}, {6.0999999999999996, 3.0, 4.5999999999999996, 1.3999999999999999}, {5.7999999999999998, 2.6000000000000001, 4.0, 1.2}, {5.0, 2.2999999999999998, 3.2999999999999998, 1.0}, {5.5999999999999996, 2.7000000000000002, 4.2000000000000002, 1.3}, {5.7000000000000002, 3.0, 4.2000000000000002, 1.2}, {5.7000000000000002, 2.8999999999999999, 4.2000000000000002, 1.3}, {6.2000000000000002, 2.8999999999999999, 4.2999999999999998, 1.3}, {5.0999999999999996, 2.5, 3.0, 1.1000000000000001}, {5.7000000000000002, 2.7999999999999998, 4.0999999999999996, 1.3}, {6.2999999999999998, 3.2999999999999998, 6.0, 2.5}, {5.7999999999999998, 2.7000000000000002, 5.0999999999999996, 1.8999999999999999}, {7.0999999999999996, 3.0, 5.9000000000000004, 2.1000000000000001}, {6.2999999999999998, 2.8999999999999999, 5.5999999999999996, 1.8}, {6.5, 3.0, 5.7999999999999998, 2.2000000000000002}, {7.5999999999999996, 3.0, 6.5999999999999996, 2.1000000000000001}, {4.9000000000000004, 2.5, 4.5, 1.7}, {7.2999999999999998, 2.8999999999999999, 6.2999999999999998, 1.8}, {6.7000000000000002, 2.5, 5.7999999999999998, 1.8}, {7.2000000000000002, 3.6000000000000001, 6.0999999999999996, 2.5}, {6.5, 3.2000000000000002, 5.0999999999999996, 2.0}, {6.4000000000000004, 2.7000000000000002, 5.2999999999999998, 1.8999999999999999}, {6.7999999999999998, 3.0, 5.5, 2.1000000000000001}, {5.7000000000000002, 2.5, 5.0, 2.0}, {5.7999999999999998, 2.7999999999999998, 5.0999999999999996, 2.3999999999999999}, {6.4000000000000004, 3.2000000000000002, 5.2999999999999998, 2.2999999999999998}, {6.5, 3.0, 5.5, 1.8}, {7.7000000000000002, 3.7999999999999998, 6.7000000000000002, 2.2000000000000002}, {7.7000000000000002, 2.6000000000000001, 6.9000000000000004, 2.2999999999999998}, {6.0, 2.2000000000000002, 5.0, 1.5}, {6.9000000000000004, 3.2000000000000002, 5.7000000000000002, 2.2999999999999998}, {5.5999999999999996, 2.7999999999999998, 4.9000000000000004, 2.0}, {7.7000000000000002, 2.7999999999999998, 6.7000000000000002, 2.0}, {6.2999999999999998, 2.7000000000000002, 4.9000000000000004, 1.8}, {6.7000000000000002, 3.2999999999999998, 5.7000000000000002, 2.1000000000000001}, {7.2000000000000002, 3.2000000000000002, 6.0, 1.8}, {6.2000000000000002, 2.7999999999999998, 4.7999999999999998, 1.8}, {6.0999999999999996, 3.0, 4.9000000000000004, 1.8}, {6.4000000000000004, 2.7999999999999998, 5.5999999999999996, 2.1000000000000001}, {7.2000000000000002, 3.0, 5.7999999999999998, 1.6000000000000001}, {7.4000000000000004, 2.7999999999999998, 6.0999999999999996, 1.8999999999999999}, {7.9000000000000004, 3.7999999999999998, 6.4000000000000004, 2.0}, {6.4000000000000004, 2.7999999999999998, 5.5999999999999996, 2.2000000000000002}, {6.2999999999999998, 2.7999999999999998, 5.0999999999999996, 1.5}, {6.0999999999999996, 2.6000000000000001, 5.5999999999999996, 1.3999999999999999}, {7.7000000000000002, 3.0, 6.0999999999999996, 2.2999999999999998}, {6.2999999999999998, 3.3999999999999999, 5.5999999999999996, 2.3999999999999999}, {6.4000000000000004, 3.1000000000000001, 5.5, 1.8}, {6.0, 3.0, 4.7999999999999998, 1.8}, {6.9000000000000004, 3.1000000000000001, 5.4000000000000004, 2.1000000000000001}, {6.7000000000000002, 3.1000000000000001, 5.5999999999999996, 2.3999999999999999}, {6.9000000000000004, 3.1000000000000001, 5.0999999999999996, 2.2999999999999998}, {5.7999999999999998, 2.7000000000000002, 5.0999999999999996, 1.8999999999999999}, {6.7999999999999998, 3.2000000000000002, 5.9000000000000004, 2.2999999999999998}, {6.7000000000000002, 
3.2999999999999998, 5.7000000000000002, 2.5}, {6.7000000000000002, 3.0, 5.2000000000000002, 2.2999999999999998}, {6.2999999999999998, 2.5, 5.0, 1.8999999999999999}, {6.5, 3.0, 5.2000000000000002, 2.0}, {6.2000000000000002, 3.3999999999999999, 5.4000000000000004, 2.2999999999999998}, {5.9000000000000004, 3.0, 5.0999999999999996, 1.8}};\n", + "double coefficients[2][150] = {{1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -0.0, -1.0, -0.0, -1.0, -0.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -0.0, -0.0, -1.0, -1.0, -1.0, -0.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -0.0, -0.0, -1.0, -1.0, -1.0, -0.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0}, {1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0}};\n", + "double intercepts[3] = {0.043376922607421875, 0.11445245146751404, -0.0031709671020507812};\n", + "int weights[3] = {50, 50, 50};\n", + "\n", + "int predict (double features[]) {\n", " int i, j, k, d, l;\n", - " \n", - " int n_svs[] = {50, 50, 50};\n", - " double svs[150][4] = {{5.0999999999999996, 3.5, 1.3999999999999999, 0.20000000000000001}, {4.9000000000000004, 3.0, 1.3999999999999999, 0.20000000000000001}, {4.7000000000000002, 3.2000000000000002, 1.3, 0.20000000000000001}, {4.5999999999999996, 3.1000000000000001, 1.5, 0.20000000000000001}, {5.0, 3.6000000000000001, 1.3999999999999999, 0.20000000000000001}, {5.4000000000000004, 3.8999999999999999, 1.7, 0.40000000000000002}, {4.5999999999999996, 3.3999999999999999, 1.3999999999999999, 0.29999999999999999}, {5.0, 3.3999999999999999, 1.5, 0.20000000000000001}, {4.4000000000000004, 2.8999999999999999, 1.3999999999999999, 0.20000000000000001}, {4.9000000000000004, 3.1000000000000001, 1.5, 0.10000000000000001}, {5.4000000000000004, 3.7000000000000002, 1.5, 0.20000000000000001}, {4.7999999999999998, 3.3999999999999999, 1.6000000000000001, 0.20000000000000001}, {4.7999999999999998, 3.0, 1.3999999999999999, 0.10000000000000001}, {4.2999999999999998, 3.0, 1.1000000000000001, 0.10000000000000001}, {5.7999999999999998, 4.0, 1.2, 0.20000000000000001}, {5.7000000000000002, 4.4000000000000004, 1.5, 0.40000000000000002}, {5.4000000000000004, 3.8999999999999999, 1.3, 0.40000000000000002}, {5.0999999999999996, 3.5, 1.3999999999999999, 0.29999999999999999}, 
{5.7000000000000002, 3.7999999999999998, 1.7, 0.29999999999999999}, {5.0999999999999996, 3.7999999999999998, 1.5, 0.29999999999999999}, {5.4000000000000004, 3.3999999999999999, 1.7, 0.20000000000000001}, {5.0999999999999996, 3.7000000000000002, 1.5, 0.40000000000000002}, {4.5999999999999996, 3.6000000000000001, 1.0, 0.20000000000000001}, {5.0999999999999996, 3.2999999999999998, 1.7, 0.5}, {4.7999999999999998, 3.3999999999999999, 1.8999999999999999, 0.20000000000000001}, {5.0, 3.0, 1.6000000000000001, 0.20000000000000001}, {5.0, 3.3999999999999999, 1.6000000000000001, 0.40000000000000002}, {5.2000000000000002, 3.5, 1.5, 0.20000000000000001}, {5.2000000000000002, 3.3999999999999999, 1.3999999999999999, 0.20000000000000001}, {4.7000000000000002, 3.2000000000000002, 1.6000000000000001, 0.20000000000000001}, {4.7999999999999998, 3.1000000000000001, 1.6000000000000001, 0.20000000000000001}, {5.4000000000000004, 3.3999999999999999, 1.5, 0.40000000000000002}, {5.2000000000000002, 4.0999999999999996, 1.5, 0.10000000000000001}, {5.5, 4.2000000000000002, 1.3999999999999999, 0.20000000000000001}, {4.9000000000000004, 3.1000000000000001, 1.5, 0.10000000000000001}, {5.0, 3.2000000000000002, 1.2, 0.20000000000000001}, {5.5, 3.5, 1.3, 0.20000000000000001}, {4.9000000000000004, 3.1000000000000001, 1.5, 0.10000000000000001}, {4.4000000000000004, 3.0, 1.3, 0.20000000000000001}, {5.0999999999999996, 3.3999999999999999, 1.5, 0.20000000000000001}, {5.0, 3.5, 1.3, 0.29999999999999999}, {4.5, 2.2999999999999998, 1.3, 0.29999999999999999}, {4.4000000000000004, 3.2000000000000002, 1.3, 0.20000000000000001}, {5.0, 3.5, 1.6000000000000001, 0.59999999999999998}, {5.0999999999999996, 3.7999999999999998, 1.8999999999999999, 0.40000000000000002}, {4.7999999999999998, 3.0, 1.3999999999999999, 0.29999999999999999}, {5.0999999999999996, 3.7999999999999998, 1.6000000000000001, 0.20000000000000001}, {4.5999999999999996, 3.2000000000000002, 1.3999999999999999, 0.20000000000000001}, {5.2999999999999998, 3.7000000000000002, 1.5, 0.20000000000000001}, {5.0, 3.2999999999999998, 1.3999999999999999, 0.20000000000000001}, {7.0, 3.2000000000000002, 4.7000000000000002, 1.3999999999999999}, {6.4000000000000004, 3.2000000000000002, 4.5, 1.5}, {6.9000000000000004, 3.1000000000000001, 4.9000000000000004, 1.5}, {5.5, 2.2999999999999998, 4.0, 1.3}, {6.5, 2.7999999999999998, 4.5999999999999996, 1.5}, {5.7000000000000002, 2.7999999999999998, 4.5, 1.3}, {6.2999999999999998, 3.2999999999999998, 4.7000000000000002, 1.6000000000000001}, {4.9000000000000004, 2.3999999999999999, 3.2999999999999998, 1.0}, {6.5999999999999996, 2.8999999999999999, 4.5999999999999996, 1.3}, {5.2000000000000002, 2.7000000000000002, 3.8999999999999999, 1.3999999999999999}, {5.0, 2.0, 3.5, 1.0}, {5.9000000000000004, 3.0, 4.2000000000000002, 1.5}, {6.0, 2.2000000000000002, 4.0, 1.0}, {6.0999999999999996, 2.8999999999999999, 4.7000000000000002, 1.3999999999999999}, {5.5999999999999996, 2.8999999999999999, 3.6000000000000001, 1.3}, {6.7000000000000002, 3.1000000000000001, 4.4000000000000004, 1.3999999999999999}, {5.5999999999999996, 3.0, 4.5, 1.5}, {5.7999999999999998, 2.7000000000000002, 4.0999999999999996, 1.0}, {6.2000000000000002, 2.2000000000000002, 4.5, 1.5}, {5.5999999999999996, 2.5, 3.8999999999999999, 1.1000000000000001}, {5.9000000000000004, 3.2000000000000002, 4.7999999999999998, 1.8}, {6.0999999999999996, 2.7999999999999998, 4.0, 1.3}, {6.2999999999999998, 2.5, 4.9000000000000004, 1.5}, {6.0999999999999996, 2.7999999999999998, 4.7000000000000002, 1.2}, 
{6.4000000000000004, 2.8999999999999999, 4.2999999999999998, 1.3}, {6.5999999999999996, 3.0, 4.4000000000000004, 1.3999999999999999}, {6.7999999999999998, 2.7999999999999998, 4.7999999999999998, 1.3999999999999999}, {6.7000000000000002, 3.0, 5.0, 1.7}, {6.0, 2.8999999999999999, 4.5, 1.5}, {5.7000000000000002, 2.6000000000000001, 3.5, 1.0}, {5.5, 2.3999999999999999, 3.7999999999999998, 1.1000000000000001}, {5.5, 2.3999999999999999, 3.7000000000000002, 1.0}, {5.7999999999999998, 2.7000000000000002, 3.8999999999999999, 1.2}, {6.0, 2.7000000000000002, 5.0999999999999996, 1.6000000000000001}, {5.4000000000000004, 3.0, 4.5, 1.5}, {6.0, 3.3999999999999999, 4.5, 1.6000000000000001}, {6.7000000000000002, 3.1000000000000001, 4.7000000000000002, 1.5}, {6.2999999999999998, 2.2999999999999998, 4.4000000000000004, 1.3}, {5.5999999999999996, 3.0, 4.0999999999999996, 1.3}, {5.5, 2.5, 4.0, 1.3}, {5.5, 2.6000000000000001, 4.4000000000000004, 1.2}, {6.0999999999999996, 3.0, 4.5999999999999996, 1.3999999999999999}, {5.7999999999999998, 2.6000000000000001, 4.0, 1.2}, {5.0, 2.2999999999999998, 3.2999999999999998, 1.0}, {5.5999999999999996, 2.7000000000000002, 4.2000000000000002, 1.3}, {5.7000000000000002, 3.0, 4.2000000000000002, 1.2}, {5.7000000000000002, 2.8999999999999999, 4.2000000000000002, 1.3}, {6.2000000000000002, 2.8999999999999999, 4.2999999999999998, 1.3}, {5.0999999999999996, 2.5, 3.0, 1.1000000000000001}, {5.7000000000000002, 2.7999999999999998, 4.0999999999999996, 1.3}, {6.2999999999999998, 3.2999999999999998, 6.0, 2.5}, {5.7999999999999998, 2.7000000000000002, 5.0999999999999996, 1.8999999999999999}, {7.0999999999999996, 3.0, 5.9000000000000004, 2.1000000000000001}, {6.2999999999999998, 2.8999999999999999, 5.5999999999999996, 1.8}, {6.5, 3.0, 5.7999999999999998, 2.2000000000000002}, {7.5999999999999996, 3.0, 6.5999999999999996, 2.1000000000000001}, {4.9000000000000004, 2.5, 4.5, 1.7}, {7.2999999999999998, 2.8999999999999999, 6.2999999999999998, 1.8}, {6.7000000000000002, 2.5, 5.7999999999999998, 1.8}, {7.2000000000000002, 3.6000000000000001, 6.0999999999999996, 2.5}, {6.5, 3.2000000000000002, 5.0999999999999996, 2.0}, {6.4000000000000004, 2.7000000000000002, 5.2999999999999998, 1.8999999999999999}, {6.7999999999999998, 3.0, 5.5, 2.1000000000000001}, {5.7000000000000002, 2.5, 5.0, 2.0}, {5.7999999999999998, 2.7999999999999998, 5.0999999999999996, 2.3999999999999999}, {6.4000000000000004, 3.2000000000000002, 5.2999999999999998, 2.2999999999999998}, {6.5, 3.0, 5.5, 1.8}, {7.7000000000000002, 3.7999999999999998, 6.7000000000000002, 2.2000000000000002}, {7.7000000000000002, 2.6000000000000001, 6.9000000000000004, 2.2999999999999998}, {6.0, 2.2000000000000002, 5.0, 1.5}, {6.9000000000000004, 3.2000000000000002, 5.7000000000000002, 2.2999999999999998}, {5.5999999999999996, 2.7999999999999998, 4.9000000000000004, 2.0}, {7.7000000000000002, 2.7999999999999998, 6.7000000000000002, 2.0}, {6.2999999999999998, 2.7000000000000002, 4.9000000000000004, 1.8}, {6.7000000000000002, 3.2999999999999998, 5.7000000000000002, 2.1000000000000001}, {7.2000000000000002, 3.2000000000000002, 6.0, 1.8}, {6.2000000000000002, 2.7999999999999998, 4.7999999999999998, 1.8}, {6.0999999999999996, 3.0, 4.9000000000000004, 1.8}, {6.4000000000000004, 2.7999999999999998, 5.5999999999999996, 2.1000000000000001}, {7.2000000000000002, 3.0, 5.7999999999999998, 1.6000000000000001}, {7.4000000000000004, 2.7999999999999998, 6.0999999999999996, 1.8999999999999999}, {7.9000000000000004, 3.7999999999999998, 6.4000000000000004, 2.0}, 
{6.4000000000000004, 2.7999999999999998, 5.5999999999999996, 2.2000000000000002}, {6.2999999999999998, 2.7999999999999998, 5.0999999999999996, 1.5}, {6.0999999999999996, 2.6000000000000001, 5.5999999999999996, 1.3999999999999999}, {7.7000000000000002, 3.0, 6.0999999999999996, 2.2999999999999998}, {6.2999999999999998, 3.3999999999999999, 5.5999999999999996, 2.3999999999999999}, {6.4000000000000004, 3.1000000000000001, 5.5, 1.8}, {6.0, 3.0, 4.7999999999999998, 1.8}, {6.9000000000000004, 3.1000000000000001, 5.4000000000000004, 2.1000000000000001}, {6.7000000000000002, 3.1000000000000001, 5.5999999999999996, 2.3999999999999999}, {6.9000000000000004, 3.1000000000000001, 5.0999999999999996, 2.2999999999999998}, {5.7999999999999998, 2.7000000000000002, 5.0999999999999996, 1.8999999999999999}, {6.7999999999999998, 3.2000000000000002, 5.9000000000000004, 2.2999999999999998}, {6.7000000000000002, 3.2999999999999998, 5.7000000000000002, 2.5}, {6.7000000000000002, 3.0, 5.2000000000000002, 2.2999999999999998}, {6.2999999999999998, 2.5, 5.0, 1.8999999999999999}, {6.5, 3.0, 5.2000000000000002, 2.0}, {6.2000000000000002, 3.3999999999999999, 5.4000000000000004, 2.2999999999999998}, {5.9000000000000004, 3.0, 5.0999999999999996, 1.8}};\n", - " double coeffs[2][150] = {{1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -0.0, -1.0, -0.0, -1.0, -0.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -0.0, -0.0, -1.0, -1.0, -1.0, -0.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -0.0, -0.0, -1.0, -1.0, -1.0, -0.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0}, {1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0}};\n", - " double inters[] = {0.043376922607421875, 0.11445245146751404, -0.0031709671020507812};\n", - " int classes[] = {0, 1, 2};\n", - " \n", - " // exp(-y|x-x'|^2)\n", - " double kernels[150];\n", + "\n", + " double kernels[N_VECTORS];\n", " double kernel;\n", - " for (i = 0; i < 150; i++) {\n", - " kernel = 0.;\n", - " for (j = 0; j < 4; j++) {\n", - " kernel += pow(svs[i][j] - atts[j], 2);\n", - " }\n", - " kernels[i] = exp(-0.001 * kernel);\n", + " switch (KERNEL_TYPE) {\n", + " case 'l':\n", + " // \n", + " for (i = 0; i < N_VECTORS; i++) {\n", + " kernel = 0.;\n", + " for (j = 0; j < N_FEATURES; j++) {\n", + " 
kernel += vectors[i][j] * features[j];\n", + " }\n", + " kernels[i] = kernel;\n", + " }\n", + " break;\n", + " case 'p':\n", + " // (y+r)^d\n", + " for (i = 0; i < N_VECTORS; i++) {\n", + " kernel = 0.;\n", + " for (j = 0; j < N_FEATURES; j++) {\n", + " kernel += vectors[i][j] * features[j];\n", + " }\n", + " kernels[i] = pow((KERNEL_GAMMA * kernel) + KERNEL_COEF, KERNEL_DEGREE);\n", + " }\n", + " break;\n", + " case 'r':\n", + " // exp(-y|x-x'|^2)\n", + " for (i = 0; i < N_VECTORS; i++) {\n", + " kernel = 0.;\n", + " for (j = 0; j < N_FEATURES; j++) {\n", + " kernel += pow(vectors[i][j] - features[j], 2);\n", + " }\n", + " kernels[i] = exp(-KERNEL_GAMMA * kernel);\n", + " }\n", + " break;\n", + " case 's':\n", + " // tanh(y+r)\n", + " for (i = 0; i < N_VECTORS; i++) {\n", + " kernel = 0.;\n", + " for (j = 0; j < N_FEATURES; j++) {\n", + " kernel += vectors[i][j] * features[j];\n", + " }\n", + " kernels[i] = tanh((KERNEL_GAMMA * kernel) + KERNEL_COEF);\n", + " }\n", + " break;\n", " }\n", - " \n", - " int starts[3];\n", - " for (i = 0; i < 3; i++) {\n", + "\n", + " int starts[N_CLASSES];\n", + " int start;\n", + " for (i = 0; i < N_CLASSES; i++) {\n", " if (i != 0) {\n", - " int start = 0;\n", + " start = 0;\n", " for (j = 0; j < i; j++) {\n", - " start += n_svs[j];\n", + " start += weights[j];\n", " }\n", " starts[i] = start;\n", " } else {\n", " starts[0] = 0;\n", " }\n", " }\n", - " \n", - " int ends[3];\n", - " for (i = 0; i < 3; i++) {\n", - " ends[i] = n_svs[i] + starts[i];\n", + "\n", + " int ends[N_CLASSES];\n", + " for (i = 0; i < N_CLASSES; i++) {\n", + " ends[i] = weights[i] + starts[i];\n", " }\n", - " \n", - " double decisions[3];\n", - " for (i = 0, d = 0, l = 3; i < l; i++) {\n", - " for (j = i + 1; j < l; j++) {\n", - " double tmp = 0.;\n", - " for (k = starts[j]; k < ends[j]; k++) {\n", - " tmp += kernels[k] * coeffs[i][k];\n", + "\n", + " if (N_CLASSES == 2) {\n", + "\n", + " for (i = 0; i < N_VECTORS; i++) {\n", + " kernels[i] = -kernels[i];\n", + " }\n", + "\n", + " double decision = 0.;\n", + " for (k = starts[1]; k < ends[1]; k++) {\n", + " decision += kernels[k] * coefficients[0][k];\n", + " }\n", + " for (k = starts[0]; k < ends[0]; k++) {\n", + " decision += kernels[k] * coefficients[0][k];\n", + " }\n", + " decision += intercepts[0];\n", + "\n", + " if (decision > 0) {\n", + " return 0;\n", + " }\n", + " return 1;\n", + "\n", + " } else {\n", + "\n", + " double decisions[N_INTERCEPTS];\n", + " double tmp;\n", + " for (i = 0, d = 0, l = N_ROWS; i < l; i++) {\n", + " for (j = i + 1; j < l; j++) {\n", + " tmp = 0.;\n", + " for (k = starts[j]; k < ends[j]; k++) {\n", + " tmp += kernels[k] * coefficients[i][k];\n", + " }\n", + " for (k = starts[i]; k < ends[i]; k++) {\n", + " tmp += kernels[k] * coefficients[j - 1][k];\n", + " }\n", + " decisions[d] = tmp + intercepts[d];\n", + " d = d + 1;\n", " }\n", - " for (k = starts[i]; k < ends[i]; k++) {\n", - " tmp += kernels[k] * coeffs[j - 1][k];\n", + " }\n", + "\n", + " int votes[N_INTERCEPTS];\n", + " for (i = 0, d = 0, l = N_ROWS; i < l; i++) {\n", + " for (j = i + 1; j < l; j++) {\n", + " votes[d] = decisions[d] > 0 ? i : j;\n", + " d = d + 1;\n", " }\n", - " decisions[d] = tmp + inters[d];\n", - " d = d + 1;\n", " }\n", - " }\n", - " \n", - " int votes[3];\n", - " for (i = 0, d = 0, l = 3; i < l; i++) {\n", - " for (j = i + 1; j < l; j++) {\n", - " votes[d] = decisions[d] > 0 ? 
i : j;\n", - " d = d + 1;\n", + "\n", + " int amounts[N_CLASSES];\n", + " for (i = 0, l = N_CLASSES; i < l; i++) {\n", + " amounts[i] = 0;\n", " }\n", - " }\n", - " \n", - " int amounts[3];\n", - " for (i = 0, l = 3; i < l; i++) {\n", - " amounts[i] = 0;\n", - " }\n", - " for (i = 0, l = 3; i < l; i++) {\n", - " amounts[votes[i]] += 1;\n", - " }\n", - " \n", - " int class_val = -1, class_idx = -1;\n", - " for (i = 0, l = 3; i < l; i++) {\n", - " if (amounts[i] > class_val) {\n", - " class_val = amounts[i];\n", - " class_idx = i;\n", + " for (i = 0; i < N_INTERCEPTS; i++) {\n", + " amounts[votes[i]] += 1;\n", + " }\n", + "\n", + " int classVal = -1;\n", + " int classIdx = -1;\n", + " for (i = 0; i < N_CLASSES; i++) {\n", + " if (amounts[i] > classVal) {\n", + " classVal = amounts[i];\n", + " classIdx= i;\n", + " }\n", " }\n", + " return classIdx;\n", + "\n", " }\n", - " return classes[class_idx];\n", "}\n", "\n", "int main(int argc, const char * argv[]) {\n", - " float atts[argc-1];\n", - " int i = 0;\n", + "\n", + " /* Features: */\n", + " double features[argc-1];\n", + " int i;\n", " for (i = 1; i < argc; i++) {\n", - " atts[i-1] = atof(argv[i]);\n", + " features[i-1] = atof(argv[i]);\n", " }\n", - " printf(\"%d\", predict(atts));\n", + "\n", + " /* Prediction: */\n", + " printf(\"%d\", predict(features));\n", " return 0;\n", + "\n", "}\n", "\n" ] @@ -192,9 +269,10 @@ "source": [ "from sklearn_porter import Porter\n", "\n", - "output = Porter(clf, language='c').export()\n", + "porter = Porter(clf, language='c')\n", + "output = porter.export()\n", "\n", - "print output" + "print(output)" ] } ], diff --git a/examples/estimator/classifier/SVC/c/basics.py b/examples/estimator/classifier/SVC/c/basics.py index 437ab43a..93ce1509 100644 --- a/examples/estimator/classifier/SVC/c/basics.py +++ b/examples/estimator/classifier/SVC/c/basics.py @@ -6,12 +6,14 @@ iris_data = load_iris() -X, y = iris_data.data, iris_data.target +X = iris_data.data +y = iris_data.target clf = svm.SVC(C=1., gamma=0.001, kernel='rbf', random_state=0) clf.fit(X, y) -output = Porter(clf, language='c').export() +porter = Porter(clf, language='c') +output = porter.export() print(output) """ @@ -19,32 +21,77 @@ #include #include -int predict(float atts[]) { +#define N_FEATURES 4 +#define N_CLASSES 3 +#define N_VECTORS 150 +#define N_ROWS 3 +#define N_COEFFICIENTS 2 +#define N_INTERCEPTS 3 +#define KERNEL_TYPE 'r' +#define KERNEL_GAMMA 0.001 +#define KERNEL_COEF 0.0 +#define KERNEL_DEGREE 3 + +double vectors[150][4] = {{5.0999999999999996, 3.5, 1.3999999999999999, 0.20000000000000001}, {4.9000000000000004, 3.0, 1.3999999999999999, 0.20000000000000001}, {4.7000000000000002, 3.2000000000000002, 1.3, 0.20000000000000001}, {4.5999999999999996, 3.1000000000000001, 1.5, 0.20000000000000001}, {5.0, 3.6000000000000001, 1.3999999999999999, 0.20000000000000001}, {5.4000000000000004, 3.8999999999999999, 1.7, 0.40000000000000002}, {4.5999999999999996, 3.3999999999999999, 1.3999999999999999, 0.29999999999999999}, {5.0, 3.3999999999999999, 1.5, 0.20000000000000001}, {4.4000000000000004, 2.8999999999999999, 1.3999999999999999, 0.20000000000000001}, {4.9000000000000004, 3.1000000000000001, 1.5, 0.10000000000000001}, {5.4000000000000004, 3.7000000000000002, 1.5, 0.20000000000000001}, {4.7999999999999998, 3.3999999999999999, 1.6000000000000001, 0.20000000000000001}, {4.7999999999999998, 3.0, 1.3999999999999999, 0.10000000000000001}, {4.2999999999999998, 3.0, 1.1000000000000001, 0.10000000000000001}, {5.7999999999999998, 4.0, 1.2, 
0.20000000000000001}, {5.7000000000000002, 4.4000000000000004, 1.5, 0.40000000000000002}, {5.4000000000000004, 3.8999999999999999, 1.3, 0.40000000000000002}, {5.0999999999999996, 3.5, 1.3999999999999999, 0.29999999999999999}, {5.7000000000000002, 3.7999999999999998, 1.7, 0.29999999999999999}, {5.0999999999999996, 3.7999999999999998, 1.5, 0.29999999999999999}, {5.4000000000000004, 3.3999999999999999, 1.7, 0.20000000000000001}, {5.0999999999999996, 3.7000000000000002, 1.5, 0.40000000000000002}, {4.5999999999999996, 3.6000000000000001, 1.0, 0.20000000000000001}, {5.0999999999999996, 3.2999999999999998, 1.7, 0.5}, {4.7999999999999998, 3.3999999999999999, 1.8999999999999999, 0.20000000000000001}, {5.0, 3.0, 1.6000000000000001, 0.20000000000000001}, {5.0, 3.3999999999999999, 1.6000000000000001, 0.40000000000000002}, {5.2000000000000002, 3.5, 1.5, 0.20000000000000001}, {5.2000000000000002, 3.3999999999999999, 1.3999999999999999, 0.20000000000000001}, {4.7000000000000002, 3.2000000000000002, 1.6000000000000001, 0.20000000000000001}, {4.7999999999999998, 3.1000000000000001, 1.6000000000000001, 0.20000000000000001}, {5.4000000000000004, 3.3999999999999999, 1.5, 0.40000000000000002}, {5.2000000000000002, 4.0999999999999996, 1.5, 0.10000000000000001}, {5.5, 4.2000000000000002, 1.3999999999999999, 0.20000000000000001}, {4.9000000000000004, 3.1000000000000001, 1.5, 0.10000000000000001}, {5.0, 3.2000000000000002, 1.2, 0.20000000000000001}, {5.5, 3.5, 1.3, 0.20000000000000001}, {4.9000000000000004, 3.1000000000000001, 1.5, 0.10000000000000001}, {4.4000000000000004, 3.0, 1.3, 0.20000000000000001}, {5.0999999999999996, 3.3999999999999999, 1.5, 0.20000000000000001}, {5.0, 3.5, 1.3, 0.29999999999999999}, {4.5, 2.2999999999999998, 1.3, 0.29999999999999999}, {4.4000000000000004, 3.2000000000000002, 1.3, 0.20000000000000001}, {5.0, 3.5, 1.6000000000000001, 0.59999999999999998}, {5.0999999999999996, 3.7999999999999998, 1.8999999999999999, 0.40000000000000002}, {4.7999999999999998, 3.0, 1.3999999999999999, 0.29999999999999999}, {5.0999999999999996, 3.7999999999999998, 1.6000000000000001, 0.20000000000000001}, {4.5999999999999996, 3.2000000000000002, 1.3999999999999999, 0.20000000000000001}, {5.2999999999999998, 3.7000000000000002, 1.5, 0.20000000000000001}, {5.0, 3.2999999999999998, 1.3999999999999999, 0.20000000000000001}, {7.0, 3.2000000000000002, 4.7000000000000002, 1.3999999999999999}, {6.4000000000000004, 3.2000000000000002, 4.5, 1.5}, {6.9000000000000004, 3.1000000000000001, 4.9000000000000004, 1.5}, {5.5, 2.2999999999999998, 4.0, 1.3}, {6.5, 2.7999999999999998, 4.5999999999999996, 1.5}, {5.7000000000000002, 2.7999999999999998, 4.5, 1.3}, {6.2999999999999998, 3.2999999999999998, 4.7000000000000002, 1.6000000000000001}, {4.9000000000000004, 2.3999999999999999, 3.2999999999999998, 1.0}, {6.5999999999999996, 2.8999999999999999, 4.5999999999999996, 1.3}, {5.2000000000000002, 2.7000000000000002, 3.8999999999999999, 1.3999999999999999}, {5.0, 2.0, 3.5, 1.0}, {5.9000000000000004, 3.0, 4.2000000000000002, 1.5}, {6.0, 2.2000000000000002, 4.0, 1.0}, {6.0999999999999996, 2.8999999999999999, 4.7000000000000002, 1.3999999999999999}, {5.5999999999999996, 2.8999999999999999, 3.6000000000000001, 1.3}, {6.7000000000000002, 3.1000000000000001, 4.4000000000000004, 1.3999999999999999}, {5.5999999999999996, 3.0, 4.5, 1.5}, {5.7999999999999998, 2.7000000000000002, 4.0999999999999996, 1.0}, {6.2000000000000002, 2.2000000000000002, 4.5, 1.5}, {5.5999999999999996, 2.5, 3.8999999999999999, 1.1000000000000001}, {5.9000000000000004, 
3.2000000000000002, 4.7999999999999998, 1.8}, {6.0999999999999996, 2.7999999999999998, 4.0, 1.3}, {6.2999999999999998, 2.5, 4.9000000000000004, 1.5}, {6.0999999999999996, 2.7999999999999998, 4.7000000000000002, 1.2}, {6.4000000000000004, 2.8999999999999999, 4.2999999999999998, 1.3}, {6.5999999999999996, 3.0, 4.4000000000000004, 1.3999999999999999}, {6.7999999999999998, 2.7999999999999998, 4.7999999999999998, 1.3999999999999999}, {6.7000000000000002, 3.0, 5.0, 1.7}, {6.0, 2.8999999999999999, 4.5, 1.5}, {5.7000000000000002, 2.6000000000000001, 3.5, 1.0}, {5.5, 2.3999999999999999, 3.7999999999999998, 1.1000000000000001}, {5.5, 2.3999999999999999, 3.7000000000000002, 1.0}, {5.7999999999999998, 2.7000000000000002, 3.8999999999999999, 1.2}, {6.0, 2.7000000000000002, 5.0999999999999996, 1.6000000000000001}, {5.4000000000000004, 3.0, 4.5, 1.5}, {6.0, 3.3999999999999999, 4.5, 1.6000000000000001}, {6.7000000000000002, 3.1000000000000001, 4.7000000000000002, 1.5}, {6.2999999999999998, 2.2999999999999998, 4.4000000000000004, 1.3}, {5.5999999999999996, 3.0, 4.0999999999999996, 1.3}, {5.5, 2.5, 4.0, 1.3}, {5.5, 2.6000000000000001, 4.4000000000000004, 1.2}, {6.0999999999999996, 3.0, 4.5999999999999996, 1.3999999999999999}, {5.7999999999999998, 2.6000000000000001, 4.0, 1.2}, {5.0, 2.2999999999999998, 3.2999999999999998, 1.0}, {5.5999999999999996, 2.7000000000000002, 4.2000000000000002, 1.3}, {5.7000000000000002, 3.0, 4.2000000000000002, 1.2}, {5.7000000000000002, 2.8999999999999999, 4.2000000000000002, 1.3}, {6.2000000000000002, 2.8999999999999999, 4.2999999999999998, 1.3}, {5.0999999999999996, 2.5, 3.0, 1.1000000000000001}, {5.7000000000000002, 2.7999999999999998, 4.0999999999999996, 1.3}, {6.2999999999999998, 3.2999999999999998, 6.0, 2.5}, {5.7999999999999998, 2.7000000000000002, 5.0999999999999996, 1.8999999999999999}, {7.0999999999999996, 3.0, 5.9000000000000004, 2.1000000000000001}, {6.2999999999999998, 2.8999999999999999, 5.5999999999999996, 1.8}, {6.5, 3.0, 5.7999999999999998, 2.2000000000000002}, {7.5999999999999996, 3.0, 6.5999999999999996, 2.1000000000000001}, {4.9000000000000004, 2.5, 4.5, 1.7}, {7.2999999999999998, 2.8999999999999999, 6.2999999999999998, 1.8}, {6.7000000000000002, 2.5, 5.7999999999999998, 1.8}, {7.2000000000000002, 3.6000000000000001, 6.0999999999999996, 2.5}, {6.5, 3.2000000000000002, 5.0999999999999996, 2.0}, {6.4000000000000004, 2.7000000000000002, 5.2999999999999998, 1.8999999999999999}, {6.7999999999999998, 3.0, 5.5, 2.1000000000000001}, {5.7000000000000002, 2.5, 5.0, 2.0}, {5.7999999999999998, 2.7999999999999998, 5.0999999999999996, 2.3999999999999999}, {6.4000000000000004, 3.2000000000000002, 5.2999999999999998, 2.2999999999999998}, {6.5, 3.0, 5.5, 1.8}, {7.7000000000000002, 3.7999999999999998, 6.7000000000000002, 2.2000000000000002}, {7.7000000000000002, 2.6000000000000001, 6.9000000000000004, 2.2999999999999998}, {6.0, 2.2000000000000002, 5.0, 1.5}, {6.9000000000000004, 3.2000000000000002, 5.7000000000000002, 2.2999999999999998}, {5.5999999999999996, 2.7999999999999998, 4.9000000000000004, 2.0}, {7.7000000000000002, 2.7999999999999998, 6.7000000000000002, 2.0}, {6.2999999999999998, 2.7000000000000002, 4.9000000000000004, 1.8}, {6.7000000000000002, 3.2999999999999998, 5.7000000000000002, 2.1000000000000001}, {7.2000000000000002, 3.2000000000000002, 6.0, 1.8}, {6.2000000000000002, 2.7999999999999998, 4.7999999999999998, 1.8}, {6.0999999999999996, 3.0, 4.9000000000000004, 1.8}, {6.4000000000000004, 2.7999999999999998, 5.5999999999999996, 2.1000000000000001}, 
{7.2000000000000002, 3.0, 5.7999999999999998, 1.6000000000000001}, {7.4000000000000004, 2.7999999999999998, 6.0999999999999996, 1.8999999999999999}, {7.9000000000000004, 3.7999999999999998, 6.4000000000000004, 2.0}, {6.4000000000000004, 2.7999999999999998, 5.5999999999999996, 2.2000000000000002}, {6.2999999999999998, 2.7999999999999998, 5.0999999999999996, 1.5}, {6.0999999999999996, 2.6000000000000001, 5.5999999999999996, 1.3999999999999999}, {7.7000000000000002, 3.0, 6.0999999999999996, 2.2999999999999998}, {6.2999999999999998, 3.3999999999999999, 5.5999999999999996, 2.3999999999999999}, {6.4000000000000004, 3.1000000000000001, 5.5, 1.8}, {6.0, 3.0, 4.7999999999999998, 1.8}, {6.9000000000000004, 3.1000000000000001, 5.4000000000000004, 2.1000000000000001}, {6.7000000000000002, 3.1000000000000001, 5.5999999999999996, 2.3999999999999999}, {6.9000000000000004, 3.1000000000000001, 5.0999999999999996, 2.2999999999999998}, {5.7999999999999998, 2.7000000000000002, 5.0999999999999996, 1.8999999999999999}, {6.7999999999999998, 3.2000000000000002, 5.9000000000000004, 2.2999999999999998}, {6.7000000000000002, 3.2999999999999998, 5.7000000000000002, 2.5}, {6.7000000000000002, 3.0, 5.2000000000000002, 2.2999999999999998}, {6.2999999999999998, 2.5, 5.0, 1.8999999999999999}, {6.5, 3.0, 5.2000000000000002, 2.0}, {6.2000000000000002, 3.3999999999999999, 5.4000000000000004, 2.2999999999999998}, {5.9000000000000004, 3.0, 5.0999999999999996, 1.8}}; +double coefficients[2][150] = {{1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -0.0, -1.0, -0.0, -1.0, -0.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -0.0, -0.0, -1.0, -1.0, -1.0, -0.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -0.0, -0.0, -1.0, -1.0, -1.0, -0.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0}, {1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0}}; +double intercepts[3] = {0.043376922607421875, 0.11445245146751404, -0.0031709671020507812}; +int weights[3] = {50, 50, 50}; + +int predict (double features[]) { int i, j, k, d, l; - int n_svs[] = {50, 50, 50}; - double svs[150][4] = {{5.0999999999999996, 3.5, 1.3999999999999999, 0.20000000000000001}, {4.9000000000000004, 3.0, 1.3999999999999999, 0.20000000000000001}, {4.7000000000000002, 3.2000000000000002, 
1.3, 0.20000000000000001}, {4.5999999999999996, 3.1000000000000001, 1.5, 0.20000000000000001}, {5.0, 3.6000000000000001, 1.3999999999999999, 0.20000000000000001}, {5.4000000000000004, 3.8999999999999999, 1.7, 0.40000000000000002}, {4.5999999999999996, 3.3999999999999999, 1.3999999999999999, 0.29999999999999999}, {5.0, 3.3999999999999999, 1.5, 0.20000000000000001}, {4.4000000000000004, 2.8999999999999999, 1.3999999999999999, 0.20000000000000001}, {4.9000000000000004, 3.1000000000000001, 1.5, 0.10000000000000001}, {5.4000000000000004, 3.7000000000000002, 1.5, 0.20000000000000001}, {4.7999999999999998, 3.3999999999999999, 1.6000000000000001, 0.20000000000000001}, {4.7999999999999998, 3.0, 1.3999999999999999, 0.10000000000000001}, {4.2999999999999998, 3.0, 1.1000000000000001, 0.10000000000000001}, {5.7999999999999998, 4.0, 1.2, 0.20000000000000001}, {5.7000000000000002, 4.4000000000000004, 1.5, 0.40000000000000002}, {5.4000000000000004, 3.8999999999999999, 1.3, 0.40000000000000002}, {5.0999999999999996, 3.5, 1.3999999999999999, 0.29999999999999999}, {5.7000000000000002, 3.7999999999999998, 1.7, 0.29999999999999999}, {5.0999999999999996, 3.7999999999999998, 1.5, 0.29999999999999999}, {5.4000000000000004, 3.3999999999999999, 1.7, 0.20000000000000001}, {5.0999999999999996, 3.7000000000000002, 1.5, 0.40000000000000002}, {4.5999999999999996, 3.6000000000000001, 1.0, 0.20000000000000001}, {5.0999999999999996, 3.2999999999999998, 1.7, 0.5}, {4.7999999999999998, 3.3999999999999999, 1.8999999999999999, 0.20000000000000001}, {5.0, 3.0, 1.6000000000000001, 0.20000000000000001}, {5.0, 3.3999999999999999, 1.6000000000000001, 0.40000000000000002}, {5.2000000000000002, 3.5, 1.5, 0.20000000000000001}, {5.2000000000000002, 3.3999999999999999, 1.3999999999999999, 0.20000000000000001}, {4.7000000000000002, 3.2000000000000002, 1.6000000000000001, 0.20000000000000001}, {4.7999999999999998, 3.1000000000000001, 1.6000000000000001, 0.20000000000000001}, {5.4000000000000004, 3.3999999999999999, 1.5, 0.40000000000000002}, {5.2000000000000002, 4.0999999999999996, 1.5, 0.10000000000000001}, {5.5, 4.2000000000000002, 1.3999999999999999, 0.20000000000000001}, {4.9000000000000004, 3.1000000000000001, 1.5, 0.10000000000000001}, {5.0, 3.2000000000000002, 1.2, 0.20000000000000001}, {5.5, 3.5, 1.3, 0.20000000000000001}, {4.9000000000000004, 3.1000000000000001, 1.5, 0.10000000000000001}, {4.4000000000000004, 3.0, 1.3, 0.20000000000000001}, {5.0999999999999996, 3.3999999999999999, 1.5, 0.20000000000000001}, {5.0, 3.5, 1.3, 0.29999999999999999}, {4.5, 2.2999999999999998, 1.3, 0.29999999999999999}, {4.4000000000000004, 3.2000000000000002, 1.3, 0.20000000000000001}, {5.0, 3.5, 1.6000000000000001, 0.59999999999999998}, {5.0999999999999996, 3.7999999999999998, 1.8999999999999999, 0.40000000000000002}, {4.7999999999999998, 3.0, 1.3999999999999999, 0.29999999999999999}, {5.0999999999999996, 3.7999999999999998, 1.6000000000000001, 0.20000000000000001}, {4.5999999999999996, 3.2000000000000002, 1.3999999999999999, 0.20000000000000001}, {5.2999999999999998, 3.7000000000000002, 1.5, 0.20000000000000001}, {5.0, 3.2999999999999998, 1.3999999999999999, 0.20000000000000001}, {7.0, 3.2000000000000002, 4.7000000000000002, 1.3999999999999999}, {6.4000000000000004, 3.2000000000000002, 4.5, 1.5}, {6.9000000000000004, 3.1000000000000001, 4.9000000000000004, 1.5}, {5.5, 2.2999999999999998, 4.0, 1.3}, {6.5, 2.7999999999999998, 4.5999999999999996, 1.5}, {5.7000000000000002, 2.7999999999999998, 4.5, 1.3}, {6.2999999999999998, 3.2999999999999998, 
4.7000000000000002, 1.6000000000000001}, {4.9000000000000004, 2.3999999999999999, 3.2999999999999998, 1.0}, {6.5999999999999996, 2.8999999999999999, 4.5999999999999996, 1.3}, {5.2000000000000002, 2.7000000000000002, 3.8999999999999999, 1.3999999999999999}, {5.0, 2.0, 3.5, 1.0}, {5.9000000000000004, 3.0, 4.2000000000000002, 1.5}, {6.0, 2.2000000000000002, 4.0, 1.0}, {6.0999999999999996, 2.8999999999999999, 4.7000000000000002, 1.3999999999999999}, {5.5999999999999996, 2.8999999999999999, 3.6000000000000001, 1.3}, {6.7000000000000002, 3.1000000000000001, 4.4000000000000004, 1.3999999999999999}, {5.5999999999999996, 3.0, 4.5, 1.5}, {5.7999999999999998, 2.7000000000000002, 4.0999999999999996, 1.0}, {6.2000000000000002, 2.2000000000000002, 4.5, 1.5}, {5.5999999999999996, 2.5, 3.8999999999999999, 1.1000000000000001}, {5.9000000000000004, 3.2000000000000002, 4.7999999999999998, 1.8}, {6.0999999999999996, 2.7999999999999998, 4.0, 1.3}, {6.2999999999999998, 2.5, 4.9000000000000004, 1.5}, {6.0999999999999996, 2.7999999999999998, 4.7000000000000002, 1.2}, {6.4000000000000004, 2.8999999999999999, 4.2999999999999998, 1.3}, {6.5999999999999996, 3.0, 4.4000000000000004, 1.3999999999999999}, {6.7999999999999998, 2.7999999999999998, 4.7999999999999998, 1.3999999999999999}, {6.7000000000000002, 3.0, 5.0, 1.7}, {6.0, 2.8999999999999999, 4.5, 1.5}, {5.7000000000000002, 2.6000000000000001, 3.5, 1.0}, {5.5, 2.3999999999999999, 3.7999999999999998, 1.1000000000000001}, {5.5, 2.3999999999999999, 3.7000000000000002, 1.0}, {5.7999999999999998, 2.7000000000000002, 3.8999999999999999, 1.2}, {6.0, 2.7000000000000002, 5.0999999999999996, 1.6000000000000001}, {5.4000000000000004, 3.0, 4.5, 1.5}, {6.0, 3.3999999999999999, 4.5, 1.6000000000000001}, {6.7000000000000002, 3.1000000000000001, 4.7000000000000002, 1.5}, {6.2999999999999998, 2.2999999999999998, 4.4000000000000004, 1.3}, {5.5999999999999996, 3.0, 4.0999999999999996, 1.3}, {5.5, 2.5, 4.0, 1.3}, {5.5, 2.6000000000000001, 4.4000000000000004, 1.2}, {6.0999999999999996, 3.0, 4.5999999999999996, 1.3999999999999999}, {5.7999999999999998, 2.6000000000000001, 4.0, 1.2}, {5.0, 2.2999999999999998, 3.2999999999999998, 1.0}, {5.5999999999999996, 2.7000000000000002, 4.2000000000000002, 1.3}, {5.7000000000000002, 3.0, 4.2000000000000002, 1.2}, {5.7000000000000002, 2.8999999999999999, 4.2000000000000002, 1.3}, {6.2000000000000002, 2.8999999999999999, 4.2999999999999998, 1.3}, {5.0999999999999996, 2.5, 3.0, 1.1000000000000001}, {5.7000000000000002, 2.7999999999999998, 4.0999999999999996, 1.3}, {6.2999999999999998, 3.2999999999999998, 6.0, 2.5}, {5.7999999999999998, 2.7000000000000002, 5.0999999999999996, 1.8999999999999999}, {7.0999999999999996, 3.0, 5.9000000000000004, 2.1000000000000001}, {6.2999999999999998, 2.8999999999999999, 5.5999999999999996, 1.8}, {6.5, 3.0, 5.7999999999999998, 2.2000000000000002}, {7.5999999999999996, 3.0, 6.5999999999999996, 2.1000000000000001}, {4.9000000000000004, 2.5, 4.5, 1.7}, {7.2999999999999998, 2.8999999999999999, 6.2999999999999998, 1.8}, {6.7000000000000002, 2.5, 5.7999999999999998, 1.8}, {7.2000000000000002, 3.6000000000000001, 6.0999999999999996, 2.5}, {6.5, 3.2000000000000002, 5.0999999999999996, 2.0}, {6.4000000000000004, 2.7000000000000002, 5.2999999999999998, 1.8999999999999999}, {6.7999999999999998, 3.0, 5.5, 2.1000000000000001}, {5.7000000000000002, 2.5, 5.0, 2.0}, {5.7999999999999998, 2.7999999999999998, 5.0999999999999996, 2.3999999999999999}, {6.4000000000000004, 3.2000000000000002, 5.2999999999999998, 2.2999999999999998}, {6.5, 3.0, 
5.5, 1.8}, {7.7000000000000002, 3.7999999999999998, 6.7000000000000002, 2.2000000000000002}, {7.7000000000000002, 2.6000000000000001, 6.9000000000000004, 2.2999999999999998}, {6.0, 2.2000000000000002, 5.0, 1.5}, {6.9000000000000004, 3.2000000000000002, 5.7000000000000002, 2.2999999999999998}, {5.5999999999999996, 2.7999999999999998, 4.9000000000000004, 2.0}, {7.7000000000000002, 2.7999999999999998, 6.7000000000000002, 2.0}, {6.2999999999999998, 2.7000000000000002, 4.9000000000000004, 1.8}, {6.7000000000000002, 3.2999999999999998, 5.7000000000000002, 2.1000000000000001}, {7.2000000000000002, 3.2000000000000002, 6.0, 1.8}, {6.2000000000000002, 2.7999999999999998, 4.7999999999999998, 1.8}, {6.0999999999999996, 3.0, 4.9000000000000004, 1.8}, {6.4000000000000004, 2.7999999999999998, 5.5999999999999996, 2.1000000000000001}, {7.2000000000000002, 3.0, 5.7999999999999998, 1.6000000000000001}, {7.4000000000000004, 2.7999999999999998, 6.0999999999999996, 1.8999999999999999}, {7.9000000000000004, 3.7999999999999998, 6.4000000000000004, 2.0}, {6.4000000000000004, 2.7999999999999998, 5.5999999999999996, 2.2000000000000002}, {6.2999999999999998, 2.7999999999999998, 5.0999999999999996, 1.5}, {6.0999999999999996, 2.6000000000000001, 5.5999999999999996, 1.3999999999999999}, {7.7000000000000002, 3.0, 6.0999999999999996, 2.2999999999999998}, {6.2999999999999998, 3.3999999999999999, 5.5999999999999996, 2.3999999999999999}, {6.4000000000000004, 3.1000000000000001, 5.5, 1.8}, {6.0, 3.0, 4.7999999999999998, 1.8}, {6.9000000000000004, 3.1000000000000001, 5.4000000000000004, 2.1000000000000001}, {6.7000000000000002, 3.1000000000000001, 5.5999999999999996, 2.3999999999999999}, {6.9000000000000004, 3.1000000000000001, 5.0999999999999996, 2.2999999999999998}, {5.7999999999999998, 2.7000000000000002, 5.0999999999999996, 1.8999999999999999}, {6.7999999999999998, 3.2000000000000002, 5.9000000000000004, 2.2999999999999998}, {6.7000000000000002, 3.2999999999999998, 5.7000000000000002, 2.5}, {6.7000000000000002, 3.0, 5.2000000000000002, 2.2999999999999998}, {6.2999999999999998, 2.5, 5.0, 1.8999999999999999}, {6.5, 3.0, 5.2000000000000002, 2.0}, {6.2000000000000002, 3.3999999999999999, 5.4000000000000004, 2.2999999999999998}, {5.9000000000000004, 3.0, 5.0999999999999996, 1.8}}; - double coeffs[2][150] = {{1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -0.0, -1.0, -0.0, -1.0, -0.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -0.0, -0.0, -1.0, -1.0, -1.0, -0.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -0.0, -0.0, -1.0, -1.0, -1.0, -0.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0}, {1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0}}; - double inters[] = {0.043376922607421875, 0.11445245146751404, -0.0031709671020507812}; - int classes[] = {0, 1, 2}; - - // exp(-y|x-x'|^2) - double kernels[150]; + double kernels[N_VECTORS]; double kernel; - for (i = 0; i < 150; i++) { - kernel = 0.; - for (j = 0; j < 4; j++) { - kernel += pow(svs[i][j] - atts[j], 2); - } - kernels[i] = exp(-0.001 * kernel); + switch (KERNEL_TYPE) { + case 'l': + // + for (i = 0; i < N_VECTORS; i++) { + kernel = 0.; + for (j = 0; j < N_FEATURES; j++) { + kernel += vectors[i][j] * features[j]; + } + kernels[i] = kernel; + } + break; + case 'p': + // (y+r)^d + for (i = 0; i < N_VECTORS; i++) { + kernel = 0.; + for (j = 0; j < N_FEATURES; j++) { + kernel += vectors[i][j] * features[j]; + } + kernels[i] = pow((KERNEL_GAMMA * kernel) + KERNEL_COEF, KERNEL_DEGREE); + } + break; + case 'r': + // exp(-y|x-x'|^2) + for (i = 0; i < N_VECTORS; i++) { + kernel = 0.; + for (j = 0; j < N_FEATURES; j++) { + kernel += pow(vectors[i][j] - features[j], 2); + } + kernels[i] = exp(-KERNEL_GAMMA * kernel); + } + break; + case 's': + // tanh(y+r) + for (i = 0; i < N_VECTORS; i++) { + kernel = 0.; + for (j = 0; j < N_FEATURES; j++) { + kernel += vectors[i][j] * features[j]; + } + kernels[i] = tanh((KERNEL_GAMMA * kernel) + KERNEL_COEF); + } + break; } - int starts[3]; - for (i = 0; i < 3; i++) { + int starts[N_CLASSES]; + int start; + for (i = 0; i < N_CLASSES; i++) { if (i != 0) { - int start = 0; + start = 0; for (j = 0; j < i; j++) { - start += n_svs[j]; + start += weights[j]; } starts[i] = start; } else { @@ -52,59 +99,90 @@ } } - int ends[3]; - for (i = 0; i < 3; i++) { - ends[i] = n_svs[i] + starts[i]; + int ends[N_CLASSES]; + for (i = 0; i < N_CLASSES; i++) { + ends[i] = weights[i] + starts[i]; } - double decisions[3]; - for (i = 0, d = 0, l = 3; i < l; i++) { - for (j = i + 1; j < l; j++) { - double tmp = 0.; - for (k = starts[j]; k < ends[j]; k++) { - tmp += kernels[k] * coeffs[i][k]; - } - for (k = starts[i]; k < ends[i]; k++) { - tmp += kernels[k] * coeffs[j - 1][k]; + if (N_CLASSES == 2) { + + for (i = 0; i < N_VECTORS; i++) { + kernels[i] = -kernels[i]; + } + + double decision = 0.; + for (k = starts[1]; k < ends[1]; k++) { + decision += kernels[k] * coefficients[0][k]; + } + for (k = starts[0]; k < ends[0]; k++) { + decision += kernels[k] * coefficients[0][k]; + } + decision += intercepts[0]; + + if (decision > 0) { + return 0; + } + return 1; + + } else { + + double decisions[N_INTERCEPTS]; + double tmp; + for (i = 0, d = 0, l = N_ROWS; i < l; i++) { + for (j = i + 1; j < l; j++) { + tmp = 0.; + for (k = starts[j]; k < ends[j]; k++) { + tmp += kernels[k] * coefficients[i][k]; + } + for (k = starts[i]; k < ends[i]; k++) { + tmp += kernels[k] * coefficients[j - 1][k]; + } + decisions[d] = tmp + intercepts[d]; + d = d + 1; } - decisions[d] = tmp + inters[d]; - d = d + 1; } - } - int votes[3]; - for (i = 0, d = 0, l = 3; i < l; i++) { - for (j = i + 1; j < l; j++) { - votes[d] = decisions[d] > 0 ? i : j; - d = d + 1; + int votes[N_INTERCEPTS]; + for (i = 0, d = 0, l = N_ROWS; i < l; i++) { + for (j = i + 1; j < l; j++) { + votes[d] = decisions[d] > 0 ? 
i : j; + d = d + 1; + } } - } - int amounts[3]; - for (i = 0, l = 3; i < l; i++) { - amounts[i] = 0; - } - for (i = 0, l = 3; i < l; i++) { - amounts[votes[i]] += 1; - } + int amounts[N_CLASSES]; + for (i = 0, l = N_CLASSES; i < l; i++) { + amounts[i] = 0; + } + for (i = 0; i < N_INTERCEPTS; i++) { + amounts[votes[i]] += 1; + } - int class_val = -1, class_idx = -1; - for (i = 0, l = 3; i < l; i++) { - if (amounts[i] > class_val) { - class_val = amounts[i]; - class_idx = i; + int classVal = -1; + int classIdx = -1; + for (i = 0; i < N_CLASSES; i++) { + if (amounts[i] > classVal) { + classVal = amounts[i]; + classIdx= i; + } } + return classIdx; + } - return classes[class_idx]; } int main(int argc, const char * argv[]) { - float atts[argc-1]; - int i = 0; + + /* Features: */ + double features[argc-1]; + int i; for (i = 1; i < argc; i++) { - atts[i-1] = atof(argv[i]); + features[i-1] = atof(argv[i]); } - printf("%d", predict(atts)); + + /* Prediction: */ + printf("%d", predict(features)); return 0; + } """ diff --git a/sklearn_porter/estimator/classifier/SVC/__init__.py b/sklearn_porter/estimator/classifier/SVC/__init__.py index 912e87b7..b9234e7b 100644 --- a/sklearn_porter/estimator/classifier/SVC/__init__.py +++ b/sklearn_porter/estimator/classifier/SVC/__init__.py @@ -19,7 +19,7 @@ class SVC(Classifier): 'c': { 'type': '{0}', 'arr': '{{{0}}}', - 'arr[]': '{type} {name}[] = {{{values}}};', + 'arr[]': '{type} {name}[{n}] = {{{values}}};', 'arr[][]': '{type} {name}[{n}][{m}] = {{{values}}};', 'indent': ' ', }, @@ -91,8 +91,6 @@ def __init__(self, estimator, target_language='java', raise ValueError(msg) self.params = params - # self.svs = estimator.support_vectors_ - # self.n_svs = len(estimator.support_vectors_[0]) self.n_features = len(estimator.support_vectors_[0]) self.svs_rows = estimator.n_support_ self.n_svs_rows = len(estimator.n_support_) @@ -100,11 +98,8 @@ def __init__(self, estimator, target_language='java', self.weights = self.temp('arr[]', skipping=True).format( type='int', name='weights', values=', '.join([str(e) for e in self.svs_rows]), n=len(self.svs_rows)) + self.n_weights = len(self.svs_rows) - # self.coeffs = estimator.dual_coef_ - # self.inters = estimator._intercept_ # pylint: disable=W0212 - # self.n_inters = len(estimator._intercept_) # pylint: disable=W0212 - # self.classes = estimator.classes_ self.n_classes = len(estimator.classes_) self.is_binary = self.n_classes == 2 self.prefix = 'binary' if self.is_binary else 'multi' @@ -118,8 +113,9 @@ def __init__(self, estimator, target_language='java', vectors = ', '.join(vectors) vectors = self.temp('arr[][]', skipping=True).format( type='double', name='vectors', values=vectors, - n=len(estimator.support_vectors_), m=estimator.support_vectors_[0]) + n=len(estimator.support_vectors_), m=len(estimator.support_vectors_[0])) self.vectors = vectors + self.n_vectors = len(estimator.support_vectors_) # Coefficients: coeffs = [] @@ -132,16 +128,19 @@ def __init__(self, estimator, target_language='java', values=coeffs, n=len(estimator.dual_coef_), m=len(estimator.dual_coef_[0])) self.coefficients = coeffs + self.n_coefficients = len(estimator.dual_coef_) # Interceptions: inters = [temp_type.format(self.repr(i)) for i in estimator._intercept_] inters = ', '.join(inters) inters = temp_arr_.format(type='double', name='intercepts', - values=inters) + values=inters, n=len(estimator._intercept_)) self.intercepts = inters + self.n_intercepts = len(estimator._intercept_) # Kernel: - self.kernel = str(params['kernel']) + self.kernel = 
str(params['kernel'])[0] if self.target_language == 'c'\ + else str(params['kernel']) self.gamma = self.repr(self.params['gamma']) self.coef0 = self.repr(self.params['coef0']) self.degree = self.repr(self.params['degree']) @@ -196,92 +195,9 @@ def create_method(self): :return out : string The built method as string. """ - temp_type = self.temp('type') - temp_arr = self.temp('arr') - temp_arr_ = self.temp('arr[]') - temp_arr__ = self.temp('arr[][]') - - # out = '\n' - - # Number of support vectors: - # n_svs = [temp_type.format(self.repr(v)) for v in self.svs_rows] - # n_svs = ', '.join(n_svs) - # out += temp_arr_.format(type='int', name='n_svs', values=n_svs) - # out += '\n' - - # # Support vectors: - # vectors = [] - # for vector in self.svs: - # _vectors = [temp_type.format(self.repr(v)) for v in vector] - # _vectors = temp_arr.format(', '.join(_vectors)) - # vectors.append(_vectors) - # vectors = ', '.join(vectors) - # out += self.temp('arr[][]', skipping=True).format( - # type='double', name='svs', values=vectors, - # n=len(self.svs), m=self.n_svs) - # out += '\n' - - # # Coefficients: - # coeffs = [] - # for coeff in self.coeffs: - # _coeffs = [temp_type.format(self.repr(c)) for c in coeff] - # _coeffs = temp_arr.format(', '.join(_coeffs)) - # coeffs.append(_coeffs) - # coeffs = ', '.join(coeffs) - # out += temp_arr__.format(type='double', name='coeffs', values=coeffs, - # n=len(self.coeffs), m=len(self.coeffs[0])) - # out += '\n' - - # Interceptions: - # inters = [temp_type.format(self.repr(i)) for i in self.inters] - # inters = ', '.join(inters) - # out += temp_arr_.format(type='double', name='inters', values=inters) - # out += '\n' - - # Classes: - # if not self.is_binary: - # classes = [temp_type.format(self.repr(c)) for c in self.classes] - # classes = ', '.join(classes) - # out += temp_arr_.format(type='int', name='classes', values=classes) - # out += '\n' - - # # Kernels: - # if self.params['kernel'] == 'rbf': - # name = self.prefix + '.kernel.rbf' - # out += self.temp(name).format( - # len(self.svs), self.n_svs, - # self.repr(self.params['gamma'])) - # elif self.params['kernel'] == 'poly': - # name = self.prefix + '.kernel.poly' - # out += self.temp(name).format( - # len(self.svs), self.n_svs, - # self.repr(self.params['gamma']), - # self.repr(self.params['coef0']), - # self.repr(self.params['degree'])) - # elif self.params['kernel'] == 'sigmoid': - # name = self.prefix + '.kernel.sigmoid' - # out += self.temp(name).format( - # len(self.svs), self.n_svs, - # self.repr(self.params['gamma']), - # self.repr(self.params['coef0']), - # self.repr(self.params['degree'])) - # elif self.params['kernel'] == 'linear': - # name = self.prefix + '.kernel.linear' - # out += self.temp(name).format( - # len(self.svs), self.n_svs) - # out += '\n' - - # # Decicion: - # out += self.temp('starts').format(self.n_svs_rows) - # out += self.temp('ends').format(self.n_svs_rows) - # name = self.prefix + '.decisions' - # out += self.temp(name).format(self.n_inters, self.n_svs_rows) - # name = self.prefix + '.classes' - # out += self.temp(name).format(self.n_inters, self.n_classes) - n_indents = 0 if self.target_language in ['java', 'js', 'php', 'ruby'] else 1 - # out = self.indent(out, n_indents=2-n_indents) - - method = self.temp('method', n_indents=1-n_indents, + n_indents = 1 if self.target_language in ['java', 'js', + 'php', 'ruby'] else 0 + method = self.temp('method', n_indents=n_indents, skipping=True).format(**self.__dict__) return method diff --git 
a/sklearn_porter/estimator/classifier/SVC/templates/c/binary.classes.txt b/sklearn_porter/estimator/classifier/SVC/templates/c/binary.classes.txt deleted file mode 100644 index b43cd59c..00000000 --- a/sklearn_porter/estimator/classifier/SVC/templates/c/binary.classes.txt +++ /dev/null @@ -1,5 +0,0 @@ - -if (decision > 0) {{ - return 0; -}} -return 1; diff --git a/sklearn_porter/estimator/classifier/SVC/templates/c/binary.decisions.txt b/sklearn_porter/estimator/classifier/SVC/templates/c/binary.decisions.txt deleted file mode 100644 index 8b260e4e..00000000 --- a/sklearn_porter/estimator/classifier/SVC/templates/c/binary.decisions.txt +++ /dev/null @@ -1,9 +0,0 @@ - -double decision = 0.; -for (k = starts[1]; k < ends[1]; k++) {{ - decision += kernels[k] * coeffs[0][k]; -}} -for (k = starts[0]; k < ends[0]; k++) {{ - decision += kernels[k] * coeffs[0][k]; -}} -decision += inters[0]; diff --git a/sklearn_porter/estimator/classifier/SVC/templates/c/binary.kernel.linear.txt b/sklearn_porter/estimator/classifier/SVC/templates/c/binary.kernel.linear.txt deleted file mode 100644 index da37b6c5..00000000 --- a/sklearn_porter/estimator/classifier/SVC/templates/c/binary.kernel.linear.txt +++ /dev/null @@ -1,11 +0,0 @@ - -// -double kernels[{0}]; -double kernel; -for (i = 0; i < {0}; i++) {{ - kernel = 0.; - for (j = 0; j < {1}; j++) {{ - kernel += svs[i][j] * atts[j]; - }} - kernels[i] = -kernel; -}} diff --git a/sklearn_porter/estimator/classifier/SVC/templates/c/binary.kernel.poly.txt b/sklearn_porter/estimator/classifier/SVC/templates/c/binary.kernel.poly.txt deleted file mode 100644 index 8319de11..00000000 --- a/sklearn_porter/estimator/classifier/SVC/templates/c/binary.kernel.poly.txt +++ /dev/null @@ -1,11 +0,0 @@ - -// (y+r)^d -double kernels[{0}]; -double kernel; -for (i = 0; i < {0}; i++) {{ - kernel = 0.; - for (j = 0; j < {1}; j++) {{ - kernel += svs[i][j] * atts[j]; - }} - kernels[i] = -(pow(({2} * kernel) + {3}, {4})); -}} diff --git a/sklearn_porter/estimator/classifier/SVC/templates/c/binary.kernel.rbf.txt b/sklearn_porter/estimator/classifier/SVC/templates/c/binary.kernel.rbf.txt deleted file mode 100644 index 7b3938b0..00000000 --- a/sklearn_porter/estimator/classifier/SVC/templates/c/binary.kernel.rbf.txt +++ /dev/null @@ -1,11 +0,0 @@ - -// exp(-y|x-x'|^2) -double kernels[{0}]; -double kernel; -for (i = 0; i < {0}; i++) {{ - kernel = 0.; - for (j = 0; j < {1}; j++) {{ - kernel += pow(svs[i][j] - atts[j], 2); - }} - kernels[i] = -(exp(-{2} * kernel)); -}} diff --git a/sklearn_porter/estimator/classifier/SVC/templates/c/binary.kernel.sigmoid.txt b/sklearn_porter/estimator/classifier/SVC/templates/c/binary.kernel.sigmoid.txt deleted file mode 100644 index c47f0fab..00000000 --- a/sklearn_porter/estimator/classifier/SVC/templates/c/binary.kernel.sigmoid.txt +++ /dev/null @@ -1,11 +0,0 @@ - -// tanh(y+r) -double kernels[{0}]; -double kernel; -for (i = 0; i < {0}; i++) {{ - kernel = 0.; - for (j = 0; j < {1}; j++) {{ - kernel += svs[i][j] * atts[j]; - }} - kernels[i] = -(tanh(({2} * kernel) + {3})); -}} diff --git a/sklearn_porter/estimator/classifier/SVC/templates/c/class.txt b/sklearn_porter/estimator/classifier/SVC/templates/c/class.txt index 352d35dd..6d8e59b5 100644 --- a/sklearn_porter/estimator/classifier/SVC/templates/c/class.txt +++ b/sklearn_porter/estimator/classifier/SVC/templates/c/class.txt @@ -2,14 +2,35 @@ #include #include +#define N_FEATURES {n_features} +#define N_CLASSES {n_classes} +#define N_VECTORS {n_vectors} +#define N_ROWS {n_svs_rows} +#define 
N_COEFFICIENTS {n_coefficients} +#define N_INTERCEPTS {n_intercepts} +#define KERNEL_TYPE '{kernel}' +#define KERNEL_GAMMA {gamma} +#define KERNEL_COEF {coef0} +#define KERNEL_DEGREE {degree} + +{vectors} +{coefficients} +{intercepts} +{weights} + {method} int main(int argc, const char * argv[]) {{ - float atts[argc-1]; - int i = 0; + + /* Features: */ + double features[argc-1]; + int i; for (i = 1; i < argc; i++) {{ - atts[i-1] = atof(argv[i]); + features[i-1] = atof(argv[i]); }} - printf("%d", {method_name}(atts)); + + /* Prediction: */ + printf("%d", {method_name}(features)); return 0; + }} diff --git a/sklearn_porter/estimator/classifier/SVC/templates/c/ends.txt b/sklearn_porter/estimator/classifier/SVC/templates/c/ends.txt deleted file mode 100644 index bb3b6ab2..00000000 --- a/sklearn_porter/estimator/classifier/SVC/templates/c/ends.txt +++ /dev/null @@ -1,5 +0,0 @@ - -int ends[{0}]; -for (i = 0; i < {0}; i++) {{ - ends[i] = n_svs[i] + starts[i]; -}} diff --git a/sklearn_porter/estimator/classifier/SVC/templates/c/method.txt b/sklearn_porter/estimator/classifier/SVC/templates/c/method.txt index b093bc35..ef6e2490 100644 --- a/sklearn_porter/estimator/classifier/SVC/templates/c/method.txt +++ b/sklearn_porter/estimator/classifier/SVC/templates/c/method.txt @@ -1,4 +1,133 @@ -int {method_name}(float atts[]) {{ +int {method_name} (double features[]) {{ int i, j, k, d, l; - {decicion} -}} + + double kernels[N_VECTORS]; + double kernel; + switch (KERNEL_TYPE) {{ + case 'l': + // + for (i = 0; i < N_VECTORS; i++) {{ + kernel = 0.; + for (j = 0; j < N_FEATURES; j++) {{ + kernel += vectors[i][j] * features[j]; + }} + kernels[i] = kernel; + }} + break; + case 'p': + // (y+r)^d + for (i = 0; i < N_VECTORS; i++) {{ + kernel = 0.; + for (j = 0; j < N_FEATURES; j++) {{ + kernel += vectors[i][j] * features[j]; + }} + kernels[i] = pow((KERNEL_GAMMA * kernel) + KERNEL_COEF, KERNEL_DEGREE); + }} + break; + case 'r': + // exp(-y|x-x'|^2) + for (i = 0; i < N_VECTORS; i++) {{ + kernel = 0.; + for (j = 0; j < N_FEATURES; j++) {{ + kernel += pow(vectors[i][j] - features[j], 2); + }} + kernels[i] = exp(-KERNEL_GAMMA * kernel); + }} + break; + case 's': + // tanh(y+r) + for (i = 0; i < N_VECTORS; i++) {{ + kernel = 0.; + for (j = 0; j < N_FEATURES; j++) {{ + kernel += vectors[i][j] * features[j]; + }} + kernels[i] = tanh((KERNEL_GAMMA * kernel) + KERNEL_COEF); + }} + break; + }} + + int starts[N_CLASSES]; + int start; + for (i = 0; i < N_CLASSES; i++) {{ + if (i != 0) {{ + start = 0; + for (j = 0; j < i; j++) {{ + start += weights[j]; + }} + starts[i] = start; + }} else {{ + starts[0] = 0; + }} + }} + + int ends[N_CLASSES]; + for (i = 0; i < N_CLASSES; i++) {{ + ends[i] = weights[i] + starts[i]; + }} + + if (N_CLASSES == 2) {{ + + for (i = 0; i < N_VECTORS; i++) {{ + kernels[i] = -kernels[i]; + }} + + double decision = 0.; + for (k = starts[1]; k < ends[1]; k++) {{ + decision += kernels[k] * coefficients[0][k]; + }} + for (k = starts[0]; k < ends[0]; k++) {{ + decision += kernels[k] * coefficients[0][k]; + }} + decision += intercepts[0]; + + if (decision > 0) {{ + return 0; + }} + return 1; + + }} else {{ + + double decisions[N_INTERCEPTS]; + double tmp; + for (i = 0, d = 0, l = N_ROWS; i < l; i++) {{ + for (j = i + 1; j < l; j++) {{ + tmp = 0.; + for (k = starts[j]; k < ends[j]; k++) {{ + tmp += kernels[k] * coefficients[i][k]; + }} + for (k = starts[i]; k < ends[i]; k++) {{ + tmp += kernels[k] * coefficients[j - 1][k]; + }} + decisions[d] = tmp + intercepts[d]; + d = d + 1; + }} + }} + + int 
votes[N_INTERCEPTS]; + for (i = 0, d = 0, l = N_ROWS; i < l; i++) {{ + for (j = i + 1; j < l; j++) {{ + votes[d] = decisions[d] > 0 ? i : j; + d = d + 1; + }} + }} + + int amounts[N_CLASSES]; + for (i = 0, l = N_CLASSES; i < l; i++) {{ + amounts[i] = 0; + }} + for (i = 0; i < N_INTERCEPTS; i++) {{ + amounts[votes[i]] += 1; + }} + + int classVal = -1; + int classIdx = -1; + for (i = 0; i < N_CLASSES; i++) {{ + if (amounts[i] > classVal) {{ + classVal = amounts[i]; + classIdx= i; + }} + }} + return classIdx; + + }} +}} \ No newline at end of file diff --git a/sklearn_porter/estimator/classifier/SVC/templates/c/multi.classes.txt b/sklearn_porter/estimator/classifier/SVC/templates/c/multi.classes.txt deleted file mode 100644 index 0b3ab031..00000000 --- a/sklearn_porter/estimator/classifier/SVC/templates/c/multi.classes.txt +++ /dev/null @@ -1,25 +0,0 @@ - -int votes[{0}]; -for (i = 0, d = 0, l = {1}; i < l; i++) {{ - for (j = i + 1; j < l; j++) {{ - votes[d] = decisions[d] > 0 ? i : j; - d = d + 1; - }} -}} - -int amounts[{1}]; -for (i = 0, l = {1}; i < l; i++) {{ - amounts[i] = 0; -}} -for (i = 0, l = {0}; i < l; i++) {{ - amounts[votes[i]] += 1; -}} - -int class_val = -1, class_idx = -1; -for (i = 0, l = {1}; i < l; i++) {{ - if (amounts[i] > class_val) {{ - class_val = amounts[i]; - class_idx = i; - }} -}} -return classes[class_idx]; diff --git a/sklearn_porter/estimator/classifier/SVC/templates/c/multi.decisions.txt b/sklearn_porter/estimator/classifier/SVC/templates/c/multi.decisions.txt deleted file mode 100644 index 13cf68cf..00000000 --- a/sklearn_porter/estimator/classifier/SVC/templates/c/multi.decisions.txt +++ /dev/null @@ -1,15 +0,0 @@ - -double decisions[{0}]; -for (i = 0, d = 0, l = {1}; i < l; i++) {{ - for (j = i + 1; j < l; j++) {{ - double tmp = 0.; - for (k = starts[j]; k < ends[j]; k++) {{ - tmp += kernels[k] * coeffs[i][k]; - }} - for (k = starts[i]; k < ends[i]; k++) {{ - tmp += kernels[k] * coeffs[j - 1][k]; - }} - decisions[d] = tmp + inters[d]; - d = d + 1; - }} -}} diff --git a/sklearn_porter/estimator/classifier/SVC/templates/c/multi.kernel.linear.txt b/sklearn_porter/estimator/classifier/SVC/templates/c/multi.kernel.linear.txt deleted file mode 100644 index fba09fac..00000000 --- a/sklearn_porter/estimator/classifier/SVC/templates/c/multi.kernel.linear.txt +++ /dev/null @@ -1,11 +0,0 @@ - -// -double kernels[{0}]; -double kernel; -for (i = 0; i < {0}; i++) {{ - kernel = 0.; - for (j = 0; j < {1}; j++) {{ - kernel += svs[i][j] * atts[j]; - }} - kernels[i] = kernel; -}} diff --git a/sklearn_porter/estimator/classifier/SVC/templates/c/multi.kernel.poly.txt b/sklearn_porter/estimator/classifier/SVC/templates/c/multi.kernel.poly.txt deleted file mode 100644 index d9ec7f1f..00000000 --- a/sklearn_porter/estimator/classifier/SVC/templates/c/multi.kernel.poly.txt +++ /dev/null @@ -1,11 +0,0 @@ - -// (y+r)^d -double kernels[{0}]; -double kernel; -for (i = 0; i < {0}; i++) {{ - kernel = 0.; - for (j = 0; j < {1}; j++) {{ - kernel += svs[i][j] * atts[j]; - }} - kernels[i] = pow(({2} * kernel) + {3}, {4}); -}} diff --git a/sklearn_porter/estimator/classifier/SVC/templates/c/multi.kernel.rbf.txt b/sklearn_porter/estimator/classifier/SVC/templates/c/multi.kernel.rbf.txt deleted file mode 100644 index a9c19cb7..00000000 --- a/sklearn_porter/estimator/classifier/SVC/templates/c/multi.kernel.rbf.txt +++ /dev/null @@ -1,11 +0,0 @@ - -// exp(-y|x-x'|^2) -double kernels[{0}]; -double kernel; -for (i = 0; i < {0}; i++) {{ - kernel = 0.; - for (j = 0; j < {1}; j++) {{ - kernel += 
pow(svs[i][j] - atts[j], 2); - }} - kernels[i] = exp(-{2} * kernel); -}} diff --git a/sklearn_porter/estimator/classifier/SVC/templates/c/multi.kernel.sigmoid.txt b/sklearn_porter/estimator/classifier/SVC/templates/c/multi.kernel.sigmoid.txt deleted file mode 100644 index 22a9904b..00000000 --- a/sklearn_porter/estimator/classifier/SVC/templates/c/multi.kernel.sigmoid.txt +++ /dev/null @@ -1,11 +0,0 @@ - -// tanh(y+r) -double kernels[{0}]; -double kernel; -for (i = 0; i < {0}; i++) {{ - kernel = 0.; - for (j = 0; j < {1}; j++) {{ - kernel += svs[i][j] * atts[j]; - }} - kernels[i] = tanh(({2} * kernel) + {3}); -}} diff --git a/sklearn_porter/estimator/classifier/SVC/templates/c/starts.txt b/sklearn_porter/estimator/classifier/SVC/templates/c/starts.txt deleted file mode 100644 index bb2a721c..00000000 --- a/sklearn_porter/estimator/classifier/SVC/templates/c/starts.txt +++ /dev/null @@ -1,12 +0,0 @@ -int starts[{0}]; -for (i = 0; i < {0}; i++) {{ - if (i != 0) {{ - int start = 0; - for (j = 0; j < i; j++) {{ - start += n_svs[j]; - }} - starts[i] = start; - }} else {{ - starts[0] = 0; - }} -}} diff --git a/sklearn_porter/estimator/classifier/SVC/templates/js/method.txt b/sklearn_porter/estimator/classifier/SVC/templates/js/method.txt index a4b01d9c..2fbe93b3 100644 --- a/sklearn_porter/estimator/classifier/SVC/templates/js/method.txt +++ b/sklearn_porter/estimator/classifier/SVC/templates/js/method.txt @@ -1,56 +1,56 @@ this.{method_name} = function(features) {{ - var kernels = new Array(vectors.length); - var kernel; - switch (this.kernel) {{ - case 'LINEAR': + kernels = double[vectors.length]; + kernelVal; + switch (kernel[0]) {{ + case 'l': // - for (var i = 0; i < this.vectors.length; i++) {{ - kernel = 0.; - for (var j = 0; j < this.vectors[i].length; j++) {{ - kernel += this.vectors[i][j] * features[j]; + for (i = 0; i < vectors.length; i++) {{ + kernelVal = 0.; + for (j = 0; j < vectors[i].length; j++) {{ + kernelVal += vectors[i][j] * features[j]; }} - kernels[i] = kernel; + kernelVals[i] = kernelVal; }} break; - case 'POLY': + case 'p': // (y+r)^d - for (var i = 0; i < this.vectors.length; i++) {{ - kernel = 0.; - for (var j = 0; j < this.vectors[i].length; j++) {{ - kernel += this.vectors[i][j] * features[j]; + for (i = 0; i < vectors.length; i++) {{ + kernelVal = 0.; + for (j = 0; j < vectors[i].length; j++) {{ + kernelVal += vectors[i][j] * features[j]; }} - kernels[i] = Math.pow((this.gamma * kernel) + this.coef0, this.degree); + kernelVals[i] = Math.pow((gamma * kernelVal) + coef0, degree); }} break; - case 'RBF': + case 'r': // exp(-y|x-x'|^2) - for (var i = 0; i < this.vectors.length; i++) {{ - kernel = 0.; - for (var j = 0; j < this.vectors[i].length; j++) {{ - kernel += Math.pow(this.vectors[i][j] - features[j], 2); + for (i = 0; i < vectors.length; i++) {{ + kernelVal = 0.; + for (j = 0; j < vectors[i].length; j++) {{ + kernelVal += Math.pow(vectors[i][j] - features[j], 2); }} - kernels[i] = Math.exp(-this.gamma * kernel); + kernelVals[i] = Math.exp(-gamma * kernelVal); }} break; - case 'SIGMOID': + case 's': // tanh(y+r) - for (var i = 0; i < this.vectors.length; i++) {{ - kernel = 0.; - for (var j = 0; j < this.vectors[i].length; j++) {{ - kernel += this.vectors[i][j] * features[j]; + for (i = 0; i < vectors.length; i++) {{ + kernelVal = 0.; + for (j = 0; j < vectors[i].length; j++) {{ + kernelVal += vectors[i][j] * features[j]; }} - kernels[i] = Math.tanh((this.gamma * kernel) + this.coef0); + kernelVals[i] = Math.tanh((gamma * kernelVal) + coef0); }} break; }} - var 
starts = new Array(this.nClasses); - for (var i = 0; i < this.nClasses; i++) {{ + starts = new Array(nClasses); + for (i = 0; i < nClasses; i++) {{ if (i != 0) {{ - var start = 0; - for (var j = 0; j < i; j++) {{ - start += this.weights[j]; + start = 0; + for (j = 0; j < i; j++) {{ + start += weights[j]; }} starts[i] = start; }} else {{ @@ -58,25 +58,25 @@ this.{method_name} = function(features) {{ }} }} - var ends = new Array(this.nClasses); - for (var i = 0; i < this.nClasses; i++) {{ - ends[i] = this.weights[i] + starts[i]; + ends = new Array(nClasses); + for (i = 0; i < nClasses; i++) {{ + ends[i] = weights[i] + starts[i]; }} - if (this.nClasses == 2) {{ + if (nClasses == 2) {{ - for (var i = 0; i < kernels.length; i++) {{ + for (i = 0; i < kernels.length; i++) {{ kernels[i] = -kernels[i]; }} - var decision = 0.; - for (var k = starts[1]; k < ends[1]; k++) {{ - decision += kernels[k] * this.coefficients[0][k]; + decision = 0.; + for (k = starts[1]; k < ends[1]; k++) {{ + decision += kernels[k] * coefficients[0][k]; }} - for (var k = starts[0]; k < ends[0]; k++) {{ - decision += kernels[k] * this.coefficients[0][k]; + for (k = starts[0]; k < ends[0]; k++) {{ + decision += kernels[k] * coefficients[0][k]; }} - decision += this.intercepts[0]; + decision += intercepts[0]; if (decision > 0) {{ return 0; @@ -85,42 +85,42 @@ this.{method_name} = function(features) {{ }} else {{ - var decisions = new Array(this.nClasses); - for (var i = 0, d = 0, l = this.nClasses; i < l; i++) {{ - for (var j = i + 1; j < l; j++) {{ - var tmp = 0.; - for (var k = starts[j]; k < ends[j]; k++) {{ - tmp += this.coefficients[i][k] * kernels[k]; + decisions = new Array(nClasses); + for (i = 0, d = 0, l = nClasses; i < l; i++) {{ + for (j = i + 1; j < l; j++) {{ + tmp = 0.; + for (k = starts[j]; k < ends[j]; k++) {{ + tmp += coefficients[i][k] * kernels[k]; }} - for (var k = starts[i]; k < ends[i]; k++) {{ - tmp += this.coefficients[j - 1][k] * kernels[k]; + for (k = starts[i]; k < ends[i]; k++) {{ + tmp += coefficients[j - 1][k] * kernels[k]; }} - decisions[d] = tmp + this.intercepts[d]; + decisions[d] = tmp + intercepts[d]; d++; }} }} - var votes = new Array(this.intercepts.length); - for (var i = 0, d = 0, l = this.nClasses; i < l; i++) {{ - for (var j = i + 1; j < l; j++) {{ + votes = new Array(intercepts.length); + for (i = 0, d = 0, l = nClasses; i < l; i++) {{ + for (j = i + 1; j < l; j++) {{ votes[d] = decisions[d] > 0 ? i : j; d++; }} }} - var amounts = new Array(this.nClasses).fill(0); - for (var i = 0, l = votes.length; i < l; i++) {{ + amounts = new Array(nClasses).fill(0); + for (i = 0, l = votes.length; i < l; i++) {{ amounts[votes[i]] += 1; }} - var classVal = -1, classIdx = -1; - for (var i = 0, l = amounts.length; i < l; i++) {{ + classVal = -1, classIdx = -1; + for (i = 0, l = amounts.length; i < l; i++) {{ if (amounts[i] > classVal) {{ classVal = amounts[i]; classIdx= i; }} }} - return this.classes[classIdx]; + return classes[classIdx]; }} }} \ No newline at end of file
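
For reference, a minimal NumPy sketch of the one-vs-one voting that the regenerated method.txt template emits for the multiclass case (N_CLASSES > 2) with the RBF kernel. The function name `predict_like_template` and the hard-coded `gamma=0.001` are illustrative only; the attributes it reads (`support_vectors_`, `n_support_`, `dual_coef_`, `_intercept_`) are the same ones the porter embeds into the generated C file, so on the iris data its output should agree with `clf.predict`.

```python
import numpy as np
from sklearn.datasets import load_iris
from sklearn.svm import SVC

iris_data = load_iris()
X, y = iris_data.data, iris_data.target
clf = SVC(C=1.0, gamma=0.001, kernel='rbf', random_state=0).fit(X, y)

def predict_like_template(x, clf, gamma=0.001):
    # Same model attributes the porter embeds as vectors/coefficients/intercepts/weights.
    vectors = clf.support_vectors_        # N_VECTORS x N_FEATURES
    coefficients = clf.dual_coef_         # (N_CLASSES - 1) x N_VECTORS
    intercepts = clf._intercept_          # one entry per class pair
    weights = clf.n_support_              # support vectors per class
    n_classes = len(clf.classes_)

    # RBF kernel against every support vector: exp(-gamma * |x - x'|^2)
    kernels = np.exp(-gamma * np.sum((vectors - x) ** 2, axis=1))

    # Start/end offsets of each class's block of support vectors.
    starts = np.concatenate(([0], np.cumsum(weights)[:-1]))
    ends = starts + weights

    # One-vs-one decisions and majority vote, mirroring the multiclass branch.
    votes = np.zeros(n_classes, dtype=int)
    d = 0
    for i in range(n_classes):
        for j in range(i + 1, n_classes):
            decision = (kernels[starts[j]:ends[j]] @ coefficients[i, starts[j]:ends[j]]
                        + kernels[starts[i]:ends[i]] @ coefficients[j - 1, starts[i]:ends[i]]
                        + intercepts[d])
            votes[i if decision > 0 else j] += 1
            d += 1
    return clf.classes_[np.argmax(votes)]

print(all(predict_like_template(x, clf) == p for x, p in zip(X, clf.predict(X))))
```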
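The regenerated class.txt main() parses the feature vector from argv with atof and prints the predicted class index, so the exported file can be exercised from the command line. A hedged sketch of that round trip, assuming the exported source has been saved as svc.c (a hypothetical file name) and gcc is on PATH; -lm is required because the kernels call exp, pow and tanh from math.h.

```python
import subprocess
from sklearn.datasets import load_iris
from sklearn.svm import SVC

iris_data = load_iris()
X, y = iris_data.data, iris_data.target
clf = SVC(C=1.0, gamma=0.001, kernel='rbf', random_state=0).fit(X, y)

# Compile the exported translation (file name is an assumption).
subprocess.check_call(['gcc', 'svc.c', '-o', 'svc', '-lm'])

# Pass one sample as command-line arguments and compare with sklearn.
sample = X[0]
out = subprocess.check_output(['./svc'] + [str(v) for v in sample])
print(int(out.decode().strip()), clf.predict([sample])[0])  # both should be 0
```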