-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathGaussianProcessClassifier.cs
More file actions
executable file
·110 lines (94 loc) · 4.25 KB
/
GaussianProcessClassifier.cs
File metadata and controls
executable file
·110 lines (94 loc) · 4.25 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using Microsoft.ML.Probabilistic.Models;
using Microsoft.ML.Probabilistic.Math;
using Microsoft.ML.Probabilistic.Distributions;
using Microsoft.ML.Probabilistic.Distributions.Kernels;
namespace Microsoft.ML.Probabilistic.Tutorials
{
[Example("Applications", "A Gaussian Process classifier example")]
public class GaussianProcessClassifier
{
    /// <summary>
    /// Trains a sparse Gaussian Process binary classifier on a small 2-D data set,
    /// compares the model evidence of three candidate kernels, and then prints
    /// the posterior predictions on the training set for the last kernel tried.
    /// Returns immediately if the engine is not using Expectation Propagation,
    /// since this model is only supported under that algorithm.
    /// </summary>
    public void Run()
    {
        InferenceEngine engine = new InferenceEngine();
        if (!(engine.Algorithm is Algorithms.ExpectationPropagation))
        {
            Console.WriteLine("This example only runs with Expectation Propagation");
            return;
        }

        // The training data: six 2-D input points with binary class labels.
        Vector[] inputs = new Vector[]
        {
            Vector.FromArray(new double[2] { 0, 0 }),
            Vector.FromArray(new double[2] { 0, 1 }),
            Vector.FromArray(new double[2] { 1, 0 }),
            Vector.FromArray(new double[2] { 0, 0.5 }),
            Vector.FromArray(new double[2] { 1.5, 0 }),
            Vector.FromArray(new double[2] { 0.5, 1.0 })
        };
        bool[] outputs = { true, true, false, true, false, false };

        // Open an evidence block so the marginal likelihood of each kernel
        // choice can be computed below for model comparison.
        Variable<bool> evidence = Variable.Bernoulli(0.5).Named("evidence");
        IfBlock block = Variable.If(evidence);

        // The sparse GP prior over functions; its value is filled in per trial.
        Variable<SparseGP> prior = Variable.New<SparseGP>().Named("prior");

        // The random function drawn from the GP prior.
        Variable<IFunction> f = Variable<IFunction>.Random(prior).Named("f");

        // Observation model: evaluate f at each input, add Gaussian noise
        // (variance 0.1), and threshold at zero to produce the boolean label.
        VariableArray<Vector> x = Variable.Observed(inputs).Named("x");
        Range j = x.Range.Named("j");
        VariableArray<bool> y = Variable.Observed(outputs, j).Named("y");
        Variable<double> score = Variable.FunctionEvaluate(f, x[j]);
        y[j] = (Variable.GaussianFromMeanAndVariance(score, 0.1) > 0);

        // Close the evidence block.
        block.CloseBlock();

        // Basis (inducing) points for the sparse GP approximation.
        Vector[] basis = new Vector[]
        {
            Vector.FromArray(new double[2] { 0.2, 0.2 }),
            Vector.FromArray(new double[2] { 0.2, 0.8 }),
            Vector.FromArray(new double[2] { 0.8, 0.2 }),
            Vector.FromArray(new double[2] { 0.8, 0.8 })
        };

        // Score three kernels by model evidence: two squared-exponential
        // kernels with different log length-scales, then a neural-net kernel.
        for (int trial = 0; trial < 3; trial++)
        {
            IKernelFunction kf;
            if (trial == 0)
            {
                kf = new SquaredExponential(-0.0);
            }
            else if (trial == 1)
            {
                kf = new SquaredExponential(-0.5);
            }
            else
            {
                kf = new NNKernel(new double[] { 0.0, 0.0 }, -1.0);
            }

            // Fill in the sparse GP prior for this kernel.
            GaussianProcess gp = new GaussianProcess(new ConstantFunction(0), kf);
            prior.ObservedValue = new SparseGP(new SparseGPFixed(gp, basis));

            // Log evidence of the model under this kernel (higher is better).
            // Renamed from the misleading "NNscore": this score is computed
            // for every kernel, not only the NN kernel.
            double logEvidence = engine.Infer<Bernoulli>(evidence).LogOdds;
            Console.WriteLine("{0} evidence = {1}", kf, logEvidence.ToString("g4"));
        }

        // Infer the posterior sparse GP. This reflects the most recently
        // observed prior, i.e. the NN kernel from the final loop iteration.
        SparseGP sgp = engine.Infer<SparseGP>(f);

        // Check that the training set is classified correctly: a point is
        // "correct" when the sign of the posterior mean matches its label.
        Console.WriteLine();
        Console.WriteLine("Predictions on training set:");
        for (int i = 0; i < outputs.Length; i++)
        {
            Gaussian post = sgp.Marginal(inputs[i]);
            double postMean = post.GetMean();
            string comment = (outputs[i] == (postMean > 0.0)) ? "correct" : "incorrect";
            Console.WriteLine("f({0}) = {1} ({2})", inputs[i], post, comment);
        }
    }
}
}