Usage example of com.sri.ai.praise.learning.parameterlearning.representation.dataset.DefaultDataset in project aic-praise (by aic-sri-international): class TableBayesianModelTest, method testSickSunColdModelWithDifferentDatapoints.
@Test
public void testSickSunColdModelWithDifferentDatapoints() {
// Dataset: 4 copies of (sick=1, sun=0, cold=1), 2 copies of (0, 0, 0), and 1 copy of (0, 0, 1) -- 7 datapoints in total
List<TableVariable> variables = list(sickVariable, sunVariable, coldVariable);
List<Integer> variableValues1 = list(1, 0, 1);
DefaultDatapoint datapoint1 = new DefaultDatapoint(variables, variableValues1);
List<DefaultDatapoint> datapoints = list();
int numberOfDatapoints1 = 4;
for (int i = 1; i <= numberOfDatapoints1; i++) {
datapoints.add(datapoint1);
}
List<Integer> variableValues2 = list(0, 0, 0);
DefaultDatapoint datapoint2 = new DefaultDatapoint(variables, variableValues2);
int numberOfDatapoints2 = 2;
for (int i = 1; i <= numberOfDatapoints2; i++) {
datapoints.add(datapoint2);
}
List<Integer> variableValues3 = list(0, 0, 1);
DefaultDatapoint datapoint3 = new DefaultDatapoint(variables, variableValues3);
int numberOfDatapoints3 = 1;
for (int i = 1; i <= numberOfDatapoints3; i++) {
datapoints.add(datapoint3);
}
DefaultDataset dataset = new DefaultDataset(datapoints);
// Learning: estimate the parameters of every node from the complete dataset
sickSunColdModel = (TableBayesianModel) sickSunColdModel.learnModelParametersFromCompleteData(dataset);
List<? extends TableBayesianNode> learnedNodes = sickSunColdModel.getNodes();
// Testing
// For the sickNode first; entries are indexed by (sick, [sun, cold]).
// The assertions follow the (1 + count) / (2 + total) pattern of add-one smoothing over a binary variable.
// Expected parameters: {(0, [0, 0])=0.75, (1, [0, 0])=0.25, (1, [1, 0])=0.5, (1, [1, 1])=0.5, (0, [1, 1])=0.5, (0, [1, 0])=0.5, (0, [0, 1])=2/7, (1, [0, 1])=5/7}
TableBayesianNode learnedSickNode = learnedNodes.get(0);
LinkedHashMap<TableVariable, Integer> variablesAndTheirValues = map();
variablesAndTheirValues.put(sickVariable, 0);
variablesAndTheirValues.put(sunVariable, 0);
variablesAndTheirValues.put(coldVariable, 0);
// Parameter for (0, [0, 0]): the 2 copies of datapoint2 all have sick=0 under this parent configuration
Assert.assertEquals(Double.valueOf((1.0 + numberOfDatapoints2) / (2 + numberOfDatapoints2)), learnedSickNode.getEntryFor(variablesAndTheirValues));
// Parameter for (1, [0, 0]): sick=1 never observed under this parent configuration
Assert.assertEquals(Double.valueOf(1.0 / (2 + numberOfDatapoints2)), learnedSickNode.getEntryFor(variablesAndTheirValues));
// Parameter for (1, [1, 0]): parent configuration never observed, so the smoothed estimate stays uniform
variablesAndTheirValues.put(sunVariable, 1);
Assert.assertEquals(Double.valueOf(0.5), learnedSickNode.getEntryFor(variablesAndTheirValues));
// Parameter for (1, [1, 1]): unseen parent configuration, uniform
variablesAndTheirValues.put(coldVariable, 1);
Assert.assertEquals(Double.valueOf(0.5), learnedSickNode.getEntryFor(variablesAndTheirValues));
// Parameter for (0, [1, 1]): unseen parent configuration, uniform
variablesAndTheirValues.put(sickVariable, 0);
Assert.assertEquals(Double.valueOf(0.5), learnedSickNode.getEntryFor(variablesAndTheirValues));
// Parameter for (0, [1, 0]): unseen parent configuration, uniform
variablesAndTheirValues.put(coldVariable, 0);
Assert.assertEquals(Double.valueOf(0.5), learnedSickNode.getEntryFor(variablesAndTheirValues));
// Parameter for (0, [0, 1]): datapoint3 (1 copy) has sick=0 here, datapoint1 (4 copies) has sick=1
variablesAndTheirValues.put(sunVariable, 0);
variablesAndTheirValues.put(coldVariable, 1);
Assert.assertEquals(Double.valueOf((1.0 + numberOfDatapoints3) / (2 + numberOfDatapoints1 + numberOfDatapoints3)), learnedSickNode.getEntryFor(variablesAndTheirValues));
// Parameter for (1, [0, 1]):
variablesAndTheirValues.put(sickVariable, 1);
Assert.assertEquals(Double.valueOf((1.0 + numberOfDatapoints1) / (2 + numberOfDatapoints1 + numberOfDatapoints3)), learnedSickNode.getEntryFor(variablesAndTheirValues));
// For the sunNode: sun=0 in all 7 datapoints
// Expected parameters: {(0, [])=8/9, (1, [])=1/9}
TableBayesianNode learnedSunNode = learnedNodes.get(1);
variablesAndTheirValues = map();
variablesAndTheirValues.put(sunVariable, 0);
// Parameter for (0, []):
Assert.assertEquals(Double.valueOf((1.0 + numberOfDatapoints1 + numberOfDatapoints2 + numberOfDatapoints3) / (2 + numberOfDatapoints1 + numberOfDatapoints2 + numberOfDatapoints3)), learnedSunNode.getEntryFor(variablesAndTheirValues));
// Parameter for (1, []):
variablesAndTheirValues.put(sunVariable, 1);
Assert.assertEquals(Double.valueOf(1.0 / (2 + numberOfDatapoints1 + numberOfDatapoints2 + numberOfDatapoints3)), learnedSunNode.getEntryFor(variablesAndTheirValues));
// For the coldNode: cold=0 in the 2 copies of datapoint2, cold=1 in the other 5 datapoints
// Expected parameters: {(0, [])=3/9, (1, [])=6/9}
TableBayesianNode learnedColdNode = learnedNodes.get(2);
variablesAndTheirValues = map();
variablesAndTheirValues.put(coldVariable, 0);
// Parameter for (0, []):
Assert.assertEquals(Double.valueOf((1.0 + numberOfDatapoints2) / (2 + numberOfDatapoints1 + numberOfDatapoints2 + numberOfDatapoints3)), learnedColdNode.getEntryFor(variablesAndTheirValues));
// Parameter for (1, []):
variablesAndTheirValues.put(coldVariable, 1);
Assert.assertEquals(Double.valueOf((1.0 + numberOfDatapoints1 + numberOfDatapoints3) / (2 + numberOfDatapoints1 + numberOfDatapoints2 + numberOfDatapoints3)), learnedColdNode.getEntryFor(variablesAndTheirValues));
}
Usage example of com.sri.ai.praise.learning.parameterlearning.representation.dataset.DefaultDataset in project aic-praise (by aic-sri-international): class TableBayesianModelTest, method testSickSunColdModel.
@Test
public void testSickSunColdModel() {
// Dataset: two identical datapoints, each observing (sick=1, sun=0, cold=1)
List<TableVariable> observedVariables = list(sickVariable, sunVariable, coldVariable);
List<Integer> observedValues = list(1, 0, 1);
DefaultDatapoint repeatedDatapoint = new DefaultDatapoint(observedVariables, observedValues);
List<DefaultDatapoint> allDatapoints = list();
int datapointCount = 2;
for (int added = 0; added < datapointCount; added++) {
allDatapoints.add(repeatedDatapoint);
}
DefaultDataset completeData = new DefaultDataset(allDatapoints);
// Learning: estimate the parameters of every node from the complete dataset
sickSunColdModel = (TableBayesianModel) sickSunColdModel.learnModelParametersFromCompleteData(completeData);
List<? extends TableBayesianNode> learnedNodes = sickSunColdModel.getNodes();
// Testing the sickNode (node 0); entries are indexed by (sick, [sun, cold]).
// Expected parameters: {(0, [0, 0])=0.5, (1, [0, 0])=0.5, (1, [1, 0])=0.5, (1, [1, 1])=0.5, (0, [1, 1])=0.5, (0, [1, 0])=0.5, (0, [0, 1])=0.25, (1, [0, 1])=0.75}
TableBayesianNode learnedSickNode = learnedNodes.get(0);
LinkedHashMap<TableVariable, Integer> assignment = map();
assignment.put(sickVariable, 0);
assignment.put(sunVariable, 0);
assignment.put(coldVariable, 0);
// (0, [0, 0]): parent configuration never observed, so the estimate stays uniform
Assert.assertEquals(Double.valueOf(0.5), learnedSickNode.getEntryFor(assignment));
// (1, [0, 0]): uniform for the same reason
assignment.put(sickVariable, 1);
Assert.assertEquals(Double.valueOf(0.5), learnedSickNode.getEntryFor(assignment));
// (1, [1, 0]): unseen parent configuration
assignment.put(sunVariable, 1);
Assert.assertEquals(Double.valueOf(0.5), learnedSickNode.getEntryFor(assignment));
// (1, [1, 1]): unseen parent configuration
assignment.put(coldVariable, 1);
Assert.assertEquals(Double.valueOf(0.5), learnedSickNode.getEntryFor(assignment));
// (0, [1, 1]): unseen parent configuration
assignment.put(sickVariable, 0);
Assert.assertEquals(Double.valueOf(0.5), learnedSickNode.getEntryFor(assignment));
// (0, [1, 0]): unseen parent configuration
assignment.put(coldVariable, 0);
Assert.assertEquals(Double.valueOf(0.5), learnedSickNode.getEntryFor(assignment));
// (0, [0, 1]): sick=0 never seen under the observed parent configuration -> 1 / (2 + n)
assignment.put(sunVariable, 0);
assignment.put(coldVariable, 1);
Assert.assertEquals(Double.valueOf(1.0 / (2 + datapointCount)), learnedSickNode.getEntryFor(assignment));
// (1, [0, 1]): sick=1 seen in every datapoint under this parent configuration -> (1 + n) / (2 + n)
assignment.put(sickVariable, 1);
Assert.assertEquals(Double.valueOf((1.0 + datapointCount) / (2 + datapointCount)), learnedSickNode.getEntryFor(assignment));
// Testing the sunNode (node 1): sun=0 in every datapoint
// Expected parameters: {(0, [])=0.75, (1, [])=0.25}
TableBayesianNode learnedSunNode = learnedNodes.get(1);
assignment = map();
assignment.put(sunVariable, 0);
// (0, []):
Assert.assertEquals(Double.valueOf((1.0 + datapointCount) / (2 + datapointCount)), learnedSunNode.getEntryFor(assignment));
// (1, []):
assignment.put(sunVariable, 1);
Assert.assertEquals(Double.valueOf(1.0 / (2 + datapointCount)), learnedSunNode.getEntryFor(assignment));
// Testing the coldNode (node 2): cold=1 in every datapoint
// Expected parameters: {(0, [])=0.25, (1, [])=0.75}
TableBayesianNode learnedColdNode = learnedNodes.get(2);
assignment = map();
assignment.put(coldVariable, 0);
// (0, []):
Assert.assertEquals(Double.valueOf(1.0 / (2 + datapointCount)), learnedColdNode.getEntryFor(assignment));
// (1, []):
assignment.put(coldVariable, 1);
Assert.assertEquals(Double.valueOf((1.0 + datapointCount) / (2 + datapointCount)), learnedColdNode.getEntryFor(assignment));
}
Usage example of com.sri.ai.praise.learning.parameterlearning.representation.dataset.DefaultDataset in project aic-praise (by aic-sri-international): class TableBayesianModelTest, method printSickSunColdModelTest.
/**
 * Learns the sick/sun/cold model from a small dataset, reports the learning time,
 * and prints a few learned entries of the sick node for manual inspection.
 * Not a JUnit test: intended to be called by hand (e.g. from a main method).
 */
public static void printSickSunColdModelTest() {
// Dataset: 2 copies of the datapoint (sick=1, sun=0, cold=1)
List<TableVariable> variables = list(sickVariable, sunVariable, coldVariable);
List<Integer> variableValues = list(1, 0, 1);
DefaultDatapoint datapoint = new DefaultDatapoint(variables, variableValues);
List<DefaultDatapoint> datapoints = list();
int numberOfDatapoints = 2;
for (int i = 1; i <= numberOfDatapoints; i++) {
datapoints.add(datapoint);
}
DefaultDataset dataset = new DefaultDataset(datapoints);
// Learning, timed with wall-clock milliseconds
long startTime = System.currentTimeMillis();
sickSunColdModel = (TableBayesianModel) sickSunColdModel.learnModelParametersFromCompleteData(dataset);
long stopTime = System.currentTimeMillis();
long elapsedTime = stopTime - startTime;
// Fixed typo in the printed message: "miliseconds" -> "milliseconds"
System.out.println("Elapsed time for learning with " + numberOfDatapoints + " datapoints: " + elapsedTime + " milliseconds \n");
List<? extends TableBayesianNode> learnedNodes = sickSunColdModel.getNodes();
// Testing: print expected vs. actual entries for the sick node; entries are indexed by (sick, [sun, cold])
String expectedParametersForSick = "{(0, [0, 0])=0.5, (1, [0, 0])=0.5, (0, [0, 1])=0.25, (1, [0, 1])=0.75, (0, [1, 0])=0.5, (1, [1, 0])=0.5, (0, [1, 1])=0.5, (1, [1, 1])=0.5}";
System.out.println("Expected parameters for sick (with 2 datapoints):\n" + expectedParametersForSick + "\n");
TableBayesianNode learnedSickNode = learnedNodes.get(0);
LinkedHashMap<TableVariable, Integer> variablesAndTheirValues = new LinkedHashMap<TableVariable, Integer>();
variablesAndTheirValues.put(sickVariable, 1);
variablesAndTheirValues.put(sunVariable, 0);
variablesAndTheirValues.put(coldVariable, 1);
System.out.println("Actual entries for sick:");
// (1, [0, 1]) -- the observed datapoint's configuration
System.out.println("entryFor(" + variablesAndTheirValues.get(sickVariable) + ", [" + variablesAndTheirValues.get(sunVariable) + ", " + variablesAndTheirValues.get(coldVariable) + "]) = " + learnedSickNode.getEntryFor(variablesAndTheirValues));
// (0, [0, 1])
variablesAndTheirValues.put(sickVariable, 0);
System.out.println("entryFor(" + variablesAndTheirValues.get(sickVariable) + ", [" + variablesAndTheirValues.get(sunVariable) + ", " + variablesAndTheirValues.get(coldVariable) + "]) = " + learnedSickNode.getEntryFor(variablesAndTheirValues));
// (0, [0, 0])
variablesAndTheirValues.put(coldVariable, 0);
System.out.println("entryFor(" + variablesAndTheirValues.get(sickVariable) + ", [" + variablesAndTheirValues.get(sunVariable) + ", " + variablesAndTheirValues.get(coldVariable) + "]) = " + learnedSickNode.getEntryFor(variablesAndTheirValues));
}
Usage example of com.sri.ai.praise.learning.parameterlearning.representation.dataset.DefaultDataset in project aic-praise (by aic-sri-international): class ExpressionBayesianModelTest, method testEarthquakeBurglaryAlarmModel.
/**
 * For the alarmNode:
 * There are four families, one per (Earthquake, Burglary) value pair, each carrying two parameters
 * (one for Alarm = 1 and one for Alarm = 0).
 *
 * Final families:
 * F1: [Condition: Earthquake = 1 and Burglary = 1, Parameters: [Param1, OneMinusParam1]]
 * F2: [Condition: Earthquake = 1 and Burglary = 0, Parameters: [Param2, OneMinusParam2]]
 * F3: [Condition: Earthquake = 0 and Burglary = 1, Parameters: [Param3, OneMinusParam3]]
 * F4: [Condition: Earthquake = 0 and Burglary = 0, Parameters: [Param4, OneMinusParam4]]
 *
 * For the burglaryNode:
 * A single family with two parameters (one for Burglary = 1, the other for Burglary = 0):
 * F1burglary = [Condition: true, Parameters: [Param5, OneMinusParam5]]
 *
 * For the earthquakeNode:
 * No parameters to learn, hence no families; its expressionFactor is a constant prior probability
 * set by the user (P(Earthquake) is set to 1% in generateEarthquakeBurglaryAlarmModel).
 */
@Test
public void testEarthquakeBurglaryAlarmModel() {
ExpressionBayesianModel model = generateEarthquakeBurglaryAlarmModel();
// Dataset - datapoint variable order is (Alarm, Earthquake, Burglary)
int numberOfDatapoints1 = 1; // copies of (1, 0, 1)
int numberOfDatapoints2 = 2; // copies of (1, 1, 1)
DefaultDataset dataset = generateDatasetForEarthquakeBurglaryAlarmModel(numberOfDatapoints1, numberOfDatapoints2);
int totalNumberOfDatapoints = numberOfDatapoints1 + numberOfDatapoints2;
// Expected expression for the learned alarmNode: only families F3 and F1 receive data,
// so the remaining families stay at the uniform 0.5
Expression expectedParam3inF3 = parse("(1 + " + numberOfDatapoints1 + ") / (2 + " + numberOfDatapoints1 + ")");
Expression expectedOneMinusParam3inF3 = parse("1 / (2 + " + numberOfDatapoints1 + ")");
Expression expectedParam1inF1 = parse("(1 + " + numberOfDatapoints2 + ") / (2 + " + numberOfDatapoints2 + ")");
Expression expectedOneMinusParam1inF1 = parse("1 / (2 + " + numberOfDatapoints2 + ")");
Expression expectedAlarmNode = parse("if Earthquake = 0 and Burglary = 1 then if Alarm = 1 then " + expectedParam3inF3 + " else " + expectedOneMinusParam3inF3 + " else if Earthquake = 1 and Burglary = 1 then if Alarm = 1 then " + expectedParam1inF1 + " else " + expectedOneMinusParam1inF1 + " else 0.5");
// Expected expression for the learned burglaryNode: Burglary = 1 in every datapoint
Expression expectedParam5inF1burglary = parse("(1 + " + totalNumberOfDatapoints + ") / (2 + " + totalNumberOfDatapoints + ")");
Expression expectedOneMinusParam5inF1burglary = parse("1 / (2 + " + totalNumberOfDatapoints + ")");
Expression expectedBurglaryNode = parse("if Burglary = 1 then " + expectedParam5inF1burglary + " else " + expectedOneMinusParam5inF1burglary);
// Expected expression for the earthquakeNode: the fixed prior, untouched by learning
Expression expectedEarthquakeNode = parse("if Earthquake = 1 then 0.01 else 0.99");
// Learning
model = (ExpressionBayesianModel) model.learnModelParametersFromCompleteData(dataset);
ExpressionBayesianNode learnedAlarmNode = model.getNodes().get(0);
ExpressionBayesianNode learnedEarthquakeNode = model.getNodes().get(1);
ExpressionBayesianNode learnedBurglaryNode = model.getNodes().get(2);
// Verification: each symbolic equality "expected = learned" must simplify to true under the model's context
Expression alarmNodeCheck = contextForEarthquakeBurglaryAlarmModel.evaluate(Equality.make(expectedAlarmNode, learnedAlarmNode));
Expression earthquakeNodeCheck = contextForEarthquakeBurglaryAlarmModel.evaluate(Equality.make(expectedEarthquakeNode, learnedEarthquakeNode));
Expression burglaryNodeCheck = contextForEarthquakeBurglaryAlarmModel.evaluate(Equality.make(expectedBurglaryNode, learnedBurglaryNode));
// Assertions
assertEquals(Expressions.TRUE, alarmNodeCheck);
assertEquals(Expressions.TRUE, earthquakeNodeCheck);
assertEquals(Expressions.TRUE, burglaryNodeCheck);
}
Usage example of com.sri.ai.praise.learning.parameterlearning.representation.dataset.DefaultDataset in project aic-praise (by aic-sri-international): class ExpressionBayesianModelTest, method testChildParentModel1.
/**
 * A single family holds both parameters.
 * expressionForChildNode: if Child < 5 then Param1 else Param2
 *
 * Final families:
 * F1: [Condition = true, Parameters: [Param1, Param2]]
 */
@Test
public void testChildParentModel1() {
ExpressionBayesianModel model = generateChildParentModel(parse("if Child < 5 then Param1 else Param2"));
int numberOfDatapoints1 = 1; // copies of (Child, Parent) = (1, 2)
int numberOfDatapoints2 = 2; // copies of (Child, Parent) = (5, 1)
int totalNumberOfDatapoints = numberOfDatapoints1 + numberOfDatapoints2;
DefaultDataset dataset = generateDatasetForChildParentModel(numberOfDatapoints1, numberOfDatapoints2, 0, 0);
// Learning
model = (ExpressionBayesianModel) model.learnModelParametersFromCompleteData(dataset);
ExpressionBayesianNode learnedChild = model.getNodes().get(0);
ExpressionBayesianNode learnedParent = model.getNodes().get(1);
// Expected learned expressions for both nodes
Expression expectedParam1inF1 = parse("( (4 + " + numberOfDatapoints1 + ")/(5 + " + totalNumberOfDatapoints + ") ) / 4");
Expression expectedParam2inF1 = parse("(1 + " + numberOfDatapoints2 + ")/(5 + " + totalNumberOfDatapoints + ")");
Expression expectedChildExpression = parse("if Child < 5 then " + expectedParam1inF1 + " else " + expectedParam2inF1);
// 0.2 -- presumably Parent's fixed uniform prior over 5 values; confirm against generateChildParentModel
Expression expectedParentExpression = parse("0.2");
// Verification: each symbolic equality "expected = learned" must simplify to true under the model's context
Expression childCheck = Equality.make(expectedChildExpression, learnedChild);
Expression parentCheck = Equality.make(expectedParentExpression, learnedParent);
childCheck = contextForChildParentModel.evaluate(childCheck);
parentCheck = contextForChildParentModel.evaluate(parentCheck);
assertEquals(Expressions.TRUE, childCheck);
assertEquals(Expressions.TRUE, parentCheck);
}
Aggregations