Use of java.util.function.Function in project gatk by broadinstitute.
In the class XHMMModelUnitTest, the method testLogEmissionProbability:
@Test(dataProvider = "testData", dependsOnMethods = "testInstantiation")
public void testLogEmissionProbability(final double eventStartProbability, final double meanEventSize, final double deletionMean, final double duplicationMean) {
final XHMMModel model = new XHMMModel(eventStartProbability, meanEventSize, deletionMean, duplicationMean);
final Target target = new Target("NAME");
final double logDenominator = Math.log(Math.sqrt(2 * Math.PI));
final Function<Double, Double> neutralEmission = x -> -.5 * (x * x) - logDenominator;
final Function<Double, Double> deletionEmission = x -> -.5 * Math.pow(x - deletionMean, 2) - logDenominator;
final Function<Double, Double> duplicationEmission = x -> -.5 * Math.pow(x - duplicationMean, 2) - logDenominator;
for (final double coverage : TEST_COVERAGE_VALUES) {
final double neutralObserved = model.logEmissionProbability(new XHMMEmissionData(coverage), CopyNumberTriState.NEUTRAL, target);
final double deletionObserved = model.logEmissionProbability(new XHMMEmissionData(coverage), CopyNumberTriState.DELETION, target);
final double duplicationObserved = model.logEmissionProbability(new XHMMEmissionData(coverage), CopyNumberTriState.DUPLICATION, target);
final double neutralExpected = neutralEmission.apply(coverage);
final double deletionExpected = deletionEmission.apply(coverage);
final double duplicationExpected = duplicationEmission.apply(coverage);
Assert.assertEquals(neutralObserved, neutralExpected, "neutral emission for " + coverage);
Assert.assertEquals(deletionObserved, deletionExpected, "deletion emission for " + coverage);
Assert.assertEquals(duplicationObserved, duplicationExpected, "duplication emission for " + coverage);
}
}
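For reference, each lambda above is the log-density of a unit-variance Gaussian, log N(x; mu, 1) = -0.5 * (x - mu)^2 - log(sqrt(2 * pi)), with mu = 0 for the neutral state and the deletion or duplication mean otherwise. A minimal self-contained check of that identity (plain JDK only; the mean below is an illustrative value, not a GATK constant):

import java.util.function.Function;

public class LogGaussianSketch {
    public static void main(String[] args) {
        final double mean = -3.0; // stand-in for a deletion mean; illustrative only
        final double logDenominator = Math.log(Math.sqrt(2 * Math.PI));
        // log of the unit-variance Gaussian density, mirroring the expected values in the test
        final Function<Double, Double> logDensity = x -> -0.5 * Math.pow(x - mean, 2) - logDenominator;
        System.out.println(logDensity.apply(0.0)); // -4.5 - log(sqrt(2*pi)) ~= -5.4189
    }
}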
Use of java.util.function.Function in project gatk-protected by broadinstitute.
In the class AlleleFractionSegmenter, the method relearnAdditionalParameters:
@Override
protected void relearnAdditionalParameters(final ExpectationStep eStep) {
final Function<AlleleFractionGlobalParameters, Double> emissionLogLikelihood = params -> {
double logLikelihood = 0.0;
for (int position = 0; position < numPositions(); position++) {
for (int state = 0; state < numStates(); state++) {
final double eStepPosterior = eStep.pStateAtPosition(state, position);
logLikelihood += eStepPosterior < NEGLIGIBLE_POSTERIOR_FOR_M_STEP ? 0 : eStepPosterior * AlleleFractionHMM.logEmissionProbability(data.get(position), getState(state), params, allelicPoN);
}
}
return logLikelihood;
};
final Function<Double, Double> meanBiasObjective = mean -> emissionLogLikelihood.apply(globalParameters.copyWithNewMeanBias(mean));
final double newMeanBias = OptimizationUtils.argmax(meanBiasObjective, 0, AlleleFractionInitializer.MAX_REASONABLE_MEAN_BIAS, globalParameters.getMeanBias(), RELATIVE_TOLERANCE_FOR_OPTIMIZATION, ABSOLUTE_TOLERANCE_FOR_OPTIMIZATION, MAX_EVALUATIONS_FOR_OPTIMIZATION);
final Function<Double, Double> biasVarianceObjective = variance -> emissionLogLikelihood.apply(globalParameters.copyWithNewBiasVariance(variance));
final double newBiasVariance = OptimizationUtils.argmax(biasVarianceObjective, 0, AlleleFractionInitializer.MAX_REASONABLE_BIAS_VARIANCE, globalParameters.getBiasVariance(), RELATIVE_TOLERANCE_FOR_OPTIMIZATION, ABSOLUTE_TOLERANCE_FOR_OPTIMIZATION, MAX_EVALUATIONS_FOR_OPTIMIZATION);
final Function<Double, Double> outlierProbabilityObjective = pOutlier -> emissionLogLikelihood.apply(globalParameters.copyWithNewOutlierProbability(pOutlier));
final double newOutlierProbability = OptimizationUtils.argmax(outlierProbabilityObjective, 0, AlleleFractionInitializer.MAX_REASONABLE_OUTLIER_PROBABILITY, globalParameters.getOutlierProbability(), RELATIVE_TOLERANCE_FOR_OPTIMIZATION, ABSOLUTE_TOLERANCE_FOR_OPTIMIZATION, MAX_EVALUATIONS_FOR_OPTIMIZATION);
globalParameters = new AlleleFractionGlobalParameters(newMeanBias, newBiasVariance, newOutlierProbability);
logger.info(String.format("Global allelic bias parameters learned. Mean allelic bias: %f, variance of allelic bias: %f, outlier probability: %f.", newMeanBias, newBiasVariance, newOutlierProbability));
}
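OptimizationUtils.argmax is a GATK-internal helper; assuming it wraps a univariate Brent-style optimizer, each of the three coordinate updates above can be sketched directly with Apache Commons Math (bounds, start point, and tolerances below are illustrative, not the project's constants):

import java.util.function.Function;
import org.apache.commons.math3.optim.MaxEval;
import org.apache.commons.math3.optim.nonlinear.scalar.GoalType;
import org.apache.commons.math3.optim.univariate.BrentOptimizer;
import org.apache.commons.math3.optim.univariate.SearchInterval;
import org.apache.commons.math3.optim.univariate.UnivariateObjectiveFunction;

public class ArgmaxSketch {
    // Stand-in for OptimizationUtils.argmax: maximize f on [lo, hi] starting from guess.
    static double argmax(final Function<Double, Double> f, final double lo, final double hi, final double guess) {
        final BrentOptimizer optimizer = new BrentOptimizer(1e-6, 1e-8); // relative, absolute tolerance
        return optimizer.optimize(
                new MaxEval(100),
                new UnivariateObjectiveFunction(f::apply),
                GoalType.MAXIMIZE,
                new SearchInterval(lo, hi, guess)).getPoint();
    }

    public static void main(String[] args) {
        // toy objective with a known maximum at 0.3
        System.out.println(argmax(x -> -(x - 0.3) * (x - 0.3), 0.0, 1.0, 0.5));
    }
}

Each parameter is maximized while the other two stay fixed, i.e. a coordinate-ascent M-step; the outer EM loop, not any single argmax, is what drives joint convergence.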
Use of java.util.function.Function in project Gargoyle by callakrsos.
In the class DbUtil, the method getTransactionedScope:
public static <T> int getTransactionedScope(Connection con, T userObj, Function<T, List<String>> sqlConverter, Consumer<Exception> exceptionHandler) throws Exception {
int result = -1;
try {
LOGGER.debug("is AutoCommit ? : {}", con.getAutoCommit());
con.setAutoCommit(false);
List<String> apply = sqlConverter.apply(userObj);
Statement createStatement = con.createStatement();
for (String sql : apply) {
/*
 * In SQLite, adding an SQL string that contains only whitespace to the batch causes an error.
 * On inspection the isEmpty check seemed to be the problem, so blank statements are skipped.
 */
if (ValueUtil.isEmpty(sql))
continue;
LOGGER.debug(sql);
createStatement.addBatch(sql);
}
int[] executeBatch = createStatement.executeBatch();
con.commit();
result = (int) IntStream.of(executeBatch).filter(v -> v == 0).count();
} catch (Exception e) {
con.rollback();
exceptionHandler.accept(e);
result = -1;
} finally {
// no commit here: the try block has already committed, and committing after a rollback would be wrong
close(con);
}
return result;
}
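A hypothetical call site, to make the contract concrete (the JDBC URL and table name are placeholders invented for this sketch): the converter maps the user object to a batch of SQL strings, and the handler runs only after the rollback. Note that the returned value counts batch entries that reported zero affected rows, and -1 signals failure.

import java.sql.Connection;
import java.sql.DriverManager;
import java.util.Arrays;
import java.util.stream.Collectors;

public class DbUtilUsageSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical usage; the URL and table are placeholders and must exist beforehand.
        Connection con = DriverManager.getConnection("jdbc:sqlite:sample.db");
        int zeroRowStatements = DbUtil.getTransactionedScope(
                con,
                Arrays.asList("a", "b"),
                names -> names.stream()
                        .map(n -> "INSERT INTO item (name) VALUES ('" + n + "')")
                        .collect(Collectors.toList()),
                ex -> System.err.println("batch failed and was rolled back: " + ex));
        System.out.println("statements reporting zero affected rows: " + zeroRowStatements);
    }
}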
Use of java.util.function.Function in project gatk by broadinstitute.
In the class CNLOHCaller, the method calcNewRhos:
private double[] calcNewRhos(final List<ACNVModeledSegment> segments, final List<double[][][]> responsibilitiesBySeg, final double lambda, final double[] rhos, final int[] mVals, final int[] nVals, final JavaSparkContext ctx) {
// Since we pass in the entire responsibilities matrix, we need the correct index for each rho. That, and the
// fact that this is a univariate objective function, means we create one objective instance per rho and
// then distribute the work across Spark.
final List<Pair<? extends Function<Double, Double>, SearchInterval>> objectives = IntStream.range(0, rhos.length).mapToObj(i -> new Pair<>(new Function<Double, Double>() {
@Override
public Double apply(Double rho) {
return calculateESmnObjective(rho, segments, responsibilitiesBySeg, mVals, nVals, lambda, i);
}
}, new SearchInterval(0.0, 1.0, rhos[i]))).collect(Collectors.toList());
final JavaRDD<Pair<? extends Function<Double, Double>, SearchInterval>> objectivesRDD = ctx.parallelize(objectives);
final List<Double> resultsAsDouble = objectivesRDD.map(objective -> optimizeIt(objective.getFirst(), objective.getSecond())).collect();
return resultsAsDouble.stream().mapToDouble(Double::doubleValue).toArray();
}
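One detail worth flagging: these Pair objects cross the Spark boundary, and java.util.function.Function does not extend Serializable. A common workaround (shown here as an assumption for illustration, not necessarily what GATK relies on) is an intersection-type cast that makes the lambda serializable:

import java.io.Serializable;
import java.util.function.Function;

public class SerializableFunctionSketch {
    public static void main(String[] args) {
        final double center = 0.25; // captured state; everything captured must itself be serializable
        // The intersection-type cast makes the lambda implement Serializable as well as Function,
        // so it can be shipped to Spark executors as part of an RDD element.
        final Function<Double, Double> objective =
                (Function<Double, Double> & Serializable) rho -> -(rho - center) * (rho - center);
        System.out.println(objective.apply(0.25)); // -0.0, the maximum of the toy objective
    }
}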
Use of java.util.function.Function in project Gargoyle by callakrsos.
In the class DaoWizardViewController, the method btnExecOnMouseClick:
/**
* Executes the SQL statement entered in the text area. By default the query is wrapped
* with a ROWNUM clause so that at most 100 rows are fetched.
*
* @author KYJ
* @since 2015. 10. 21.
*/
@FXML
public void btnExecOnMouseClick(MouseEvent e) {
LOGGER.debug("event] btnExecOnMouseClick");
String velocitySQL = txtSql.getText() == null ? "" : txtSql.getText().trim();
if (velocitySQL.isEmpty())
return;
LOGGER.debug(String.format("velocitySQL : %s", velocitySQL));
// Fetch the parameter column values.
ObservableList<TbpSysDaoFieldsDVO> items = tbParams.getItems();
Map<String, TbpSysDaoColumnsDVO> unmapping = this.tbMappings.getItems().stream().filter(v -> "Y".equals(v.getLockYn())).collect(Collectors.toMap(TbpSysDaoColumnsDVO::getColumnName, v -> v));
Map<String, Object> paramMap = items.stream().filter(vo -> vo.getTestValue() != null && !vo.getTestValue().isEmpty()).collect(Collectors.toMap(TbpSysDaoFieldsDVO::getFieldName, new Function<TbpSysDaoFieldsDVO, Object>() {
@Override
public Object apply(TbpSysDaoFieldsDVO t) {
if ("Arrays".equals(t.getType())) {
String pattern = "'[^']{0,}'";
List<String> regexMatchs = ValueUtil.regexMatchs(pattern, t.getTestValue(), str -> str.substring(1, str.length() - 1));
return regexMatchs;
}
return t.getTestValue();
}
}));
SimpleSQLResultView simpleSQLResultView = new SimpleSQLResultView(velocitySQL, paramMap);
try {
simpleSQLResultView.show();
List<TableModelDVO> columns = simpleSQLResultView.getColumns();
List<TbpSysDaoColumnsDVO> resultList = columns.stream().map(vo -> {
TbpSysDaoColumnsDVO dvo = new TbpSysDaoColumnsDVO();
dvo.setColumnName(vo.getDatabaseColumnName());
String databaseTypeName = vo.getDatabaseTypeName();
dvo.setColumnType(databaseTypeName);
if (unmapping.containsKey(vo.getDatabaseColumnName())) {
TbpSysDaoColumnsDVO tmp = unmapping.get(vo.getDatabaseColumnName());
dvo.setProgramType(tmp.getProgramType());
dvo.setLockYn(tmp.getLockYn());
} else {
String programType = DatabaseTypeMappingResourceLoader.getInstance().get(databaseTypeName);
dvo.setProgramType(programType);
}
return dvo;
}).collect(Collectors.toList());
// if (!this.tbMappings.getItems().isEmpty())
if (!resultList.isEmpty()) {
try {
this.tbMappings.getItems().clear();
getSelectedMethodItem().getTbpSysDaoColumnsDVOList().clear();
this.tbMappings.getItems().addAll(resultList);
getSelectedMethodItem().getTbpSysDaoColumnsDVOList().addAll(resultList);
} catch (NullPointerException n) {
DialogUtil.showMessageDialog("Please select a method.");
}
}
} catch (IOException e1) {
LOGGER.error(ValueUtil.toString(e1));
DialogUtil.showExceptionDailog(e1);
}
}
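Both Collectors.toMap calls in this method throw IllegalStateException if two rows yield the same key (duplicate column or field names). Where duplicates are possible, the three-argument overload with a merge function is the usual guard; a minimal sketch, with a last-one-wins policy chosen purely for illustration:

import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class ToMapMergeSketch {
    public static void main(String[] args) {
        // The merge function resolves duplicate keys instead of throwing IllegalStateException.
        Map<String, Integer> lengthByName = Stream.of("COL_A", "COL_B", "COL_A")
                .collect(Collectors.toMap(name -> name, String::length, (first, second) -> second));
        System.out.println(lengthByName); // {COL_A=5, COL_B=5} (iteration order not guaranteed)
    }
}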