Use of org.apache.commons.collections.Transformer in project BroadleafCommerce by BroadleafCommerce.
The class CategoryImpl, method getCumulativeSearchFacets().
@Override
public List<CategorySearchFacet> getCumulativeSearchFacets(Set<Category> categoryHierarchy) {
    categoryHierarchy.add(this);
    List<CategorySearchFacet> returnCategoryFacets = new ArrayList<CategorySearchFacet>();
    returnCategoryFacets.addAll(getSearchFacets());
    Collections.sort(returnCategoryFacets, facetPositionComparator);
    final Collection<SearchFacet> facets = CollectionUtils.collect(returnCategoryFacets, new Transformer() {
        @Override
        public Object transform(Object input) {
            return ((CategorySearchFacet) input).getSearchFacet();
        }
    });
    // Add in parent facets unless they are excluded
    Category parentCategory = getDefaultParentCategory();
    List<CategorySearchFacet> parentFacets = null;
    if (parentCategory != null && !categoryHierarchy.contains(parentCategory)) {
        parentFacets = parentCategory.getCumulativeSearchFacets(categoryHierarchy);
        CollectionUtils.filter(parentFacets, new Predicate() {
            @Override
            public boolean evaluate(Object arg) {
                CategorySearchFacet csf = (CategorySearchFacet) arg;
                return !getExcludedSearchFacets().contains(csf.getSearchFacet()) && !facets.contains(csf.getSearchFacet());
            }
        });
    }
    if (parentFacets != null) {
        returnCategoryFacets.addAll(parentFacets);
    }
    return returnCategoryFacets;
}
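The core idiom above is CollectionUtils.collect, which applies a Transformer to every element of a collection and returns a new collection of the results. A minimal self-contained sketch of the same pattern, using a hypothetical Item class rather than Broadleaf's CategorySearchFacet:

import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.Transformer;

public class CollectExample {
    // Hypothetical value holder standing in for CategorySearchFacet.
    static class Item {
        private final String name;
        Item(String name) { this.name = name; }
        String getName() { return name; }
    }

    public static void main(String[] args) {
        List<Item> items = Arrays.asList(new Item("color"), new Item("size"));
        // collect() applies the Transformer to each element and returns a new Collection.
        Collection names = CollectionUtils.collect(items, new Transformer() {
            @Override
            public Object transform(Object input) {
                return ((Item) input).getName();
            }
        });
        System.out.println(names); // [color, size]
    }
}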
Use of org.apache.commons.collections.Transformer in project Gemma by PavlidisLab.
The class LinearModelAnalyzer, method doAnalysis().
/**
 * @param bioAssaySet       source data, which could be a SubSet
 * @param config            configuration for the analysis
 * @param dmatrix           the expression data matrix
 * @param samplesUsed       the samples analyzed
 * @param factors           the factors included in the model
 * @param subsetFactorValue null unless analyzing a subset (only used for book-keeping)
 * @return the analysis, or null if there was a problem
 */
private DifferentialExpressionAnalysis doAnalysis(BioAssaySet bioAssaySet, DifferentialExpressionAnalysisConfig config, ExpressionDataDoubleMatrix dmatrix, List<BioMaterial> samplesUsed, List<ExperimentalFactor> factors, FactorValue subsetFactorValue) {
    // We may want to change this to fall back to running normally, though the real fix is to just finish the ebayes implementation.
    if (config.getModerateStatistics() && dmatrix.hasMissingValues()) {
        throw new UnsupportedOperationException("Ebayes cannot be used when there are values missing in the data");
    }
    if (factors.isEmpty()) {
        LinearModelAnalyzer.log.error("Must provide at least one factor");
        return null;
    }
    if (samplesUsed.size() <= factors.size()) {
        LinearModelAnalyzer.log.error("Must have more samples than factors");
        return null;
    }
    final Map<String, Collection<ExperimentalFactor>> label2Factors = this.getRNames(factors);
    Map<ExperimentalFactor, FactorValue> baselineConditions = ExperimentalDesignUtils.getBaselineConditions(samplesUsed, factors);
    this.dropIncompleteFactors(samplesUsed, factors, baselineConditions);
    if (factors.isEmpty()) {
        LinearModelAnalyzer.log.error("Must provide at least one factor; they were all removed due to incomplete values");
        return null;
    }
    QuantitationType quantitationType = dmatrix.getQuantitationTypes().iterator().next();
    ExperimentalFactor interceptFactor = this.determineInterceptFactor(factors, quantitationType);
    /*
     * Build our factor terms, with interactions handled specially
     */
    List<String[]> interactionFactorLists = new ArrayList<>();
    ObjectMatrix<String, String, Object> designMatrix = ExperimentalDesignUtils.buildDesignMatrix(factors, samplesUsed, baselineConditions);
    config.setBaseLineFactorValues(baselineConditions);
    boolean oneSampleTTest = interceptFactor != null && factors.size() == 1;
    if (!oneSampleTTest) {
        this.buildModelFormula(config, label2Factors, interactionFactorLists);
    }
    // calculate library size before log transformation (FIXME we compute this twice)
    DoubleMatrix1D librarySize = MatrixStats.colSums(dmatrix.getMatrix());
    /*
     * FIXME: remove columns that are marked as outliers.
     */
    dmatrix = ExpressionDataDoubleMatrixUtil.filterAndLog2Transform(quantitationType, dmatrix);
    DoubleMatrix<CompositeSequence, BioMaterial> namedMatrix = dmatrix.getMatrix();
    if (LinearModelAnalyzer.log.isDebugEnabled())
        this.outputForDebugging(dmatrix, designMatrix);
    /*
     * PREPARATION FOR 'NATIVE' FITTING
     */
    DoubleMatrix<String, String> sNamedMatrix = this.makeDataMatrix(designMatrix, namedMatrix);
    DesignMatrix properDesignMatrix = this.makeDesignMatrix(designMatrix, interactionFactorLists, baselineConditions);
    /*
     * Run the analysis NOTE this can be simplified if we strip out R code.
     */
    final Map<String, LinearModelSummary> rawResults = this.runAnalysis(namedMatrix, sNamedMatrix, properDesignMatrix, librarySize, config);
    if (rawResults.size() == 0) {
        LinearModelAnalyzer.log.error("Got no results from the analysis");
        return null;
    }
    /*
     * Initialize data structures we need to hold results.
     */
    // this used to be a Set, but a List is much faster.
    Map<String, List<DifferentialExpressionAnalysisResult>> resultLists = new HashMap<>();
    Map<String, List<Double>> pvaluesForQvalue = new HashMap<>();
    for (String factorName : label2Factors.keySet()) {
        resultLists.put(factorName, new ArrayList<DifferentialExpressionAnalysisResult>());
        pvaluesForQvalue.put(factorName, new ArrayList<Double>());
    }
    for (String[] fs : interactionFactorLists) {
        String intF = StringUtils.join(fs, ":");
        resultLists.put(intF, new ArrayList<DifferentialExpressionAnalysisResult>());
        pvaluesForQvalue.put(intF, new ArrayList<Double>());
    }
    if (pvaluesForQvalue.isEmpty()) {
        LinearModelAnalyzer.log.warn("No results were obtained for the current stage of analysis.");
        return null;
    }
    /*
     * Create result objects for each model fit. Keeping things in order is important.
     */
    final Transformer rowNameExtractor = TransformerUtils.invokerTransformer("getId");
    boolean warned = false;
    int notUsable = 0;
    int processed = 0;
    for (CompositeSequence el : namedMatrix.getRowNames()) {
        if (++processed % 15000 == 0) {
            LinearModelAnalyzer.log.info("Processed results for " + processed + " elements ...");
        }
        LinearModelSummary lm = rawResults.get(rowNameExtractor.transform(el).toString());
        if (LinearModelAnalyzer.log.isDebugEnabled())
            LinearModelAnalyzer.log.debug(el.getName() + "\n" + lm);
        if (lm == null) {
            if (!warned) {
                LinearModelAnalyzer.log.warn("No result for " + el + ", further warnings suppressed");
                warned = true;
            }
            notUsable++;
            continue;
        }
        for (String factorName : label2Factors.keySet()) {
            if (!pvaluesForQvalue.containsKey(factorName)) {
                // was dropped.
                continue;
            }
            Double overallPValue;
            DifferentialExpressionAnalysisResult probeAnalysisResult = DifferentialExpressionAnalysisResult.Factory.newInstance();
            probeAnalysisResult.setProbe(el);
            if (lm.getCoefficients() == null) {
                // probeAnalysisResult.setPvalue( null );
                // pvaluesForQvalue.get( factorName ).add( overallPValue );
                // resultLists.get( factorName ).add( probeAnalysisResult );
                notUsable++;
                continue;
            }
            Collection<ExperimentalFactor> factorsForName = label2Factors.get(factorName);
            if (factorsForName.size() > 1) {
                /*
                 * Interactions
                 */
                if (factorsForName.size() > 2) {
                    LinearModelAnalyzer.log.error("Handling more than two-way interactions is not implemented");
                    return null;
                }
                assert factorName.contains(":");
                String[] factorNames = StringUtils.split(factorName, ":");
                assert factorNames.length == factorsForName.size();
                overallPValue = lm.getInteractionEffectP(factorNames);
                if (overallPValue != null && !Double.isNaN(overallPValue)) {
                    Map<String, Double> interactionContrastTStats = lm.getContrastTStats(factorName);
                    Map<String, Double> interactionContrastCoeffs = lm.getContrastCoefficients(factorName);
                    Map<String, Double> interactionContrastPValues = lm.getContrastPValues(factorName);
                    for (String term : interactionContrastPValues.keySet()) {
                        Double contrastPvalue = interactionContrastPValues.get(term);
                        this.makeContrast(probeAnalysisResult, factorsForName, term, factorName, contrastPvalue, interactionContrastTStats, interactionContrastCoeffs);
                    }
                } else {
                    if (!warned) {
                        LinearModelAnalyzer.log.warn("Interaction could not be computed for " + el + ", further warnings suppressed");
                        warned = true;
                    }
                    if (LinearModelAnalyzer.log.isDebugEnabled())
                        LinearModelAnalyzer.log.debug("Interaction could not be computed for " + el + ", further warnings suppressed");
                    // will over count?
                    notUsable++;
                    continue;
                }
            } else {
                /*
                 * Main effect
                 */
                assert factorsForName.size() == 1;
                ExperimentalFactor experimentalFactor = factorsForName.iterator().next();
                if (interceptFactor != null && factorsForName.size() == 1 && experimentalFactor.equals(interceptFactor)) {
                    overallPValue = lm.getInterceptP();
                } else {
                    overallPValue = lm.getMainEffectP(factorName);
                }
                /*
                 * Add contrasts unless overall pvalue is NaN
                 */
                if (overallPValue != null && !Double.isNaN(overallPValue)) {
                    Map<String, Double> mainEffectContrastTStats = lm.getContrastTStats(factorName);
                    Map<String, Double> mainEffectContrastPvalues = lm.getContrastPValues(factorName);
                    Map<String, Double> mainEffectContrastCoeffs = lm.getContrastCoefficients(factorName);
                    for (String term : mainEffectContrastPvalues.keySet()) {
                        Double contrastPvalue = mainEffectContrastPvalues.get(term);
                        this.makeContrast(probeAnalysisResult, factorsForName, term, factorName, contrastPvalue, mainEffectContrastTStats, mainEffectContrastCoeffs);
                    }
                } else {
                    if (!warned) {
                        LinearModelAnalyzer.log.warn("ANOVA could not be done for " + experimentalFactor + " on " + el + ", further warnings suppressed");
                        warned = true;
                    }
                    if (LinearModelAnalyzer.log.isDebugEnabled())
                        LinearModelAnalyzer.log.debug("ANOVA could not be done for " + experimentalFactor + " on " + el);
                    // will over count?
                    notUsable++;
                    continue;
                }
            }
            assert !Double.isNaN(overallPValue) : "We should not be keeping non-number pvalues (null or NaNs)";
            probeAnalysisResult.setPvalue(this.nan2Null(overallPValue));
            pvaluesForQvalue.get(factorName).add(overallPValue);
            resultLists.get(factorName).add(probeAnalysisResult);
        }
        // over terms
    }
    if (notUsable > 0) {
        LinearModelAnalyzer.log.info(notUsable + " elements or results were not usable - model could not be fit, etc.");
    }
    this.getRanksAndQvalues(resultLists, pvaluesForQvalue);
    DifferentialExpressionAnalysis expressionAnalysis = this.makeAnalysisEntity(bioAssaySet, config, label2Factors, baselineConditions, interceptFactor, interactionFactorLists, oneSampleTTest, resultLists, subsetFactorValue);
    LinearModelAnalyzer.log.info("Analysis processing phase done ...");
    return expressionAnalysis;
}
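The Transformer here is built with TransformerUtils.invokerTransformer, which reflectively calls the named no-argument method on each input object. A minimal sketch of the same idea, using a hypothetical Probe class instead of Gemma's CompositeSequence:

import org.apache.commons.collections.Transformer;
import org.apache.commons.collections.TransformerUtils;

public class InvokerTransformerExample {
    // Hypothetical entity; only getId() matters for this example.
    public static class Probe {
        private final Long id;
        public Probe(Long id) { this.id = id; }
        public Long getId() { return id; }
    }

    public static void main(String[] args) {
        // Builds a Transformer that reflectively invokes getId() on whatever object is passed in.
        Transformer idExtractor = TransformerUtils.invokerTransformer("getId");
        Probe probe = new Probe(42L);
        String key = idExtractor.transform(probe).toString();
        System.out.println(key); // 42
    }
}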
Use of org.apache.commons.collections.Transformer in project sling by apache.
The class FileVaultResourceMapper, method getChildren().
@SuppressWarnings("unchecked")
@Override
public Iterator<Resource> getChildren(final ResourceResolver resolver, final Resource parent) {
    String parentPath = parent.getPath();
    Set<String> childPaths = new LinkedHashSet<>();
    // get children from content resource of parent
    ContentFile parentContentFile = getContentFile(parentPath, null);
    if (parentContentFile != null) {
        Iterator<Map.Entry<String, ContentElement>> childMaps = parentContentFile.getChildren();
        while (childMaps.hasNext()) {
            Map.Entry<String, ContentElement> entry = childMaps.next();
            String childPath = parentPath + "/" + entry.getKey();
            if (pathMatches(childPath)) {
                childPaths.add(childPath);
            }
        }
    }
    // additional check for children in file system
    File parentFile = getFile(parentPath);
    if (parentFile != null && parentFile.isDirectory()) {
        for (File childFile : parentFile.listFiles()) {
            String childPath = parentPath + "/" + PlatformNameFormat.getRepositoryName(childFile.getName());
            if (pathMatches(childPath) && !childPaths.contains(childPath)) {
                childPaths.add(childPath);
            }
        }
    }
    if (childPaths.isEmpty()) {
        return null;
    } else {
        return IteratorUtils.transformedIterator(childPaths.iterator(), new Transformer() {
            @Override
            public Object transform(Object input) {
                String path = (String) input;
                return getResource(resolver, path);
            }
        });
    }
}
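This example relies on IteratorUtils.transformedIterator, which wraps an iterator so the Transformer is applied lazily, one element at a time, as the caller advances it. A small standalone sketch with plain path strings (the toUpperCase call is just a stand-in for the getResource lookup above):

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.collections.IteratorUtils;
import org.apache.commons.collections.Transformer;

public class TransformedIteratorExample {
    public static void main(String[] args) {
        List<String> paths = Arrays.asList("/content/a", "/content/b");
        // The Transformer is only invoked when next() is called, so the mapping
        // is lazy - useful when resolving each element is expensive.
        Iterator mapped = IteratorUtils.transformedIterator(paths.iterator(), new Transformer() {
            @Override
            public Object transform(Object input) {
                String path = (String) input;
                return path.toUpperCase(); // stand-in for getResource(resolver, path)
            }
        });
        while (mapped.hasNext()) {
            System.out.println(mapped.next());
        }
    }
}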
Use of org.apache.commons.collections.Transformer in project sling by apache.
The class DefaultConfigurationResourceResolvingStrategy, method getResourceCollectionInheritanceChain().
@SuppressWarnings("unchecked")
@Override
public Collection<Iterator<Resource>> getResourceCollectionInheritanceChain(final Resource contentResource, final Collection<String> bucketNames, final String configName) {
    if (!isEnabledAndParamsValid(contentResource, bucketNames, configName)) {
        return null;
    }
    final ResourceResolver resourceResolver = contentResource.getResourceResolver();
    final List<String> paths = IteratorUtils.toList(getResolvePaths(contentResource, bucketNames));
    // get resource collection with respect to collection inheritance
    Collection<Resource> resourceCollection = getResourceCollectionInternal(bucketNames, configName, paths.iterator(), resourceResolver);
    // get inheritance chain for each item found
    // yes, this resolves the closest item twice, but is the easiest solution to combine both logic aspects
    Iterator<Iterator<Resource>> result = IteratorUtils.transformedIterator(resourceCollection.iterator(), new Transformer() {
        @Override
        public Object transform(Object input) {
            Resource item = (Resource) input;
            return getResourceInheritanceChainInternal(bucketNames, configName + "/" + item.getName(), paths.iterator(), resourceResolver);
        }
    });
    if (result.hasNext()) {
        return IteratorUtils.toList(result);
    } else {
        return null;
    }
}
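The same transformedIterator idiom also works when each element maps to a whole sub-iterator, as in the inheritance-chain lookup above; IteratorUtils.toList then materializes the outer iterator into a collection. A minimal sketch with plain strings standing in for resources and their inheritance chains:

import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.collections.IteratorUtils;
import org.apache.commons.collections.Transformer;

public class NestedIteratorExample {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        List<String> names = Arrays.asList("alpha", "beta");
        // Map each name to an iterator over several derived values
        // (a stand-in for resolving an inheritance chain per resource).
        Iterator<Iterator<String>> chains = IteratorUtils.transformedIterator(names.iterator(), new Transformer() {
            @Override
            public Object transform(Object input) {
                String name = (String) input;
                return Arrays.asList(name, name + "/parent", name + "/grandparent").iterator();
            }
        });
        // toList() drains the outer iterator; the inner iterators remain lazy.
        Collection<Iterator<String>> result = IteratorUtils.toList(chains);
        for (Iterator<String> chain : result) {
            System.out.println(IteratorUtils.toList(chain));
        }
    }
}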
Use of org.apache.commons.collections.Transformer in project gocd by gocd.
The class MagicalGoConfigXmlLoaderTest, method shouldAllowPluggableTaskConfiguration().
@Test
public void shouldAllowPluggableTaskConfiguration() throws Exception {
    String configString = "<cruise schemaVersion='" + CONFIG_SCHEMA_VERSION + "'>\n"
            + " <pipelines>"
            + "<pipeline name='pipeline1'>"
            + " <materials>"
            + " <svn url='svnurl' username='admin' password='%s'/>"
            + " </materials>"
            + " <stage name='mingle'>"
            + " <jobs>"
            + " <job name='do-something'><tasks>"
            + " <task>"
            + " <pluginConfiguration id='plugin-id-1' version='1.0'/>"
            + " <configuration>"
            + " <property><key>url</key><value>http://fake-go-server</value></property>"
            + " <property><key>username</key><value>godev</value></property>"
            + " <property><key>password</key><value>password</value></property>"
            + " </configuration>"
            + " </task> </tasks>"
            + " </job>"
            + " </jobs>"
            + " </stage>"
            + "</pipeline></pipelines>"
            + "</cruise>";
    CruiseConfig cruiseConfig = ConfigMigrator.loadWithMigration(configString).configForEdit;
    PipelineConfig pipelineConfig = cruiseConfig.getAllPipelineConfigs().get(0);
    JobConfig jobConfig = pipelineConfig.getFirstStageConfig().getJobs().get(0);
    Tasks tasks = jobConfig.getTasks();
    assertThat(tasks.size(), is(1));
    assertThat(tasks.get(0) instanceof PluggableTask, is(true));
    PluggableTask task = (PluggableTask) tasks.get(0);
    assertThat(task.getTaskType(), is("pluggable_task_plugin_id_1"));
    assertThat(task.getTypeForDisplay(), is("Pluggable Task"));
    final Configuration configuration = task.getConfiguration();
    assertThat(configuration.listOfConfigKeys().size(), is(3));
    assertThat(configuration.listOfConfigKeys(), is(asList("url", "username", "password")));
    Collection values = CollectionUtils.collect(configuration.listOfConfigKeys(), new Transformer() {
        @Override
        public Object transform(Object o) {
            ConfigurationProperty property = configuration.getProperty((String) o);
            return property.getConfigurationValue().getValue();
        }
    });
    assertThat(new ArrayList<>(values), is(asList("http://fake-go-server", "godev", "password")));
}
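As a side note, on Java 8 and later the same key-to-value extraction can be expressed with the Stream API instead of the commons-collections Transformer. A rough, self-contained equivalent of the collect call above, using a plain Map rather than GoCD's Configuration type:

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class StreamCollectExample {
    public static void main(String[] args) {
        // Hypothetical key/value pairs standing in for the task configuration.
        Map<String, String> config = new LinkedHashMap<>();
        config.put("url", "http://fake-go-server");
        config.put("username", "godev");
        config.put("password", "password");

        List<String> keys = Arrays.asList("url", "username", "password");
        // map() plays the role of the Transformer; collect() gathers the results in order.
        List<String> values = keys.stream()
                .map(config::get)
                .collect(Collectors.toList());
        System.out.println(values); // [http://fake-go-server, godev, password]
    }
}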