use of org.baderlab.csplugins.enrichmentmap.model.GeneExpressionMatrix in project EnrichmentMapApp by BaderLab.
the class LegacySessionLoader method loadSession.
/**
 * Restore enrichment maps from a legacy session.
 *
 * @param session the session whose app file list contains the saved EnrichmentMap files
 */
@SuppressWarnings("unchecked")
public void loadSession(CySession session) {
Map<Long, EnrichmentMapParameters> paramsMap = new HashMap<>();
Map<Long, EnrichmentMap> enrichmentMapMap = new HashMap<>();
List<File> fileList = session.getAppFileListMap().get(CyActivator.APP_NAME);
try {
//go through the .props files first to create the EnrichmentMap objects that the other files will be added to.
for (File prop_file : fileList) {
if (prop_file.getName().contains(".props")) {
InputStream reader = streamUtil.getInputStream(prop_file.getAbsolutePath());
String fullText = new Scanner(reader, "UTF-8").useDelimiter("\\A").next();
//Given the file with all the parameters, create a new EnrichmentMapParameters object
EnrichmentMapParameters params = enrichmentMapParametersFactory.create(fullText);
EnrichmentMap em = new EnrichmentMap(params.getCreationParameters(), serviceRegistrar);
//get the network name
String param_name = em.getName();
//TODO:distinguish between GSEA and EM saved sessions
String props_name = (prop_file.getName().split("\\."))[0];
String networkName = param_name;
//related to bug ticket #49
if (!props_name.equalsIgnoreCase(param_name))
networkName = props_name;
//after associating the properties with the network,
//initialize each DataSet that we have files for
HashMap<String, DataSetFiles> files = params.getFiles();
for (Iterator<String> j = params.getFiles().keySet().iterator(); j.hasNext(); ) {
String current_dataset = j.next();
Method method = EnrichmentMapParameters.stringToMethod(params.getMethod());
em.createDataSet(current_dataset, method, files.get(current_dataset));
}
CyNetwork network = getNetworkByName(networkName);
Long suid = network.getSUID();
em.setNetworkID(suid);
paramsMap.put(suid, params);
enrichmentMapMap.put(suid, em);
}
}
// go through the rest of the files
for (File propFile : fileList) {
FileNameParts parts = ParseFileName(propFile);
if (parts == null || propFile.getName().contains(".props"))
continue;
CyNetwork net = getNetworkByName(parts.name);
EnrichmentMap em = net == null ? null : enrichmentMapMap.get(net.getSUID());
EnrichmentMapParameters params = paramsMap.get(net.getSUID());
Method method = EnrichmentMapParameters.stringToMethod(params.getMethod());
if (em == null) {
System.out.println("network for file" + propFile.getName() + " does not exist.");
} else if ((!propFile.getName().contains(".props")) && (!propFile.getName().contains(".expression1.txt")) && (!propFile.getName().contains(".expression2.txt"))) {
HashMap<String, String> props = params.getProps();
//if this is a dataset-specific file, make sure there is a dataset object for it
if (!(parts.dataset == null) && em.getDataSet(parts.dataset) == null && !parts.dataset.equalsIgnoreCase("signature"))
em.createDataSet(parts.dataset, method, params.getFiles().get(parts.dataset));
if (parts.type == null)
System.out.println("Sorry, unable to determine the type of the file: " + propFile.getName());
//read the file
InputStream reader = streamUtil.getInputStream(propFile.getAbsolutePath());
String fullText = new Scanner(reader, "UTF-8").useDelimiter("\\A").next();
//if the file is empty then skip it
if (fullText == null || fullText.equalsIgnoreCase(""))
continue;
if (propFile.getName().contains(".gmt")) {
HashMap<String, GeneSet> gsMap = (HashMap<String, GeneSet>) params.repopulateHashmap(fullText, 1);
if (propFile.getName().contains(".signature.gmt")) {
// TODO Find a better way to serialize EMSignatureDataSet
String sdsName = propFile.getName().replace(".signature.gmt", "");
sdsName = NamingUtil.getUniqueName(sdsName, em.getSignatureDataSets().keySet());
EMSignatureDataSet sigDataSet = new EMSignatureDataSet(sdsName);
em.addSignatureDataSet(sigDataSet);
SetOfGeneSets sigGeneSets = sigDataSet.getGeneSetsOfInterest();
gsMap.forEach((k, v) -> sigGeneSets.addGeneSet(k, v));
} else if (propFile.getName().contains(".set2.gmt")) {
// account for legacy session files
if (em.getAllGeneSets().containsKey(LegacySupport.DATASET2)) {
SetOfGeneSets gs = new SetOfGeneSets(LegacySupport.DATASET2, props);
gs.setGeneSets(gsMap);
}
} else {
SetOfGeneSets gs = new SetOfGeneSets(parts.dataset, props);
gs.setGeneSets(gsMap);
em.getDataSets().get(parts.dataset).setSetOfGeneSets(gs);
}
}
if (propFile.getName().contains(".genes.txt")) {
HashMap<String, Integer> genes = params.repopulateHashmap(fullText, 2);
genes.forEach(em::addGene);
//ticket #188 - unable to open session files that have empty enrichment maps.
if (genes != null && !genes.isEmpty())
// Ticket #107 : restore also gene count (needed to determine the next free hash in case we do PostAnalysis with a restored session)
em.setNumberOfGenes(Math.max(em.getNumberOfGenes(), Collections.max(genes.values()) + 1));
}
if (propFile.getName().contains(".hashkey2genes.txt")) {
HashMap<Integer, String> hashkey2gene = params.repopulateHashmap(fullText, 5);
//ticket #188 - unable to open session files that have empty enrichment maps.
if (hashkey2gene != null && !hashkey2gene.isEmpty())
// Ticket #107 : restore also gene count (needed to determine the next free hash in case we do PostAnalysis with a restored session)
em.setNumberOfGenes(Math.max(em.getNumberOfGenes(), Collections.max(hashkey2gene.keySet()) + 1));
}
if ((parts.type != null && (parts.type.equalsIgnoreCase("ENR") || (parts.type.equalsIgnoreCase("SubENR")))) || propFile.getName().contains(".ENR1.txt") || propFile.getName().contains(".SubENR1.txt")) {
SetOfEnrichmentResults enrichments;
int temp = 1;
//check to see if this dataset has enrichment results already
if (parts.dataset != null && em.getDataSet(parts.dataset).getEnrichments() != null) {
enrichments = em.getDataSet(parts.dataset).getEnrichments();
} else if (parts.dataset == null) {
enrichments = em.getDataSet(LegacySupport.DATASET1).getEnrichments();
/*enrichments = new SetOfEnrichmentResults(EnrichmentMap.DATASET1,props);
em.getDataset(EnrichmentMap.DATASET1).setEnrichments(enrichments);*/
} else {
enrichments = new SetOfEnrichmentResults(parts.dataset, props);
em.getDataSet(parts.dataset).setEnrichments(enrichments);
}
if (parts.type.equalsIgnoreCase("ENR") || propFile.getName().contains(".ENR1.txt")) {
if (params.getMethod().equalsIgnoreCase(EnrichmentMapParameters.method_GSEA))
enrichments.setEnrichments(params.repopulateHashmap(fullText, 3));
else
enrichments.setEnrichments(params.repopulateHashmap(fullText, 4));
}
}
//it would only happen for sessions saved with version 0.8
if (propFile.getName().contains(".RANKS1.txt") || propFile.getName().contains(".RANKS1Genes.txt")) {
Ranking new_ranking;
//Check to see if there is already GSEARanking
if (em.getDataSet(LegacySupport.DATASET1).getExpressionSets().getAllRanksNames().contains(Ranking.GSEARanking)) {
new_ranking = em.getDataSet(LegacySupport.DATASET1).getExpressionSets().getRanksByName(Ranking.GSEARanking);
} else {
new_ranking = new Ranking();
em.getDataSet(LegacySupport.DATASET1).getExpressionSets().addRanks(Ranking.GSEARanking, new_ranking);
}
if (propFile.getName().contains(".RANKS1.txt")) {
Map<Integer, Rank> ranks = (Map<Integer, Rank>) params.repopulateHashmap(fullText, 7);
ranks.forEach(new_ranking::addRank);
}
// if(prop_file.getName().contains(".RANKS1Genes.txt"))
// new_ranking.setRank2gene(em.getParams().repopulateHashmap(fullText,7));
// if(prop_file.getName().contains(".RANKS1.txt"))
// new_ranking.setRanking(em.getParams().repopulateHashmap(fullText,6));
}
if (propFile.getName().contains(".RANKS.txt")) {
if (parts.ranks_name == null) {
//we need to get the name of this set of rankings
// network_name.ranking_name.ranks.txt --> split by "." and get 2
String[] file_name_tokens = (propFile.getName()).split("\\.");
if ((file_name_tokens.length == 4) && (file_name_tokens[1].equals("Dataset 1 Ranking") || file_name_tokens[1].equals("Dataset 2 Ranking")) || (propFile.getName().contains(Ranking.GSEARanking)))
parts.ranks_name = Ranking.GSEARanking;
else //this is an extra rank file for backwards compatibility. Ignore it.
if ((file_name_tokens.length == 4) && (file_name_tokens[1].equals("Dataset 1") || file_name_tokens[1].equals("Dataset 2")) && file_name_tokens[2].equals("RANKS"))
continue;
else
//file name is not structured properly --> default to file name
parts.ranks_name = propFile.getName();
}
Ranking new_ranking = new Ranking();
Map<Integer, Rank> ranks = (Map<Integer, Rank>) params.repopulateHashmap(fullText, 6);
ranks.forEach(new_ranking::addRank);
if (parts.dataset != null)
em.getDataSet(parts.dataset).getExpressionSets().addRanks(parts.ranks_name, new_ranking);
else
em.getDataSet(LegacySupport.DATASET1).getExpressionSets().addRanks(parts.ranks_name, new_ranking);
}
//Deal with legacy issues
if (params.isTwoDatasets()) {
//make sure there is a Dataset2
if (!em.getDataSets().containsKey(LegacySupport.DATASET2))
em.createDataSet(LegacySupport.DATASET2, method, new DataSetFiles());
if (propFile.getName().contains(".ENR2.txt") || propFile.getName().contains(".SubENR2.txt")) {
SetOfEnrichmentResults enrichments;
//check to see if this dataset has enrichment results already
if (em.getDataSet(LegacySupport.DATASET2).getEnrichments() != null) {
enrichments = em.getDataSet(LegacySupport.DATASET2).getEnrichments();
} else {
enrichments = new SetOfEnrichmentResults(LegacySupport.DATASET2, props);
em.getDataSet(LegacySupport.DATASET2).setEnrichments(enrichments);
}
if (propFile.getName().contains(".ENR2.txt")) {
if (params.getMethod().equalsIgnoreCase(EnrichmentMapParameters.method_GSEA))
enrichments.setEnrichments(params.repopulateHashmap(fullText, 3));
else
enrichments.setEnrichments(params.repopulateHashmap(fullText, 4));
}
}
//it would only happen for sessions saved with version 0.8
if (propFile.getName().contains(".RANKS2.txt") || propFile.getName().contains(".RANKS2Genes.txt")) {
Ranking new_ranking;
// Check to see if there is already GSEARanking
if (em.getDataSet(LegacySupport.DATASET2).getExpressionSets().getAllRanksNames().contains(Ranking.GSEARanking)) {
new_ranking = em.getDataSet(LegacySupport.DATASET2).getExpressionSets().getRanksByName(Ranking.GSEARanking);
} else {
new_ranking = new Ranking();
em.getDataSet(LegacySupport.DATASET2).getExpressionSets().addRanks(Ranking.GSEARanking, new_ranking);
}
if (propFile.getName().contains(".RANKS2.txt")) {
Map<Integer, Rank> ranks = (Map<Integer, Rank>) params.repopulateHashmap(fullText, 6);
ranks.forEach(new_ranking::addRank);
}
}
}
}
}
//load the expression files using the info from the parameters
for (int i = 0; i < fileList.size(); i++) {
File prop_file = fileList.get(i);
FileNameParts parts_exp = ParseFileName(prop_file);
//unrecognized file
if ((parts_exp == null) || (parts_exp.name == null))
continue;
CyNetwork net = getNetworkByName(parts_exp.name);
EnrichmentMap map = net == null ? null : enrichmentMapMap.get(net.getSUID());
EnrichmentMapParameters params = paramsMap.get(net.getSUID());
Map<String, String> props = params.getProps();
if (parts_exp.type != null && parts_exp.type.equalsIgnoreCase("expression")) {
if (map.getDataSets().containsKey(parts_exp.dataset)) {
EMDataSet ds = map.getDataSet(parts_exp.dataset);
ds.getDataSetFiles().setExpressionFileName(prop_file.getAbsolutePath());
ds.getExpressionSets().setFilename(prop_file.getAbsolutePath());
ExpressionFileReaderTask expressionFile1 = new ExpressionFileReaderTask(ds);
GeneExpressionMatrix matrix = expressionFile1.parse();
matrix.restoreProps(parts_exp.dataset, props);
}
}
//Deal with legacy session files.
if (prop_file.getName().contains("expression1.txt")) {
EMDataSet ds1 = map.getDataSet(LegacySupport.DATASET1);
ds1.getDataSetFiles().setExpressionFileName(prop_file.getAbsolutePath());
ds1.getExpressionSets().setFilename(prop_file.getAbsolutePath());
ExpressionFileReaderTask expressionFile1 = new ExpressionFileReaderTask(ds1);
expressionFile1.parse();
}
if (prop_file.getName().contains("expression2.txt")) {
EMDataSet ds2 = map.getDataSet(LegacySupport.DATASET2);
ds2.getDataSetFiles().setExpressionFileName(prop_file.getAbsolutePath());
ds2.getExpressionSets().setFilename(prop_file.getAbsolutePath());
ExpressionFileReaderTask expressionFile2 = new ExpressionFileReaderTask(ds2);
expressionFile2.parse();
//check whether we are dealing with two distinct expression files.
if (map.getDataSet(LegacySupport.DATASET2) != null && map.getDataSet(LegacySupport.DATASET2).getGeneSetsOfInterest() != null && !map.getDataSet(LegacySupport.DATASET2).getGeneSetsOfInterest().getGeneSets().isEmpty()) {
map.setDistinctExpressionSets(true);
map.getDataSet(LegacySupport.DATASET1).setDataSetGenes(new HashSet<Integer>((Set<Integer>) map.getDataSet(LegacySupport.DATASET1).getExpressionSets().getGeneIds()));
map.getDataSet(LegacySupport.DATASET2).setDataSetGenes(new HashSet<Integer>((Set<Integer>) map.getDataSet(LegacySupport.DATASET2).getExpressionSets().getGeneIds()));
}
}
}
//iterate over the networks
for (Iterator<Long> j = enrichmentMapMap.keySet().iterator(); j.hasNext(); ) {
Long id = j.next();
EnrichmentMap map = enrichmentMapMap.get(id);
//only initialize objects if there is a map for this network
if (map != null) {
if (map.getDataSets().size() > 1) {
Set<Integer> dataset1_genes = map.getDataSets().get(LegacySupport.DATASET1).getDataSetGenes();
Set<Integer> dataset2_genes = map.getDataSets().get(LegacySupport.DATASET2).getDataSetGenes();
if (!dataset1_genes.equals(dataset2_genes))
map.setDistinctExpressionSets(true);
}
//initialize the Genesets (makes sure the leading edge is set correctly)
//Initialize the set of genesets and GSEA results that we want to compute over
InitializeGenesetsOfInterestTask genesets_init = new InitializeGenesetsOfInterestTask(map);
// MKTODO really?
genesets_init.setThrowIfMissing(false);
genesets_init.initializeSets(null);
// //for each map compute the similarity matrix, (easier than storing it) compute the geneset similarities
// ComputeSimilarityTask similarities = new ComputeSimilarityTask(map, ComputeSimilarityTask.ENRICHMENT);
// Map<String, GenesetSimilarity> similarity_results = similarities.computeGenesetSimilarities(null);
// map.setGenesetSimilarity(similarity_results);
//
// // also compute geneset similarities between Enrichment- and Signature Genesets (if any)
// if (! map.getSignatureGenesets().isEmpty()){
// ComputeSimilarityTask sigSimilarities = new ComputeSimilarityTask(map, ComputeSimilarityTask.SIGNATURE);
// Map<String, GenesetSimilarity> sig_similarity_results = sigSimilarities.computeGenesetSimilarities(null);
// map.getGenesetSimilarity().putAll(sig_similarity_results);
// }
}
//end of if(map != null)
}
for (Iterator<Long> j = enrichmentMapMap.keySet().iterator(); j.hasNext(); ) {
Long id = j.next();
CyNetwork currentNetwork = cyNetworkManager.getNetwork(id);
EnrichmentMap map = enrichmentMapMap.get(id);
map.setLegacy(true);
emManager.registerEnrichmentMap(map);
if (!j.hasNext()) {
//set the last network to be the one viewed and initialize the parameters panel
cyApplicationManager.setCurrentNetwork(currentNetwork);
}
}
} catch (Exception ee) {
ee.printStackTrace();
}
}
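Each saved file is slurped into a single String with the Scanner "\\A" delimiter idiom before being handed to repopulateHashmap. Below is a minimal standalone sketch of that idiom, with a hasNext() guard added because next() would throw NoSuchElementException on an empty stream; the readFully helper name is ours and is not part of the EnrichmentMap API.
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Scanner;

public class ReadFullyExample {

    // Read an entire InputStream into one String using Scanner's "\\A" (beginning-of-input) delimiter.
    static String readFully(InputStream in) {
        try (Scanner scanner = new Scanner(in, "UTF-8").useDelimiter("\\A")) {
            // next() throws NoSuchElementException on an empty stream, so guard with hasNext()
            return scanner.hasNext() ? scanner.next() : "";
        }
    }

    public static void main(String[] args) throws IOException {
        try (InputStream in = Files.newInputStream(Paths.get(args[0]))) {
            System.out.println(readFully(in).length() + " characters read");
        }
    }
}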
use of org.baderlab.csplugins.enrichmentmap.model.GeneExpressionMatrix in project EnrichmentMapApp by BaderLab.
the class LegacySessionLoadTest method test_1_LoadedLegacyData.
@Test
@SessionFile("em_session_2.2.cys")
public void test_1_LoadedLegacyData() throws Exception {
EnrichmentMap map = getEnrichmentMap();
assertEquals("EM1_Enrichment Map", map.getName());
CyNetwork network = networkManager.getNetwork(map.getNetworkID());
assertNotNull(network);
assertEquals(1, map.getDataSetCount());
assertEquals(14067, map.getNumberOfGenes());
assertEquals(14067, map.getAllGenes().size());
// Number of edges: 3339 - that's how many geneset similarity objects there should be!!!
CyTable edgeTable = network.getDefaultEdgeTable();
assertEquals(3339, edgeTable.getRowCount());
EMCreationParameters params = map.getParams();
String prefix = params.getAttributePrefix();
assertEquals("EM1_", prefix);
assertEquals(0.5, params.getCombinedConstant(), 0.0);
assertFalse(params.isEMgmt());
assertEquals("Geneset_Overlap", params.getEnrichmentEdgeType());
assertTrue(params.isFDR());
assertEquals(GreatFilter.HYPER, params.getGreatFilter());
assertEquals(0.005, params.getPvalue(), 0.0);
assertEquals(1.0, params.getPvalueMin(), 0.0);
assertEquals(0.1, params.getQvalue(), 0.0);
assertEquals(1.0, params.getQvalueMin(), 0.0);
assertEquals(0.5, params.getSimilarityCutoff(), 0.0);
assertEquals(SimilarityMetric.OVERLAP, params.getSimilarityMetric());
// assertFalse(params.isDistinctExpressionSets());
String geneset1 = "RESOLUTION OF SISTER CHROMATID COHESION%REACTOME%REACT_150425.2";
String geneset2 = "CHROMOSOME, CENTROMERIC REGION%GO%GO:0000775";
Collection<CyRow> rows = edgeTable.getMatchingRows(CyNetwork.NAME, geneset1 + " (Geneset_Overlap) " + geneset2);
assertEquals(1, rows.size());
CyRow row = rows.iterator().next();
assertEquals("Geneset_Overlap", row.get(CyEdge.INTERACTION, String.class));
assertEquals(0.6097560975609756, EMStyleBuilder.Columns.EDGE_SIMILARITY_COEFF.get(row, prefix), 0.0);
EMDataSet dataset = map.getDataSet("Dataset 1");
assertNotNull(dataset);
assertSame(map, dataset.getMap());
assertEquals(Method.GSEA, dataset.getMethod());
assertEquals(12653, dataset.getDataSetGenes().size());
assertEquals(389, dataset.getGeneSetsOfInterest().getGeneSets().size());
// assertEquals(17259, dataset.getSetofgenesets().getGenesets().size()); // MKTODO why? what is this used for
assertEndsWith(dataset.getSetOfGeneSets().getFilename(), "Human_GO_AllPathways_no_GO_iea_April_15_2013_symbol.gmt");
for (long suid : dataset.getNodeSuids()) {
assertNotNull(network.getNode(suid));
}
GeneSet geneset = dataset.getGeneSetsOfInterest().getGeneSets().get("NCRNA PROCESSING%GO%GO:0034470");
assertEquals(88, geneset.getGenes().size());
assertEquals("NCRNA PROCESSING%GO%GO:0034470", geneset.getName());
assertEquals("ncRNA processing", geneset.getDescription());
assertEquals(Optional.of("GO"), geneset.getSource());
SetOfEnrichmentResults enrichments = dataset.getEnrichments();
assertEquals(4756, enrichments.getEnrichments().size());
assertEndsWith(enrichments.getFilename1(), "gsea_report_for_ES12_1473194913081.xls");
assertEndsWith(enrichments.getFilename2(), "gsea_report_for_NT12_1473194913081.xls");
assertEquals("ES12", enrichments.getPhenotype1());
assertEquals("NT12", enrichments.getPhenotype2());
EnrichmentResult result = enrichments.getEnrichments().get("RIBONUCLEOSIDE TRIPHOSPHATE BIOSYNTHETIC PROCESS%GO%GO:0009201");
assertTrue(result instanceof GSEAResult);
GSEAResult gseaResult = (GSEAResult) result;
assertEquals("RIBONUCLEOSIDE TRIPHOSPHATE BIOSYNTHETIC PROCESS%GO%GO:0009201", gseaResult.getName());
assertEquals(0.42844063, gseaResult.getES(), 0.0);
assertEquals(0.45225498, gseaResult.getFdrqvalue(), 0.0);
assertEquals(1.0, gseaResult.getFwerqvalue(), 0.0);
assertEquals(23, gseaResult.getGsSize());
assertEquals(1.1938541, gseaResult.getNES(), 0.0);
assertEquals(0.2457786, gseaResult.getPvalue(), 0.0);
assertEquals(4689, gseaResult.getRankAtMax());
assertEquals(Optional.of("GO"), gseaResult.getSource());
GeneExpressionMatrix expressions = dataset.getExpressionSets();
assertEquals(20326, expressions.getExpressionUniverse());
assertEquals(3.686190609, expressions.getClosesttoZero(), 0.0);
// assertEndsWith(expressions.getFilename(), "MCF7_ExprMx_v2_names.gct");
assertEquals(15380.42388, expressions.getMaxExpression(), 0.0);
assertEquals(3.686190609, expressions.getMinExpression(), 0.0);
assertEquals(20, expressions.getNumConditions());
assertEquals(12653, expressions.getExpressionMatrix().size());
assertEquals(12653, expressions.getExpressionMatrix_rowNormalized().size());
GeneExpression expression = expressions.getExpressionMatrix().get(0);
assertEquals("MOCOS", expression.getName());
assertEquals("MOCOS (molybdenum cofactor sulfurase)", expression.getDescription());
assertEquals(18, expression.getExpression().length);
Ranking ranking = expressions.getRanks().get("GSEARanking");
assertEquals(12653, ranking.getAllRanks().size());
assertEquals(12653, ranking.getRanking().size());
Rank rank = ranking.getRanking().get(0);
assertEquals("MOCOS", rank.getName());
assertEquals(1238, rank.getRank().intValue());
assertEquals(0.54488367, rank.getScore(), 0.0);
DataSetFiles files = dataset.getDataSetFiles();
assertEndsWith(files.getClassFile(), "ES_NT.cls");
assertEndsWith(files.getEnrichmentFileName1(), "gsea_report_for_ES12_1473194913081.xls");
assertEndsWith(files.getEnrichmentFileName2(), "gsea_report_for_NT12_1473194913081.xls");
// assertEndsWith(files.getExpressionFileName(), "MCF7_ExprMx_v2_names.gct");
assertEndsWith(files.getGMTFileName(), "Human_GO_AllPathways_no_GO_iea_April_15_2013_symbol.gmt");
assertEndsWith(files.getGseaHtmlReportFile(), "estrogen_treatment_12hr_gsea_enrichment_results.Gsea.1473194913081/index.html");
assertEndsWith(files.getRankedFile(), "ranked_gene_list_ES12_versus_NT12_1473194913081.xls");
assertEquals("ES12", files.getPhenotype1());
assertEquals("NT12", files.getPhenotype2());
}
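assertEndsWith is not a JUnit assertion; it is a helper from the test suite. A plausible sketch of such a helper is shown here only for context, and the project's real implementation may differ.
import static org.junit.Assert.assertTrue;

// Sketch of a suffix assertion: fails with a readable message when the actual path
// does not end with the expected file name.
private static void assertEndsWith(String actual, String expectedSuffix) {
    assertTrue("expected <" + actual + "> to end with <" + expectedSuffix + ">",
            actual != null && actual.endsWith(expectedSuffix));
}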
use of org.baderlab.csplugins.enrichmentmap.model.GeneExpressionMatrix in project EnrichmentMapApp by BaderLab.
the class PostAnalysisWeightPanel method updateUniverseSize.
private void updateUniverseSize(String dataset) {
if (dataset == null) {
gmtRadioButton.setText("GMT");
expressionSetRadioButton.setText("Expression Set");
intersectionRadioButton.setText("Intersection");
universeSelectionTextField.setValue(0);
} else {
GeneExpressionMatrix expressionSets = map.getDataSet(dataset).getExpressionSets();
int universeGmt = map.getNumberOfGenes();
int universeExpression = expressionSets.getExpressionUniverse();
int universeIntersection = expressionSets.getExpressionMatrix().size();
gmtRadioButton.setText("GMT (" + universeGmt + ")");
expressionSetRadioButton.setText("Expression Set (" + universeExpression + ")");
intersectionRadioButton.setText("Intersection (" + universeIntersection + ")");
universeSelectionTextField.setValue(universeExpression);
}
}
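The three counts shown in the labels are the candidate universe sizes offered to the user. A hedged sketch of how a caller could resolve the selected universe from the same three quantities follows, assuming the EnrichmentMap model classes are on the classpath; the UniverseType enum and chooseUniverse helper are illustrative, not the plugin's API.
import org.baderlab.csplugins.enrichmentmap.model.GeneExpressionMatrix;

enum UniverseType { GMT, EXPRESSION_SET, INTERSECTION, USER_DEFINED }

final class UniverseSizeSketch {

    // Pick the universe size that matches the selected radio button.
    static int chooseUniverse(UniverseType type, int numMapGenes, GeneExpressionMatrix expr, int userValue) {
        switch (type) {
            case GMT:            return numMapGenes;                       // all genes known to the map (GMT universe)
            case EXPRESSION_SET: return expr.getExpressionUniverse();      // declared expression universe
            case INTERSECTION:   return expr.getExpressionMatrix().size(); // genes that actually have an expression row
            default:             return userValue;                         // value typed into the text field
        }
    }
}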
use of org.baderlab.csplugins.enrichmentmap.model.GeneExpressionMatrix in project EnrichmentMapApp by BaderLab.
the class CreateDummyExpressionTask method createDummyExpression.
//Create a dummy expression file so that when no expression files are loaded you can still
//use the intersect and union viewers.
private void createDummyExpression() {
//in order to see the genes in the expression viewer we also need a dummy expression file
//get all the genes
//HashMap<String, Integer> genes= dataset.getMap().getGenes();
Set<Integer> datasetGenes;
Map<String, Integer> genes = dataset.getMap().getGeneSetsGenes(dataset.getSetOfGeneSets().getGeneSets().values());
datasetGenes = dataset.getDataSetGenes();
String[] titletokens = { "Name", "Description", "Dummy" };
GeneExpressionMatrix expressionMatrix = dataset.getExpressionSets();
expressionMatrix.setColumnNames(titletokens);
Map<Integer, GeneExpression> expression = expressionMatrix.getExpressionMatrix();
expressionMatrix.setExpressionMatrix(expression);
String[] tokens = { "tmp", "tmp", "0.25" };
for (String currentGene : genes.keySet()) {
int genekey = genes.get(currentGene);
if (datasetGenes != null)
datasetGenes.add(genekey);
GeneExpression expres = new GeneExpression(currentGene, currentGene);
expres.setExpression(tokens);
double newMax = expres.newMax(expressionMatrix.getMaxExpression());
if (newMax != -100)
expressionMatrix.setMaxExpression(newMax);
double newMin = expres.newMin(expressionMatrix.getMinExpression());
if (newMin != -100)
expressionMatrix.setMinExpression(newMin);
double newClosest = expres.newclosesttoZero(expressionMatrix.getClosesttoZero());
if (newClosest != -100)
expressionMatrix.setClosesttoZero(newClosest);
expression.put(genekey, expres);
}
//set the number of genes
//expressionMatrix.setNumGenes(expressionMatrix.getExpressionMatrix().size());
expressionMatrix.setNumConditions(3);
expressionMatrix.setFilename("Dummy Expression_" + dataset.getName().toString());
dataset.setDummyExpressionData(true);
}
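The resulting dummy matrix has the three columns {Name, Description, Dummy} and one constant placeholder value per gene. A small standalone sketch of that shape, including the running max/min/closest-to-zero bookkeeping, is shown below; it is independent of the plugin classes and all names in it are illustrative.
import java.util.LinkedHashMap;
import java.util.Map;

public class DummyExpressionSketch {
    public static void main(String[] args) {
        String[] columnNames = { "Name", "Description", "Dummy" };
        double placeholder = 0.25; // the constant "expression" value used for every gene

        // hypothetical gene-name -> hash-key mapping, standing in for the genes of the data set
        Map<String, Integer> genes = new LinkedHashMap<>();
        genes.put("TP53", 1);
        genes.put("BRCA1", 2);

        Map<Integer, double[]> matrix = new LinkedHashMap<>();
        double max = Double.NEGATIVE_INFINITY, min = Double.POSITIVE_INFINITY, closestToZero = Double.POSITIVE_INFINITY;
        for (Map.Entry<String, Integer> e : genes.entrySet()) {
            matrix.put(e.getValue(), new double[] { placeholder });
            max = Math.max(max, placeholder);
            min = Math.min(min, placeholder);
            closestToZero = Math.min(closestToZero, Math.abs(placeholder));
        }
        System.out.printf("%d rows, %d columns, max=%s, min=%s, closestToZero=%s%n",
                matrix.size(), columnNames.length, max, min, closestToZero);
    }
}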
use of org.baderlab.csplugins.enrichmentmap.model.GeneExpressionMatrix in project EnrichmentMapApp by BaderLab.
the class HeatMapTableModel method getGeneExpression.
private static GeneExpression getGeneExpression(EMDataSet dataset, int geneID) {
GeneExpressionMatrix matrix = dataset.getExpressionSets();
Map<Integer, GeneExpression> expressions = matrix.getExpressionMatrix();
GeneExpression row = expressions.get(geneID);
return row;
}
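Because getExpressionMatrix() is a plain Map, the lookup returns null for a gene ID that has no expression row, and callers must handle that. A minimal null-safe wrapper over the same lookup is sketched below; the helper name is ours and is shown only for illustration.
// Illustrative helper: number of measurements stored for a gene, or 0 if the gene
// has no row in the expression matrix of this data set.
private static int countConditionsFor(EMDataSet dataset, int geneID) {
    GeneExpression row = getGeneExpression(dataset, geneID);
    return row == null ? 0 : row.getExpression().length;
}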