Use of org.apache.flink.shaded.guava30.com.google.common.collect.Maps in project SONG by overture-stack.
In the class ExportServiceTest, method generateData:
/**
 * Generate {@code numStudies} studies and, for each study, generate {@code numAnalysesPerStudy} analyses;
 * put everything in a map where the keys are studyIds and the values are all the analyses for that study.
 */
private Map<String, List<? extends Analysis>> generateData(
    Class<? extends Analysis> analysisClass,
    int numStudies,
    int numAnalysesPerStudy,
    boolean includeAnalysisId,
    boolean includeOtherIds) {
  val studyGenerator = createStudyGenerator(studyService, randomGenerator);
  val map = Maps.<String, List<? extends Analysis>>newHashMap();
  for (int s = 0; s < numStudies; s++) {
    val studyId = studyGenerator.createRandomStudy();
    val analysisGenerator = createAnalysisGenerator(studyId, analysisService, randomGenerator);
    val analyses = range(0, numAnalysesPerStudy)
        .boxed()
        .map(x -> analysisGenerator.createDefaultRandomAnalysis(analysisClass))
        .peek(x -> massageAnalysisInplace(x, includeAnalysisId, includeOtherIds))
        .collect(toImmutableList());
    map.put(studyId, analyses);
  }
  return ImmutableMap.copyOf(map);
}
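The explicit type witness in Maps.<String, List<? extends Analysis>>newHashMap() is the detail worth noting: with Lombok's val on the left-hand side there is no assignment target to drive inference, so the key and value types must be pinned at the call site. A minimal standalone sketch of the same pattern, using plain com.google.common.collect imports (the flink-shaded package merely relocates the same API) and Number in place of the project's Analysis type:

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class TypeWitnessDemo {
  public static void main(String[] args) {
    // Without the witness, inference against val (or a bare local) would not
    // produce the wildcard-bearing value type List<? extends Number>.
    HashMap<String, List<? extends Number>> map =
        Maps.<String, List<? extends Number>>newHashMap();
    map.put("ints", List.of(1, 2, 3));
    map.put("doubles", List.of(1.0, 2.0));
    // Defensive immutable copy, mirroring generateData's return statement.
    Map<String, List<? extends Number>> frozen = ImmutableMap.copyOf(map);
    System.out.println(frozen);
  }
}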
Use of org.apache.flink.shaded.guava30.com.google.common.collect.Maps in project SONG by overture-stack.
In the class ExportServiceTest, method runExportTest:
public void runExportTest(Class<? extends Analysis> analysisClass, int numStudies, int numAnalysesPerStudy) {
  val includeAnalysisId = true;
  val includeOtherIds = false;

  // Check config
  assertCorrectConfig(numStudies, numAnalysesPerStudy);

  // Generate data
  val expectedData = generateData(analysisClass, numStudies, numAnalysesPerStudy, includeAnalysisId, includeOtherIds);

  // Process StudyMode Data
  val actualStudyModeExportedPayloads = expectedData.keySet().stream()
      .map(s -> exportService.exportPayloadsForStudy(s, includeAnalysisId, includeOtherIds))
      .flatMap(Collection::stream)
      .collect(toImmutableList());
  assertThat(actualStudyModeExportedPayloads).hasSize(numStudies);
  val actualStudyModeData = Maps.<String, List<? extends Analysis>>newHashMap();
  for (val exportedPayload : actualStudyModeExportedPayloads) {
    val studyId = exportedPayload.getStudyId();
    val analyses = exportedPayload.getPayloads().stream()
        .map(x -> fromJson(x, Analysis.class))
        .collect(toImmutableList());
    actualStudyModeData.put(studyId, analyses);
  }

  // Process AnalysisMode Data
  val expectedAnalysisIds = expectedData.values().stream()
      .flatMap(Collection::stream)
      .map(Analysis::getAnalysisId)
      .collect(toImmutableList());
  val actualAnalysisModeExportedPayloads = exportService.exportPayload(expectedAnalysisIds, includeAnalysisId, includeOtherIds);
  assertThat(actualAnalysisModeExportedPayloads).hasSize(numStudies);
  val actualAnalysisModeData = Maps.<String, List<? extends Analysis>>newHashMap();
  for (val exportedPayload : actualAnalysisModeExportedPayloads) {
    val studyId = exportedPayload.getStudyId();
    val analyses = exportedPayload.getPayloads().stream()
        .map(x -> fromJson(x, Analysis.class))
        .collect(toImmutableList());
    actualAnalysisModeData.put(studyId, analyses);
  }

  assertMatchingData(actualAnalysisModeData, expectedData);
  assertMatchingData(actualStudyModeData, expectedData);
}
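Both loops above regroup exported payloads by studyId into a fresh Maps.newHashMap(); the same regrouping can be written as a single collector. A minimal sketch, with a hypothetical Payload record standing in for the test's ExportedPayload type:

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class RegroupDemo {
  // Hypothetical stand-in for the exported payload type used in runExportTest.
  record Payload(String studyId, String json) {}

  public static void main(String[] args) {
    List<Payload> payloads = List.of(
        new Payload("STUDY-A", "{\"analysisId\":\"a1\"}"),
        new Payload("STUDY-A", "{\"analysisId\":\"a2\"}"),
        new Payload("STUDY-B", "{\"analysisId\":\"b1\"}"));
    // groupingBy + mapping replaces the explicit for-loop and put calls.
    Map<String, List<String>> byStudy = payloads.stream()
        .collect(Collectors.groupingBy(
            Payload::studyId,
            Collectors.mapping(Payload::json, Collectors.toList())));
    System.out.println(byStudy);
  }
}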
Use of org.apache.flink.shaded.guava30.com.google.common.collect.Maps in project SONG by overture-stack.
In the class StudyWithDonorsServiceTest, method testReadWithChildren:
@Test
public void testReadWithChildren() {
  // Create a random isolated study
  val studyId = studyGenerator.createRandomStudy();

  // Generate random SequencingRead analyses
  val analysisGenerator = createAnalysisGenerator(studyId, analysisService, randomGenerator);
  val numAnalysis = 11;
  val analysisMap = Maps.<String, SequencingReadAnalysis>newHashMap();
  for (int i = 0; i < numAnalysis; i++) {
    val sequencingReadAnalysis = analysisGenerator.createDefaultRandomSequencingReadAnalysis();
    analysisMap.put(sequencingReadAnalysis.getAnalysisId(), sequencingReadAnalysis);
  }

  // Extract expected donors and verify
  val expectedDonors = analysisMap.values().stream()
      .flatMap(x -> x.getSample().stream())
      .map(CompositeEntity::getDonor)
      .collect(toSet());
  assertThat(expectedDonors).hasSize(numAnalysis);
  assertThat(expectedDonors.stream().map(Donor::getDonorSubmitterId).distinct().count())
      .isEqualTo(numAnalysis);
  assertThat(expectedDonors.stream().filter(x -> x.getStudyId().equals(studyId)).count())
      .isEqualTo(numAnalysis);

  // Extract expected specimens and verify
  val expectedSpecimens = analysisMap.values().stream()
      .flatMap(x -> x.getSample().stream())
      .map(CompositeEntity::getSpecimen)
      .collect(toSet());
  assertThat(expectedSpecimens).hasSize(numAnalysis);
  assertThat(expectedSpecimens.stream().map(Specimen::getSpecimenSubmitterId).distinct().count())
      .isEqualTo(numAnalysis);

  // Extract expected samples and verify
  val expectedSamples = analysisMap.values().stream()
      .flatMap(x -> x.getSample().stream())
      .collect(toSet());
  val expectedSampleSubmitterIds = expectedSamples.stream()
      .map(Sample::getSampleSubmitterId)
      .collect(toSet());
  assertThat(expectedSamples).hasSize(numAnalysis);
  assertThat(expectedSampleSubmitterIds).hasSize(numAnalysis);

  // Run the method under test, readWithChildren
  val studyWithDonors = studyWithDonorsService.readWithChildren(studyId);

  // Extract actual donors
  val actualDonors = studyWithDonors.getDonors().stream()
      .map(DonorWithSpecimens::createDonor)
      .collect(toSet());
  // Extract actual specimens
  val actualSpecimens = studyWithDonors.getDonors().stream()
      .map(DonorWithSpecimens::getSpecimens)
      .flatMap(Collection::stream)
      .map(SpecimenWithSamples::getSpecimen)
      .collect(toSet());
  // Extract actual samples
  val actualSamples = studyWithDonors.getDonors().stream()
      .map(DonorWithSpecimens::getSpecimens)
      .flatMap(Collection::stream)
      .map(SpecimenWithSamples::getSamples)
      .flatMap(Collection::stream)
      .collect(toSet());
  val actualSampleSubmitterIds = actualSamples.stream()
      .map(Sample::getSampleSubmitterId)
      .collect(toSet());

  // Verify expected and actual donors match
  assertSetsMatch(expectedDonors, actualDonors);
  // Verify expected and actual specimens match
  assertSetsMatch(expectedSpecimens, actualSpecimens);
  // Verify expected and actual sampleSubmitterIds match
  assertSetsMatch(expectedSampleSubmitterIds, actualSampleSubmitterIds);
}
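The actual-donor/specimen/sample extractions all walk the same three-level parent-child tree, flattening one level per flatMap. A minimal sketch of that traversal shape, with hypothetical record types in place of DonorWithSpecimens and SpecimenWithSamples:

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

public class FlattenDemo {
  // Hypothetical stand-ins for the study's child entities.
  record Sample(String submitterId) {}
  record Specimen(String id, List<Sample> samples) {}
  record Donor(String id, List<Specimen> specimens) {}

  public static void main(String[] args) {
    List<Donor> donors = List.of(
        new Donor("d1", List.of(new Specimen("sp1", List.of(new Sample("sa1"))))),
        new Donor("d2", List.of(new Specimen("sp2",
            List.of(new Sample("sa2"), new Sample("sa3"))))));
    // Same shape as the actualSamples extraction: donors -> specimens -> samples.
    Set<String> sampleIds = donors.stream()
        .map(Donor::specimens).flatMap(List::stream)
        .map(Specimen::samples).flatMap(List::stream)
        .map(Sample::submitterId)
        .collect(Collectors.toSet());
    System.out.println(sampleIds); // sa1, sa2, sa3 in some order
  }
}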
Use of org.apache.flink.shaded.guava30.com.google.common.collect.Maps in project SpinalTap by airbnb.
In the class MysqlSchemaTrackerTest, method testCreateDatabase:
@Test
public void testCreateDatabase() throws Exception {
  Table<String, String, TreeMap<Integer, MysqlTableSchema>> allTableSchemaInStore =
      Tables.newCustomTable(Maps.newHashMap(), Maps::newHashMap);
  allTableSchemaInStore.put(DATABASE_NAME, "table1", new TreeMap<Integer, MysqlTableSchema>() {
    {
      put(1, TABLE1_SCHEMA);
    }
  });
  allTableSchemaInStore.put(DATABASE_NAME, "table2", new TreeMap<Integer, MysqlTableSchema>() {
    {
      put(1, TABLE2_SCHEMA);
    }
  });

  when(schemaDatabase.listDatabases()).thenReturn(Sets.newHashSet(DATABASE_NAME, DATABASE2_NAME));
  when(schemaStore.getAll()).thenReturn(allTableSchemaInStore);
  when(schemaDatabase.fetchTableSchema(DATABASE_NAME))
      .thenReturn(ImmutableMap.of("table1", TABLE1_SCHEMA, "table2", TABLE2_SCHEMA));
  when(schemaDatabase.fetchTableSchema(DATABASE2_NAME))
      .thenReturn(ImmutableMap.of("table1", DATABASE2_TABLE1_SCHEMA));

  QueryEvent queryEvent =
      new QueryEvent(0, 0, binlogFilePos, DATABASE2_NAME, "CREATE DATABASE `database2`");
  SchemaTracker schemaTracker = new MysqlSchemaTracker(schemaStore, schemaDatabase);
  schemaTracker.processDDLStatement(queryEvent);

  verify(schemaDatabase).applyDDLStatement("", queryEvent.getSql());
  verify(schemaStore).put(DATABASE2_NAME, "table1", queryEvent.getBinlogFilePos(),
      queryEvent.getTimestamp(), queryEvent.getSql(), DATABASE2_TABLE1_SCHEMA.getColumnInfo());
}
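Tables.newCustomTable is the interesting Guava call here: it assembles a Table<R, C, V> from a caller-supplied row map plus a supplier that creates a fresh column map per row, which is how the test gets a plain HashMap-backed table of per-table schema versions. A minimal standalone sketch of the same construction, with String standing in for MysqlTableSchema:

import com.google.common.collect.Maps;
import com.google.common.collect.Table;
import com.google.common.collect.Tables;
import java.util.TreeMap;

public class CustomTableDemo {
  public static void main(String[] args) {
    // Row map passed directly; Maps::newHashMap supplies a new column map
    // whenever a row key is first seen, as in testCreateDatabase.
    Table<String, String, TreeMap<Integer, String>> schemas =
        Tables.newCustomTable(Maps.newHashMap(), Maps::newHashMap);
    TreeMap<Integer, String> versions = new TreeMap<>();
    versions.put(1, "CREATE TABLE t1 (id INT)");
    schemas.put("db1", "t1", versions);
    // Cells are addressed by the (rowKey, columnKey) pair.
    System.out.println(schemas.get("db1", "t1").firstEntry());
  }
}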
Use of org.apache.flink.shaded.guava30.com.google.common.collect.Maps in project presto by prestodb.
In the class TestSelectiveOrcReader, method testArraysWithSubfieldPruning:
@Test
public void testArraysWithSubfieldPruning() throws Exception {
  tester.assertRoundTripWithSettings(
      arrayType(INTEGER),
      createList(NUM_ROWS, i -> ImmutableList.of(1, 2, 3, 4)),
      ImmutableList.of(
          OrcReaderSettings.builder().addRequiredSubfields(0, "c[1]").build(),
          OrcReaderSettings.builder().addRequiredSubfields(0, "c[1]", "c[2]").build(),
          OrcReaderSettings.builder().addRequiredSubfields(0, "c[2]").build()));

  Random random = new Random(0);
  tester.assertRoundTripWithSettings(
      arrayType(INTEGER),
      createList(NUM_ROWS, i -> ImmutableList.of(random.nextInt(10), random.nextInt(10), 3, 4)),
      ImmutableList.of(
          OrcReaderSettings.builder()
              .addRequiredSubfields(0, "c[1]", "c[3]")
              .setColumnFilters(ImmutableMap.of(0, ImmutableMap.of(new Subfield("c[1]"), BigintRange.of(0, 4, false))))
              .build(),
          OrcReaderSettings.builder()
              .addRequiredSubfields(0, "c[2]", "c[3]")
              .setColumnFilters(ImmutableMap.of(0, ImmutableMap.of(new Subfield("c[2]"), BigintRange.of(0, 4, false))))
              .build()));

  // arrays of arrays
  tester.assertRoundTripWithSettings(
      arrayType(arrayType(INTEGER)),
      createList(NUM_ROWS, i -> nCopies(1 + random.nextInt(5), ImmutableList.of(1, 2, 3))),
      ImmutableList.of(
          OrcReaderSettings.builder().addRequiredSubfields(0, "c[1][1]").build(),
          OrcReaderSettings.builder().addRequiredSubfields(0, "c[2][2]", "c[4][2]", "c[5][3]").build(),
          OrcReaderSettings.builder().addRequiredSubfields(0, "c[2][3]", "c[10][2]", "c[3][10]").build()));

  // arrays of maps
  tester.assertRoundTripWithSettings(
      arrayType(mapType(INTEGER, INTEGER)),
      createList(NUM_ROWS, i -> nCopies(5, ImmutableMap.of(1, 10, 2, 20))),
      ImmutableList.of(
          OrcReaderSettings.builder().addRequiredSubfields(0, "c[1][1]").build(),
          OrcReaderSettings.builder().addRequiredSubfields(0, "c[2][1]").build(),
          OrcReaderSettings.builder().addRequiredSubfields(0, "c[2][1]", "c[4][1]", "c[3][2]").build()));
}
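The setColumnFilters arguments nest ImmutableMap.of two levels deep: an outer map from column ordinal to an inner map from Subfield to filter. A minimal sketch of that shape, with plain strings standing in for Presto's Subfield and BigintRange types:

import com.google.common.collect.ImmutableMap;
import java.util.Map;

public class NestedFilterMapDemo {
  public static void main(String[] args) {
    // Outer key: column ordinal; inner key: subfield path; value: filter spec.
    // "range(0,4)" is a placeholder for BigintRange.of(0, 4, false).
    Map<Integer, Map<String, String>> columnFilters = ImmutableMap.of(
        0, ImmutableMap.of("c[1]", "range(0,4)"),
        1, ImmutableMap.of("c[2]", "range(5,9)"));
    System.out.println(columnFilters.get(0).get("c[1]")); // range(0,4)
  }
}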