Use of edu.uci.ics.texera.storage.DataWriter in project textdb by TextDB: class ScanBasedSourceOperatorTest, method setUp.
@BeforeClass
public static void setUp() throws TexeraException {
    RelationManager relationManager = RelationManager.getInstance();
    // create the people table and write tuples
    relationManager.createTable(PEOPLE_TABLE, TestUtils.getDefaultTestIndex().resolve(PEOPLE_TABLE),
            TestConstants.SCHEMA_PEOPLE, LuceneAnalyzerConstants.standardAnalyzerString());
    DataWriter peopleDataWriter = relationManager.getTableDataWriter(PEOPLE_TABLE);
    peopleDataWriter.open();
    for (Tuple tuple : TestConstants.getSamplePeopleTuples()) {
        peopleDataWriter.insertTuple(tuple);
    }
    peopleDataWriter.close();
}
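For symmetry, such a test class usually drops the table once all cases have run. A minimal teardown sketch, assuming the same PEOPLE_TABLE constant; the method name cleanUp is hypothetical, and deleteTable is the RelationManager call shown in the SampleExtraction snippet further below.

@AfterClass
public static void cleanUp() throws TexeraException {
    // remove the people table created in setUp
    RelationManager.getInstance().deleteTable(PEOPLE_TABLE);
}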
Use of edu.uci.ics.texera.storage.DataWriter in project textdb by TextDB: class KeywordTestHelper, method writeTestTables.
public static void writeTestTables() throws TexeraException {
    RelationManager relationManager = RelationManager.getInstance();
    // create the people table and write tuples
    relationManager.createTable(PEOPLE_TABLE, TestUtils.getDefaultTestIndex().resolve(PEOPLE_TABLE),
            TestConstants.SCHEMA_PEOPLE, LuceneAnalyzerConstants.standardAnalyzerString());
    DataWriter peopleDataWriter = relationManager.getTableDataWriter(PEOPLE_TABLE);
    peopleDataWriter.open();
    for (Tuple tuple : TestConstants.getSamplePeopleTuples()) {
        peopleDataWriter.insertTuple(tuple);
    }
    peopleDataWriter.close();
    // create the medline table and write tuples
    relationManager.createTable(MEDLINE_TABLE, TestUtils.getDefaultTestIndex().resolve(MEDLINE_TABLE),
            keywordTestConstants.SCHEMA_MEDLINE, LuceneAnalyzerConstants.standardAnalyzerString());
    DataWriter medDataWriter = relationManager.getTableDataWriter(MEDLINE_TABLE);
    medDataWriter.open();
    for (Tuple tuple : keywordTestConstants.getSampleMedlineRecord()) {
        medDataWriter.insertTuple(tuple);
    }
    medDataWriter.close();
    // create the people table and write tuples in Chinese
    relationManager.createTable(CHINESE_TABLE, TestUtils.getDefaultTestIndex().resolve(CHINESE_TABLE),
            TestConstantsChinese.SCHEMA_PEOPLE, LuceneAnalyzerConstants.chineseAnalyzerString());
    DataWriter chineseDataWriter = relationManager.getTableDataWriter(CHINESE_TABLE);
    chineseDataWriter.open();
    for (Tuple tuple : TestConstantsChinese.getSamplePeopleTuples()) {
        chineseDataWriter.insertTuple(tuple);
    }
    chineseDataWriter.close();
}
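A write helper like this is normally paired with a cleanup that empties the same three tables. A hedged sketch reusing the clearData idiom from the NlpEntityTest snippet below; the method name deleteTestTables is an assumption.

public static void deleteTestTables() throws TexeraException {
    RelationManager relationManager = RelationManager.getInstance();
    // clear all tuples from each test table (clearData, as used in NlpEntityTest below)
    for (String table : new String[] { PEOPLE_TABLE, MEDLINE_TABLE, CHINESE_TABLE }) {
        DataWriter dataWriter = relationManager.getTableDataWriter(table);
        dataWriter.open();
        dataWriter.clearData();
        dataWriter.close();
    }
}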
Use of edu.uci.ics.texera.storage.DataWriter in project textdb by TextDB: class NlpEntityTest, method deleteData.
// table is cleared after each test case
@After
public void deleteData() throws TexeraException {
    RelationManager relationManager = RelationManager.getInstance();
    DataWriter oneSentenceDataWriter = relationManager.getTableDataWriter(ONE_SENTENCE_TABLE);
    oneSentenceDataWriter.open();
    oneSentenceDataWriter.clearData();
    oneSentenceDataWriter.close();
    DataWriter twoSentenceDataWriter = relationManager.getTableDataWriter(TWO_SENTENCE_TABLE);
    twoSentenceDataWriter.open();
    twoSentenceDataWriter.clearData();
    twoSentenceDataWriter.close();
}
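The matching setup would insert sentence tuples into the same two tables before each test. A minimal sketch only, assuming hypothetical tuple lists oneSentenceTuples and twoSentenceTuples and that the tables were already created elsewhere (for example in a @BeforeClass method).

@Before
public void writeData() throws TexeraException {
    RelationManager relationManager = RelationManager.getInstance();
    // write sample tuples into the one-sentence table (tuple lists are placeholders)
    DataWriter oneSentenceDataWriter = relationManager.getTableDataWriter(ONE_SENTENCE_TABLE);
    oneSentenceDataWriter.open();
    for (Tuple tuple : oneSentenceTuples) {
        oneSentenceDataWriter.insertTuple(tuple);
    }
    oneSentenceDataWriter.close();
    // write sample tuples into the two-sentence table
    DataWriter twoSentenceDataWriter = relationManager.getTableDataWriter(TWO_SENTENCE_TABLE);
    twoSentenceDataWriter.open();
    for (Tuple tuple : twoSentenceTuples) {
        twoSentenceDataWriter.insertTuple(tuple);
    }
    twoSentenceDataWriter.close();
}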
Use of edu.uci.ics.texera.storage.DataWriter in project textdb by TextDB: class MedlineIndexWriter, method writeMedlineIndex.
public static void writeMedlineIndex(Path medlineFilepath, String tableName) throws IOException, StorageException, ParseException {
    RelationManager relationManager = RelationManager.getInstance();
    DataWriter dataWriter = relationManager.getTableDataWriter(tableName);
    dataWriter.open();
    BufferedReader reader = Files.newBufferedReader(medlineFilepath);
    String line;
    while ((line = reader.readLine()) != null) {
        try {
            dataWriter.insertTuple(recordToTuple(line));
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    reader.close();
    dataWriter.close();
}
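writeMedlineIndex assumes the destination table already exists, so a caller would create it first and then stream the file in. A hedged usage sketch; the wrapper method name buildMedlineTable, the table name, the index path, and the sample file path are placeholders, while the schema constant is the keywordTestConstants.SCHEMA_MEDLINE used above.

public static void buildMedlineTable() throws Exception {
    RelationManager relationManager = RelationManager.getInstance();
    // create the destination table before writing (table name and paths are placeholders)
    relationManager.createTable("medline", Paths.get("./index/medline"),
            keywordTestConstants.SCHEMA_MEDLINE, LuceneAnalyzerConstants.standardAnalyzerString());
    // parse each line of the sample file into a tuple and insert it into the table
    MedlineIndexWriter.writeMedlineIndex(Paths.get("./sample-files/medline.sample"), "medline");
}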
Use of edu.uci.ics.texera.storage.DataWriter in project textdb by TextDB: class SampleExtraction, method writeSampleIndex.
public static void writeSampleIndex() throws Exception {
    // parse the original file
    File sourceFileFolder = new File(promedFilesDirectory);
    ArrayList<Tuple> fileTuples = new ArrayList<>();
    for (File htmlFile : sourceFileFolder.listFiles()) {
        StringBuilder sb = new StringBuilder();
        Scanner scanner = new Scanner(htmlFile);
        while (scanner.hasNext()) {
            sb.append(scanner.nextLine());
        }
        scanner.close();
        Tuple tuple = parsePromedHTML(htmlFile.getName(), sb.toString());
        if (tuple != null) {
            fileTuples.add(tuple);
        }
    }
    // write tuples into the table
    RelationManager relationManager = RelationManager.getInstance();
    relationManager.deleteTable(PROMED_SAMPLE_TABLE);
    relationManager.createTable(PROMED_SAMPLE_TABLE, Paths.get(promedIndexDirectory),
            PromedSchema.PROMED_SCHEMA, LuceneAnalyzerConstants.standardAnalyzerString());
    DataWriter dataWriter = relationManager.getTableDataWriter(PROMED_SAMPLE_TABLE);
    dataWriter.open();
    for (Tuple tuple : fileTuples) {
        dataWriter.insertTuple(tuple);
    }
    dataWriter.close();
}
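Since insertTuple can throw, the open/close pair around the write loop can be wrapped in try/finally so the writer is always released. A hedged variant of just the write step, under the same PROMED_SAMPLE_TABLE and fileTuples as above.

DataWriter dataWriter = relationManager.getTableDataWriter(PROMED_SAMPLE_TABLE);
dataWriter.open();
try {
    for (Tuple tuple : fileTuples) {
        dataWriter.insertTuple(tuple);
    }
} finally {
    // close the writer even if an insert fails
    dataWriter.close();
}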