Usage of com.hazelcast.jet.hadoop.file.generated.SpecificUser in the hazelcast/hazelcast project — class ParquetFileFormatTest, method shouldReadWithProjection:
@Test
public void shouldReadWithProjection() throws Exception {
    createParquetFile();
    // Avro reader schema passed as a projection: only the "name" field is read,
    // so the second constructor argument (favoriteNumber) comes back as null.
    String projectionSchema =
            "{ \"type\": \"record\", \"name\": \"SpecificUser\", \"namespace\": \"com.hazelcast.jet.hadoop.file.generated\", \"fields\": [ { \"name\": \"name\", \"type\": \"string\" } ]}";

    FileSourceBuilder<SpecificUser> source = FileSources.files(currentDir + "/target/parquet")
            .glob("file.parquet")
            .option("parquet.avro.projection", projectionSchema)
            .format(FileFormat.parquet());

    assertItemsInSource(source, new SpecificUser("Frantisek", null), new SpecificUser("Ali", null));
}
Usage of com.hazelcast.jet.hadoop.file.generated.SpecificUser in the hazelcast/hazelcast project — class ParquetFileFormatTest, method createParquetFile:
/**
 * Writes the given users to {@code target/parquet/<filename>} as a
 * Snappy-compressed Parquet file, first wiping the {@code target/parquet}
 * directory so the test glob matches only the freshly written file.
 *
 * @param filename name of the Parquet file to create inside target/parquet
 * @param users    records to write, in order
 * @throws IOException if the directory cannot be cleared or the file cannot be written
 */
private void createParquetFile(String filename, SpecificUser... users) throws IOException {
    Path inputPath = new Path("target/parquet");
    FileSystem fs = FileSystem.get(new Configuration());
    try {
        // Remove stale output from earlier runs.
        fs.delete(inputPath, true);
        Path filePath = new Path(inputPath, filename);
        // try-with-resources: the original leaked the writer when write() threw.
        try (ParquetWriter<SpecificUser> writer = AvroParquetWriter.<SpecificUser>builder(filePath)
                .withRowGroupSize(ParquetWriter.DEFAULT_BLOCK_SIZE)
                .withPageSize(ParquetWriter.DEFAULT_PAGE_SIZE)
                .withSchema(SpecificUser.SCHEMA$)
                .withConf(new Configuration())
                .withCompressionCodec(CompressionCodecName.SNAPPY)
                .withValidation(false)
                .withDictionaryEncoding(false)
                .build()) {
            for (SpecificUser user : users) {
                writer.write(user);
            }
        }
    } finally {
        // Close the FileSystem handle even if writing failed.
        fs.close();
    }
}
Usage of com.hazelcast.jet.hadoop.file.generated.SpecificUser in the hazelcast/hazelcast project — class AvroFileFormatTest, method shouldReadAvroWithSchema:
@Test
public void shouldReadAvroWithSchema() throws Exception {
    createAvroFile();

    // Read the generated Avro file back through the file connector using the
    // SpecificRecord schema compiled into SpecificUser.
    String avroDir = currentDir + "/target/avro";
    FileSourceBuilder<SpecificUser> source = FileSources.files(avroDir)
            .glob("file.avro")
            .format(FileFormat.avro());

    assertItemsInSource(source, new SpecificUser("Frantisek", 7), new SpecificUser("Ali", 42));
}
Usage of com.hazelcast.jet.hadoop.file.generated.SpecificUser in the hazelcast/hazelcast project — class AvroFileFormatTest, method createAvroFile:
/**
 * Writes the given users to {@code target/avro/<filename>} as an Avro data
 * file, recreating the {@code target/avro} directory from scratch first.
 *
 * @param filename name of the Avro file to create inside target/avro
 * @param users    records to append, in order
 * @throws IOException if the directory cannot be (re)created or the file cannot be written
 */
private static void createAvroFile(String filename, SpecificUser... users) throws IOException {
    File target = new File("target/avro");
    // Start from an empty directory so stale files don't match the test glob.
    FileUtils.deleteDirectory(target);
    if (!target.mkdirs()) {
        // Original ignored the mkdirs() result and failed later with a confusing error.
        throw new IOException("Could not create directory " + target);
    }
    // try-with-resources: the original leaked the writer when create()/append() threw.
    try (DataFileWriter<SpecificUser> fileWriter =
            new DataFileWriter<>(new SpecificDatumWriter<>(SpecificUser.class))) {
        fileWriter.create(SpecificUser.SCHEMA$, new File(target, filename));
        for (SpecificUser user : users) {
            fileWriter.append(user);
        }
    }
}
Usage of com.hazelcast.jet.hadoop.file.generated.SpecificUser in the hazelcast/hazelcast project — class ParquetFileFormatTest, method shouldReadParquetFile:
@Test
public void shouldReadParquetFile() throws Exception {
    createParquetFile();

    // Full read, no projection: both fields of each record must come back intact.
    String parquetDir = currentDir + "/target/parquet";
    FileSourceBuilder<SpecificUser> source = FileSources.files(parquetDir)
            .glob("file.parquet")
            .format(FileFormat.parquet());

    assertItemsInSource(source, new SpecificUser("Frantisek", 7), new SpecificUser("Ali", 42));
}
Aggregations