Example usage of com.linkedin.pinot.core.segment.memory.PinotDataBuffer in the pinot project (by LinkedIn).
Taken from the class FixedByteWidthRowColDataFileWriterTest, method testSpecialPaddingCharsForStringReaderWriter.
@Test
public void testSpecialPaddingCharsForStringReaderWriter() throws Exception {
  // Verifies that strings containing multi-byte (UTF-8 replacement-character) sequences
  // round-trip through the fixed-width writer/reader when padded to the column width,
  // for both a printable padding char ('%') and the NUL padding char ('\0').
  for (int iter = 0; iter < 2; iter++) {
    char paddingChar = (iter == 0) ? '%' : '\0';
    // UTF-8 encodings of "<replacement-char><replacement-char> Executive" and
    // "<replacement-char> calgary junk removal".
    final byte[] bytes1 = new byte[] { -17, -65, -67, -17, -65, -67, 32, 69, 120, 101, 99, 117, 116, 105, 118, 101 };
    final byte[] bytes2 = new byte[] { -17, -65, -68, 32, 99, 97, 108, 103, 97, 114, 121, 32, 106, 117, 110, 107, 32, 114, 101, 109, 111, 118, 97, 108 };
    File file = new File("test_single_col_writer.dat");
    file.delete();
    try {
      int rows = 100;
      int cols = 1;
      // Decode/encode with an explicit charset: the byte arrays above are UTF-8, so relying on
      // the platform default charset would make the test fail on non-UTF-8 platforms.
      String testString1 = new String(bytes1, java.nio.charset.StandardCharsets.UTF_8);
      String testString2 = new String(bytes2, java.nio.charset.StandardCharsets.UTF_8);
      int stringColumnMaxLength = Math.max(testString1.getBytes(java.nio.charset.StandardCharsets.UTF_8).length,
          testString2.getBytes(java.nio.charset.StandardCharsets.UTF_8).length);
      int[] columnSizes = new int[] { stringColumnMaxLength };
      FixedByteSingleValueMultiColWriter writer =
          new FixedByteSingleValueMultiColWriter(file, rows, cols, columnSizes);
      String[] data = new String[rows];
      for (int i = 0; i < rows; i++) {
        String toPut = (i % 2 == 0) ? testString1 : testString2;
        // Pad each value to the fixed column width in BYTES (the strings differ in encoded length).
        final int padding = stringColumnMaxLength - toPut.getBytes(java.nio.charset.StandardCharsets.UTF_8).length;
        final StringBuilder bld = new StringBuilder(toPut);
        for (int j = 0; j < padding; j++) {
          bld.append(paddingChar);
        }
        data[i] = bld.toString();
        writer.setString(i, 0, data[i]);
      }
      writer.close();
      PinotDataBuffer mmapBuffer =
          PinotDataBuffer.fromFile(file, ReadMode.mmap, FileChannel.MapMode.READ_ONLY, "testing");
      FixedByteSingleValueMultiColReader dataFileReader =
          new FixedByteSingleValueMultiColReader(mmapBuffer, rows, 1, new int[] { stringColumnMaxLength });
      for (int i = 0; i < rows; i++) {
        String stringInFile = dataFileReader.getString(i, 0);
        Assert.assertEquals(stringInFile, data[i]);
        Assert.assertEquals(StringUtils.remove(stringInFile, String.valueOf(paddingChar)),
            StringUtils.remove(data[i], String.valueOf(paddingChar)));
      }
      // Close the reader so the mmap is released; the original leaked it.
      // NOTE(review): assumes reader.close() also releases mmapBuffer — verify ownership.
      dataFileReader.close();
    } finally {
      // Clean up the temp file even when an assertion above fails.
      file.delete();
    }
  }
}
Example usage of com.linkedin.pinot.core.segment.memory.PinotDataBuffer in the pinot project (by LinkedIn).
Taken from the class FixedByteWidthRowColDataFileWriterTest, method testSpecialCharsForStringReaderWriter.
@Test
public void testSpecialCharsForStringReaderWriter() throws Exception {
  // Verifies that strings containing multi-byte (UTF-8 replacement-character) sequences
  // round-trip through the fixed-width writer/reader when padded with the segment format's
  // default padding character (V1Constants.Str.DEFAULT_STRING_PAD_CHAR).
  final byte[] bytes1 = new byte[] { -17, -65, -67, -17, -65, -67, 32, 69, 120, 101, 99, 117, 116, 105, 118, 101 };
  final byte[] bytes2 = new byte[] { -17, -65, -68, 32, 99, 97, 108, 103, 97, 114, 121, 32, 106, 117, 110, 107, 32, 114, 101, 109, 111, 118, 97, 108 };
  File file = new File("test_single_col_writer.dat");
  file.delete();
  try {
    int rows = 100;
    int cols = 1;
    // Decode/encode with an explicit charset: the byte arrays above are UTF-8, so relying on
    // the platform default charset would make the test fail on non-UTF-8 platforms.
    String testString1 = new String(bytes1, java.nio.charset.StandardCharsets.UTF_8);
    String testString2 = new String(bytes2, java.nio.charset.StandardCharsets.UTF_8);
    int stringColumnMaxLength = Math.max(testString1.getBytes(java.nio.charset.StandardCharsets.UTF_8).length,
        testString2.getBytes(java.nio.charset.StandardCharsets.UTF_8).length);
    int[] columnSizes = new int[] { stringColumnMaxLength };
    FixedByteSingleValueMultiColWriter writer =
        new FixedByteSingleValueMultiColWriter(file, rows, cols, columnSizes);
    String[] data = new String[rows];
    for (int i = 0; i < rows; i++) {
      String toPut = (i % 2 == 0) ? testString1 : testString2;
      // Pad each value to the fixed column width in BYTES (the strings differ in encoded length).
      final int padding = stringColumnMaxLength - toPut.getBytes(java.nio.charset.StandardCharsets.UTF_8).length;
      final StringBuilder bld = new StringBuilder(toPut);
      for (int j = 0; j < padding; j++) {
        bld.append(V1Constants.Str.DEFAULT_STRING_PAD_CHAR);
      }
      data[i] = bld.toString();
      writer.setString(i, 0, data[i]);
    }
    writer.close();
    PinotDataBuffer mmapBuffer =
        PinotDataBuffer.fromFile(file, ReadMode.mmap, FileChannel.MapMode.READ_ONLY, "testing");
    FixedByteSingleValueMultiColReader dataFileReader =
        new FixedByteSingleValueMultiColReader(mmapBuffer, rows, 1, new int[] { stringColumnMaxLength });
    String padString = String.valueOf(V1Constants.Str.DEFAULT_STRING_PAD_CHAR);
    for (int i = 0; i < rows; i++) {
      String stringInFile = dataFileReader.getString(i, 0);
      Assert.assertEquals(stringInFile, data[i]);
      Assert.assertEquals(StringUtils.remove(stringInFile, padString), StringUtils.remove(data[i], padString));
    }
    // Close the reader so the mmap is released; the original leaked it.
    // NOTE(review): assumes reader.close() also releases mmapBuffer — verify ownership.
    dataFileReader.close();
  } finally {
    // Clean up the temp file even when an assertion above fails.
    file.delete();
  }
}
Example usage of com.linkedin.pinot.core.segment.memory.PinotDataBuffer in the pinot project (by LinkedIn).
Taken from the class FixedByteWidthRowColDataFileReaderTest, method testSingleCol.
@Test
void testSingleCol() throws Exception {
  // Writes 100 random ints to a raw file, then reads them back through
  // FixedByteSingleValueMultiColReader in both heap and mmap modes.
  String fileName = "test_single_col.dat";
  File f = new File(fileName);
  f.delete();
  int[] data = new int[100];
  Random r = new Random();
  // try-with-resources ensures the stream is closed even if a write fails
  // (the original leaked it on failure and never re-checked it after close()).
  try (DataOutputStream dos = new DataOutputStream(new FileOutputStream(f))) {
    for (int i = 0; i < data.length; i++) {
      data[i] = r.nextInt();
      dos.writeInt(data[i]);
    }
    dos.flush();
  }
  // (Removed a dead RandomAccessFile that was opened "rw" solely for a commented-out size print.)
  PinotDataBuffer heapBuffer = PinotDataBuffer.fromFile(f, ReadMode.heap, FileChannel.MapMode.READ_ONLY, "testing");
  FixedByteSingleValueMultiColReader heapReader =
      new FixedByteSingleValueMultiColReader(heapBuffer, data.length, 1, new int[] { 4 });
  heapReader.open();
  for (int i = 0; i < data.length; i++) {
    Assert.assertEquals(heapReader.getInt(i, 0), data[i]);
  }
  // Close the reader before the buffer it wraps (the original closed them in the reverse order).
  heapReader.close();
  heapBuffer.close();
  // Not strictly required. Let the tests pass first...then we can remove
  // TODO: remove me
  PinotDataBuffer mmapBuffer =
      PinotDataBuffer.fromFile(f, ReadMode.mmap, FileChannel.MapMode.READ_ONLY, "mmap_testing");
  FixedByteSingleValueMultiColReader mmapReader =
      new FixedByteSingleValueMultiColReader(mmapBuffer, data.length, 1, new int[] { 4 });
  mmapReader.open();
  for (int i = 0; i < data.length; i++) {
    Assert.assertEquals(mmapReader.getInt(i, 0), data[i]);
  }
  mmapReader.close();
  mmapBuffer.close();
  f.delete();
}
Example usage of com.linkedin.pinot.core.segment.memory.PinotDataBuffer in the pinot project (by LinkedIn).
Taken from the class FixedByteChunkSingleValueReaderWriteTest, method testInt.
@Test
public void testInt() throws Exception {
  // Round-trips NUM_VALUES random ints through the snappy-compressed
  // fixed-byte chunk writer and reader.
  final int[] values = new int[NUM_VALUES];
  for (int index = 0; index < NUM_VALUES; index++) {
    values[index] = _random.nextInt();
  }

  final File dataFile = new File(TEST_FILE);
  FileUtils.deleteQuietly(dataFile);

  // Write all values through the chunked, compressed writer.
  ChunkCompressor chunkCompressor = ChunkCompressorFactory.getCompressor("snappy");
  FixedByteChunkSingleValueWriter chunkWriter = new FixedByteChunkSingleValueWriter(
      dataFile, chunkCompressor, NUM_VALUES, NUM_DOCS_PER_CHUNK, V1Constants.Numbers.INTEGER_SIZE);
  for (int index = 0; index < NUM_VALUES; index++) {
    chunkWriter.setInt(index, values[index]);
  }
  chunkWriter.close();

  // Read everything back via an mmap'ed buffer and verify each value.
  PinotDataBuffer dataBuffer =
      PinotDataBuffer.fromFile(dataFile, ReadMode.mmap, FileChannel.MapMode.READ_ONLY, getClass().getName());
  ChunkDecompressor chunkDecompressor = ChunkCompressorFactory.getDecompressor("snappy");
  FixedByteChunkSingleValueReader chunkReader =
      new FixedByteChunkSingleValueReader(dataBuffer, chunkDecompressor);
  ChunkReaderContext readerContext = chunkReader.createContext();
  for (int index = 0; index < NUM_VALUES; index++) {
    Assert.assertEquals(chunkReader.getInt(index, readerContext), values[index]);
  }
  chunkReader.close();

  FileUtils.deleteQuietly(dataFile);
}
Example usage of com.linkedin.pinot.core.segment.memory.PinotDataBuffer in the pinot project (by LinkedIn).
Taken from the class FixedByteWidthRowColDataFileWriterTest, method testSingleColLong.
@Test
public void testSingleColLong() throws Exception {
  // Writes 100 random longs through the fixed-width writer, then mmaps the file
  // and verifies each value through the fixed-width reader.
  File wfile = new File("test_single_col_writer.dat");
  wfile.delete();
  final int rows = 100;
  final int cols = 1;
  final int[] columnSizes = new int[] { 8 }; // one long column = 8 bytes per row
  FixedByteSingleValueMultiColWriter writer =
      new FixedByteSingleValueMultiColWriter(wfile, rows, cols, columnSizes);
  final long[] data = new long[rows];
  Random r = new Random();
  for (int i = 0; i < rows; i++) {
    data[i] = r.nextLong();
    writer.setLong(i, 0, data[i]);
  }
  writer.close();
  File rfile = new File("test_single_col_writer.dat");
  try {
    // Map READ_ONLY: this test only reads; the original mapped READ_WRITE, which both
    // needlessly allows corruption of the file under test and requires write permission.
    PinotDataBuffer buffer = PinotDataBuffer.fromFile(rfile, ReadMode.mmap, FileChannel.MapMode.READ_ONLY, "testing");
    FixedByteSingleValueMultiColReader reader =
        new FixedByteSingleValueMultiColReader(buffer, rows, cols, columnSizes);
    for (int i = 0; i < rows; i++) {
      Assert.assertEquals(reader.getLong(i, 0), data[i]);
    }
    reader.close();
  } finally {
    // Clean up the temp file even when an assertion above fails.
    rfile.delete();
  }
}
Aggregations