Usage example of herddb.utils.ExtendedDataOutputStream in project herddb (by diennea):
class FileDataStorageManager, method writePage.
/**
 * Serializes a page of records and writes it to the given stream, appending
 * an xxhash64 checksum of the serialized payload (or a sentinel when hashing
 * is disabled).
 *
 * @param newPage records to serialize into the page
 * @param file managed file used for the post-write sync (may be null)
 * @param stream output stream bound to the same managed file
 * @return total number of bytes written (payload plus trailing hash)
 * @throws IOException if serialization or the stream write fails
 */
private long writePage(Collection<Record> newPage, ManagedFile file, OutputStream stream) throws IOException {
    try (RecyclableByteArrayOutputStream buffer = getWriteBuffer();
            ExtendedDataOutputStream out = new ExtendedDataOutputStream(buffer)) {
        out.writeVLong(1); // format version
        out.writeVLong(0); // flags reserved for future implementations
        out.writeInt(newPage.size());
        for (Record record : newPage) {
            out.writeArray(record.key);
            out.writeArray(record.value);
        }
        out.flush();
        // the checksum covers everything serialized so far; it is appended after
        final long hash;
        if (hashWritesEnabled) {
            hash = XXHash64Utils.hash(buffer.getBuffer(), 0, buffer.size());
        } else {
            hash = NO_HASH_PRESENT;
        }
        out.writeLong(hash);
        out.flush();
        stream.write(buffer.getBuffer(), 0, buffer.size());
        if (file != null) {
            // O_DIRECT does not need fsync
            file.sync();
        }
        return buffer.size();
    }
}
Usage example of herddb.utils.ExtendedDataOutputStream in project herddb (by diennea):
class FileDataStorageManager, method writeCheckpointSequenceNumber.
/**
 * Persists the checkpoint position for a tablespace to its checkpoint info
 * file and returns cleanup actions for the checkpoint info files it
 * supersedes.
 *
 * The info file is first written (and fsynced) to a ".tmp" sibling and then
 * moved into place with an atomic rename, so readers never observe a
 * partially written file.
 *
 * @param tableSpace name of the tablespace being checkpointed
 * @param sequenceNumber log position reached by this checkpoint
 * @return delete actions for older checkpoint info files, to be executed
 * after the checkpoint completes (never null, possibly empty)
 * @throws DataStorageManagerException if the info file cannot be written or
 * atomically moved into place
 */
@Override
public Collection<PostCheckpointAction> writeCheckpointSequenceNumber(String tableSpace, LogSequenceNumber sequenceNumber) throws DataStorageManagerException {
Path tableSpaceDirectory = getTablespaceDirectory(tableSpace);
try {
Files.createDirectories(tableSpaceDirectory);
Path checkPointFile = getTablespaceCheckPointInfoFile(tableSpace, sequenceNumber);
Path parent = getParent(checkPointFile);
Files.createDirectories(parent);
// stage the content in a ".tmp" sibling so the final file only ever appears complete
Path checkpointFileTemp = parent.resolve(checkPointFile.getFileName() + ".tmp");
LOGGER.log(Level.INFO, "checkpoint for " + tableSpace + " at " + sequenceNumber + " to " + checkPointFile.toAbsolutePath().toString());
try (ManagedFile file = ManagedFile.open(checkpointFileTemp, requirefsync);
SimpleBufferedOutputStream buffer = new SimpleBufferedOutputStream(file.getOutputStream(), COPY_BUFFERS_SIZE);
ExtendedDataOutputStream dout = new ExtendedDataOutputStream(buffer)) {
// version
dout.writeVLong(1);
// flags for future implementations
dout.writeVLong(0);
dout.writeUTF(tableSpace);
dout.writeZLong(sequenceNumber.ledgerId);
dout.writeZLong(sequenceNumber.offset);
dout.flush();
// ensure the data is durable before the atomic rename below
file.sync();
} catch (IOException err) {
throw new DataStorageManagerException(err);
}
// write file atomically
Files.move(checkpointFileTemp, checkPointFile, StandardCopyOption.ATOMIC_MOVE, StandardCopyOption.REPLACE_EXISTING);
} catch (IOException err) {
throw new DataStorageManagerException(err);
}
// collect delete actions for checkpoint info files older than the one just
// written; they are executed only after the whole checkpoint has completed
Collection<PostCheckpointAction> result = new ArrayList<>();
try (DirectoryStream<Path> stream = Files.newDirectoryStream(tableSpaceDirectory)) {
for (Path p : stream) {
if (isTablespaceCheckPointInfoFile(p)) {
try {
LogSequenceNumber logPositionInFile = readLogSequenceNumberFromCheckpointInfoFile(tableSpace, p);
if (sequenceNumber.after(logPositionInFile)) {
LOGGER.log(Level.FINEST, "checkpoint info file " + p.toAbsolutePath() + ". will be deleted after checkpoint end");
result.add(new DeleteFileAction(tableSpace, "checkpoint", "delete checkpoint info file " + p.toAbsolutePath(), p));
}
} catch (DataStorageManagerException ignore) {
LOGGER.log(Level.SEVERE, "unparsable checkpoint info file " + p.toAbsolutePath(), ignore);
// do not auto-delete checkpoint files
}
}
}
} catch (IOException err) {
// listing failure is not fatal: worst case, stale checkpoint files linger on disk
LOGGER.log(Level.SEVERE, "Could not list dir " + tableSpaceDirectory, err);
}
return result;
}
Usage example of herddb.utils.ExtendedDataOutputStream in project herddb (by diennea):
class Tuple, method serialize.
/**
 * Serializes a tuple to an in-memory buffer, writing one (type, value) pair
 * per field in the order returned by {@code tuple.getFieldNames()}.
 *
 * Each field name must match (case-insensitively) the column at the same
 * position in {@code columns}; null values are encoded as
 * {@link ColumnTypes#NULL} with no payload.
 *
 * @param tuple source of field names and values
 * @param columns expected schema, positionally aligned with the tuple fields
 * @return buffer containing the serialized tuple
 * @throws IOException if the tuple does not match the schema or a value has
 * an unsupported Java type
 */
public static VisibleByteArrayOutputStream serialize(DataAccessor tuple, Column[] columns) throws IOException {
    VisibleByteArrayOutputStream oo = new VisibleByteArrayOutputStream(1024);
    try (ExtendedDataOutputStream eoo = new ExtendedDataOutputStream(oo)) {
        int i = 0;
        String[] fieldNames = tuple.getFieldNames();
        for (String fieldName : fieldNames) {
            // more fields than columns: report a schema mismatch instead of
            // failing with an ArrayIndexOutOfBoundsException
            if (i >= columns.length) {
                throw new IOException("invalid schema for tuple " + Arrays.toString(fieldNames) + " <> " + Arrays.toString(columns));
            }
            // Locale.ROOT keeps the comparison stable regardless of the JVM
            // default locale (e.g. the Turkish dotless-i lowercasing pitfall)
            if (!columns[i].name.toLowerCase(java.util.Locale.ROOT).equals(fieldName)) {
                throw new IOException("invalid schema for tuple " + Arrays.toString(fieldNames) + " <> " + Arrays.toString(columns));
            }
            Object value = tuple.get(fieldName);
            if (value == null) {
                eoo.writeVInt(ColumnTypes.NULL);
            } else {
                // map the runtime Java type to the column type tag used on the wire
                byte columnType;
                if (value instanceof String) {
                    columnType = ColumnTypes.STRING;
                } else if (value instanceof RawString) {
                    columnType = ColumnTypes.STRING;
                } else if (value instanceof Integer) {
                    columnType = ColumnTypes.INTEGER;
                } else if (value instanceof Long) {
                    columnType = ColumnTypes.LONG;
                } else if (value instanceof java.sql.Timestamp) {
                    columnType = ColumnTypes.TIMESTAMP;
                } else if (value instanceof Double) {
                    columnType = ColumnTypes.DOUBLE;
                } else if (value instanceof Boolean) {
                    columnType = ColumnTypes.BOOLEAN;
                } else if (value instanceof byte[]) {
                    columnType = ColumnTypes.BYTEARRAY;
                } else {
                    throw new IOException("unsupported class " + value.getClass());
                }
                RecordSerializer.serializeTypeAndValue(value, columnType, eoo);
            }
            i++;
        }
    }
    return oo;
}
Usage example of herddb.utils.ExtendedDataOutputStream in project herddb (by diennea):
class Table, method serialize.
/**
 * Serializes this table definition into a byte array.
 *
 * Layout: version, flags, tablespace, name, uuid, auto_increment,
 * maxSerialPosition, primary key columns, table flags, column definitions,
 * and (when present) the foreign key definitions.
 *
 * @return the serialized table metadata, never null
 */
public byte[] serialize() {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    try (ExtendedDataOutputStream out = new ExtendedDataOutputStream(bytes)) {
        out.writeVLong(1); // format version
        out.writeVLong(0); // flags reserved for future implementations
        out.writeUTF(tablespace);
        out.writeUTF(name);
        out.writeUTF(uuid);
        out.writeByte(auto_increment ? 1 : 0);
        out.writeVInt(maxSerialPosition);
        out.writeByte(primaryKey.length);
        for (String pkColumn : primaryKey) {
            out.writeUTF(pkColumn);
        }
        boolean withForeignKeys = foreignKeys != null && foreignKeys.length > 0;
        // table-level flags (currently only the foreign-keys marker)
        out.writeVInt(withForeignKeys ? TABLEFLAGS_HAS_FOREIGN_KEYS : 0);
        out.writeVInt(columns.length);
        for (Column column : columns) {
            writeColumn(out, column);
        }
        if (withForeignKeys) {
            out.writeVInt(foreignKeys.length);
            for (ForeignKeyDef foreignKey : foreignKeys) {
                writeForeignKey(out, foreignKey);
            }
        }
    } catch (IOException impossible) {
        // writing to an in-memory buffer cannot fail; surface as unchecked just in case
        throw new RuntimeException(impossible);
    }
    return bytes.toByteArray();
}

/** Writes one column definition: version, flags, name, type, position and optional default value. */
private void writeColumn(ExtendedDataOutputStream out, Column column) throws IOException {
    out.writeVLong(COLUMNVERSION_1);
    out.writeVLong(column.defaultValue != null ? COLUMNFLAGS_HAS_DEFAULT_VALUE : COLUMNFLAGS_NO_FLAGS);
    out.writeUTF(column.name);
    out.writeVInt(column.type);
    out.writeVInt(column.serialPosition);
    if (column.defaultValue != null) {
        out.writeArray(column.defaultValue);
    }
}

/** Writes one foreign key definition; the parent column count is implied by the child column count. */
private void writeForeignKey(ExtendedDataOutputStream out, ForeignKeyDef foreignKey) throws IOException {
    out.writeUTF(foreignKey.name);
    out.writeUTF(foreignKey.parentTableId);
    out.writeVInt(foreignKey.columns.length);
    for (String column : foreignKey.columns) {
        out.writeUTF(column);
    }
    for (String column : foreignKey.parentTableColumns) {
        out.writeUTF(column);
    }
    out.writeVInt(foreignKey.onUpdateAction);
    out.writeVInt(foreignKey.onDeleteAction);
}
Usage example of herddb.utils.ExtendedDataOutputStream in project herddb (by diennea):
class Index, method serialize.
/**
 * Serializes this index definition into a byte array.
 *
 * Layout: version, flags, tablespace, name, uuid, table, property bitmask
 * (currently only PROPERTY_UNIQUE), index type, then one record per column.
 *
 * @return the serialized index metadata, never null
 */
public byte[] serialize() {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    try (ExtendedDataOutputStream out = new ExtendedDataOutputStream(bytes)) {
        out.writeVLong(1); // format version
        out.writeVLong(0); // flags reserved for future implementations
        out.writeUTF(tablespace);
        out.writeUTF(name);
        out.writeUTF(uuid);
        out.writeUTF(table);
        // property bitmask, extensible for future implementations
        out.writeVInt(unique ? PROPERTY_UNIQUE : 0);
        out.writeUTF(type);
        out.writeVInt(columns.length);
        for (Column column : columns) {
            out.writeVLong(1); // per-column version
            out.writeVLong(0); // per-column flags, reserved
            out.writeUTF(column.name);
            out.writeVInt(column.type);
            out.writeVInt(column.serialPosition);
            out.writeVInt(0); // trailing flags, reserved
        }
    } catch (IOException impossible) {
        // writing to an in-memory buffer cannot fail; surface as unchecked just in case
        throw new RuntimeException(impossible);
    }
    return bytes.toByteArray();
}
Aggregations