Use of org.apache.hadoop_voltpatches.util.PureJavaCrc32 in project voltdb by VoltDB:
class DRCatalogDiffEngine, method serializeCatalogCommandsForDr.
/**
 * Serializes the DR-relevant portion of the catalog (the DR role flag plus
 * every DRed, non-view, non-export table) into catalog commands, and returns
 * them together with their CRC and a compressed/base64-encoded form.
 *
 * @param catalog         the catalog to serialize from
 * @param protocolVersion DR protocol version of the consumer; -1 means latest
 * @return the serialized commands wrapped in a {@code DRCatalogCommands}
 */
public static DRCatalogCommands serializeCatalogCommandsForDr(Catalog catalog, int protocolVersion) {
    Cluster cluster = catalog.getClusters().get("cluster");
    Database db = cluster.getDatabases().get("database");

    StringBuilder commands = new StringBuilder();
    final boolean supportsDrRole =
            protocolVersion == -1 || protocolVersion >= DRProtocol.MULTICLUSTER_PROTOCOL_VERSION;
    if (supportsDrRole) {
        cluster.writeCommandForField(commands, "drRole", true);
    } else {
        // The compatibility mode will not understand the new drRole field,
        // so use the old field name. We'll remove this in v7.1 when the
        // compatibility mode is deprecated.
        db.writeCommandForField(commands, "isActiveActiveDRed", true);
    }

    // Emit creation + field + child commands for each table that is DRed,
    // is not a materialized view, and is not export-only.
    for (Table table : db.getTables()) {
        final boolean isView = table.getMaterializer() != null;
        if (!table.getIsdred() || isView || CatalogUtil.isTableExportOnly(db, table)) {
            continue;
        }
        table.writeCreationCommand(commands);
        table.writeFieldCommands(commands, null);
        table.writeChildCommands(commands,
                Sets.newHashSet(Column.class, Index.class, Constraint.class, Statement.class),
                s_whiteListFields);
    }

    String catalogCommands = commands.toString();
    PureJavaCrc32 crc = new PureJavaCrc32();
    crc.update(catalogCommands.getBytes(Constants.UTF8ENCODING));
    return new DRCatalogCommands(protocolVersion, crc.getValue(), Encoder.compressAndBase64Encode(catalogCommands));
}
Use of org.apache.hadoop_voltpatches.util.PureJavaCrc32 in project voltdb by VoltDB:
class ZKUtil, method retrieveChunksAsBytes.
/**
 * Waits until an upload under {@code path} is finished (signalled by a
 * "{prefix}_complete" child znode), then reads every "{prefix}*" chunk,
 * decompresses the concatenation, and optionally computes a CRC32 over the
 * raw (compressed) chunk bytes.
 *
 * @param zk     ZooKeeper session to read from
 * @param path   parent znode holding the chunk children
 * @param prefix chunk-name prefix to collect
 * @param getCRC whether to compute a CRC of the chunk payloads
 * @return pair of (decompressed bytes, CRC as int — or null when getCRC is false)
 */
public static Pair<byte[], Integer> retrieveChunksAsBytes(ZooKeeper zk, String path, String prefix, boolean getCRC) throws Exception {
    TreeSet<String> chunks = new TreeSet<String>();
    // Poll until the writer has published the completion marker.
    while (!chunks.contains(path + "/" + prefix + "_complete")) {
        // getChildren() returns bare child names, so prepend the parent
        // path to build the absolute znode paths used by getData() below.
        // Rebuild the set each pass rather than mutating it mid-iteration.
        TreeSet<String> matching = new TreeSet<String>();
        for (String child : zk.getChildren(path, false)) {
            if (child.startsWith(prefix)) {
                matching.add(path + "/" + child);
            }
        }
        chunks = matching;
    }
    // One slot per data chunk; the "_complete" marker itself carries no data.
    byte[][] resultBuffers = new byte[chunks.size() - 1][];
    int ii = 0;
    PureJavaCrc32 crc = getCRC ? new PureJavaCrc32() : null;
    for (String chunk : chunks) {
        if (chunk.endsWith("_complete")) {
            continue;
        }
        resultBuffers[ii] = zk.getData(chunk, false, null);
        if (crc != null) {
            crc.update(resultBuffers[ii]);
        }
        ii++;
    }
    return Pair.of(decompressBytes(resultBuffers), crc != null ? (int) crc.getValue() : null);
}
Use of org.apache.hadoop_voltpatches.util.PureJavaCrc32 in project voltdb by VoltDB:
class InMemoryJarfile, method getCRC.
///////////////////////////////////////////////////////
// UTILITY
///////////////////////////////////////////////////////
// This method should be able to be killed and all usage replaced with
// getSha1Hash, in theory. We serialize this to pass it between the master
// and replica for DR, so there's probably some extra work to do beyond
// just replacing one method call with another, though.
/**
 * Computes a CRC32 over the jar's entries (keys and contents), skipping
 * entries whose content is nondeterministic across builds.
 *
 * For the autogenerated DDL entry, the first line (which contains a
 * timestamp) is excluded from the CRC so identical catalogs hash equally.
 *
 * @return the CRC32 value of the stable portion of the jar contents
 */
public long getCRC() {
    PureJavaCrc32 crc = new PureJavaCrc32();
    for (Entry<String, byte[]> e : super.entrySet()) {
        // buildinfo/report entries vary per build; never include them.
        if (e.getKey().equals("buildinfo.txt") || e.getKey().equals("catalog-report.html")) {
            continue;
        }
        // has a date which changes and causes test failures
        if (e.getKey().equals(VoltCompiler.AUTOGEN_DDL_FILE_NAME)) {
            byte[] ddlbytes = e.getValue();
            int index = 0;
            // Scan for the end of the first line; guard against an entry
            // that contains no newline at all (would otherwise overrun).
            while (index < ddlbytes.length && ddlbytes[index] != '\n') {
                index++;
            }
            byte[] newddlbytes = Arrays.copyOfRange(ddlbytes, index, ddlbytes.length);
            crc.update(e.getKey().getBytes(Constants.UTF8ENCODING));
            crc.update(newddlbytes);
        } else {
            crc.update(e.getKey().getBytes(Constants.UTF8ENCODING));
            crc.update(e.getValue());
        }
    }
    return crc.getValue();
}
Use of org.apache.hadoop_voltpatches.util.PureJavaCrc32 in project voltdb by VoltDB:
class SnapshotUtil, method CRCCheck.
/**
 * Loads a snapshot digest file, verifies its embedded CRC, and returns the
 * digest contents as a JSON object.
 *
 * The digest file layout is: a 4-byte big-endian CRC, followed by one line
 * of UTF-8 text. The CRC covers the text line. Two on-disk formats exist:
 * the new format, where the line parses as a JSON object, and the legacy
 * format, a comma-separated "txnId,table,table,..." list (detected by the
 * JSON parse failing). Legacy contents are converted into an equivalent
 * JSON object so callers see one consistent shape.
 *
 * @param f
 * The snapshot digest file object
 * @param logger logger used to report validation problems
 * @return the digest as a JSONObject, or null if the file is truncated,
 * the CRC does not match, or any parse/IO error occurs (errors are
 * logged rather than propagated)
 * @throws IOException declared for callers, but in practice failures are
 * logged and signalled by a null return
 */
public static JSONObject CRCCheck(File f, VoltLogger logger) throws IOException {
final FileInputStream fis = new FileInputStream(f);
try {
final BufferedInputStream bis = new BufferedInputStream(fis);
// First 4 bytes of the file are the stored CRC of the digest text.
ByteBuffer crcBuffer = ByteBuffer.allocate(4);
if (4 != bis.read(crcBuffer.array())) {
logger.warn("EOF while attempting to read CRC from snapshot digest " + f + " on host " + CoreUtils.getHostnameOrAddress());
return null;
}
final int crc = crcBuffer.getInt();
final InputStreamReader isr = new InputStreamReader(bis, StandardCharsets.UTF_8);
CharArrayWriter caw = new CharArrayWriter();
// Accumulate the digest text up to (but excluding) the first newline or EOF.
while (true) {
int nextChar = isr.read();
if (nextChar == -1) {
break;
}
//digests
if (nextChar == '\n') {
break;
}
caw.write(nextChar);
}
/*
 * Try and parse the contents as a JSON object. If it succeeds then assume
 * it is a the new version of the digest file. It is unlikely the old version
 * will successfully parse as JSON because it starts with a number
 * instead of an open brace.
 */
JSONObject obj = null;
try {
obj = new JSONObject(caw.toString());
} catch (JSONException e) {
//assume it is the old format
}
/*
 * Convert the old style file to a JSONObject so it can be presented
 * via a consistent interface.
 */
if (obj == null) {
String tableList = caw.toString();
byte[] tableListBytes = tableList.getBytes(StandardCharsets.UTF_8);
PureJavaCrc32 tableListCRC = new PureJavaCrc32();
tableListCRC.update(tableListBytes);
// Legacy CRCs were computed over the text INCLUDING the trailing newline.
tableListCRC.update("\n".getBytes(StandardCharsets.UTF_8));
final int calculatedValue = (int) tableListCRC.getValue();
if (crc != calculatedValue) {
logger.warn("CRC of snapshot digest " + f + " did not match digest contents");
return null;
}
// Legacy format: first field is the txnId, the rest are table names.
String[] tableNames = tableList.split(",");
long txnId = Long.valueOf(tableNames[0]);
obj = new JSONObject();
try {
obj.put("version", 0);
obj.put("txnId", txnId);
for (int ii = 1; ii < tableNames.length; ii++) {
obj.append("tables", tableNames[ii]);
}
} catch (JSONException e) {
logger.warn("Exception parsing JSON of digest " + f, e);
return null;
}
return obj;
} else {
/*
 * Verify the CRC and then return the data as a JSON object.
 * Note: unlike the legacy branch, the new-format CRC does NOT
 * include the trailing newline.
 */
String tableList = caw.toString();
byte[] tableListBytes = tableList.getBytes(StandardCharsets.UTF_8);
PureJavaCrc32 tableListCRC = new PureJavaCrc32();
tableListCRC.update(tableListBytes);
final int calculatedValue = (int) tableListCRC.getValue();
if (crc != calculatedValue) {
logger.warn("CRC of snapshot digest " + f + " did not match digest contents");
return null;
}
return obj;
}
} catch (Exception e) {
// Deliberate best-effort: any failure is logged and mapped to null.
logger.warn("Exception while parsing snapshot digest " + f, e);
return null;
} finally {
try {
if (fis != null)
fis.close();
} catch (IOException e) {
}
}
}
Aggregations