Use of java.util.zip.Checksum in project voltdb by VoltDB.
The class LogFormatter, method main.
/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    if (args.length != 1) {
        System.err.println("USAGE: LogFormatter log_file");
        System.exit(2);
    }
    FileInputStream fis = new FileInputStream(args[0]);
    BinaryInputArchive logStream = BinaryInputArchive.getArchive(fis);
    FileHeader fhdr = new FileHeader();
    fhdr.deserialize(logStream, "fileheader");
    if (fhdr.getMagic() != FileTxnLog.TXNLOG_MAGIC) {
        System.err.println("Invalid magic number for " + args[0]);
        System.exit(2);
    }
    System.out.println("ZooKeeper Transactional Log File with dbid " + fhdr.getDbid()
            + " txnlog format version " + fhdr.getVersion());
    int count = 0;
    while (true) {
        long crcValue;
        byte[] bytes;
        try {
            crcValue = logStream.readLong("crcvalue");
            bytes = logStream.readBuffer("txnEntry");
        } catch (EOFException e) {
            System.out.println("EOF reached after " + count + " txns.");
            return;
        }
        if (bytes.length == 0) {
            // Since we preallocate, we define EOF to be an
            // empty transaction
            System.out.println("EOF reached after " + count + " txns.");
            return;
        }
        Checksum crc = new Adler32();
        crc.update(bytes, 0, bytes.length);
        if (crcValue != crc.getValue()) {
            throw new IOException("CRC doesn't match " + crcValue + " vs " + crc.getValue());
        }
        InputArchive iab = BinaryInputArchive.getArchive(new ByteArrayInputStream(bytes));
        TxnHeader hdr = new TxnHeader();
        SerializeUtils.deserializeTxn(iab, hdr);
        System.out.println(DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.LONG).format(new Date(hdr.getTime()))
                + " session 0x" + Long.toHexString(hdr.getClientId())
                + " cxid 0x" + Long.toHexString(hdr.getCxid())
                + " zxid 0x" + Long.toHexString(hdr.getZxid())
                + " " + TraceFormatter.op2String(hdr.getType()));
        if (logStream.readByte("EOR") != 'B') {
            LOG.error("Last transaction was partial.");
            throw new EOFException("Last transaction was partial.");
        }
        count++;
    }
}
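The read loop above recomputes an Adler32 over each transaction payload and compares it against the value stored in the log. That pattern can be isolated into a small helper; a minimal sketch (the class and method names here are ours, not part of ZooKeeper):

import java.util.zip.Adler32;
import java.util.zip.Checksum;

class ChecksumVerify {
    // Recompute the Adler32 of the payload and compare it with the value
    // that was written next to it in the log.
    static boolean matchesStoredAdler32(long storedCrc, byte[] payload) {
        Checksum crc = new Adler32();
        crc.update(payload, 0, payload.length);
        return storedCrc == crc.getValue();
    }
}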
Use of java.util.zip.Checksum in project voltdb by VoltDB.
The class FileTxnLog, method append.
/**
 * Append an entry to the transaction log.
 * @param hdr the header of the transaction
 * @param txn the transaction part of the entry
 * @return true iff something was appended, otherwise false
 */
public synchronized boolean append(TxnHeader hdr, Record txn) throws IOException {
    if (hdr != null) {
        if (hdr.getZxid() <= lastZxidSeen) {
            LOG.warn("Current zxid " + hdr.getZxid() + " is <= " + lastZxidSeen + " for " + hdr.getType());
        }
        if (logStream == null) {
            if (LOG.isInfoEnabled()) {
                LOG.info("Creating new log file: log." + Long.toHexString(hdr.getZxid()));
            }
            logFileWrite = new File(logDir, ("log." + Long.toHexString(hdr.getZxid())));
            fos = new FileOutputStream(logFileWrite);
            logStream = new BufferedOutputStream(fos);
            oa = BinaryOutputArchive.getArchive(logStream);
            FileHeader fhdr = new FileHeader(TXNLOG_MAGIC, VERSION, dbId);
            fhdr.serialize(oa, "fileheader");
            currentSize = fos.getChannel().position();
            streamsToFlush.add(fos);
        }
        padFile(fos);
        byte[] buf = Util.marshallTxnEntry(hdr, txn);
        if (buf == null || buf.length == 0) {
            throw new IOException("Faulty serialization for header and txn");
        }
        Checksum crc = makeChecksumAlgorithm();
        crc.update(buf, 0, buf.length);
        oa.writeLong(crc.getValue(), "txnEntryCRC");
        Util.writeTxnBytes(oa, buf);
        return true;
    }
    return false;
}
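The checksum written here is what LogFormatter verifies on read: makeChecksumAlgorithm() supplies the Checksum implementation (the reader above recomputes with a plain Adler32), and its value is written immediately before the marshalled entry. A stripped-down sketch of that CRC-then-payload layout, using a plain DataOutputStream instead of the jute archive (class and method names are ours):

import java.io.DataOutputStream;
import java.io.IOException;
import java.util.zip.Adler32;
import java.util.zip.Checksum;

class TxnRecordWriter {
    // Write the Adler32 of the serialized entry, then the entry itself,
    // mirroring the CRC-then-payload layout the reader checks above.
    static void writeChecksummedEntry(DataOutputStream out, byte[] entry) throws IOException {
        Checksum crc = new Adler32();
        crc.update(entry, 0, entry.length);
        out.writeLong(crc.getValue()); // analogous to oa.writeLong(..., "txnEntryCRC")
        out.writeInt(entry.length);    // length prefix, roughly analogous to the archive's buffer encoding
        out.write(entry);
    }
}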
Use of java.util.zip.Checksum in project lucene-solr by apache.
The class ReplicationHandler, method getConfFileInfoFromCache.
/**
 * For configuration files, a checksum of the file is included because, unlike index files,
 * they may have the same content but different timestamps.
 * <p/>
 * Information about the local conf files is cached so that the checksum does not have to be
 * recomputed every time. The cache is refreshed only if the lastModified of the file changes.
 */
List<Map<String, Object>> getConfFileInfoFromCache(NamedList<String> nameAndAlias, final Map<String, FileInfo> confFileInfoCache) {
    List<Map<String, Object>> confFiles = new ArrayList<>();
    synchronized (confFileInfoCache) {
        File confDir = new File(core.getResourceLoader().getConfigDir());
        Checksum checksum = null;
        for (int i = 0; i < nameAndAlias.size(); i++) {
            String cf = nameAndAlias.getName(i);
            File f = new File(confDir, cf);
            // must not happen
            if (!f.exists() || f.isDirectory())
                continue;
            FileInfo info = confFileInfoCache.get(cf);
            if (info == null || info.lastmodified != f.lastModified() || info.size != f.length()) {
                if (checksum == null)
                    checksum = new Adler32();
                info = new FileInfo(f.lastModified(), cf, f.length(), getCheckSum(checksum, f));
                confFileInfoCache.put(cf, info);
            }
            Map<String, Object> m = info.getAsMap();
            if (nameAndAlias.getVal(i) != null)
                m.put(ALIAS, nameAndAlias.getVal(i));
            confFiles.add(m);
        }
    }
    return confFiles;
}
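The getCheckSum(checksum, f) helper is not shown in this snippet. A plausible sketch of what such a helper does with the shared Adler32 instance (the class and method names here are ours, not Solr's):

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.Checksum;

class ConfFileChecksum {
    // Reuse one Checksum instance across files: reset it, stream the file
    // through it in chunks, and return the resulting value.
    static long checksumOf(Checksum checksum, File f) throws IOException {
        checksum.reset();
        byte[] buf = new byte[8192];
        try (InputStream in = new FileInputStream(f)) {
            int n;
            while ((n = in.read(buf)) != -1) {
                checksum.update(buf, 0, n);
            }
        }
        return checksum.getValue();
    }
}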
Use of java.util.zip.Checksum in project lucene-solr by apache.
The class TestBufferedChecksum, method testSimple.
public void testSimple() {
    Checksum c = new BufferedChecksum(new CRC32());
    c.update(1);
    c.update(2);
    c.update(3);
    assertEquals(1438416925L, c.getValue());
}
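BufferedChecksum is Lucene's wrapper that buffers small single-byte update() calls before delegating to the underlying Checksum, so it must produce the same value as the raw delegate. A quick equivalence check in the same test style (this particular test method is a sketch of ours, not part of the suite):

public void testMatchesUnbuffered() {
    Checksum buffered = new BufferedChecksum(new CRC32());
    Checksum plain = new CRC32();
    byte[] data = { 1, 2, 3 };
    for (byte b : data) {
        buffered.update(b);
        plain.update(b);
    }
    assertEquals(plain.getValue(), buffered.getValue());
}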
Use of java.util.zip.Checksum in project poi by apache.
The class IOUtils, method calculateChecksum.
/**
 * Calculate a checksum on all the data read from the input stream.
 *
 * This should be more efficient than the equivalent code
 * {@code IOUtils.calculateChecksum(IOUtils.toByteArray(stream))},
 * since the stream is not materialized as a byte array first.
 */
public static long calculateChecksum(InputStream stream) throws IOException {
    Checksum sum = new CRC32();
    byte[] buf = new byte[4096];
    int count;
    while ((count = stream.read(buf)) != -1) {
        if (count > 0) {
            sum.update(buf, 0, count);
        }
    }
    return sum.getValue();
}
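A usage sketch, assuming POI's org.apache.poi.util.IOUtils and a made-up file name:

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import org.apache.poi.util.IOUtils;

class ChecksumUsage {
    public static void main(String[] args) throws IOException {
        // Stream the file through the helper instead of loading it fully into memory.
        try (InputStream in = new FileInputStream("workbook.xlsx")) {
            long crc = IOUtils.calculateChecksum(in);
            System.out.println("CRC32 = " + Long.toHexString(crc));
        }
    }
}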