Use of com.hedera.mirror.importer.exception.ParserException in the hashgraph/hedera-mirror-node project:
the executeBatches method of the SqlEntityListener class.
private void executeBatches() {
    try {
        // Publish first: listeners may save batches asynchronously, so kicking them off
        // before the synchronous persists below reduces overall latency.
        eventPublisher.publishEvent(new EntityBatchSaveEvent(this));
        Stopwatch stopwatch = Stopwatch.createStarted();
        persistInsertOnlyDomains();
        persistDomainsWithConflictManagement();
        persistTransferDomains();
        log.info("Completed batch inserts in {}", stopwatch);
    } catch (ParserException e) {
        throw e;
    } catch (Exception e) {
        throw new ParserException(e);
    } finally {
        cleanup();
    }
}

// Insert-only domains: no conflict handling is needed for these tables.
private void persistInsertOnlyDomains() {
    batchPersister.persist(assessedCustomFees);
    batchPersister.persist(contractLogs);
    batchPersister.persist(contractResults);
    batchPersister.persist(contractStateChanges);
    batchPersister.persist(cryptoTransfers);
    batchPersister.persist(customFees);
    batchPersister.persist(ethereumTransactions);
    batchPersister.persist(fileData);
    batchPersister.persist(liveHashes);
    batchPersister.persist(nodeStakes);
    batchPersister.persist(topicMessages);
    batchPersister.persist(transactions);
    batchPersister.persist(transactionSignatures);
}

// Domains whose inserts require conflict management; relative order matters where noted.
private void persistDomainsWithConflictManagement() {
    batchPersister.persist(contracts);
    batchPersister.persist(cryptoAllowances);
    batchPersister.persist(entities);
    batchPersister.persist(nftAllowances);
    batchPersister.persist(tokens.values());
    // token accounts derive some fields from the associated token, so tokens go first
    batchPersister.persist(tokenAccounts.values());
    batchPersister.persist(tokenAllowances);
    // nfts reference the token entity, so they are persisted after tokens
    batchPersister.persist(nfts.values());
    batchPersister.persist(schedules.values());
}

// Transfers go last so every entity they reference already exists.
private void persistTransferDomains() {
    batchPersister.persist(nonFeeTransfers);
    batchPersister.persist(nftTransferState.values());
    batchPersister.persist(stakingRewardTransfers);
    batchPersister.persist(tokenTransfers);
    // transfers from token dissociate transactions are handled after nfts are processed
    tokenDissociateTransferBatchPersister.persist(tokenDissociateTransfers);
}
Use of com.hedera.mirror.importer.exception.ParserException in the hashgraph/hedera-mirror-node project:
the onItem method of the PubSubRecordItemListener class.
/**
 * Publishes the given record item to pubsub and, when the transaction targets an address book
 * file, also persists the file data and updates the address book service.
 *
 * @param recordItem the parsed transaction record to process
 * @throws ImporterException (as ParserException) if publishing to pubsub fails
 */
@Override
public void onItem(RecordItem recordItem) throws ImporterException {
    TransactionBody body = recordItem.getTransactionBody();
    TransactionRecord txRecord = recordItem.getRecord();
    TransactionType transactionType = TransactionType.of(recordItem.getTransactionType());
    TransactionHandler transactionHandler = transactionHandlerFactory.get(transactionType);
    // supplier form defers the expensive proto printing unless trace logging is enabled
    log.trace("Storing transaction body: {}", () -> Utility.printProtoMessage(body));
    long consensusTimestamp = DomainUtils.timeStampInNanos(txRecord.getConsensusTimestamp());
    EntityId entityId;
    try {
        entityId = transactionHandler.getEntity(recordItem);
    } catch (InvalidEntityException e) {
        // transaction can have invalid topic/contract/file id; publish with a null entity
        log.warn("Invalid entity encountered for consensusTimestamp {} : {}", consensusTimestamp, e.getMessage());
        entityId = null;
    }
    PubSubMessage pubSubMessage = buildPubSubMessage(consensusTimestamp, entityId, recordItem);
    try {
        sendPubSubMessage(pubSubMessage);
    } catch (Exception e) {
        // rethrow as ParserException so processing stops here; presumably this prevents
        // acknowledging transactions with a consensus timestamp greater than that of the
        // last correctly sent txn — TODO confirm retry semantics with the caller
        throw new ParserException("Error sending transaction to pubsub", e);
    }
    log.debug("Published transaction : {}", consensusTimestamp);
    if (addressBookService.isAddressBook(entityId)) {
        FileID fileID = null;
        byte[] fileBytes = null;
        // extract the file id and contents from whichever file operation the body carries;
        // for file create the id comes from the receipt since the body has none yet
        if (body.hasFileAppend()) {
            fileID = body.getFileAppend().getFileID();
            fileBytes = body.getFileAppend().getContents().toByteArray();
        } else if (body.hasFileCreate()) {
            fileID = txRecord.getReceipt().getFileID();
            fileBytes = body.getFileCreate().getContents().toByteArray();
        } else if (body.hasFileUpdate()) {
            fileID = body.getFileUpdate().getFileID();
            fileBytes = body.getFileUpdate().getContents().toByteArray();
        }
        // NOTE(review): fileID/fileBytes stay null when the body is none of the above —
        // presumably isAddressBook guarantees a file transaction; verify against the service
        FileData fileData = new FileData(consensusTimestamp, fileBytes, EntityId.of(fileID), recordItem.getTransactionType());
        fileDataRepository.save(fileData);
        addressBookService.update(fileData);
    }
}
Use of com.hedera.mirror.importer.exception.ParserException in the hashgraph/hedera-mirror-node project:
the persistItems method of the BatchUpserter class.
/**
 * Upserts the given items into the final table by staging them in a temp table first.
 * Any failure is wrapped in a ParserException with the item count and table name.
 */
@Override
protected void persistItems(Collection<?> items, Connection connection) {
    if (CollectionUtils.isEmpty(items)) {
        return;
    }
    try {
        createTempTable(connection);              // staging table for this batch
        copyItems(items, connection);             // bulk copy items into the temp table
        int inserted = insertItems(connection);   // move new rows from temp to final table
        int updated = updateItems(connection);    // update existing final rows from temp
        log.debug("Inserted {} and updated {} from a total of {} rows to {}", inserted, updated, items.size(), finalTableName);
    } catch (Exception e) {
        String message = String.format("Error copying %d items to table %s", items.size(), finalTableName);
        throw new ParserException(message, e);
    }
}
Use of com.hedera.mirror.importer.exception.ParserException in the hashgraph/hedera-mirror-node project:
the aliasToEvmAddress method of the Utility class.
/**
 * Converts an ECDSA secp256k1 alias to a 20 byte EVM address by taking the keccak hash of it.
 * Logic copied from services' AliasManager.
 *
 * @param alias the bytes representing a serialized Key protobuf, may be null or empty
 * @return the 20 byte EVM address, or null for an empty alias or a non-ECDSA key
 * @throws ParserException if the alias cannot be decoded
 */
public static byte[] aliasToEvmAddress(byte[] alias) {
    if (alias == null || alias.length == 0) {
        return null;
    }
    try {
        var key = Key.parseFrom(alias);
        if (key.getKeyCase() != Key.KeyCase.ECDSA_SECP256K1) {
            return null;
        }
        byte[] compressedKey = DomainUtils.toBytes(key.getECDSASecp256K1());
        ECPoint point = SECP256K1_CURVE.decodePoint(compressedKey);
        // getEncoded(false) yields the uncompressed SEC1 form: 0x04 prefix + 64 bytes of X||Y
        byte[] encoded = point.getEncoded(false);
        byte[] publicKey = new byte[64];
        System.arraycopy(encoded, 1, publicKey, 0, 64); // drop the 0x04 prefix byte
        byte[] digest = new Keccak.Digest256().digest(publicKey);
        // the EVM address is the last 20 bytes of keccak256(public key)
        return Arrays.copyOfRange(digest, 12, 32);
    } catch (Exception e) {
        throw new ParserException("Unable to decode alias to EVM address: " + Hex.encodeHexString(alias), e);
    }
}
Use of com.hedera.mirror.importer.exception.ParserException in the hashgraph/hedera-mirror-node project:
the persist method of the BatchInserter class.
/**
 * Persists the given items in a single batch, recording the insert duration metric.
 * The connection is always released back to the data source, even on failure.
 */
@Override
public void persist(Collection<? extends Object> items) {
    if (items == null || items.isEmpty()) {
        return;
    }
    Connection connection = DataSourceUtils.getConnection(dataSource);
    try {
        Stopwatch timer = Stopwatch.createStarted();
        persistItems(items, connection);
        insertDurationMetric.record(timer.elapsed());
        log.info("Copied {} rows to {} table in {}", items.size(), tableName, timer);
    } catch (Exception e) {
        String message = String.format("Error copying %d items to table %s", items.size(), tableName);
        throw new ParserException(message, e);
    } finally {
        DataSourceUtils.releaseConnection(connection, dataSource);
    }
}
Aggregations