Use of org.apache.commons.lang3.tuple.Triple in project BWAPI4J by OpenBW.
The class MapTest, method assertEquals_MiniTileAltitudes.
/**
 * Tests that each MiniTile's altitude, for every WalkPosition, matches between
 * the original BWAPI/BWEM in C++ and this Java port.
 */
private void assertEquals_MiniTileAltitudes(AdvancedData data, BWEM_DummyData dummyBwemData) {
    final List<ImmutableTriple<WalkPosition, Integer, Integer>> wrongAltitudes = new ArrayList<>();
    for (int y = 0; y < data.getMapData().getWalkSize().getY(); ++y) {
        for (int x = 0; x < data.getMapData().getWalkSize().getX(); ++x) {
            final WalkPosition w = new WalkPosition(x, y);
            // The dummy BWEM data stores altitudes in a flat, row-major array indexed by walk position.
            final int expected = dummyBwemData.getMiniTileAltitudes()[data.getMapData().getWalkSize().getX() * y + x];
            final int actual = data.getMiniTile(w).getAltitude().intValue();
            // Assert.assertEquals(w + ": mini tile altitude is wrong.", expected, actual);
            if (expected != actual) {
                wrongAltitudes.add(new ImmutableTriple<>(w, expected, actual));
            }
        }
    }
    for (final ImmutableTriple<WalkPosition, Integer, Integer> triple : wrongAltitudes) {
        logger.warn("Wrong MiniTile altitude for WalkPosition: " + triple.getLeft().toString() + ", expected=" + triple.getMiddle() + ", actual=" + triple.getRight());
    }
}
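The expected value above is looked up by flattening the 2-D WalkPosition into a row-major index (walkSize.getX() * y + x) into the dummy BWEM altitude array. Below is a minimal, self-contained sketch of that indexing with plain ints; the RowMajorIndex class, its toIndex helper and the sample dimensions are illustrative and not part of BWAPI4J.

// Hypothetical sketch: row-major indexing of a width * height grid, as used by the altitude lookup above.
final class RowMajorIndex {

    // Maps a 2-D walk coordinate (x, y) to its position in a flat array of size width * height.
    static int toIndex(int x, int y, int width) {
        return width * y + x;
    }

    public static void main(String[] args) {
        int width = 4;
        int height = 3;
        int[] altitudes = new int[width * height];
        // Fill the flat array in the same y-outer / x-inner order as the test loops.
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                altitudes[toIndex(x, y, width)] = 10 * y + x; // arbitrary sample values
            }
        }
        System.out.println(altitudes[toIndex(1, 2, width)]); // index 4 * 2 + 1 = 9, prints 21
    }
}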
Use of org.apache.commons.lang3.tuple.Triple in project rskj by rsksmart.
The class RepositoryBlockStoreTest, method test.
@Test
public void test() throws Exception {
    // This is how I produced RepositoryBlockStore_data.ser. I had a bitcoind in regtest with 613 blocks + genesis block.
    // NetworkParameters params = RegTestParams.get();
    // Context context = new Context(params);
    // Wallet wallet = new Wallet(context);
    // BlockStore store = new SPVBlockStore(params, new File("spvBlockstore"));
    // AbstractBlockChain chain = new BlockChain(context, wallet, store);
    // PeerGroup peerGroup = new PeerGroup(context, chain);
    // peerGroup.start();
    // final DownloadProgressTracker listener = new DownloadProgressTracker();
    // peerGroup.startBlockChainDownload(listener);
    // listener.await();
    // peerGroup.stop();
    // StoredBlock storedBlock = chain.getChainHead();
    // FileOutputStream fos = new FileOutputStream("RepositoryBlockStore_data.ser");
    // ObjectOutputStream oos = new ObjectOutputStream(fos);
    // for (int i = 0; i < 614; i++) {
    //     Triple<byte[], BigInteger, Integer> tripleStoredBlock = new ImmutableTriple<>(storedBlock.getHeader().bitcoinSerialize(), storedBlock.getChainWork(), storedBlock.getHeight());
    //     oos.writeObject(tripleStoredBlock);
    //     storedBlock = store.get(storedBlock.getHeader().getPrevBlockHash());
    // }
    // oos.close();
    // Read the original store.
    InputStream fileInputStream = ClassLoader.getSystemResourceAsStream("peg/RepositoryBlockStore_data.ser");
    ObjectInputStream objectInputStream = new ObjectInputStream(fileInputStream);
    Repository repository = new RepositoryImplForTesting();
    RskSystemProperties config = new RskSystemProperties();
    RepositoryBlockStore store = new RepositoryBlockStore(config, repository, PrecompiledContracts.BRIDGE_ADDR);
    for (int i = 0; i < 614; i++) {
        Triple<byte[], BigInteger, Integer> tripleStoredBlock = (Triple<byte[], BigInteger, Integer>) objectInputStream.readObject();
        BtcBlock header = RegTestParams.get().getDefaultSerializer().makeBlock(tripleStoredBlock.getLeft());
        StoredBlock storedBlock = new StoredBlock(header, tripleStoredBlock.getMiddle(), tripleStoredBlock.getRight());
        if (i == 0) {
            store.setChainHead(storedBlock);
        }
        store.put(storedBlock);
    }
    // Create a new instance of the store.
    RepositoryBlockStore store2 = new RepositoryBlockStore(config, repository, PrecompiledContracts.BRIDGE_ADDR);
    // Check a specific block that used to fail when we had a bug.
    assertEquals(store.get(Sha256Hash.wrap("373941fe83961cf70e181e468abc5f9f7cc440c711c3d06948fa66f3912ed27a")), store2.get(Sha256Hash.wrap("373941fe83961cf70e181e468abc5f9f7cc440c711c3d06948fa66f3912ed27a")));
    // Check that the new instance's content is identical to the original one.
    StoredBlock storedBlock = store.getChainHead();
    StoredBlock storedBlock2 = store2.getChainHead();
    int headHeight = storedBlock.getHeight();
    for (int i = 0; i < headHeight; i++) {
        assertNotNull(storedBlock);
        assertEquals(storedBlock, storedBlock2);
        Sha256Hash prevBlockHash = storedBlock.getHeader().getPrevBlockHash();
        storedBlock = store.get(prevBlockHash);
        storedBlock2 = store2.get(prevBlockHash);
    }
}
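The commented-out producer and the test body rely on ImmutableTriple being Serializable, so a (byte[], BigInteger, Integer) triple can be written with ObjectOutputStream and read back with ObjectInputStream. The following is a self-contained round-trip sketch of that pattern using an in-memory stream instead of the RepositoryBlockStore_data.ser resource; the class name and sample values are made up for illustration.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.math.BigInteger;
import org.apache.commons.lang3.tuple.ImmutableTriple;
import org.apache.commons.lang3.tuple.Triple;

public class TripleSerializationSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical stand-in for (serialized header bytes, chain work, height).
        Triple<byte[], BigInteger, Integer> original =
                new ImmutableTriple<>(new byte[] { 1, 2, 3 }, BigInteger.valueOf(42), 613);

        // Write the triple the same way the producer snippet does, but to memory.
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (ObjectOutputStream oos = new ObjectOutputStream(buffer)) {
            oos.writeObject(original);
        }

        // Read it back the same way the test does.
        try (ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray()))) {
            @SuppressWarnings("unchecked")
            Triple<byte[], BigInteger, Integer> restored =
                    (Triple<byte[], BigInteger, Integer>) ois.readObject();
            System.out.println(restored.getMiddle()); // 42
            System.out.println(restored.getRight());  // 613
        }
    }
}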
Use of org.apache.commons.lang3.tuple.Triple in project sqlg by pietermartin.
The class BaseSqlDialect, method flushEdgeCache.
@Override
public void flushEdgeCache(SqlgGraph sqlgGraph, Map<MetaEdge, Pair<SortedSet<String>, Map<SqlgEdge, Triple<SqlgVertex, SqlgVertex, Map<String, Object>>>>> edgeCache) {
    for (MetaEdge metaEdge : edgeCache.keySet()) {
        Pair<SortedSet<String>, Map<SqlgEdge, Triple<SqlgVertex, SqlgVertex, Map<String, Object>>>> triples = edgeCache.get(metaEdge);
        Map<String, PropertyType> propertyTypeMap = sqlgGraph.getTopology().getTableFor(metaEdge.getSchemaTable().withPrefix(EDGE_PREFIX));
        SortedSet<String> columns = triples.getLeft();
        Map<SqlgEdge, Triple<SqlgVertex, SqlgVertex, Map<String, Object>>> rows = triples.getRight();
        StringBuilder sql = new StringBuilder("INSERT INTO ");
        sql.append(sqlgGraph.getSqlDialect().maybeWrapInQoutes(metaEdge.getSchemaTable().getSchema()));
        sql.append(".");
        sql.append(sqlgGraph.getSqlDialect().maybeWrapInQoutes(EDGE_PREFIX + metaEdge.getSchemaTable().getTable()));
        sql.append(" (");
        Map<String, PropertyColumn> propertyColumns = sqlgGraph.getTopology().getSchema(metaEdge.getSchemaTable().getSchema()).orElseThrow(() -> new IllegalStateException(String.format("Schema %s not found", metaEdge.getSchemaTable().getSchema()))).getEdgeLabel(metaEdge.getSchemaTable().getTable()).orElseThrow(() -> new IllegalStateException(String.format("EdgeLabel %s not found", metaEdge.getSchemaTable().getTable()))).getProperties();
        int i = 1;
        // Append the property column names, one per SQL definition of each property's type.
        for (String column : columns) {
            PropertyType propertyType = propertyTypeMap.get(column);
            String[] sqlDefinitions = sqlgGraph.getSqlDialect().propertyTypeToSqlDefinition(propertyType);
            int count = 1;
            for (@SuppressWarnings("unused") String sqlDefinition : sqlDefinitions) {
                if (count > 1) {
                    sql.append(sqlgGraph.getSqlDialect().maybeWrapInQoutes(column + propertyType.getPostFixes()[count - 2]));
                } else {
                    sql.append(sqlgGraph.getSqlDialect().maybeWrapInQoutes(column));
                }
                if (count++ < sqlDefinitions.length) {
                    sql.append(",");
                }
            }
            if (i++ < columns.size()) {
                sql.append(", ");
            }
        }
        if (!columns.isEmpty()) {
            sql.append(", ");
        }
        sql.append(sqlgGraph.getSqlDialect().maybeWrapInQoutes(metaEdge.getOutLabel() + OUT_VERTEX_COLUMN_END));
        sql.append(", ");
        sql.append(sqlgGraph.getSqlDialect().maybeWrapInQoutes(metaEdge.getInLabel() + IN_VERTEX_COLUMN_END));
        sql.append(") VALUES (");
        i = 1;
        // Append one bind placeholder per SQL definition of each property column.
        for (String column : columns) {
            PropertyType propertyType = propertyTypeMap.get(column);
            String[] sqlDefinitions = sqlgGraph.getSqlDialect().propertyTypeToSqlDefinition(propertyType);
            int count = 1;
            for (@SuppressWarnings("unused") String sqlDefinition : sqlDefinitions) {
                sql.append("?");
                if (count++ < sqlDefinitions.length) {
                    sql.append(",");
                }
            }
            if (i++ < columns.size()) {
                sql.append(", ");
            }
        }
        if (!columns.isEmpty()) {
            sql.append(", ");
        }
        sql.append("?, ?");
        sql.append(")");
        if (sqlgGraph.getSqlDialect().needsSemicolon()) {
            sql.append(";");
        }
        if (logger.isDebugEnabled()) {
            logger.debug(sql.toString());
        }
        Connection conn = sqlgGraph.tx().getConnection();
        try (PreparedStatement preparedStatement = conn.prepareStatement(sql.toString(), Statement.RETURN_GENERATED_KEYS)) {
            List<SqlgEdge> sqlgEdges = new ArrayList<>();
            for (Map.Entry<SqlgEdge, Triple<SqlgVertex, SqlgVertex, Map<String, Object>>> rowEntry : rows.entrySet()) {
                i = 1;
                SqlgEdge sqlgEdge = rowEntry.getKey();
                sqlgEdges.add(sqlgEdge);
                Triple<SqlgVertex, SqlgVertex, Map<String, Object>> parameterValueMap = rowEntry.getValue();
                List<Pair<PropertyType, Object>> typeAndValues = new ArrayList<>();
                for (String column : columns) {
                    PropertyColumn propertyColumn = propertyColumns.get(column);
                    typeAndValues.add(Pair.of(propertyColumn.getPropertyType(), parameterValueMap.getRight().get(column)));
                }
                i = SqlgUtil.setKeyValuesAsParameterUsingPropertyColumn(sqlgGraph, true, i, preparedStatement, typeAndValues);
                // Bind the out and in vertex ids; the Triple holds (outVertex, inVertex, properties).
                preparedStatement.setLong(i++, ((RecordId) parameterValueMap.getLeft().id()).getId());
                preparedStatement.setLong(i, ((RecordId) parameterValueMap.getMiddle().id()).getId());
                preparedStatement.addBatch();
            }
            preparedStatement.executeBatch();
            ResultSet generatedKeys = preparedStatement.getGeneratedKeys();
            i = 0;
            while (generatedKeys.next()) {
                sqlgEdges.get(i++).setInternalPrimaryKey(RecordId.from(metaEdge.getSchemaTable(), generatedKeys.getLong(1)));
            }
            // insertGlobalUniqueIndex(keyValueMap, propertyColumns);
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
    }
}
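The method above assembles one multi-column INSERT, binds each cached edge from its Triple<SqlgVertex, SqlgVertex, Map<String, Object>> (out vertex, in vertex, properties), and uses addBatch()/executeBatch() with getGeneratedKeys() to recover the new ids. Below is a stripped-down sketch of the same JDBC batching pattern with plain types; the edge_table name, the single name column and the Long ids are illustrative rather than sqlg's, and retrieving generated keys after a batch depends on the JDBC driver supporting it.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Map;
import org.apache.commons.lang3.tuple.Triple;

public class BatchEdgeInsertSketch {

    // Each value is (outVertexId, inVertexId, properties), mirroring the Triple used by flushEdgeCache.
    static void flush(Connection conn, Map<Long, Triple<Long, Long, Map<String, Object>>> rows) throws SQLException {
        String sql = "INSERT INTO edge_table (name, out_vertex_id, in_vertex_id) VALUES (?, ?, ?)";
        try (PreparedStatement ps = conn.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS)) {
            for (Triple<Long, Long, Map<String, Object>> row : rows.values()) {
                ps.setObject(1, row.getRight().get("name"));
                ps.setLong(2, row.getLeft());
                ps.setLong(3, row.getMiddle());
                ps.addBatch();
            }
            ps.executeBatch();
            // Collect the generated primary keys in insertion order, as the dialect does for its edges.
            try (ResultSet keys = ps.getGeneratedKeys()) {
                while (keys.next()) {
                    System.out.println("generated id: " + keys.getLong(1));
                }
            }
        }
    }
}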
Use of org.apache.commons.lang3.tuple.Triple in project sqlg by pietermartin.
The class SqlDialect, method drop.
/**
 * If the query traverses edges then the deletion logic is non-trivial.
 * The edges can not be deleted upfront, as we would then no longer be able to traverse to the leaf vertices
 * because the edges would already be gone. In this case we need to drop foreign key constraint checking,
 * delete the vertices and then the edges using the same query.
 * The edge query is the same as the vertex query with the last SchemaTableTree removed from the distinctQueryStack.
 *
 * @param sqlgGraph            The graph.
 * @param leafElementsToDelete The leaf elements of the query, e.g. for g.V().out().out() the last vertices returned by the gremlin query.
 * @param edgesToDelete        The query selecting the edges to delete; present only when the query traverses edges.
 * @param distinctQueryStack   The query's SchemaTableTree stack as constructed by parsing.
 * @return The list of drop statements to execute, each a Triple of the drop query type, the SQL text and the affected SchemaTable.
 */
default List<Triple<SqlgSqlExecutor.DROP_QUERY, String, SchemaTable>> drop(SqlgGraph sqlgGraph, String leafElementsToDelete, Optional<String> edgesToDelete, LinkedList<SchemaTableTree> distinctQueryStack) {
    List<Triple<SqlgSqlExecutor.DROP_QUERY, String, SchemaTable>> sqls = new ArrayList<>();
    SchemaTableTree last = distinctQueryStack.getLast();
    SchemaTableTree lastEdge = null;
    // If the leaf elements are vertices then we also need to delete their in and out edges.
    boolean isVertex = last.getSchemaTable().isVertexTable();
    VertexLabel lastVertexLabel = null;
    if (isVertex) {
        Optional<Schema> schemaOptional = sqlgGraph.getTopology().getSchema(last.getSchemaTable().getSchema());
        Preconditions.checkState(schemaOptional.isPresent(), "BUG: %s not found in the topology.", last.getSchemaTable().getSchema());
        Schema schema = schemaOptional.get();
        Optional<VertexLabel> vertexLabelOptional = schema.getVertexLabel(last.getSchemaTable().withOutPrefix().getTable());
        Preconditions.checkState(vertexLabelOptional.isPresent(), "BUG: %s not found in the topology.", last.getSchemaTable().withOutPrefix().getTable());
        lastVertexLabel = vertexLabelOptional.get();
    }
    boolean queryTraversesEdge = isVertex && (distinctQueryStack.size() > 1);
    EdgeLabel lastEdgeLabel = null;
    if (queryTraversesEdge) {
        lastEdge = distinctQueryStack.get(distinctQueryStack.size() - 2);
        Optional<Schema> edgeSchema = sqlgGraph.getTopology().getSchema(lastEdge.getSchemaTable().getSchema());
        Preconditions.checkState(edgeSchema.isPresent(), "BUG: %s not found in the topology.", lastEdge.getSchemaTable().getSchema());
        Optional<EdgeLabel> edgeLabelOptional = edgeSchema.get().getEdgeLabel(lastEdge.getSchemaTable().withOutPrefix().getTable());
        Preconditions.checkState(edgeLabelOptional.isPresent(), "BUG: %s not found in the topology.", lastEdge.getSchemaTable().getTable());
        lastEdgeLabel = edgeLabelOptional.get();
    }
    if (isVertex) {
        // First delete all edges except for the edge traversed to get to the vertices.
        StringBuilder sb;
        for (Map.Entry<String, EdgeLabel> edgeLabelEntry : lastVertexLabel.getOutEdgeLabels().entrySet()) {
            EdgeLabel edgeLabel = edgeLabelEntry.getValue();
            if (lastEdgeLabel == null || !edgeLabel.equals(lastEdgeLabel)) {
                // Delete
                sb = new StringBuilder();
                sb.append("DELETE FROM ");
                sb.append(maybeWrapInQoutes(edgeLabel.getSchema().getName()));
                sb.append(".");
                sb.append(maybeWrapInQoutes(Topology.EDGE_PREFIX + edgeLabel.getName()));
                sb.append("\nWHERE ");
                sb.append(maybeWrapInQoutes(lastVertexLabel.getSchema().getName() + "." + lastVertexLabel.getName() + Topology.OUT_VERTEX_COLUMN_END));
                sb.append(" IN\n\t(");
                sb.append(leafElementsToDelete);
                sb.append(")");
                sqls.add(Triple.of(SqlgSqlExecutor.DROP_QUERY.NORMAL, sb.toString(), SchemaTable.of(edgeLabel.getSchema().getName(), Topology.EDGE_PREFIX + edgeLabel.getName())));
            }
        }
        for (Map.Entry<String, EdgeLabel> edgeLabelEntry : lastVertexLabel.getInEdgeLabels().entrySet()) {
            EdgeLabel edgeLabel = edgeLabelEntry.getValue();
            if (lastEdgeLabel == null || !edgeLabel.equals(lastEdgeLabel)) {
                // Delete
                sb = new StringBuilder();
                sb.append("DELETE FROM ");
                sb.append(maybeWrapInQoutes(edgeLabel.getSchema().getName()));
                sb.append(".");
                sb.append(maybeWrapInQoutes(Topology.EDGE_PREFIX + edgeLabel.getName()));
                sb.append("\nWHERE ");
                sb.append(maybeWrapInQoutes(lastVertexLabel.getSchema().getName() + "." + lastVertexLabel.getName() + Topology.IN_VERTEX_COLUMN_END));
                sb.append(" IN\n\t(");
                sb.append(leafElementsToDelete);
                sb.append(")");
                sqls.add(Triple.of(SqlgSqlExecutor.DROP_QUERY.NORMAL, sb.toString(), SchemaTable.of(edgeLabel.getSchema().getName(), Topology.EDGE_PREFIX + edgeLabel.getName())));
            }
        }
    }
    // Need to defer foreign key constraint checks.
    if (queryTraversesEdge) {
        String edgeTableName = (maybeWrapInQoutes(lastEdge.getSchemaTable().getSchema())) + "." + maybeWrapInQoutes(lastEdge.getSchemaTable().getTable());
        sqls.add(Triple.of(SqlgSqlExecutor.DROP_QUERY.ALTER, this.sqlToTurnOffReferentialConstraintCheck(edgeTableName), lastEdge.getSchemaTable()));
    }
    // Delete the leaf vertices; if there are foreign keys then the checks have been deferred.
    StringBuilder sb = new StringBuilder();
    sb.append("DELETE FROM ");
    sb.append(maybeWrapInQoutes(last.getSchemaTable().getSchema()));
    sb.append(".");
    sb.append(maybeWrapInQoutes(last.getSchemaTable().getTable()));
    sb.append("\nWHERE \"ID\" IN (\n\t");
    sb.append(leafElementsToDelete);
    sb.append(")");
    sqls.add(Triple.of(SqlgSqlExecutor.DROP_QUERY.NORMAL, sb.toString(), null));
    if (queryTraversesEdge) {
        sb = new StringBuilder();
        sb.append("DELETE FROM ");
        sb.append(maybeWrapInQoutes(lastEdge.getSchemaTable().getSchema()));
        sb.append(".");
        sb.append(maybeWrapInQoutes(lastEdge.getSchemaTable().getTable()));
        sb.append("\nWHERE \"ID\" IN (\n\t");
        sb.append(edgesToDelete.get());
        sb.append(")");
        sqls.add(Triple.of(SqlgSqlExecutor.DROP_QUERY.EDGE, sb.toString(), lastEdge.getSchemaTable()));
    }
    // Re-enable the foreign key constraint checks.
    if (queryTraversesEdge) {
        String edgeTableName = (maybeWrapInQoutes(lastEdge.getSchemaTable().getSchema())) + "." + maybeWrapInQoutes(lastEdge.getSchemaTable().getTable());
        sqls.add(Triple.of(SqlgSqlExecutor.DROP_QUERY.ALTER, this.sqlToTurnOnReferentialConstraintCheck(edgeTableName), null));
    }
    return sqls;
}
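Each element returned by drop is a Triple pairing the statement kind (NORMAL, ALTER or EDGE), the SQL text and the affected SchemaTable, so a caller can execute the statements in order and handle each kind differently. The sketch below illustrates that convention with plain strings in place of sqlg's types; the DropKind enum, the SQL strings and the trigger-based constraint toggling are illustrative assumptions, not sqlg's actual output.

import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.tuple.Triple;

public class DropPlanSketch {

    // Stand-in for SqlgSqlExecutor.DROP_QUERY.
    enum DropKind { NORMAL, ALTER, EDGE }

    public static void main(String[] args) {
        List<Triple<DropKind, String, String>> plan = new ArrayList<>();
        // Disable constraint checks, delete the leaf vertices, delete the traversed edges, re-enable the checks.
        plan.add(Triple.of(DropKind.ALTER, "ALTER TABLE \"public\".\"E_knows\" DISABLE TRIGGER ALL", "public.E_knows"));
        plan.add(Triple.of(DropKind.NORMAL, "DELETE FROM \"public\".\"V_person\" WHERE \"ID\" IN (1, 2)", null));
        plan.add(Triple.of(DropKind.EDGE, "DELETE FROM \"public\".\"E_knows\" WHERE \"ID\" IN (10)", "public.E_knows"));
        plan.add(Triple.of(DropKind.ALTER, "ALTER TABLE \"public\".\"E_knows\" ENABLE TRIGGER ALL", null));

        for (Triple<DropKind, String, String> step : plan) {
            // A real executor would branch on getLeft() and run getMiddle() against the database.
            System.out.println(step.getLeft() + ": " + step.getMiddle());
        }
    }
}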
Use of org.apache.commons.lang3.tuple.Triple in project sqlg by pietermartin.
The class PostgresDialect, method sqlTruncate.
@Override
public List<Triple<SqlgSqlExecutor.DROP_QUERY, String, SchemaTable>> sqlTruncate(SqlgGraph sqlgGraph, SchemaTable schemaTable) {
    Preconditions.checkState(schemaTable.isWithPrefix(), "SqlDialect.sqlTruncate's schemaTable must start with the prefix %s or %s", Topology.VERTEX_PREFIX, Topology.EDGE_PREFIX);
    List<Triple<SqlgSqlExecutor.DROP_QUERY, String, SchemaTable>> result = new ArrayList<>();
    Optional<Schema> schemaOptional = sqlgGraph.getTopology().getSchema(schemaTable.getSchema());
    Preconditions.checkState(schemaOptional.isPresent(), "BUG: %s not found in the topology.", schemaTable.getSchema());
    Schema schema = schemaOptional.get();
    List<String> edgesToTruncate = new ArrayList<>();
    if (schemaTable.isVertexTable()) {
        // Need to truncate any in/out edge tables as well.
        Optional<VertexLabel> vertexLabelOptional = schema.getVertexLabel(schemaTable.withOutPrefix().getTable());
        Preconditions.checkState(vertexLabelOptional.isPresent(), "BUG: %s not found in the topology.", schemaTable.withOutPrefix().getTable());
        VertexLabel vertexLabel = vertexLabelOptional.get();
        Collection<EdgeLabel> outEdgeLabels = vertexLabel.getOutEdgeLabels().values();
        for (EdgeLabel edgeLabel : outEdgeLabels) {
            if (edgeLabel.getOutVertexLabels().size() == 1) {
                // The vertex table being truncated is this edge label's only out vertex label, so its edge table can be truncated too.
                edgesToTruncate.add(maybeWrapInQoutes(edgeLabel.getSchema().getName()) + "." + maybeWrapInQoutes(Topology.EDGE_PREFIX + edgeLabel.getName()));
            } else {
                throw new IllegalStateException("BUG: sqlTruncate should not be called when an out edge label has more than one out vertex label.");
            }
        }
        Collection<EdgeLabel> inEdgeLabels = vertexLabel.getInEdgeLabels().values();
        for (EdgeLabel edgeLabel : inEdgeLabels) {
            if (edgeLabel.getInVertexLabels().size() == 1) {
                // The vertex table being truncated is this edge label's only in vertex label, so its edge table can be truncated too.
                edgesToTruncate.add(maybeWrapInQoutes(edgeLabel.getSchema().getName()) + "." + maybeWrapInQoutes(Topology.EDGE_PREFIX + edgeLabel.getName()));
            } else {
                throw new IllegalStateException("BUG: sqlTruncate should not be called when an in edge label has more than one in vertex label.");
            }
        }
    }
    // Truncate the edge tables first, then the vertex (or edge) table itself, in a single statement.
    StringBuilder sql = new StringBuilder("TRUNCATE ONLY ");
    for (String edgeToTruncate : edgesToTruncate) {
        sql.append(edgeToTruncate);
        sql.append(", ");
    }
    sql.append(maybeWrapInQoutes(schemaTable.getSchema())).append(".").append(maybeWrapInQoutes(schemaTable.getTable()));
    result.add(Triple.of(SqlgSqlExecutor.DROP_QUERY.TRUNCATE, sql.toString(), schemaTable));
    return result;
}
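For a vertex table whose edge labels each touch only that vertex label, the method builds a single TRUNCATE ONLY statement that lists the edge tables first and the vertex table last. Below is a short sketch of that assembly with hard-coded names; public.V_person and E_knows are made up, and the double quotes simply stand in for whatever maybeWrapInQoutes produces.

import java.util.Arrays;
import java.util.List;

public class TruncateSketch {
    public static void main(String[] args) {
        List<String> edgeTables = Arrays.asList("\"public\".\"E_knows\"");
        StringBuilder sql = new StringBuilder("TRUNCATE ONLY ");
        for (String edgeTable : edgeTables) {
            sql.append(edgeTable).append(", ");
        }
        sql.append("\"public\".\"V_person\"");
        // Prints: TRUNCATE ONLY "public"."E_knows", "public"."V_person"
        System.out.println(sql);
    }
}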