Use of org.umlg.sqlg.structure.PropertyType in project sqlg by pietermartin.
The class PostgresDialect, method bulkAddEdges.
@Override
public <L, R> void bulkAddEdges(SqlgGraph sqlgGraph, SchemaTable out, SchemaTable in, String edgeLabel, Pair<String, String> idFields, Collection<Pair<L, R>> uids) {
    if (!sqlgGraph.tx().isInStreamingBatchMode() && !sqlgGraph.tx().isInStreamingWithLockBatchMode()) {
        throw SqlgExceptions.invalidMode("Transaction must be in " + BatchManager.BatchModeType.STREAMING + " or " + BatchManager.BatchModeType.STREAMING_WITH_LOCK + " mode for bulkAddEdges");
    }
    if (!uids.isEmpty()) {
        // create a temp table and copy the uids into it
        Map<String, PropertyType> columns = new HashMap<>();
        Map<String, PropertyType> outProperties = sqlgGraph.getTopology().getTableFor(out.withPrefix(VERTEX_PREFIX));
        Map<String, PropertyType> inProperties = sqlgGraph.getTopology().getTableFor(in.withPrefix(VERTEX_PREFIX));
        PropertyType outPropertyType;
        if (idFields.getLeft().equals(Topology.ID)) {
            outPropertyType = PropertyType.INTEGER;
        } else {
            outPropertyType = outProperties.get(idFields.getLeft());
        }
        PropertyType inPropertyType;
        if (idFields.getRight().equals(Topology.ID)) {
            inPropertyType = PropertyType.INTEGER;
        } else {
            inPropertyType = inProperties.get(idFields.getRight());
        }
        columns.put("out", outPropertyType);
        columns.put("in", inPropertyType);
        SecureRandom random = new SecureRandom();
        byte[] bytes = new byte[6];
        random.nextBytes(bytes);
        String tmpTableIdentified = Base64.getEncoder().encodeToString(bytes);
        tmpTableIdentified = Topology.BULK_TEMP_EDGE + tmpTableIdentified;
        sqlgGraph.getTopology().getPublicSchema().createTempTable(tmpTableIdentified, columns);
        this.copyInBulkTempEdges(sqlgGraph, SchemaTable.of(out.getSchema(), tmpTableIdentified), uids, outPropertyType, inPropertyType);
        // copy from select: select the edge ids to copy into the new table by joining on the temp table
        Optional<VertexLabel> outVertexLabelOptional = sqlgGraph.getTopology().getVertexLabel(out.getSchema(), out.getTable());
        Optional<VertexLabel> inVertexLabelOptional = sqlgGraph.getTopology().getVertexLabel(in.getSchema(), in.getTable());
        Preconditions.checkState(outVertexLabelOptional.isPresent(), "Out VertexLabel must be present. Not found for %s", out.toString());
        Preconditions.checkState(inVertexLabelOptional.isPresent(), "In VertexLabel must be present. Not found for %s", in.toString());
        //noinspection OptionalGetWithoutIsPresent
        sqlgGraph.getTopology().ensureEdgeLabelExist(edgeLabel, outVertexLabelOptional.get(), inVertexLabelOptional.get(), Collections.emptyMap());
        StringBuilder sql = new StringBuilder("INSERT INTO \n");
        sql.append(this.maybeWrapInQoutes(out.getSchema()));
        sql.append(".");
        sql.append(this.maybeWrapInQoutes(EDGE_PREFIX + edgeLabel));
        sql.append(" (");
        sql.append(this.maybeWrapInQoutes(out.getSchema() + "." + out.getTable() + Topology.OUT_VERTEX_COLUMN_END));
        sql.append(",");
        sql.append(this.maybeWrapInQoutes(in.getSchema() + "." + in.getTable() + Topology.IN_VERTEX_COLUMN_END));
        sql.append(") \n");
        sql.append("select _out.\"ID\" as \"");
        sql.append(out.getSchema() + "." + out.getTable() + Topology.OUT_VERTEX_COLUMN_END);
        sql.append("\", _in.\"ID\" as \"");
        sql.append(in.getSchema() + "." + in.getTable() + Topology.IN_VERTEX_COLUMN_END);
        sql.append("\" FROM ");
        sql.append(this.maybeWrapInQoutes(in.getSchema()));
        sql.append(".");
        sql.append(this.maybeWrapInQoutes(VERTEX_PREFIX + in.getTable()));
        sql.append(" _in join ");
        sql.append(this.maybeWrapInQoutes(tmpTableIdentified) + " ab on ab.in = _in." + this.maybeWrapInQoutes(idFields.getRight()) + " join ");
        sql.append(this.maybeWrapInQoutes(out.getSchema()));
        sql.append(".");
        sql.append(this.maybeWrapInQoutes(VERTEX_PREFIX + out.getTable()));
        sql.append(" _out on ab.out = _out." + this.maybeWrapInQoutes(idFields.getLeft()));
        if (logger.isDebugEnabled()) {
            logger.debug(sql.toString());
        }
        Connection conn = sqlgGraph.tx().getConnection();
        try (PreparedStatement preparedStatement = conn.prepareStatement(sql.toString())) {
            preparedStatement.executeUpdate();
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
    }
}
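This dialect method is not normally called directly; it backs the SqlgGraph.bulkAddEdges(...) API, which joins a caller-supplied collection of (out, in) identifier pairs against already loaded vertices. A minimal sketch of that usage, assuming "Person" and "Car" vertex labels that carry an "index" property to join on (the labels, property name and uid values here are illustrative, not taken from the code above):

    // Sketch only: the guard at the top of bulkAddEdges requires streaming or
    // streaming-with-lock batch mode, so either mode switch works here.
    sqlgGraph.tx().streamingWithLockBatchModeOn();
    List<Pair<String, String>> uids = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        // left = "index" value of the out vertex, right = "index" value of the in vertex
        uids.add(Pair.of(String.valueOf(i), String.valueOf(i)));
    }
    sqlgGraph.bulkAddEdges("Person", "Car", "drives", Pair.of("index", "index"), uids);
    sqlgGraph.tx().commit();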
Use of org.umlg.sqlg.structure.PropertyType in project sqlg by pietermartin.
The class PostgresDialect, method writeStreamingEdge.
@Override
public void writeStreamingEdge(Writer writer, SqlgEdge sqlgEdge, SqlgVertex outVertex, SqlgVertex inVertex, Map<String, Object> keyValueMap) {
    try {
        String encoding = "UTF-8";
        writer.write(((RecordId) outVertex.id()).getId().toString());
        writer.write(COPY_COMMAND_DELIMITER);
        writer.write(((RecordId) inVertex.id()).getId().toString());
        for (Map.Entry<String, Object> entry : keyValueMap.entrySet()) {
            writer.write(COPY_COMMAND_DELIMITER);
            Object value = entry.getValue();
            PropertyType propertyType;
            if (value == null) {
                propertyType = PropertyType.STRING;
            } else {
                propertyType = PropertyType.from(value);
            }
            if (JSON_ARRAY == propertyType) {
                throw SqlgExceptions.invalidPropertyType(propertyType);
            }
            valueToStreamBytes(writer, propertyType, value);
        }
        writer.write("\n");
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
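writeStreamingEdge emits one delimiter-separated row per edge onto the open COPY stream: out vertex id, in vertex id, then each property value in the iteration order of keyValueMap. It is invoked internally when edges are streamed; a rough sketch of the user-facing side, assuming SqlgVertex.streamEdge and SqlgGraph.streamVertex are available in streaming batch mode (labels and properties are illustrative):

    // Sketch only: streaming batch mode writes rows straight onto a COPY stream,
    // which ends up in writeStreamingEdge above. Names are made up for illustration.
    sqlgGraph.tx().streamingBatchModeOn();
    SqlgVertex person = (SqlgVertex) sqlgGraph.streamVertex(T.label, "Person", "name", "john");
    sqlgGraph.tx().flush(); // only one label may be streamed at a time
    SqlgVertex car = (SqlgVertex) sqlgGraph.streamVertex(T.label, "Car", "model", "vw");
    sqlgGraph.tx().flush();
    person.streamEdge("drives", car);
    sqlgGraph.tx().commit();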
Use of org.umlg.sqlg.structure.PropertyType in project sqlg by pietermartin.
The class PostgresDialect, method flushEdgeCache.
@Override
public void flushEdgeCache(SqlgGraph sqlgGraph, Map<MetaEdge, Pair<SortedSet<String>, Map<SqlgEdge, Triple<SqlgVertex, SqlgVertex, Map<String, Object>>>>> edgeCache) {
    Connection con = sqlgGraph.tx().getConnection();
    try {
        for (MetaEdge metaEdge : edgeCache.keySet()) {
            Pair<SortedSet<String>, Map<SqlgEdge, Triple<SqlgVertex, SqlgVertex, Map<String, Object>>>> triples = edgeCache.get(metaEdge);
            Map<String, PropertyType> propertyTypeMap = sqlgGraph.getTopology().getTableFor(metaEdge.getSchemaTable().withPrefix(EDGE_PREFIX));
            StringBuilder sql = new StringBuilder();
            sql.append("COPY ");
            sql.append(maybeWrapInQoutes(metaEdge.getSchemaTable().getSchema()));
            sql.append(".");
            sql.append(maybeWrapInQoutes(EDGE_PREFIX + metaEdge.getSchemaTable().getTable()));
            sql.append(" (");
            for (Triple<SqlgVertex, SqlgVertex, Map<String, Object>> triple : triples.getRight().values()) {
                int count = 1;
                sql.append(maybeWrapInQoutes(triple.getLeft().getSchema() + "." + triple.getLeft().getTable() + Topology.OUT_VERTEX_COLUMN_END));
                sql.append(", ");
                sql.append(maybeWrapInQoutes(triple.getMiddle().getSchema() + "." + triple.getMiddle().getTable() + Topology.IN_VERTEX_COLUMN_END));
                for (String key : triples.getLeft()) {
                    if (count <= triples.getLeft().size()) {
                        sql.append(", ");
                    }
                    count++;
                    appendKeyForStream(propertyTypeMap.get(key), sql, key);
                }
                break;
            }
            sql.append(") ");
            sql.append(" FROM stdin CSV DELIMITER '");
            sql.append(COPY_COMMAND_DELIMITER);
            sql.append("' ");
            sql.append("QUOTE ");
            sql.append(COPY_COMMAND_QUOTE);
            sql.append(" ESCAPE '");
            sql.append(ESCAPE);
            sql.append("';");
            if (logger.isDebugEnabled()) {
                logger.debug(sql.toString());
            }
            long numberInserted = 0;
            try (Writer writer = streamSql(sqlgGraph, sql.toString())) {
                for (Map.Entry<SqlgEdge, Triple<SqlgVertex, SqlgVertex, Map<String, Object>>> sqlgEdgeTripleEntry : triples.getRight().entrySet()) {
                    SqlgEdge sqlgEdge = sqlgEdgeTripleEntry.getKey();
                    Triple<SqlgVertex, SqlgVertex, Map<String, Object>> outInVertexKeyValueMap = sqlgEdgeTripleEntry.getValue();
                    LinkedHashMap<String, Object> values = new LinkedHashMap<>();
                    for (String key : triples.getLeft()) {
                        values.put(key, outInVertexKeyValueMap.getRight().get(key));
                    }
                    writeStreamingEdge(writer, sqlgEdge, outInVertexKeyValueMap.getLeft(), outInVertexKeyValueMap.getMiddle(), values);
                    numberInserted++;
                }
            }
            long endHigh;
            sql.setLength(0);
            sql.append("SELECT CURRVAL('" + maybeWrapInQoutes(metaEdge.getSchemaTable().getSchema()) + "." + maybeWrapInQoutes(EDGE_PREFIX + metaEdge.getSchemaTable().getTable() + "_ID_seq") + "');");
            if (logger.isDebugEnabled()) {
                logger.debug(sql.toString());
            }
            try (PreparedStatement preparedStatement = con.prepareStatement(sql.toString())) {
                ResultSet resultSet = preparedStatement.executeQuery();
                resultSet.next();
                endHigh = resultSet.getLong(1);
                resultSet.close();
            }
            // set the internal primary key (id) on each cached edge
            long id = endHigh - numberInserted + 1;
            for (SqlgEdge sqlgEdge : triples.getRight().keySet()) {
                sqlgEdge.setInternalPrimaryKey(RecordId.from(metaEdge.getSchemaTable(), id++));
            }
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
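flushEdgeCache is what normal batch mode invokes when the transaction flushes: it builds one COPY command per edge label, streams every cached edge through writeStreamingEdge, reads the sequence's CURRVAL and back-fills the RecordIds. A minimal sketch of the usage that leads here, with illustrative labels and properties:

    // Sketch only: normal batch mode caches edges in memory; commit() (or tx().flush())
    // drives flushEdgeCache, which COPYs the rows and assigns the edge ids afterwards.
    sqlgGraph.tx().normalBatchModeOn();
    Vertex person = sqlgGraph.addVertex(T.label, "Person", "name", "john");
    Vertex car = sqlgGraph.addVertex(T.label, "Car", "model", "vw");
    person.addEdge("drives", car, "since", 2015);
    sqlgGraph.tx().commit(); // the cached edge is flushed via COPY and its RecordId set here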
Use of org.umlg.sqlg.structure.PropertyType in project sqlg by pietermartin.
The class PostgresDialect, method mapEdgeToInputStream.
private InputStream mapEdgeToInputStream(Map<String, PropertyType> propertyTypeMap, Pair<SortedSet<String>, Map<SqlgEdge, Triple<SqlgVertex, SqlgVertex, Map<String, Object>>>> edgeCache) throws SQLException {
    StringBuilder sb = new StringBuilder();
    int count = 1;
    for (Triple<SqlgVertex, SqlgVertex, Map<String, Object>> triple : edgeCache.getRight().values()) {
        sb.append(((RecordId) triple.getLeft().id()).getId());
        sb.append(COPY_COMMAND_DELIMITER);
        sb.append(((RecordId) triple.getMiddle().id()).getId());
        if (!edgeCache.getLeft().isEmpty()) {
            sb.append(COPY_COMMAND_DELIMITER);
        }
        int countKeys = 1;
        for (String key : edgeCache.getLeft()) {
            PropertyType propertyType = propertyTypeMap.get(key);
            Object value = triple.getRight().get(key);
            switch (propertyType) {
                case BYTE_ARRAY:
                    String valueOfArrayAsString = PGbytea.toPGString((byte[]) SqlgUtil.convertByteArrayToPrimitiveArray((Byte[]) value));
                    sb.append(valueOfArrayAsString);
                    break;
                case byte_ARRAY:
                    valueOfArrayAsString = PGbytea.toPGString((byte[]) value);
                    sb.append(valueOfArrayAsString);
                    break;
                default:
                    sb.append(valueToStringForBulkLoad(propertyType, value));
            }
            if (countKeys < edgeCache.getLeft().size()) {
                sb.append(COPY_COMMAND_DELIMITER);
            }
            countKeys++;
        }
        if (count++ < edgeCache.getRight().size()) {
            sb.append("\n");
        }
    }
    return new ByteArrayInputStream(sb.toString().getBytes());
}
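The stream produced above is plain COPY text: out vertex id, in vertex id, then one value per property key, separated by COPY_COMMAND_DELIMITER, with a newline between edges but none after the last row. A small stand-alone sketch of that row layout, using a tab as a stand-in delimiter and made-up ids and values:

    // Stand-in for COPY_COMMAND_DELIMITER; the real value comes from the dialect.
    String delim = "\t";
    StringBuilder sb = new StringBuilder();
    sb.append(1L).append(delim).append(2L).append(delim).append("john").append("\n"); // edge 1
    sb.append(3L).append(delim).append(4L).append(delim).append("peter");             // edge 2, no trailing newline
    InputStream copyIn = new ByteArrayInputStream(sb.toString().getBytes());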
Use of org.umlg.sqlg.structure.PropertyType in project sqlg by pietermartin.
The class PostgresDialect, method constructCompleteCopyCommandSqlEdge.
@Override
public String constructCompleteCopyCommandSqlEdge(SqlgGraph sqlgGraph, SqlgEdge sqlgEdge, SqlgVertex outVertex, SqlgVertex inVertex, Map<String, Object> keyValueMap) {
    Map<String, PropertyType> propertyTypeMap = sqlgGraph.getTopology().getTableFor(SchemaTable.of(sqlgEdge.getSchema(), EDGE_PREFIX + sqlgEdge.getTable()));
    StringBuilder sql = new StringBuilder();
    sql.append("COPY ");
    sql.append(maybeWrapInQoutes(sqlgEdge.getSchema()));
    sql.append(".");
    sql.append(maybeWrapInQoutes(EDGE_PREFIX + sqlgEdge.getTable()));
    sql.append(" (");
    sql.append(maybeWrapInQoutes(outVertex.getSchema() + "." + outVertex.getTable() + Topology.OUT_VERTEX_COLUMN_END));
    sql.append(", ");
    sql.append(maybeWrapInQoutes(inVertex.getSchema() + "." + inVertex.getTable() + Topology.IN_VERTEX_COLUMN_END));
    int count = 1;
    for (String key : keyValueMap.keySet()) {
        if (count <= keyValueMap.size()) {
            sql.append(", ");
        }
        count++;
        appendKeyForStream(propertyTypeMap.get(key), sql, key);
    }
    sql.append(") ");
    sql.append(" FROM stdin CSV DELIMITER '");
    sql.append(COPY_COMMAND_DELIMITER);
    sql.append("' ");
    sql.append("QUOTE ");
    sql.append(COPY_COMMAND_QUOTE);
    sql.append(";");
    if (logger.isDebugEnabled()) {
        logger.debug(sql.toString());
    }
    return sql.toString();
}
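The returned string is the COPY header under which the edge rows are then streamed. For orientation only, a sketch of its general shape for a hypothetical "drives" edge in the public schema with one property "since"; the "E_" prefix and the "__O"/"__I" column suffixes mirror the Topology constants referenced above, while the delimiter and quote placeholders stand in for COPY_COMMAND_DELIMITER and COPY_COMMAND_QUOTE:

    // Illustrative shape only; the real prefixes, suffixes, delimiter and quote come
    // from the dialect and Topology constants used in the method above.
    String exampleShape =
            "COPY \"public\".\"E_drives\" (\"public.Person__O\", \"public.Car__I\", \"since\") "
            + " FROM stdin CSV DELIMITER '<delimiter>' QUOTE <quote>;";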