Use of org.apache.commons.lang3.tuple.Triple in the sqlg project by pietermartin.
From the class PostgresDialect, method flushEdgeCache.
/**
 * Bulk-flushes the cached edges for each {@link MetaEdge} into Postgres using the
 * {@code COPY ... FROM stdin} protocol, then assigns the generated primary keys back
 * onto the in-memory {@link SqlgEdge}s.
 *
 * @param sqlgGraph the graph whose transaction supplies the JDBC connection.
 * @param edgeCache per MetaEdge: the sorted set of property keys and, per cached edge,
 *                  a Triple of (out vertex, in vertex, property values).
 * @throws RuntimeException wrapping any SQL or I/O failure.
 */
@Override
public void flushEdgeCache(SqlgGraph sqlgGraph, Map<MetaEdge, Pair<SortedSet<String>, Map<SqlgEdge, Triple<SqlgVertex, SqlgVertex, Map<String, Object>>>>> edgeCache) {
    Connection con = sqlgGraph.tx().getConnection();
    try {
        // Iterate entries directly instead of keySet()+get() to avoid a second lookup.
        for (Map.Entry<MetaEdge, Pair<SortedSet<String>, Map<SqlgEdge, Triple<SqlgVertex, SqlgVertex, Map<String, Object>>>>> entry : edgeCache.entrySet()) {
            MetaEdge metaEdge = entry.getKey();
            Pair<SortedSet<String>, Map<SqlgEdge, Triple<SqlgVertex, SqlgVertex, Map<String, Object>>>> triples = entry.getValue();
            Map<String, PropertyType> propertyTypeMap = sqlgGraph.getTopology().getTableFor(metaEdge.getSchemaTable().withPrefix(EDGE_PREFIX));
            StringBuilder sql = new StringBuilder();
            sql.append("COPY ");
            sql.append(maybeWrapInQoutes(metaEdge.getSchemaTable().getSchema()));
            sql.append(".");
            sql.append(maybeWrapInQoutes(EDGE_PREFIX + metaEdge.getSchemaTable().getTable()));
            sql.append(" (");
            // All edges cached under one MetaEdge share the same out/in vertex labels,
            // so the column list is derived from the first triple only.
            for (Triple<SqlgVertex, SqlgVertex, Map<String, Object>> triple : triples.getRight().values()) {
                sql.append(maybeWrapInQoutes(triple.getLeft().getSchema() + "." + triple.getLeft().getTable() + Topology.OUT_VERTEX_COLUMN_END));
                sql.append(", ");
                sql.append(maybeWrapInQoutes(triple.getMiddle().getSchema() + "." + triple.getMiddle().getTable() + Topology.IN_VERTEX_COLUMN_END));
                // The two foreign-key columns always precede the property columns, so
                // every property column needs a separator. (The original's
                // `count <= size` guard was always true and has been dropped.)
                for (String key : triples.getLeft()) {
                    sql.append(", ");
                    appendKeyForStream(propertyTypeMap.get(key), sql, key);
                }
                break;
            }
            sql.append(") ");
            sql.append(" FROM stdin CSV DELIMITER '");
            sql.append(COPY_COMMAND_DELIMITER);
            sql.append("' ");
            sql.append("QUOTE ");
            sql.append(COPY_COMMAND_QUOTE);
            sql.append(" ESCAPE '");
            sql.append(ESCAPE);
            sql.append("';");
            if (logger.isDebugEnabled()) {
                logger.debug(sql.toString());
            }
            long numberInserted = 0;
            // Stream each edge's row through the COPY writer; closing the writer
            // completes the COPY command.
            try (Writer writer = streamSql(sqlgGraph, sql.toString())) {
                for (Map.Entry<SqlgEdge, Triple<SqlgVertex, SqlgVertex, Map<String, Object>>> sqlgEdgeTripleEntry : triples.getRight().entrySet()) {
                    SqlgEdge sqlgEdge = sqlgEdgeTripleEntry.getKey();
                    Triple<SqlgVertex, SqlgVertex, Map<String, Object>> outInVertexKeyValueMap = sqlgEdgeTripleEntry.getValue();
                    // Order the values by the sorted key set so they match the column list.
                    LinkedHashMap<String, Object> values = new LinkedHashMap<>();
                    for (String key : triples.getLeft()) {
                        values.put(key, outInVertexKeyValueMap.getRight().get(key));
                    }
                    writeStreamingEdge(writer, sqlgEdge, outInVertexKeyValueMap.getLeft(), outInVertexKeyValueMap.getMiddle(), values);
                    numberInserted++;
                }
            }
            // Read the sequence's current value to recover the id range COPY consumed.
            long endHigh;
            sql.setLength(0);
            sql.append("SELECT CURRVAL('" + maybeWrapInQoutes(metaEdge.getSchemaTable().getSchema()) + "." + maybeWrapInQoutes(EDGE_PREFIX + metaEdge.getSchemaTable().getTable() + "_ID_seq") + "');");
            if (logger.isDebugEnabled()) {
                logger.debug(sql.toString());
            }
            try (PreparedStatement preparedStatement = con.prepareStatement(sql.toString());
                 ResultSet resultSet = preparedStatement.executeQuery()) {
                resultSet.next();
                endHigh = resultSet.getLong(1);
            }
            // Assign ids to the edges in insertion order; COPY allocated
            // [endHigh - numberInserted + 1, endHigh].
            long id = endHigh - numberInserted + 1;
            for (SqlgEdge sqlgEdge : triples.getRight().keySet()) {
                sqlgEdge.setInternalPrimaryKey(RecordId.from(metaEdge.getSchemaTable(), id++));
            }
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
Use of org.apache.commons.lang3.tuple.Triple in the sqlg project by pietermartin.
From the class PostgresDialect, method drop.
/**
 * Builds the ordered list of delete statements required to drop the leaf elements of a
 * traversal, together with their incident edges.
 * <p>
 * When the leaves are vertices, all their in/out edges (except the edge traversed to
 * reach them, if any) are deleted first; foreign key checks are deferred around the
 * vertex/edge deletes when an edge was traversed.
 *
 * @param sqlgGraph            the graph, used for topology lookups.
 * @param leafElementsToDelete the CTE select producing the leaf element ids (alias1).
 * @param edgesToDelete        the CTE select producing the traversed edge ids; present
 *                             iff the query traversed an edge.
 * @param distinctQueryStack   the traversal's schema-table stack; the last entry is the leaf.
 * @return the delete statements, each tagged with its query type and target table.
 */
@Override
public List<Triple<SqlgSqlExecutor.DROP_QUERY, String, SchemaTable>> drop(SqlgGraph sqlgGraph, String leafElementsToDelete, Optional<String> edgesToDelete, LinkedList<SchemaTableTree> distinctQueryStack) {
    List<Triple<SqlgSqlExecutor.DROP_QUERY, String, SchemaTable>> sqls = new ArrayList<>();
    SchemaTableTree last = distinctQueryStack.getLast();
    SchemaTableTree lastEdge = null;
    // If the leaf elements are vertices then their in and out edges must be deleted too.
    boolean isVertex = last.getSchemaTable().isVertexTable();
    VertexLabel lastVertexLabel = null;
    if (isVertex) {
        Optional<Schema> schemaOptional = sqlgGraph.getTopology().getSchema(last.getSchemaTable().getSchema());
        Preconditions.checkState(schemaOptional.isPresent(), "BUG: %s not found in the topology.", last.getSchemaTable().getSchema());
        Schema schema = schemaOptional.get();
        Optional<VertexLabel> vertexLabelOptional = schema.getVertexLabel(last.getSchemaTable().withOutPrefix().getTable());
        Preconditions.checkState(vertexLabelOptional.isPresent(), "BUG: %s not found in the topology.", last.getSchemaTable().withOutPrefix().getTable());
        lastVertexLabel = vertexLabelOptional.get();
    }
    boolean queryTraversesEdge = isVertex && (distinctQueryStack.size() > 1);
    EdgeLabel lastEdgeLabel = null;
    if (queryTraversesEdge) {
        lastEdge = distinctQueryStack.get(distinctQueryStack.size() - 2);
        Optional<Schema> edgeSchema = sqlgGraph.getTopology().getSchema(lastEdge.getSchemaTable().getSchema());
        Preconditions.checkState(edgeSchema.isPresent(), "BUG: %s not found in the topology.", lastEdge.getSchemaTable().getSchema());
        Optional<EdgeLabel> edgeLabelOptional = edgeSchema.get().getEdgeLabel(lastEdge.getSchemaTable().withOutPrefix().getTable());
        // Report the same (unprefixed) name that was looked up.
        Preconditions.checkState(edgeLabelOptional.isPresent(), "BUG: %s not found in the topology.", lastEdge.getSchemaTable().withOutPrefix().getTable());
        lastEdgeLabel = edgeLabelOptional.get();
    }
    if (isVertex) {
        // First delete all incident edges except the edge traversed to reach the vertices.
        for (EdgeLabel edgeLabel : lastVertexLabel.getOutEdgeLabels().values()) {
            if (lastEdgeLabel == null || !edgeLabel.equals(lastEdgeLabel)) {
                sqls.add(Triple.of(
                        SqlgSqlExecutor.DROP_QUERY.NORMAL,
                        dropEdgesSql(leafElementsToDelete, edgeLabel, lastVertexLabel, Topology.OUT_VERTEX_COLUMN_END),
                        SchemaTable.of(edgeLabel.getSchema().getName(), Topology.EDGE_PREFIX + edgeLabel.getName())));
            }
        }
        for (EdgeLabel edgeLabel : lastVertexLabel.getInEdgeLabels().values()) {
            if (lastEdgeLabel == null || !edgeLabel.equals(lastEdgeLabel)) {
                sqls.add(Triple.of(
                        SqlgSqlExecutor.DROP_QUERY.NORMAL,
                        dropEdgesSql(leafElementsToDelete, edgeLabel, lastVertexLabel, Topology.IN_VERTEX_COLUMN_END),
                        SchemaTable.of(edgeLabel.getSchema().getName(), Topology.EDGE_PREFIX + edgeLabel.getName())));
            }
        }
    }
    // Need to defer foreign key constraint checks while both the vertices and the
    // traversed edges are deleted.
    if (queryTraversesEdge) {
        sqls.add(Triple.of(SqlgSqlExecutor.DROP_QUERY.ALTER, "SET CONSTRAINTS ALL DEFERRED", null));
    }
    // Delete the leaf vertices; if there are foreign keys the check has been deferred.
    sqls.add(Triple.of(SqlgSqlExecutor.DROP_QUERY.NORMAL, dropByIdSql(leafElementsToDelete, last.getSchemaTable()), last.getSchemaTable()));
    if (queryTraversesEdge) {
        sqls.add(Triple.of(SqlgSqlExecutor.DROP_QUERY.EDGE, dropByIdSql(edgesToDelete.get(), lastEdge.getSchemaTable()), lastEdge.getSchemaTable()));
    }
    // Re-enable immediate foreign key constraint checking.
    if (queryTraversesEdge) {
        sqls.add(Triple.of(SqlgSqlExecutor.DROP_QUERY.ALTER, "SET CONSTRAINTS ALL IMMEDIATE", null));
    }
    return sqls;
}

/**
 * Builds a delete of {@code edgeLabel}'s table joined against the {@code todelete} CTE
 * on the given vertex foreign-key column ({@code vertexColumnEnd} selects OUT vs IN).
 */
private String dropEdgesSql(String leafElementsToDelete, EdgeLabel edgeLabel, VertexLabel vertexLabel, String vertexColumnEnd) {
    StringBuilder sb = new StringBuilder();
    sb.append("WITH todelete AS (");
    sb.append(leafElementsToDelete);
    sb.append("\n)\nDELETE FROM ");
    sb.append(maybeWrapInQoutes(edgeLabel.getSchema().getName()));
    sb.append(".");
    sb.append(maybeWrapInQoutes(Topology.EDGE_PREFIX + edgeLabel.getName()));
    sb.append(" a USING todelete\nWHERE a.");
    sb.append(maybeWrapInQoutes(vertexLabel.getSchema().getName() + "." + vertexLabel.getName() + vertexColumnEnd));
    sb.append(" = todelete.");
    sb.append(maybeWrapInQoutes("alias1"));
    return sb.toString();
}

/**
 * Builds a delete of {@code schemaTable} joined against the {@code todelete} CTE on
 * the element's own "ID" column.
 */
private String dropByIdSql(String elementsToDelete, SchemaTable schemaTable) {
    StringBuilder sb = new StringBuilder();
    sb.append("WITH todelete AS (");
    sb.append(elementsToDelete);
    sb.append("\n)\nDELETE FROM ");
    sb.append(maybeWrapInQoutes(schemaTable.getSchema()));
    sb.append(".");
    sb.append(maybeWrapInQoutes(schemaTable.getTable()));
    sb.append(" a USING todelete\nWHERE a.");
    sb.append(maybeWrapInQoutes("ID"));
    sb.append(" = todelete.");
    sb.append(maybeWrapInQoutes("alias1"));
    return sb.toString();
}
Use of org.apache.commons.lang3.tuple.Triple in the sqlg project by pietermartin.
From the class SqlgStartupManager, method extractIndices.
/**
 * Groups the flat (indexName, nonUnique, columnName) rows returned by the dialect into
 * complete indexes and registers every user-defined index with the topology.
 * The rows are assumed to be ordered so that all columns of one index are contiguous.
 *
 * @param metadata JDBC metadata used by the dialect to list index info.
 * @param catalog  the catalog to inspect (may be null per JDBC conventions — TODO confirm).
 * @param schema   the schema of the table.
 * @param table    the (prefixed) table name.
 * @param label    the vertex/edge label the indexes belong to.
 * @param isVertex true for a vertex label, false for an edge label.
 * @throws SQLException if the metadata query fails.
 */
private void extractIndices(DatabaseMetaData metadata, String catalog, String schema, String table, String label, boolean isVertex) throws SQLException {
    String lastIndexName = null;
    IndexType lastIndexType = null;
    List<String> lastColumns = new LinkedList<>();
    List<Triple<String, Boolean, String>> indexes = this.sqlDialect.getIndexInfo(metadata, catalog, schema, table, false, true);
    for (Triple<String, Boolean, String> index : indexes) {
        String indexName = index.getLeft();
        boolean nonUnique = index.getMiddle();
        String columnName = index.getRight();
        if (lastIndexName == null) {
            // First row: start collecting the first index.
            lastIndexName = indexName;
            lastIndexType = nonUnique ? IndexType.NON_UNIQUE : IndexType.UNIQUE;
        } else if (!lastIndexName.equals(indexName)) {
            // A new index name starts: flush the completed one, then reset.
            registerIndex(schema, label, isVertex, lastIndexName, lastIndexType, lastColumns);
            lastColumns.clear();
            lastIndexName = indexName;
            lastIndexType = nonUnique ? IndexType.NON_UNIQUE : IndexType.UNIQUE;
        }
        lastColumns.add(columnName);
    }
    // Flush the trailing index. The null guard fixes a bug where an empty result set
    // caused isSystemIndex(null) to be invoked.
    if (lastIndexName != null) {
        registerIndex(schema, label, isVertex, lastIndexName, lastIndexType, lastColumns);
    }
}

/**
 * Registers one reconstructed index with the topology, skipping system indexes and
 * anything in the global-unique-index schema.
 */
private void registerIndex(String schema, String label, boolean isVertex, String indexName, IndexType indexType, List<String> columns) {
    if (!this.sqlDialect.isSystemIndex(indexName) && !Schema.GLOBAL_UNIQUE_INDEX_SCHEMA.equals(schema)) {
        // Pass a copy: the caller reuses and clears its column list, and addIndex may
        // retain the reference.
        TopologyManager.addIndex(sqlgGraph, schema, label, isVertex, indexName, indexType, new LinkedList<>(columns));
    }
}
Use of org.apache.commons.lang3.tuple.Triple in the sqlg project by pietermartin.
From the class MSSqlServerDialect, method flushEdgeGlobalUniqueIndexes.
/**
 * Writes the property values of the cached edges into each global unique index table
 * via SQL Server's bulk copy API.
 *
 * @param sqlgGraph the graph whose transaction supplies the JDBC connection.
 * @param edgeCache per MetaEdge: the sorted set of property keys and, per cached edge,
 *                  a Triple of (out vertex, in vertex, property values).
 * @throws RuntimeException wrapping any SQLException from the bulk copy.
 */
@Override
public void flushEdgeGlobalUniqueIndexes(SqlgGraph sqlgGraph, Map<MetaEdge, Pair<SortedSet<String>, Map<SqlgEdge, Triple<SqlgVertex, SqlgVertex, Map<String, Object>>>>> edgeCache) {
    // Iterate entries directly instead of keySet()+get() to avoid a second lookup.
    for (Map.Entry<MetaEdge, Pair<SortedSet<String>, Map<SqlgEdge, Triple<SqlgVertex, SqlgVertex, Map<String, Object>>>>> entry : edgeCache.entrySet()) {
        MetaEdge metaEdge = entry.getKey();
        Map<SqlgEdge, Triple<SqlgVertex, SqlgVertex, Map<String, Object>>> edgeMap = entry.getValue().getRight();
        Map<String, PropertyColumn> propertyColumnMap = sqlgGraph.getTopology().getPropertiesFor(metaEdge.getSchemaTable().withPrefix(EDGE_PREFIX));
        // One bulk copy per (property, global unique index) pair.
        for (PropertyColumn propertyColumn : propertyColumnMap.values()) {
            for (GlobalUniqueIndex globalUniqueIndex : propertyColumn.getGlobalUniqueIndices()) {
                try {
                    // Fetch the connection inside the loop so the current transaction's
                    // connection is always used.
                    Connection connection = sqlgGraph.tx().getConnection();
                    SQLServerConnection sqlServerConnection = connection.unwrap(SQLServerConnection.class);
                    try (SQLServerBulkCopy bulkCopy = new SQLServerBulkCopy(sqlServerConnection)) {
                        bulkCopy.setDestinationTableName(
                                sqlgGraph.getSqlDialect().maybeWrapInQoutes(Schema.GLOBAL_UNIQUE_INDEX_SCHEMA)
                                        + "."
                                        + sqlgGraph.getSqlDialect().maybeWrapInQoutes(VERTEX_PREFIX + globalUniqueIndex.getName()));
                        bulkCopy.writeToServer(new SQLServerEdgeGlobalUniqueIndexBulkRecord(bulkCopy, sqlgGraph, edgeMap, propertyColumn));
                    }
                } catch (SQLException e) {
                    throw new RuntimeException(e);
                }
            }
        }
    }
}
Use of org.apache.commons.lang3.tuple.Triple in the lobcder project by skoulouzis.
From the class WorkerServlet, method doPost.
/**
 * Handles a multipart file upload. For every file part, the storage metadata parsed
 * from the query string (a Triple of two longs and a collection of ids, keyed by file
 * name) is encoded into the on-disk file name as "left-middle-id1-id2-...".
 * Non-multipart requests are answered with a plain error message.
 *
 * @param request  the multipart upload request; its query string carries the metadata.
 * @param response used only to report a non-multipart request.
 * @throws IOException if writing the error response fails.
 */
@Override
public void doPost(HttpServletRequest request, HttpServletResponse response) throws IOException {
    try {
        // Checks whether the request actually contains an upload; if not, stop here.
        if (!ServletFileUpload.isMultipartContent(request)) {
            PrintWriter writer = response.getWriter();
            // Fixed grammar of the user-facing message ("must has" -> "must have").
            writer.println("Error: Form must have enctype=multipart/form-data.");
            writer.flush();
            return;
        }
        // Configure the disk-backed upload factory.
        DiskFileItemFactory factory = new DiskFileItemFactory();
        // Memory threshold beyond which parts spill to disk.
        factory.setSizeThreshold(this.bufferSize);
        // Temporary location used to store spilled files.
        factory.setRepository(Util.getUploadDir());
        ServletFileUpload upload = new ServletFileUpload(factory);
        // Maximum file/request sizes intentionally left unset:
        // upload.setFileSizeMax(MAX_FILE_SIZE);
        // upload.setSizeMax(MAX_REQUEST_SIZE);
        Map<String, Triple<Long, Long, Collection<Long>>> storageMap = parseQuery(request.getQueryString());
        List<FileItem> formItems = upload.parseRequest(request);
        for (FileItem item : formItems) {
            // Skip simple form fields; only named file parts are stored.
            if (item.getName() == null) {
                continue;
            }
            String fileName = item.getName();
            Triple<Long, Long, Collection<Long>> triple = storageMap.get(fileName);
            if (triple == null) {
                // Previously this was a NullPointerException that aborted the
                // remaining items; now the unmatched part is skipped with a warning.
                Logger.getLogger(WorkerServlet.class.getName()).log(Level.WARNING,
                        "No storage metadata for uploaded file {0}; skipping.", fileName);
                continue;
            }
            // Encode the metadata as "left-middle-id1-id2-..." (no trailing dash).
            StringBuilder storeName = new StringBuilder();
            storeName.append(triple.getLeft()).append("-");
            storeName.append(triple.getMiddle());
            for (Long l : triple.getRight()) {
                storeName.append("-").append(l);
            }
            String filePath = Util.getUploadDir() + File.separator + fileName + "_" + storeName.toString();
            File storeFile = new File(filePath);
            item.write(storeFile);
        }
    } catch (Exception ex) {
        // Best-effort: log and swallow so a single bad item does not surface as a 500.
        Logger.getLogger(WorkerServlet.class.getName()).log(Level.SEVERE, null, ex);
    }
}
Aggregations