Use of org.codehaus.jackson.node.ArrayNode in project neo4j by neo4j.
The class EnterpriseAuthenticationIT, method shouldAllowExecutingEnterpriseBuiltInProceduresWithAuthDisabled.
@Test
public void shouldAllowExecutingEnterpriseBuiltInProceduresWithAuthDisabled() throws Exception {
    // Given
    startServerWithAuthDisabled();

    // When
    String method = "POST";
    String path = "db/data/transaction/commit";
    HTTP.RawPayload payload = HTTP.RawPayload.quotedJson("{'statements':[{'statement':'CALL dbms.listQueries()'}]}");
    HTTP.Response response = HTTP.request(method, server.baseUri().resolve(path).toString(), payload);

    // Then
    assertThat(response.status(), equalTo(200));
    ArrayNode errors = (ArrayNode) response.get("errors");
    assertThat("Should have no errors", errors.size(), equalTo(0));
    ArrayNode results = (ArrayNode) response.get("results");
    ArrayNode data = (ArrayNode) results.get(0).get("data");
    assertThat("Should see our own query", data.size(), equalTo(1));
}
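HTTP.Response.get here returns a Jackson JsonNode, so the errors and results arrays cast directly to ArrayNode. A minimal standalone sketch of the same traversal with plain Jackson 1.x, using an illustrative response body rather than a live server:

import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.node.ArrayNode;

public class CommitResponseSketch {
    public static void main(String[] args) throws Exception {
        // Illustrative body in the shape the transactional endpoint returns
        String body = "{\"results\":[{\"columns\":[\"query\"],\"data\":[{\"row\":[\"CALL dbms.listQueries()\"]}]}],\"errors\":[]}";
        JsonNode root = new ObjectMapper().readTree(body);
        // The same casts the test performs on the parsed response
        ArrayNode errors = (ArrayNode) root.get("errors");
        ArrayNode results = (ArrayNode) root.get("results");
        ArrayNode data = (ArrayNode) results.get(0).get("data");
        System.out.println(errors.size() + " errors, " + data.size() + " data row(s)");
    }
}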
Use of org.codehaus.jackson.node.ArrayNode in project neo4j-documentation by neo4j.
The class EnterpriseAuthenticationDocIT, method shouldHavePredefinedRoles.
@Test
public void shouldHavePredefinedRoles() throws Exception {
    // Given
    startServerWithConfiguredUser();

    // When
    String method = "POST";
    String path = "db/data/transaction/commit";
    HTTP.RawPayload payload = HTTP.RawPayload.quotedJson("{'statements':[{'statement':'CALL dbms.security.listRoles()'}]}");
    HTTP.Response response = HTTP.withHeaders(HttpHeaders.AUTHORIZATION, challengeResponse("neo4j", "secret"))
            .request(method, server.baseUri().resolve(path).toString(), payload);

    // Then
    assertThat(response.status(), equalTo(200));
    ArrayNode errors = (ArrayNode) response.get("errors");
    assertThat("Should have no errors", errors.size(), equalTo(0));
    ArrayNode results = (ArrayNode) response.get("results");
    ArrayNode data = (ArrayNode) results.get(0).get("data");
    assertThat("Should have 5 predefined roles", data.size(), equalTo(5));
    Stream<String> values = data.findValues("row").stream().map(row -> row.get(0).asText());
    assertThat("Expected specific roles", values.collect(Collectors.toList()), hasItems("admin", "architect", "publisher", "editor", "reader"));
}
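The role names are extracted with JsonNode.findValues, which collects every field named "row" anywhere under the data array; each row is itself an array whose first element is the role name. A compact sketch of that extraction against a hand-built sample (the JSON is illustrative):

import java.util.List;
import java.util.stream.Collectors;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.node.ArrayNode;

public class RoleExtractionSketch {
    public static void main(String[] args) throws Exception {
        // Illustrative "data" array as returned for CALL dbms.security.listRoles()
        String json = "[{\"row\":[\"admin\"]},{\"row\":[\"reader\"]}]";
        ArrayNode data = (ArrayNode) new ObjectMapper().readTree(json);
        List<String> roles = data.findValues("row").stream()
                .map(row -> row.get(0).asText())
                .collect(Collectors.toList());
        System.out.println(roles); // [admin, reader]
    }
}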
Use of org.codehaus.jackson.node.ArrayNode in project nifi by apache.
The class ConvertJSONToSQL, method onTrigger.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final boolean translateFieldNames = context.getProperty(TRANSLATE_FIELD_NAMES).asBoolean();
    final boolean ignoreUnmappedFields = IGNORE_UNMATCHED_FIELD.getValue().equalsIgnoreCase(context.getProperty(UNMATCHED_FIELD_BEHAVIOR).getValue());
    final String statementType = context.getProperty(STATEMENT_TYPE).getValue();
    final String updateKeys = context.getProperty(UPDATE_KEY).evaluateAttributeExpressions(flowFile).getValue();
    final String catalog = context.getProperty(CATALOG_NAME).evaluateAttributeExpressions(flowFile).getValue();
    final String schemaName = context.getProperty(SCHEMA_NAME).evaluateAttributeExpressions(flowFile).getValue();
    final String tableName = context.getProperty(TABLE_NAME).evaluateAttributeExpressions(flowFile).getValue();
    final SchemaKey schemaKey = new SchemaKey(catalog, tableName);
    final boolean includePrimaryKeys = UPDATE_TYPE.equals(statementType) && updateKeys == null;

    // Is the unmatched column behaviour fail or warning?
    final boolean failUnmappedColumns = FAIL_UNMATCHED_COLUMN.getValue().equalsIgnoreCase(context.getProperty(UNMATCHED_COLUMN_BEHAVIOR).getValue());
    final boolean warningUnmappedColumns = WARNING_UNMATCHED_COLUMN.getValue().equalsIgnoreCase(context.getProperty(UNMATCHED_COLUMN_BEHAVIOR).getValue());

    // Escape column names?
    final boolean escapeColumnNames = context.getProperty(QUOTED_IDENTIFIERS).asBoolean();

    // Quote table name?
    final boolean quoteTableName = context.getProperty(QUOTED_TABLE_IDENTIFIER).asBoolean();

    // Attribute prefix
    final String attributePrefix = context.getProperty(SQL_PARAM_ATTR_PREFIX).evaluateAttributeExpressions(flowFile).getValue();

    // Get the database schema from the cache, if one exists. We do this in a synchronized block, rather than
    // using a ConcurrentMap, because the Map that we are using is a LinkedHashMap with a capacity such that if
    // the Map grows beyond this capacity, old elements are evicted. We do this in order to avoid filling the
    // Java heap if there are a lot of different SQL statements being generated that reference different tables.
    TableSchema schema;
    synchronized (this) {
        schema = schemaCache.get(schemaKey);
        if (schema == null) {
            // No schema exists for this table yet. Query the database to determine the schema and put it into the cache.
            final DBCPService dbcpService = context.getProperty(CONNECTION_POOL).asControllerService(DBCPService.class);
            try (final Connection conn = dbcpService.getConnection()) {
                schema = TableSchema.from(conn, catalog, schemaName, tableName, translateFieldNames, includePrimaryKeys);
                schemaCache.put(schemaKey, schema);
            } catch (final SQLException e) {
                getLogger().error("Failed to convert {} into a SQL statement due to {}; routing to failure", new Object[] { flowFile, e.toString() }, e);
                session.transfer(flowFile, REL_FAILURE);
                return;
            }
        }
    }

    // Parse the JSON document
    final ObjectMapper mapper = new ObjectMapper();
    final AtomicReference<JsonNode> rootNodeRef = new AtomicReference<>(null);
    try {
        session.read(flowFile, new InputStreamCallback() {
            @Override
            public void process(final InputStream in) throws IOException {
                try (final InputStream bufferedIn = new BufferedInputStream(in)) {
                    rootNodeRef.set(mapper.readTree(bufferedIn));
                }
            }
        });
    } catch (final ProcessException pe) {
        getLogger().error("Failed to parse {} as JSON due to {}; routing to failure", new Object[] { flowFile, pe.toString() }, pe);
        session.transfer(flowFile, REL_FAILURE);
        return;
    }

    final JsonNode rootNode = rootNodeRef.get();

    // The node may or may not be a JSON array. If it isn't, we will create an
    // ArrayNode and add just the root node to it. We do this so that we can easily iterate
    // over the array node, rather than duplicating the logic or creating another function that takes many variables
    // in order to implement the logic.
    final ArrayNode arrayNode;
    if (rootNode.isArray()) {
        arrayNode = (ArrayNode) rootNode;
    } else {
        final JsonNodeFactory nodeFactory = JsonNodeFactory.instance;
        arrayNode = new ArrayNode(nodeFactory);
        arrayNode.add(rootNode);
    }

    final String fragmentIdentifier = UUID.randomUUID().toString();
    final Set<FlowFile> created = new HashSet<>();
    for (int i = 0; i < arrayNode.size(); i++) {
        final JsonNode jsonNode = arrayNode.get(i);
        final String sql;
        final Map<String, String> attributes = new HashMap<>();
        try {
            // Build the fully qualified table name
            final StringBuilder tableNameBuilder = new StringBuilder();
            if (catalog != null) {
                tableNameBuilder.append(catalog).append(".");
            }
            if (schemaName != null) {
                tableNameBuilder.append(schemaName).append(".");
            }
            tableNameBuilder.append(tableName);
            final String fqTableName = tableNameBuilder.toString();

            if (INSERT_TYPE.equals(statementType)) {
                sql = generateInsert(jsonNode, attributes, fqTableName, schema, translateFieldNames, ignoreUnmappedFields, failUnmappedColumns, warningUnmappedColumns, escapeColumnNames, quoteTableName, attributePrefix);
            } else if (UPDATE_TYPE.equals(statementType)) {
                sql = generateUpdate(jsonNode, attributes, fqTableName, updateKeys, schema, translateFieldNames, ignoreUnmappedFields, failUnmappedColumns, warningUnmappedColumns, escapeColumnNames, quoteTableName, attributePrefix);
            } else {
                sql = generateDelete(jsonNode, attributes, fqTableName, schema, translateFieldNames, ignoreUnmappedFields, failUnmappedColumns, warningUnmappedColumns, escapeColumnNames, quoteTableName, attributePrefix);
            }
        } catch (final ProcessException pe) {
            getLogger().error("Failed to convert {} to a SQL {} statement due to {}; routing to failure", new Object[] { flowFile, statementType, pe.toString() }, pe);
            session.remove(created);
            session.transfer(flowFile, REL_FAILURE);
            return;
        }

        FlowFile sqlFlowFile = session.create(flowFile);
        created.add(sqlFlowFile);
        sqlFlowFile = session.write(sqlFlowFile, new OutputStreamCallback() {
            @Override
            public void process(final OutputStream out) throws IOException {
                out.write(sql.getBytes(StandardCharsets.UTF_8));
            }
        });

        attributes.put(CoreAttributes.MIME_TYPE.key(), "text/plain");
        attributes.put(attributePrefix + ".table", tableName);
        attributes.put(FRAGMENT_ID.key(), fragmentIdentifier);
        attributes.put(FRAGMENT_COUNT.key(), String.valueOf(arrayNode.size()));
        attributes.put(FRAGMENT_INDEX.key(), String.valueOf(i));
        if (catalog != null) {
            attributes.put(attributePrefix + ".catalog", catalog);
        }
        sqlFlowFile = session.putAllAttributes(sqlFlowFile, attributes);
        session.transfer(sqlFlowFile, REL_SQL);
    }

    flowFile = copyAttributesToOriginal(session, flowFile, fragmentIdentifier, arrayNode.size());
    session.transfer(flowFile, REL_ORIGINAL);
}
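The schemaCache comment above describes a size-bounded LinkedHashMap rather than a ConcurrentMap, which is why lookups and inserts are wrapped in a synchronized block. A minimal sketch of that kind of cache, assuming an LRU eviction policy (the class name and capacity handling are illustrative, not NiFi's actual field):

import java.util.LinkedHashMap;
import java.util.Map;

public class BoundedSchemaCache<K, V> extends LinkedHashMap<K, V> {
    private final int capacity;

    public BoundedSchemaCache(final int capacity) {
        super(16, 0.75f, true); // access order, so the eldest entry is the least recently used
        this.capacity = capacity;
    }

    @Override
    protected boolean removeEldestEntry(final Map.Entry<K, V> eldest) {
        // Evict the least recently used entry once the cap is exceeded,
        // keeping the heap bounded no matter how many distinct tables are seen
        return size() > capacity;
    }
}

Because LinkedHashMap itself is not thread-safe, the synchronized block in onTrigger provides the thread safety that a ConcurrentMap would otherwise give up in exchange for having no built-in eviction.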
Use of org.codehaus.jackson.node.ArrayNode in project nifi by apache.
The class AbstractJsonRowRecordReader, method getRawNodeValue.
protected Object getRawNodeValue(final JsonNode fieldNode, final DataType dataType) throws IOException {
    if (fieldNode == null || fieldNode.isNull()) {
        return null;
    }
    if (fieldNode.isNumber()) {
        return fieldNode.getNumberValue();
    }
    if (fieldNode.isBinary()) {
        return fieldNode.getBinaryValue();
    }
    if (fieldNode.isBoolean()) {
        return fieldNode.getBooleanValue();
    }
    if (fieldNode.isTextual()) {
        return fieldNode.getTextValue();
    }
    if (fieldNode.isArray()) {
        final ArrayNode arrayNode = (ArrayNode) fieldNode;
        final int numElements = arrayNode.size();
        final Object[] arrayElements = new Object[numElements];
        int count = 0;

        // If the declared type is an array, convert each element against its element type
        final DataType elementDataType;
        if (dataType != null && dataType.getFieldType() == RecordFieldType.ARRAY) {
            final ArrayDataType arrayDataType = (ArrayDataType) dataType;
            elementDataType = arrayDataType.getElementType();
        } else {
            elementDataType = null;
        }

        for (final JsonNode node : arrayNode) {
            final Object value = getRawNodeValue(node, elementDataType);
            arrayElements[count++] = value;
        }
        return arrayElements;
    }
    if (fieldNode.isObject()) {
        // Nested objects become MapRecords, using the declared child schema if one exists
        RecordSchema childSchema;
        if (dataType != null && RecordFieldType.RECORD == dataType.getFieldType()) {
            final RecordDataType recordDataType = (RecordDataType) dataType;
            childSchema = recordDataType.getChildSchema();
        } else {
            childSchema = null;
        }
        if (childSchema == null) {
            childSchema = new SimpleRecordSchema(Collections.emptyList());
        }

        final Iterator<String> fieldNames = fieldNode.getFieldNames();
        final Map<String, Object> childValues = new HashMap<>();
        while (fieldNames.hasNext()) {
            final String childFieldName = fieldNames.next();
            final Object childValue = getRawNodeValue(fieldNode.get(childFieldName), dataType);
            childValues.put(childFieldName, childValue);
        }
        final MapRecord record = new MapRecord(childSchema, childValues);
        return record;
    }
    return null;
}
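The DataType plumbing exists only to choose the right child schema for NiFi's record API; the JSON traversal itself is plain Jackson 1.x. A stripped-down sketch of the same recursion without the NiFi types, where arrays become Object[] and objects become plain Maps (the class and method names are made up for illustration):

import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.node.ArrayNode;

public class RawValueSketch {
    // Same recursion shape as getRawNodeValue, minus the NiFi record types
    static Object toRaw(final JsonNode node) throws IOException {
        if (node == null || node.isNull()) return null;
        if (node.isNumber()) return node.getNumberValue();
        if (node.isBinary()) return node.getBinaryValue();
        if (node.isBoolean()) return node.getBooleanValue();
        if (node.isTextual()) return node.getTextValue();
        if (node.isArray()) {
            final ArrayNode arrayNode = (ArrayNode) node;
            final Object[] elements = new Object[arrayNode.size()];
            int i = 0;
            for (final JsonNode element : arrayNode) {
                elements[i++] = toRaw(element);
            }
            return elements;
        }
        if (node.isObject()) {
            final Map<String, Object> values = new HashMap<>();
            final Iterator<String> names = node.getFieldNames();
            while (names.hasNext()) {
                final String name = names.next();
                values.put(name, toRaw(node.get(name)));
            }
            return values;
        }
        return null;
    }

    public static void main(String[] args) throws Exception {
        JsonNode root = new ObjectMapper().readTree("{\"id\":1,\"tags\":[\"a\",\"b\"]}");
        Map<?, ?> raw = (Map<?, ?>) toRaw(root);
        System.out.println(((Object[]) raw.get("tags")).length); // 2
    }
}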
Use of org.codehaus.jackson.node.ArrayNode in project oxTrust by GluuFederation.
The class SchemaTypeUserSerializer, method serialize.
@Override
public void serialize(User user, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) throws IOException {
    log.info(" serialize() ");
    try {
        ObjectMapper mapper = new ObjectMapper();
        mapper.disable(SerializationConfig.Feature.FAIL_ON_EMPTY_BEANS);
        JsonNode rootNode = mapper.convertValue(user, JsonNode.class);
        Iterator<Map.Entry<String, JsonNode>> iterator = rootNode.getFields();
        // Walk each field of the serialized User and describe it as a schema attribute,
        // skipping fields that belong to the user-extension schema
        while (iterator.hasNext()) {
            Map.Entry<String, JsonNode> rootNodeEntry = iterator.next();
            if (!(SchemaTypeMapping.getSchemaTypeInstance(rootNodeEntry.getKey()) instanceof UserExtensionSchema)) {
                if (rootNodeEntry.getValue() instanceof ObjectNode) {
                    // The complex "name" object: describe each of its sub-attributes
                    if (rootNodeEntry.getKey().equalsIgnoreCase("name")) {
                        AttributeHolder attributeHolder = new AttributeHolder();
                        attributeHolder.setName(rootNodeEntry.getKey());
                        attributeHolder.setType("string");
                        attributeHolder.setDescription("Name object");
                        attributeHolder.setRequired(Boolean.FALSE);
                        List<AttributeHolder> nameAttributeHolders = new ArrayList<AttributeHolder>();
                        Iterator<Map.Entry<String, JsonNode>> nameIterator = rootNodeEntry.getValue().getFields();
                        while (nameIterator.hasNext()) {
                            Map.Entry<String, JsonNode> nameRootNodeEntry = nameIterator.next();
                            AttributeHolder nameAttributeHolder = new AttributeHolder();
                            nameAttributeHolder.setName(nameRootNodeEntry.getKey());
                            nameAttributeHolder.setType("string");
                            if (nameRootNodeEntry.getKey().equalsIgnoreCase("formatted")) {
                                nameAttributeHolder.setDescription("Formatted name on-the-fly for display. Using this in a query filter is not supported.");
                                nameAttributeHolder.setMutability("readOnly");
                            } else {
                                nameAttributeHolder.setDescription(nameRootNodeEntry.getKey());
                            }
                            if (nameRootNodeEntry.getKey().equalsIgnoreCase("givenName") || nameRootNodeEntry.getKey().equalsIgnoreCase("familyName")) {
                                nameAttributeHolder.setRequired(true);
                            } else {
                                nameAttributeHolder.setRequired(false);
                            }
                            nameAttributeHolders.add(nameAttributeHolder);
                        }
                        attributeHolder.setSubAttributes(nameAttributeHolders);
                        attributeHolders.add(attributeHolder);
                    }
                } else if (rootNodeEntry.getValue() instanceof ArrayNode) {
                    // Multi-valued attributes (groups, schemas, photos, ...)
                    AttributeHolder arrayNodeAttributeHolder = new AttributeHolder();
                    arrayNodeAttributeHolder.setName(rootNodeEntry.getKey());
                    if (rootNodeEntry.getKey().equalsIgnoreCase("groups")) {
                        arrayNodeAttributeHolder.setDescription(rootNodeEntry.getKey() + " list; using sub-attributes in a query filter is not supported (cross-querying)");
                        arrayNodeAttributeHolder.setCaseExact(Boolean.TRUE);
                        List<String> referenceTypes = new ArrayList<String>();
                        referenceTypes.add("Group");
                        arrayNodeAttributeHolder.setReferenceTypes(referenceTypes);
                    } else {
                        arrayNodeAttributeHolder.setDescription(rootNodeEntry.getKey() + " list");
                        arrayNodeAttributeHolder.setCaseExact(Boolean.FALSE);
                    }
                    arrayNodeAttributeHolder.setRequired(Boolean.FALSE);
                    arrayNodeAttributeHolder.setMultiValued(Boolean.TRUE);
                    if (rootNodeEntry.getKey().equalsIgnoreCase("schemas")) {
                        arrayNodeAttributeHolder.setUniqueness("server");
                        arrayNodeAttributeHolder.setType("string");
                        arrayNodeAttributeHolder.setCaseExact(Boolean.TRUE);
                        arrayNodeAttributeHolder.setReturned("always");
                    } else {
                        arrayNodeAttributeHolder.setType("complex");
                    }
                    if (rootNodeEntry.getKey().equalsIgnoreCase("photos")) {
                        arrayNodeAttributeHolder.setType("reference");
                        List<String> referenceTypes = new ArrayList<String>();
                        referenceTypes.add("uri");
                        arrayNodeAttributeHolder.setReferenceTypes(referenceTypes);
                    }
                    // Describe the sub-attributes of each element in the array
                    List<AttributeHolder> arrayNodeMapAttributeHolders = new ArrayList<AttributeHolder>();
                    Iterator<JsonNode> arrayNodeIterator = rootNodeEntry.getValue().getElements();
                    while (arrayNodeIterator.hasNext()) {
                        JsonNode jsonNode = arrayNodeIterator.next();
                        Iterator<Map.Entry<String, JsonNode>> arrayNodeMapIterator = jsonNode.getFields();
                        while (arrayNodeMapIterator.hasNext()) {
                            Map.Entry<String, JsonNode> arrayNodeMapRootNodeEntry = arrayNodeMapIterator.next();
                            AttributeHolder arrayNodeMapAttributeHolder = new AttributeHolder();
                            if (rootNodeEntry.getKey().equalsIgnoreCase("groups") && arrayNodeMapRootNodeEntry.getKey().equalsIgnoreCase("reference")) {
                                arrayNodeMapAttributeHolder.setName("$ref");
                            } else {
                                arrayNodeMapAttributeHolder.setName(arrayNodeMapRootNodeEntry.getKey());
                            }
                            arrayNodeMapAttributeHolder.setType("string");
                            arrayNodeMapAttributeHolder.setDescription(arrayNodeMapRootNodeEntry.getKey());
                            if (arrayNodeMapRootNodeEntry.getKey().equalsIgnoreCase("value") || arrayNodeMapRootNodeEntry.getKey().equalsIgnoreCase("type")) {
                                arrayNodeMapAttributeHolder.setRequired(Boolean.TRUE);
                            } else {
                                arrayNodeMapAttributeHolder.setRequired(Boolean.FALSE);
                            }
                            if (arrayNodeMapRootNodeEntry.getKey().equalsIgnoreCase("valueAsImageDataURI") || arrayNodeMapRootNodeEntry.getKey().equalsIgnoreCase("valueAsURI")) {
                                arrayNodeMapAttributeHolder.setMutability("readOnly");
                                arrayNodeMapAttributeHolder.setType("reference");
                                List<String> referenceTypes = new ArrayList<String>();
                                referenceTypes.add("uri");
                                arrayNodeMapAttributeHolder.setReferenceTypes(referenceTypes);
                            }
                            arrayNodeMapAttributeHolders.add(arrayNodeMapAttributeHolder);
                        }
                        arrayNodeAttributeHolder.setSubAttributes(arrayNodeMapAttributeHolders);
                        attributeHolders.add(arrayNodeAttributeHolder);
                    }
                } else {
                    // Simple scalar attribute
                    AttributeHolder attributeHolder = new AttributeHolder();
                    attributeHolder.setName(rootNodeEntry.getKey());
                    if (rootNodeEntry.getValue().isBoolean()) {
                        attributeHolder.setType("boolean");
                    } else {
                        attributeHolder.setType("string");
                    }
                    attributeHolder.setDescription(rootNodeEntry.getKey());
                    if (rootNodeEntry.getKey().equalsIgnoreCase("userName") || rootNodeEntry.getKey().equalsIgnoreCase("displayName")) {
                        attributeHolder.setRequired(Boolean.TRUE);
                    } else {
                        attributeHolder.setRequired(Boolean.FALSE);
                    }
                    if (rootNodeEntry.getKey().equalsIgnoreCase("id") || rootNodeEntry.getKey().equalsIgnoreCase("userName")) {
                        attributeHolder.setUniqueness("server");
                        attributeHolder.setReturned("always");
                    }
                    if (rootNodeEntry.getKey().equalsIgnoreCase("id") || rootNodeEntry.getKey().equalsIgnoreCase("externalId") || rootNodeEntry.getKey().equalsIgnoreCase("password")) {
                        attributeHolder.setCaseExact(Boolean.TRUE);
                    }
                    if (rootNodeEntry.getKey().equalsIgnoreCase("id")) {
                        attributeHolder.setMutability("readOnly");
                    }
                    attributeHolders.add(attributeHolder);
                }
            }
        }
        UserCoreSchema userCoreSchema = (UserCoreSchema) schemaType;
        userCoreSchema.setAttributeHolders(attributeHolders);
        schemaType = userCoreSchema;
    } catch (Exception e) {
        e.printStackTrace();
        throw new IOException("Unexpected processing error; please check the User class structure.");
    }
}
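For context on how a serializer like this gets picked up: Jackson 1.x (the org.codehaus.jackson line used here) lets a custom JsonSerializer be registered through a SimpleModule. A hypothetical wiring sketch; in oxTrust the serializer's schemaType and log collaborators are injected by the container, so the instance is simply passed in:

import org.codehaus.jackson.Version;
import org.codehaus.jackson.map.JsonSerializer;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.module.SimpleModule;

public class SerializerWiringSketch {
    // Hypothetical helper: build a mapper with one custom serializer attached
    public static <T> ObjectMapper mapperWith(final Class<T> type, final JsonSerializer<T> serializer) {
        final ObjectMapper mapper = new ObjectMapper();
        final SimpleModule module = new SimpleModule("customModule", new Version(1, 0, 0, null));
        module.addSerializer(type, serializer);
        mapper.registerModule(module);
        return mapper;
    }
}

With that in place, mapperWith(User.class, schemaTypeUserSerializer).writeValueAsString(user) would route serialization of User through the serialize() method above.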