Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.DeserializationContext in project flink by apache.
The class LogicalWindowJsonDeserializer, method deserialize:
@Override
public LogicalWindow deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException {
    JsonNode jsonNode = jsonParser.readValueAsTree();
    String kind = jsonNode.get(FIELD_NAME_KIND).asText().toUpperCase();
    WindowReference alias =
            deserializationContext.readValue(
                    jsonNode.get(FIELD_NAME_ALIAS).traverse(jsonParser.getCodec()),
                    WindowReference.class);
    FieldReferenceExpression timeField =
            deserializeFieldReferenceExpression(
                    jsonNode.get(FIELD_NAME_TIME_FIELD), jsonParser, deserializationContext);
    switch (kind) {
        case KIND_TUMBLING:
            boolean isTimeTumblingWindow = jsonNode.get(FIELD_NAME_IS_TIME_WINDOW).asBoolean();
            if (isTimeTumblingWindow) {
                Duration size =
                        deserializationContext.readValue(
                                traverse(jsonNode.get(FIELD_NAME_SIZE), jsonParser.getCodec()),
                                Duration.class);
                return new TumblingGroupWindow(alias, timeField, new ValueLiteralExpression(size));
            } else {
                long size = jsonNode.get(FIELD_NAME_SIZE).asLong();
                return new TumblingGroupWindow(alias, timeField, new ValueLiteralExpression(size));
            }
        case KIND_SLIDING:
            boolean isTimeSlidingWindow = jsonNode.get(FIELD_NAME_IS_TIME_WINDOW).asBoolean();
            if (isTimeSlidingWindow) {
                Duration size =
                        deserializationContext.readValue(
                                traverse(jsonNode.get(FIELD_NAME_SIZE), jsonParser.getCodec()),
                                Duration.class);
                Duration slide =
                        deserializationContext.readValue(
                                traverse(jsonNode.get(FIELD_NAME_SLIDE), jsonParser.getCodec()),
                                Duration.class);
                return new SlidingGroupWindow(
                        alias,
                        timeField,
                        new ValueLiteralExpression(size),
                        new ValueLiteralExpression(slide));
            } else {
                long size = jsonNode.get(FIELD_NAME_SIZE).asLong();
                long slide = jsonNode.get(FIELD_NAME_SLIDE).asLong();
                return new SlidingGroupWindow(
                        alias,
                        timeField,
                        new ValueLiteralExpression(size),
                        new ValueLiteralExpression(slide));
            }
        case KIND_SESSION:
            Duration gap =
                    deserializationContext.readValue(
                            traverse(jsonNode.get(FIELD_NAME_GAP), jsonParser.getCodec()),
                            Duration.class);
            return new SessionGroupWindow(alias, timeField, new ValueLiteralExpression(gap));
        default:
            throw new TableException("Unknown Logical Window: " + jsonNode);
    }
}
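The traverse(...) helper called for the size, slide, and gap fields comes from Flink's JsonSerdeUtil: it wraps a sub-tree in a fresh parser bound to the parent codec so that readValue can dispatch to the registered deserializers. A minimal sketch of an equivalent helper (assumed shape, not the verbatim Flink code):

// Uses the shaded com.fasterxml.jackson core/databind types already imported above.
static JsonParser traverse(JsonNode node, ObjectCodec codec) throws IOException {
    // Build a new parser over just this sub-tree, reusing the parent's codec.
    JsonParser parser = node.traverse(codec);
    if (!parser.hasCurrentToken()) {
        parser.nextToken(); // advance onto the first token, as readValue expects
    }
    return parser;
}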
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.DeserializationContext in project flink by apache.
The class ChangelogModeJsonDeserializer, method deserialize:
@Override
public ChangelogMode deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException {
    ChangelogMode.Builder builder = ChangelogMode.newBuilder();
    JsonNode rowKindsNode = jsonParser.readValueAsTree();
    for (JsonNode rowKindNode : rowKindsNode) {
        RowKind rowKind = RowKind.valueOf(rowKindNode.asText().toUpperCase());
        builder.addContainedKind(rowKind);
    }
    return builder.build();
}
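A hypothetical round-trip sketch (the class name ChangelogModeRoundTrip is illustrative, and it assumes ChangelogModeJsonDeserializer is accessible from the caller's package; Flink registers it internally through its serde module): register the deserializer on a plain shaded ObjectMapper and decode a JSON array of row kinds.

import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.module.SimpleModule;
import org.apache.flink.table.connector.ChangelogMode;

public class ChangelogModeRoundTrip {
    public static void main(String[] args) throws Exception {
        SimpleModule module = new SimpleModule();
        module.addDeserializer(ChangelogMode.class, new ChangelogModeJsonDeserializer());
        ObjectMapper mapper = new ObjectMapper().registerModule(module);

        // Row kinds are matched case-insensitively thanks to toUpperCase() above.
        ChangelogMode mode =
                mapper.readValue("[\"insert\", \"UPDATE_AFTER\", \"DELETE\"]", ChangelogMode.class);
        System.out.println(mode.getContainedKinds()); // e.g. [INSERT, UPDATE_AFTER, DELETE]
    }
}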
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.DeserializationContext in project flink by apache.
The class ColumnJsonDeserializer, method deserialize:
@Override
public Column deserialize(JsonParser jsonParser, DeserializationContext ctx) throws IOException {
    ObjectNode jsonNode = jsonParser.readValueAsTree();
    String columnName = jsonNode.required(NAME).asText();
    String columnKind =
            Optional.ofNullable(jsonNode.get(KIND)).map(JsonNode::asText).orElse(KIND_PHYSICAL);
    Column column;
    switch (columnKind) {
        case KIND_PHYSICAL:
            column = deserializePhysicalColumn(columnName, jsonNode, jsonParser.getCodec(), ctx);
            break;
        case KIND_COMPUTED:
            column = deserializeComputedColumn(columnName, jsonNode, jsonParser.getCodec(), ctx);
            break;
        case KIND_METADATA:
            column = deserializeMetadataColumn(columnName, jsonNode, jsonParser.getCodec(), ctx);
            break;
        default:
            throw new ValidationException(
                    String.format(
                            "Cannot recognize column type '%s'. Allowed types: %s.",
                            columnKind, SUPPORTED_KINDS));
    }
    return column.withComment(
            deserializeOptionalField(jsonNode, COMMENT, String.class, jsonParser.getCodec(), ctx)
                    .orElse(null));
}
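Both this method and ContextResolvedTableJsonDeserializer below lean on JsonSerdeUtil.deserializeOptionalField. A sketch consistent with these call sites (assumed signature, not the verbatim Flink implementation):

static <T> Optional<T> deserializeOptionalField(
        ObjectNode node,
        String fieldName,
        Class<T> clazz,
        ObjectCodec codec,
        DeserializationContext ctx) throws IOException {
    if (!node.hasNonNull(fieldName)) {
        return Optional.empty(); // field absent or JSON null: nothing to decode
    }
    // Re-parse just this field's sub-tree so the registered deserializers apply.
    return Optional.ofNullable(ctx.readValue(traverse(node.get(fieldName), codec), clazz));
}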
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.DeserializationContext in project flink by apache.
The class ContextResolvedTableJsonDeserializer, method deserialize:
@Override
public ContextResolvedTable deserialize(JsonParser jsonParser, DeserializationContext ctx) throws IOException {
    final CatalogPlanRestore planRestoreOption =
            SerdeContext.get(ctx).getConfiguration().get(PLAN_RESTORE_CATALOG_OBJECTS);
    final CatalogManager catalogManager =
            SerdeContext.get(ctx).getFlinkContext().getCatalogManager();
    final ObjectNode objectNode = jsonParser.readValueAsTree();

    // Deserialize the two fields, if available
    final ObjectIdentifier identifier =
            JsonSerdeUtil.deserializeOptionalField(
                            objectNode,
                            FIELD_NAME_IDENTIFIER,
                            ObjectIdentifier.class,
                            jsonParser.getCodec(),
                            ctx)
                    .orElse(null);
    ResolvedCatalogTable resolvedCatalogTable =
            JsonSerdeUtil.deserializeOptionalField(
                            objectNode,
                            FIELD_NAME_CATALOG_TABLE,
                            ResolvedCatalogTable.class,
                            jsonParser.getCodec(),
                            ctx)
                    .orElse(null);

    if (identifier == null && resolvedCatalogTable == null) {
        throw new ValidationException(
                String.format(
                        "The input JSON is invalid because it contains neither '%s' nor '%s'.",
                        FIELD_NAME_IDENTIFIER, FIELD_NAME_CATALOG_TABLE));
    }
    if (identifier == null) {
        if (isLookupForced(planRestoreOption)) {
            throw missingIdentifier();
        }
        return ContextResolvedTable.anonymous(resolvedCatalogTable);
    }

    Optional<ContextResolvedTable> contextResolvedTableFromCatalog =
            isLookupEnabled(planRestoreOption) ? catalogManager.getTable(identifier) : Optional.empty();

    // If we have a schema from both the plan and the catalog, check that they match.
    if (contextResolvedTableFromCatalog.isPresent() && resolvedCatalogTable != null) {
        ResolvedSchema schemaFromPlan = resolvedCatalogTable.getResolvedSchema();
        ResolvedSchema schemaFromCatalog =
                contextResolvedTableFromCatalog.get().getResolvedSchema();
        if (!areResolvedSchemasEqual(schemaFromPlan, schemaFromCatalog)) {
            throw schemaNotMatching(identifier, schemaFromPlan, schemaFromCatalog);
        }
    }

    if (resolvedCatalogTable == null || isLookupForced(planRestoreOption)) {
        if (!isLookupEnabled(planRestoreOption)) {
            throw lookupDisabled(identifier);
        }
        // We use what is stored inside the catalog
        return contextResolvedTableFromCatalog.orElseThrow(
                () -> missingTableFromCatalog(identifier, isLookupForced(planRestoreOption)));
    }

    if (contextResolvedTableFromCatalog.isPresent()) {
        // No options were persisted in the plan, so the table was serialized with its
        // schema only; the catalog query result is all we need.
        if (objectNode.at("/" + FIELD_NAME_CATALOG_TABLE + "/" + OPTIONS).isMissingNode()) {
            return contextResolvedTableFromCatalog.get();
        }
        return contextResolvedTableFromCatalog
                .flatMap(ContextResolvedTable::getCatalog)
                .map(c -> ContextResolvedTable.permanent(identifier, c, resolvedCatalogTable))
                .orElseGet(() -> ContextResolvedTable.temporary(identifier, resolvedCatalogTable));
    }
    return ContextResolvedTable.temporary(identifier, resolvedCatalogTable);
}
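The isLookupForced and isLookupEnabled predicates map the table.plan.restore.catalog-objects option (the CatalogPlanRestore enum: ALL, ALL_ENFORCED, IDENTIFIER) onto the two decisions above. A sketch consistent with those semantics (assumed, mirroring private helpers in the Flink class):

private static boolean isLookupForced(CatalogPlanRestore option) {
    // IDENTIFIER persists only the identifier, so a catalog lookup is mandatory.
    return option == CatalogPlanRestore.IDENTIFIER;
}

private static boolean isLookupEnabled(CatalogPlanRestore option) {
    // ALL_ENFORCED treats the plan content as authoritative and never consults the catalog.
    return option != CatalogPlanRestore.ALL_ENFORCED;
}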
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.DeserializationContext in project flink by apache.
The class ShuffleJsonDeserializer, method deserialize:
@Override
public Shuffle deserialize(JsonParser jsonParser, DeserializationContext ctx) throws IOException {
    JsonNode jsonNode = jsonParser.getCodec().readTree(jsonParser);
    Shuffle.Type type = Shuffle.Type.valueOf(jsonNode.get("type").asText().toUpperCase());
    switch (type) {
        case ANY:
            return ExecEdge.ANY_SHUFFLE;
        case SINGLETON:
            return ExecEdge.SINGLETON_SHUFFLE;
        case BROADCAST:
            return ExecEdge.BROADCAST_SHUFFLE;
        case FORWARD:
            return ExecEdge.FORWARD_SHUFFLE;
        case HASH:
            JsonNode keysNode = jsonNode.get("keys");
            if (keysNode == null || keysNode.size() == 0) {
                throw new TableException("Hash shuffle requires non-empty hash keys.");
            }
            int[] keys = new int[keysNode.size()];
            for (int i = 0; i < keysNode.size(); ++i) {
                keys[i] = keysNode.get(i).asInt();
            }
            return ExecEdge.hashShuffle(keys);
        default:
            throw new TableException("Unsupported shuffle type: " + type);
    }
}
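A hypothetical decoding sketch showing the JSON shapes the switch above accepts (the class name ShuffleDecodeExample is illustrative; it assumes ShuffleJsonDeserializer and the nested ExecEdge.Shuffle type are accessible, whereas Flink wires this up through its internal serde module):

import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.module.SimpleModule;
import org.apache.flink.table.planner.plan.nodes.exec.ExecEdge;

public class ShuffleDecodeExample {
    public static void main(String[] args) throws Exception {
        SimpleModule module = new SimpleModule();
        module.addDeserializer(ExecEdge.Shuffle.class, new ShuffleJsonDeserializer());
        ObjectMapper mapper = new ObjectMapper().registerModule(module);

        // The type field is matched case-insensitively thanks to toUpperCase() above.
        ExecEdge.Shuffle forward =
                mapper.readValue("{\"type\": \"forward\"}", ExecEdge.Shuffle.class);
        ExecEdge.Shuffle hash =
                mapper.readValue("{\"type\": \"HASH\", \"keys\": [0, 2]}", ExecEdge.Shuffle.class);
    }
}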