Use of io.confluent.ksql.function.udf.Udf in project ksql by confluentinc.
The class JsonKeys, method keys.
@Udf
public List<String> keys(@UdfParameter final String jsonObj) {
  if (jsonObj == null) {
    return null;
  }
  final JsonNode node = UdfJsonMapper.parseJson(jsonObj);
  if (node.isMissingNode() || !node.isObject()) {
    return null;
  }
  final List<String> ret = new ArrayList<>();
  node.fieldNames().forEachRemaining(ret::add);
  return ret;
}
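Read directly off the method above, a minimal usage sketch (the call sites are illustrative, not taken from the ksql test suite):

final JsonKeys udf = new JsonKeys();
udf.keys("{\"a\": 1, \"b\": 2}"); // ["a", "b"]
udf.keys("[1, 2]");               // null: valid JSON, but not an object
udf.keys("not json");             // null, assuming parseJson maps invalid JSON to a
                                  // missing node (the isMissingNode() guard suggests this)
udf.keys(null);                   // null input short-circuits to null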
Use of io.confluent.ksql.function.udf.Udf in project ksql by confluentinc.
The class FunctionLoaderUtils, method handleUdfReturnSchema.
// CHECKSTYLE_RULES.OFF: CyclomaticComplexity
static SchemaProvider handleUdfReturnSchema(
    final Class theClass,
    final ParamType javaReturnSchema,
    final String annotationSchema,
    final SqlTypeParser parser,
    final String schemaProviderFunctionName,
    final String functionName,
    final boolean isVariadic
) {
  // CHECKSTYLE_RULES.ON: CyclomaticComplexity
  final Function<List<SqlArgument>, SqlType> schemaProvider;
  if (!Udf.NO_SCHEMA_PROVIDER.equals(schemaProviderFunctionName)) {
    schemaProvider = handleUdfSchemaProviderAnnotation(
        schemaProviderFunctionName, theClass, functionName);
  } else if (!Udf.NO_SCHEMA.equals(annotationSchema)) {
    final SqlType sqlType = parser.parse(annotationSchema).getSqlType();
    schemaProvider = args -> sqlType;
  } else if (!GenericsUtil.hasGenerics(javaReturnSchema)) {
    // it is important to do this eagerly and not in the lambda so that
    // we can fail early (when loading the UDF) instead of when the user
    // attempts to use the UDF
    final SqlType sqlType = fromJavaType(javaReturnSchema, functionName);
    schemaProvider = args -> sqlType;
  } else {
    schemaProvider = null;
  }

  return (parameters, arguments) -> {
    if (schemaProvider != null) {
      final SqlType returnType = schemaProvider.apply(arguments);
      if (!(ParamTypes.areCompatible(SqlArgument.of(returnType), javaReturnSchema, false))) {
        throw new KsqlException(String.format(
            "Return type %s of UDF %s does not match the declared return type %s.",
            returnType,
            functionName.toUpperCase(),
            SchemaConverters.functionToSqlConverter().toSqlType(javaReturnSchema)
        ));
      }
      return returnType;
    }

    final Map<GenericType, SqlType> genericMapping = new HashMap<>();
    for (int i = 0; i < Math.min(parameters.size(), arguments.size()); i++) {
      final ParamType schema = parameters.get(i);
      if (schema instanceof LambdaType) {
        if (isVariadic && i == parameters.size() - 1) {
          throw new KsqlException(String.format(
              "Lambda function %s cannot be variadic.",
              arguments.get(i).toString()));
        }
        genericMapping.putAll(GenericsUtil.reserveGenerics(schema, arguments.get(i)));
      } else {
        // we resolve any variadic as if it were an array so that the type
        // structure matches the input type
        final SqlType instance = isVariadic && i == parameters.size() - 1
            ? SqlTypes.array(arguments.get(i).getSqlTypeOrThrow())
            : arguments.get(i).getSqlTypeOrThrow();
        genericMapping.putAll(GenericsUtil.reserveGenerics(schema, SqlArgument.of(instance)));
      }
    }

    return GenericsUtil.applyResolved(javaReturnSchema, genericMapping);
  };
}
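The three non-null schemaProvider branches correspond to the three ways a UDF author can declare a return type. Below is a hypothetical UDF class sketching each branch (ExampleReturnTypes and its methods are invented for illustration; it assumes the @Udf schemaProvider/schema attributes and the @UdfSchemaProvider annotation that the NO_SCHEMA_PROVIDER and NO_SCHEMA checks above refer to, with standard ksql package locations):

import io.confluent.ksql.function.udf.Udf;
import io.confluent.ksql.function.udf.UdfParameter;
import io.confluent.ksql.function.udf.UdfSchemaProvider;
import io.confluent.ksql.schema.ksql.SqlArgument;
import io.confluent.ksql.schema.ksql.types.SqlType;
import java.util.List;

public class ExampleReturnTypes {

  // Branch 1: the schemaProvider attribute names a method resolved per query,
  // so the return type can depend on the argument types.
  @Udf(schemaProvider = "provideSchema")
  public Object passthrough(@UdfParameter final Object input) {
    return input;
  }

  @UdfSchemaProvider
  public SqlType provideSchema(final List<SqlArgument> params) {
    // Return type mirrors the SQL type of the first argument.
    return params.get(0).getSqlTypeOrThrow();
  }

  // Branch 2: a fixed schema string on the annotation, parsed once at load time.
  @Udf(schema = "VARCHAR")
  public String shout(@UdfParameter final String input) {
    return input == null ? null : input.toUpperCase();
  }

  // Branch 3: no schema metadata and a non-generic Java return type, so the
  // SQL type is derived eagerly, failing at load time rather than query time.
  @Udf
  public Boolean isBlank(@UdfParameter final String input) {
    return input == null || input.trim().isEmpty();
  }
}

The remaining case, a generic Java return type with no schema metadata, is the one deferred to the returned lambda, which resolves the generics from the actual argument types via genericMapping at query time.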
Use of io.confluent.ksql.function.udf.Udf in project ksql by confluentinc.
The class RegexpExtractAll, method regexpExtractAll.
@Udf(description = "Returns all substrings of the input that matches the regex pattern "
    + "and the capturing group number specified")
public List<String> regexpExtractAll(
    @UdfParameter(description = "The regex pattern") final String pattern,
    @UdfParameter(description = "The input string to apply regex on") final String input,
    @UdfParameter(description = "The capturing group number") final Integer group
) {
  if (pattern == null || input == null || group == null) {
    return null;
  }
  final Pattern p = getPattern(pattern);
  final Matcher m = p.matcher(input);
  if (group > m.groupCount()) {
    return null;
  }
  final List<String> matches = new ArrayList<>();
  while (m.find()) {
    matches.add(m.group(group));
  }
  return matches;
}
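A minimal usage sketch (illustrative calls, not from the ksql tests); note that Matcher.groupCount() excludes group 0, so passing 0 clears the guard and yields the full text of each match:

final RegexpExtractAll udf = new RegexpExtractAll();
udf.regexpExtractAll("(\\w+)@(\\w+)", "alice@home bob@work", 2);
// ["home", "work"]: capturing group 2 of each match
udf.regexpExtractAll("(\\w+)@(\\w+)", "alice@home bob@work", 0);
// ["alice@home", "bob@work"]: group 0 is the whole match
udf.regexpExtractAll("(\\w+)@(\\w+)", "alice@home", 3);
// null: the pattern declares only two capturing groups
udf.regexpExtractAll(null, "alice@home", 1);
// null: any null argument short-circuits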
Use of io.confluent.ksql.function.udf.Udf in project ksql by confluentinc.
The class JsonArrayContains, method contains.
@Udf
public <T> Boolean contains(
    @UdfParameter final String jsonArray,
    @UdfParameter final T val
) {
  try (JsonParser parser = PARSER_FACTORY.createParser(jsonArray)) {
    if (parser.nextToken() != START_ARRAY) {
      return false;
    }
    while (parser.nextToken() != null) {
      final JsonToken token = parser.currentToken();
      if (token == null) {
        return val == null;
      } else if (token == END_ARRAY) {
        return false;
      }
      parser.skipChildren();
      if (TOKEN_COMPAT.getOrDefault(token, foo -> false).test(val)) {
        if (token == VALUE_NULL
            || (val != null && Objects.equals(parser.readValueAs(val.getClass()), val))) {
          return true;
        }
      }
    }
    return false;
  } catch (final IOException e) {
    return false;
  }
}
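TOKEN_COMPAT (defined elsewhere in the class) maps each JSON token type to a predicate over the probe value's Java type, so only type-compatible elements are deserialized and compared. A usage sketch under that reading (illustrative, not from the ksql tests):

final JsonArrayContains udf = new JsonArrayContains();
udf.contains("[1, 2, 3]", 2);       // true: the element deserializes to an equal Integer
udf.contains("[1, 2, 3]", 4);       // false: the loop reaches END_ARRAY without a match
udf.contains("[1, null, 3]", null); // true, assuming TOKEN_COMPAT pairs VALUE_NULL
                                    // with a null probe value
udf.contains("{\"a\": 1}", 1);      // false: the first token is not START_ARRAY
udf.contains("not json", 1);        // false: the parse error surfaces as an IOException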
Use of io.confluent.ksql.function.udf.Udf in project ksql by confluentinc.
The class JsonConcat, method concat.
@Udf
public String concat(@UdfParameter final String... jsonStrings) {
  if (jsonStrings == null) {
    return null;
  }
  final List<JsonNode> nodes = new ArrayList<>(jsonStrings.length);
  boolean allObjects = true;
  for (final String jsonString : jsonStrings) {
    if (jsonString == null) {
      return null;
    }
    final JsonNode node = UdfJsonMapper.parseJson(jsonString);
    if (node.isMissingNode()) {
      return null;
    }
    if (allObjects && !node.isObject()) {
      allObjects = false;
    }
    nodes.add(node);
  }
  JsonNode result = nodes.get(0);
  if (allObjects) {
    for (int i = 1; i < nodes.size(); i++) {
      result = concatObjects((ObjectNode) result, (ObjectNode) nodes.get(i));
    }
  } else {
    for (int i = 1; i < nodes.size(); i++) {
      result = concatArrays(toArrayNode(result), toArrayNode(nodes.get(i)));
    }
  }
  return UdfJsonMapper.writeValueAsJson(result);
}
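A usage sketch (illustrative; the mixed-kind result assumes toArrayNode, defined elsewhere in the class, wraps a non-array node in a single-element array):

final JsonConcat udf = new JsonConcat();
udf.concat("{\"a\": 1}", "{\"b\": 2}"); // {"a":1,"b":2}: all objects, merged via concatObjects
udf.concat("[1, 2]", "[3]");            // [1,2,3]: arrays concatenated via concatArrays
udf.concat("{\"a\": 1}", "[2]");        // mixed kinds take the array path; under the
                                        // wrapping assumption this yields [{"a":1},2]
udf.concat("{\"a\": 1}", null);         // null: a null or unparseable argument returns null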