use of com.datastax.driver.core.exceptions.InvalidTypeException in project nifi by apache.
the class PutCassandraQL, method onTrigger.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    ComponentLog logger = getLogger();
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final long startNanos = System.nanoTime();
    final long statementTimeout = context.getProperty(STATEMENT_TIMEOUT).evaluateAttributeExpressions(flowFile).asTimePeriod(TimeUnit.MILLISECONDS);
    final Charset charset = Charset.forName(context.getProperty(CHARSET).evaluateAttributeExpressions(flowFile).getValue());

    // The documentation for the driver recommends the session remain open the entire time the processor is running
    // and states that it is thread-safe. This is why connectionSession is not in a try-with-resources.
    final Session connectionSession = cassandraSession.get();

    String cql = getCQL(session, flowFile, charset);
    try {
        PreparedStatement statement = connectionSession.prepare(cql);
        BoundStatement boundStatement = statement.bind();

        Map<String, String> attributes = flowFile.getAttributes();
        for (final Map.Entry<String, String> entry : attributes.entrySet()) {
            final String key = entry.getKey();
            final Matcher matcher = CQL_TYPE_ATTRIBUTE_PATTERN.matcher(key);
            if (matcher.matches()) {
                final int parameterIndex = Integer.parseInt(matcher.group(1));
                String paramType = entry.getValue();
                if (StringUtils.isEmpty(paramType)) {
                    throw new ProcessException("Value of the " + key + " attribute is null or empty, it must contain a valid value");
                }
                paramType = paramType.trim();
                final String valueAttrName = "cql.args." + parameterIndex + ".value";
                final String parameterValue = attributes.get(valueAttrName);

                try {
                    setStatementObject(boundStatement, parameterIndex - 1, valueAttrName, parameterValue, paramType);
                } catch (final InvalidTypeException | IllegalArgumentException e) {
                    throw new ProcessException("The value of the " + valueAttrName + " is '" + parameterValue
                            + "', which cannot be converted into the necessary data type: " + paramType, e);
                }
            }
        }

        try {
            ResultSetFuture future = connectionSession.executeAsync(boundStatement);
            if (statementTimeout > 0) {
                future.getUninterruptibly(statementTimeout, TimeUnit.MILLISECONDS);
            } else {
                future.getUninterruptibly();
            }
            // Emit a Provenance SEND event
            final long transmissionMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
            // This isn't a real URI but since Cassandra is distributed we just use the cluster name
            String transitUri = "cassandra://" + connectionSession.getCluster().getMetadata().getClusterName();
            session.getProvenanceReporter().send(flowFile, transitUri, transmissionMillis, true);
            session.transfer(flowFile, REL_SUCCESS);
        } catch (final TimeoutException e) {
            throw new ProcessException(e);
        }

    } catch (final NoHostAvailableException nhae) {
        getLogger().error("No host in the Cassandra cluster can be contacted successfully to execute this statement", nhae);
        // Log up to 10 error messages. Otherwise if a 1000-node cluster was specified but there was no connectivity,
        // a thousand error messages would be logged. However we would like information from Cassandra itself, so
        // cap the error limit at 10, format the messages, and don't include the stack trace (it is displayed by the
        // logger message above).
        getLogger().error(nhae.getCustomMessage(10, true, false));
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_RETRY);
    } catch (final QueryExecutionException qee) {
        logger.error("Cannot execute the statement with the requested consistency level successfully", qee);
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_RETRY);
    } catch (final QueryValidationException qve) {
        logger.error("The CQL statement {} is invalid due to syntax error, authorization issue, or another "
                + "validation problem; routing {} to failure", new Object[] {cql, flowFile}, qve);
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
    } catch (final ProcessException e) {
        logger.error("Unable to execute CQL select statement {} for {} due to {}; routing to failure", new Object[] {cql, flowFile, e});
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
    }
}
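For context, the processor pairs each cql.args.N.type attribute with the matching cql.args.N.value attribute and binds the value at parameter position N-1. Below is a minimal sketch of what a helper like setStatementObject could look like for a few CQL types; the body and the supported types are assumptions for illustration, not NiFi's actual implementation, and unsupported types fall through to the IllegalArgumentException that onTrigger wraps in a ProcessException.

// Hypothetical, simplified stand-in for setStatementObject: bind a single parameter
// of the given CQL type at the given index. The driver's setters throw
// InvalidTypeException if the bound column's type does not match.
private void setStatementObject(final BoundStatement statement, final int index,
                                final String attrName, final String value, final String type) {
    switch (type.toLowerCase()) {
        case "text":
        case "varchar":
            statement.setString(index, value);
            break;
        case "int":
            statement.setInt(index, Integer.parseInt(value));
            break;
        case "bigint":
            statement.setLong(index, Long.parseLong(value));
            break;
        case "boolean":
            statement.setBool(index, Boolean.parseBoolean(value));
            break;
        default:
            throw new IllegalArgumentException("Unsupported CQL type for attribute " + attrName + ": " + type);
    }
}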
use of com.datastax.driver.core.exceptions.InvalidTypeException in project java-driver by datastax.
the class QueryBuilderTest, method should_include_original_cause_when_arguments_invalid.
@Test(groups = "unit")
public void should_include_original_cause_when_arguments_invalid() {
    // Collection elements in protocol v2 must be at most 65535 bytes
    ByteBuffer bb = ByteBuffer.allocate(65536); // too big
    List<ByteBuffer> value = Lists.newArrayList(bb);

    BuiltStatement s = insertInto("foo").value("l", value);
    try {
        s.getValues(ProtocolVersion.V2, CodecRegistry.DEFAULT_INSTANCE);
        fail("Expected an InvalidTypeException");
    } catch (InvalidTypeException e) {
        assertThat(e.getCause()).isInstanceOf(IllegalArgumentException.class);
        StringWriter writer = new StringWriter();
        e.getCause().printStackTrace(new PrintWriter(writer));
        String stackTrace = writer.toString();
        assertThat(stackTrace).contains("Native protocol version 2 supports only elements with size up to 65535 bytes - "
                + "but element size is 65536 bytes");
    }
}
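For comparison, a short sketch of the non-failing case, reusing the same static imports and helpers as the test above: an element at exactly the 65535-byte protocol-v2 limit serializes without an InvalidTypeException.

// Sketch only: an element at the protocol v2 size limit is accepted.
ByteBuffer ok = ByteBuffer.allocate(65535);
BuiltStatement valid = insertInto("foo").value("l", Lists.newArrayList(ok));
ByteBuffer[] serialized = valid.getValues(ProtocolVersion.V2, CodecRegistry.DEFAULT_INSTANCE);
assertThat(serialized).hasSize(1);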
use of com.datastax.driver.core.exceptions.InvalidTypeException in project java-driver by datastax.
the class AbstractArrayCodec, method parse.
@Override
public T parse(String value) throws InvalidTypeException {
    if (value == null || value.isEmpty() || value.equalsIgnoreCase("NULL"))
        return null;

    int idx = skipSpaces(value, 0);
    if (value.charAt(idx++) != '[')
        throw new InvalidTypeException(String.format("cannot parse list value from \"%s\", at character %d expecting '[' but got '%c'", value, idx, value.charAt(idx)));

    idx = skipSpaces(value, idx);
    if (value.charAt(idx) == ']')
        return newInstance(0);

    // first pass: determine array length
    int length = getArrayLength(value, idx);

    // second pass: parse elements
    T array = newInstance(length);
    int i = 0;
    for (; idx < value.length(); i++) {
        int n = skipLiteral(value, idx);
        parseElement(value.substring(idx, n), array, i);
        idx = skipSpaces(value, n);
        if (value.charAt(idx) == ']')
            return array;
        idx = skipComma(value, idx);
        idx = skipSpaces(value, idx);
    }
    throw new InvalidTypeException(String.format("Malformed list value \"%s\", missing closing ']'", value));
}
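A usage sketch of the same parsing contract, shown here with the driver's built-in list codec (TypeCodec.list) rather than the extras array codec itself; the class name below is just an example. Well-formed CQL list literals are parsed, and malformed ones are rejected with InvalidTypeException.

import com.datastax.driver.core.TypeCodec;
import com.datastax.driver.core.exceptions.InvalidTypeException;

import java.util.List;

public class ListParseExample {
    public static void main(String[] args) {
        TypeCodec<List<Integer>> codec = TypeCodec.list(TypeCodec.cint());
        // Well-formed literal: parsed into a Java list.
        System.out.println(codec.parse("[1, 2, 3]"));
        // Missing closing bracket: rejected with InvalidTypeException.
        try {
            codec.parse("[1, 2, 3");
        } catch (InvalidTypeException e) {
            System.out.println("Rejected: " + e.getMessage());
        }
    }
}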
use of com.datastax.driver.core.exceptions.InvalidTypeException in project java-driver by datastax.
the class ObjectArrayCodec, method serialize.
@Override
public ByteBuffer serialize(E[] value, ProtocolVersion protocolVersion) {
    if (value == null)
        return null;
    int i = 0;
    ByteBuffer[] bbs = new ByteBuffer[value.length];
    for (E elt : value) {
        if (elt == null) {
            throw new NullPointerException("Collection elements cannot be null");
        }
        ByteBuffer bb;
        try {
            bb = eltCodec.serialize(elt, protocolVersion);
        } catch (ClassCastException e) {
            throw new InvalidTypeException(String.format("Invalid type for %s element, expecting %s but got %s", cqlType, eltCodec.getJavaType(), elt.getClass()), e);
        }
        bbs[i++] = bb;
    }
    return CodecUtils.pack(bbs, value.length, protocolVersion);
}
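The ClassCastException branch is only reachable through unchecked code, for example when a raw or doubly-cast collection smuggles a wrong-typed element past the compiler. Below is a sketch of that situation using the driver's built-in list codec, which follows the same wrap-into-InvalidTypeException pattern; this is an analogy for illustration, not the extras array codec itself.

import com.datastax.driver.core.ProtocolVersion;
import com.datastax.driver.core.TypeCodec;
import com.datastax.driver.core.exceptions.InvalidTypeException;

import java.util.Arrays;
import java.util.List;

public class ElementTypeMismatchExample {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        TypeCodec<List<Integer>> codec = TypeCodec.list(TypeCodec.cint());
        // Unchecked cast sneaks a String into what the codec believes is a List<Integer>.
        List<Integer> tainted = (List<Integer>) (List<?>) Arrays.asList(1, "not an int");
        try {
            codec.serialize(tainted, ProtocolVersion.V4);
        } catch (InvalidTypeException e) {
            // The cause is the underlying ClassCastException, as in ObjectArrayCodec above.
            System.out.println(e.getMessage() + " (cause: " + e.getCause() + ")");
        }
    }
}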
use of com.datastax.driver.core.exceptions.InvalidTypeException in project java-driver by datastax.
the class LocalDateCodec, method parse.
@Override
public java.time.LocalDate parse(String value) {
    if (value == null || value.isEmpty() || value.equalsIgnoreCase("NULL"))
        return null;

    // strip enclosing single quotes, if any
    if (isQuoted(value))
        value = unquote(value);

    if (isLongLiteral(value)) {
        long raw;
        try {
            raw = parseLong(value);
        } catch (NumberFormatException e) {
            throw new InvalidTypeException(String.format("Cannot parse date value from \"%s\"", value));
        }
        int days;
        try {
            days = fromCqlDateToDaysSinceEpoch(raw);
        } catch (IllegalArgumentException e) {
            throw new InvalidTypeException(String.format("Cannot parse date value from \"%s\"", value));
        }
        return EPOCH.plusDays(days);
    }

    try {
        return java.time.LocalDate.parse(value, java.time.format.DateTimeFormatter.ISO_LOCAL_DATE);
    } catch (RuntimeException e) {
        throw new InvalidTypeException(String.format("Cannot parse date value from \"%s\"", value));
    }
}
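A usage sketch for this codec, assuming the driver-extras LocalDateCodec.instance singleton: both accepted input forms (a quoted ISO-8601 literal and the raw unsigned CQL date integer) parse successfully, and anything else is rejected with InvalidTypeException.

import com.datastax.driver.core.exceptions.InvalidTypeException;
import com.datastax.driver.extras.codecs.jdk8.LocalDateCodec;

public class LocalDateParseExample {
    public static void main(String[] args) {
        // Quoted ISO-8601 literal, as it appears in CQL.
        System.out.println(LocalDateCodec.instance.parse("'2017-01-01'"));
        // Raw unsigned integer form: 2^31 encodes the epoch, 1970-01-01.
        System.out.println(LocalDateCodec.instance.parse("2147483648"));
        // Anything else is rejected.
        try {
            LocalDateCodec.instance.parse("not a date");
        } catch (InvalidTypeException e) {
            System.out.println("Rejected: " + e.getMessage());
        }
    }
}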