Use of org.apache.nifi.hbase.put.PutFlowFile in project nifi by apache: class PutHBaseRecord, method createPut.
protected PutFlowFile createPut(ProcessContext context, Record record, RecordSchema schema, FlowFile flowFile,
        String rowFieldName, String columnFamily, String timestampFieldName, String fieldEncodingStrategy,
        String rowEncodingStrategy, String complexFieldStrategy) throws PutCreationFailedInvokedException {
    PutFlowFile retVal = null;
    final String tableName = context.getProperty(TABLE_NAME).evaluateAttributeExpressions(flowFile).getValue();
    final String nullStrategy = context.getProperty(NULL_FIELD_STRATEGY).getValue();
    boolean asString = STRING_ENCODING_VALUE.equals(fieldEncodingStrategy);
    final byte[] fam = clientService.toBytes(columnFamily);
    if (record != null) {
        // Resolve the timestamp from the configured record field, if one was provided.
        final Long timestamp;
        if (!StringUtils.isBlank(timestampFieldName)) {
            try {
                timestamp = record.getAsLong(timestampFieldName);
            } catch (IllegalTypeConversionException e) {
                throw new PutCreationFailedInvokedException("Could not convert " + timestampFieldName + " to a long", e);
            }
            if (timestamp == null) {
                getLogger().warn("The value of timestamp field " + timestampFieldName + " was null, record will be inserted with latest timestamp");
            }
        } else {
            timestamp = null;
        }
        // Build one PutColumn per record field, skipping the row id and timestamp fields.
        List<PutColumn> columns = new ArrayList<>();
        for (String name : schema.getFieldNames()) {
            if (name.equals(rowFieldName) || name.equals(timestampFieldName)) {
                continue;
            }
            Object val = record.getValue(name);
            final byte[] fieldValueBytes;
            if (val == null && nullStrategy.equals(NULL_FIELD_SKIP.getValue())) {
                continue;
            } else if (val == null && nullStrategy.equals(NULL_FIELD_EMPTY.getValue())) {
                fieldValueBytes = EMPTY;
            } else {
                fieldValueBytes = asBytes(name, schema.getField(name).get().getDataType().getFieldType(), record, asString, complexFieldStrategy);
            }
            if (fieldValueBytes != null) {
                columns.add(new PutColumn(fam, clientService.toBytes(name), fieldValueBytes, timestamp));
            }
        }
        // The row id is required; fail the put if it cannot be resolved from the record.
        String rowIdValue = record.getAsString(rowFieldName);
        if (rowIdValue == null) {
            throw new PutCreationFailedInvokedException(String.format("Row ID was null for flowfile with ID %s", flowFile.getAttribute("uuid")));
        }
        byte[] rowId = getRow(rowIdValue, rowEncodingStrategy);
        retVal = new PutFlowFile(tableName, rowId, columns, flowFile);
    }
    return retVal;
}
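For reference, the PutFlowFile produced above is just a table name, an encoded row key, a list of PutColumn entries, and the originating FlowFile. A minimal sketch of assembling one directly, assuming an existing flowFile reference and UTF-8 encoding for the example key and value:

final byte[] family = "family1".getBytes(StandardCharsets.UTF_8);
final List<PutColumn> columns = new ArrayList<>();
// PutColumn carries the column family, qualifier, value bytes and an optional timestamp.
columns.add(new PutColumn(family, "qualifier1".getBytes(StandardCharsets.UTF_8),
        "value1".getBytes(StandardCharsets.UTF_8), 1L));
// `flowFile` is assumed to be the FlowFile being processed; table name and row key are example values.
final PutFlowFile put = new PutFlowFile("nifi", "row1".getBytes(StandardCharsets.UTF_8), columns, flowFile);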
Use of org.apache.nifi.hbase.put.PutFlowFile in project nifi by apache: class HBaseTestUtil, method verifyPut.
public static void verifyPut(final String row, final String columnFamily, final Long timestamp, final Map<String, byte[]> columns, final List<PutFlowFile> puts) {
    boolean foundPut = false;
    for (final PutFlowFile put : puts) {
        if (!row.equals(new String(put.getRow(), StandardCharsets.UTF_8))) {
            continue;
        }
        if (put.getColumns() == null || put.getColumns().size() != columns.size()) {
            continue;
        }
        // start off assuming we have all the columns
        boolean foundAllColumns = true;
        for (Map.Entry<String, byte[]> entry : columns.entrySet()) {
            // determine if we have the current expected column
            boolean foundColumn = false;
            for (PutColumn putColumn : put.getColumns()) {
                if (columnFamily.equals(new String(putColumn.getColumnFamily(), StandardCharsets.UTF_8))
                        && entry.getKey().equals(new String(putColumn.getColumnQualifier(), StandardCharsets.UTF_8))
                        && Arrays.equals(entry.getValue(), putColumn.getBuffer())
                        && ((timestamp == null && putColumn.getTimestamp() == null) || (timestamp != null && timestamp.equals(putColumn.getTimestamp())))) {
                    foundColumn = true;
                    break;
                }
            }
            // if we didn't have the current expected column we know we don't have all expected columns
            if (!foundColumn) {
                foundAllColumns = false;
                break;
            }
        }
        // if we found all the expected columns this was a match so we can break
        if (foundAllColumns) {
            foundPut = true;
            break;
        }
    }
    assertTrue(foundPut);
}
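A short usage sketch of this helper, assuming a MockHBaseClientService (as in the tests below) whose getFlowFilePuts() returns the captured puts keyed by table name:

final Map<String, byte[]> expected = new HashMap<>();
expected.put("qualifier1", "value1".getBytes(StandardCharsets.UTF_8));
// Verify that a put for row "row1" in family "family1" with timestamp 1L carried exactly these columns.
HBaseTestUtil.verifyPut("row1", "family1", 1L, expected, hBaseClient.getFlowFilePuts().get("nifi"));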
Use of org.apache.nifi.hbase.put.PutFlowFile in project nifi by apache: class TestPutHBaseCell, method testSingleFlowFileWithTimestamp.
@Test
public void testSingleFlowFileWithTimestamp() throws IOException, InitializationException {
    final String tableName = "nifi";
    final String row = "row1";
    final String columnFamily = "family1";
    final String columnQualifier = "qualifier1";
    final Long timestamp = 1L;
    final TestRunner runner = TestRunners.newTestRunner(PutHBaseCell.class);
    runner.setProperty(PutHBaseCell.TABLE_NAME, tableName);
    runner.setProperty(PutHBaseCell.ROW_ID, row);
    runner.setProperty(PutHBaseCell.COLUMN_FAMILY, columnFamily);
    runner.setProperty(PutHBaseCell.COLUMN_QUALIFIER, columnQualifier);
    runner.setProperty(PutHBaseCell.TIMESTAMP, timestamp.toString());
    runner.setProperty(PutHBaseCell.BATCH_SIZE, "1");
    final MockHBaseClientService hBaseClient = getHBaseClientService(runner);
    final String content = "some content";
    runner.enqueue(content.getBytes("UTF-8"));
    runner.run();
    runner.assertAllFlowFilesTransferred(PutHBaseCell.REL_SUCCESS);
    final MockFlowFile outFile = runner.getFlowFilesForRelationship(PutHBaseCell.REL_SUCCESS).get(0);
    outFile.assertContentEquals(content);
    assertNotNull(hBaseClient.getFlowFilePuts());
    assertEquals(1, hBaseClient.getFlowFilePuts().size());
    List<PutFlowFile> puts = hBaseClient.getFlowFilePuts().get(tableName);
    assertEquals(1, puts.size());
    verifyPut(row, columnFamily, columnQualifier, timestamp, content, puts.get(0));
    assertEquals(1, runner.getProvenanceEvents().size());
}
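Note that the verifyPut called here is a local test helper with a different signature from the map-based HBaseTestUtil.verifyPut above: it checks a single cell on a single PutFlowFile. A hedged sketch of what such a helper might look like (not necessarily the project's exact implementation):

private void verifyPut(final String row, final String columnFamily, final String columnQualifier,
        final Long timestamp, final String content, final PutFlowFile put) {
    // Row key, single column, and cell contents should all match the expectations.
    assertEquals(row, new String(put.getRow(), StandardCharsets.UTF_8));
    assertEquals(1, put.getColumns().size());
    final PutColumn column = put.getColumns().iterator().next();
    assertEquals(columnFamily, new String(column.getColumnFamily(), StandardCharsets.UTF_8));
    assertEquals(columnQualifier, new String(column.getColumnQualifier(), StandardCharsets.UTF_8));
    assertEquals(content, new String(column.getBuffer(), StandardCharsets.UTF_8));
    assertEquals(timestamp, column.getTimestamp());
}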
Use of org.apache.nifi.hbase.put.PutFlowFile in project nifi by apache: class TestPutHBaseCell, method testSingleFlowFileNoTimestamp.
@Test
public void testSingleFlowFileNoTimestamp() throws IOException, InitializationException {
    final String tableName = "nifi";
    final String row = "row1";
    final String columnFamily = "family1";
    final String columnQualifier = "qualifier1";
    final TestRunner runner = TestRunners.newTestRunner(PutHBaseCell.class);
    runner.setProperty(PutHBaseCell.TABLE_NAME, tableName);
    runner.setProperty(PutHBaseCell.ROW_ID, row);
    runner.setProperty(PutHBaseCell.COLUMN_FAMILY, columnFamily);
    runner.setProperty(PutHBaseCell.COLUMN_QUALIFIER, columnQualifier);
    runner.setProperty(PutHBaseCell.BATCH_SIZE, "1");
    final MockHBaseClientService hBaseClient = getHBaseClientService(runner);
    final String content = "some content";
    runner.enqueue(content.getBytes("UTF-8"));
    runner.run();
    runner.assertAllFlowFilesTransferred(PutHBaseCell.REL_SUCCESS);
    final MockFlowFile outFile = runner.getFlowFilesForRelationship(PutHBaseCell.REL_SUCCESS).get(0);
    outFile.assertContentEquals(content);
    assertNotNull(hBaseClient.getFlowFilePuts());
    assertEquals(1, hBaseClient.getFlowFilePuts().size());
    List<PutFlowFile> puts = hBaseClient.getFlowFilePuts().get(tableName);
    assertEquals(1, puts.size());
    verifyPut(row, columnFamily, columnQualifier, null, content, puts.get(0));
    assertEquals(1, runner.getProvenanceEvents().size());
}
Use of org.apache.nifi.hbase.put.PutFlowFile in project nifi by apache: class TestPutHBaseCell, method testSingleFlowFileWithEL.
@Test
public void testSingleFlowFileWithEL() throws IOException, InitializationException {
    final String tableName = "nifi";
    final String row = "row1";
    final String columnFamily = "family1";
    final String columnQualifier = "qualifier1";
    final Long timestamp = 1L;
    final PutHBaseCell proc = new PutHBaseCell();
    final TestRunner runner = getTestRunnerWithEL(proc);
    runner.setProperty(PutHBaseCell.TIMESTAMP, "${hbase.timestamp}");
    runner.setProperty(PutHBaseCell.BATCH_SIZE, "1");
    final MockHBaseClientService hBaseClient = getHBaseClientService(runner);
    final String content = "some content";
    final Map<String, String> attributes = getAttributeMapWithEL(tableName, row, columnFamily, columnQualifier);
    attributes.put("hbase.timestamp", timestamp.toString());
    runner.enqueue(content.getBytes("UTF-8"), attributes);
    runner.run();
    runner.assertAllFlowFilesTransferred(PutHBaseCell.REL_SUCCESS);
    final MockFlowFile outFile = runner.getFlowFilesForRelationship(PutHBaseCell.REL_SUCCESS).get(0);
    outFile.assertContentEquals(content);
    assertNotNull(hBaseClient.getFlowFilePuts());
    assertEquals(1, hBaseClient.getFlowFilePuts().size());
    List<PutFlowFile> puts = hBaseClient.getFlowFilePuts().get(tableName);
    assertEquals(1, puts.size());
    verifyPut(row, columnFamily, columnQualifier, timestamp, content, puts.get(0));
    assertEquals(1, runner.getProvenanceEvents().size());
}
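getTestRunnerWithEL and getAttributeMapWithEL are test helpers not shown in this excerpt. A hedged sketch of what they presumably do, wiring the processor properties to Expression Language references and supplying the matching FlowFile attributes (the attribute names used here are assumptions):

private TestRunner getTestRunnerWithEL(final PutHBaseCell proc) {
    final TestRunner runner = TestRunners.newTestRunner(proc);
    // Point each property at a FlowFile attribute via Expression Language.
    runner.setProperty(PutHBaseCell.TABLE_NAME, "${hbase.tableName}");
    runner.setProperty(PutHBaseCell.ROW_ID, "${hbase.row}");
    runner.setProperty(PutHBaseCell.COLUMN_FAMILY, "${hbase.columnFamily}");
    runner.setProperty(PutHBaseCell.COLUMN_QUALIFIER, "${hbase.columnQualifier}");
    return runner;
}

private Map<String, String> getAttributeMapWithEL(final String tableName, final String row,
        final String columnFamily, final String columnQualifier) {
    // Supply the attributes that the EL expressions above resolve against.
    final Map<String, String> attributes = new HashMap<>();
    attributes.put("hbase.tableName", tableName);
    attributes.put("hbase.row", row);
    attributes.put("hbase.columnFamily", columnFamily);
    attributes.put("hbase.columnQualifier", columnQualifier);
    return attributes;
}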