Search in sources :

Example 6 with RowResult

use of org.apache.kudu.client.RowResult in project gora by apache.

In class KuduStore, the method exists:

@Override
public boolean exists(K key) throws GoraException {
    try {
        // Build an equality predicate on the (single) primary-key column for this key.
        ColumnSchema column = table.getSchema().getColumn(kuduMapping.getPrimaryKey().get(0).getName());
        ArrayList<KuduPredicate> equalPredicate = new ArrayList<>();
        equalPredicate.add(KuduClientUtils.createEqualPredicate(column, key));
        // No projected columns and a row limit of 1: we only need to know whether a row exists.
        KuduScanner build = createScanner(equalPredicate, new ArrayList<>(), 1);
        try {
            RowResult waitFirstResult = KuduClientUtils.waitFirstResult(build);
            return waitFirstResult != null;
        } finally {
            // Close in finally so the scanner is released even if waiting for the result throws.
            build.close();
        }
    } catch (Exception e) {
        // Wrap any client-side failure in the store's checked exception type.
        throw new GoraException(e);
    }
}
Also used : RowResult(org.apache.kudu.client.RowResult) GoraException(org.apache.gora.util.GoraException) KuduScanner(org.apache.kudu.client.KuduScanner) ArrayList(java.util.ArrayList) ColumnSchema(org.apache.kudu.ColumnSchema) KuduPredicate(org.apache.kudu.client.KuduPredicate) KuduException(org.apache.kudu.client.KuduException) GoraException(org.apache.gora.util.GoraException) IOException(java.io.IOException)

Example 7 with RowResult

use of org.apache.kudu.client.RowResult in project gora by apache.

In class KuduStore, the method get:

@Override
public T get(K key, String[] fields) throws GoraException {
    // Resolve the requested Avro fields (null means "all fields") to their Kudu column names.
    String[] avFields = getFieldsToQuery(fields);
    List<String> dbFields = new ArrayList<>();
    for (String af : avFields) {
        dbFields.add(kuduMapping.getFields().get(af).getName());
    }
    try {
        // Equality predicate on the (single) primary-key column, limit 1: at most one match.
        ColumnSchema column = table.getSchema().getColumn(kuduMapping.getPrimaryKey().get(0).getName());
        ArrayList<KuduPredicate> equalPredicate = new ArrayList<>();
        equalPredicate.add(KuduClientUtils.createEqualPredicate(column, key));
        KuduScanner build = createScanner(equalPredicate, dbFields, 1);
        try {
            RowResult waitGetOneOrZero = KuduClientUtils.waitFirstResult(build);
            // Materialize the persistent object only when a row was found; otherwise return null.
            T resp = null;
            if (waitGetOneOrZero != null) {
                resp = newInstance(waitGetOneOrZero, fields);
            }
            return resp;
        } finally {
            // Close in finally so the scanner is released even if reading or mapping throws.
            build.close();
        }
    } catch (Exception ex) {
        // Wrap any client-side failure in the store's checked exception type.
        throw new GoraException(ex);
    }
}
Also used : RowResult(org.apache.kudu.client.RowResult) GoraException(org.apache.gora.util.GoraException) KuduScanner(org.apache.kudu.client.KuduScanner) ArrayList(java.util.ArrayList) ColumnSchema(org.apache.kudu.ColumnSchema) KuduPredicate(org.apache.kudu.client.KuduPredicate) KuduException(org.apache.kudu.client.KuduException) GoraException(org.apache.gora.util.GoraException) IOException(java.io.IOException)

Example 8 with RowResult

use of org.apache.kudu.client.RowResult in project apex-malhar by apache.

In class KuduInputOperatorCommons, the method truncateTable:

public void truncateTable() throws Exception {
    // Empty the test table by scanning every tablet and issuing a delete per row.
    AbstractKuduPartitionScanner<UnitTestTablePojo, InputOperatorControlTuple> scannerForDeletingRows = unitTestStepwiseScanInputOperator.getScanner();
    List<KuduScanToken> scansForAllTablets = unitTestStepwiseScanInputOperator.getPartitioner().getKuduScanTokensForSelectAllColumns();
    ApexKuduConnection aCurrentConnection = scannerForDeletingRows.getConnectionPoolForThreads().get(0);
    KuduSession aSessionForDeletes = aCurrentConnection.getKuduClient().newSession();
    try {
        KuduTable currentTable = aCurrentConnection.getKuduTable();
        for (KuduScanToken aTabletScanToken : scansForAllTablets) {
            KuduScanner aScanner = aTabletScanToken.intoScanner(aCurrentConnection.getKuduClient());
            try {
                while (aScanner.hasMoreRows()) {
                    RowResultIterator itrForRows = aScanner.nextRows();
                    while (itrForRows.hasNext()) {
                        RowResult aRow = itrForRows.next();
                        // The table's composite primary key: (introwkey, stringrowkey, timestamprowkey).
                        int intRowKey = aRow.getInt("introwkey");
                        String stringRowKey = aRow.getString("stringrowkey");
                        long timestampRowKey = aRow.getLong("timestamprowkey");
                        Delete aDeleteOp = currentTable.newDelete();
                        aDeleteOp.getRow().addInt("introwkey", intRowKey);
                        aDeleteOp.getRow().addString("stringrowkey", stringRowKey);
                        aDeleteOp.getRow().addLong("timestamprowkey", timestampRowKey);
                        aSessionForDeletes.apply(aDeleteOp);
                    }
                }
            } finally {
                // Release each tablet scanner even if iteration fails part-way.
                aScanner.close();
            }
        }
    } finally {
        // Close in finally so the session is released (and pending deletes flushed) on any failure.
        aSessionForDeletes.close();
    }
    // Sleep to allow for scans to complete
    Thread.sleep(2000);
}
Also used : Delete(org.apache.kudu.client.Delete) KuduScanToken(org.apache.kudu.client.KuduScanToken) KuduSession(org.apache.kudu.client.KuduSession) KuduTable(org.apache.kudu.client.KuduTable) RowResultIterator(org.apache.kudu.client.RowResultIterator) RowResult(org.apache.kudu.client.RowResult) KuduScanner(org.apache.kudu.client.KuduScanner)

Example 9 with RowResult

use of org.apache.kudu.client.RowResult in project hive by apache.

In class TestKuduInputFormat, the method testPredicate:

@Test
public void testPredicate() throws Exception {
    // Insert a second test row that will be filtered out.
    KuduTable testTable = harness.getClient().openTable(TABLE_NAME);
    KuduSession kuduSession = harness.getClient().newSession();
    Insert secondRowInsert = testTable.newInsert();
    PartialRow secondRow = secondRowInsert.getRow();
    secondRow.addByte("key", (byte) 2);
    secondRow.addShort("int16", (short) 2);
    secondRow.addInt("int32", 2);
    secondRow.addLong("int64", 2L);
    secondRow.addBoolean("bool", false);
    secondRow.addFloat("float", 2.2f);
    secondRow.addDouble("double", 2.2d);
    secondRow.addString("string", "two");
    secondRow.addBinary("binary", "two".getBytes(UTF_8));
    secondRow.addTimestamp("timestamp", new Timestamp(NOW_MS + 1));
    secondRow.addDecimal("decimal", new BigDecimal("2.222"));
    secondRow.setNull("null");
    // Not setting the "default" column.
    kuduSession.apply(secondRowInsert);
    kuduSession.close();
    KuduInputFormat inputFormat = new KuduInputFormat();
    // Test an equality predicate for each column.
    for (ColumnSchema column : SCHEMA.getColumns()) {
        String columnName = column.getName();
        // Skip binary columns because binary predicates are not supported. (HIVE-11370)
        if (columnName.equals("null") || columnName.equals("default") || columnName.equals("binary")) {
            continue;
        }
        JobConf conf = new JobConf(BASE_CONF);
        String allColumnNames = SCHEMA.getColumns().stream().map(ColumnSchema::getName).collect(Collectors.joining(","));
        conf.set(serdeConstants.LIST_COLUMNS, allColumnNames);
        // Build "column = <original row's value>" as a serialized Hive filter expression.
        PrimitiveTypeInfo hiveType = toHiveType(column.getType(), column.getTypeAttributes());
        ExprNodeDesc columnExpr = new ExprNodeColumnDesc(hiveType, columnName, null, false);
        ExprNodeDesc constantExpr = new ExprNodeConstantDesc(hiveType, ROW.getObject(columnName));
        List<ExprNodeDesc> equalityArgs = Lists.newArrayList();
        equalityArgs.add(columnExpr);
        equalityArgs.add(constantExpr);
        ExprNodeGenericFuncDesc equalityExpr = new ExprNodeGenericFuncDesc(hiveType, new GenericUDFOPEqual(), equalityArgs);
        conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, SerializationUtilities.serializeExpression(equalityExpr));
        // The predicate must yield exactly one split containing exactly the original row.
        InputSplit[] splits = inputFormat.getSplits(conf, 1);
        assertEquals(1, splits.length);
        KuduInputSplit kuduSplit = (KuduInputSplit) splits[0];
        KuduRecordReader recordReader = (KuduRecordReader) inputFormat.getRecordReader(kuduSplit, conf, null);
        assertTrue(recordReader.nextKeyValue());
        RowResult matchedRow = recordReader.getCurrentValue().getRowResult();
        verfiyRow(matchedRow);
        assertFalse("Extra row on column: " + columnName, recordReader.nextKeyValue());
    }
}
Also used : ExprNodeConstantDesc(org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc) KuduSession(org.apache.kudu.client.KuduSession) PartialRow(org.apache.kudu.client.PartialRow) ExprNodeGenericFuncDesc(org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) KuduTable(org.apache.kudu.client.KuduTable) ColumnSchema(org.apache.kudu.ColumnSchema) CoreMatchers.containsString(org.hamcrest.CoreMatchers.containsString) Insert(org.apache.kudu.client.Insert) Timestamp(java.sql.Timestamp) BigDecimal(java.math.BigDecimal) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) RowResult(org.apache.kudu.client.RowResult) KuduRecordReader(org.apache.hadoop.hive.kudu.KuduInputFormat.KuduRecordReader) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) GenericUDFOPEqual(org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) JobConf(org.apache.hadoop.mapred.JobConf) KuduInputSplit(org.apache.hadoop.hive.kudu.KuduInputFormat.KuduInputSplit) InputSplit(org.apache.hadoop.mapred.InputSplit) KuduInputSplit(org.apache.hadoop.hive.kudu.KuduInputFormat.KuduInputSplit) Test(org.junit.Test)

Example 10 with RowResult

use of org.apache.kudu.client.RowResult in project apex-malhar by apache.

In class KuduClientTestCommons, the method lookUpAndPopulateRecord:

protected void lookUpAndPopulateRecord(UnitTestTablePojo keyInfo) throws Exception {
    // Look up the single row matching the POJO's composite primary key
    // (introwkey, stringrowkey, timestamprowkey) and copy its value columns into the POJO.
    KuduScanner scanner = kuduClient.newScannerBuilder(kuduTable).addPredicate(KuduPredicate.newComparisonPredicate(columnDefs.get("introwkey"), KuduPredicate.ComparisonOp.EQUAL, keyInfo.getIntrowkey())).addPredicate(KuduPredicate.newComparisonPredicate(columnDefs.get("stringrowkey"), KuduPredicate.ComparisonOp.EQUAL, keyInfo.getStringrowkey())).addPredicate(KuduPredicate.newComparisonPredicate(columnDefs.get("timestamprowkey"), KuduPredicate.ComparisonOp.EQUAL, keyInfo.getTimestamprowkey())).build();
    try {
        RowResultIterator rowResultItr = scanner.nextRows();
        while (rowResultItr.hasNext()) {
            RowResult thisRow = rowResultItr.next();
            keyInfo.setFloatdata(thisRow.getFloat("floatdata"));
            keyInfo.setBooldata(thisRow.getBoolean("booldata"));
            keyInfo.setBinarydata(thisRow.getBinary("binarydata"));
            keyInfo.setLongdata(thisRow.getLong("longdata"));
            keyInfo.setTimestampdata(thisRow.getLong("timestampdata"));
            // Fix: read the "stringdata" column from the row; the original stored the
            // literal column name instead of the row's value.
            keyInfo.setStringdata(thisRow.getString("stringdata"));
            // The key predicates match at most one row; stop after the first.
            break;
        }
    } finally {
        // Close in finally so the scanner is released even if iteration throws.
        scanner.close();
    }
}
Also used : RowResult(org.apache.kudu.client.RowResult) KuduScanner(org.apache.kudu.client.KuduScanner) RowResultIterator(org.apache.kudu.client.RowResultIterator)

Aggregations

RowResult (org.apache.kudu.client.RowResult)14 KuduScanner (org.apache.kudu.client.KuduScanner)10 RowResultIterator (org.apache.kudu.client.RowResultIterator)8 ArrayList (java.util.ArrayList)7 ColumnSchema (org.apache.kudu.ColumnSchema)5 JobConf (org.apache.hadoop.mapred.JobConf)4 KuduException (org.apache.kudu.client.KuduException)4 KuduTable (org.apache.kudu.client.KuduTable)4 PartialRow (org.apache.kudu.client.PartialRow)4 Test (org.junit.Test)4 IOException (java.io.IOException)3 GoraException (org.apache.gora.util.GoraException)3 KuduInputSplit (org.apache.hadoop.hive.kudu.KuduInputFormat.KuduInputSplit)3 KuduRecordReader (org.apache.hadoop.hive.kudu.KuduInputFormat.KuduRecordReader)3 InputSplit (org.apache.hadoop.mapred.InputSplit)3 KuduPredicate (org.apache.kudu.client.KuduPredicate)3 KuduSession (org.apache.kudu.client.KuduSession)3 CoreMatchers.containsString (org.hamcrest.CoreMatchers.containsString)3 BigDecimal (java.math.BigDecimal)2 Timestamp (java.sql.Timestamp)2