Search in sources :

Example 86 with ImmutableBytesWritable

use of org.apache.hadoop.hbase.io.ImmutableBytesWritable in project phoenix by apache.

the class PDataTypeForArraysTest method testPositionSearchWithVarLengthArrayWithNullValue5.

@Test
public void testPositionSearchWithVarLengthArrayWithNullValue5() {
    String[] strArr = new String[5];
    strArr[0] = "abx";
    strArr[1] = "ereref";
    strArr[2] = "random";
    strArr[3] = null;
    strArr[4] = "ran";
    PhoenixArray arr = PArrayDataType.instantiatePhoenixArray(PVarchar.INSTANCE, strArr);
    byte[] bytes = PVarcharArray.INSTANCE.toBytes(arr);
    ImmutableBytesWritable ptr = new ImmutableBytesWritable(bytes);
    PArrayDataTypeDecoder.positionAtArrayElement(ptr, 3, PVarchar.INSTANCE, PVarchar.INSTANCE.getByteSize());
    int offset = ptr.getOffset();
    int length = ptr.getLength();
    byte[] bs = ptr.get();
    byte[] res = new byte[length];
    System.arraycopy(bs, offset, res, 0, length);
    assertEquals("", Bytes.toString(res));
}
Also used : ImmutableBytesWritable(org.apache.hadoop.hbase.io.ImmutableBytesWritable) PhoenixArray(org.apache.phoenix.schema.types.PhoenixArray) PUnsignedSmallint(org.apache.phoenix.schema.types.PUnsignedSmallint) PUnsignedTinyint(org.apache.phoenix.schema.types.PUnsignedTinyint) PTinyint(org.apache.phoenix.schema.types.PTinyint) PSmallint(org.apache.phoenix.schema.types.PSmallint) Test(org.junit.Test)
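
Index 3 is the null element, so positionAtArrayElement leaves ptr pointing at a zero-length window; the copied array is therefore empty and the decoded string is "". The manual offset/length bookkeeping plus System.arraycopy can also be shortened with ImmutableBytesWritable's copyBytes(), which copies just the [offset, offset + length) window (an equivalent tail for the test above):

// Equivalent to the offset/length bookkeeping and System.arraycopy above.
byte[] res = ptr.copyBytes();
assertEquals("", Bytes.toString(res));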

Example 87 with ImmutableBytesWritable

use of org.apache.hadoop.hbase.io.ImmutableBytesWritable in project phoenix by apache.

the class PDataTypeForArraysTest method testPositionAtArrayElementWithDescArray.

@Test
public void testPositionAtArrayElementWithDescArray() {
    Object[] objects = new Object[] { "a", "b", null };
    PhoenixArray arr = new PhoenixArray(PVarchar.INSTANCE, objects);
    byte[] bytes = PVarcharArray.INSTANCE.toBytes(arr, PVarchar.INSTANCE, SortOrder.DESC);
    ImmutableBytesWritable ptr = new ImmutableBytesWritable(bytes);
    PArrayDataTypeDecoder.positionAtArrayElement(ptr, 2, PVarchar.INSTANCE, null);
    String value = (String) PVarchar.INSTANCE.toObject(ptr, SortOrder.DESC);
    assertEquals(null, value);
}
Also used : ImmutableBytesWritable(org.apache.hadoop.hbase.io.ImmutableBytesWritable) PhoenixArray(org.apache.phoenix.schema.types.PhoenixArray) Test(org.junit.Test)
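
Because the array was serialized with SortOrder.DESC, each element's bytes are stored inverted, which is why SortOrder.DESC is passed back to toObject; index 2 is the trailing null, so the round trip yields null. A non-null element should decode the same way (a sketch extending the test above, not taken from the Phoenix test sources):

// Reposition a fresh pointer at element 0 of the same DESC-encoded array.
ImmutableBytesWritable ptr0 = new ImmutableBytesWritable(bytes);
PArrayDataTypeDecoder.positionAtArrayElement(ptr0, 0, PVarchar.INSTANCE, null);
assertEquals("a", PVarchar.INSTANCE.toObject(ptr0, SortOrder.DESC));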

Example 88 with ImmutableBytesWritable

use of org.apache.hadoop.hbase.io.ImmutableBytesWritable in project honeycomb by altamiracorp.

the class BulkLoadMapper method map.

@Override
public void map(LongWritable offset, Text line, Context context) {
    try {
        Row row = rowParser.parseRow(line.toString());
        List<Put> puts = mutationFactory.insert(tableId, row);
        for (Put put : puts) {
            context.write(new ImmutableBytesWritable(put.getRow()), put);
        }
        context.getCounter(Counters.ROWS).increment(1);
        context.getCounter(Counters.PUTS).increment(puts.size());
    } catch (IOException e) {
        LOG.error("CSVParser unable to parse line: " + line.toString(), e);
        context.getCounter(Counters.FAILED_ROWS).increment(1);
    } catch (IllegalArgumentException e) {
        LOG.error(format("The line %s was incorrectly formatted. Error %s", line.toString(), e.getMessage()));
        context.getCounter(Counters.FAILED_ROWS).increment(1);
    } catch (ParseException e) {
        LOG.error(format("Parsing failed on line %s with message %s", line.toString(), e.getMessage()));
        context.getCounter(Counters.FAILED_ROWS).increment(1);
    } catch (Exception e) {
        LOG.error(format("The following error %s occurred during mapping" + " for line %s", e.getMessage(), line.toString()));
        context.getCounter(Counters.FAILED_ROWS).increment(1);
    }
}
Also used : ImmutableBytesWritable(org.apache.hadoop.hbase.io.ImmutableBytesWritable) Row(com.nearinfinity.honeycomb.mysql.Row) IOException(java.io.IOException) ParseException(java.text.ParseException) Put(org.apache.hadoop.hbase.client.Put)
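
A mapper like this emits (rowkey, Put) pairs, which is the shape HBase's incremental bulk-load machinery expects. For context, here is a minimal driver sketch; the wiring is assumed rather than taken from the honeycomb sources, the table name and paths are hypothetical, and it leans on HBase's HFileOutputFormat2 and the standard Hadoop job classes:

public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    Job job = Job.getInstance(conf, "honeycomb-bulk-load");
    job.setJarByClass(BulkLoadMapper.class);
    job.setMapperClass(BulkLoadMapper.class);
    // BulkLoadMapper writes (ImmutableBytesWritable, Put) pairs.
    job.setMapOutputKeyClass(ImmutableBytesWritable.class);
    job.setMapOutputValueClass(Put.class);
    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));
    try (Connection conn = ConnectionFactory.createConnection(conf);
            Table table = conn.getTable(TableName.valueOf("my_table"));
            RegionLocator locator = conn.getRegionLocator(table.getName())) {
        // Sets the reducer, partitioner and output format so the job emits
        // region-aligned HFiles ready for LoadIncrementalHFiles.
        HFileOutputFormat2.configureIncrementalLoad(job, table, locator);
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}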

Example 89 with ImmutableBytesWritable

use of org.apache.hadoop.hbase.io.ImmutableBytesWritable in project pancm_project by xuwujing.

the class sparkSqlTest method main.

public static void main(String[] args) throws Exception {
    System.out.println("开始...");
    // System.setProperty("hadoop.home.dir", "E:\\hadoop");
    // System.setProperty("HADOOP_USER_NAME", "root");
    // System.setProperty("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
    SparkSession spark = SparkSession.builder().appName("lcc_java_read_hbase_register_to_table").master("local[*]").getOrCreate();
    JavaSparkContext context = new JavaSparkContext(spark.sparkContext());
    Configuration conf = HBaseConfiguration.create();
    conf.set("hbase.zookeeper.property.clientPort", "2181");
    conf.set("hbase.zookeeper.quorum", "192.169.0.25");
    Scan scan = new Scan();
    String tableName = "t_student";
    conf.set(TableInputFormat.INPUT_TABLE, tableName);
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan proto = ProtobufUtil.toScan(scan);
    String scanToString = Base64.encodeBytes(proto.toByteArray());
    conf.set(TableInputFormat.SCAN, scanToString);
    JavaPairRDD<ImmutableBytesWritable, Result> myRDD = context.newAPIHadoopRDD(conf, TableInputFormat.class, ImmutableBytesWritable.class, Result.class);
    JavaRDD<Row> personsRDD = myRDD.map(new Function<Tuple2<ImmutableBytesWritable, Result>, Row>() {

        @Override
        public Row call(Tuple2<ImmutableBytesWritable, Result> tuple) throws Exception {
            System.out.println("====tuple==========" + tuple);
            Result result = tuple._2();
            String rowkey = Bytes.toString(result.getRow());
            String name = Bytes.toString(result.getValue(Bytes.toBytes("lcc_liezu"), Bytes.toBytes("name")));
            String sex = Bytes.toString(result.getValue(Bytes.toBytes("lcc_liezu"), Bytes.toBytes("sex")));
            String age = Bytes.toString(result.getValue(Bytes.toBytes("lcc_liezu"), Bytes.toBytes("age")));
            // the values can be converted directly into a Row here
            return (Row) RowFactory.create(rowkey, name, sex, age);
        }
    });
    List<StructField> structFields = new ArrayList<StructField>();
    structFields.add(DataTypes.createStructField("id", DataTypes.StringType, true));
    structFields.add(DataTypes.createStructField("name", DataTypes.StringType, true));
    structFields.add(DataTypes.createStructField("sex", DataTypes.StringType, true));
    structFields.add(DataTypes.createStructField("age", DataTypes.StringType, true));
    StructType schema = DataTypes.createStructType(structFields);
    Dataset<Row> stuDf = spark.createDataFrame(personsRDD, schema);
    // stuDf.select("id","name","age").write().mode(SaveMode.Append).parquet("par");
    stuDf.printSchema();
    stuDf.createOrReplaceTempView("Person");
    Dataset<Row> nameDf = spark.sql("select * from Person ");
    nameDf.show();
}
Also used : SparkSession(org.apache.spark.sql.SparkSession) HBaseConfiguration(org.apache.hadoop.hbase.HBaseConfiguration) Configuration(org.apache.hadoop.conf.Configuration) StructType(org.apache.spark.sql.types.StructType) ArrayList(java.util.ArrayList) Result(org.apache.hadoop.hbase.client.Result) StructField(org.apache.spark.sql.types.StructField) JavaSparkContext(org.apache.spark.api.java.JavaSparkContext) ImmutableBytesWritable(org.apache.hadoop.hbase.io.ImmutableBytesWritable) Dataset(org.apache.spark.sql.Dataset) Tuple2(scala.Tuple2) Scan(org.apache.hadoop.hbase.client.Scan) Row(org.apache.spark.sql.Row)
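
The ProtobufUtil/Base64 round trip above is what HBase's own helper performs, so the scan serialization can be collapsed to a single call (assuming org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil is available in this HBase version):

// Equivalent to the ProtobufUtil.toScan(...) / Base64.encodeBytes(...) pair above.
conf.set(TableInputFormat.SCAN, TableMapReduceUtil.convertScanToString(scan));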

Example 90 with ImmutableBytesWritable

use of org.apache.hadoop.hbase.io.ImmutableBytesWritable in project phoenix by apache.

the class ArrayRemoveFunction method modifierFunction.

@Override
protected boolean modifierFunction(ImmutableBytesWritable ptr, int length, int offset, byte[] arrayBytes, PDataType baseType, int arrayLength, Integer maxLength, Expression arrayExp) {
    SortOrder sortOrder = arrayExp.getSortOrder();
    if (ptr.getLength() == 0 || arrayBytes.length == 0) {
        ptr.set(arrayBytes, offset, length);
        return true;
    }
    PArrayDataTypeEncoder arrayDataTypeEncoder = new PArrayDataTypeEncoder(baseType, sortOrder);
    if (getRHSBaseType().equals(PChar.INSTANCE)) {
        int unpaddedCharLength = StringUtil.getUnpaddedCharLength(ptr.get(), ptr.getOffset(), ptr.getLength(), sortOrder);
        // trim CHAR padding using the value's own offset, not the enclosing array's
        ptr.set(ptr.get(), ptr.getOffset(), unpaddedCharLength);
    }
    for (int arrayIndex = 0; arrayIndex < arrayLength; arrayIndex++) {
        ImmutableBytesWritable ptr2 = new ImmutableBytesWritable(arrayBytes, offset, length);
        PArrayDataTypeDecoder.positionAtArrayElement(ptr2, arrayIndex, baseType, maxLength);
        if (baseType.compareTo(ptr2, sortOrder, ptr, sortOrder, baseType) != 0) {
            arrayDataTypeEncoder.appendValue(ptr2.get(), ptr2.getOffset(), ptr2.getLength());
        }
    }
    ptr.set(arrayDataTypeEncoder.encode());
    return true;
}
Also used : PArrayDataTypeEncoder(org.apache.phoenix.schema.types.PArrayDataTypeEncoder) ImmutableBytesWritable(org.apache.hadoop.hbase.io.ImmutableBytesWritable) SortOrder(org.apache.phoenix.schema.SortOrder)
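
The loop re-encodes every element whose bytes compare unequal to the value held in ptr and skips every match, so all occurrences of the target are removed, not just the first; this is the server-side workhorse behind Phoenix's ARRAY_REMOVE built-in (so ARRAY_REMOVE(ARRAY['a','b','a'], 'a') would be expected to yield ['b']). A fresh ImmutableBytesWritable is created on each iteration because positionAtArrayElement mutates the pointer it is handed.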

Aggregations

ImmutableBytesWritable (org.apache.hadoop.hbase.io.ImmutableBytesWritable): 296
Test (org.junit.Test): 86
Expression (org.apache.phoenix.expression.Expression): 36
IOException (java.io.IOException): 33
PhoenixArray (org.apache.phoenix.schema.types.PhoenixArray): 30
ArrayList (java.util.ArrayList): 28
Configuration (org.apache.hadoop.conf.Configuration): 28
Result (org.apache.hadoop.hbase.client.Result): 28
Cell (org.apache.hadoop.hbase.Cell): 27
KeyValue (org.apache.hadoop.hbase.KeyValue): 27
LiteralExpression (org.apache.phoenix.expression.LiteralExpression): 27
PTable (org.apache.phoenix.schema.PTable): 27
PDataType (org.apache.phoenix.schema.types.PDataType): 26
PSmallint (org.apache.phoenix.schema.types.PSmallint): 25
PTinyint (org.apache.phoenix.schema.types.PTinyint): 23
Put (org.apache.hadoop.hbase.client.Put): 20
PUnsignedSmallint (org.apache.phoenix.schema.types.PUnsignedSmallint): 20
PUnsignedTinyint (org.apache.phoenix.schema.types.PUnsignedTinyint): 20
PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection): 19
List (java.util.List): 18