Use of org.apache.hadoop.hbase.io.ImmutableBytesWritable in project phoenix by apache.
From the class PDataTypeForArraysTest, the method testPositionSearchWithVarLengthArrayWithNullValue5:
@Test
public void testPositionSearchWithVarLengthArrayWithNullValue5() {
    String[] strArr = new String[5];
    strArr[0] = "abx";
    strArr[1] = "ereref";
    strArr[2] = "random";
    strArr[3] = null;
    strArr[4] = "ran";
    PhoenixArray arr = PArrayDataType.instantiatePhoenixArray(PVarchar.INSTANCE, strArr);
    byte[] bytes = PVarcharArray.INSTANCE.toBytes(arr);
    ImmutableBytesWritable ptr = new ImmutableBytesWritable(bytes);
    PArrayDataTypeDecoder.positionAtArrayElement(ptr, 3, PVarchar.INSTANCE, PVarchar.INSTANCE.getByteSize());
    int offset = ptr.getOffset();
    int length = ptr.getLength();
    byte[] bs = ptr.get();
    byte[] res = new byte[length];
    System.arraycopy(bs, offset, res, 0, length);
    assertEquals("", Bytes.toString(res));
}
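The manual offset/length copy at the end of the test can be written more compactly with ImmutableBytesWritable.copyBytes(), which copies exactly the window the pointer currently spans; a minimal sketch (the helper name is illustrative, not part of the Phoenix tests):

// Hypothetical helper: returns the element currently pointed at by ptr as a String.
private static String elementAsString(ImmutableBytesWritable ptr) {
    byte[] res = ptr.copyBytes(); // copies bytes[getOffset() .. getOffset() + getLength())
    return Bytes.toString(res);
}
// A null VARCHAR element positions ptr on an empty window, so this returns "", as the test asserts.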
Use of org.apache.hadoop.hbase.io.ImmutableBytesWritable in project phoenix by apache.
From the class PDataTypeForArraysTest, the method testPositionAtArrayElementWithDescArray:
@Test
public void testPositionAtArrayElementWithDescArray() {
    Object[] objects = new Object[] { "a", "b", null };
    PhoenixArray arr = new PhoenixArray(PVarchar.INSTANCE, objects);
    byte[] bytes = PVarcharArray.INSTANCE.toBytes(arr, PVarchar.INSTANCE, SortOrder.DESC);
    ImmutableBytesWritable ptr = new ImmutableBytesWritable(bytes);
    PArrayDataTypeDecoder.positionAtArrayElement(ptr, 2, PVarchar.INSTANCE, null);
    String value = (String) PVarchar.INSTANCE.toObject(ptr, SortOrder.DESC);
    assertEquals(null, value);
}
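The same decode calls work for every index of the DESC-encoded array, not just the null element; a hedged sketch that iterates all three positions using only the APIs the test already exercises:

// Walk each element of the DESC-encoded array; the trailing null decodes back to Java null.
for (int i = 0; i < objects.length; i++) {
    ImmutableBytesWritable elemPtr = new ImmutableBytesWritable(bytes);
    PArrayDataTypeDecoder.positionAtArrayElement(elemPtr, i, PVarchar.INSTANCE, null);
    String element = (String) PVarchar.INSTANCE.toObject(elemPtr, SortOrder.DESC);
    System.out.println(i + " -> " + element); // prints "a", "b", then null
}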
Use of org.apache.hadoop.hbase.io.ImmutableBytesWritable in project honeycomb by altamiracorp.
From the class BulkLoadMapper, the method map:
@Override
public void map(LongWritable offset, Text line, Context context) {
    try {
        Row row = rowParser.parseRow(line.toString());
        List<Put> puts = mutationFactory.insert(tableId, row);
        for (Put put : puts) {
            context.write(new ImmutableBytesWritable(put.getRow()), put);
        }
        context.getCounter(Counters.ROWS).increment(1);
        context.getCounter(Counters.PUTS).increment(puts.size());
    } catch (IOException e) {
        LOG.error("CSVParser unable to parse line: " + line.toString(), e);
        context.getCounter(Counters.FAILED_ROWS).increment(1);
    } catch (IllegalArgumentException e) {
        LOG.error(format("The line %s was incorrectly formatted. Error %s", line.toString(), e.getMessage()));
        context.getCounter(Counters.FAILED_ROWS).increment(1);
    } catch (ParseException e) {
        LOG.error(format("Parsing failed on line %s with message %s", line.toString(), e.getMessage()));
        context.getCounter(Counters.FAILED_ROWS).increment(1);
    } catch (Exception e) {
        LOG.error(format("The following error %s occurred during mapping" + " for line %s", e.getMessage(), line.toString()));
        context.getCounter(Counters.FAILED_ROWS).increment(1);
    }
}
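The mapper emits (ImmutableBytesWritable, Put) pairs keyed by the Put's row key, which is the shape an HBase bulk-load job expects. A rough driver sketch, assuming an HBase 1.x-style Table and RegionLocator and the standard HFileOutputFormat2 incremental-load setup; the job name and paths are illustrative, not taken from the honeycomb project:

Job job = Job.getInstance(conf, "csv-bulk-load");               // hypothetical job name
job.setJarByClass(BulkLoadMapper.class);
job.setMapperClass(BulkLoadMapper.class);
job.setMapOutputKeyClass(ImmutableBytesWritable.class);
job.setMapOutputValueClass(Put.class);
FileInputFormat.addInputPath(job, new Path(args[0]));           // CSV input path (assumed)
FileOutputFormat.setOutputPath(job, new Path(args[1]));         // HFile staging dir (assumed)
// Wires in the reducer, partitioner and output format that sort Puts into region-aligned HFiles.
HFileOutputFormat2.configureIncrementalLoad(job, table, regionLocator);
job.waitForCompletion(true);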
Use of org.apache.hadoop.hbase.io.ImmutableBytesWritable in project pancm_project by xuwujing.
From the class sparkSqlTest, the method main:
public static void main(String[] args) throws Exception {
    System.out.println("Starting...");
    // System.setProperty("hadoop.home.dir", "E:\\hadoop");
    // System.setProperty("HADOOP_USER_NAME", "root");
    // System.setProperty("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
    SparkSession spark = SparkSession.builder().appName("lcc_java_read_hbase_register_to_table").master("local[*]").getOrCreate();
    JavaSparkContext context = new JavaSparkContext(spark.sparkContext());
    Configuration conf = HBaseConfiguration.create();
    conf.set("hbase.zookeeper.property.clientPort", "2181");
    conf.set("hbase.zookeeper.quorum", "192.169.0.25");
    Scan scan = new Scan();
    String tableName = "t_student";
    conf.set(TableInputFormat.INPUT_TABLE, tableName);
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan proto = ProtobufUtil.toScan(scan);
    String scanToString = Base64.encodeBytes(proto.toByteArray());
    conf.set(TableInputFormat.SCAN, scanToString);
    JavaPairRDD<ImmutableBytesWritable, Result> myRDD = context.newAPIHadoopRDD(conf, TableInputFormat.class, ImmutableBytesWritable.class, Result.class);
    JavaRDD<Row> personsRDD = myRDD.map(new Function<Tuple2<ImmutableBytesWritable, Result>, Row>() {
        @Override
        public Row call(Tuple2<ImmutableBytesWritable, Result> tuple) throws Exception {
            System.out.println("====tuple==========" + tuple);
            Result result = tuple._2();
            String rowkey = Bytes.toString(result.getRow());
            String name = Bytes.toString(result.getValue(Bytes.toBytes("lcc_liezu"), Bytes.toBytes("name")));
            String sex = Bytes.toString(result.getValue(Bytes.toBytes("lcc_liezu"), Bytes.toBytes("sex")));
            String age = Bytes.toString(result.getValue(Bytes.toBytes("lcc_liezu"), Bytes.toBytes("age")));
            // Each HBase Result is converted directly into a Spark SQL Row here.
            return RowFactory.create(rowkey, name, sex, age);
        }
    });
    List<StructField> structFields = new ArrayList<StructField>();
    structFields.add(DataTypes.createStructField("id", DataTypes.StringType, true));
    structFields.add(DataTypes.createStructField("name", DataTypes.StringType, true));
    structFields.add(DataTypes.createStructField("sex", DataTypes.StringType, true));
    structFields.add(DataTypes.createStructField("age", DataTypes.StringType, true));
    StructType schema = DataTypes.createStructType(structFields);
    Dataset<Row> stuDf = spark.createDataFrame(personsRDD, schema);
    // stuDf.select("id","name","age").write().mode(SaveMode.Append).parquet("par");
    stuDf.printSchema();
    stuDf.createOrReplaceTempView("Person");
    Dataset<Row> nameDf = spark.sql("select * from Person");
    nameDf.show();
}
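Going the other way, rows from the DataFrame can be written back to HBase as (ImmutableBytesWritable, Put) pairs with saveAsNewAPIHadoopDataset; a rough sketch, assuming a hypothetical target table t_student_copy and the same lcc_liezu column family (none of this is in the original example):

Configuration outConf = HBaseConfiguration.create();
outConf.set(TableOutputFormat.OUTPUT_TABLE, "t_student_copy");   // hypothetical target table
Job outJob = Job.getInstance(outConf);
outJob.setOutputFormatClass(TableOutputFormat.class);
JavaPairRDD<ImmutableBytesWritable, Put> hbasePuts = nameDf.javaRDD().mapToPair(row -> {
    Put put = new Put(Bytes.toBytes(row.getString(0)));          // row key = "id" column
    put.addColumn(Bytes.toBytes("lcc_liezu"), Bytes.toBytes("name"), Bytes.toBytes(row.getString(1)));
    return new Tuple2<>(new ImmutableBytesWritable(put.getRow()), put);
});
hbasePuts.saveAsNewAPIHadoopDataset(outJob.getConfiguration());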
Use of org.apache.hadoop.hbase.io.ImmutableBytesWritable in project phoenix by apache.
From the class ArrayRemoveFunction, the method modifierFunction:
@Override
protected boolean modifierFunction(ImmutableBytesWritable ptr, int length, int offset, byte[] arrayBytes, PDataType baseType, int arrayLength, Integer maxLength, Expression arrayExp) {
    SortOrder sortOrder = arrayExp.getSortOrder();
    if (ptr.getLength() == 0 || arrayBytes.length == 0) {
        ptr.set(arrayBytes, offset, length);
        return true;
    }
    PArrayDataTypeEncoder arrayDataTypeEncoder = new PArrayDataTypeEncoder(baseType, sortOrder);
    if (getRHSBaseType().equals(PChar.INSTANCE)) {
        int unpaddedCharLength = StringUtil.getUnpaddedCharLength(ptr.get(), ptr.getOffset(), ptr.getLength(), sortOrder);
        ptr.set(ptr.get(), offset, unpaddedCharLength);
    }
    for (int arrayIndex = 0; arrayIndex < arrayLength; arrayIndex++) {
        ImmutableBytesWritable ptr2 = new ImmutableBytesWritable(arrayBytes, offset, length);
        PArrayDataTypeDecoder.positionAtArrayElement(ptr2, arrayIndex, baseType, maxLength);
        if (baseType.compareTo(ptr2, sortOrder, ptr, sortOrder, baseType) != 0) {
            arrayDataTypeEncoder.appendValue(ptr2.get(), ptr2.getOffset(), ptr2.getLength());
        }
    }
    ptr.set(arrayDataTypeEncoder.encode());
    return true;
}
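ARRAY_REMOVE works by re-encoding every element that does not compare equal to the value held in ptr. The encoder pattern can also be used on its own; a small hedged sketch that builds a VARCHAR array payload from raw element bytes, using only calls that appear in the method above (the element values are illustrative):

// Append each element's bytes, then encode the whole array payload.
PArrayDataTypeEncoder encoder = new PArrayDataTypeEncoder(PVarchar.INSTANCE, SortOrder.ASC);
for (String s : new String[] { "a", "b", "c" }) {
    byte[] elem = PVarchar.INSTANCE.toBytes(s);
    encoder.appendValue(elem, 0, elem.length);
}
byte[] encodedArray = encoder.encode();
// encodedArray can be wrapped in an ImmutableBytesWritable and decoded element by element
// with PArrayDataTypeDecoder.positionAtArrayElement, as in the tests earlier in this listing.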