Use of org.apache.avro.util.Utf8 in project eiger (by wlloyd) — class CFMetaData, method toAvro:
// converts CFM to avro CfDef
public org.apache.cassandra.db.migration.avro.CfDef toAvro() {
org.apache.cassandra.db.migration.avro.CfDef cf = new org.apache.cassandra.db.migration.avro.CfDef();
cf.id = cfId;
cf.keyspace = new Utf8(ksName);
cf.name = new Utf8(cfName);
cf.column_type = new Utf8(cfType.name());
cf.comparator_type = new Utf8(comparator.toString());
if (subcolumnComparator != null) {
assert cfType == ColumnFamilyType.Super : String.format("%s CF %s should not have subcomparator %s defined", cfType, cfName, subcolumnComparator);
cf.subcomparator_type = new Utf8(subcolumnComparator.toString());
}
cf.comment = new Utf8(enforceCommentNotNull(comment));
cf.read_repair_chance = readRepairChance;
cf.replicate_on_write = replicateOnWrite;
cf.gc_grace_seconds = gcGraceSeconds;
cf.default_validation_class = defaultValidator == null ? null : new Utf8(defaultValidator.toString());
cf.key_validation_class = new Utf8(keyValidator.toString());
cf.min_compaction_threshold = minCompactionThreshold;
cf.max_compaction_threshold = maxCompactionThreshold;
cf.merge_shards_chance = mergeShardsChance;
cf.key_alias = keyAlias;
cf.column_metadata = new ArrayList<ColumnDef>(column_metadata.size());
for (ColumnDefinition cd : column_metadata.values()) cf.column_metadata.add(cd.toAvro());
cf.compaction_strategy = new Utf8(compactionStrategyClass.getName());
if (compactionStrategyOptions != null) {
cf.compaction_strategy_options = new HashMap<CharSequence, CharSequence>();
for (Map.Entry<String, String> e : compactionStrategyOptions.entrySet()) cf.compaction_strategy_options.put(new Utf8(e.getKey()), new Utf8(e.getValue()));
}
cf.compression_options = compressionParameters.asAvroOptions();
cf.bloom_filter_fp_chance = bloomFilterFpChance;
cf.caching = new Utf8(caching.toString());
return cf;
}
Use of org.apache.avro.util.Utf8 in project eiger (by wlloyd) — class ColumnDefinition, method toAvro:
public org.apache.cassandra.db.migration.avro.ColumnDef toAvro() {
org.apache.cassandra.db.migration.avro.ColumnDef cd = new org.apache.cassandra.db.migration.avro.ColumnDef();
cd.name = ByteBufferUtil.clone(name);
cd.validation_class = new Utf8(validator.toString());
cd.index_type = index_type == null ? null : org.apache.cassandra.db.migration.avro.IndexType.valueOf(index_type.name());
cd.index_name = index_name == null ? null : new Utf8(index_name);
cd.index_options = getCharSequenceMap(index_options);
return cd;
}
Use of org.apache.avro.util.Utf8 in project drill (by apache) — class AvroRecordReader, method processPrimitive:
private void processPrimitive(final Object value, final Schema.Type type, final String fieldName, final MapOrListWriterImpl writer) {
if (value == null) {
return;
}
switch(type) {
case STRING:
byte[] binary = null;
final int length;
if (value instanceof Utf8) {
binary = ((Utf8) value).getBytes();
length = ((Utf8) value).getByteLength();
} else {
binary = value.toString().getBytes(Charsets.UTF_8);
length = binary.length;
}
ensure(length);
buffer.setBytes(0, binary);
writer.varChar(fieldName).writeVarChar(0, length, buffer);
break;
case INT:
writer.integer(fieldName).writeInt((Integer) value);
break;
case LONG:
writer.bigInt(fieldName).writeBigInt((Long) value);
break;
case FLOAT:
writer.float4(fieldName).writeFloat4((Float) value);
break;
case DOUBLE:
writer.float8(fieldName).writeFloat8((Double) value);
break;
case BOOLEAN:
writer.bit(fieldName).writeBit((Boolean) value ? 1 : 0);
break;
case BYTES:
final ByteBuffer buf = (ByteBuffer) value;
length = buf.remaining();
ensure(length);
buffer.setBytes(0, buf);
writer.binary(fieldName).writeVarBinary(0, length, buffer);
break;
case NULL:
// Nothing to do for null type
break;
case ENUM:
final String symbol = value.toString();
final byte[] b;
try {
b = symbol.getBytes("UTF-8");
} catch (UnsupportedEncodingException e) {
throw new DrillRuntimeException("Unable to read enum value for field: " + fieldName, e);
}
ensure(b.length);
buffer.setBytes(0, b);
writer.varChar(fieldName).writeVarChar(0, b.length, buffer);
break;
default:
throw new DrillRuntimeException("Unhandled Avro type: " + type.toString());
}
}
Use of org.apache.avro.util.Utf8 in project gora (by apache) — class SolrStore, method deserializeFieldValue:
@SuppressWarnings("unchecked")
private Object deserializeFieldValue(Field field, Schema fieldSchema, Object solrValue, T persistent) throws IOException {
Object fieldValue = null;
switch(fieldSchema.getType()) {
case MAP:
case ARRAY:
case RECORD:
@SuppressWarnings("rawtypes") SpecificDatumReader reader = getDatumReader(fieldSchema);
fieldValue = IOUtils.deserialize((byte[]) solrValue, reader, persistent.get(field.pos()));
break;
case ENUM:
fieldValue = AvroUtils.getEnumValue(fieldSchema, (String) solrValue);
break;
case FIXED:
throw new IOException("???");
// break;
case BYTES:
fieldValue = ByteBuffer.wrap((byte[]) solrValue);
break;
case STRING:
fieldValue = new Utf8(solrValue.toString());
break;
case UNION:
if (fieldSchema.getTypes().size() == 2 && isNullable(fieldSchema)) {
// schema [type0, type1]
Type type0 = fieldSchema.getTypes().get(0).getType();
Type type1 = fieldSchema.getTypes().get(1).getType();
// ["null","type"] or ["type","null"]
if (!type0.equals(type1)) {
if (type0.equals(Schema.Type.NULL))
fieldSchema = fieldSchema.getTypes().get(1);
else
fieldSchema = fieldSchema.getTypes().get(0);
} else {
fieldSchema = fieldSchema.getTypes().get(0);
}
fieldValue = deserializeFieldValue(field, fieldSchema, solrValue, persistent);
} else {
@SuppressWarnings("rawtypes") SpecificDatumReader unionReader = getDatumReader(fieldSchema);
fieldValue = IOUtils.deserialize((byte[]) solrValue, unionReader, persistent.get(field.pos()));
break;
}
break;
default:
fieldValue = solrValue;
}
return fieldValue;
}
Use of org.apache.avro.util.Utf8 in project gora (by apache) — class DistributedLogManager, method parseLine:
/**
* Parses a single log line in combined log format using StringTokenizers
*/
private Pageview parseLine(String line) throws ParseException {
StringTokenizer matcher = new StringTokenizer(line);
//parse the log line
String ip = matcher.nextToken();
//discard
matcher.nextToken();
matcher.nextToken();
long timestamp = dateFormat.parse(matcher.nextToken("]").substring(2)).getTime();
matcher.nextToken("\"");
String request = matcher.nextToken("\"");
String[] requestParts = request.split(" ");
String httpMethod = requestParts[0];
String url = requestParts[1];
matcher.nextToken(" ");
int httpStatusCode = Integer.parseInt(matcher.nextToken());
int responseSize = Integer.parseInt(matcher.nextToken());
matcher.nextToken("\"");
String referrer = matcher.nextToken("\"");
matcher.nextToken("\"");
String userAgent = matcher.nextToken("\"");
//construct and return pageview object
Pageview pageview = new Pageview();
pageview.setIp(new Utf8(ip));
pageview.setTimestamp(timestamp);
pageview.setHttpMethod(new Utf8(httpMethod));
pageview.setUrl(new Utf8(url));
pageview.setHttpStatusCode(httpStatusCode);
pageview.setResponseSize(responseSize);
pageview.setReferrer(new Utf8(referrer));
pageview.setUserAgent(new Utf8(userAgent));
return pageview;
}
Aggregations