Use of org.apache.hadoop.hive.serde2.SerDeException in project hive by apache.
Class S3LogDeserializer, method deserialize.
/**
 * Parses one S3 access-log line into the reusable struct {@code c}.
 *
 * <p>Fix over the previous version: the result of {@code Matcher.matches()} was
 * ignored and only the first two {@code group()} calls were guarded, so a
 * non-matching row escaped as a raw {@code IllegalStateException} from the later
 * extractions, losing the "did not match" diagnostic. We now fail fast when the
 * row does not match and wrap any extraction failure in a SerDeException.
 *
 * @param c   reusable output struct whose fields are overwritten in place
 * @param row one raw log line
 * @return the same struct instance {@code c}, populated
 * @throws Exception SerDeException when {@code row} does not match the expected
 *                   S3 log format (signature kept broad for caller compatibility)
 */
public static Object deserialize(S3LogStruct c, String row) throws Exception {
  Matcher match = regexpat.matcher(row);
  // Fail fast with a useful message instead of letting group() blow up later.
  if (!match.matches()) {
    throw new SerDeException("S3 Log Regex did not match:" + row);
  }
  int t = 1;
  try {
    // Fields appear in the fixed order of the S3 server access-log format;
    // t advances through the capture groups one field at a time.
    c.bucketowner = match.group(t++);
    c.bucketname = match.group(t++);
    c.rdatetime = match.group(t++);
    // Should we convert the datetime to the format Hive understands by default
    // - either yyyy-mm-dd HH:MM:SS or seconds since epoch?
    c.rip = match.group(t++);
    c.requester = match.group(t++);
    c.requestid = match.group(t++);
    c.operation = match.group(t++);
    c.rkey = match.group(t++);
    c.requesturi = match.group(t++);
    c.httpstatus = toInt(match.group(t++));
    c.errorcode = match.group(t++);
    c.bytessent = toInt(match.group(t++));
    c.objsize = toInt(match.group(t++));
    c.totaltime = toInt(match.group(t++));
    c.turnaroundtime = toInt(match.group(t++));
    c.referer = match.group(t++);
    c.useragent = match.group(t++);
  } catch (Exception e) {
    // Any extraction/conversion failure is reported with the offending row.
    throw new SerDeException("S3 Log Regex did not match:" + row, e);
  }
  return c;
}
Use of org.apache.hadoop.hive.serde2.SerDeException in project hive by apache.
Class MapJoinBytesTableContainer, method createInternalOi.
/**
 * Builds a LazyBinary struct object inspector that mirrors the schema of the
 * value serde in {@code valCtx}, for reading back values serialized with LBSerDe.
 *
 * @param valCtx serde context whose struct OI supplies the field names and types
 * @return a LazyBinary struct OI with one lazy-binary inspector per value field
 * @throws SerDeException declared for signature compatibility with callers
 */
private LazyBinaryStructObjectInspector createInternalOi(MapJoinObjectSerDeContext valCtx) throws SerDeException {
  StructObjectInspector valueOi = (StructObjectInspector) valCtx.getSerDe().getObjectInspector();
  List<? extends StructField> fieldRefs = valueOi.getAllStructFieldRefs();
  List<String> names = new ArrayList<String>(fieldRefs.size());
  List<ObjectInspector> inspectors = new ArrayList<ObjectInspector>(fieldRefs.size());
  for (StructField fieldRef : fieldRefs) {
    names.add(fieldRef.getFieldName());
    // The OI does not expose TypeInfo directly, so round-trip through the type name.
    TypeInfo fieldType =
        TypeInfoUtils.getTypeInfoFromTypeString(fieldRef.getFieldObjectInspector().getTypeName());
    inspectors.add(LazyBinaryUtils.getLazyBinaryObjectInspectorFromTypeInfo(fieldType));
  }
  return LazyBinaryObjectInspectorFactory.getLazyBinaryStructObjectInspector(names, inspectors);
}
Use of org.apache.hadoop.hive.serde2.SerDeException in project hive by apache.
Class MapJoinEagerRowContainer, method write.
/**
 * Serializes every row in this container to {@code out}, preceded by the row count.
 *
 * <p>After writing, verifies both that the container size did not change mid-write
 * (concurrent modification) and that the number of rows emitted matches the count
 * written up front, so readers can trust the length prefix.
 *
 * @param context supplies the serde and the standard object inspector for rows
 * @param out     destination stream
 * @throws IOException    on stream failures
 * @throws SerDeException on row serialization failures
 */
@Override
public void write(MapJoinObjectSerDeContext context, ObjectOutputStream out) throws IOException, SerDeException {
  AbstractSerDe rowSerDe = context.getSerDe();
  ObjectInspector standardOi = context.getStandardOI();
  long expectedRows = rowCount();
  out.writeLong(expectedRows);
  long written = 0L;
  List<Object> row = first();
  while (row != null) {
    rowSerDe.serialize(row.toArray(), standardOi).write(out);
    written++;
    row = next();
  }
  if (expectedRows != rowCount()) {
    throw new ConcurrentModificationException("Values was modified while persisting");
  }
  if (written != expectedRows) {
    throw new IllegalStateException("Expected to write " + expectedRows + " but wrote " + written);
  }
}
Use of org.apache.hadoop.hive.serde2.SerDeException in project hive by apache.
Class MapJoinKey, method read.
/**
 * Deserializes {@code writable} with the context's serde and wraps it in a
 * {@link MapJoinKeyObject}.
 *
 * <p>NOTE(review): the {@code output} parameter is not used here — presumably kept
 * for signature compatibility with other read paths; confirm against callers.
 *
 * @param output   unused (see note above)
 * @param context  supplies the key serde and its object inspector
 * @param writable serialized key bytes
 * @return a populated MapJoinKeyObject
 * @throws SerDeException on deserialization failure
 * @throws HiveException  propagated from MapJoinKeyObject.read
 */
@SuppressWarnings("deprecation")
public static MapJoinKey read(Output output, MapJoinObjectSerDeContext context, Writable writable) throws SerDeException, HiveException {
  AbstractSerDe keySerDe = context.getSerDe();
  Object deserialized = keySerDe.deserialize(writable);
  MapJoinKeyObject key = new MapJoinKeyObject();
  key.read(keySerDe.getObjectInspector(), deserialized);
  return key;
}
Use of org.apache.hadoop.hive.serde2.SerDeException in project hive by apache.
Class MapJoinTableContainerSerDe, method persist.
/**
 * Writes the table container to {@code out}: class name, metadata, key count,
 * then each key/value pair via the configured key and value serde contexts.
 *
 * <p>Serde and I/O failures are rewrapped as {@link HiveException}. After the
 * write completes, the container size is re-checked so concurrent modification
 * during persistence is detected rather than silently producing a short stream.
 *
 * @param out            destination stream
 * @param tableContainer container to persist
 * @throws HiveException on serde or I/O failure during persistence
 */
public void persist(ObjectOutputStream out, MapJoinPersistableTableContainer tableContainer) throws HiveException {
  final int expectedKeys = tableContainer.size();
  try {
    out.writeUTF(tableContainer.getClass().getName());
    out.writeObject(tableContainer.getMetaData());
    out.writeInt(expectedKeys);
    for (Map.Entry<MapJoinKey, MapJoinRowContainer> pair : tableContainer.entrySet()) {
      pair.getKey().write(keyContext, out);
      pair.getValue().write(valueContext, out);
    }
  } catch (SerDeException e) {
    throw new HiveException("SerDe error while attempting to persist table container", e);
  } catch (IOException e) {
    throw new HiveException("IO error while attempting to persist table container", e);
  }
  if (expectedKeys != tableContainer.size()) {
    throw new ConcurrentModificationException("TableContainer was modified while persisting: " + tableContainer);
  }
}
Aggregations