Use of net.ripe.hadoop.pcap.packet.HttpPacket in project hadoop-pcap by RIPE-NCC.
The snippet below is the processPacketPayload method of the HttpPcapReader class. It inspects reassembled TCP payloads on port 80, parses them with Apache HttpCore as either an HTTP response (source port 80) or an HTTP request (destination port 80), and copies the resulting headers onto the HttpPacket.
@Override
protected void processPacketPayload(Packet packet, final byte[] payload) {
    String protocol = (String) packet.get(Packet.PROTOCOL);
    // Only TCP payloads can carry HTTP traffic.
    if (!PcapReader.PROTOCOL_TCP.equals(protocol))
        return;

    HttpPacket httpPacket = (HttpPacket) packet;
    Integer srcPort = (Integer) packet.get(Packet.SRC_PORT);
    Integer dstPort = (Integer) packet.get(Packet.DST_PORT);

    // Only decode payloads of fully reassembled TCP streams to or from port 80.
    if ((HTTP_PORT == srcPort || HTTP_PORT == dstPort) && packet.containsKey(Packet.REASSEMBLED_TCP_FRAGMENTS)) {
        // Feed the raw payload to HttpCore through an in-memory session buffer
        // that always reports data as available, since there is no socket to block on.
        final SessionInputBuffer inBuf = new AbstractSessionInputBuffer() {
            {
                init(new ByteArrayInputStream(payload), 1024, params);
            }

            @Override
            public boolean isDataAvailable(int timeout) throws IOException {
                return true;
            }
        };
        final SessionOutputBuffer outBuf = new AbstractSessionOutputBuffer() {
        };

        if (HTTP_PORT == srcPort) {
            // Traffic coming from port 80: parse it as an HTTP response.
            HttpMessageParser<HttpResponse> parser = new DefaultHttpResponseParser(inBuf, null, respFactory, params);
            // Use a client connection whose open/closed assertions are no-ops,
            // because the buffers are backed by a byte array rather than a real connection.
            HttpClientConnection conn = new DefaultClientConnection() {
                {
                    init(inBuf, outBuf, params);
                }

                @Override
                protected void assertNotOpen() {
                }

                @Override
                protected void assertOpen() {
                }
            };
            try {
                HttpResponse response = parser.parse();
                conn.receiveResponseEntity(response);
                propagateHeaders(httpPacket, response.getAllHeaders());
            } catch (IOException e) {
                LOG.error("IOException when decoding HTTP response", e);
            } catch (HttpException e) {
                LOG.error("HttpException when decoding HTTP response", e);
            }
        } else if (HTTP_PORT == dstPort) {
            // Traffic going to port 80: parse it as an HTTP request.
            HttpMessageParser<HttpRequest> parser = new DefaultHttpRequestParser(inBuf, null, reqFactory, params);
            try {
                HttpRequest request = parser.parse();
                propagateHeaders(httpPacket, request.getAllHeaders());
            } catch (IOException e) {
                LOG.error("IOException when decoding HTTP request", e);
            } catch (HttpException e) {
                LOG.error("HttpException when decoding HTTP request", e);
            }
        }
    }
}
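
For context, here is a minimal, self-contained sketch of the same Apache HttpCore pattern used above, applied to a raw HTTP request held in a byte[] rather than a reassembled TCP payload. It assumes HttpCore 4.2.x (the deprecated HttpParams-based API that the snippet relies on); the class name HttpPayloadSketch, the method parseRequestHeaders, and the sample payload are illustrative only and are not part of hadoop-pcap.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.http.Header;
import org.apache.http.HttpException;
import org.apache.http.HttpRequest;
import org.apache.http.impl.DefaultHttpRequestFactory;
import org.apache.http.impl.io.AbstractSessionInputBuffer;
import org.apache.http.impl.io.DefaultHttpRequestParser;
import org.apache.http.io.HttpMessageParser;
import org.apache.http.io.SessionInputBuffer;
import org.apache.http.params.BasicHttpParams;
import org.apache.http.params.HttpParams;

// Illustrative sketch, not part of hadoop-pcap.
public class HttpPayloadSketch {

    static void parseRequestHeaders(final byte[] payload) throws IOException, HttpException {
        final HttpParams params = new BasicHttpParams();

        // Same trick as in processPacketPayload: wrap the captured payload in a
        // session buffer that always reports data as available, since the whole
        // payload is already in memory.
        SessionInputBuffer inBuf = new AbstractSessionInputBuffer() {
            {
                init(new ByteArrayInputStream(payload), 1024, params);
            }

            @Override
            public boolean isDataAvailable(int timeout) throws IOException {
                return true;
            }
        };

        // Passing null for the LineParser falls back to HttpCore's default line parser.
        HttpMessageParser<HttpRequest> parser =
                new DefaultHttpRequestParser(inBuf, null, new DefaultHttpRequestFactory(), params);

        HttpRequest request = parser.parse();
        System.out.println(request.getRequestLine());
        for (Header header : request.getAllHeaders()) {
            System.out.println(header.getName() + ": " + header.getValue());
        }
    }

    public static void main(String[] args) throws Exception {
        byte[] payload = ("GET /index.html HTTP/1.1\r\n"
                + "Host: example.org\r\n"
                + "User-Agent: sketch\r\n"
                + "\r\n").getBytes(StandardCharsets.US_ASCII);
        parseRequestHeaders(payload);
    }
}

The always-true isDataAvailable override mirrors the reader's own buffer: because the entire payload is in memory, the parser never needs to wait for more bytes from a socket.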