Use of org.graylog.shaded.elasticsearch7.org.elasticsearch.action.index.IndexRequest in project pancm_project by xuwujing.
The class EsHighLevelRestTest1, method insert.
private static void insert() throws IOException {
    String index = "test1";
    String type = "_doc";
    // Unique document id
    String id = "1";
    IndexRequest request = new IndexRequest(index, type, id);
    /*
     * Option 1: build the document source from a JSON string.
     */
    String jsonString = "{" + "\"uid\":\"1234\"," + "\"phone\":\"12345678909\"," + "\"msgcode\":\"1\","
            + "\"sendtime\":\"2019-03-14 01:57:04\"," + "\"message\":\"xuwujing study Elasticsearch\"" + "}";
    request.source(jsonString, XContentType.JSON);
    /*
     * Option 2: build the source from a Map, which is converted to JSON automatically.
     */
    Map<String, Object> jsonMap = new HashMap<>();
    jsonMap.put("uid", 1234);
    jsonMap.put("phone", 12345678909L);
    jsonMap.put("msgcode", 1);
    jsonMap.put("sendtime", "2019-03-14 01:57:04");
    jsonMap.put("message", "xuwujing study Elasticsearch");
    request.source(jsonMap);
    /*
     * Option 3: build the source with an XContentBuilder. Note that each call to
     * request.source(...) replaces the previous source, so only this last one is indexed.
     */
    XContentBuilder builder = XContentFactory.jsonBuilder();
    builder.startObject();
    {
        builder.field("uid", 1234);
        builder.field("phone", 12345678909L);
        builder.field("msgcode", 1);
        builder.timeField("sendtime", "2019-03-14 01:57:04");
        builder.field("message", "xuwujing study Elasticsearch");
    }
    builder.endObject();
    request.source(builder);
    IndexResponse indexResponse = client.index(request, RequestOptions.DEFAULT);
    // A status of 200 indicates success; anything else is a failure.
    if (200 == indexResponse.status().getStatus()) {
        // Success.
    }
    // Inspect the response.
    String index1 = indexResponse.getIndex();
    String type1 = indexResponse.getType();
    String id1 = indexResponse.getId();
    long version = indexResponse.getVersion();
    // Distinguish a newly created document from an overwrite of an existing one.
    if (indexResponse.getResult() == DocWriteResponse.Result.CREATED) {
        // The document was created.
    } else if (indexResponse.getResult() == DocWriteResponse.Result.UPDATED) {
        // An existing document was overwritten.
    }
    ReplicationResponse.ShardInfo shardInfo = indexResponse.getShardInfo();
    if (shardInfo.getTotal() != shardInfo.getSuccessful()) {
        // Not all shard copies acknowledged the write.
    }
    if (shardInfo.getFailed() > 0) {
        for (ReplicationResponse.ShardInfo.Failure failure : shardInfo.getFailures()) {
            String reason = failure.reason();
        }
    }
}
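The snippet above relies on an already initialized client field that the excerpt does not show. A minimal sketch of constructing such a RestHighLevelClient, assuming a single Elasticsearch node reachable on localhost:9200 (host and port are illustrative):

import org.apache.http.HttpHost;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;

// A minimal sketch, assuming a single node on localhost:9200.
// The client is thread-safe, meant to be shared, and should be closed on shutdown.
private static RestHighLevelClient client = new RestHighLevelClient(
        RestClient.builder(new HttpHost("localhost", 9200, "http")));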
Use of org.graylog.shaded.elasticsearch7.org.elasticsearch.action.index.IndexRequest in project incubator-sdap-mudrod by apache.
The class CrawlerDetection, method checkByRate.
private int checkByRate(ESDriver es, String user) {
    int rate = Integer.parseInt(props.getProperty(MudrodConstants.REQUEST_RATE));
    Pattern pattern = Pattern.compile("get (.*?) http/*");
    Matcher matcher;
    BoolQueryBuilder filterSearch = new BoolQueryBuilder();
    filterSearch.must(QueryBuilders.termQuery("IP", user));
    AggregationBuilder aggregation = AggregationBuilders.dateHistogram("by_minute").field("Time")
            .dateHistogramInterval(DateHistogramInterval.MINUTE).order(Order.COUNT_DESC);
    SearchResponse checkRobot = es.getClient().prepareSearch(logIndex).setTypes(httpType, ftpType)
            .setQuery(filterSearch).setSize(0).addAggregation(aggregation).execute().actionGet();
    Histogram agg = checkRobot.getAggregations().get("by_minute");
    List<? extends Histogram.Bucket> botList = agg.getBuckets();
    // Buckets are ordered by descending count, so the first bucket holds the busiest minute.
    long maxCount = botList.get(0).getDocCount();
    if (maxCount >= rate) {
        // The busiest minute meets or exceeds the threshold: flag this user and index nothing.
        return 0;
    } else {
        DateTime dt1 = null;
        int toLast = 0;
        SearchResponse scrollResp = es.getClient().prepareSearch(logIndex).setTypes(httpType, ftpType)
                .setScroll(new TimeValue(60000)).setQuery(filterSearch).setSize(100).execute().actionGet();
        while (true) {
            for (SearchHit hit : scrollResp.getHits().getHits()) {
                Map<String, Object> result = hit.getSource();
                String logtype = (String) result.get("LogType");
                if (logtype.equals(MudrodConstants.HTTP_LOG)) {
                    String request = (String) result.get("Request");
                    matcher = pattern.matcher(request.trim().toLowerCase());
                    boolean find = false;
                    while (matcher.find()) {
                        request = matcher.group(1);
                        result.put("RequestUrl", props.getProperty(MudrodConstants.BASE_URL) + request);
                        find = true;
                    }
                    if (!find) {
                        result.put("RequestUrl", request);
                    }
                } else {
                    result.put("RequestUrl", result.get("Request"));
                }
                DateTimeFormatter fmt = ISODateTimeFormat.dateTime();
                DateTime dt2 = fmt.parseDateTime((String) result.get("Time"));
                // Record the number of seconds since the previous request from this user.
                if (dt1 == null) {
                    toLast = 0;
                } else {
                    toLast = Math.abs(Seconds.secondsBetween(dt1, dt2).getSeconds());
                }
                result.put("ToLast", toLast);
                IndexRequest ir = new IndexRequest(logIndex, cleanupType).source(result);
                es.getBulkProcessor().add(ir);
                dt1 = dt2;
            }
            scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId())
                    .setScroll(new TimeValue(600000)).execute().actionGet();
            // Break condition: no more hits are returned.
            if (scrollResp.getHits().getHits().length == 0) {
                break;
            }
        }
    }
    return 1;
}
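The method buffers one IndexRequest per cleaned log record through es.getBulkProcessor(). A minimal sketch of how such a BulkProcessor can be built over a transport-style Client; the flush thresholds below are illustrative, not Mudrod's actual settings:

import org.elasticsearch.action.bulk.BulkProcessor;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;

// A minimal sketch, assuming a transport-style Client named client; thresholds are illustrative.
BulkProcessor bulkProcessor = BulkProcessor.builder(client, new BulkProcessor.Listener() {
    @Override
    public void beforeBulk(long executionId, BulkRequest request) {
        // Called just before a batch is sent.
    }

    @Override
    public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
        if (response.hasFailures()) {
            // Inspect response.buildFailureMessage() for per-item errors.
        }
    }

    @Override
    public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
        // The whole batch failed, e.g. the node was unreachable.
    }
}).setBulkActions(1000)                                 // flush every 1000 requests
  .setBulkSize(new ByteSizeValue(5, ByteSizeUnit.MB))   // or every 5 MB of payload
  .setConcurrentRequests(1)                             // at most one in-flight batch
  .build();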
Use of org.graylog.shaded.elasticsearch7.org.elasticsearch.action.index.IndexRequest in project incubator-sdap-mudrod by apache.
The class ImportLogFile, method parseSingleLineFTP.
/**
* Parse a single FTP log entry
*
* @param log a single log line
* @param index the index name we wish to persist the log line to
* @param type one of the available protocols from which Mudrod logs are obtained.
*/
public void parseSingleLineFTP(String log, String index, String type) {
    // Split the xferlog-style line once instead of re-splitting for every field.
    String[] fields = log.split(" +");
    String ip = fields[6];
    String time = fields[1] + ":" + fields[2] + ":" + fields[3] + ":" + fields[4];
    time = switchtoNum(time);
    SimpleDateFormat formatter = new SimpleDateFormat("MM:dd:HH:mm:ss:yyyy");
    Date date = null;
    try {
        date = formatter.parse(time);
    } catch (ParseException e) {
        LOG.error("Error whilst parsing the date.", e);
    }
    String bytes = fields[7];
    String request = fields[8].toLowerCase();
    if (!request.contains("/misc/") && !request.contains("readme")) {
        IndexRequest ir;
        try {
            ir = new IndexRequest(index, type).source(jsonBuilder().startObject()
                    .field("LogType", MudrodConstants.FTP_LOG)
                    .field("IP", ip)
                    .field("Time", date)
                    .field("Request", request)
                    .field("Bytes", Long.parseLong(bytes))
                    .endObject());
            es.getBulkProcessor().add(ir);
        } catch (NumberFormatException e) {
            LOG.error("Error whilst processing numbers", e);
        } catch (IOException e) {
            LOG.error("IOError whilst adding to the bulk processor.", e);
        }
    }
}
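The field indices above line up with a wu-ftpd xferlog-style entry, and switchtoNum is assumed to convert the month name to its number so the "MM:dd:HH:mm:ss:yyyy" pattern parses. A hedged usage sketch with a made-up log line; the line, instance name, index, and type are illustrative only:

// A made-up xferlog-style line: fields[1..4] = month/day/time/year,
// fields[6] = client IP, fields[7] = bytes transferred, fields[8] = requested path.
String sample = "Thu Mar 14 01:57:04 2019 1 192.0.2.10 4096 /podaac/dataset.nc b _ o a user ftp 0 * c";
// Assumes an initialized ImportLogFile instance; index and type names are illustrative.
importer.parseSingleLineFTP(sample, "mudrod", "rawftp");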
Use of org.graylog.shaded.elasticsearch7.org.elasticsearch.action.index.IndexRequest in project incubator-sdap-mudrod by apache.
The class SessionStatistic, method processSession.
public int processSession(ESDriver es, String sessionId) throws IOException, InterruptedException, ExecutionException {
    String inputType = cleanupType;
    String outputType = sessionStats;
    DateTimeFormatter fmt = ISODateTimeFormat.dateTime();
    String min = null;
    String max = null;
    DateTime start = null;
    DateTime end = null;
    int duration = 0;
    float requestRate = 0;
    int sessionCount = 0;
    Pattern pattern = Pattern.compile("get (.*?) http/*");
    StatsAggregationBuilder statsAgg = AggregationBuilders.stats("Stats").field("Time");
    BoolQueryBuilder filterSearch = new BoolQueryBuilder();
    filterSearch.must(QueryBuilders.termQuery("SessionID", sessionId));
    SearchResponse sr = es.getClient().prepareSearch(logIndex).setTypes(inputType)
            .setQuery(filterSearch).addAggregation(statsAgg).execute().actionGet();
    // The stats aggregation yields the first and last timestamps of the session.
    Stats agg = sr.getAggregations().get("Stats");
    min = agg.getMinAsString();
    max = agg.getMaxAsString();
    start = fmt.parseDateTime(min);
    end = fmt.parseDateTime(max);
    duration = Seconds.secondsBetween(start, end).getSeconds();
    int searchDataListRequestCount = 0;
    int searchDataRequestCount = 0;
    int searchDataListRequestByKeywordsCount = 0;
    int ftpRequestCount = 0;
    int keywordsNum = 0;
    String iP = null;
    String keywords = "";
    String views = "";
    String downloads = "";
    SearchResponse scrollResp = es.getClient().prepareSearch(logIndex).setTypes(inputType)
            .setScroll(new TimeValue(60000)).setQuery(filterSearch).setSize(100).execute().actionGet();
    while (true) {
        for (SearchHit hit : scrollResp.getHits().getHits()) {
            Map<String, Object> result = hit.getSource();
            String request = (String) result.get("Request");
            String logType = (String) result.get("LogType");
            iP = (String) result.get("IP");
            Matcher matcher = pattern.matcher(request.trim().toLowerCase());
            while (matcher.find()) {
                request = matcher.group(1);
            }
            String datasetlist = props.getProperty(MudrodConstants.SEARCH_MARKER);
            String dataset = props.getProperty(MudrodConstants.VIEW_MARKER);
            if (request.contains(datasetlist)) {
                searchDataListRequestCount++;
                RequestUrl requestURL = new RequestUrl();
                String infoStr = requestURL.getSearchInfo(request) + ",";
                String info = es.customAnalyzing(props.getProperty(MudrodConstants.ES_INDEX_NAME), infoStr);
                if (!",".equals(info)) {
                    if ("".equals(keywords)) {
                        keywords = keywords + info;
                    } else {
                        // Append only keywords not yet seen in this session.
                        String[] items = info.split(",");
                        String[] keywordList = keywords.split(",");
                        for (String item : items) {
                            if (!Arrays.asList(keywordList).contains(item)) {
                                keywords = keywords + item + ",";
                            }
                        }
                    }
                }
            }
            if (request.startsWith(dataset)) {
                searchDataRequestCount++;
                if (findDataset(request) != null) {
                    String view = findDataset(request);
                    if ("".equals(views))
                        views = view;
                    else if (!views.contains(view))
                        views = views + "," + view;
                }
            }
            if (MudrodConstants.FTP_LOG.equals(logType)) {
                ftpRequestCount++;
                String download = "";
                String requestLowercase = request.toLowerCase();
                // Only count actual data downloads, not images or documentation.
                if (!requestLowercase.endsWith(".jpg") && !requestLowercase.endsWith(".pdf")
                        && !requestLowercase.endsWith(".txt") && !requestLowercase.endsWith(".gif")) {
                    download = request;
                }
                if ("".equals(downloads)) {
                    downloads = download;
                } else {
                    if (!downloads.contains(download)) {
                        downloads = downloads + "," + download;
                    }
                }
            }
        }
        scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId())
                .setScroll(new TimeValue(600000)).execute().actionGet();
        // Break condition: no hits are returned.
        if (scrollResp.getHits().getHits().length == 0) {
            break;
        }
    }
    if (!"".equals(keywords)) {
        keywordsNum = keywords.split(",").length;
    }
    // Persist the session only if its activity stays within the configured frequency limits.
    if (searchDataListRequestCount != 0
            && searchDataListRequestCount <= Integer.parseInt(props.getProperty(MudrodConstants.SEARCH_F))
            && searchDataRequestCount != 0
            && searchDataRequestCount <= Integer.parseInt(props.getProperty(MudrodConstants.VIEW_F))
            && ftpRequestCount <= Integer.parseInt(props.getProperty(MudrodConstants.DOWNLOAD_F))) {
        String sessionURL = props.getProperty(MudrodConstants.SESSION_PORT)
                + props.getProperty(MudrodConstants.SESSION_URL) + "?sessionid=" + sessionId
                + "&sessionType=" + outputType + "&requestType=" + inputType;
        sessionCount = 1;
        IndexRequest ir = new IndexRequest(logIndex, outputType).source(jsonBuilder().startObject()
                .field("SessionID", sessionId)
                .field("SessionURL", sessionURL)
                .field("Duration", duration)
                .field("Number of Keywords", keywordsNum)
                .field("Time", min)
                .field("End_time", max)
                .field("searchDataListRequest_count", searchDataListRequestCount)
                .field("searchDataListRequest_byKeywords_count", searchDataListRequestByKeywordsCount)
                .field("searchDataRequest_count", searchDataRequestCount)
                .field("keywords", es.customAnalyzing(logIndex, keywords))
                .field("views", views)
                .field("downloads", downloads)
                .field("request_rate", requestRate)
                .field("Comments", "")
                .field("Validation", 0)
                .field("Produceby", 0)
                .field("Correlation", 0)
                .field("IP", iP)
                .endObject());
        es.getBulkProcessor().add(ir);
    }
    return sessionCount;
}
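Both scroll loops in this project (here and in checkByRate) leave the scroll context to expire via its keep-alive. A minimal sketch of releasing it explicitly once the loop breaks, reusing scrollResp from the method above and assuming the same transport-style client:

// A minimal sketch: free the scroll context instead of waiting for the
// 60 s / 600 s keep-alive to expire on the server.
String scrollId = scrollResp.getScrollId();
if (scrollId != null) {
    es.getClient().prepareClearScroll().addScrollId(scrollId).execute().actionGet();
}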
Use of org.graylog.shaded.elasticsearch7.org.elasticsearch.action.index.IndexRequest in project elasticsearch-indexing-proxy by codelibs.
The class ProxyActionFilter, method getExecutor.
@SuppressWarnings("unchecked")
private <Request extends ActionRequest, Response extends ActionResponse> Supplier<Response> getExecutor(final Task task, final String action, final Request request) {
    if (BulkAction.NAME.equals(action)) {
        final long startTime = System.nanoTime();
        int count = 0;
        final BulkRequest req = (BulkRequest) request;
        for (final DocWriteRequest<?> subReq : req.requests()) {
            if (indexingProxyService.isTargetIndex(subReq.index())) {
                count++;
            }
        }
        if (count == 0) {
            return null;
        } else if (count != req.requests().size()) {
            throw new ElasticsearchException("Mixed target requests. ({} != {})", count, req.requests().size());
        }
        return () -> {
            final List<BulkItemResponse> responseList = new ArrayList<>(req.requests().size());
            for (int i = 0; i < req.requests().size(); i++) {
                final DocWriteRequest<?> dwr = req.requests().get(i);
                if (dwr instanceof IndexRequest) {
                    final IndexRequest r = (IndexRequest) dwr;
                    final String id = r.id() == null ? INDEX_UUID : r.id();
                    final IndexResponse response = new IndexResponse(new ShardId(new Index(r.index(), INDEX_UUID), 0), r.type(), id, r.version(), true);
                    responseList.add(new BulkItemResponse(i, r.opType(), response));
                } else if (dwr instanceof UpdateRequest) {
                    final UpdateRequest r = (UpdateRequest) dwr;
                    final String id = r.id() == null ? INDEX_UUID : r.id();
                    final UpdateResponse response = new UpdateResponse(new ShardId(new Index(r.index(), INDEX_UUID), 0), r.type(), id, r.version(), Result.CREATED);
                    responseList.add(new BulkItemResponse(i, r.opType(), response));
                } else if (dwr instanceof DeleteRequest) {
                    final DeleteRequest r = (DeleteRequest) dwr;
                    final String id = r.id() == null ? INDEX_UUID : r.id();
                    final DeleteResponse response = new DeleteResponse(new ShardId(new Index(r.index(), INDEX_UUID), 0), r.type(), id, r.version(), true);
                    response.setShardInfo(new ReplicationResponse.ShardInfo(1, 1, ReplicationResponse.EMPTY));
                    responseList.add(new BulkItemResponse(i, r.opType(), response));
                } else {
                    responseList.add(new BulkItemResponse(i, dwr.opType(), new BulkItemResponse.Failure(dwr.index(), dwr.type(), dwr.id(), new ElasticsearchException("Unknown request: " + dwr))));
                }
            }
            return (Response) new BulkResponse(responseList.toArray(new BulkItemResponse[responseList.size()]), (System.nanoTime() - startTime) / 1000000);
        };
    } else if (DeleteAction.NAME.equals(action)) {
        final DeleteRequest req = (DeleteRequest) request;
        if (!indexingProxyService.isTargetIndex(req.index())) {
            return null;
        }
        return () -> {
            final String id = req.id() == null ? INDEX_UUID : req.id();
            final DeleteResponse res = new DeleteResponse(new ShardId(new Index(req.index(), INDEX_UUID), 0), req.type(), id, req.version(), true);
            res.setShardInfo(new ReplicationResponse.ShardInfo(1, 1, ReplicationResponse.EMPTY));
            return (Response) res;
        };
    } else if (DeleteByQueryAction.NAME.equals(action)) {
        final long startTime = System.nanoTime();
        int count = 0;
        final DeleteByQueryRequest req = (DeleteByQueryRequest) request;
        for (final String index : req.indices()) {
            if (indexingProxyService.isTargetIndex(index)) {
                count++;
            }
        }
        if (count == 0) {
            return null;
        } else if (count != req.indices().length) {
            throw new ElasticsearchException("Mixed target requests. ({} != {})", count, req.indices().length);
        }
        return () -> {
            return (Response) new BulkByScrollResponse(TimeValue.timeValueNanos(System.nanoTime() - startTime), new BulkByScrollTask.Status(null, 0, 0, 0, 0, 0, 0, 0, 0, 0, TimeValue.ZERO, 0, null, TimeValue.ZERO), Collections.emptyList(), Collections.emptyList(), false);
        };
    } else if (IndexAction.NAME.equals(action)) {
        final IndexRequest req = (IndexRequest) request;
        if (!indexingProxyService.isTargetIndex(req.index())) {
            return null;
        }
        return () -> {
            final String id = req.id() == null ? INDEX_UUID : req.id();
            return (Response) new IndexResponse(new ShardId(new Index(req.index(), INDEX_UUID), 0), req.type(), id, req.version(), true);
        };
    } else if (UpdateAction.NAME.equals(action)) {
        final UpdateRequest req = (UpdateRequest) request;
        if (!indexingProxyService.isTargetIndex(req.index())) {
            return null;
        }
        return () -> {
            final String id = req.id() == null ? INDEX_UUID : req.id();
            return (Response) new UpdateResponse(new ShardId(new Index(req.index(), INDEX_UUID), 0), req.type(), id, req.version(), Result.CREATED);
        };
    } else if (UpdateByQueryAction.NAME.equals(action)) {
        final long startTime = System.nanoTime();
        int count = 0;
        final UpdateByQueryRequest req = (UpdateByQueryRequest) request;
        for (final String index : req.indices()) {
            if (indexingProxyService.isTargetIndex(index)) {
                count++;
            }
        }
        if (count == 0) {
            return null;
        } else if (count != req.indices().length) {
            throw new ElasticsearchException("Mixed target requests. ({} != {})", count, req.indices().length);
        }
        return () -> {
            return (Response) new BulkByScrollResponse(TimeValue.timeValueNanos(System.nanoTime() - startTime), new BulkByScrollTask.Status(null, 0, 0, 0, 0, 0, 0, 0, 0, 0, TimeValue.ZERO, 0, null, TimeValue.ZERO), Collections.emptyList(), Collections.emptyList(), false);
        };
    }
    return null;
}
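For context, an ActionFilter like this one is typically registered through a plugin's getActionFilters() hook so it can intercept actions before they execute. A minimal sketch, assuming ProxyActionFilter can be constructed directly here; the plugin class name and no-arg construction are illustrative:

import java.util.Collections;
import java.util.List;
import org.elasticsearch.action.support.ActionFilter;
import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.plugins.Plugin;

// A minimal sketch; the real plugin may wire dependencies into the filter differently.
public class IndexingProxyPlugin extends Plugin implements ActionPlugin {
    private final ProxyActionFilter proxyActionFilter = new ProxyActionFilter();

    @Override
    public List<ActionFilter> getActionFilters() {
        // Returning the filter here registers it for every action on the node.
        return Collections.singletonList(proxyActionFilter);
    }
}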