Search in sources :

Example 21 with TypeReference

use of com.fasterxml.jackson.core.type.TypeReference in project hive by apache.

In the class DruidQueryBasedInputFormat, the method splitSelectQuery:

/**
 * Splits a Druid Select query into multiple {@link HiveDruidSplit}s so the read can be
 * parallelized, driven by the {@code HIVE_DRUID_SELECT_THRESHOLD} row threshold.
 * Only the Druid broker is contacted to obtain the metadata used for splitting.
 *
 * @param conf Hadoop configuration carrying the Hive/Druid tuning properties
 * @param address Druid broker address that every generated split will query
 * @param query the Select query to (possibly) split
 * @param dummyPath placeholder path stored in each split
 * @return a single split when splitting is unnecessary; otherwise one split per interval chunk
 * @throws IOException if a broker request fails or its response cannot be parsed
 */
private static HiveDruidSplit[] splitSelectQuery(Configuration conf, String address, SelectQuery query, Path dummyPath) throws IOException {
    // getIntVar already returns int, so no narrowing cast is needed.
    final int selectThreshold = HiveConf.getIntVar(conf, HiveConf.ConfVars.HIVE_DRUID_SELECT_THRESHOLD);
    final int numConnection = HiveConf.getIntVar(conf, HiveConf.ConfVars.HIVE_DRUID_NUM_HTTP_CONNECTION);
    final Period readTimeout = new Period(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_DRUID_HTTP_READ_TIMEOUT));
    final boolean isFetch = query.getContextBoolean(Constants.DRUID_QUERY_FETCH, false);
    if (isFetch) {
        // The query carries its own limit ("fetch" mode): submit it as-is, unsplit.
        return new HiveDruidSplit[] { new HiveDruidSplit(DruidStorageHandlerUtils.JSON_MAPPER.writeValueAsString(query), dummyPath, new String[] { address }) };
    }
    // We do not know the number of rows, so execute a Segment Metadata query to obtain it.
    SegmentMetadataQueryBuilder metadataBuilder = new Druids.SegmentMetadataQueryBuilder();
    metadataBuilder.dataSource(query.getDataSource());
    metadataBuilder.intervals(query.getIntervals());
    metadataBuilder.merge(true);
    metadataBuilder.analysisTypes();
    SegmentMetadataQuery metadataQuery = metadataBuilder.build();
    Lifecycle lifecycle = new Lifecycle();
    HttpClient client = createStartedClient(lifecycle, numConnection, readTimeout);
    List<SegmentAnalysis> metadataList;
    // try-with-resources closes the response stream on every path (the original only
    // closed it on a parse failure); the lifecycle is always stopped in finally.
    try (InputStream response = DruidStorageHandlerUtils.submitRequest(client, DruidStorageHandlerUtils.createRequest(address, metadataQuery))) {
        metadataList = DruidStorageHandlerUtils.SMILE_MAPPER.readValue(response, new TypeReference<List<SegmentAnalysis>>() {
        });
    } catch (Exception e) {
        throw new IOException(org.apache.hadoop.util.StringUtils.stringifyException(e));
    } finally {
        lifecycle.stop();
    }
    if (metadataList == null) {
        throw new IOException("Connected to Druid but could not retrieve datasource information");
    }
    if (metadataList.isEmpty()) {
        // There are no rows for that time range; submit the query as it is.
        return new HiveDruidSplit[] { new HiveDruidSplit(DruidStorageHandlerUtils.JSON_MAPPER.writeValueAsString(query), dummyPath, new String[] { address }) };
    }
    if (metadataList.size() != 1) {
        throw new IOException("Information about segments should have been merged");
    }
    final long numRows = metadataList.get(0).getNumRows();
    query = query.withPagingSpec(PagingSpec.newSpec(Integer.MAX_VALUE));
    if (numRows <= selectThreshold) {
        // Under the threshold: no need to split.
        return new HiveDruidSplit[] { new HiveDruidSplit(DruidStorageHandlerUtils.JSON_MAPPER.writeValueAsString(query), dummyPath, new String[] { address }) };
    }
    // If the query does not restrict the timestamp, obtain the real time range with a
    // Time Boundary query; then split that range per the Select threshold property.
    final List<Interval> intervals = new ArrayList<>();
    if (query.getIntervals().size() == 1 && query.getIntervals().get(0).withChronology(ISOChronology.getInstanceUTC()).equals(DruidTable.DEFAULT_INTERVAL)) {
        // Default (unbounded) interval: run a Time Boundary query for a precise range.
        TimeBoundaryQueryBuilder timeBuilder = new Druids.TimeBoundaryQueryBuilder();
        timeBuilder.dataSource(query.getDataSource());
        TimeBoundaryQuery timeQuery = timeBuilder.build();
        Lifecycle timeLifecycle = new Lifecycle();
        HttpClient timeClient = createStartedClient(timeLifecycle, numConnection, readTimeout);
        List<Result<TimeBoundaryResultValue>> timeList;
        try (InputStream response = DruidStorageHandlerUtils.submitRequest(timeClient, DruidStorageHandlerUtils.createRequest(address, timeQuery))) {
            timeList = DruidStorageHandlerUtils.SMILE_MAPPER.readValue(response, new TypeReference<List<Result<TimeBoundaryResultValue>>>() {
            });
        } catch (Exception e) {
            throw new IOException(org.apache.hadoop.util.StringUtils.stringifyException(e));
        } finally {
            timeLifecycle.stop();
        }
        if (timeList == null || timeList.isEmpty()) {
            throw new IOException("Connected to Druid but could not retrieve time boundary information");
        }
        if (timeList.size() != 1) {
            throw new IOException("We should obtain a single time boundary");
        }
        intervals.add(new Interval(timeList.get(0).getValue().getMinTime().getMillis(), timeList.get(0).getValue().getMaxTime().getMillis(), ISOChronology.getInstanceUTC()));
    } else {
        intervals.addAll(query.getIntervals());
    }
    // Create ceil(numRows / threshold) input splits over the interval set.
    int numSplits = (int) Math.ceil((double) numRows / selectThreshold);
    List<List<Interval>> newIntervals = createSplitsIntervals(intervals, numSplits);
    HiveDruidSplit[] splits = new HiveDruidSplit[numSplits];
    for (int i = 0; i < numSplits; i++) {
        // Each split gets a partial Select query restricted to its interval chunk.
        final SelectQuery partialQuery = query.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(newIntervals.get(i)));
        splits[i] = new HiveDruidSplit(DruidStorageHandlerUtils.JSON_MAPPER.writeValueAsString(partialQuery), dummyPath, new String[] { address });
    }
    return splits;
}

/**
 * Creates an HTTP client bound to {@code lifecycle} and starts the lifecycle,
 * deduplicating the boot boilerplate used by both broker requests above.
 * The caller is responsible for stopping {@code lifecycle} when done.
 *
 * @throws IOException if the lifecycle fails to start
 */
private static HttpClient createStartedClient(Lifecycle lifecycle, int numConnection, Period readTimeout) throws IOException {
    HttpClient client = HttpClientInit.createClient(HttpClientConfig.builder().withNumConnections(numConnection).withReadTimeout(readTimeout.toStandardDuration()).build(), lifecycle);
    try {
        lifecycle.start();
    } catch (Exception e) {
        LOG.error("Lifecycle start issue");
        throw new IOException(org.apache.hadoop.util.StringUtils.stringifyException(e));
    }
    return client;
}
Also used : ArrayList(java.util.ArrayList) MultipleIntervalSegmentSpec(io.druid.query.spec.MultipleIntervalSegmentSpec) TimeBoundaryQuery(io.druid.query.timeboundary.TimeBoundaryQuery) Result(io.druid.query.Result) SegmentMetadataQuery(io.druid.query.metadata.metadata.SegmentMetadataQuery) SegmentMetadataQueryBuilder(io.druid.query.Druids.SegmentMetadataQueryBuilder) SegmentAnalysis(io.druid.query.metadata.metadata.SegmentAnalysis) TimeBoundaryQueryBuilder(io.druid.query.Druids.TimeBoundaryQueryBuilder) List(java.util.List) ArrayList(java.util.ArrayList) TypeReference(com.fasterxml.jackson.core.type.TypeReference) InputStream(java.io.InputStream) Lifecycle(com.metamx.common.lifecycle.Lifecycle) Period(org.joda.time.Period) IOException(java.io.IOException) JsonParseException(com.fasterxml.jackson.core.JsonParseException) JsonMappingException(com.fasterxml.jackson.databind.JsonMappingException) IOException(java.io.IOException) SelectQuery(io.druid.query.select.SelectQuery) HttpClient(com.metamx.http.client.HttpClient) Interval(org.joda.time.Interval)

Example 22 with TypeReference

use of com.fasterxml.jackson.core.type.TypeReference in project Java-Mandrill-Wrapper by cribbstechnologies.

In the class MandrillRESTRequestTest, the method testSendMessageResponseConversion:

@Test
public void testSendMessageResponseConversion() throws IOException {
    this.initRequestWithActualMapper();
    StringWriter sw = new StringWriter();
    IOUtils.copy(this.getClass().getClassLoader().getResourceAsStream("messages/sendMessageResponse.txt"), sw);
    // Parameterize the TypeReference: the original raw type defeated Jackson's
    // generic-type capture and triggered an unchecked-conversion warning.
    TypeReference<List<MessageResponse>> responseListReference = new TypeReference<List<MessageResponse>>() {
    };
    // Convert the fixture JSON and verify both entries of the anonymous list payload.
    BaseMandrillAnonymousListResponse<MessageResponse> response = (BaseMandrillAnonymousListResponse<MessageResponse>) this.request.convertAnonymousListResponseData(sw.toString(), SendMessageResponse.class, responseListReference);
    assertEquals(2, response.getList().size());
    MessageResponse resp = response.getList().get(0);
    assertEquals("example email", resp.getEmail());
    assertEquals("example status", resp.getStatus());
    resp = response.getList().get(1);
    assertEquals("example email2", resp.getEmail());
    assertEquals("example status2", resp.getStatus());
}
Also used : BaseMandrillAnonymousListResponse(com.cribbstechnologies.clients.mandrill.model.response.BaseMandrillAnonymousListResponse) SendMessageResponse(com.cribbstechnologies.clients.mandrill.model.response.message.SendMessageResponse) StringWriter(java.io.StringWriter) MessageResponse(com.cribbstechnologies.clients.mandrill.model.response.message.MessageResponse) SendMessageResponse(com.cribbstechnologies.clients.mandrill.model.response.message.SendMessageResponse) TypeReference(com.fasterxml.jackson.core.type.TypeReference) Test(org.junit.Test)

Example 23 with TypeReference

use of com.fasterxml.jackson.core.type.TypeReference in project Java-Mandrill-Wrapper by cribbstechnologies.

In the class MandrillRESTRequestTest, the method testUrlListResponseConversion:

@Test
public void testUrlListResponseConversion() throws IOException {
    this.initRequestWithActualMapper();
    StringWriter sw = new StringWriter();
    IOUtils.copy(this.getClass().getClassLoader().getResourceAsStream("urls/urlList.txt"), sw);
    // Parameterize the TypeReference: the original raw type defeated Jackson's
    // generic-type capture and triggered an unchecked-conversion warning.
    TypeReference<List<UrlResponse>> urlListReference = new TypeReference<List<UrlResponse>>() {
    };
    // Convert the fixture JSON and verify both URL entries field by field.
    BaseMandrillAnonymousListResponse<UrlResponse> response = (BaseMandrillAnonymousListResponse<UrlResponse>) this.request.convertAnonymousListResponseData(sw.toString(), UrlListResponse.class, urlListReference);
    assertEquals(2, response.getList().size());
    UrlResponse urlResponse = response.getList().get(0);
    assertEquals("example url1", urlResponse.getUrl());
    assertEquals(2, urlResponse.getSent());
    assertEquals(3, urlResponse.getClicks());
    assertEquals(4, urlResponse.getUnique_clicks());
    urlResponse = response.getList().get(1);
    assertEquals("example url11", urlResponse.getUrl());
    assertEquals(12, urlResponse.getSent());
    assertEquals(13, urlResponse.getClicks());
    assertEquals(14, urlResponse.getUnique_clicks());
}
Also used : BaseMandrillAnonymousListResponse(com.cribbstechnologies.clients.mandrill.model.response.BaseMandrillAnonymousListResponse) StringWriter(java.io.StringWriter) TimeUrlResponse(com.cribbstechnologies.clients.mandrill.model.response.urls.TimeUrlResponse) UrlResponse(com.cribbstechnologies.clients.mandrill.model.response.urls.UrlResponse) TypeReference(com.fasterxml.jackson.core.type.TypeReference) UrlListResponse(com.cribbstechnologies.clients.mandrill.model.response.urls.UrlListResponse) Test(org.junit.Test)

Example 24 with TypeReference

use of com.fasterxml.jackson.core.type.TypeReference in project Java-Mandrill-Wrapper by cribbstechnologies.

In the class MandrillRESTRequestTest, the method testUrlTimeResponseConversion:

@Test
public void testUrlTimeResponseConversion() throws IOException {
    this.initRequestWithActualMapper();
    StringWriter sw = new StringWriter();
    IOUtils.copy(this.getClass().getClassLoader().getResourceAsStream("urls/urlTimeResponse.txt"), sw);
    // Parameterize the TypeReference: the original raw type defeated Jackson's
    // generic-type capture and triggered an unchecked-conversion warning.
    TypeReference<List<TimeUrlResponse>> urlTimeReference = new TypeReference<List<TimeUrlResponse>>() {
    };
    // Convert the fixture JSON and verify both time-bucketed URL entries.
    BaseMandrillAnonymousListResponse<TimeUrlResponse> response = (BaseMandrillAnonymousListResponse<TimeUrlResponse>) this.request.convertAnonymousListResponseData(sw.toString(), UrlTimeResponse.class, urlTimeReference);
    assertEquals(2, response.getList().size());
    TimeUrlResponse timeUrlResponse = response.getList().get(0);
    assertEquals("example time1", timeUrlResponse.getTime());
    assertEquals(2, timeUrlResponse.getSent());
    assertEquals(3, timeUrlResponse.getClicks());
    assertEquals(4, timeUrlResponse.getUnique_clicks());
    timeUrlResponse = response.getList().get(1);
    assertEquals("example time11", timeUrlResponse.getTime());
    assertEquals(12, timeUrlResponse.getSent());
    assertEquals(13, timeUrlResponse.getClicks());
    assertEquals(14, timeUrlResponse.getUnique_clicks());
}
Also used : BaseMandrillAnonymousListResponse(com.cribbstechnologies.clients.mandrill.model.response.BaseMandrillAnonymousListResponse) StringWriter(java.io.StringWriter) TimeUrlResponse(com.cribbstechnologies.clients.mandrill.model.response.urls.TimeUrlResponse) TypeReference(com.fasterxml.jackson.core.type.TypeReference) UrlTimeResponse(com.cribbstechnologies.clients.mandrill.model.response.urls.UrlTimeResponse) Test(org.junit.Test)

Example 25 with TypeReference

use of com.fasterxml.jackson.core.type.TypeReference in project druid by druid-io.

In the class WhiteListBasedDruidToTimelineEventConverter, the method readMap:

/**
 * Reads the white-list map from {@code mapPath}, falling back to the bundled
 * {@code defaultWhiteListMap.json} classpath resource when the path is null or empty.
 *
 * @param mapPath path to a JSON file mapping names to allowed value lists; may be
 *                null/empty to use the packaged default
 * @return the parsed, immutable white-list map
 * @throws ISE wrapping any IOException raised while reading or parsing the file
 */
private ImmutableSortedMap<String, ImmutableList<String>> readMap(final String mapPath) {
    String fileContent;
    String actualPath = mapPath;
    try {
        if (Strings.isNullOrEmpty(mapPath)) {
            actualPath = this.getClass().getClassLoader().getResource("defaultWhiteListMap.json").getFile();
            LOGGER.info("using default whiteList map located at [%s]", actualPath);
            // Decode the resource explicitly as UTF-8 (matching the file branch below;
            // the original used the platform default charset) and close the reader.
            try (InputStreamReader reader = new InputStreamReader(this.getClass().getClassLoader().getResourceAsStream("defaultWhiteListMap.json"), Charset.forName("UTF-8"))) {
                fileContent = CharStreams.toString(reader);
            }
        } else {
            fileContent = Files.asCharSource(new File(mapPath), Charset.forName("UTF-8")).read();
        }
        return mapper.reader(new TypeReference<ImmutableSortedMap<String, ImmutableList<String>>>() {
        }).readValue(fileContent);
    } catch (IOException e) {
        // Preserve the original cause and report which file failed.
        throw new ISE(e, "Got an exception while parsing file [%s]", actualPath);
    }
}
Also used : InputStreamReader(java.io.InputStreamReader) ImmutableList(com.google.common.collect.ImmutableList) ISE(com.metamx.common.ISE) TypeReference(com.fasterxml.jackson.core.type.TypeReference) IOException(java.io.IOException) File(java.io.File)

Aggregations

TypeReference (com.fasterxml.jackson.core.type.TypeReference)87 IOException (java.io.IOException)34 ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper)31 Test (org.junit.Test)25 Map (java.util.Map)19 List (java.util.List)16 HashMap (java.util.HashMap)12 ArrayList (java.util.ArrayList)11 StringWriter (java.io.StringWriter)8 BaseMandrillAnonymousListResponse (com.cribbstechnologies.clients.mandrill.model.response.BaseMandrillAnonymousListResponse)7 ImmutableMap (com.google.common.collect.ImmutableMap)7 InputStream (java.io.InputStream)7 MalformedURLException (java.net.MalformedURLException)7 JsonNode (com.fasterxml.jackson.databind.JsonNode)5 Collectors (java.util.stream.Collectors)5 SimpleModule (com.fasterxml.jackson.databind.module.SimpleModule)4 AuditInfo (io.druid.audit.AuditInfo)4 PublicAccount (io.nem.sdk.model.account.PublicAccount)4 NetworkType (io.nem.sdk.model.blockchain.NetworkType)4 NamespaceId (io.nem.sdk.model.namespace.NamespaceId)4