
Example 71 with MapBasedInputRow

Use of io.druid.data.input.MapBasedInputRow in project druid by druid-io.

From the class HadoopDruidIndexerConfigTest, the method testNoneShardSpecBucketSelection.

@Test
public void testNoneShardSpecBucketSelection() {
    HadoopIngestionSpec spec = new HadoopIngestionSpec(
        new DataSchema(
            "foo",
            null,
            new AggregatorFactory[0],
            new UniformGranularitySpec(
                Granularities.MINUTE,
                Granularities.MINUTE,
                ImmutableList.of(new Interval("2010-01-01/P1D"))
            ),
            jsonMapper
        ),
        new HadoopIOConfig(ImmutableMap.<String, Object>of("paths", "bar", "type", "static"), null, null),
        new HadoopTuningConfig(
            null,
            null,
            null,
            ImmutableMap.<Long, List<HadoopyShardSpec>>of(
                new DateTime("2010-01-01T01:00:00").getMillis(),
                Lists.newArrayList(new HadoopyShardSpec(NoneShardSpec.instance(), 1)),
                new DateTime("2010-01-01T02:00:00").getMillis(),
                Lists.newArrayList(new HadoopyShardSpec(NoneShardSpec.instance(), 2))
            ),
            null, null, false, false, false, false, null, false, false, null, null, null, false, false
        )
    );
    HadoopDruidIndexerConfig config = HadoopDruidIndexerConfig.fromSpec(spec);
    final List<String> dims = Arrays.asList("diM1", "dIM2");
    final ImmutableMap<String, Object> values = ImmutableMap.<String, Object>of("Dim1", "1", "DiM2", "2", "dim1", "3", "dim2", "4");
    final long ts1 = new DateTime("2010-01-01T01:00:01").getMillis();
    Assert.assertEquals(1, config.getBucket(new MapBasedInputRow(ts1, dims, values)).get().getShardNum());
    final long ts2 = new DateTime("2010-01-01T02:00:01").getMillis();
    Assert.assertEquals(2, config.getBucket(new MapBasedInputRow(ts2, dims, values)).get().getShardNum());
}
Also used : AggregatorFactory(io.druid.query.aggregation.AggregatorFactory) DateTime(org.joda.time.DateTime) DataSchema(io.druid.segment.indexing.DataSchema) UniformGranularitySpec(io.druid.segment.indexing.granularity.UniformGranularitySpec) ImmutableList(com.google.common.collect.ImmutableList) List(java.util.List) MapBasedInputRow(io.druid.data.input.MapBasedInputRow) Interval(org.joda.time.Interval) Test(org.junit.Test)
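
The bucket lookup in this test hinges on getBucket() truncating the row's timestamp to the segment granularity (MINUTE here) and finding the HadoopyShardSpec list registered for that instant; the dimension names and values play no part. A minimal sketch of that relationship, reusing the config built in the test above (the dimension name and value here are illustrative):

// Sketch: 01:00:01 truncates to the 01:00 minute bucket, so the shard spec
// registered for that minute (shardNum 1) is selected.
long ts = new DateTime("2010-01-01T01:00:01").getMillis();
MapBasedInputRow row = new MapBasedInputRow(
    ts,
    Arrays.asList("dim1"),
    ImmutableMap.<String, Object>of("dim1", "value"));
int shardNum = config.getBucket(row).get().getShardNum(); // 1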

Example 72 with MapBasedInputRow

Use of io.druid.data.input.MapBasedInputRow in project druid by druid-io.

From the class InputRowSerdeTest, the method testSerde.

@Test
public void testSerde() {
    InputRow in = new MapBasedInputRow(timestamp, dims, event);
    AggregatorFactory[] aggregatorFactories = new AggregatorFactory[] {
        new DoubleSumAggregatorFactory("agg_non_existing", "agg_non_existing_in"),
        new DoubleSumAggregatorFactory("m1out", "m1"),
        new LongSumAggregatorFactory("m2out", "m2"),
        new HyperUniquesAggregatorFactory("m3out", "m3"),
        // unparseable from String to Long
        new LongSumAggregatorFactory("unparseable", "m3")
    };
    // Ignore Unparseable aggregator
    byte[] data = InputRowSerde.toBytes(in, aggregatorFactories, false);
    InputRow out = InputRowSerde.fromBytes(data, aggregatorFactories);
    Assert.assertEquals(timestamp, out.getTimestampFromEpoch());
    Assert.assertEquals(dims, out.getDimensions());
    Assert.assertEquals(Collections.EMPTY_LIST, out.getDimension("dim_non_existing"));
    Assert.assertEquals(ImmutableList.of("d1v"), out.getDimension("d1"));
    Assert.assertEquals(ImmutableList.of("d2v1", "d2v2"), out.getDimension("d2"));
    Assert.assertEquals(0.0f, out.getFloatMetric("agg_non_existing"), 0.00001);
    Assert.assertEquals(5.0f, out.getFloatMetric("m1out"), 0.00001);
    Assert.assertEquals(100L, out.getLongMetric("m2out"));
    Assert.assertEquals(1, ((HyperLogLogCollector) out.getRaw("m3out")).estimateCardinality(), 0.001);
    Assert.assertEquals(0L, out.getLongMetric("unparseable"));
}
Also used : DoubleSumAggregatorFactory(io.druid.query.aggregation.DoubleSumAggregatorFactory) MapBasedInputRow(io.druid.data.input.MapBasedInputRow) InputRow(io.druid.data.input.InputRow) LongSumAggregatorFactory(io.druid.query.aggregation.LongSumAggregatorFactory) HyperUniquesAggregatorFactory(io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory) AggregatorFactory(io.druid.query.aggregation.AggregatorFactory) Test(org.junit.Test)
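
The timestamp, dims, and event fields referenced by testSerde belong to the test fixture, which is not part of this excerpt. A plausible reconstruction implied by the assertions above; the values are hypothetical, not the actual fixture:

// Hypothetical fixture consistent with the assertions in testSerde;
// the real setup lives elsewhere in InputRowSerdeTest.
long timestamp = new DateTime("2016-01-01").getMillis(); // illustrative
List<String> dims = Arrays.asList("d1", "d2");
Map<String, Object> event = ImmutableMap.<String, Object>of(
    "d1", "d1v",                            // single-valued dimension
    "d2", ImmutableList.of("d2v1", "d2v2"), // multi-valued dimension
    "m1", 5.0f,                             // doubleSum -> m1out == 5.0
    "m2", 100L,                             // longSum -> m2out == 100
    "m3", "val");                           // hyperUnique input (cardinality 1); unparseable as long, so "unparseable" sums to 0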

Example 73 with MapBasedInputRow

Use of io.druid.data.input.MapBasedInputRow in project druid by druid-io.

From the class TwitterSpritzerFirehoseFactory, the method connect.

@Override
public Firehose connect(InputRowParser parser) throws IOException {
    final ConnectionLifeCycleListener connectionLifeCycleListener = new ConnectionLifeCycleListener() {

        @Override
        public void onConnect() {
            log.info("Connected_to_Twitter");
        }

        @Override
        public void onDisconnect() {
            log.info("Disconnect_from_Twitter");
        }

        /**
         * Called before the thread gets cleaned up.
         */
        @Override
        public void onCleanUp() {
            log.info("Cleanup_twitter_stream");
        }
    };
    // ConnectionLifeCycleListener
    final TwitterStream twitterStream;
    final StatusListener statusListener;
    final int QUEUE_SIZE = 2000;
    /** This queue is used to move twitter events from the twitter4j thread to the druid ingest thread.   */
    final BlockingQueue<Status> queue = new ArrayBlockingQueue<Status>(QUEUE_SIZE);
    final long startMsec = System.currentTimeMillis();
    //
    //   set up Twitter Spritzer
    //
    twitterStream = new TwitterStreamFactory().getInstance();
    twitterStream.addConnectionLifeCycleListener(connectionLifeCycleListener);
    statusListener = new StatusListener() {

        // This is what really gets called to deliver stuff from twitter4j
        @Override
        public void onStatus(Status status) {
            // time to stop?
            if (Thread.currentThread().isInterrupted()) {
                throw new RuntimeException("Interrupted, time to stop");
            }
            try {
                boolean success = queue.offer(status, 15L, TimeUnit.SECONDS);
                if (!success) {
                    log.warn("queue too slow!");
                }
            } catch (InterruptedException e) {
                throw new RuntimeException("InterruptedException", e);
            }
        }

        @Override
        public void onDeletionNotice(StatusDeletionNotice statusDeletionNotice) {
        //log.info("Got a status deletion notice id:" + statusDeletionNotice.getStatusId());
        }

        @Override
        public void onTrackLimitationNotice(int numberOfLimitedStatuses) {
            // This notice will be sent each time a limited stream becomes unlimited.
            // If this number is high and/or rapidly increasing, it is an indication that your predicate is too broad; consider a predicate with higher selectivity.
            log.warn("Got track limitation notice:" + numberOfLimitedStatuses);
        }

        @Override
        public void onScrubGeo(long userId, long upToStatusId) {
        //log.info("Got scrub_geo event userId:" + userId + " upToStatusId:" + upToStatusId);
        }

        @Override
        public void onException(Exception ex) {
            ex.printStackTrace();
        }

        @Override
        public void onStallWarning(StallWarning warning) {
            System.out.println("Got stall warning:" + warning);
        }
    };
    twitterStream.addListener(statusListener);
    // creates a generic StatusStream
    twitterStream.sample();
    log.info("returned from sample()");
    return new Firehose() {

        private final Runnable doNothingRunnable = new Runnable() {

            public void run() {
            }
        };

        private long rowCount = 0L;

        private boolean waitIfmax = (getMaxEventCount() < 0L);

        private final Map<String, Object> theMap = new TreeMap<>();

        // DIY json parsing:
        // private final ObjectMapper omapper = new ObjectMapper();
        private boolean maxTimeReached() {
            if (getMaxRunMinutes() <= 0) {
                return false;
            } else {
                return (System.currentTimeMillis() - startMsec) / 60000L >= getMaxRunMinutes();
            }
        }

        private boolean maxCountReached() {
            return getMaxEventCount() >= 0 && rowCount >= getMaxEventCount();
        }

        @Override
        public boolean hasMore() {
            if (maxCountReached() || maxTimeReached()) {
                return waitIfmax;
            } else {
                return true;
            }
        }

        @Override
        public InputRow nextRow() {
            // Interrupted to stop?
            if (Thread.currentThread().isInterrupted()) {
                throw new RuntimeException("Interrupted, time to stop");
            }
            // all done?
            if (maxCountReached() || maxTimeReached()) {
                if (waitIfmax) {
                    // sleep a long time instead of terminating
                    try {
                        log.info("reached limit, sleeping a long time...");
                        Thread.sleep(2000000000L);
                    } catch (InterruptedException e) {
                        throw new RuntimeException("InterruptedException", e);
                    }
                } else {
                    // allow this event through; the next hasMore() call will return false
                }
            }
            if (++rowCount % 1000 == 0) {
                log.info("nextRow() has returned %,d InputRows", rowCount);
            }
            Status status;
            try {
                status = queue.take();
            } catch (InterruptedException e) {
                throw new RuntimeException("InterruptedException", e);
            }
            theMap.clear();
            HashtagEntity[] hts = status.getHashtagEntities();
            String text = status.getText();
            theMap.put("text", (null == text) ? "" : text);
            theMap.put("htags", (hts.length > 0) ? Lists.transform(Arrays.asList(hts), new Function<HashtagEntity, String>() {

                @Nullable
                @Override
                public String apply(HashtagEntity input) {
                    return input.getText();
                }
            }) : ImmutableList.<String>of());
            long[] lcontributors = status.getContributors();
            List<String> contributors = new ArrayList<>();
            // getContributors() may return null when no contributors are associated with the status
            if (lcontributors != null) {
                for (long contrib : lcontributors) {
                    contributors.add(String.format("%d", contrib));
                }
            }
            theMap.put("contributors", contributors);
            GeoLocation geoLocation = status.getGeoLocation();
            if (null != geoLocation) {
                double lat = status.getGeoLocation().getLatitude();
                double lon = status.getGeoLocation().getLongitude();
                theMap.put("lat", lat);
                theMap.put("lon", lon);
            } else {
                theMap.put("lat", null);
                theMap.put("lon", null);
            }
            if (status.getSource() != null) {
                Matcher m = sourcePattern.matcher(status.getSource());
                theMap.put("source", m.find() ? m.group(1) : status.getSource());
            }
            theMap.put("retweet", status.isRetweet());
            if (status.isRetweet()) {
                Status original = status.getRetweetedStatus();
                theMap.put("retweet_count", original.getRetweetCount());
                User originator = original.getUser();
                theMap.put("originator_screen_name", originator != null ? originator.getScreenName() : "");
                theMap.put("originator_follower_count", originator != null ? originator.getFollowersCount() : "");
                theMap.put("originator_friends_count", originator != null ? originator.getFriendsCount() : "");
                theMap.put("originator_verified", originator != null ? originator.isVerified() : "");
            }
            User user = status.getUser();
            final boolean hasUser = (null != user);
            theMap.put("follower_count", hasUser ? user.getFollowersCount() : 0);
            theMap.put("friends_count", hasUser ? user.getFriendsCount() : 0);
            theMap.put("lang", hasUser ? user.getLang() : "");
            // resolution in seconds, -1 if not available?
            theMap.put("utc_offset", hasUser ? user.getUtcOffset() : -1);
            theMap.put("statuses_count", hasUser ? user.getStatusesCount() : 0);
            theMap.put("user_id", hasUser ? String.format("%d", user.getId()) : "");
            theMap.put("screen_name", hasUser ? user.getScreenName() : "");
            theMap.put("location", hasUser ? user.getLocation() : "");
            theMap.put("verified", hasUser ? user.isVerified() : "");
            theMap.put("ts", status.getCreatedAt().getTime());
            List<String> dimensions = Lists.newArrayList(theMap.keySet());
            return new MapBasedInputRow(status.getCreatedAt().getTime(), dimensions, theMap);
        }

        @Override
        public Runnable commit() {
            // reuse the same object each time
            return doNothingRunnable;
        }

        @Override
        public void close() throws IOException {
            log.info("CLOSE twitterstream");
            // invokes twitterStream.cleanUp()
            twitterStream.shutdown();
        }
    };
}
Also used : User(twitter4j.User) Matcher(java.util.regex.Matcher) ArrayList(java.util.ArrayList) TwitterStreamFactory(twitter4j.TwitterStreamFactory) ArrayBlockingQueue(java.util.concurrent.ArrayBlockingQueue) StallWarning(twitter4j.StallWarning) ConnectionLifeCycleListener(twitter4j.ConnectionLifeCycleListener) HashtagEntity(twitter4j.HashtagEntity) MapBasedInputRow(io.druid.data.input.MapBasedInputRow) Status(twitter4j.Status) Firehose(io.druid.data.input.Firehose) IOException(java.io.IOException) TwitterStream(twitter4j.TwitterStream) StatusListener(twitter4j.StatusListener) StatusDeletionNotice(twitter4j.StatusDeletionNotice) GeoLocation(twitter4j.GeoLocation) Map(java.util.Map) TreeMap(java.util.TreeMap) Nullable(javax.annotation.Nullable)
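
All three examples share one construction pattern: assemble an event Map, choose the dimension list (here, every key in the map), and pair both with an epoch-millisecond timestamp in a MapBasedInputRow. A minimal, self-contained sketch of that pattern; the event keys are illustrative:

import io.druid.data.input.MapBasedInputRow;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

public class MapBasedInputRowSketch {
    public static void main(String[] args) {
        // Build the event; its keys double as dimension names below.
        Map<String, Object> event = new TreeMap<>();
        event.put("text", "hello");
        event.put("lang", "en");
        // Treat every event key as a dimension, as the Twitter firehose does above.
        List<String> dimensions = new ArrayList<>(event.keySet());
        MapBasedInputRow row = new MapBasedInputRow(System.currentTimeMillis(), dimensions, event);
        System.out.println(row.getDimension("lang")); // prints [en]
    }
}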

Aggregations

MapBasedInputRow (io.druid.data.input.MapBasedInputRow): 73
Test (org.junit.Test): 51
DateTime (org.joda.time.DateTime): 38
OnheapIncrementalIndex (io.druid.segment.incremental.OnheapIncrementalIndex): 32
IncrementalIndex (io.druid.segment.incremental.IncrementalIndex): 30
File (java.io.File): 19
CountAggregatorFactory (io.druid.query.aggregation.CountAggregatorFactory): 13
LongSumAggregatorFactory (io.druid.query.aggregation.LongSumAggregatorFactory): 12
InputRow (io.druid.data.input.InputRow): 11
IncrementalIndexTest (io.druid.segment.data.IncrementalIndexTest): 11
Interval (org.joda.time.Interval): 11
IOException (java.io.IOException): 10
DimensionsSpec (io.druid.data.input.impl.DimensionsSpec): 9
AggregatorFactory (io.druid.query.aggregation.AggregatorFactory): 8
Row (io.druid.data.input.Row): 7
TaskStatus (io.druid.indexing.common.TaskStatus): 7
TaskToolbox (io.druid.indexing.common.TaskToolbox): 7
TestIndexerMetadataStorageCoordinator (io.druid.indexing.test.TestIndexerMetadataStorageCoordinator): 7
SpatialDimensionSchema (io.druid.data.input.impl.SpatialDimensionSchema): 6
Pair (io.druid.java.util.common.Pair): 6