Example 21 with Stopwatch

use of org.apache.drill.shaded.guava.com.google.common.base.Stopwatch in project drill by apache.

the class FMPPMojo method execute.

@Override
public void execute() throws MojoExecutionException, MojoFailureException {
    if (project == null) {
        throw new MojoExecutionException("This plugin can only be used inside a project.");
    }
    String outputPath = output.getAbsolutePath();
    if ((!output.exists() && !output.mkdirs()) || !output.isDirectory()) {
        throw new MojoFailureException("can not write to output dir: " + outputPath);
    }
    String templatesPath = templates.getAbsolutePath();
    if (!templates.exists() || !templates.isDirectory()) {
        throw new MojoFailureException("templates not found in dir: " + outputPath);
    }
    // add the output directory path to the project source directories
    switch(scope) {
        case "compile":
            project.addCompileSourceRoot(outputPath);
            break;
        case "test":
            project.addTestCompileSourceRoot(outputPath);
            break;
        default:
            throw new MojoFailureException("scope must be compile or test");
    }
    final Stopwatch sw = Stopwatch.createStarted();
    try {
        getLog().info(format("Freemarker generation:\n scope: %s,\n config: %s,\n templates: %s", scope, config.getAbsolutePath(), templatesPath));
        final File tmp = Files.createTempDirectory("freemarker-tmp").toFile();
        String tmpPath = tmp.getAbsolutePath();
        final String tmpPathNormalized = tmpPath.endsWith(File.separator) ? tmpPath : tmpPath + File.separator;
        Settings settings = new Settings(new File("."));
        settings.set(Settings.NAME_SOURCE_ROOT, templatesPath);
        settings.set(Settings.NAME_OUTPUT_ROOT, tmp.getAbsolutePath());
        settings.load(config);
        settings.addProgressListener(new TerseConsoleProgressListener());
        settings.addProgressListener(new ProgressListener() {

            @Override
            public void notifyProgressEvent(Engine engine, int event, File src, int pMode, Throwable error, Object param) throws Exception {
                if (event == EVENT_END_PROCESSING_SESSION) {
                    getLog().info(format("Freemarker generation took %dms", sw.elapsed(TimeUnit.MILLISECONDS)));
                    // restart so the incremental update below is measured from zero
                    sw.reset().start();
                    Report report = moveIfChanged(tmp, tmpPathNormalized);
                    if (!tmp.delete()) {
                        throw new MojoFailureException(format("can not delete %s", tmp));
                    }
                    getLog().info(format("Incremental output update took %dms", sw.elapsed(TimeUnit.MILLISECONDS)));
                    getLog().info(format("new: %d", report.newFiles));
                    getLog().info(format("changed: %d", report.changedFiles));
                    getLog().info(format("unchanged: %d", report.unchangedFiles));
                }
            }
        });
        if (addMavenDataLoader) {
            getLog().info("Adding maven data loader");
            settings.setEngineAttribute(MavenDataLoader.MAVEN_DATA_ATTRIBUTE, new MavenData(project));
            settings.add(Settings.NAME_DATA, format("maven: %s()", MavenDataLoader.class.getName()));
        }
        settings.execute();
    } catch (Exception e) {
        throw new MojoFailureException(MiscUtil.causeMessages(e), e);
    }
}
Also used : MojoExecutionException(org.apache.maven.plugin.MojoExecutionException) MojoFailureException(org.apache.maven.plugin.MojoFailureException) Stopwatch(org.apache.drill.shaded.guava.com.google.common.base.Stopwatch) IOException(java.io.IOException) TerseConsoleProgressListener(fmpp.progresslisteners.TerseConsoleProgressListener) ProgressListener(fmpp.ProgressListener) MavenData(org.apache.drill.fmpp.mojo.MavenDataLoader.MavenData) File(java.io.File) Settings(fmpp.setting.Settings) Engine(fmpp.Engine)
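
The mojo above times two consecutive phases with a single Stopwatch: the FMPP generation session and the incremental move of changed outputs. Below is a minimal sketch of that reset-and-restart pattern, assuming plain (non-shaded) Guava; runGeneration and moveChangedOutputs are hypothetical stand-ins for the mojo's real work.

import com.google.common.base.Stopwatch;
import java.util.concurrent.TimeUnit;

public class TwoPhaseTimingSketch {

    public static void main(String[] args) throws Exception {
        Stopwatch sw = Stopwatch.createStarted();

        runGeneration(); // placeholder for the FMPP processing session
        System.out.printf("generation took %dms%n", sw.elapsed(TimeUnit.MILLISECONDS));

        // reset() alone leaves the stopwatch stopped; restart it so the second
        // phase is measured from zero instead of always reporting 0 ms
        sw.reset().start();

        moveChangedOutputs(); // placeholder for the incremental output update
        System.out.printf("output update took %dms%n", sw.elapsed(TimeUnit.MILLISECONDS));
    }

    private static void runGeneration() throws InterruptedException {
        Thread.sleep(50); // stand-in work
    }

    private static void moveChangedOutputs() throws InterruptedException {
        Thread.sleep(20); // stand-in work
    }
}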

Example 22 with Stopwatch

use of org.apache.drill.shaded.guava.com.google.common.base.Stopwatch in project drill by apache.

the class DefaultSqlHandler method transform.

/**
 * Transforms a RelNode into a new RelNode, targeting the provided set of traits. Optionally logs the outcome.
 *
 * @param plannerType The type of Planner to use.
 * @param phase The transformation phase we're running.
 * @param input The original RelNode
 * @param targetTraits The traits we are targeting for output.
 * @param log Whether to log the planning phase.
 * @return The transformed RelNode.
 */
protected RelNode transform(PlannerType plannerType, PlannerPhase phase, RelNode input, RelTraitSet targetTraits, boolean log) {
    final Stopwatch watch = Stopwatch.createStarted();
    final RuleSet rules = config.getRules(phase);
    final RelTraitSet toTraits = targetTraits.simplify();
    final RelNode output;
    switch(plannerType) {
        case HEP_BOTTOM_UP:
        case HEP:
            {
                final HepProgramBuilder hepPgmBldr = new HepProgramBuilder();
                if (plannerType == PlannerType.HEP_BOTTOM_UP) {
                    hepPgmBldr.addMatchOrder(HepMatchOrder.BOTTOM_UP);
                }
                for (RelOptRule rule : rules) {
                    hepPgmBldr.addRuleInstance(rule);
                }
                // Set noDAG = true to avoid caching problems which lead to incorrect Drill work.
                final HepPlanner planner = new HepPlanner(hepPgmBldr.build(), context.getPlannerSettings(), true, null, RelOptCostImpl.FACTORY);
                JaninoRelMetadataProvider relMetadataProvider = Utilities.registerJaninoRelMetadataProvider();
                // Modify RelMetaProvider for every RelNode in the SQL operator Rel tree.
                input.accept(new MetaDataProviderModifier(relMetadataProvider));
                planner.setRoot(input);
                if (!input.getTraitSet().equals(targetTraits)) {
                    planner.changeTraits(input, toTraits);
                }
                output = planner.findBestExp();
                break;
            }
        case VOLCANO:
        default:
            {
                // as weird as it seems, the cluster's only planner is the volcano planner.
                final RelOptPlanner planner = input.getCluster().getPlanner();
                final Program program = Programs.of(rules);
                Preconditions.checkArgument(planner instanceof VolcanoPlanner, "Cluster is expected to be constructed using VolcanoPlanner. Was actually of type %s.", planner.getClass().getName());
                output = program.run(planner, input, toTraits, ImmutableList.of(), ImmutableList.of());
                break;
            }
    }
    if (log) {
        log(plannerType, phase, output, logger, watch);
    }
    return output;
}
Also used : RuleSet(org.apache.calcite.tools.RuleSet) Program(org.apache.calcite.tools.Program) RelNode(org.apache.calcite.rel.RelNode) Stopwatch(org.apache.drill.shaded.guava.com.google.common.base.Stopwatch) HepProgramBuilder(org.apache.calcite.plan.hep.HepProgramBuilder) JaninoRelMetadataProvider(org.apache.calcite.rel.metadata.JaninoRelMetadataProvider) VolcanoPlanner(org.apache.calcite.plan.volcano.VolcanoPlanner) RelTraitSet(org.apache.calcite.plan.RelTraitSet) HepPlanner(org.apache.calcite.plan.hep.HepPlanner) RelOptPlanner(org.apache.calcite.plan.RelOptPlanner) RelOptRule(org.apache.calcite.plan.RelOptRule)
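
transform creates its Stopwatch unconditionally and only reads it inside the logging helper when log is true. The sketch below shows the same conditional-timing idea as a generic helper, assuming plain Guava; the timed method and its label are illustrative, not Drill's actual log(...) utility.

import com.google.common.base.Stopwatch;
import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;

public final class ConditionalTiming {

    // Runs the supplied work, optionally reporting how long it took.
    // The Stopwatch is created either way; an unused watch costs almost nothing.
    static <T> T timed(String label, boolean log, Supplier<T> work) {
        Stopwatch watch = Stopwatch.createStarted();
        T result = work.get();
        if (log) {
            System.out.printf("%s took %d ms%n", label, watch.elapsed(TimeUnit.MILLISECONDS));
        }
        return result;
    }

    public static void main(String[] args) {
        int sum = timed("summing", true, () -> {
            int s = 0;
            for (int i = 0; i < 1_000_000; i++) {
                s += i;
            }
            return s;
        });
        System.out.println(sum);
    }
}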

Example 23 with Stopwatch

use of org.apache.drill.shaded.guava.com.google.common.base.Stopwatch in project drill by apache.

the class DbScanToIndexScanPrule method doOnMatch.

protected void doOnMatch(IndexLogicalPlanCallContext indexContext) {
    Stopwatch indexPlanTimer = Stopwatch.createStarted();
    final PlannerSettings settings = PrelUtil.getPlannerSettings(indexContext.call.getPlanner());
    final IndexCollection indexCollection = getIndexCollection(settings, indexContext.scan);
    if (indexCollection == null) {
        return;
    }
    logger.debug("Index Rule {} starts", this.description);
    RexBuilder builder = indexContext.filter.getCluster().getRexBuilder();
    RexNode condition = null;
    if (indexContext.lowerProject == null) {
        condition = indexContext.filter.getCondition();
    } else {
        // get the filter as if it were below the projection.
        condition = RelOptUtil.pushFilterPastProject(indexContext.filter.getCondition(), indexContext.lowerProject);
    }
    // save this pushed down condition, in case it is needed later to build filter when joining back primary table
    indexContext.origPushedCondition = condition;
    RewriteAsBinaryOperators visitor = new RewriteAsBinaryOperators(true, builder);
    condition = condition.accept(visitor);
    if (indexCollection.supportsIndexSelection()) {
        try {
            processWithIndexSelection(indexContext, settings, condition, indexCollection, builder);
        } catch (Exception e) {
            logger.warn("Exception while doing index planning ", e);
        }
    } else {
        throw new UnsupportedOperationException("Index collection must support index selection");
    }
    indexPlanTimer.stop();
    logger.info("index_plan_info: Index Planning took {} ms", indexPlanTimer.elapsed(TimeUnit.MILLISECONDS));
}
Also used : PlannerSettings(org.apache.drill.exec.planner.physical.PlannerSettings) RewriteAsBinaryOperators(org.apache.drill.exec.planner.logical.partition.RewriteAsBinaryOperators) Stopwatch(org.apache.drill.shaded.guava.com.google.common.base.Stopwatch) RexBuilder(org.apache.calcite.rex.RexBuilder) IndexCollection(org.apache.drill.exec.planner.index.IndexCollection) RexNode(org.apache.calcite.rex.RexNode)
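
The rule stops the timer before reading it, but a Guava Stopwatch can also be read while still running; stop() simply freezes the reading. Below is a small variation (not the rule's actual code, and assuming plain Guava) that logs the planning time from a finally block, so the line is emitted even if planning throws; planWithIndexes is a hypothetical stand-in.

import com.google.common.base.Stopwatch;
import java.util.concurrent.TimeUnit;

public class IndexPlanTimingSketch {

    public static void main(String[] args) {
        Stopwatch indexPlanTimer = Stopwatch.createStarted();
        try {
            planWithIndexes(); // stand-in for index planning
        } finally {
            // stop() freezes the value; elapsed() could also be read on a running watch
            indexPlanTimer.stop();
            System.out.printf("index_plan_info: Index Planning took %d ms%n",
                    indexPlanTimer.elapsed(TimeUnit.MILLISECONDS));
        }
    }

    private static void planWithIndexes() {
        // stand-in work
        for (int i = 0; i < 1_000; i++) {
            Math.sqrt(i);
        }
    }
}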

Example 24 with Stopwatch

use of org.apache.drill.shaded.guava.com.google.common.base.Stopwatch in project drill by apache.

the class PageReader method next.

/**
 * Read the next page in the parent column chunk
 *
 * @return true if a page was found to read
 * @throws IOException
 */
public boolean next() throws IOException {
    this.pageValueCount = -1;
    this.valuesRead = this.valuesReadyToRead = 0;
    this.parentColumnReader.currDefLevel = -1;
    long totalValueCount = columnChunkMetaData.getValueCount();
    if (parentColumnReader.totalValuesRead >= totalValueCount) {
        return false;
    }
    clearDataBufferAndReaders();
    // Continue until we hit a non-empty data page
    do {
        nextInternal();
        if (pageHeader == null) {
            throw new DrillRuntimeException(String.format("Failed to read another page having read %d of %d values from its " + "column chunk.", parentColumnReader.totalValuesRead, totalValueCount));
        }
    } while (pageHeader.uncompressed_page_size == 0
        || (pageHeader.getType() != PageType.DATA_PAGE && pageHeader.getType() != PageType.DATA_PAGE_V2));
    if (pageData == null) {
        throw new DrillRuntimeException(String.format("Failed to read another page having read %d of %d values from its " + "column chunk.", parentColumnReader.totalValuesRead, totalValueCount));
    }
    dataPageInfo = DataPageHeaderInfoProvider.builder(this.pageHeader);
    this.byteLength = this.pageHeader.uncompressed_page_size;
    this.pageValueCount = dataPageInfo.getNumValues();
    Stopwatch timer = Stopwatch.createStarted();
    // readPosInBytes is used for actually reading the values after we determine how many will fit in the vector
    // readyToReadPosInBytes serves a similar purpose for the vector types where we must count up the values that will
    // fit one record at a time, such as for variable length data. Both operations must start in the same location after the
    // definition and repetition level data which is stored alongside the page data itself
    this.readyToReadPosInBytes = this.readPosInBytes = decodeLevels();
    Encoding valueEncoding = METADATA_CONVERTER.getEncoding(dataPageInfo.getEncoding());
    parentColumnReader.usingDictionary = valueEncoding.usesDictionary();
    long timeDecode = timer.elapsed(TimeUnit.NANOSECONDS);
    stats.numDataPagesDecoded.incrementAndGet();
    stats.timeDataPageDecode.addAndGet(timeDecode);
    return true;
}
Also used : Stopwatch(org.apache.drill.shaded.guava.com.google.common.base.Stopwatch) Encoding(org.apache.parquet.column.Encoding) DrillRuntimeException(org.apache.drill.common.exceptions.DrillRuntimeException)
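
Here the decode time is measured in nanoseconds and folded into shared stats counters. Below is a minimal sketch of that accumulate-into-AtomicLong pattern, assuming plain Guava; the counter names and the decodePage body are placeholders, not the reader's actual stats fields.

import com.google.common.base.Stopwatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;

public class DecodeStatsSketch {

    // Illustrative counters standing in for per-operator stats.
    static final AtomicLong pagesDecoded = new AtomicLong();
    static final AtomicLong decodeNanos = new AtomicLong();

    static void decodePage() {
        Stopwatch timer = Stopwatch.createStarted();
        // ... decode definition/repetition levels and page values here ...
        long elapsed = timer.elapsed(TimeUnit.NANOSECONDS);
        pagesDecoded.incrementAndGet();
        decodeNanos.addAndGet(elapsed);
    }

    public static void main(String[] args) {
        for (int i = 0; i < 3; i++) {
            decodePage();
        }
        System.out.printf("decoded %d pages in %d ns%n", pagesDecoded.get(), decodeNanos.get());
    }
}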

Example 25 with Stopwatch

use of org.apache.drill.shaded.guava.com.google.common.base.Stopwatch in project drill by apache.

the class PageReader method readUncompressedPage.

/**
 * Reads an uncompressed Parquet page without copying the buffer returned by the backing input stream.
 * @return uncompressed Parquet page data
 * @throws IOException
 */
protected DrillBuf readUncompressedPage() throws IOException {
    int outputSize = pageHeader.getUncompressed_page_size();
    long start = dataReader.getPos();
    Stopwatch timer = Stopwatch.createStarted();
    DrillBuf outputPageData = dataReader.getNext(outputSize);
    long timeToRead = timer.elapsed(TimeUnit.NANOSECONDS);
    if (logger.isTraceEnabled()) {
        logger.trace("Col: {}  readPos: {}  Uncompressed_size: {}  pageData: {}", columnChunkMetaData.toString(), dataReader.getPos(), outputSize, ByteBufUtil.hexDump(outputPageData));
    }
    this.updateStats(pageHeader, "Page Read", start, timeToRead, outputSize, outputSize);
    return outputPageData;
}
Also used : Stopwatch(org.apache.drill.shaded.guava.com.google.common.base.Stopwatch) DrillBuf(io.netty.buffer.DrillBuf)
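
The trace statement above builds a hex dump of the whole page, which is expensive, so it sits behind isTraceEnabled(); the read itself is timed regardless. A sketch of that guard pattern using SLF4J and plain Guava follows; the readPage body and hex helper are stand-ins, not PageReader's code.

import com.google.common.base.Stopwatch;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class GuardedTraceSketch {

    private static final Logger logger = LoggerFactory.getLogger(GuardedTraceSketch.class);

    static byte[] readPage() {
        Stopwatch timer = Stopwatch.createStarted();
        byte[] page = "stand-in page bytes".getBytes(StandardCharsets.UTF_8); // placeholder read
        long timeToRead = timer.elapsed(TimeUnit.NANOSECONDS);

        // The hex dump is costly to build, so only pay for it when trace logging is on.
        if (logger.isTraceEnabled()) {
            logger.trace("read {} bytes in {} ns: {}", page.length, timeToRead, hex(page));
        }
        return page;
    }

    private static String hex(byte[] bytes) {
        StringBuilder sb = new StringBuilder();
        for (byte b : bytes) {
            sb.append(String.format("%02x", b));
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        readPage();
    }
}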

Aggregations

Stopwatch (org.apache.drill.shaded.guava.com.google.common.base.Stopwatch) 68
IOException (java.io.IOException) 13
Path (org.apache.hadoop.fs.Path) 12
ArrayList (java.util.ArrayList) 8
DrillRuntimeException (org.apache.drill.common.exceptions.DrillRuntimeException) 8
FileStatus (org.apache.hadoop.fs.FileStatus) 8
DrillBuf (io.netty.buffer.DrillBuf) 7
ByteBuffer (java.nio.ByteBuffer) 7
SchemaPath (org.apache.drill.common.expression.SchemaPath) 7
HashMap (java.util.HashMap) 5
RelNode (org.apache.calcite.rel.RelNode) 5
SchemaChangeException (org.apache.drill.exec.exception.SchemaChangeException) 4
DrillbitEndpoint (org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint) 4
VectorContainer (org.apache.drill.exec.record.VectorContainer) 4
SelectionVector4 (org.apache.drill.exec.record.selection.SelectionVector4) 4
ValueVector (org.apache.drill.exec.vector.ValueVector) 4
CompressionCodecName (org.apache.parquet.hadoop.metadata.CompressionCodecName) 4
File (java.io.File) 3
ResultSet (java.sql.ResultSet) 3
ResultSetMetaData (java.sql.ResultSetMetaData) 3