
Example 31 with MutationState

Use of org.apache.phoenix.execute.MutationState in project phoenix by apache.

The class PhoenixMetricsSink, method putMetrics.

/**
     * Add a new metric record to be written.
     *
     * @param record
     */
@Override
public void putMetrics(MetricsRecord record) {
    // not a tracing record, we are done. This could also be handled by filters,
    // but it is safer to check here, in case the filters get misconfigured
    if (!record.name().startsWith(TracingUtils.METRIC_SOURCE_KEY)) {
        return;
    }
    // don't initialize until we actually have something to write
    lazyInitialize();
    String stmt = "UPSERT INTO " + table + " (";
    // drop it into the queue of things that should be written
    List<String> keys = new ArrayList<String>();
    List<Object> values = new ArrayList<Object>();
    // we need to keep variable values in a separate set since they may have spaces, which
    // causes the parser to barf. Instead, we need to add them after the statement is prepared
    List<String> variableValues = new ArrayList<String>(record.tags().size());
    keys.add(TRACE.columnName);
    values.add(Long.parseLong(record.name().substring(TracingUtils.METRIC_SOURCE_KEY.length())));
    keys.add(DESCRIPTION.columnName);
    values.add(VARIABLE_VALUE);
    variableValues.add(record.description());
    // add each of the metrics
    for (AbstractMetric metric : record.metrics()) {
        // name of the metric is also the column name to which we write
        keys.add(MetricInfo.getColumnName(metric.name()));
        values.add(metric.value());
    }
    // get the tags out so we can set them later (otherwise, need to be a single value)
    int annotationCount = 0;
    int tagCount = 0;
    for (MetricsTag tag : record.tags()) {
        if (tag.name().equals(ANNOTATION.traceName)) {
            addDynamicEntry(keys, values, variableValues, ANNOTATION_FAMILY, tag, ANNOTATION, annotationCount);
            annotationCount++;
        } else if (tag.name().equals(TAG.traceName)) {
            addDynamicEntry(keys, values, variableValues, TAG_FAMILY, tag, TAG, tagCount);
            tagCount++;
        } else if (tag.name().equals(HOSTNAME.traceName)) {
            keys.add(HOSTNAME.columnName);
            values.add(VARIABLE_VALUE);
            variableValues.add(tag.value());
        } else if (tag.name().equals("Context")) {
        // ignored
        } else {
            LOG.error("Got an unexpected tag: " + tag);
        }
    }
    // add the tag count, now that we know it
    keys.add(TAG_COUNT);
    // ignore the hostname in the tags, if we know it
    values.add(tagCount);
    keys.add(ANNOTATION_COUNT);
    values.add(annotationCount);
    // compile the statement together
    stmt += COMMAS.join(keys);
    stmt += ") VALUES (" + COMMAS.join(values) + ")";
    if (LOG.isTraceEnabled()) {
        LOG.trace("Logging metrics to phoenix table via: " + stmt);
        LOG.trace("With tags: " + variableValues);
    }
    try {
        PreparedStatement ps = conn.prepareStatement(stmt);
        // add everything that wouldn't/may not parse
        int index = 1;
        for (String tag : variableValues) {
            ps.setString(index++, tag);
        }
        // Not going through the standard route of using statement.execute() as that code path
        // is blocked if the metadata hasn't been upgraded to the new minor release.
        MutationPlan plan = ps.unwrap(PhoenixPreparedStatement.class).compileMutation(stmt);
        MutationState state = conn.unwrap(PhoenixConnection.class).getMutationState();
        MutationState newState = plan.execute();
        state.join(newState);
    } catch (SQLException e) {
        LOG.error("Could not write metric: \n" + record + " to prepared statement:\n" + stmt, e);
    }
}
Also used : PhoenixConnection(org.apache.phoenix.jdbc.PhoenixConnection) SQLException(java.sql.SQLException) ArrayList(java.util.ArrayList) AbstractMetric(org.apache.hadoop.metrics2.AbstractMetric) PreparedStatement(java.sql.PreparedStatement) PhoenixPreparedStatement(org.apache.phoenix.jdbc.PhoenixPreparedStatement) MetricsTag(org.apache.hadoop.metrics2.MetricsTag) MutationPlan(org.apache.phoenix.compile.MutationPlan) MutationState(org.apache.phoenix.execute.MutationState)
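
The snippet above intentionally bypasses PreparedStatement.execute(): it compiles the UPSERT into a MutationPlan, executes the plan to get a fresh MutationState, and joins that into the connection's existing MutationState, so the row is only buffered. Nothing reaches HBase until the connection commits. Below is a minimal sketch of that final flush step, assuming the sink simply commits (and on failure rolls back) the buffered mutations; the class and method names are illustrative, not the actual sink API.

import java.sql.Connection;
import java.sql.SQLException;

// Illustrative flush step for the pattern above: every putMetrics() call
// joins its MutationState into the connection, and a periodic flush pushes
// the whole batch of trace rows to HBase in one round trip.
final class MetricsFlusher {

    private final Connection conn;

    MetricsFlusher(Connection conn) {
        this.conn = conn;
    }

    void flush() {
        try {
            // commits everything previously join()-ed into the connection's MutationState
            conn.commit();
        } catch (SQLException e) {
            try {
                // drop the buffered mutations rather than retrying indefinitely
                conn.rollback();
            } catch (SQLException ignored) {
                // nothing more to do; the next flush starts from a clean state
            }
        }
    }
}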

Example 32 with MutationState

Use of org.apache.phoenix.execute.MutationState in project phoenix by apache.

The class PhoenixIndexImportDirectMapper, method map.

@Override
protected void map(NullWritable key, PhoenixIndexDBWritable record, Context context) throws IOException, InterruptedException {
    try {
        final List<Object> values = record.getValues();
        indxWritable.setValues(values);
        indxWritable.write(this.pStatement);
        this.pStatement.execute();
        final PhoenixConnection pconn = connection.unwrap(PhoenixConnection.class);
        MutationState currentMutationState = pconn.getMutationState();
        if (mutationState == null) {
            mutationState = currentMutationState;
        }
        // Keep accumulating Mutations till batch size
        mutationState.join(currentMutationState);
        // Write Mutation Batch
        if (context.getCounter(PhoenixJobCounters.INPUT_RECORDS).getValue() % batchSize == 0) {
            writeBatch(mutationState, context);
            mutationState = null;
        }
        // Make sure progress is reported to Application Master.
        context.progress();
    } catch (SQLException e) {
        LOG.error(" Error {}  while read/write of a record ", e.getMessage());
        context.getCounter(PhoenixJobCounters.FAILED_RECORDS).increment(1);
        throw new RuntimeException(e);
    }
    context.getCounter(PhoenixJobCounters.INPUT_RECORDS).increment(1);
}
Also used : PhoenixConnection(org.apache.phoenix.jdbc.PhoenixConnection) MutationState(org.apache.phoenix.execute.MutationState) SQLException(java.sql.SQLException)
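
The mapper batches by join()-ing each record's MutationState into an accumulator and only flushing once every batchSize input records. writeBatch itself is not part of this excerpt; the sketch below is a hedged guess at its shape, assuming this version of MutationState exposes toMutations(boolean) to drain the buffered rows as HBase Mutations grouped by physical table. The connection field, the OUTPUT_RECORDS counter, and the actual table writer are likewise assumptions.

// Illustrative only: drain the accumulated MutationState into raw HBase
// Mutations grouped by table, hand them to the job's table writer, and
// clear the buffered state before the next batch.
// assumed imports: java.io.IOException, java.util.Iterator, java.util.List,
// org.apache.hadoop.hbase.client.Mutation, org.apache.hadoop.hbase.util.Pair
private void writeBatch(MutationState mutationState, Context context) throws SQLException, IOException, InterruptedException {
    Iterator<Pair<byte[], List<Mutation>>> iterator = mutationState.toMutations(true);
    while (iterator.hasNext()) {
        Pair<byte[], List<Mutation>> tableMutations = iterator.next();
        List<Mutation> mutations = tableMutations.getSecond();
        // the Puts/Deletes for tableMutations.getFirst() would be written to
        // HBase here via the job's table writer (omitted from this excerpt)
        context.getCounter(PhoenixJobCounters.OUTPUT_RECORDS).increment(mutations.size());
    }
    // the mutations are written outside the Phoenix commit path, so roll back
    // the connection to discard the buffered state
    connection.rollback();
}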

Example 33 with MutationState

Use of org.apache.phoenix.execute.MutationState in project phoenix by apache.

The class TransactionUtil, method getTableTimestamp.

public static Long getTableTimestamp(PhoenixConnection connection, boolean transactional) throws SQLException {
    Long timestamp = null;
    if (!transactional) {
        return timestamp;
    }
    MutationState mutationState = connection.getMutationState();
    if (!mutationState.isTransactionStarted()) {
        mutationState.startTransaction();
    }
    timestamp = convertToMilliseconds(mutationState.getInitialWritePointer());
    return timestamp;
}
Also used : MutationState(org.apache.phoenix.execute.MutationState)
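
A short usage sketch for the helper above: callers pass the PhoenixConnection plus whether the table is transactional. Non-transactional tables get null back; transactional ones get the transaction's initial write pointer, started on demand and converted to milliseconds. The connection and table variables below are assumed context (a JDBC connection and a resolved PTable), not part of this excerpt.

// Hypothetical caller deriving the timestamp to stamp onto metadata
// mutations for a transactional table.
PhoenixConnection pconn = connection.unwrap(PhoenixConnection.class);
Long ts = TransactionUtil.getTableTimestamp(pconn, table.isTransactional());
// for a transactional table the transaction is now started on pconn's
// MutationState and ts holds the initial write pointer in milliseconds;
// otherwise ts is null and the caller uses its normal timestamp handling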

Aggregations

MutationState (org.apache.phoenix.execute.MutationState) 33
PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection) 12
PLong (org.apache.phoenix.schema.types.PLong) 12
PUnsignedLong (org.apache.phoenix.schema.types.PUnsignedLong) 11
MetaDataMutationResult (org.apache.phoenix.coprocessor.MetaDataProtocol.MetaDataMutationResult) 10
MutationPlan (org.apache.phoenix.compile.MutationPlan) 9
Mutation (org.apache.hadoop.hbase.client.Mutation) 8
Scan (org.apache.hadoop.hbase.client.Scan) 8
MutationCode (org.apache.phoenix.coprocessor.MetaDataProtocol.MutationCode) 8
SQLExceptionInfo (org.apache.phoenix.exception.SQLExceptionInfo) 8
SQLException (java.sql.SQLException) 7
PTable (org.apache.phoenix.schema.PTable) 7
PostDDLCompiler (org.apache.phoenix.compile.PostDDLCompiler) 6
PhoenixStatement (org.apache.phoenix.jdbc.PhoenixStatement) 6
PreparedStatement (java.sql.PreparedStatement) 5
ArrayList (java.util.ArrayList) 5
List (java.util.List) 5
ImmutableBytesWritable (org.apache.hadoop.hbase.io.ImmutableBytesWritable) 5
ColumnResolver (org.apache.phoenix.compile.ColumnResolver) 5
HashMap (java.util.HashMap) 4