
Example 1 with ScheduledQuery._Fields

Use of org.apache.hadoop.hive.metastore.api.ScheduledQuery._Fields in project hive by apache.

Class QTestScheduledQueryCleaner, method clearScheduledQueries.

private void clearScheduledQueries(HiveConf conf) {
    if (System.getenv(QTestUtil.QTEST_LEAVE_FILES) != null) {
        return;
    }
    Set<String> scheduledQueries = ScheduledQueryCreationRegistryHook.getSchedules();
    for (String name : scheduledQueries) {
        ScheduledQueryMaintenanceRequest request = new ScheduledQueryMaintenanceRequest();
        request.setType(ScheduledQueryMaintenanceRequestType.DROP);
        ScheduledQuery schq = new ScheduledQuery();
        schq.setScheduleKey(new ScheduledQueryKey(name, conf.getVar(ConfVars.HIVE_SCHEDULED_QUERIES_NAMESPACE)));
        request.setScheduledQuery(schq);
        try {
            // propagate new conf to meta store
            Hive db = Hive.get(conf);
            db.getMSC().scheduledQueryMaintenance(request);
            db.close(false);
        } catch (Exception e) {
            LOG.error("Can't remove scheduled query: " + name + " " + e.getMessage());
        }
    }
    scheduledQueries.clear();
}
Also used : Hive(org.apache.hadoop.hive.ql.metadata.Hive) ScheduledQueryMaintenanceRequest(org.apache.hadoop.hive.metastore.api.ScheduledQueryMaintenanceRequest) ScheduledQueryKey(org.apache.hadoop.hive.metastore.api.ScheduledQueryKey) ScheduledQuery(org.apache.hadoop.hive.metastore.api.ScheduledQuery)

Example 2 with ScheduledQuery._Fields

Use of org.apache.hadoop.hive.metastore.api.ScheduledQuery._Fields in project hive by apache.

Class ScheduledQueryAnalyzer, method interpretAstNode.

private ScheduledQuery interpretAstNode(ASTNode ast) throws SemanticException {
    // child0 is the schedule name
    String scheduleName = ast.getChild(0).getText();
    String clusterNamespace = conf.getVar(ConfVars.HIVE_SCHEDULED_QUERIES_NAMESPACE);
    LOG.info("scheduled query namespace:" + clusterNamespace);
    ScheduledQueryKey key = new ScheduledQueryKey(scheduleName, clusterNamespace);
    ScheduledQuery ret = new ScheduledQuery(key);
    // child 1..n are arguments/options/etc
    for (int i = 1; i < ast.getChildCount(); i++) {
        processScheduledQueryAstNode(ret, (ASTNode) ast.getChild(i));
    }
    return ret;
}
Also used : ScheduledQueryKey(org.apache.hadoop.hive.metastore.api.ScheduledQueryKey) ScheduledQuery(org.apache.hadoop.hive.metastore.api.ScheduledQuery)
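
For context, here is a minimal sketch (not part of the Hive sources shown here) of how a ScheduledQuery built by interpretAstNode is typically handed to the metastore, reusing the maintenance-request pattern from Examples 1 and 4; db and ast stand in for the analyzer's metastore handle and the parsed statement.

ScheduledQuery schq = interpretAstNode(ast);

ScheduledQueryMaintenanceRequest request = new ScheduledQueryMaintenanceRequest();
request.setType(ScheduledQueryMaintenanceRequestType.CREATE);
request.setScheduledQuery(schq);
// same metastore client call as in Example 1
db.getMSC().scheduledQueryMaintenance(request);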

Example 3 with ScheduledQuery._Fields

Use of org.apache.hadoop.hive.metastore.api.ScheduledQuery._Fields in project hive by apache.

Class ScheduledQueryAnalyzer, method composeOverlayObject.

/**
 * Composes an overlay object.
 *
 * The output is a flattened view of the input objects, taking each field's
 * value from the first overlay that has it set.
 */
private ScheduledQuery composeOverlayObject(ScheduledQuery... overlays) {
    ScheduledQuery ret = new ScheduledQuery();
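    // iterate over every field of the ScheduledQuery struct via the Thrift-generated _Fields enum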
    _Fields[] q = ScheduledQuery._Fields.values();
    for (_Fields field : q) {
        for (ScheduledQuery o : overlays) {
            if (o.isSet(field)) {
                ret.setFieldValue(field, o.getFieldValue(field));
                break;
            }
        }
    }
    return ret;
}
Also used : ScheduledQuery._Fields(org.apache.hadoop.hive.metastore.api.ScheduledQuery._Fields) ScheduledQuery(org.apache.hadoop.hive.metastore.api.ScheduledQuery)
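
To make the overlay semantics concrete, here is a minimal, hypothetical usage sketch (not part of the Hive sources), assuming the Thrift-generated setters on ScheduledQuery that appear elsewhere on this page; the query strings, user name and cron expression are made up for illustration.

ScheduledQuery changes = new ScheduledQuery();
// only the QUERY field is set on this overlay
changes.setQuery("select 1");

ScheduledQuery stored = new ScheduledQuery();
stored.setQuery("select 2");
stored.setUser("hive");
stored.setSchedule("0 */10 * * * ? *");

ScheduledQuery merged = composeOverlayObject(changes, stored);
// merged.getQuery()    -> "select 1"          (first overlay that sets QUERY wins)
// merged.getUser()     -> "hive"              (falls through to the stored object)
// merged.getSchedule() -> "0 */10 * * * ? *"  (likewise taken from the stored object)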

Example 4 with ScheduledQuery._Fields

Use of org.apache.hadoop.hive.metastore.api.ScheduledQuery._Fields in project hive by apache.

Class ScheduledQueryAnalyzer, method fillScheduledQuery.

private ScheduledQuery fillScheduledQuery(ScheduledQueryMaintenanceRequestType type, ScheduledQuery schqChanges) throws SemanticException {
    if (type == ScheduledQueryMaintenanceRequestType.CREATE) {
        return composeOverlayObject(schqChanges, buildEmptySchq());
    } else {
        try {
            ScheduledQuery schqStored = db.getMSC().getScheduledQuery(schqChanges.getScheduleKey());
            if (schqChanges.isSetUser()) {
                // if the owning user is being changed, run an authorization check first
                checkAuthorization(type, schqStored);
            }
            // clear the next execution time
            schqStored.setNextExecutionIsSet(false);
            return composeOverlayObject(schqChanges, schqStored);
        } catch (TException e) {
            throw new SemanticException("unable to get Scheduled query" + e);
        }
    }
}
Also used : TException(org.apache.thrift.TException) ScheduledQuery(org.apache.hadoop.hive.metastore.api.ScheduledQuery)
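
As an aside, the isSetUser() check above has a generic counterpart based on the _Fields enum; a small, hypothetical sketch of both forms follows (the user name is invented, and the generic accessors are the ones used in Example 3).

ScheduledQuery changes = new ScheduledQuery();
changes.setUser("alice");  // hypothetical user name, for illustration only

boolean viaNamedAccessor = changes.isSetUser();                         // true
boolean viaFieldsEnum = changes.isSet(ScheduledQuery._Fields.USER);     // also true
Object userValue = changes.getFieldValue(ScheduledQuery._Fields.USER);  // "alice"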

Example 5 with ColumnStatisticsData._Fields

Use of org.apache.hadoop.hive.metastore.api.ColumnStatisticsData._Fields in project hive by apache.

Class StatObjectConverter, method setFieldsIntoOldStats.

/**
 * Sets field values in oldStatObj from newStatObj, for every field that newStatObj has set.
 * @param oldStatObj the statistics object to update in place
 * @param newStatObj the statistics object supplying the new values
 */
public static void setFieldsIntoOldStats(ColumnStatisticsObj oldStatObj, ColumnStatisticsObj newStatObj) {
    _Fields typeNew = newStatObj.getStatsData().getSetField();
    _Fields typeOld = oldStatObj.getStatsData().getSetField();
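    // keep typeNew only if both stats objects carry the same union branch; otherwise there is nothing to merge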
    typeNew = typeNew == typeOld ? typeNew : null;
    switch(typeNew) {
        case BOOLEAN_STATS:
            BooleanColumnStatsData oldBooleanStatsData = oldStatObj.getStatsData().getBooleanStats();
            BooleanColumnStatsData newBooleanStatsData = newStatObj.getStatsData().getBooleanStats();
            if (newBooleanStatsData.isSetNumTrues()) {
                oldBooleanStatsData.setNumTrues(newBooleanStatsData.getNumTrues());
            }
            if (newBooleanStatsData.isSetNumFalses()) {
                oldBooleanStatsData.setNumFalses(newBooleanStatsData.getNumFalses());
            }
            if (newBooleanStatsData.isSetNumNulls()) {
                oldBooleanStatsData.setNumNulls(newBooleanStatsData.getNumNulls());
            }
            if (newBooleanStatsData.isSetBitVectors()) {
                oldBooleanStatsData.setBitVectors(newBooleanStatsData.getBitVectors());
            }
            break;
        case LONG_STATS:
            {
                LongColumnStatsData oldLongStatsData = oldStatObj.getStatsData().getLongStats();
                LongColumnStatsData newLongStatsData = newStatObj.getStatsData().getLongStats();
                if (newLongStatsData.isSetHighValue()) {
                    oldLongStatsData.setHighValue(newLongStatsData.getHighValue());
                }
                if (newLongStatsData.isSetLowValue()) {
                    oldLongStatsData.setLowValue(newLongStatsData.getLowValue());
                }
                if (newLongStatsData.isSetNumNulls()) {
                    oldLongStatsData.setNumNulls(newLongStatsData.getNumNulls());
                }
                if (newLongStatsData.isSetNumDVs()) {
                    oldLongStatsData.setNumDVs(newLongStatsData.getNumDVs());
                }
                if (newLongStatsData.isSetBitVectors()) {
                    oldLongStatsData.setBitVectors(newLongStatsData.getBitVectors());
                }
                break;
            }
        case DOUBLE_STATS:
            {
                DoubleColumnStatsData oldDoubleStatsData = oldStatObj.getStatsData().getDoubleStats();
                DoubleColumnStatsData newDoubleStatsData = newStatObj.getStatsData().getDoubleStats();
                if (newDoubleStatsData.isSetHighValue()) {
                    oldDoubleStatsData.setHighValue(newDoubleStatsData.getHighValue());
                }
                if (newDoubleStatsData.isSetLowValue()) {
                    oldDoubleStatsData.setLowValue(newDoubleStatsData.getLowValue());
                }
                if (newDoubleStatsData.isSetNumNulls()) {
                    oldDoubleStatsData.setNumNulls(newDoubleStatsData.getNumNulls());
                }
                if (newDoubleStatsData.isSetNumDVs()) {
                    oldDoubleStatsData.setNumDVs(newDoubleStatsData.getNumDVs());
                }
                if (newDoubleStatsData.isSetBitVectors()) {
                    oldDoubleStatsData.setBitVectors(newDoubleStatsData.getBitVectors());
                }
                break;
            }
        case STRING_STATS:
            {
                StringColumnStatsData oldStringStatsData = oldStatObj.getStatsData().getStringStats();
                StringColumnStatsData newStringStatsData = newStatObj.getStatsData().getStringStats();
                if (newStringStatsData.isSetMaxColLen()) {
                    oldStringStatsData.setMaxColLen(newStringStatsData.getMaxColLen());
                }
                if (newStringStatsData.isSetAvgColLen()) {
                    oldStringStatsData.setAvgColLen(newStringStatsData.getAvgColLen());
                }
                if (newStringStatsData.isSetNumNulls()) {
                    oldStringStatsData.setNumNulls(newStringStatsData.getNumNulls());
                }
                if (newStringStatsData.isSetNumDVs()) {
                    oldStringStatsData.setNumDVs(newStringStatsData.getNumDVs());
                }
                if (newStringStatsData.isSetBitVectors()) {
                    oldStringStatsData.setBitVectors(newStringStatsData.getBitVectors());
                }
                break;
            }
        case BINARY_STATS:
            BinaryColumnStatsData oldBinaryStatsData = oldStatObj.getStatsData().getBinaryStats();
            BinaryColumnStatsData newBinaryStatsData = newStatObj.getStatsData().getBinaryStats();
            if (newBinaryStatsData.isSetMaxColLen()) {
                oldBinaryStatsData.setMaxColLen(newBinaryStatsData.getMaxColLen());
            }
            if (newBinaryStatsData.isSetAvgColLen()) {
                oldBinaryStatsData.setAvgColLen(newBinaryStatsData.getAvgColLen());
            }
            if (newBinaryStatsData.isSetNumNulls()) {
                oldBinaryStatsData.setNumNulls(newBinaryStatsData.getNumNulls());
            }
            if (newBinaryStatsData.isSetBitVectors()) {
                oldBinaryStatsData.setBitVectors(newBinaryStatsData.getBitVectors());
            }
            break;
        case DECIMAL_STATS:
            {
                DecimalColumnStatsData oldDecimalStatsData = oldStatObj.getStatsData().getDecimalStats();
                DecimalColumnStatsData newDecimalStatsData = newStatObj.getStatsData().getDecimalStats();
                if (newDecimalStatsData.isSetHighValue()) {
                    oldDecimalStatsData.setHighValue(newDecimalStatsData.getHighValue());
                }
                if (newDecimalStatsData.isSetLowValue()) {
                    oldDecimalStatsData.setLowValue(newDecimalStatsData.getLowValue());
                }
                if (newDecimalStatsData.isSetNumNulls()) {
                    oldDecimalStatsData.setNumNulls(newDecimalStatsData.getNumNulls());
                }
                if (newDecimalStatsData.isSetNumDVs()) {
                    oldDecimalStatsData.setNumDVs(newDecimalStatsData.getNumDVs());
                }
                if (newDecimalStatsData.isSetBitVectors()) {
                    oldDecimalStatsData.setBitVectors(newDecimalStatsData.getBitVectors());
                }
                break;
            }
        case DATE_STATS:
            {
                DateColumnStatsData oldDateStatsData = oldStatObj.getStatsData().getDateStats();
                DateColumnStatsData newDateStatsData = newStatObj.getStatsData().getDateStats();
                if (newDateStatsData.isSetHighValue()) {
                    oldDateStatsData.setHighValue(newDateStatsData.getHighValue());
                }
                if (newDateStatsData.isSetLowValue()) {
                    oldDateStatsData.setLowValue(newDateStatsData.getLowValue());
                }
                if (newDateStatsData.isSetNumNulls()) {
                    oldDateStatsData.setNumNulls(newDateStatsData.getNumNulls());
                }
                if (newDateStatsData.isSetNumDVs()) {
                    oldDateStatsData.setNumDVs(newDateStatsData.getNumDVs());
                }
                if (newDateStatsData.isSetBitVectors()) {
                    oldDateStatsData.setBitVectors(newDateStatsData.getBitVectors());
                }
                break;
            }
        case TIMESTAMP_STATS:
            {
                TimestampColumnStatsData oldTimestampStatsData = oldStatObj.getStatsData().getTimestampStats();
                TimestampColumnStatsData newTimestampStatsData = newStatObj.getStatsData().getTimestampStats();
                if (newTimestampStatsData.isSetHighValue()) {
                    oldTimestampStatsData.setHighValue(newTimestampStatsData.getHighValue());
                }
                if (newTimestampStatsData.isSetLowValue()) {
                    oldTimestampStatsData.setLowValue(newTimestampStatsData.getLowValue());
                }
                if (newTimestampStatsData.isSetNumNulls()) {
                    oldTimestampStatsData.setNumNulls(newTimestampStatsData.getNumNulls());
                }
                if (newTimestampStatsData.isSetNumDVs()) {
                    oldTimestampStatsData.setNumDVs(newTimestampStatsData.getNumDVs());
                }
                if (newTimestampStatsData.isSetBitVectors()) {
                    oldTimestampStatsData.setBitVectors(newTimestampStatsData.getBitVectors());
                }
                break;
            }
        default:
            throw new IllegalArgumentException("Unknown stats type: " + typeNew.toString());
    }
}
Also used : BooleanColumnStatsData(org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData) DoubleColumnStatsData(org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData) DecimalColumnStatsData(org.apache.hadoop.hive.metastore.api.DecimalColumnStatsData) ColumnStatisticsData._Fields(org.apache.hadoop.hive.metastore.api.ColumnStatisticsData._Fields) DateColumnStatsData(org.apache.hadoop.hive.metastore.api.DateColumnStatsData) StringColumnStatsData(org.apache.hadoop.hive.metastore.api.StringColumnStatsData) LongColumnStatsData(org.apache.hadoop.hive.metastore.api.LongColumnStatsData) TimestampColumnStatsData(org.apache.hadoop.hive.metastore.api.TimestampColumnStatsData) BinaryColumnStatsData(org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData)
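
For context, here is a minimal sketch (assumed, not taken from the Hive sources) of the Thrift union pattern that drives the switch above: ColumnStatisticsData is a generated union, and getSetField() reports which branch is populated as a ColumnStatisticsData._Fields value.

ColumnStatisticsData data = new ColumnStatisticsData();
LongColumnStatsData longStats = new LongColumnStatsData(0L, 50L);  // numNulls, numDVs (required fields)
longStats.setLowValue(1L);
longStats.setHighValue(100L);
data.setLongStats(longStats);

ColumnStatisticsObj statsObj = new ColumnStatisticsObj("id", "bigint", data);
ColumnStatisticsData._Fields branch = statsObj.getStatsData().getSetField();
// branch == ColumnStatisticsData._Fields.LONG_STATS, so setFieldsIntoOldStats
// would take the LONG_STATS case when merging this object into an older one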

Aggregations

ScheduledQuery (org.apache.hadoop.hive.metastore.api.ScheduledQuery) 23
ScheduledQueryMaintenanceRequest (org.apache.hadoop.hive.metastore.api.ScheduledQueryMaintenanceRequest) 13
MetastoreUnitTest (org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest) 11
Test (org.junit.Test) 11
ScheduledQueryKey (org.apache.hadoop.hive.metastore.api.ScheduledQueryKey) 10
ScheduledQueryPollRequest (org.apache.hadoop.hive.metastore.api.ScheduledQueryPollRequest) 5
ScheduledQueryPollResponse (org.apache.hadoop.hive.metastore.api.ScheduledQueryPollResponse) 5
MScheduledExecution (org.apache.hadoop.hive.metastore.model.MScheduledExecution) 4
PersistenceManager (javax.jdo.PersistenceManager) 3
ColumnStatisticsData._Fields (org.apache.hadoop.hive.metastore.api.ColumnStatisticsData._Fields) 3
Feature (edu.berkeley.cs.amplab.carat.thrift.Feature) 2
ProcessInfo (edu.berkeley.cs.amplab.carat.thrift.ProcessInfo) 2
Sample._Fields (edu.berkeley.cs.amplab.carat.thrift.Sample._Fields) 2
HashMap (java.util.HashMap) 2
LinkedList (java.util.LinkedList) 2
List (java.util.List) 2
ObjectStore (org.apache.hadoop.hive.metastore.ObjectStore) 2
ScheduledQueryProgressInfo (org.apache.hadoop.hive.metastore.api.ScheduledQueryProgressInfo) 2
FieldMetaData (org.apache.thrift.meta_data.FieldMetaData) 2
BatteryDetails (edu.berkeley.cs.amplab.carat.thrift.BatteryDetails) 1