Example 6 with Serializable

Use of java.io.Serializable in project hive by apache.

The class GenMapRedUtils, method joinUnionPlan.

/**
 * Joins the current union task into an existing (old) task.
 */
public static void joinUnionPlan(GenMRProcContext opProcCtx, UnionOperator currUnionOp, Task<? extends Serializable> currentUnionTask, Task<? extends Serializable> existingTask, boolean local) throws SemanticException {
    assert currUnionOp != null;
    GenMRUnionCtx uCtx = opProcCtx.getUnionTask(currUnionOp);
    assert uCtx != null;
    setUnionPlan(opProcCtx, local, existingTask, uCtx, true);
    List<Task<? extends Serializable>> parTasks = null;
    if (opProcCtx.getRootTasks().contains(currentUnionTask)) {
        opProcCtx.getRootTasks().remove(currentUnionTask);
        if (!opProcCtx.getRootTasks().contains(existingTask) && (existingTask.getParentTasks() == null || existingTask.getParentTasks().isEmpty())) {
            opProcCtx.getRootTasks().add(existingTask);
        }
    }
    if ((currentUnionTask != null) && (currentUnionTask.getParentTasks() != null) && !currentUnionTask.getParentTasks().isEmpty()) {
        parTasks = new ArrayList<Task<? extends Serializable>>();
        parTasks.addAll(currentUnionTask.getParentTasks());
        Object[] parTaskArr = parTasks.toArray();
        for (Object parTask : parTaskArr) {
            ((Task<? extends Serializable>) parTask).removeDependentTask(currentUnionTask);
        }
    }
    if ((currentUnionTask != null) && (parTasks != null)) {
        for (Task<? extends Serializable> parTask : parTasks) {
            parTask.addDependentTask(existingTask);
            if (opProcCtx.getRootTasks().contains(existingTask)) {
                opProcCtx.getRootTasks().remove(existingTask);
            }
        }
    }
    opProcCtx.setCurrTask(existingTask);
}
Also used: SparkTask (org.apache.hadoop.hive.ql.exec.spark.SparkTask), ConditionalTask (org.apache.hadoop.hive.ql.exec.ConditionalTask), Task (org.apache.hadoop.hive.ql.exec.Task), MoveTask (org.apache.hadoop.hive.ql.exec.MoveTask), MapRedTask (org.apache.hadoop.hive.ql.exec.mr.MapRedTask), DependencyCollectionTask (org.apache.hadoop.hive.ql.exec.DependencyCollectionTask), Serializable (java.io.Serializable), GenMRUnionCtx (org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMRUnionCtx)
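
The interesting part of joinUnionPlan is the dependency rewiring: the current union task is detached from every parent, and those parents are re-pointed at the existing task. Below is a minimal, self-contained sketch of that pattern; SimpleTask is a hypothetical stand-in for Hive's Task class, not the real API.

import java.util.ArrayList;
import java.util.List;

// Hypothetical stand-in for Hive's Task: just enough state to show the rewiring.
class SimpleTask {
    final String name;
    final List<SimpleTask> parents = new ArrayList<>();
    final List<SimpleTask> children = new ArrayList<>();

    SimpleTask(String name) {
        this.name = name;
    }

    void addDependentTask(SimpleTask child) {
        children.add(child);
        child.parents.add(this);
    }

    void removeDependentTask(SimpleTask child) {
        children.remove(child);
        child.parents.remove(this);
    }
}

public class RewireDemo {
    public static void main(String[] args) {
        SimpleTask parent = new SimpleTask("parent");
        SimpleTask unionTask = new SimpleTask("union");
        SimpleTask existingTask = new SimpleTask("existing");
        parent.addDependentTask(unionTask);

        // Same shape as joinUnionPlan: snapshot the parents first (the lists
        // mutate during removal), detach the union task, then attach the
        // existing task in its place.
        List<SimpleTask> parTasks = new ArrayList<>(unionTask.parents);
        for (SimpleTask p : parTasks) {
            p.removeDependentTask(unionTask);
        }
        for (SimpleTask p : parTasks) {
            p.addDependentTask(existingTask);
        }
        // prints "parent": the existing task inherited the union task's parent
        System.out.println(existingTask.parents.get(0).name);
    }
}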

Example 7 with Serializable

Use of java.io.Serializable in project hive by apache.

The class MapReduceCompiler, method generateTaskTree.

@Override
protected void generateTaskTree(List<Task<? extends Serializable>> rootTasks, ParseContext pCtx, List<Task<MoveWork>> mvTask, Set<ReadEntity> inputs, Set<WriteEntity> outputs) throws SemanticException {
    // generate map reduce plans
    ParseContext tempParseContext = getParseContext(pCtx, rootTasks);
    // Use deterministic-order maps for consistent q-test output across Java versions
    GenMRProcContext procCtx = new GenMRProcContext(conf,
            new LinkedHashMap<Operator<? extends OperatorDesc>, Task<? extends Serializable>>(),
            tempParseContext, mvTask, rootTasks,
            new LinkedHashMap<Operator<? extends OperatorDesc>, GenMapRedCtx>(), inputs, outputs);
    // create a walker which walks the tree in a DFS manner while maintaining
    // the operator stack.
    // The dispatcher generates the plan from the operator tree
    Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
    opRules.put(new RuleRegExp(new String("R1"), TableScanOperator.getOperatorName() + "%"), new GenMRTableScan1());
    opRules.put(new RuleRegExp(new String("R2"), TableScanOperator.getOperatorName() + "%.*" + ReduceSinkOperator.getOperatorName() + "%"), new GenMRRedSink1());
    opRules.put(new RuleRegExp(new String("R3"), ReduceSinkOperator.getOperatorName() + "%.*" + ReduceSinkOperator.getOperatorName() + "%"), new GenMRRedSink2());
    opRules.put(new RuleRegExp(new String("R4"), FileSinkOperator.getOperatorName() + "%"), new GenMRFileSink1());
    opRules.put(new RuleRegExp(new String("R5"), UnionOperator.getOperatorName() + "%"), new GenMRUnion1());
    opRules.put(new RuleRegExp(new String("R6"), UnionOperator.getOperatorName() + "%.*" + ReduceSinkOperator.getOperatorName() + "%"), new GenMRRedSink3());
    opRules.put(new RuleRegExp(new String("R7"), MapJoinOperator.getOperatorName() + "%"), MapJoinFactory.getTableScanMapJoin());
    // The dispatcher fires the processor corresponding to the closest matching
    // rule and passes the context along
    Dispatcher disp = new DefaultRuleDispatcher(new GenMROperator(), opRules, procCtx);
    GraphWalker ogw = new GenMapRedWalker(disp);
    ArrayList<Node> topNodes = new ArrayList<Node>();
    topNodes.addAll(pCtx.getTopOps().values());
    ogw.startWalking(topNodes, null);
}
Also used: ReduceSinkOperator (org.apache.hadoop.hive.ql.exec.ReduceSinkOperator), MapJoinOperator (org.apache.hadoop.hive.ql.exec.MapJoinOperator), UnionOperator (org.apache.hadoop.hive.ql.exec.UnionOperator), FileSinkOperator (org.apache.hadoop.hive.ql.exec.FileSinkOperator), GenMROperator (org.apache.hadoop.hive.ql.optimizer.GenMROperator), TableScanOperator (org.apache.hadoop.hive.ql.exec.TableScanOperator), Operator (org.apache.hadoop.hive.ql.exec.Operator), ConditionalTask (org.apache.hadoop.hive.ql.exec.ConditionalTask), MapRedTask (org.apache.hadoop.hive.ql.exec.mr.MapRedTask), Task (org.apache.hadoop.hive.ql.exec.Task), Serializable (java.io.Serializable), Node (org.apache.hadoop.hive.ql.lib.Node), ArrayList (java.util.ArrayList), Dispatcher (org.apache.hadoop.hive.ql.lib.Dispatcher), DefaultRuleDispatcher (org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher), LinkedHashMap (java.util.LinkedHashMap), GenMRTableScan1 (org.apache.hadoop.hive.ql.optimizer.GenMRTableScan1), GenMapRedCtx (org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx), GenMRProcContext (org.apache.hadoop.hive.ql.optimizer.GenMRProcContext), GraphWalker (org.apache.hadoop.hive.ql.lib.GraphWalker), NodeProcessor (org.apache.hadoop.hive.ql.lib.NodeProcessor), GenMRUnion1 (org.apache.hadoop.hive.ql.optimizer.GenMRUnion1), GenMRFileSink1 (org.apache.hadoop.hive.ql.optimizer.GenMRFileSink1), RuleRegExp (org.apache.hadoop.hive.ql.lib.RuleRegExp), GenMRRedSink2 (org.apache.hadoop.hive.ql.optimizer.GenMRRedSink2), GenMRRedSink1 (org.apache.hadoop.hive.ql.optimizer.GenMRRedSink1), GenMRRedSink3 (org.apache.hadoop.hive.ql.optimizer.GenMRRedSink3), Rule (org.apache.hadoop.hive.ql.lib.Rule), OperatorDesc (org.apache.hadoop.hive.ql.plan.OperatorDesc)
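
generateTaskTree is a rule-driven tree walk: each RuleRegExp is a regular expression over the names of the operators on the walker's stack, and the dispatcher fires the processor of the closest matching rule, falling back to GenMROperator when nothing matches. The sketch below reproduces that shape with plain JDK types; it simplifies by firing the first regex that matches rather than Hive's cost-based choice, and every name in it is illustrative.

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.regex.Pattern;

public class RuleDispatchDemo {
    // Stand-in for Hive's NodeProcessor.
    interface Processor {
        void process(String path);
    }

    // Simplified dispatcher: fire the first rule whose regex matches the
    // operator-name path (Hive's DefaultRuleDispatcher picks the cheapest match).
    static void dispatch(String path, Map<Pattern, Processor> rules, Processor fallback) {
        for (Map.Entry<Pattern, Processor> e : rules.entrySet()) {
            if (e.getKey().matcher(path).find()) {
                e.getValue().process(path);
                return;
            }
        }
        fallback.process(path);
    }

    public static void main(String[] args) {
        // LinkedHashMap keeps rule order deterministic, as in the real code.
        Map<Pattern, Processor> rules = new LinkedHashMap<>();
        rules.put(Pattern.compile("TS%.*RS%"), p -> System.out.println("table scan feeding a reduce sink: " + p));
        rules.put(Pattern.compile("FS%"), p -> System.out.println("file sink: " + p));
        Processor fallback = p -> System.out.println("no rule matched: " + p);

        dispatch("TS%SEL%RS%", rules, fallback);  // first rule fires
        dispatch("LIM%", rules, fallback);        // fallback fires
    }
}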

Example 8 with Serializable

Use of java.io.Serializable in project hive by apache.

The class MapReduceCompiler, method setInputFormat.

// loop over all the tasks recursively
@Override
protected void setInputFormat(Task<? extends Serializable> task) {
    if (task instanceof ExecDriver) {
        MapWork work = ((MapredWork) task.getWork()).getMapWork();
        HashMap<String, Operator<? extends OperatorDesc>> opMap = work.getAliasToWork();
        if (!opMap.isEmpty()) {
            for (Operator<? extends OperatorDesc> op : opMap.values()) {
                setInputFormat(work, op);
            }
        }
    } else if (task instanceof ConditionalTask) {
        List<Task<? extends Serializable>> listTasks = ((ConditionalTask) task).getListTasks();
        for (Task<? extends Serializable> tsk : listTasks) {
            setInputFormat(tsk);
        }
    }
    if (task.getChildTasks() != null) {
        for (Task<? extends Serializable> childTask : task.getChildTasks()) {
            setInputFormat(childTask);
        }
    }
}
Also used: ReduceSinkOperator (org.apache.hadoop.hive.ql.exec.ReduceSinkOperator), MapJoinOperator (org.apache.hadoop.hive.ql.exec.MapJoinOperator), UnionOperator (org.apache.hadoop.hive.ql.exec.UnionOperator), FileSinkOperator (org.apache.hadoop.hive.ql.exec.FileSinkOperator), GenMROperator (org.apache.hadoop.hive.ql.optimizer.GenMROperator), TableScanOperator (org.apache.hadoop.hive.ql.exec.TableScanOperator), Operator (org.apache.hadoop.hive.ql.exec.Operator), ConditionalTask (org.apache.hadoop.hive.ql.exec.ConditionalTask), MapRedTask (org.apache.hadoop.hive.ql.exec.mr.MapRedTask), Task (org.apache.hadoop.hive.ql.exec.Task), Serializable (java.io.Serializable), MapWork (org.apache.hadoop.hive.ql.plan.MapWork), MapredWork (org.apache.hadoop.hive.ql.plan.MapredWork), ExecDriver (org.apache.hadoop.hive.ql.exec.mr.ExecDriver), List (java.util.List), ArrayList (java.util.ArrayList), OperatorDesc (org.apache.hadoop.hive.ql.plan.OperatorDesc)
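
setInputFormat is a plain recursive walk: handle the current task, expand the branch list of a ConditionalTask, then recurse into the children. Here is a self-contained sketch of that traversal, with a hypothetical Node type standing in for Hive's Task hierarchy.

import java.util.Arrays;
import java.util.List;

public class TaskWalkDemo {
    // Hypothetical task node: "branches" is non-empty only for conditional tasks.
    static class Node {
        final String name;
        final List<Node> branches;
        final List<Node> children;

        Node(String name, List<Node> branches, List<Node> children) {
            this.name = name;
            this.branches = branches;
            this.children = children;
        }
    }

    static void visit(Node task) {
        System.out.println("setInputFormat on " + task.name);
        for (Node branch : task.branches) {
            visit(branch);   // like recursing into ConditionalTask.getListTasks()
        }
        for (Node child : task.children) {
            visit(child);    // like recursing into task.getChildTasks()
        }
    }

    public static void main(String[] args) {
        Node move = new Node("move", Arrays.asList(), Arrays.asList());
        Node cond = new Node("conditional", Arrays.asList(move), Arrays.asList());
        Node root = new Node("mapred", Arrays.asList(), Arrays.asList(cond));
        visit(root);  // visits mapred, conditional, move
    }
}

As in the Hive method, there is no visited set, so a task reachable along two paths is processed more than once; that is presumably harmless because setting the input format is idempotent.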

Example 9 with Serializable

Use of java.io.Serializable in project hive by apache.

The class HiveInputFormat, method pushFilters.

public static void pushFilters(JobConf jobConf, TableScanOperator tableScan) {
    // ensure filters are not set from previous pushFilters
    jobConf.unset(TableScanDesc.FILTER_TEXT_CONF_STR);
    jobConf.unset(TableScanDesc.FILTER_EXPR_CONF_STR);
    Utilities.unsetSchemaEvolution(jobConf);
    TableScanDesc scanDesc = tableScan.getConf();
    if (scanDesc == null) {
        return;
    }
    Utilities.addTableSchemaToConf(jobConf, tableScan);
    // construct column name list and types for reference by filter push down
    Utilities.setColumnNameList(jobConf, tableScan);
    Utilities.setColumnTypeList(jobConf, tableScan);
    // push down filters
    ExprNodeGenericFuncDesc filterExpr = (ExprNodeGenericFuncDesc) scanDesc.getFilterExpr();
    if (filterExpr == null) {
        return;
    }
    String serializedFilterObj = scanDesc.getSerializedFilterObject();
    String serializedFilterExpr = scanDesc.getSerializedFilterExpr();
    boolean hasObj = serializedFilterObj != null, hasExpr = serializedFilterExpr != null;
    if (!hasObj) {
        Serializable filterObject = scanDesc.getFilterObject();
        if (filterObject != null) {
            serializedFilterObj = SerializationUtilities.serializeObject(filterObject);
        }
    }
    if (serializedFilterObj != null) {
        jobConf.set(TableScanDesc.FILTER_OBJECT_CONF_STR, serializedFilterObj);
    }
    if (!hasExpr) {
        serializedFilterExpr = SerializationUtilities.serializeExpression(filterExpr);
    }
    String filterText = filterExpr.getExprString();
    if (LOG.isDebugEnabled()) {
        LOG.debug("Pushdown initiated with filterText = " + filterText + ", filterExpr = " + filterExpr + ", serializedFilterExpr = " + serializedFilterExpr + " (" + (hasExpr ? "desc" : "new") + ")" + (serializedFilterObj == null ? "" : (", serializedFilterObj = " + serializedFilterObj + " (" + (hasObj ? "desc" : "new") + ")")));
    }
    jobConf.set(TableScanDesc.FILTER_TEXT_CONF_STR, filterText);
    jobConf.set(TableScanDesc.FILTER_EXPR_CONF_STR, serializedFilterExpr);
}
Also used: Serializable (java.io.Serializable), TableScanDesc (org.apache.hadoop.hive.ql.plan.TableScanDesc), ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc)
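
The pattern worth isolating here is that a Serializable filter is flattened to a string so it can ride along inside the JobConf. Hive does this with SerializationUtilities, which is Kryo-based; the sketch below shows the same round trip using nothing but the JDK (Java serialization plus Base64), purely to illustrate the idea, not Hive's actual wire format.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.Base64;

public class FilterRoundTrip {
    // Flatten any Serializable into a printable config value.
    static String serialize(Serializable obj) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        try (ObjectOutputStream oos = new ObjectOutputStream(bos)) {
            oos.writeObject(obj);
        }
        return Base64.getEncoder().encodeToString(bos.toByteArray());
    }

    // Recover the object on the far side, e.g. inside a task reading the conf.
    static Serializable deserialize(String encoded) throws IOException, ClassNotFoundException {
        byte[] bytes = Base64.getDecoder().decode(encoded);
        try (ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bytes))) {
            return (Serializable) ois.readObject();
        }
    }

    public static void main(String[] args) throws Exception {
        String confValue = serialize("amount > 100");  // stand-in for a filter object
        System.out.println(deserialize(confValue));    // prints: amount > 100
    }
}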

Example 10 with Serializable

Use of java.io.Serializable in project hibernate-orm by hibernate.

The class EntityInstantiator, method replaceNonAuditIdProxies.

@SuppressWarnings({ "unchecked" })
private void replaceNonAuditIdProxies(Map versionsEntity, Number revision) {
    final Map originalId = (Map) versionsEntity.get(enversService.getAuditEntitiesConfiguration().getOriginalIdPropName());
    for (Object key : originalId.keySet()) {
        final Object value = originalId.get(key);
        if (value instanceof HibernateProxy) {
            final HibernateProxy hibernateProxy = (HibernateProxy) value;
            final LazyInitializer initializer = hibernateProxy.getHibernateLazyInitializer();
            final String entityName = initializer.getEntityName();
            final Serializable entityId = initializer.getIdentifier();
            if (enversService.getEntitiesConfigurations().isVersioned(entityName)) {
                final String entityClassName = enversService.getEntitiesConfigurations().get(entityName).getEntityClassName();
                final Class entityClass = ReflectionTools.loadClass(entityClassName, enversService.getClassLoaderService());
                final ToOneDelegateSessionImplementor delegate = new ToOneDelegateSessionImplementor(
                        versionsReader, entityClass, entityId, revision,
                        RevisionType.DEL.equals(versionsEntity.get(enversService.getAuditEntitiesConfiguration().getRevisionTypePropName())),
                        enversService);
                originalId.put(key, versionsReader.getSessionImplementor().getFactory().getMetamodel()
                        .entityPersister(entityName).createProxy(entityId, delegate));
            }
        }
    }
}
Also used: ToOneDelegateSessionImplementor (org.hibernate.envers.internal.entities.mapper.relation.lazy.ToOneDelegateSessionImplementor), LazyInitializer (org.hibernate.proxy.LazyInitializer), Serializable (java.io.Serializable), Map (java.util.Map), HibernateProxy (org.hibernate.proxy.HibernateProxy)
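
What makes replaceNonAuditIdProxies possible is that a HibernateProxy exposes its entity name and Serializable identifier through its LazyInitializer, normally without forcing the proxy to initialize. A short sketch of that probe follows, assuming Hibernate ORM 5.x on the classpath; everything apart from the Hibernate proxy API is hypothetical.

import java.io.Serializable;
import org.hibernate.proxy.HibernateProxy;
import org.hibernate.proxy.LazyInitializer;

public final class ProxyIdProbe {
    private ProxyIdProbe() {
    }

    // Report entity name and id for a proxy without loading the entity.
    static void describe(Object value) {
        if (value instanceof HibernateProxy) {
            LazyInitializer initializer = ((HibernateProxy) value).getHibernateLazyInitializer();
            String entityName = initializer.getEntityName();      // no database hit
            Serializable entityId = initializer.getIdentifier();  // id is held by the proxy itself
            System.out.println(entityName + "#" + entityId);
        } else {
            System.out.println("not a proxy: " + value);
        }
    }
}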

Aggregations

Classes that co-occur with Serializable (java.io.Serializable) across the indexed sources, with usage counts:

Serializable (java.io.Serializable): 3100
Test (org.junit.Test): 906
HashMap (java.util.HashMap): 789
ArrayList (java.util.ArrayList): 420
Map (java.util.Map): 370
List (java.util.List): 208
QName (org.alfresco.service.namespace.QName): 197
IOException (java.io.IOException): 174
Metacard (ddf.catalog.data.Metacard): 145
NodeRef (org.alfresco.service.cmr.repository.NodeRef): 135
LinkedHashMap (java.util.LinkedHashMap): 128
HashSet (java.util.HashSet): 122
Date (java.util.Date): 114
ByteArrayInputStream (java.io.ByteArrayInputStream): 82
ParserContext (org.mvel2.ParserContext): 82
Set (java.util.Set): 80
File (java.io.File): 75
ObjectInputStream (java.io.ObjectInputStream): 65
Session (org.hibernate.Session): 65
URI (java.net.URI): 64