
Example 16 with LinkedHashSet

use of java.util.LinkedHashSet in project hadoop by apache.

the class TestZKRMStateStorePerf method run.

@SuppressWarnings("unchecked")
@Override
public int run(String[] args) {
    LOG.info("Starting ZKRMStateStorePerf ver." + version);
    int numApp = ZK_PERF_NUM_APP_DEFAULT;
    int numAppAttemptPerApp = ZK_PERF_NUM_APPATTEMPT_PER_APP;
    String hostPort = null;
    boolean launchLocalZK = true;
    if (args.length == 0) {
        System.err.println("Missing arguments.");
        return -1;
    }
    for (int i = 0; i < args.length; i++) {
        // parse command line
        if (args[i].equalsIgnoreCase("-appsize")) {
            numApp = Integer.parseInt(args[++i]);
        } else if (args[i].equalsIgnoreCase("-appattemptsize")) {
            numAppAttemptPerApp = Integer.parseInt(args[++i]);
        } else if (args[i].equalsIgnoreCase("-hostPort")) {
            hostPort = args[++i];
            launchLocalZK = false;
        } else if (args[i].equalsIgnoreCase("-workingZnode")) {
            workingZnode = args[++i];
        } else {
            System.err.println("Illegal argument: " + args[i]);
            return -1;
        }
    }
    if (launchLocalZK) {
        try {
            setUpZKServer();
        } catch (Exception e) {
            System.err.println("failed to setup. : " + e.getMessage());
            return -1;
        }
    }
    initStore(hostPort);
    long submitTime = System.currentTimeMillis();
    long startTime = System.currentTimeMillis() + 1234;
    ArrayList<ApplicationId> applicationIds = new ArrayList<>();
    ArrayList<RMApp> rmApps = new ArrayList<>();
    ArrayList<ApplicationAttemptId> attemptIds = new ArrayList<>();
    HashMap<ApplicationId, Set<ApplicationAttemptId>> appIdsToAttemptId = new HashMap<>();
    TestDispatcher dispatcher = new TestDispatcher();
    store.setRMDispatcher(dispatcher);
    for (int i = 0; i < numApp; i++) {
        ApplicationId appId = ApplicationId.newInstance(clusterTimeStamp, i);
        applicationIds.add(appId);
        ArrayList<ApplicationAttemptId> attemptIdsForThisApp = new ArrayList<>();
        for (int j = 0; j < numAppAttemptPerApp; j++) {
            ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, j);
            attemptIdsForThisApp.add(attemptId);
        }
        appIdsToAttemptId.put(appId, new LinkedHashSet<>(attemptIdsForThisApp));
        attemptIds.addAll(attemptIdsForThisApp);
    }
    for (ApplicationId appId : applicationIds) {
        RMApp app = null;
        try {
            app = storeApp(store, appId, submitTime, startTime);
        } catch (Exception e) {
            System.err.println("failed to create Application Znode. : " + e.getMessage());
            return -1;
        }
        waitNotify(dispatcher);
        rmApps.add(app);
    }
    for (ApplicationAttemptId attemptId : attemptIds) {
        Token<AMRMTokenIdentifier> tokenId = generateAMRMToken(attemptId, appTokenMgr);
        SecretKey clientTokenKey = clientToAMTokenMgr.createMasterKey(attemptId);
        try {
            storeAttempt(store, attemptId, ContainerId.newContainerId(attemptId, 0L).toString(), tokenId, clientTokenKey, dispatcher);
        } catch (Exception e) {
            System.err.println("failed to create AppAttempt Znode. : " + e.getMessage());
            return -1;
        }
    }
    long storeStart = System.currentTimeMillis();
    try {
        store.loadState();
    } catch (Exception e) {
        System.err.println("failed to locaState from ZKRMStateStore. : " + e.getMessage());
        return -1;
    }
    long storeEnd = System.currentTimeMillis();
    long loadTime = storeEnd - storeStart;
    String resultMsg = "ZKRMStateStore takes " + loadTime + " msec to loadState.";
    LOG.info(resultMsg);
    System.out.println(resultMsg);
    // cleanup
    try {
        for (RMApp app : rmApps) {
            ApplicationStateData appState = ApplicationStateData.newInstance(app.getSubmitTime(), app.getStartTime(), app.getApplicationSubmissionContext(), app.getUser());
            ApplicationId appId = app.getApplicationId();
            Map m = mock(Map.class);
            when(m.keySet()).thenReturn(appIdsToAttemptId.get(appId));
            appState.attempts = m;
            store.removeApplicationStateInternal(appState);
        }
    } catch (Exception e) {
        System.err.println("failed to cleanup. : " + e.getMessage());
        return -1;
    }
    return 0;
}
Also used: LinkedHashSet (java.util.LinkedHashSet), RMApp (org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp), Set (java.util.Set), HashMap (java.util.HashMap), ArrayList (java.util.ArrayList), ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId), ApplicationStateData (org.apache.hadoop.yarn.server.resourcemanager.recovery.records.ApplicationStateData), SecretKey (javax.crypto.SecretKey), AMRMTokenIdentifier (org.apache.hadoop.yarn.security.AMRMTokenIdentifier), ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId), Map (java.util.Map)
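
The detail worth copying from this example is new LinkedHashSet<>(attemptIdsForThisApp): wrapping the per-app attempt list in a LinkedHashSet de-duplicates the IDs while preserving creation order, so the mocked keySet() used during cleanup iterates attempts in the order they were stored. A minimal, self-contained sketch of that behavior (the AttemptId record below is a hypothetical stand-in for ApplicationAttemptId, invented for illustration):

import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class OrderedAttempts {

    // Hypothetical stand-in for ApplicationAttemptId, for this sketch only.
    record AttemptId(int appId, int attempt) { }

    public static void main(String[] args) {
        List<AttemptId> attempts = new ArrayList<>();
        for (int j = 0; j < 3; j++) {
            attempts.add(new AttemptId(1, j));
        }
        attempts.add(new AttemptId(1, 0)); // a duplicate

        // LinkedHashSet drops the duplicate but keeps insertion order;
        // HashSet would also drop it, but its iteration order is unspecified.
        Set<AttemptId> ordered = new LinkedHashSet<>(attempts);
        System.out.println(ordered);
        // [AttemptId[appId=1, attempt=0], AttemptId[appId=1, attempt=1], AttemptId[appId=1, attempt=2]]
    }
}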

Example 17 with LinkedHashSet

use of java.util.LinkedHashSet in project hive by apache.

the class UnitTestPropertiesParser method generateFullTestSet.

private LinkedHashMap<String, LinkedHashSet<TestInfo>> generateFullTestSet(RootConfig rootConfig, Map<String, ModuleConfig> moduleConfigs, List<TestDir> unitTestDirs) throws IOException {
    LinkedHashMap<String, LinkedHashSet<TestInfo>> result = new LinkedHashMap<>();
    for (TestDir unitTestDir : unitTestDirs) {
        for (File classFile : fileListProvider.listFiles(unitTestDir.path, new String[] { "class" }, true)) {
            String className = classFile.getName();
            if (className.startsWith("Test") && !className.contains("$")) {
                String testName = className.replaceAll("\\.class$", "");
                String pathPrefix = getPathPrefix(classFile, rootConfig.subDirForPrefix);
                String moduleName = getModuleNameFromPathPrefix(pathPrefix);
                logger.debug("In {}, found class {} with pathPrefix={}, moduleName={}", unitTestDir.path, className, pathPrefix, moduleName);
                ModuleConfig moduleConfig = moduleConfigs.get(moduleName);
                if (moduleConfig == null) {
                    moduleConfig = FAKE_MODULE_CONFIG;
                }
                TestInfo testInfo = checkAndGetTestInfo(moduleName, pathPrefix, testName, rootConfig, moduleConfig);
                if (testInfo != null) {
                    logger.info("Adding test: " + testInfo);
                    addTestToResult(result, testInfo);
                }
            } else {
                logger.trace("In {}, found class {} with pathPrefix={}. Not a test", unitTestDir.path, className);
            }
        }
    }
    return result;
}
Also used: LinkedHashSet (java.util.LinkedHashSet), File (java.io.File), LinkedHashMap (java.util.LinkedHashMap)
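
The return type LinkedHashMap<String, LinkedHashSet<TestInfo>> keeps both the modules and the tests within each module in discovery order, with duplicates dropped. The addTestToResult helper called above is not shown in this excerpt; a plausible sketch of it, assuming one ordered set per module keyed by module name:

import java.util.LinkedHashMap;
import java.util.LinkedHashSet;

public class TestGrouping {

    // Minimal stand-in for TestInfo; the real class carries more fields.
    record TestInfo(String module, String testName) { }

    // Assumed shape of addTestToResult: one ordered, de-duplicated set per module.
    static void addTestToResult(LinkedHashMap<String, LinkedHashSet<TestInfo>> result,
                                TestInfo testInfo) {
        result.computeIfAbsent(testInfo.module(), k -> new LinkedHashSet<>()).add(testInfo);
    }

    public static void main(String[] args) {
        LinkedHashMap<String, LinkedHashSet<TestInfo>> result = new LinkedHashMap<>();
        addTestToResult(result, new TestInfo("ql", "TestParser"));
        addTestToResult(result, new TestInfo("ql", "TestParser")); // duplicate, ignored
        addTestToResult(result, new TestInfo("metastore", "TestSchema"));
        System.out.println(result);
        // {ql=[TestInfo[module=ql, testName=TestParser]], metastore=[TestInfo[module=metastore, testName=TestSchema]]}
    }
}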

Example 18 with LinkedHashSet

use of java.util.LinkedHashSet in project kafka by apache.

the class ConnectorConfig method enrich.

/**
     * Returns an enriched {@link ConfigDef} building upon the given {@code baseConfigDef}, using the current configuration specified in {@code props} as input.
     * <p>
     * {@code requireFullConfig} specifies whether required config values that are missing should cause an exception to be thrown.
     */
public static ConfigDef enrich(ConfigDef baseConfigDef, Map<String, String> props, boolean requireFullConfig) {
    final List<String> transformAliases = (List<String>) ConfigDef.parseType(TRANSFORMS_CONFIG, props.get(TRANSFORMS_CONFIG), Type.LIST);
    if (transformAliases == null || transformAliases.isEmpty()) {
        return baseConfigDef;
    }
    final ConfigDef newDef = new ConfigDef(baseConfigDef);
    for (String alias : new LinkedHashSet<>(transformAliases)) {
        final String prefix = TRANSFORMS_CONFIG + "." + alias + ".";
        final String group = TRANSFORMS_GROUP + ": " + alias;
        int orderInGroup = 0;
        final String transformationTypeConfig = prefix + "type";
        final ConfigDef.Validator typeValidator = new ConfigDef.Validator() {

            @Override
            public void ensureValid(String name, Object value) {
                getConfigDefFromTransformation(transformationTypeConfig, (Class) value);
            }
        };
        newDef.define(transformationTypeConfig, Type.CLASS, ConfigDef.NO_DEFAULT_VALUE, typeValidator, Importance.HIGH, "Class for the '" + alias + "' transformation.", group, orderInGroup++, Width.LONG, "Transformation type for " + alias, Collections.<String>emptyList(), new TransformationClassRecommender());
        final ConfigDef transformationConfigDef;
        try {
            final String className = props.get(transformationTypeConfig);
            final Class<?> cls = (Class<?>) ConfigDef.parseType(transformationTypeConfig, className, Type.CLASS);
            transformationConfigDef = getConfigDefFromTransformation(transformationTypeConfig, cls);
        } catch (ConfigException e) {
            if (requireFullConfig) {
                throw e;
            } else {
                continue;
            }
        }
        newDef.embed(prefix, group, orderInGroup, transformationConfigDef);
    }
    return newDef;
}
Also used: LinkedHashSet (java.util.LinkedHashSet), ConfigException (org.apache.kafka.common.config.ConfigException), ArrayList (java.util.ArrayList), List (java.util.List), ConfigDef (org.apache.kafka.common.config.ConfigDef)
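
The interesting line here is for (String alias : new LinkedHashSet<>(transformAliases)): if a user lists the same alias twice in the transforms property, each alias is still processed exactly once, and in the order the user declared it, so group ordering in the enriched ConfigDef stays stable. A minimal sketch of just that de-duplication step, using plain strings rather than Kafka's config machinery:

import java.util.LinkedHashSet;
import java.util.List;

public class AliasDedup {
    public static void main(String[] args) {
        // As if parsed from a property like "transforms=mask,route,mask".
        List<String> transformAliases = List.of("mask", "route", "mask");

        // One pass per distinct alias, in declaration order.
        for (String alias : new LinkedHashSet<>(transformAliases)) {
            System.out.println("defining configs for transform: " + alias);
        }
        // Prints "mask" then "route"; the repeated "mask" is skipped.
    }
}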

Example 19 with LinkedHashSet

use of java.util.LinkedHashSet in project hive by apache.

the class LineageLogger method createSourceVertices.

/**
   * Convert a list of columns to a set of vertices.
   * Use cached vertices if possible.
   */
private Set<Vertex> createSourceVertices(Map<String, Vertex> vertexCache, Collection<BaseColumnInfo> baseCols) {
    Set<Vertex> sources = new LinkedHashSet<Vertex>();
    if (baseCols != null && !baseCols.isEmpty()) {
        for (BaseColumnInfo col : baseCols) {
            Table table = col.getTabAlias().getTable();
            if (table.isTemporary()) {
                // Ignore temporary tables
                continue;
            }
            Vertex.Type type = Vertex.Type.TABLE;
            String tableName = table.getDbName() + "." + table.getTableName();
            FieldSchema fieldSchema = col.getColumn();
            String label = tableName;
            if (fieldSchema != null) {
                type = Vertex.Type.COLUMN;
                label = tableName + "." + fieldSchema.getName();
            }
            sources.add(getOrCreateVertex(vertexCache, label, type));
        }
    }
    return sources;
}
Also used: LinkedHashSet (java.util.LinkedHashSet), Table (org.apache.hadoop.hive.metastore.api.Table), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), BaseColumnInfo (org.apache.hadoop.hive.ql.hooks.LineageInfo.BaseColumnInfo)
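
Because sources is a LinkedHashSet<Vertex>, duplicate base columns (two output columns derived from the same source column) collapse to a single vertex, and iteration order follows the traversal of baseCols, which keeps the emitted lineage output deterministic from run to run. A stripped-down sketch of the same idea using string labels instead of Vertex objects:

import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class SourceLabels {
    public static void main(String[] args) {
        // Column labels in visit order; "db.t1.c1" appears twice because
        // two output columns derive from the same base column.
        List<String> visited = List.of("db.t1.c1", "db.t1.c2", "db.t1.c1");

        Set<String> sources = new LinkedHashSet<>();
        for (String label : visited) {
            sources.add(label); // the second "db.t1.c1" is a no-op
        }
        System.out.println(sources); // [db.t1.c1, db.t1.c2]
    }
}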

Example 20 with LinkedHashSet

use of java.util.LinkedHashSet in project hive by apache.

the class CorrelationOptimizer method findPossibleAutoConvertedJoinOperators.

private void findPossibleAutoConvertedJoinOperators() throws SemanticException {
    // Guess whether each join operator will be auto-converted to a map join,
    // based on hive.auto.convert.join.noconditionaltask.size.
    for (JoinOperator joinOp : pCtx.getJoinOps()) {
        boolean isAbleToGuess = true;
        boolean mayConvert = false;
        // Get total size and individual alias's size
        long aliasTotalKnownInputSize = 0;
        Map<String, Long> aliasToSize = new HashMap<String, Long>();
        Map<Integer, Set<String>> posToAliases = new HashMap<Integer, Set<String>>();
        for (int pos = 0; pos < joinOp.getNumParent(); pos++) {
            Operator<? extends OperatorDesc> op = joinOp.getParentOperators().get(pos);
            Set<TableScanOperator> topOps = CorrelationUtilities.findTableScanOperators(op);
            if (topOps.isEmpty()) {
                isAbleToGuess = false;
                break;
            }
            Set<String> aliases = new LinkedHashSet<String>();
            for (TableScanOperator tsop : topOps) {
                Table table = tsop.getConf().getTableMetadata();
                if (table == null) {
                    // table should not be null.
                    throw new SemanticException("The table of " + tsop.getName() + " " + tsop.getIdentifier() + " is null, which is not expected.");
                }
                String alias = tsop.getConf().getAlias();
                aliases.add(alias);
                Path p = table.getPath();
                ContentSummary resultCs = null;
                try {
                    FileSystem fs = table.getPath().getFileSystem(pCtx.getConf());
                    resultCs = fs.getContentSummary(p);
                } catch (IOException e) {
                    LOG.warn("Encounter a error while querying content summary of table " + table.getCompleteName() + " from FileSystem. " + "Cannot guess if CommonJoinOperator will optimize " + joinOp.getName() + " " + joinOp.getIdentifier());
                }
                if (resultCs == null) {
                    isAbleToGuess = false;
                    break;
                }
                long size = resultCs.getLength();
                aliasTotalKnownInputSize += size;
                Long es = aliasToSize.get(alias);
                if (es == null) {
                    es = 0L;
                }
                es += size;
                aliasToSize.put(alias, es);
            }
            posToAliases.put(pos, aliases);
        }
        if (!isAbleToGuess) {
            LOG.info("Cannot guess if CommonJoinOperator will optimize " + joinOp.getName() + " " + joinOp.getIdentifier());
            continue;
        }
        JoinDesc joinDesc = joinOp.getConf();
        Byte[] order = joinDesc.getTagOrder();
        int numAliases = order.length;
        Set<Integer> bigTableCandidates = MapJoinProcessor.getBigTableCandidates(joinDesc.getConds());
        if (bigTableCandidates.isEmpty()) {
            continue;
        }
        long thresholdOfSmallTblSizeSum = HiveConf.getLongVar(pCtx.getConf(), HiveConf.ConfVars.HIVESMALLTABLESFILESIZE);
        for (int i = 0; i < numAliases; i++) {
            // this table cannot be big table
            if (!bigTableCandidates.contains(i)) {
                continue;
            }
            Set<String> aliases = posToAliases.get(i);
            long aliasKnownSize = Utilities.sumOf(aliasToSize, aliases);
            if (!CommonJoinTaskDispatcher.cannotConvert(aliasKnownSize, aliasTotalKnownInputSize, thresholdOfSmallTblSizeSum)) {
                mayConvert = true;
            }
        }
        if (mayConvert) {
            LOG.info(joinOp.getName() + " " + joinOp.getIdentifier() + " may be converted to MapJoin by CommonJoinResolver");
            skipedJoinOperators.add(joinOp);
        }
    }
}
Also used: MapJoinOperator (org.apache.hadoop.hive.ql.exec.MapJoinOperator), JoinOperator (org.apache.hadoop.hive.ql.exec.JoinOperator), LinkedHashSet (java.util.LinkedHashSet), TableScanOperator (org.apache.hadoop.hive.ql.exec.TableScanOperator), Set (java.util.Set), HashSet (java.util.HashSet), HashMap (java.util.HashMap), LinkedHashMap (java.util.LinkedHashMap), FileSystem (org.apache.hadoop.fs.FileSystem), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException), Path (org.apache.hadoop.fs.Path), Table (org.apache.hadoop.hive.ql.metadata.Table), IOException (java.io.IOException), ContentSummary (org.apache.hadoop.fs.ContentSummary), JoinDesc (org.apache.hadoop.hive.ql.plan.JoinDesc)
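
Here the LinkedHashSet<String> collects the table aliases feeding one join position: duplicate aliases coming from multiple TableScanOperators collapse to one entry, so each alias's accumulated size is counted once when the per-position total is computed via Utilities.sumOf. That helper's body is not shown above; a hedged sketch of an equivalent, assuming it totals the known sizes of the given aliases:

import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;

public class AliasSizes {

    // Assumed equivalent of Utilities.sumOf(Map, Set): total the known
    // sizes of the given aliases, treating missing entries as zero.
    static long sumOf(Map<String, Long> aliasToSize, Set<String> aliases) {
        long total = 0;
        for (String alias : aliases) {
            total += aliasToSize.getOrDefault(alias, 0L);
        }
        return total;
    }

    public static void main(String[] args) {
        Map<String, Long> aliasToSize = new HashMap<>();
        aliasToSize.put("a", 1_000L);
        aliasToSize.put("b", 250L);

        // Insertion-ordered, de-duplicated aliases for one join position.
        Set<String> aliases = new LinkedHashSet<>();
        aliases.add("a");
        aliases.add("b");
        aliases.add("a"); // a second scan of the same alias, ignored

        System.out.println(sumOf(aliasToSize, aliases)); // 1250
    }
}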

Aggregations

LinkedHashSet (java.util.LinkedHashSet): 1252, ArrayList (java.util.ArrayList): 241, Set (java.util.Set): 154, HashSet (java.util.HashSet): 128, HashMap (java.util.HashMap): 115, File (java.io.File): 102, IOException (java.io.IOException): 97, Map (java.util.Map): 97, Test (org.junit.Test): 94, List (java.util.List): 86, LinkedHashMap (java.util.LinkedHashMap): 77, ProcessResult (org.asqatasun.entity.audit.ProcessResult): 77, SourceCodeRemark (org.asqatasun.entity.audit.SourceCodeRemark): 73, LinkedList (java.util.LinkedList): 51, Iterator (java.util.Iterator): 38, TreeSet (java.util.TreeSet): 34, URL (java.net.URL): 33, Collection (java.util.Collection): 28, Feature (edu.illinois.cs.cogcomp.edison.features.Feature): 24, ProcessRemark (org.asqatasun.entity.audit.ProcessRemark): 23