Search in sources :

Example 21 with LinkedList

Use of java.util.LinkedList in the Apache Hive project.

From the class SemanticAnalyzer, the method walkASTMarkTABREF.

/**
 * Breadth-first walk of the query AST collecting every table reference
 * (TOK_TABREF) and asking the table-mask facility whether row filtering or
 * column masking applies to it. References that need rewriting are registered
 * as translations on {@code tableMask}.
 *
 * @param ast      root of the (sub)tree to scan
 * @param cteAlias names of CTE aliases; references that match are skipped
 *                 (masking does not apply to them)
 * @throws SemanticException if masking/filtering is required for an ACID
 *                           (update/delete/merge) statement, which is not
 *                           supported
 */
private void walkASTMarkTABREF(ASTNode ast, Set<String> cteAlias) throws SemanticException {
    Queue<Node> queue = new LinkedList<>();
    queue.add(ast);
    // LinkedHashMap keeps insertion order so translations are applied deterministically.
    Map<HivePrivilegeObject, MaskAndFilterInfo> basicInfos = new LinkedHashMap<>();
    while (!queue.isEmpty()) {
        ASTNode astNode = (ASTNode) queue.poll();
        if (astNode.getToken().getType() == HiveParser.TOK_TABREF) {
            int aliasIndex = 0;
            // StringBuilder instead of StringBuffer: this method is single-threaded,
            // so the synchronized StringBuffer buys nothing.
            StringBuilder additionalTabInfo = new StringBuilder();
            for (int index = 1; index < astNode.getChildCount(); index++) {
                ASTNode ct = (ASTNode) astNode.getChild(index);
                if (ct.getToken().getType() == HiveParser.TOK_TABLEBUCKETSAMPLE
                        || ct.getToken().getType() == HiveParser.TOK_TABLESPLITSAMPLE
                        || ct.getToken().getType() == HiveParser.TOK_TABLEPROPERTIES) {
                    // Preserve sampling/properties clauses verbatim so they survive the rewrite.
                    additionalTabInfo.append(ctx.getTokenRewriteStream().toString(ct.getTokenStartIndex(), ct.getTokenStopIndex()));
                } else {
                    // Any other child of TOK_TABREF is the explicit table alias.
                    aliasIndex = index;
                }
            }
            ASTNode tableTree = (ASTNode) (astNode.getChild(0));
            String tabIdName = getUnescapedName(tableTree);
            String alias;
            if (aliasIndex != 0) {
                alias = unescapeIdentifier(astNode.getChild(aliasIndex).getText());
            } else {
                alias = getUnescapedUnqualifiedTableName(tableTree);
            }
            // select * from TAB2 [no masking]
            if (cteAlias.contains(tabIdName)) {
                continue;
            }
            // NOTE: removed the dead local `String replacementText = null;` that was
            // declared here but never read (a fresh local is declared in the loop below).
            Table table;
            try {
                table = getTableObjectByName(tabIdName);
            } catch (HiveException e) {
                // Table may not be found when materialization of CTE is on.
                LOG.info("Table " + tabIdName + " is not found in walkASTMarkTABREF.");
                continue;
            }
            List<String> colNames = new ArrayList<>();
            List<String> colTypes = new ArrayList<>();
            for (FieldSchema col : table.getAllCols()) {
                colNames.add(col.getName());
                colTypes.add(col.getType());
            }
            basicInfos.put(new HivePrivilegeObject(table.getDbName(), table.getTableName(), colNames),
                    new MaskAndFilterInfo(colTypes, additionalTabInfo.toString(), alias, astNode, table.isView()));
        }
        // Descend, except into subtrees whose root token type is deliberately ignored.
        if (astNode.getChildCount() > 0 && !ignoredTokens.contains(astNode.getToken().getType())) {
            for (Node child : astNode.getChildren()) {
                queue.offer(child);
            }
        }
    }
    // Copy constructor instead of a separate addAll call.
    List<HivePrivilegeObject> basicPrivObjs = new ArrayList<>(basicInfos.keySet());
    List<HivePrivilegeObject> needRewritePrivObjs = tableMask.applyRowFilterAndColumnMasking(basicPrivObjs);
    if (needRewritePrivObjs != null && !needRewritePrivObjs.isEmpty()) {
        for (HivePrivilegeObject privObj : needRewritePrivObjs) {
            MaskAndFilterInfo info = basicInfos.get(privObj);
            String replacementText = tableMask.create(privObj, info);
            if (replacementText != null) {
                // We don't support masking/filtering against ACID query at the moment
                if (ctx.getIsUpdateDeleteMerge()) {
                    throw new SemanticException(ErrorMsg.MASKING_FILTERING_ON_ACID_NOT_SUPPORTED, privObj.getDbname(), privObj.getObjectName());
                }
                tableMask.setNeedsRewrite(true);
                tableMask.addTranslation(info.astNode, replacementText);
            }
        }
    }
}
Also used : Table(org.apache.hadoop.hive.ql.metadata.Table) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) Node(org.apache.hadoop.hive.ql.lib.Node) RelNode(org.apache.calcite.rel.RelNode) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ArrayList(java.util.ArrayList) HivePrivilegeObject(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject) LinkedList(java.util.LinkedList) LinkedHashMap(java.util.LinkedHashMap) CalciteSemanticException(org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException)

Example 22 with LinkedList

use of java.util.LinkedList in project hbase by apache.

the class TestSizeFailures method setUpBeforeClass.

/**
 * Starts a mini cluster, creates the test table pre-split into ten regions
 * keyed on the leading row-key digit, and loads NUM_ROWS x NUM_COLS of
 * random 50-byte values in batches of 1000 puts.
 */
@BeforeClass
public static void setUpBeforeClass() throws Exception {
    // Uncomment the following lines if more verbosity is needed for
    // debugging (see HBASE-12285 for details).
    //((Log4JLogger)RpcServer.LOG).getLogger().setLevel(Level.ALL);
    //((Log4JLogger)RpcClient.LOG).getLogger().setLevel(Level.ALL);
    //((Log4JLogger)ScannerCallable.LOG).getLogger().setLevel(Level.ALL);
    Configuration conf = TEST_UTIL.getConfiguration();
    // NOTE(review): the original comment said "ignore sanity checks in the
    // server" but the flag is set to true, which enables them — confirm which
    // setting is actually intended.
    conf.setBoolean("hbase.table.sanity.checks", true);
    TEST_UTIL.startMiniCluster(SLAVES);
    // Write a bunch of data
    TABLENAME = TableName.valueOf("testSizeFailures");
    List<byte[]> qualifiers = new ArrayList<>();
    for (int i = 1; i <= 10; i++) {
        qualifiers.add(Bytes.toBytes(Integer.toString(i)));
    }
    HColumnDescriptor hcd = new HColumnDescriptor(FAMILY);
    HTableDescriptor desc = new HTableDescriptor(TABLENAME);
    desc.addFamily(hcd);
    // Nine split keys {'1',0x00} .. {'9',0x00}: rows are decimal-string keys,
    // so this splits on the leading digit.
    byte[][] splits = new byte[9][2];
    for (int i = 1; i < 10; i++) {
        // BUG FIX (dead store): the original assigned (split >>> 8) to
        // splits[i-1][0] and then immediately overwrote the same slot with the
        // low byte. The first store never took effect, so dropping it keeps
        // the effective split keys identical while removing the dead code.
        splits[i - 1][0] = (byte) (48 + i); // ASCII digit '1'..'9'
    }
    TEST_UTIL.getAdmin().createTable(desc, splits);
    Connection conn = TEST_UTIL.getConnection();
    try (Table table = conn.getTable(TABLENAME)) {
        List<Put> puts = new LinkedList<>();
        for (int i = 0; i < NUM_ROWS; i++) {
            Put p = new Put(Bytes.toBytes(Integer.toString(i)));
            for (int j = 0; j < NUM_COLS; j++) {
                byte[] value = new byte[50];
                Bytes.random(value);
                p.addColumn(FAMILY, Bytes.toBytes(Integer.toString(j)), value);
            }
            puts.add(p);
            // Flush in batches of 1000 to bound client-side memory.
            if (puts.size() == 1000) {
                table.batch(puts, null);
                puts.clear();
            }
        }
        // Flush the final partial batch.
        if (puts.size() > 0) {
            table.batch(puts, null);
        }
    }
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) ArrayList(java.util.ArrayList) LinkedList(java.util.LinkedList) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) BeforeClass(org.junit.BeforeClass)

Example 23 with LinkedList

use of java.util.LinkedList in project zeppelin by apache.

the class RemoteInterpreterEventPoller method sendResourcePoolResponseGetAll.

/**
 * Serializes every resource in the given set to JSON and ships the list to the
 * remote interpreter via its Thrift client. The client is always released in
 * the finally block, flagged as broken if the call failed.
 */
private void sendResourcePoolResponseGetAll(ResourceSet resourceSet) {
    Client thriftClient = null;
    boolean connectionBroken = false;
    try {
        thriftClient = interpreterProcess.getClient();
        Gson gson = new Gson();
        List<String> serializedResources = new LinkedList<>();
        for (Resource resource : resourceSet) {
            serializedResources.add(gson.toJson(resource));
        }
        thriftClient.resourcePoolResponseGetAll(serializedResources);
    } catch (Exception e) {
        logger.error(e.getMessage(), e);
        // Mark the connection as broken so the pool can discard it on release.
        connectionBroken = true;
    } finally {
        if (thriftClient != null) {
            interpreterProcess.releaseClient(thriftClient, connectionBroken);
        }
    }
}
Also used : Resource(org.apache.zeppelin.resource.Resource) RemoteZeppelinServerResource(org.apache.zeppelin.interpreter.RemoteZeppelinServerResource) Gson(com.google.gson.Gson) Client(org.apache.zeppelin.interpreter.thrift.RemoteInterpreterService.Client) LinkedList(java.util.LinkedList) TException(org.apache.thrift.TException) InvocationTargetException(java.lang.reflect.InvocationTargetException)

Example 24 with LinkedList

use of java.util.LinkedList in project zeppelin by apache.

the class SparkDependencyResolver method updateCompilerClassPath.

/**
 * Injects the given URLs into the running Scala compiler's class path.
 *
 * Builds a merged class path from the current platform plus the new URLs,
 * installs it on the JavaPlatform via reflection, and then tells the compiler
 * to invalidate its cached class-path entries so the new jars are picked up.
 *
 * @param urls jar/directory URLs to append to the compiler class path
 * @throws IllegalAccessException    if the reflective setter is inaccessible
 * @throws IllegalArgumentException  if the reflective call receives a bad argument
 * @throws InvocationTargetException if the reflective setter itself throws
 */
private void updateCompilerClassPath(URL[] urls) throws IllegalAccessException, IllegalArgumentException, InvocationTargetException {
    JavaPlatform platform = (JavaPlatform) global.platform();
    MergedClassPath<AbstractFile> newClassPath = mergeUrlsIntoClassPath(platform, urls);
    Method[] methods = platform.getClass().getMethods();
    for (Method m : methods) {
        // The Scala-generated setter for `currentClassPath` is named
        // `currentClassPath_$eq`; matched by suffix since there is no public API.
        if (m.getName().endsWith("currentClassPath_$eq")) {
            // The field holds Option[MergedClassPath], hence the Some wrapper.
            m.invoke(platform, new Some(newClassPath));
            break;
        }
    }
    // NOTE: Must use reflection until this is exposed/fixed upstream in Scala
    List<String> classPaths = new LinkedList<>();
    for (URL url : urls) {
        classPaths.add(url.getPath());
    }
    // Reload all jars specified into our compiler
    global.invalidateClassPathEntries(scala.collection.JavaConversions.asScalaBuffer(classPaths).toList());
}
Also used : AbstractFile(scala.reflect.io.AbstractFile) Some(scala.Some) JavaPlatform(scala.tools.nsc.backend.JavaPlatform) Method(java.lang.reflect.Method) LinkedList(java.util.LinkedList) URL(java.net.URL)

Example 25 with LinkedList

use of java.util.LinkedList in project zeppelin by apache.

the class DepInterpreterTest method setUp.

/**
 * Builds the fixture for DepInterpreter tests: opens the interpreter, wires it
 * into an InterpreterGroup alongside a SparkInterpreter, and creates the
 * InterpreterContext used by the test methods.
 */
@Before
public void setUp() throws Exception {
    Properties p = getTestProperties();
    dep = new DepInterpreter(p);
    // Opened before group wiring; presumably open() must not depend on the
    // group being set — TODO confirm against DepInterpreter lifecycle.
    dep.open();
    InterpreterGroup intpGroup = new InterpreterGroup();
    intpGroup.put("note", new LinkedList<Interpreter>());
    intpGroup.get("note").add(new SparkInterpreter(p));
    intpGroup.get("note").add(dep);
    dep.setInterpreterGroup(intpGroup);
    // Context with empty auth/GUI/registry; runner list and the remaining
    // collaborators are left null/empty since the tests don't exercise them.
    context = new InterpreterContext("note", "id", null, "title", "text", new AuthenticationInfo(), new HashMap<String, Object>(), new GUI(), new AngularObjectRegistry(intpGroup.getId(), null), null, new LinkedList<InterpreterContextRunner>(), null);
}
Also used : HashMap(java.util.HashMap) GUI(org.apache.zeppelin.display.GUI) Properties(java.util.Properties) AuthenticationInfo(org.apache.zeppelin.user.AuthenticationInfo) AngularObjectRegistry(org.apache.zeppelin.display.AngularObjectRegistry) LinkedList(java.util.LinkedList) Before(org.junit.Before)

Aggregations

LinkedList (java.util.LinkedList)10856 Test (org.junit.Test)1545 List (java.util.List)1517 HashMap (java.util.HashMap)1413 ArrayList (java.util.ArrayList)1368 Map (java.util.Map)915 IOException (java.io.IOException)826 File (java.io.File)721 HashSet (java.util.HashSet)632 LinkedHashMap (java.util.LinkedHashMap)390 GenericValue (org.apache.ofbiz.entity.GenericValue)296 Iterator (java.util.Iterator)281 Set (java.util.Set)274 Date (java.util.Date)249 GenericEntityException (org.apache.ofbiz.entity.GenericEntityException)232 Collection (java.util.Collection)208 Collectors (java.util.stream.Collectors)162 Delegator (org.apache.ofbiz.entity.Delegator)162 URL (java.net.URL)159 Locale (java.util.Locale)159