Example 51 with PrivilegedExceptionAction

use of java.security.PrivilegedExceptionAction in project kylo by Teradata.

the class TestKerberosKinit method testHiveJdbcConnection.

private void testHiveJdbcConnection(final String configResources, final String keytab, final String realUserPrincipal, final String proxyUser, final String hiveHostName) throws Exception {
    final Configuration configuration = TestKerberosKinit.createConfigurationFromList(configResources);
    UserGroupInformation realugi = TestKerberosKinit.generateKerberosTicket(configuration, keytab, realUserPrincipal);
    System.out.println(" ");
    System.out.println("Sucessfully got a kerberos ticket in the JVM");
    HiveConnection realUserConnection = (HiveConnection) realugi.doAs(new PrivilegedExceptionAction<Connection>() {

        public Connection run() {
            Connection connection = null;
            Statement stmt = null;
            ResultSet res = null;
            try {
                Class.forName(DRIVER_NAME);
                String url = hiveHostName;
                if (proxyUser != null) {
                    url = url + ";hive.server2.proxy.user=" + proxyUser;
                }
                System.out.println("Hive URL: " + url);
                connection = DriverManager.getConnection(url);
                System.out.println("creating statement");
                stmt = connection.createStatement();
                String sql = "show databases";
                System.out.println("Executing the Hive query: " + sql);
                res = stmt.executeQuery(sql);
                System.out.println("List of databases:");
                while (res.next()) {
                    System.out.println(res.getString(1));
                }
            } catch (Exception e) {
                throw new RuntimeException("Error creating connection with proxy user", e);
            } finally {
                JdbcUtils.closeResultSet(res);
                JdbcUtils.closeStatement(stmt);
                JdbcUtils.closeConnection(connection);
            }
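            // Note: the finally block above has already closed this connection,
            // so the value returned here must not be used for further queries.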
            return connection;
        }
    });
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) Statement(java.sql.Statement) Connection(java.sql.Connection) HiveConnection(org.apache.hive.jdbc.HiveConnection) ResultSet(java.sql.ResultSet) PrivilegedExceptionAction(java.security.PrivilegedExceptionAction) IOException(java.io.IOException) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation)
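
For reference, the same proxy-user JDBC pattern reads more compactly with a Java 8 lambda. This is a minimal sketch, not Kylo's code; the principal, keytab path, and JDBC URL are placeholder values, and try-with-resources replaces the finally block:

// Sketch only: principal, keytab path, and URL below are placeholders.
UserGroupInformation ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
        "user@EXAMPLE.COM", "/etc/security/keytabs/user.keytab");
// The cast selects the exception-throwing doAs overload.
try (Connection conn = ugi.doAs((PrivilegedExceptionAction<Connection>) () ->
        DriverManager.getConnection("jdbc:hive2://hive-host:10000/default;principal=hive/_HOST@EXAMPLE.COM"));
     Statement stmt = conn.createStatement();
     ResultSet res = stmt.executeQuery("show databases")) {
    while (res.next()) {
        System.out.println(res.getString(1));
    }
}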

Example 52 with PrivilegedExceptionAction

use of java.security.PrivilegedExceptionAction in project nifi by apache.

the class HiveWriterTest method setup.

@Before
public void setup() throws Exception {
    hiveEndPoint = mock(HiveEndPoint.class);
    txnsPerBatch = 100;
    autoCreatePartitions = true;
    callTimeout = 0;
    executorService = mock(ExecutorService.class);
    streamingConnection = mock(StreamingConnection.class);
    transactionBatch = mock(TransactionBatch.class);
    userGroupInformation = mock(UserGroupInformation.class);
    hiveConf = mock(HiveConf.class);
    recordWriter = mock(RecordWriter.class);
    recordWriterCallable = mock(Callable.class);
    when(recordWriterCallable.call()).thenReturn(recordWriter);
    when(hiveEndPoint.newConnection(autoCreatePartitions, hiveConf, userGroupInformation)).thenReturn(streamingConnection);
    when(streamingConnection.fetchTransactionBatch(txnsPerBatch, recordWriter)).thenReturn(transactionBatch);
    when(executorService.submit(isA(Callable.class))).thenAnswer(invocation -> {
        Future future = mock(Future.class);
        Answer<Object> answer = i -> ((Callable) invocation.getArguments()[0]).call();
        when(future.get()).thenAnswer(answer);
        when(future.get(anyLong(), any(TimeUnit.class))).thenAnswer(answer);
        return future;
    });
    when(userGroupInformation.doAs(isA(PrivilegedExceptionAction.class))).thenAnswer(invocation -> {
        try {
            try {
                return ((PrivilegedExceptionAction) invocation.getArguments()[0]).run();
            } catch (UncheckedExecutionException e) {
                // Creation of the strict JSON writer will fail due to external dependencies; this gives us a chance to catch it
                for (StackTraceElement stackTraceElement : e.getStackTrace()) {
                    if (stackTraceElement.toString().startsWith("org.apache.hive.hcatalog.streaming.StrictJsonWriter.<init>(")) {
                        return recordWriterCallable.call();
                    }
                }
                throw e;
            }
        } catch (IOException | Error | RuntimeException | InterruptedException e) {
            throw e;
        } catch (Throwable e) {
            throw new UndeclaredThrowableException(e);
        }
    });
    initWriter();
}
Also used : Callable(java.util.concurrent.Callable) StreamingException(org.apache.hive.hcatalog.streaming.StreamingException) HiveEndPoint(org.apache.hive.hcatalog.streaming.HiveEndPoint) Answer(org.mockito.stubbing.Answer) Future(java.util.concurrent.Future) RecordWriter(org.apache.hive.hcatalog.streaming.RecordWriter) UncheckedExecutionException(com.google.common.util.concurrent.UncheckedExecutionException) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation) Matchers.anyLong(org.mockito.Matchers.anyLong) ExecutorService(java.util.concurrent.ExecutorService) Before(org.junit.Before) StreamingConnection(org.apache.hive.hcatalog.streaming.StreamingConnection) Matchers.isA(org.mockito.Matchers.isA) Assert.assertNotNull(org.junit.Assert.assertNotNull) TransactionBatch(org.apache.hive.hcatalog.streaming.TransactionBatch) HiveConf(org.apache.hadoop.hive.conf.HiveConf) Test(org.junit.Test) IOException(java.io.IOException) Mockito.when(org.mockito.Mockito.when) PrivilegedExceptionAction(java.security.PrivilegedExceptionAction) UndeclaredThrowableException(java.lang.reflect.UndeclaredThrowableException) TimeUnit(java.util.concurrent.TimeUnit) Matchers.any(org.mockito.Matchers.any) InvalidTable(org.apache.hive.hcatalog.streaming.InvalidTable) Assert.assertEquals(org.junit.Assert.assertEquals) Mockito.mock(org.mockito.Mockito.mock)
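
The load-bearing stub in this setup is the one that makes the mocked UserGroupInformation actually run the PrivilegedExceptionAction it receives; without it, code under test would get null back from doAs. A minimal sketch of just that stub, using the same Mockito calls as above:

// Sketch: execute whatever action the code under test passes to doAs.
UserGroupInformation ugi = mock(UserGroupInformation.class);
when(ugi.doAs(isA(PrivilegedExceptionAction.class))).thenAnswer(invocation ->
        ((PrivilegedExceptionAction<?>) invocation.getArguments()[0]).run());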

Example 53 with PrivilegedExceptionAction

use of java.security.PrivilegedExceptionAction in project nifi by apache.

the class GetHDFS method processBatchOfFiles.

protected void processBatchOfFiles(final List<Path> files, final ProcessContext context, final ProcessSession session) {
    // process the batch of files
    InputStream stream = null;
    CompressionCodec codec = null;
    Configuration conf = getConfiguration();
    FileSystem hdfs = getFileSystem();
    final boolean keepSourceFiles = context.getProperty(KEEP_SOURCE_FILE).asBoolean();
    final Double bufferSizeProp = context.getProperty(BUFFER_SIZE).asDataSize(DataUnit.B);
    int bufferSize = bufferSizeProp != null ? bufferSizeProp.intValue() : conf.getInt(BUFFER_SIZE_KEY, BUFFER_SIZE_DEFAULT);
    final Path rootDir = new Path(context.getProperty(DIRECTORY).evaluateAttributeExpressions().getValue());
    final CompressionType compressionType = CompressionType.valueOf(context.getProperty(COMPRESSION_CODEC).toString());
    final boolean inferCompressionCodec = compressionType == CompressionType.AUTOMATIC;
    if (inferCompressionCodec || compressionType != CompressionType.NONE) {
        codec = getCompressionCodec(context, getConfiguration());
    }
    final CompressionCodecFactory compressionCodecFactory = new CompressionCodecFactory(conf);
    for (final Path file : files) {
        try {
            if (!getUserGroupInformation().doAs((PrivilegedExceptionAction<Boolean>) () -> hdfs.exists(file))) {
                // if file is no longer there then move on
                continue;
            }
            final String originalFilename = file.getName();
            final String relativePath = getPathDifference(rootDir, file);
            stream = getUserGroupInformation().doAs((PrivilegedExceptionAction<FSDataInputStream>) () -> hdfs.open(file, bufferSize));
            final String outputFilename;
            // Check if we should infer compression codec
            if (inferCompressionCodec) {
                codec = compressionCodecFactory.getCodec(file);
            }
            // Check if compression codec is defined (inferred or otherwise)
            if (codec != null) {
                stream = codec.createInputStream(stream);
                outputFilename = StringUtils.removeEnd(originalFilename, codec.getDefaultExtension());
            } else {
                outputFilename = originalFilename;
            }
            FlowFile flowFile = session.create();
            final StopWatch stopWatch = new StopWatch(true);
            flowFile = session.importFrom(stream, flowFile);
            stopWatch.stop();
            final String dataRate = stopWatch.calculateDataRate(flowFile.getSize());
            final long millis = stopWatch.getDuration(TimeUnit.MILLISECONDS);
            flowFile = session.putAttribute(flowFile, CoreAttributes.PATH.key(), relativePath.isEmpty() ? "." : relativePath);
            flowFile = session.putAttribute(flowFile, CoreAttributes.FILENAME.key(), outputFilename);
            if (!keepSourceFiles && !getUserGroupInformation().doAs((PrivilegedExceptionAction<Boolean>) () -> hdfs.delete(file, false))) {
                getLogger().warn("Could not remove {} from HDFS. Not ingesting this file ...", new Object[] { file });
                session.remove(flowFile);
                continue;
            }
            session.getProvenanceReporter().receive(flowFile, file.toString());
            session.transfer(flowFile, REL_SUCCESS);
            getLogger().info("retrieved {} from HDFS {} in {} milliseconds at a rate of {}", new Object[] { flowFile, file, millis, dataRate });
            session.commit();
        } catch (final Throwable t) {
            getLogger().error("Error retrieving file {} from HDFS due to {}", new Object[] { file, t });
            session.rollback();
            context.yield();
        } finally {
            IOUtils.closeQuietly(stream);
            stream = null;
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) FlowFile(org.apache.nifi.flowfile.FlowFile) Configuration(org.apache.hadoop.conf.Configuration) FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream) InputStream(java.io.InputStream) PrivilegedExceptionAction(java.security.PrivilegedExceptionAction) StopWatch(org.apache.nifi.util.StopWatch) CompressionCodecFactory(org.apache.hadoop.io.compress.CompressionCodecFactory) FileSystem(org.apache.hadoop.fs.FileSystem) CompressionCodec(org.apache.hadoop.io.compress.CompressionCodec)
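
Each HDFS call above (exists, open, delete) is wrapped in its own doAs with a cast to pick the right overload. When the pattern repeats this often, it can be factored into a small generic helper; a hedged sketch, assuming the processor's getUserGroupInformation() is available as in the snippet above (the helper name is illustrative):

// Illustrative helper: run a single HDFS operation as the Kerberos user.
private <T> T runPrivileged(PrivilegedExceptionAction<T> action) throws IOException, InterruptedException {
    return getUserGroupInformation().doAs(action);
}

// Usage, mirroring the calls in processBatchOfFiles:
boolean exists = runPrivileged(() -> hdfs.exists(file));
FSDataInputStream in = runPrivileged(() -> hdfs.open(file, bufferSize));
boolean deleted = runPrivileged(() -> hdfs.delete(file, false));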

Example 54 with PrivilegedExceptionAction

use of java.security.PrivilegedExceptionAction in project jdk8u_jdk by JetBrains.

the class CertStoreHelper method getInstance.

public static CertStoreHelper getInstance(final String type) throws NoSuchAlgorithmException {
    CertStoreHelper helper = cache.get(type);
    if (helper != null) {
        return helper;
    }
    final String cl = classMap.get(type);
    if (cl == null) {
        throw new NoSuchAlgorithmException(type + " not available");
    }
    try {
        helper = AccessController.doPrivileged(new PrivilegedExceptionAction<CertStoreHelper>() {

            public CertStoreHelper run() throws ClassNotFoundException {
                try {
                    Class<?> c = Class.forName(cl, true, null);
                    CertStoreHelper csh = (CertStoreHelper) c.newInstance();
                    cache.put(type, csh);
                    return csh;
                } catch (InstantiationException | IllegalAccessException e) {
                    throw new AssertionError(e);
                }
            }
        });
        return helper;
    } catch (PrivilegedActionException e) {
        throw new NoSuchAlgorithmException(type + " not available", e.getException());
    }
}
Also used : PrivilegedActionException(java.security.PrivilegedActionException) NoSuchAlgorithmException(java.security.NoSuchAlgorithmException) PrivilegedExceptionAction(java.security.PrivilegedExceptionAction)
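
Because AccessController.doPrivileged is overloaded for both PrivilegedAction and PrivilegedExceptionAction, a lambda version of this lookup needs an explicit cast; the checked ClassNotFoundException then arrives wrapped in PrivilegedActionException, exactly as the catch block above handles it. A minimal sketch of the same shape:

try {
    CertStoreHelper csh = AccessController.doPrivileged(
            (PrivilegedExceptionAction<CertStoreHelper>) () -> {
                Class<?> c = Class.forName(cl, true, null);
                return (CertStoreHelper) c.newInstance();
            });
    cache.put(type, csh);
    return csh;
} catch (PrivilegedActionException e) {
    // getException() unwraps the checked exception thrown inside run().
    throw new NoSuchAlgorithmException(type + " not available", e.getException());
}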

Example 55 with PrivilegedExceptionAction

use of java.security.PrivilegedExceptionAction in project jdk8u_jdk by JetBrains.

the class BreakDictionary method readDictionaryFile.

private void readDictionaryFile(final String dictionaryName) throws IOException, MissingResourceException {
    BufferedInputStream in;
    try {
        in = AccessController.doPrivileged(new PrivilegedExceptionAction<BufferedInputStream>() {

            @Override
            public BufferedInputStream run() throws Exception {
                return new BufferedInputStream(getClass().getResourceAsStream("/sun/text/resources/" + dictionaryName));
            }
        });
    } catch (PrivilegedActionException e) {
        throw new InternalError(e.toString(), e);
    }
    byte[] buf = new byte[8];
    if (in.read(buf) != 8) {
        throw new MissingResourceException("Wrong data length", dictionaryName, "");
    }
    // check version
    int version = RuleBasedBreakIterator.getInt(buf, 0);
    if (version != supportedVersion) {
        throw new MissingResourceException("Dictionary version(" + version + ") is unsupported", dictionaryName, "");
    }
    // get data size
    int len = RuleBasedBreakIterator.getInt(buf, 4);
    buf = new byte[len];
    if (in.read(buf) != len) {
        throw new MissingResourceException("Wrong data length", dictionaryName, "");
    }
    // close the stream
    in.close();
    int l;
    int offset = 0;
    // read in the column map for BMP characters (this is serialized in
    // its internal form: an index array followed by a data array)
    l = RuleBasedBreakIterator.getInt(buf, offset);
    offset += 4;
    short[] temp = new short[l];
    for (int i = 0; i < l; i++, offset += 2) {
        temp[i] = RuleBasedBreakIterator.getShort(buf, offset);
    }
    l = RuleBasedBreakIterator.getInt(buf, offset);
    offset += 4;
    byte[] temp2 = new byte[l];
    for (int i = 0; i < l; i++, offset++) {
        temp2[i] = buf[offset];
    }
    columnMap = new CompactByteArray(temp, temp2);
    // read in numCols and numColGroups
    numCols = RuleBasedBreakIterator.getInt(buf, offset);
    offset += 4;
    numColGroups = RuleBasedBreakIterator.getInt(buf, offset);
    offset += 4;
    // read in the row-number index
    l = RuleBasedBreakIterator.getInt(buf, offset);
    offset += 4;
    rowIndex = new short[l];
    for (int i = 0; i < l; i++, offset += 2) {
        rowIndex[i] = RuleBasedBreakIterator.getShort(buf, offset);
    }
    // load in the populated-cells bitmap: index first, then bitmap list
    l = RuleBasedBreakIterator.getInt(buf, offset);
    offset += 4;
    rowIndexFlagsIndex = new short[l];
    for (int i = 0; i < l; i++, offset += 2) {
        rowIndexFlagsIndex[i] = RuleBasedBreakIterator.getShort(buf, offset);
    }
    l = RuleBasedBreakIterator.getInt(buf, offset);
    offset += 4;
    rowIndexFlags = new int[l];
    for (int i = 0; i < l; i++, offset += 4) {
        rowIndexFlags[i] = RuleBasedBreakIterator.getInt(buf, offset);
    }
    // load in the row-shift index
    l = RuleBasedBreakIterator.getInt(buf, offset);
    offset += 4;
    rowIndexShifts = new byte[l];
    for (int i = 0; i < l; i++, offset++) {
        rowIndexShifts[i] = buf[offset];
    }
    // load in the actual state table
    l = RuleBasedBreakIterator.getInt(buf, offset);
    offset += 4;
    table = new short[l];
    for (int i = 0; i < l; i++, offset += 2) {
        table[i] = RuleBasedBreakIterator.getShort(buf, offset);
    }
    // finally, prepare the column map for supplementary characters
    l = RuleBasedBreakIterator.getInt(buf, offset);
    offset += 4;
    int[] temp3 = new int[l];
    for (int i = 0; i < l; i++, offset += 4) {
        temp3[i] = RuleBasedBreakIterator.getInt(buf, offset);
    }
    supplementaryCharColumnMap = new SupplementaryCharacterData(temp3);
}
Also used : BufferedInputStream(java.io.BufferedInputStream) PrivilegedActionException(java.security.PrivilegedActionException) MissingResourceException(java.util.MissingResourceException) SupplementaryCharacterData(sun.text.SupplementaryCharacterData) PrivilegedExceptionAction(java.security.PrivilegedExceptionAction) CompactByteArray(sun.text.CompactByteArray)
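
One caveat worth noting: the stream opened inside the privileged block above is only closed on the success path. A hedged sketch of the same opening step with try-with-resources, so the stream is released even if a MissingResourceException is thrown mid-parse:

BufferedInputStream in;
try {
    in = AccessController.doPrivileged((PrivilegedExceptionAction<BufferedInputStream>) () ->
            new BufferedInputStream(getClass().getResourceAsStream("/sun/text/resources/" + dictionaryName)));
} catch (PrivilegedActionException e) {
    throw new InternalError(e.toString(), e);
}
try (BufferedInputStream dict = in) {
    byte[] header = new byte[8];
    if (dict.read(header) != 8) {
        throw new MissingResourceException("Wrong data length", dictionaryName, "");
    }
    // ... version check and table parsing proceed as above ...
}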

Aggregations

Classes most frequently co-occurring with PrivilegedExceptionAction in the indexed examples (usage counts):

PrivilegedExceptionAction (java.security.PrivilegedExceptionAction): 387
IOException (java.io.IOException): 199
PrivilegedActionException (java.security.PrivilegedActionException): 135
Test (org.junit.Test): 104
Connection (org.apache.hadoop.hbase.client.Connection): 81
UserGroupInformation (org.apache.hadoop.security.UserGroupInformation): 76
Table (org.apache.hadoop.hbase.client.Table): 62
TableName (org.apache.hadoop.hbase.TableName): 57
Result (org.apache.hadoop.hbase.client.Result): 56
Scan (org.apache.hadoop.hbase.client.Scan): 55
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 53
Delete (org.apache.hadoop.hbase.client.Delete): 48
InterruptedIOException (java.io.InterruptedIOException): 47
Cell (org.apache.hadoop.hbase.Cell): 38
CellScanner (org.apache.hadoop.hbase.CellScanner): 38
Configuration (org.apache.hadoop.conf.Configuration): 36
File (java.io.File): 33
AuthorizationException (org.apache.hadoop.security.authorize.AuthorizationException): 33
Path (org.apache.hadoop.fs.Path): 23
ArrayList (java.util.ArrayList): 22