use of java.security.PrivilegedExceptionAction in project kylo by Teradata.
the class TestKerberosKinit method testHiveJdbcConnection.
private void testHiveJdbcConnection(final String configResources, final String keytab, final String realUserPrincipal, final String proxyUser, final String hiveHostName) throws Exception {
    final Configuration configuration = TestKerberosKinit.createConfigurationFromList(configResources);
    UserGroupInformation realugi = TestKerberosKinit.generateKerberosTicket(configuration, keytab, realUserPrincipal);
    System.out.println(" ");
    System.out.println("Successfully got a kerberos ticket in the JVM");
    HiveConnection realUserConnection = (HiveConnection) realugi.doAs(new PrivilegedExceptionAction<Connection>() {

        public Connection run() {
            Connection connection = null;
            Statement stmt = null;
            ResultSet res = null;
            try {
                Class.forName(DRIVER_NAME);
                String url = hiveHostName;
                if (proxyUser != null) {
                    url = url + ";hive.server2.proxy.user=" + proxyUser;
                }
                System.out.println("Hive URL: " + url);
                connection = DriverManager.getConnection(url);
                System.out.println("creating statement");
                stmt = connection.createStatement();
                String sql = "show databases";
                res = stmt.executeQuery(sql);
                System.out.println(" \n");
                System.out.println("Executing the Hive Query:");
                System.out.println(" ");
                System.out.println("List of Databases");
                while (res.next()) {
                    System.out.println(res.getString(1));
                }
            } catch (Exception e) {
                throw new RuntimeException("Error creating connection with proxy user", e);
            } finally {
                JdbcUtils.closeResultSet(res);
                JdbcUtils.closeStatement(stmt);
                JdbcUtils.closeConnection(connection);
            }
            return connection;
        }
    });
}
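On Java 8 and later the anonymous action can be written as a lambda cast to PrivilegedExceptionAction, and try-with-resources avoids handing back a connection that the finally block has already closed. A minimal sketch of that variant, reusing realugi, DRIVER_NAME, hiveHostName and proxyUser from the method above and assuming the usual java.sql and java.util imports; this is an illustration, not the project's code:

    List<String> databases = realugi.doAs((PrivilegedExceptionAction<List<String>>) () -> {
        Class.forName(DRIVER_NAME);
        String url = proxyUser != null ? hiveHostName + ";hive.server2.proxy.user=" + proxyUser : hiveHostName;
        List<String> names = new ArrayList<>();
        // try-with-resources closes the ResultSet, Statement and Connection in reverse order
        try (Connection connection = DriverManager.getConnection(url);
             Statement stmt = connection.createStatement();
             ResultSet res = stmt.executeQuery("show databases")) {
            while (res.next()) {
                names.add(res.getString(1));
            }
        }
        return names;
    });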
use of java.security.PrivilegedExceptionAction in project nifi by apache.
the class HiveWriterTest method setup.
@Before
public void setup() throws Exception {
    hiveEndPoint = mock(HiveEndPoint.class);
    txnsPerBatch = 100;
    autoCreatePartitions = true;
    callTimeout = 0;
    executorService = mock(ExecutorService.class);
    streamingConnection = mock(StreamingConnection.class);
    transactionBatch = mock(TransactionBatch.class);
    userGroupInformation = mock(UserGroupInformation.class);
    hiveConf = mock(HiveConf.class);
    recordWriter = mock(RecordWriter.class);
    recordWriterCallable = mock(Callable.class);
    when(recordWriterCallable.call()).thenReturn(recordWriter);
    when(hiveEndPoint.newConnection(autoCreatePartitions, hiveConf, userGroupInformation)).thenReturn(streamingConnection);
    when(streamingConnection.fetchTransactionBatch(txnsPerBatch, recordWriter)).thenReturn(transactionBatch);
    when(executorService.submit(isA(Callable.class))).thenAnswer(invocation -> {
        Future future = mock(Future.class);
        Answer<Object> answer = i -> ((Callable) invocation.getArguments()[0]).call();
        when(future.get()).thenAnswer(answer);
        when(future.get(anyLong(), any(TimeUnit.class))).thenAnswer(answer);
        return future;
    });
    when(userGroupInformation.doAs(isA(PrivilegedExceptionAction.class))).thenAnswer(invocation -> {
        try {
            try {
                return ((PrivilegedExceptionAction) invocation.getArguments()[0]).run();
            } catch (UncheckedExecutionException e) {
                // Creation of the strict JSON writer will fail due to external deps; this gives us a chance to catch it
                for (StackTraceElement stackTraceElement : e.getStackTrace()) {
                    if (stackTraceElement.toString().startsWith("org.apache.hive.hcatalog.streaming.StrictJsonWriter.<init>(")) {
                        return recordWriterCallable.call();
                    }
                }
                throw e;
            }
        } catch (IOException | Error | RuntimeException | InterruptedException e) {
            throw e;
        } catch (Throwable e) {
            throw new UndeclaredThrowableException(e);
        }
    });
    initWriter();
}
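With this stub in place, doAs simply runs the supplied PrivilegedExceptionAction on the calling thread, so the writer under test behaves as if it already held the Kerberos identity. A hypothetical check of the stub's behavior (not part of the NiFi test, and assuming JUnit's assertEquals is statically imported):

    String result = userGroupInformation.doAs((PrivilegedExceptionAction<String>) () -> "ran as mocked user");
    assertEquals("ran as mocked user", result);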
use of java.security.PrivilegedExceptionAction in project nifi by apache.
the class GetHDFS method processBatchOfFiles.
protected void processBatchOfFiles(final List<Path> files, final ProcessContext context, final ProcessSession session) {
    // process the batch of files
    InputStream stream = null;
    CompressionCodec codec = null;
    Configuration conf = getConfiguration();
    FileSystem hdfs = getFileSystem();
    final boolean keepSourceFiles = context.getProperty(KEEP_SOURCE_FILE).asBoolean();
    final Double bufferSizeProp = context.getProperty(BUFFER_SIZE).asDataSize(DataUnit.B);
    int bufferSize = bufferSizeProp != null ? bufferSizeProp.intValue() : conf.getInt(BUFFER_SIZE_KEY, BUFFER_SIZE_DEFAULT);
    final Path rootDir = new Path(context.getProperty(DIRECTORY).evaluateAttributeExpressions().getValue());
    final CompressionType compressionType = CompressionType.valueOf(context.getProperty(COMPRESSION_CODEC).toString());
    final boolean inferCompressionCodec = compressionType == CompressionType.AUTOMATIC;
    if (inferCompressionCodec || compressionType != CompressionType.NONE) {
        codec = getCompressionCodec(context, getConfiguration());
    }
    final CompressionCodecFactory compressionCodecFactory = new CompressionCodecFactory(conf);
    for (final Path file : files) {
        try {
            if (!getUserGroupInformation().doAs((PrivilegedExceptionAction<Boolean>) () -> hdfs.exists(file))) {
                // if file is no longer there then move on
                continue;
            }
            final String originalFilename = file.getName();
            final String relativePath = getPathDifference(rootDir, file);
            stream = getUserGroupInformation().doAs((PrivilegedExceptionAction<FSDataInputStream>) () -> hdfs.open(file, bufferSize));
            final String outputFilename;
            // Check if we should infer compression codec
            if (inferCompressionCodec) {
                codec = compressionCodecFactory.getCodec(file);
            }
            // Check if compression codec is defined (inferred or otherwise)
            if (codec != null) {
                stream = codec.createInputStream(stream);
                outputFilename = StringUtils.removeEnd(originalFilename, codec.getDefaultExtension());
            } else {
                outputFilename = originalFilename;
            }
            FlowFile flowFile = session.create();
            final StopWatch stopWatch = new StopWatch(true);
            flowFile = session.importFrom(stream, flowFile);
            stopWatch.stop();
            final String dataRate = stopWatch.calculateDataRate(flowFile.getSize());
            final long millis = stopWatch.getDuration(TimeUnit.MILLISECONDS);
            flowFile = session.putAttribute(flowFile, CoreAttributes.PATH.key(), relativePath.isEmpty() ? "." : relativePath);
            flowFile = session.putAttribute(flowFile, CoreAttributes.FILENAME.key(), outputFilename);
            if (!keepSourceFiles && !getUserGroupInformation().doAs((PrivilegedExceptionAction<Boolean>) () -> hdfs.delete(file, false))) {
                getLogger().warn("Could not remove {} from HDFS. Not ingesting this file ...", new Object[] { file });
                session.remove(flowFile);
                continue;
            }
            session.getProvenanceReporter().receive(flowFile, file.toString());
            session.transfer(flowFile, REL_SUCCESS);
            getLogger().info("retrieved {} from HDFS {} in {} milliseconds at a rate of {}", new Object[] { flowFile, file, millis, dataRate });
            session.commit();
        } catch (final Throwable t) {
            getLogger().error("Error retrieving file {} from HDFS due to {}", new Object[] { file, t });
            session.rollback();
            context.yield();
        } finally {
            IOUtils.closeQuietly(stream);
            stream = null;
        }
    }
}
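Each HDFS call above is wrapped individually in a lambda cast to PrivilegedExceptionAction so it executes as the processor's configured user. The same pattern in isolation, as a hypothetical helper class rather than part of GetHDFS:

    import java.io.IOException;
    import java.security.PrivilegedExceptionAction;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.security.UserGroupInformation;

    final class HdfsAsUser {

        // Runs hdfs.exists(path) under the given UserGroupInformation, mirroring the calls above.
        static boolean exists(UserGroupInformation ugi, FileSystem hdfs, Path path) throws IOException, InterruptedException {
            return ugi.doAs((PrivilegedExceptionAction<Boolean>) () -> hdfs.exists(path));
        }
    }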
use of java.security.PrivilegedExceptionAction in project jdk8u_jdk by JetBrains.
the class CertStoreHelper method getInstance.
public static CertStoreHelper getInstance(final String type) throws NoSuchAlgorithmException {
    CertStoreHelper helper = cache.get(type);
    if (helper != null) {
        return helper;
    }
    final String cl = classMap.get(type);
    if (cl == null) {
        throw new NoSuchAlgorithmException(type + " not available");
    }
    try {
        helper = AccessController.doPrivileged(new PrivilegedExceptionAction<CertStoreHelper>() {

            public CertStoreHelper run() throws ClassNotFoundException {
                try {
                    Class<?> c = Class.forName(cl, true, null);
                    CertStoreHelper csh = (CertStoreHelper) c.newInstance();
                    cache.put(type, csh);
                    return csh;
                } catch (InstantiationException | IllegalAccessException e) {
                    throw new AssertionError(e);
                }
            }
        });
        return helper;
    } catch (PrivilegedActionException e) {
        throw new NoSuchAlgorithmException(type + " not available", e.getException());
    }
}
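The idiom here is that a checked exception thrown inside run() surfaces to the caller as a PrivilegedActionException, which is unwrapped with getException(). A self-contained sketch of the same idiom (a hypothetical class, not JDK code):

    import java.security.AccessController;
    import java.security.PrivilegedActionException;
    import java.security.PrivilegedExceptionAction;

    final class PrivilegedLoad {

        // Loads a class inside a privileged block; ClassNotFoundException is wrapped in
        // PrivilegedActionException and rethrown after unwrapping. The cast is safe here
        // because ClassNotFoundException is the only checked exception the action can throw.
        static Class<?> load(final String className) throws ClassNotFoundException {
            try {
                return AccessController.doPrivileged((PrivilegedExceptionAction<Class<?>>) () -> Class.forName(className, true, null));
            } catch (PrivilegedActionException e) {
                throw (ClassNotFoundException) e.getException();
            }
        }
    }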
use of java.security.PrivilegedExceptionAction in project jdk8u_jdk by JetBrains.
the class BreakDictionary method readDictionaryFile.
private void readDictionaryFile(final String dictionaryName) throws IOException, MissingResourceException {
    BufferedInputStream in;
    try {
        in = AccessController.doPrivileged(new PrivilegedExceptionAction<BufferedInputStream>() {

            @Override
            public BufferedInputStream run() throws Exception {
                return new BufferedInputStream(getClass().getResourceAsStream("/sun/text/resources/" + dictionaryName));
            }
        });
    } catch (PrivilegedActionException e) {
        throw new InternalError(e.toString(), e);
    }
    byte[] buf = new byte[8];
    if (in.read(buf) != 8) {
        throw new MissingResourceException("Wrong data length", dictionaryName, "");
    }
    // check version
    int version = RuleBasedBreakIterator.getInt(buf, 0);
    if (version != supportedVersion) {
        throw new MissingResourceException("Dictionary version(" + version + ") is unsupported", dictionaryName, "");
    }
    // get data size
    int len = RuleBasedBreakIterator.getInt(buf, 4);
    buf = new byte[len];
    if (in.read(buf) != len) {
        throw new MissingResourceException("Wrong data length", dictionaryName, "");
    }
    // close the stream
    in.close();
    int l;
    int offset = 0;
    // read in the column map for BMP characters (this is serialized in
    // its internal form: an index array followed by a data array)
    l = RuleBasedBreakIterator.getInt(buf, offset);
    offset += 4;
    short[] temp = new short[l];
    for (int i = 0; i < l; i++, offset += 2) {
        temp[i] = RuleBasedBreakIterator.getShort(buf, offset);
    }
    l = RuleBasedBreakIterator.getInt(buf, offset);
    offset += 4;
    byte[] temp2 = new byte[l];
    for (int i = 0; i < l; i++, offset++) {
        temp2[i] = buf[offset];
    }
    columnMap = new CompactByteArray(temp, temp2);
    // read in numCols and numColGroups
    numCols = RuleBasedBreakIterator.getInt(buf, offset);
    offset += 4;
    numColGroups = RuleBasedBreakIterator.getInt(buf, offset);
    offset += 4;
    // read in the row-number index
    l = RuleBasedBreakIterator.getInt(buf, offset);
    offset += 4;
    rowIndex = new short[l];
    for (int i = 0; i < l; i++, offset += 2) {
        rowIndex[i] = RuleBasedBreakIterator.getShort(buf, offset);
    }
    // load in the populated-cells bitmap: index first, then bitmap list
    l = RuleBasedBreakIterator.getInt(buf, offset);
    offset += 4;
    rowIndexFlagsIndex = new short[l];
    for (int i = 0; i < l; i++, offset += 2) {
        rowIndexFlagsIndex[i] = RuleBasedBreakIterator.getShort(buf, offset);
    }
    l = RuleBasedBreakIterator.getInt(buf, offset);
    offset += 4;
    rowIndexFlags = new int[l];
    for (int i = 0; i < l; i++, offset += 4) {
        rowIndexFlags[i] = RuleBasedBreakIterator.getInt(buf, offset);
    }
    // load in the row-shift index
    l = RuleBasedBreakIterator.getInt(buf, offset);
    offset += 4;
    rowIndexShifts = new byte[l];
    for (int i = 0; i < l; i++, offset++) {
        rowIndexShifts[i] = buf[offset];
    }
    // load in the actual state table
    l = RuleBasedBreakIterator.getInt(buf, offset);
    offset += 4;
    table = new short[l];
    for (int i = 0; i < l; i++, offset += 2) {
        table[i] = RuleBasedBreakIterator.getShort(buf, offset);
    }
    // finally, prepare the column map for supplementary characters
    l = RuleBasedBreakIterator.getInt(buf, offset);
    offset += 4;
    int[] temp3 = new int[l];
    for (int i = 0; i < l; i++, offset += 4) {
        temp3[i] = RuleBasedBreakIterator.getInt(buf, offset);
    }
    supplementaryCharColumnMap = new SupplementaryCharacterData(temp3);
}
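One caveat when adapting this pattern: InputStream.read(byte[]) may legally return fewer bytes than requested even before the end of the stream, which the length checks above would report as "Wrong data length". A small alternative sketch (an assumption, not the JDK's code) that reads exactly the requested number of bytes:

    import java.io.DataInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    final class FullReads {

        // Reads exactly len bytes or throws EOFException, instead of relying on a single
        // read() call filling the whole buffer.
        static byte[] readExactly(InputStream in, int len) throws IOException {
            byte[] buf = new byte[len];
            new DataInputStream(in).readFully(buf);
            return buf;
        }
    }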