use of org.apache.accumulo.core.client.security.tokens.AuthenticationToken in project hive by apache.
the class HiveAccumuloHelper method unwrapAuthenticationToken.
/**
 * Wrapper around <code>ConfiguratorBase.unwrapAuthenticationToken</code>, which only exists in
 * 1.7.0 and newer. Uses reflection to avoid breaking compatibility with older versions.
 *
 * @param jobConf
 *          JobConf object
 * @param token
 *          The DelegationTokenStub instance
 * @return A DelegationTokenImpl created from the Token in the Job's credentials
 * @throws IOException
 *           If the token fails to be unwrapped
 */
public AuthenticationToken unwrapAuthenticationToken(JobConf jobConf, AuthenticationToken token)
    throws IOException {
  try {
    Class<?> configuratorBaseClass = JavaUtils.loadClass(CONFIGURATOR_BASE_CLASS_NAME);
    Method unwrapAuthenticationTokenMethod = configuratorBaseClass.getMethod(
        UNWRAP_AUTHENTICATION_TOKEN_METHOD_NAME, JobConf.class, AuthenticationToken.class);
    // ConfiguratorBase.unwrapAuthenticationToken(conf, token);
    return (AuthenticationToken) unwrapAuthenticationTokenMethod.invoke(null, jobConf, token);
  } catch (Exception e) {
    throw new IOException("Failed to unwrap AuthenticationToken", e);
  }
}
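The reflective lookup above is the standard shim for calling an API that only exists in newer library releases without taking a compile-time dependency on it. Below is a minimal, self-contained sketch of the same pattern; the class and method names (org.example.SomeLibraryClass, newerStaticMethod) are placeholders chosen for illustration, not real Accumulo APIs.

import java.io.IOException;
import java.lang.reflect.Method;

public class ReflectiveCompatShim {

  // Hypothetical class and method names, used only to illustrate the pattern.
  private static final String TARGET_CLASS = "org.example.SomeLibraryClass";
  private static final String TARGET_METHOD = "newerStaticMethod";

  /**
   * Invokes TARGET_CLASS.TARGET_METHOD(String) reflectively so the calling code still compiles
   * and loads against library versions that do not ship the method.
   */
  public static Object invokeIfPresent(String argument) throws IOException {
    try {
      Class<?> clazz = Class.forName(TARGET_CLASS);
      Method method = clazz.getMethod(TARGET_METHOD, String.class);
      // Static method, so the receiver is null, just like the Hive helper above.
      return method.invoke(null, argument);
    } catch (ClassNotFoundException | NoSuchMethodException e) {
      // Older library version: the API simply is not there, so signal "unsupported".
      return null;
    } catch (Exception e) {
      throw new IOException("Reflective call to " + TARGET_METHOD + " failed", e);
    }
  }
}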
use of org.apache.accumulo.core.client.security.tokens.AuthenticationToken in project hive by apache.
the class HiveAccumuloTableInputFormat method configure.
/**
 * Configure the underlying AccumuloInputFormat
 *
 * @param conf
 *          Job configuration
 * @param instance
 *          Accumulo instance
 * @param connector
 *          Accumulo connector
 * @param accumuloParams
 *          Connection information to the Accumulo instance
 * @param columnMapper
 *          Configuration of Hive to Accumulo columns
 * @param iterators
 *          Any iterators to be configured server-side
 * @param ranges
 *          Accumulo ranges for the query
 * @throws AccumuloSecurityException
 * @throws AccumuloException
 * @throws SerDeException
 */
protected void configure(JobConf conf, Instance instance, Connector connector,
    AccumuloConnectionParameters accumuloParams, ColumnMapper columnMapper,
    List<IteratorSetting> iterators, Collection<Range> ranges)
    throws AccumuloSecurityException, AccumuloException, SerDeException, IOException {
  // Handle implementation of Instance and invoke appropriate InputFormat method
  if (instance instanceof MockInstance) {
    setMockInstance(conf, instance.getInstanceName());
  } else {
    setZooKeeperInstance(conf, instance.getInstanceName(), instance.getZooKeepers(),
        accumuloParams.useSasl());
  }
  // Set the username/passwd for the Accumulo connection
  if (accumuloParams.useSasl()) {
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    // If we have Kerberos credentials, we should obtain the delegation token
    if (ugi.hasKerberosCredentials()) {
      Connector conn = accumuloParams.getConnector();
      AuthenticationToken token = helper.getDelegationToken(conn);
      // Send the DelegationToken down to the Configuration for Accumulo to use
      setConnectorInfo(conf, accumuloParams.getAccumuloUserName(), token);
      // Convert the Accumulo token into a Hadoop token
      Token<? extends TokenIdentifier> accumuloToken = helper.getHadoopToken(token);
      log.info("Adding Hadoop Token for Accumulo to Job's Credentials");
      // Add the Hadoop token to the JobConf
      helper.mergeTokenIntoJobConf(conf, accumuloToken);
      if (!ugi.addToken(accumuloToken)) {
        throw new IOException("Failed to add Accumulo Token to UGI");
      }
    }
    try {
      helper.addTokenFromUserToJobConf(ugi, conf);
    } catch (IOException e) {
      throw new IOException("Current user did not contain necessary delegation Tokens " + ugi, e);
    }
  } else {
    setConnectorInfo(conf, accumuloParams.getAccumuloUserName(),
        new PasswordToken(accumuloParams.getAccumuloPassword()));
  }
  // Read from the given Accumulo table
  setInputTableName(conf, accumuloParams.getAccumuloTableName());
  // Check Configuration for any user-provided Authorization definition
  Authorizations auths = AccumuloSerDeParameters.getAuthorizationsFromConf(conf);
  if (null == auths) {
    // Default to all of the user's authorizations when no configuration is provided
    auths = connector.securityOperations().getUserAuthorizations(accumuloParams.getAccumuloUserName());
  }
  // Implicitly handles users providing invalid authorizations
  setScanAuthorizations(conf, auths);
  // Restrict with any filters found from WHERE predicates
  addIterators(conf, iterators);
  // Not setting ranges scans the entire table
  if (null != ranges) {
    log.info("Setting ranges: " + ranges);
    setRanges(conf, ranges);
  }
  // Restrict the set of columns that we want to read from the Accumulo table
  HashSet<Pair<Text, Text>> pairs = getPairCollection(columnMapper.getColumnMappings());
  if (null != pairs && !pairs.isEmpty()) {
    fetchColumns(conf, pairs);
  }
}
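For the simpler password-authenticated path, the work that configure delegates to corresponds to the static configurators on Accumulo's mapred AccumuloInputFormat. The sketch below wires them together directly, assuming the Accumulo 1.x client API; the instance name, ZooKeeper quorum, table, and credentials are placeholder values.

import java.util.Collections;

import org.apache.accumulo.core.client.ClientConfiguration;
import org.apache.accumulo.core.client.IteratorSetting;
import org.apache.accumulo.core.client.mapred.AccumuloInputFormat;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.core.util.Pair;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;

public class AccumuloInputFormatSetup {

  public static void configure(JobConf conf) throws Exception {
    // Point the InputFormat at a live (non-mock) instance over ZooKeeper.
    AccumuloInputFormat.setZooKeeperInstance(conf, ClientConfiguration.loadDefault()
        .withInstance("my-instance")            // placeholder instance name
        .withZkHosts("zk1:2181,zk2:2181"));     // placeholder ZooKeeper quorum
    // Password-based connector info, mirroring the non-SASL branch above.
    AccumuloInputFormat.setConnectorInfo(conf, "hive-user", new PasswordToken("secret"));
    // Table, scan authorizations, server-side iterators, ranges, and fetched columns.
    AccumuloInputFormat.setInputTableName(conf, "my_table");
    AccumuloInputFormat.setScanAuthorizations(conf, new Authorizations());
    AccumuloInputFormat.addIterator(conf, new IteratorSetting(50, "vers",
        "org.apache.accumulo.core.iterators.user.VersioningIterator"));
    AccumuloInputFormat.setRanges(conf, Collections.singleton(new Range()));
    AccumuloInputFormat.fetchColumns(conf,
        Collections.singleton(new Pair<>(new Text("cf"), new Text("cq"))));
  }
}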
use of org.apache.accumulo.core.client.security.tokens.AuthenticationToken in project hive by apache.
the class HiveAccumuloHelper method getHadoopToken.
public Token<? extends TokenIdentifier> getHadoopToken(AuthenticationToken delegationToken)
    throws IOException {
  try {
    // DelegationTokenImpl class
    Class<?> delegationTokenClass = JavaUtils.loadClass(DELEGATION_TOKEN_IMPL_CLASS_NAME);
    // Methods on DelegationToken
    Method getIdentifierMethod = delegationTokenClass.getMethod(GET_IDENTIFIER_METHOD_NAME);
    Method getPasswordMethod = delegationTokenClass.getMethod(GET_PASSWORD_METHOD_NAME);
    Method getServiceNameMethod = delegationTokenClass.getMethod(GET_SERVICE_NAME_METHOD_NAME);
    // Treat the TokenIdentifier implementation as the abstract class to avoid dependency issues
    // AuthenticationTokenIdentifier identifier = delegationToken.getIdentifier();
    TokenIdentifier identifier = (TokenIdentifier) getIdentifierMethod.invoke(delegationToken);
    // new Token<TokenIdentifier>(identifier.getBytes(),
    //     delegationToken.getPassword(), identifier.getKind(), delegationToken.getServiceName());
    return new Token<TokenIdentifier>(identifier.getBytes(),
        (byte[]) getPasswordMethod.invoke(delegationToken), identifier.getKind(),
        (Text) getServiceNameMethod.invoke(delegationToken));
  } catch (Exception e) {
    throw new IOException("Failed to create Hadoop token from Accumulo DelegationToken", e);
  }
}
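getHadoopToken only builds the Token object; it still has to be registered so that tasks can authenticate with it. As a rough sketch using standard Hadoop security APIs (the helper class name here is ours, not Hive's), registering the token with the current user and the job's credentials typically looks like this:

import java.io.IOException;

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

public class TokenRegistration {

  /** Adds an already-built Hadoop token to both the current UGI and the job's credentials. */
  public static void register(JobConf conf, Token<? extends TokenIdentifier> token) throws IOException {
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    if (!ugi.addToken(token)) {
      throw new IOException("Failed to add token to UGI");
    }
    // Also place it in the job's credentials so map/reduce tasks can read it at runtime.
    conf.getCredentials().addToken(token.getService(), token);
  }
}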
use of org.apache.accumulo.core.client.security.tokens.AuthenticationToken in project hive by apache.
the class TestHiveAccumuloTableOutputFormat method testSaslConfiguration.
@SuppressWarnings({ "rawtypes", "unchecked" })
@Test
public void testSaslConfiguration() throws IOException, AccumuloException, AccumuloSecurityException {
  final HiveAccumuloTableOutputFormat outputFormat = Mockito.mock(HiveAccumuloTableOutputFormat.class);
  final AuthenticationToken authToken = Mockito.mock(AuthenticationToken.class);
  final Token hadoopToken = Mockito.mock(Token.class);
  final HiveAccumuloHelper helper = Mockito.mock(HiveAccumuloHelper.class);
  final AccumuloConnectionParameters cnxnParams = Mockito.mock(AccumuloConnectionParameters.class);
  final Connector connector = Mockito.mock(Connector.class);
  // Set UGI to use Kerberos
  // Have to use the string constant to support Hadoop 1
  conf.set("hadoop.security.authentication", "kerberos");
  UserGroupInformation.setConfiguration(conf);
  // Set the current UGI to a fake user
  UserGroupInformation user1 = UserGroupInformation.createUserForTesting(user, new String[0]);
  // Use that as the "current user"
  Mockito.when(outputFormat.getCurrentUser()).thenReturn(user1);
  // Turn off passwords, enable SASL and set a keytab
  conf.unset(AccumuloConnectionParameters.USER_PASS);
  // Call the real method instead of the mock
  Mockito.doCallRealMethod().when(outputFormat).configureAccumuloOutputFormat(conf);
  // Return our mocked objects
  Mockito.when(outputFormat.getHelper()).thenReturn(helper);
  Mockito.when(outputFormat.getConnectionParams(conf)).thenReturn(cnxnParams);
  Mockito.when(cnxnParams.getConnector()).thenReturn(connector);
  Mockito.when(helper.getDelegationToken(connector)).thenReturn(authToken);
  Mockito.when(helper.getHadoopToken(authToken)).thenReturn(hadoopToken);
  // Stub AccumuloConnectionParameters actions
  Mockito.when(cnxnParams.useSasl()).thenReturn(true);
  Mockito.when(cnxnParams.getAccumuloUserName()).thenReturn(user);
  Mockito.when(cnxnParams.getAccumuloInstanceName()).thenReturn(instanceName);
  Mockito.when(cnxnParams.getZooKeepers()).thenReturn(zookeepers);
  // Stub OutputFormat actions
  Mockito.when(outputFormat.hasKerberosCredentials(user1)).thenReturn(true);
  // Invoke the method
  outputFormat.configureAccumuloOutputFormat(conf);
  // The AccumuloOutputFormat methods
  Mockito.verify(outputFormat).setZooKeeperInstanceWithErrorChecking(conf, instanceName, zookeepers, true);
  Mockito.verify(outputFormat).setConnectorInfoWithErrorChecking(conf, user, authToken);
  Mockito.verify(outputFormat).setDefaultAccumuloTableName(conf, outputTable);
  // Other methods we expect
  Mockito.verify(helper).mergeTokenIntoJobConf(conf, hadoopToken);
  // Make sure the token made it into the UGI
  Collection<Token<? extends TokenIdentifier>> tokens = user1.getTokens();
  Assert.assertEquals(1, tokens.size());
  Assert.assertEquals(hadoopToken, tokens.iterator().next());
}
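The test relies on Mockito's partial-mock idiom: the class under test is mocked, its collaborators are stubbed, and only the method being exercised runs its real body via doCallRealMethod(). The following self-contained illustration of that pattern uses made-up Service and Helper classes rather than the Hive types:

import static org.mockito.Mockito.doCallRealMethod;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import org.junit.Assert;
import org.junit.Test;

public class PartialMockPatternTest {

  // Tiny stand-in classes, used only to demonstrate the mocking pattern.
  public static class Helper {
    public String token() {
      return "real-token";
    }
  }

  public static class Service {
    public Helper getHelper() {
      return new Helper();
    }

    public String configure() {
      return "configured-with-" + getHelper().token();
    }
  }

  @Test
  public void realMethodRunsAgainstStubbedCollaborators() {
    Service service = mock(Service.class);
    Helper helper = mock(Helper.class);
    // Only configure() executes its real body; getHelper() stays stubbed.
    doCallRealMethod().when(service).configure();
    when(service.getHelper()).thenReturn(helper);
    when(helper.token()).thenReturn("mock-token");
    Assert.assertEquals("configured-with-mock-token", service.configure());
    verify(helper).token();
  }
}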
use of org.apache.accumulo.core.client.security.tokens.AuthenticationToken in project gora by apache.
the class AccumuloStore method initialize.
/**
 * Initialize the data store by reading the credentials, setting up the client's properties, and
 * reading the mapping file. Initialize is called when the call to
 * {@link org.apache.gora.store.DataStoreFactory#createDataStore} is made.
 *
 * @param keyClass
 * @param persistentClass
 * @param properties
 */
@Override
public void initialize(Class<K> keyClass, Class<T> persistentClass, Properties properties) {
  try {
    super.initialize(keyClass, persistentClass, properties);
    String mock = DataStoreFactory.findProperty(properties, this, MOCK_PROPERTY, null);
    String mappingFile = DataStoreFactory.getMappingFile(properties, this, DEFAULT_MAPPING_FILE);
    String user = DataStoreFactory.findProperty(properties, this, USERNAME_PROPERTY, null);
    String password = DataStoreFactory.findProperty(properties, this, PASSWORD_PROPERTY, null);
    mapping = readMapping(mappingFile);
    if (mapping.encoder == null || "".equals(mapping.encoder)) {
      encoder = new BinaryEncoder();
    } else {
      try {
        encoder = (Encoder) getClass().getClassLoader().loadClass(mapping.encoder).newInstance();
      } catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) {
        throw new IOException(e);
      }
    }
    try {
      AuthenticationToken token = new PasswordToken(password);
      if (mock == null || !mock.equals("true")) {
        String instance = DataStoreFactory.findProperty(properties, this, INSTANCE_NAME_PROPERTY, null);
        String zookeepers = DataStoreFactory.findProperty(properties, this, ZOOKEEPERS_NAME_PROPERTY, null);
        conn = new ZooKeeperInstance(instance, zookeepers).getConnector(user, token);
      } else {
        conn = new MockInstance().getConnector(user, token);
      }
      credentials = new Credentials(user, token);
      if (autoCreateSchema && !schemaExists())
        createSchema();
    } catch (AccumuloException | AccumuloSecurityException e) {
      throw new IOException(e);
    }
  } catch (IOException e) {
    LOG.error(e.getMessage(), e);
  }
}
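Stripped of the Gora property lookups, the non-mock branch reduces to the standard Accumulo 1.x client handshake shown below; the instance name, ZooKeeper quorum, and credentials are placeholder values.

import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.Instance;
import org.apache.accumulo.core.client.ZooKeeperInstance;
import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;

public class AccumuloConnectionSketch {

  public static Connector connect() throws Exception {
    // Locate the instance through its ZooKeeper quorum, then authenticate with a PasswordToken.
    Instance instance = new ZooKeeperInstance("my-instance", "zk1:2181,zk2:2181");
    AuthenticationToken token = new PasswordToken("secret");
    return instance.getConnector("gora-user", token);
  }
}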