Use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project hive by apache.
The class SmokeTest, method main.
/**
 * Entry point for the metastore smoke test: builds a fresh metastore
 * configuration, opens a client connection, and runs the test against it.
 *
 * @param args command-line arguments (unused)
 * @throws Exception if the client cannot connect or the smoke test fails
 */
public static void main(String[] args) throws Exception {
    SmokeTest test = new SmokeTest();
    Configuration conf = MetastoreConf.newMetastoreConf();
    IMetaStoreClient client = new HiveMetaStoreClient(conf);
    try {
        test.runTest(client);
    } finally {
        // Release the Thrift connection even if the test throws;
        // the original leaked the client on any exit path.
        client.close();
    }
}
Use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project storm by apache.
The class HiveSetupUtil, method createDbAndTable.
/**
 * Creates a database and a managed ORC-backed table in the Hive metastore,
 * optionally adding one partition, for use in test setup.
 *
 * @param conf         Hive configuration used to connect to the metastore
 * @param databaseName name of the database to create
 * @param tableName    name of the table to create inside that database
 * @param partVals     partition values to add after table creation; skipped if null/empty
 * @param colNames     column names for the table schema
 * @param colTypes     column types, parallel to {@code colNames}
 * @param partNames    partition key names; no partition keys are set if null/empty
 * @param dbLocation   filesystem URI for the database; table lives under it
 * @throws Exception if any metastore operation fails (other than an
 *                   already-existing partition, which is ignored)
 */
public static void createDbAndTable(HiveConf conf, String databaseName, String tableName, List<String> partVals, String[] colNames, String[] colTypes, String[] partNames, String dbLocation) throws Exception {
IMetaStoreClient client = new HiveMetaStoreClient(conf);
try {
Database db = new Database();
db.setName(databaseName);
db.setLocationUri(dbLocation);
client.createDatabase(db);
Table tbl = new Table();
tbl.setDbName(databaseName);
tbl.setTableName(tableName);
tbl.setTableType(TableType.MANAGED_TABLE.toString());
StorageDescriptor sd = new StorageDescriptor();
sd.setCols(getTableColumns(colNames, colTypes));
sd.setNumBuckets(1);
// Table data lives in a subdirectory of the database location.
sd.setLocation(dbLocation + Path.SEPARATOR + tableName);
if (partNames != null && partNames.length != 0) {
tbl.setPartitionKeys(getPartitionKeys(partNames));
}
// sd is attached by reference, so mutations below still take effect.
tbl.setSd(sd);
sd.setBucketCols(new ArrayList<String>(2));
sd.setSerdeInfo(new SerDeInfo());
sd.getSerdeInfo().setName(tbl.getTableName());
sd.getSerdeInfo().setParameters(new HashMap<String, String>());
sd.getSerdeInfo().getParameters().put(serdeConstants.SERIALIZATION_FORMAT, "1");
// Store the table as ORC.
sd.getSerdeInfo().setSerializationLib(OrcSerde.class.getName());
sd.setInputFormat(OrcInputFormat.class.getName());
sd.setOutputFormat(OrcOutputFormat.class.getName());
Map<String, String> tableParams = new HashMap<String, String>();
tbl.setParameters(tableParams);
client.createTable(tbl);
try {
if (partVals != null && partVals.size() > 0) {
addPartition(client, tbl, partVals);
}
} catch (AlreadyExistsException e) {
// Deliberately ignored: a pre-existing partition is acceptable in test setup.
}
} finally {
// Always release the metastore connection.
client.close();
}
}
Use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project hive by apache.
The class TestHadoopAuthBridge23, method obtainTokenAndAddIntoUGI.
/**
 * Obtains a metastore delegation token as {@code clientUgi}, verifies the
 * identity embedded in it, attaches it to the UGI, confirms the token allows
 * metastore access, then cancels it and confirms access is denied.
 *
 * @param clientUgi the user on whose behalf the token is obtained and used
 * @param tokenSig  optional token signature; if non-null it is set on both the
 *                  configuration and the token's service field
 * @throws Exception on any failure in token handling or metastore access
 */
private void obtainTokenAndAddIntoUGI(UserGroupInformation clientUgi, String tokenSig) throws Exception {
String tokenStrForm = getDelegationTokenStr(clientUgi, clientUgi);
Token<DelegationTokenIdentifier> t = new Token<DelegationTokenIdentifier>();
t.decodeFromUrlString(tokenStrForm);
//check whether the username in the token is what we expect
DelegationTokenIdentifier d = new DelegationTokenIdentifier();
d.readFields(new DataInputStream(new ByteArrayInputStream(t.getIdentifier())));
Assert.assertTrue("Usernames don't match", clientUgi.getShortUserName().equals(d.getUser().getShortUserName()));
if (tokenSig != null) {
// Config and token service must agree so the client selects this token.
conf.setVar(HiveConf.ConfVars.METASTORE_TOKEN_SIGNATURE, tokenSig);
t.setService(new Text(tokenSig));
}
//add the token to the clientUgi for securely talking to the metastore
clientUgi.addToken(t);
//Create the metastore client as the clientUgi. Doing so this
//way will give the client access to the token that was added earlier
//in the clientUgi
HiveMetaStoreClient hiveClient = clientUgi.doAs(new PrivilegedExceptionAction<HiveMetaStoreClient>() {

public HiveMetaStoreClient run() throws Exception {
HiveMetaStoreClient hiveClient = new HiveMetaStoreClient(conf);
return hiveClient;
}
});
Assert.assertTrue("Couldn't connect to metastore", hiveClient != null);
//try out some metastore operations
createDBAndVerifyExistence(hiveClient);
hiveClient.close();
//Now cancel the delegation token
HiveMetaStore.cancelDelegationToken(tokenStrForm);
//now metastore connection should fail
hiveClient = clientUgi.doAs(new PrivilegedExceptionAction<HiveMetaStoreClient>() {

public HiveMetaStoreClient run() {
try {
// A MetaException here is the expected outcome after cancellation;
// null signals that the connection was correctly refused.
return new HiveMetaStoreClient(conf);
} catch (MetaException e) {
return null;
}
}
});
Assert.assertTrue("Expected metastore operations to fail", hiveClient == null);
}
Use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project hive by apache.
The class TestTableIterable, method testNumReturned.
/**
 * Verifies that {@code TableIterable} returns every requested table when the
 * table-name list is an exact multiple of the batch-fetch size, and that the
 * mocked client is called once per batch with the expected name sublists.
 *
 * @throws MetaException, InvalidOperationException, UnknownDBException, TException
 *         declared for the mocked metastore client API; not expected here
 */
@Test
public void testNumReturned() throws MetaException, InvalidOperationException, UnknownDBException, TException {
    HiveMetaStoreClient msc = mock(HiveMetaStoreClient.class);
    // create a mocked metastore client that returns 3 table objects every time it is called
    // will use same size for TableIterable batch fetch size
    List<Table> threeTables = Arrays.asList(new Table(), new Table(), new Table());
    when(msc.getTableObjectsByName(anyString(), anyListOf(String.class))).thenReturn(threeTables);
    List<String> tableNames = Arrays.asList("a", "b", "c", "d", "e", "f");
    TableIterable tIterable = new TableIterable(msc, "dummy", tableNames, threeTables.size());
    // Count how many tables the iterator actually yields.
    Iterator<Table> tIter = tIterable.iterator();
    int size = 0;
    while (tIter.hasNext()) {
        size++;
        tIter.next();
    }
    // assertEquals(message, expected, actual): expected count comes from the
    // input name list; actual is what the iterator produced. (The original had
    // these two arguments swapped, and also created a discarded extra iterator.)
    assertEquals("Number of table objects returned", tableNames.size(), size);
    // Each batch of 3 names should trigger exactly one metastore call.
    verify(msc).getTableObjectsByName("dummy", Arrays.asList("a", "b", "c"));
    verify(msc).getTableObjectsByName("dummy", Arrays.asList("d", "e", "f"));
}
Use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project hive by apache.
The class TestHCatOutputFormat, method setUp.
/**
 * Test fixture setup: builds a HiveConf for this test class, connects a
 * metastore client into the {@code client} field, and initializes the test
 * table via {@code initTable()}.
 *
 * @throws Exception wrapping any failure (including Errors) so the test
 *         framework reports setup problems uniformly
 */
@Override
protected void setUp() throws Exception {
super.setUp();
hiveConf = new HiveConf(this.getClass());
try {
client = new HiveMetaStoreClient(hiveConf);
initTable();
} catch (Throwable e) {
// Log with full stack trace, then rethrow with the cause preserved.
LOG.error("Unable to open the metastore", e);
throw new Exception(e);
}
}
Aggregations