Use of org.apache.accumulo.core.client.admin.TableOperations in project hive by apache.
From the class AccumuloTestSetup, method createAccumuloTable:
protected void createAccumuloTable(Connector conn) throws TableExistsException,
    TableNotFoundException, AccumuloException, AccumuloSecurityException {
  TableOperations tops = conn.tableOperations();
  // Start from a clean slate: drop any existing copy of the test table, then recreate it.
  if (tops.exists(TABLE_NAME)) {
    tops.delete(TABLE_NAME);
  }
  tops.create(TABLE_NAME);

  // Sample values covering the Hive primitive types exercised by the tests.
  boolean[] booleans = new boolean[] { true, false, true };
  byte[] bytes = new byte[] { Byte.MIN_VALUE, -1, Byte.MAX_VALUE };
  short[] shorts = new short[] { Short.MIN_VALUE, -1, Short.MAX_VALUE };
  int[] ints = new int[] { Integer.MIN_VALUE, -1, Integer.MAX_VALUE };
  long[] longs = new long[] { Long.MIN_VALUE, -1, Long.MAX_VALUE };
  String[] strings = new String[] { "Hadoop, Accumulo", "Hive", "Test Strings" };
  float[] floats = new float[] { Float.MIN_VALUE, -1.0F, Float.MAX_VALUE };
  double[] doubles = new double[] { Double.MIN_VALUE, -1.0, Double.MAX_VALUE };
  HiveDecimal[] decimals = new HiveDecimal[] { HiveDecimal.create("3.14159"),
      HiveDecimal.create("2.71828"), HiveDecimal.create("0.57721") };
  Date[] dates = new Date[] { Date.valueOf("2014-01-01"), Date.valueOf("2014-03-01"),
      Date.valueOf("2014-05-01") };
  Timestamp[] timestamps = new Timestamp[] { new Timestamp(50), new Timestamp(100),
      new Timestamp(150) };

  BatchWriter bw = conn.createBatchWriter(TABLE_NAME, new BatchWriterConfig());
  final String cf = "cf";
  try {
    // Write three rows, one column qualifier per type; every value is serialized as a
    // string except cq-byte, which is written as a raw byte.
    for (int i = 0; i < 3; i++) {
      Mutation m = new Mutation("key-" + i);
      m.put(cf, "cq-boolean", Boolean.toString(booleans[i]));
      m.put(cf.getBytes(), "cq-byte".getBytes(), new byte[] { bytes[i] });
      m.put(cf, "cq-short", Short.toString(shorts[i]));
      m.put(cf, "cq-int", Integer.toString(ints[i]));
      m.put(cf, "cq-long", Long.toString(longs[i]));
      m.put(cf, "cq-string", strings[i]);
      m.put(cf, "cq-float", Float.toString(floats[i]));
      m.put(cf, "cq-double", Double.toString(doubles[i]));
      m.put(cf, "cq-decimal", decimals[i].toString());
      m.put(cf, "cq-date", dates[i].toString());
      m.put(cf, "cq-timestamp", timestamps[i].toString());
      bw.addMutation(m);
    }
  } finally {
    bw.close();
  }
}
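
The rows written above can be read back with a plain Scanner. The helper below is a hypothetical sketch, not part of AccumuloTestSetup: it assumes the same Connector and table name, and needs org.apache.accumulo.core.client.Scanner, org.apache.accumulo.core.security.Authorizations, org.apache.accumulo.core.data.Key and Value, org.apache.hadoop.io.Text, and java.util.Map.

// Hypothetical helper: scans the "cf" family of the table written above and prints
// each cell, a quick way to confirm the BatchWriter flushed the three rows.
protected void printTestRows(Connector conn, String tableName) throws TableNotFoundException {
  Scanner scanner = conn.createScanner(tableName, Authorizations.EMPTY);
  scanner.fetchColumnFamily(new Text("cf"));
  for (Map.Entry<Key, Value> entry : scanner) {
    Key key = entry.getKey();
    System.out.println(key.getRow() + " " + key.getColumnQualifier() + " = " + entry.getValue());
  }
}
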
Use of org.apache.accumulo.core.client.admin.TableOperations in project hive by apache.
From the class AccumuloStorageHandler, method preCreateTable:
@Override
public void preCreateTable(Table table) throws MetaException {
  boolean isExternal = isExternalTable(table);
  if (table.getSd().getLocation() != null) {
    throw new MetaException("Location can't be specified for Accumulo");
  }

  Map<String, String> serdeParams = table.getSd().getSerdeInfo().getParameters();
  String columnMapping = serdeParams.get(AccumuloSerDeParameters.COLUMN_MAPPINGS);
  if (columnMapping == null) {
    throw new MetaException(AccumuloSerDeParameters.COLUMN_MAPPINGS
        + " missing from SERDEPROPERTIES");
  }

  try {
    String tblName = getTableName(table);
    Connector connector = connectionParams.getConnector();
    TableOperations tableOpts = connector.tableOperations();

    // Attempt to create the table, taking EXTERNAL into consideration
    if (!tableOpts.exists(tblName)) {
      if (!isExternal) {
        tableOpts.create(tblName);
      } else {
        throw new MetaException("Accumulo table " + tblName
            + " doesn't exist even though declared external");
      }
    } else {
      if (!isExternal) {
        throw new MetaException("Table " + tblName
            + " already exists in Accumulo. Use CREATE EXTERNAL TABLE to register with Hive.");
      }
    }
  } catch (AccumuloSecurityException e) {
    throw new MetaException(StringUtils.stringifyException(e));
  } catch (TableExistsException e) {
    throw new MetaException(StringUtils.stringifyException(e));
  } catch (AccumuloException e) {
    throw new MetaException(StringUtils.stringifyException(e));
  }
}
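
The drop side of the storage handler uses the same TableOperations calls in reverse. The method below is an illustrative sketch, not the actual Hive implementation: it assumes the same connectionParams field and the getTableName and isExternalTable helpers used by preCreateTable, and removes the Accumulo table only when Hive owns it (non-external).

// Illustrative drop-side cleanup; the real AccumuloStorageHandler may differ. This only
// demonstrates the TableOperations exists/delete pattern paired with the create path above.
public void dropAccumuloTableIfManaged(Table table) throws MetaException {
  if (isExternalTable(table)) {
    // External tables are only registered in Hive; leave the Accumulo table untouched.
    return;
  }
  try {
    String tblName = getTableName(table);
    TableOperations tableOpts = connectionParams.getConnector().tableOperations();
    if (tableOpts.exists(tblName)) {
      tableOpts.delete(tblName);
    }
  } catch (AccumuloException | AccumuloSecurityException | TableNotFoundException e) {
    throw new MetaException(StringUtils.stringifyException(e));
  }
}
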