Use of org.apache.hadoop.hbase.client.HBaseAdmin in the Apache Phoenix project: class AlterTableIT, method testSetHColumnPropertyAndAddColumnForDefaultCFForTableWithOnlyPKCols.
@Test
public void testSetHColumnPropertyAndAddColumnForDefaultCFForTableWithOnlyPKCols() throws Exception {
Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
Connection conn = DriverManager.getConnection(getUrl(), props);
conn.setAutoCommit(false);
try {
String ddl = "create table " + dataTableFullName + " (" + " id char(1) NOT NULL," + " col1 integer NOT NULL," + " col2 bigint NOT NULL," + " CONSTRAINT NAME_PK PRIMARY KEY (id, col1, col2)" + " ) " + generateDDLOptions("TTL=86400, SALT_BUCKETS = 4, DEFAULT_COLUMN_FAMILY='XYZ'");
conn.createStatement().execute(ddl);
ddl = "ALTER TABLE " + dataTableFullName + " ADD COL3 INTEGER IN_MEMORY=true";
conn.createStatement().execute(ddl);
conn.commit();
try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
HTableDescriptor tableDesc = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName));
HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
assertEquals(1, columnFamilies.length);
assertEquals(true, columnFamilies[0].isInMemory());
assertEquals("XYZ", columnFamilies[0].getNameAsString());
}
} finally {
conn.close();
}
}
Use of org.apache.hadoop.hbase.client.HBaseAdmin in the Apache Phoenix project: class AlterTableIT, method testSetPropertyAndAddColumnWhenTableHasExplicitDefaultColumnFamily.
@Test
public void testSetPropertyAndAddColumnWhenTableHasExplicitDefaultColumnFamily() throws Exception {
Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
Connection conn = DriverManager.getConnection(getUrl(), props);
String ddl = "CREATE TABLE " + dataTableFullName + " " + " (a_string varchar not null, col1 integer, CF1.col2 integer" + " CONSTRAINT pk PRIMARY KEY (a_string)) " + generateDDLOptions("DEFAULT_COLUMN_FAMILY = 'XYZ'");
try {
conn.createStatement().execute(ddl);
conn.createStatement().execute("ALTER TABLE " + dataTableFullName + " ADD col4 integer, CF1.col5 integer, CF2.col6 integer IN_MEMORY=true, CF1.REPLICATION_SCOPE=1, CF2.IN_MEMORY=false, XYZ.REPLICATION_SCOPE=1 ");
try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName)).getColumnFamilies();
assertEquals(3, columnFamilies.length);
assertEquals("CF1", columnFamilies[0].getNameAsString());
assertTrue(columnFamilies[0].isInMemory());
assertEquals(1, columnFamilies[0].getScope());
assertEquals("CF2", columnFamilies[1].getNameAsString());
assertFalse(columnFamilies[1].isInMemory());
assertEquals(0, columnFamilies[1].getScope());
assertEquals("XYZ", columnFamilies[2].getNameAsString());
assertTrue(columnFamilies[2].isInMemory());
assertEquals(1, columnFamilies[2].getScope());
}
} finally {
conn.close();
}
}
Use of org.apache.hadoop.hbase.client.HBaseAdmin in the Apache Phoenix project: class AlterTableIT, method testTTLAssignmentForNewEmptyCF.
@Test
public void testTTLAssignmentForNewEmptyCF() throws Exception {
Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
Connection conn = DriverManager.getConnection(getUrl(), props);
conn.setAutoCommit(false);
try {
String ddl = "create table " + dataTableFullName + " (" + " id char(1) NOT NULL," + " col1 integer NOT NULL," + " col2 bigint NOT NULL," + " CONSTRAINT NAME_PK PRIMARY KEY (id, col1, col2)" + " ) " + generateDDLOptions("TTL=86400, SALT_BUCKETS = 4, DEFAULT_COLUMN_FAMILY='XYZ'");
conn.createStatement().execute(ddl);
ddl = "ALTER TABLE " + dataTableFullName + " ADD NEWCF.COL3 INTEGER IN_MEMORY=true";
conn.createStatement().execute(ddl);
conn.commit();
try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
HTableDescriptor tableDesc = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName));
HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
assertEquals(2, columnFamilies.length);
assertEquals("NEWCF", columnFamilies[0].getNameAsString());
assertEquals(true, columnFamilies[0].isInMemory());
assertEquals(86400, columnFamilies[0].getTimeToLive());
assertEquals("XYZ", columnFamilies[1].getNameAsString());
assertEquals(false, columnFamilies[1].isInMemory());
assertEquals(86400, columnFamilies[1].getTimeToLive());
}
ddl = "ALTER TABLE " + dataTableFullName + " SET TTL=1000";
conn.createStatement().execute(ddl);
conn.commit();
try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
HTableDescriptor tableDesc = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName));
HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
assertEquals(2, columnFamilies.length);
assertEquals("NEWCF", columnFamilies[0].getNameAsString());
assertEquals(true, columnFamilies[0].isInMemory());
assertEquals(1000, columnFamilies[0].getTimeToLive());
assertEquals("XYZ", columnFamilies[1].getNameAsString());
assertEquals(false, columnFamilies[1].isInMemory());
assertEquals(86400, columnFamilies[1].getTimeToLive());
}
// the new column will be assigned to the column family XYZ. With the a KV column getting added for XYZ,
// the column family will start showing up in PTable.getColumnFamilies() after the column is added. Thus
// being a new column family for the PTable, it will end up inheriting the TTL of the emptyCF (NEWCF).
ddl = "ALTER TABLE " + dataTableFullName + " ADD COL3 INTEGER";
conn.createStatement().execute(ddl);
conn.commit();
try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
HTableDescriptor tableDesc = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName));
HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
assertEquals(2, columnFamilies.length);
assertEquals("NEWCF", columnFamilies[0].getNameAsString());
assertEquals(true, columnFamilies[0].isInMemory());
assertEquals(1000, columnFamilies[0].getTimeToLive());
assertEquals("XYZ", columnFamilies[1].getNameAsString());
assertEquals(false, columnFamilies[1].isInMemory());
assertEquals(1000, columnFamilies[1].getTimeToLive());
}
} finally {
conn.close();
}
}
Use of org.apache.hadoop.hbase.client.HBaseAdmin in the Apache Phoenix project: class AlterTableIT, method testAddingPkColAndSettingProperties.
@Test
public void testAddingPkColAndSettingProperties() throws Exception {
Connection conn = DriverManager.getConnection(getUrl());
try {
String ddl = "create table " + dataTableFullName + " (" + " k1 char(1) NOT NULL," + " k2 integer NOT NULL," + " col1 bigint," + " CONSTRAINT NAME_PK PRIMARY KEY (k1, k2)" + " ) " + tableDDLOptions;
conn.createStatement().execute(ddl);
// set HTableProperty when adding a pk column should fail
ddl = "ALTER TABLE " + dataTableFullName + " ADD k3 DECIMAL PRIMARY KEY COMPACTION_ENABLED = false";
try {
conn.createStatement().execute(ddl);
fail();
} catch (SQLException e) {
assertEquals(SQLExceptionCode.CANNOT_SET_TABLE_PROPERTY_ADD_COLUMN.getErrorCode(), e.getErrorCode());
}
// set HColumnProperty when adding only a pk column should fail
ddl = "ALTER TABLE " + dataTableFullName + " ADD k3 DECIMAL PRIMARY KEY REPLICATION_SCOPE = 0";
try {
conn.createStatement().execute(ddl);
fail();
} catch (SQLException e) {
assertEquals(SQLExceptionCode.SET_UNSUPPORTED_PROP_ON_ALTER_TABLE.getErrorCode(), e.getErrorCode());
}
// set phoenix table property when adding a pk column should fail
ddl = "ALTER TABLE " + dataTableFullName + " ADD k3 DECIMAL PRIMARY KEY DISABLE_WAL = true";
try {
conn.createStatement().execute(ddl);
fail();
} catch (SQLException e) {
assertEquals(SQLExceptionCode.CANNOT_SET_TABLE_PROPERTY_ADD_COLUMN.getErrorCode(), e.getErrorCode());
}
// set HColumnProperty property when adding a pk column and other key value columns should work
ddl = "ALTER TABLE " + dataTableFullName + " ADD k3 DECIMAL PRIMARY KEY, col2 bigint, CF.col3 bigint IN_MEMORY = true, CF.IN_MEMORY=false, CF.REPLICATION_SCOPE = 1";
conn.createStatement().execute(ddl);
// assert that k3 was added as new pk
ResultSet rs = conn.getMetaData().getPrimaryKeys("", schemaName, dataTableName);
assertTrue(rs.next());
assertEquals("K1", rs.getString("COLUMN_NAME"));
assertEquals(1, rs.getShort("KEY_SEQ"));
assertTrue(rs.next());
assertEquals("K2", rs.getString("COLUMN_NAME"));
assertEquals(2, rs.getShort("KEY_SEQ"));
assertTrue(rs.next());
assertEquals("K3", rs.getString("COLUMN_NAME"));
assertEquals(3, rs.getShort("KEY_SEQ"));
assertFalse(rs.next());
try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
HTableDescriptor tableDesc = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName));
HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
assertEquals(2, columnFamilies.length);
assertEquals("0", columnFamilies[0].getNameAsString());
assertEquals(true, columnFamilies[0].isInMemory());
assertEquals(0, columnFamilies[0].getScope());
assertEquals("CF", columnFamilies[1].getNameAsString());
assertEquals(false, columnFamilies[1].isInMemory());
assertEquals(1, columnFamilies[1].getScope());
}
} finally {
conn.close();
}
}
Use of org.apache.hadoop.hbase.client.HBaseAdmin in the Apache Phoenix project: class AlterTableIT, method testSetHColumnPropertyForTableWithOnlyPKCols2.
@Test
public void testSetHColumnPropertyForTableWithOnlyPKCols2() throws Exception {
Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
Connection conn = DriverManager.getConnection(getUrl(), props);
conn.setAutoCommit(false);
try {
String ddl = "create table " + dataTableFullName + " (" + " id char(1) NOT NULL," + " col1 integer NOT NULL," + " col2 bigint NOT NULL," + " CONSTRAINT NAME_PK PRIMARY KEY (id, col1, col2)" + " ) " + generateDDLOptions("TTL=86400, SALT_BUCKETS = 4");
conn.createStatement().execute(ddl);
ddl = "ALTER TABLE " + dataTableFullName + " SET IN_MEMORY=true";
conn.createStatement().execute(ddl);
conn.commit();
try (HBaseAdmin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
HTableDescriptor tableDesc = admin.getTableDescriptor(Bytes.toBytes(dataTableFullName));
HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
assertEquals(1, columnFamilies.length);
assertEquals(true, columnFamilies[0].isInMemory());
assertEquals("0", columnFamilies[0].getNameAsString());
}
} finally {
conn.close();
}
}
Aggregations