Search in sources :

Example 21 with HTableInterface

use of org.apache.hadoop.hbase.client.HTableInterface in project Solbase by Photobucket.

the class IndexWriter method addTermVector.

/**
 * Persists the serialized term vector metadata of one document into the
 * term vector HBase table, using the row/column layout selected by
 * {@link TermDocMetadataLoader#storageType}.
 *
 * @param termDocMeta term vector metadata to store (supplies key, doc id and payload)
 * @param startDocId  start of the doc id range (not used by this method)
 * @param endDocId    end of the doc id range (not used by this method)
 * @throws CorruptIndexException declared for caller compatibility
 * @throws IOException declared for caller compatibility
 */
public void addTermVector(TermDocMetadata termDocMeta, int startDocId, int endDocId) throws CorruptIndexException, IOException {
    // getting termVector and doc tables
    HTableInterface termVectorTable = SolbaseUtil.getTermVectorTable();
    try {
        byte[] key = termDocMeta.getFieldTermKey();
        ByteBuffer buf = termDocMeta.serialize();
        int docNumber = termDocMeta.getDocId();
        Put put = null;
        switch(TermDocMetadataLoader.storageType) {
            case KEY_ONLY:
                {
                    // entire payload is encoded into the row key; the cell itself is an empty marker
                    put = new Put(Bytes.add(Bytes.add(key, SolbaseUtil.delimiter, Bytes.toBytes(docNumber)), Bytes.toBytes(buf)));
                    put.add(SolbaseUtil.termVectorDocColumnFamilyName, Bytes.toBytes(""), Bytes.toBytes(""));
                }
                break;
            case WIDE_ROW:
                // many docs share one chunk row; the doc id becomes the column qualifier
                int chunkId = TermDocMetadataLoader.getChunkId(docNumber);
                put = new Put(Bytes.add(key, SolbaseUtil.delimiter, Bytes.toBytes(chunkId)));
                put.add(SolbaseUtil.termVectorDocColumnFamilyName, Bytes.toBytes(docNumber), Bytes.toBytes(buf));
                break;
            case NARROW_ROW:
            default:
                {
                    // one row per (term, doc); payload stored as the cell value
                    put = new Put(Bytes.add(key, SolbaseUtil.delimiter, Bytes.toBytes(docNumber)));
                    put.add(SolbaseUtil.termVectorDocColumnFamilyName, Bytes.toBytes(""), Bytes.toBytes(buf));
                }
        }
        termVectorTable.put(put);
    } catch (Exception e) {
        // pass the throwable to the logger so the stack trace is recorded
        // instead of being silently swallowed
        logger.error("failed to add term vector: " + termDocMeta.getTerm().toString() + " and docId: " + termDocMeta.docId, e);
    } finally {
        SolbaseUtil.releaseTable(termVectorTable);
    }
}
Also used : HTableInterface(org.apache.hadoop.hbase.client.HTableInterface) ByteBuffer(java.nio.ByteBuffer) Put(org.apache.hadoop.hbase.client.Put) CorruptIndexException(org.apache.lucene.index.CorruptIndexException) IOException(java.io.IOException) SolbaseException(org.solbase.common.SolbaseException)

Example 22 with HTableInterface

use of org.apache.hadoop.hbase.client.HTableInterface in project Solbase by Photobucket.

the class IndexWriter method deleteTermVector.

/**
 * Deletes the stored term vector of a single document, honoring the row
 * layout selected by TermDocMetadataLoader.storageType.
 *
 * @param termDocMeta - term vector to be deleted
 * @param startDocId - start of the doc id range (not used by this method)
 * @param endDocId - end of the doc id range (not used by this method)
 * @param compare - if true, it will compare new and old term vectors and if same, don't bother deleting term vector
 * @return boolean - indicating whether term vector's been deleted
 */
public boolean deleteTermVector(TermDocMetadata termDocMeta, int startDocId, int endDocId, boolean compare) {
    // to update, we should first delete existing term doc meta data.
    // getting termVector and doc tables
    HTableInterface termVectorTable = SolbaseUtil.getTermVectorTable();
    ResultScanner fieldScanner = null;
    try {
        byte[] key = termDocMeta.getFieldTermKey();
        int docNumber = termDocMeta.getDocId();
        Delete delete = null;
        switch(TermDocMetadataLoader.storageType) {
            case KEY_ONLY:
                {
                    // payload is encoded in the row key, so the exact key is unknown;
                    // scan the [docNumber, docNumber+1) key range to find the stored row
                    byte[] termBeginKey = Bytes.add(key, SolbaseUtil.delimiter, Bytes.toBytes(docNumber));
                    byte[] termEndKey = Bytes.add(key, SolbaseUtil.delimiter, Bytes.toBytes(docNumber + 1));
                    Scan fieldScan = new Scan(termBeginKey, termEndKey);
                    fieldScan.addFamily(SolbaseUtil.termVectorDocColumnFamilyName);
                    fieldScanner = termVectorTable.getScanner(fieldScan);
                    Result termDoc;
                    // only the first match is needed; the scanner is closed immediately
                    // (the finally block's close() on an already-closed scanner is a no-op)
                    termDoc = fieldScanner.next();
                    fieldScanner.close();
                    if (termDoc != null && !termDoc.isEmpty()) {
                        if (compare) {
                            byte[] oldRow = termDoc.getRow();
                            ByteBuffer buf = termDocMeta.serialize();
                            byte[] newRow = Bytes.add(Bytes.add(key, SolbaseUtil.delimiter, Bytes.toBytes(docNumber)), Bytes.toBytes(buf));
                            // if term vector hasn't changed, don't bother deleting
                            if (!ArrayUtils.isEquals(oldRow, newRow)) {
                                delete = new Delete(termDoc.getRow());
                            }
                        } else {
                            delete = new Delete(termDoc.getRow());
                        }
                    }
                }
                break;
            case WIDE_ROW:
                // NOTE(review): this deletes the whole chunk row, i.e. every doc in the
                // chunk sharing this term — presumably intentional; confirm against callers
                int chunkId = TermDocMetadataLoader.getChunkId(docNumber);
                delete = new Delete(Bytes.add(key, SolbaseUtil.delimiter, Bytes.toBytes(chunkId)));
                break;
            case NARROW_ROW:
            default:
                {
                    // one row per (term, doc): the row key is fully determined, no scan needed
                    delete = new Delete(Bytes.add(key, SolbaseUtil.delimiter, Bytes.toBytes(docNumber)));
                }
        }
        // delete stays null when KEY_ONLY found no row or compare showed no change
        if (delete != null) {
            termVectorTable.delete(delete);
            logger.info("deleting term vector: " + termDocMeta.getTerm().toString() + " docId: " + docNumber);
            return true;
        }
    } catch (IOException e) {
        throw new SolbaseException(SolbaseException.ErrorCode.SERVER_ERROR, e.getMessage());
    } finally {
        if (fieldScanner != null) {
            fieldScanner.close();
        }
        SolbaseUtil.releaseTable(termVectorTable);
    }
    return false;
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) SolbaseException(org.solbase.common.SolbaseException) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Scan(org.apache.hadoop.hbase.client.Scan) IOException(java.io.IOException) HTableInterface(org.apache.hadoop.hbase.client.HTableInterface) ByteBuffer(java.nio.ByteBuffer) Result(org.apache.hadoop.hbase.client.Result)

Example 23 with HTableInterface

use of org.apache.hadoop.hbase.client.HTableInterface in project Solbase by Photobucket.

the class SolbaseUtil method getDocId.

// sequence doc id mapping to actual silo.picture_id
/**
 * Looks up the sequence doc id mapped to the given key in the
 * doc-key-to-id map table.
 *
 * @param key row key to look up
 * @return the mapped doc id, or {@code null} if no mapping exists
 * @throws IOException on HBase access failure
 */
public static Integer getDocId(String key) throws IOException {
    HTableInterface docIdKeyMap = SolbaseUtil.getDocKeyIdMapTable();
    try {
        Get get = new Get(Bytes.toBytes(key));
        Result result = docIdKeyMap.get(get);
        if (result.isEmpty()) {
            return null;
        }
        byte[] docId = result.getValue(Bytes.toBytes("docId"), Bytes.toBytes(""));
        // guard against a row that exists but lacks the docId cell;
        // Bytes.toInt(null) would otherwise throw a NullPointerException
        if (docId == null) {
            return null;
        }
        return Bytes.toInt(docId);
    } finally {
        SolbaseUtil.releaseTable(docIdKeyMap);
    }
}
Also used : Get(org.apache.hadoop.hbase.client.Get) HTableInterface(org.apache.hadoop.hbase.client.HTableInterface) Result(org.apache.hadoop.hbase.client.Result)

Example 24 with HTableInterface

use of org.apache.hadoop.hbase.client.HTableInterface in project Solbase by Photobucket.

the class SolbaseUtil method getSequenceId.

/**
 * Reads the current value of the global "sequence" counter row. If the
 * counter row does not exist yet, it is initialized atomically via an
 * increment and that first value is returned.
 *
 * @return the current sequence id, or -1 if the HBase read failed
 */
public static int getSequenceId() {
    HTableInterface sequence = SolbaseUtil.getSequenceTable();
    try {
        Get get = new Get(Bytes.toBytes("sequence"));
        Result result = sequence.get(get);
        if (result == null || result.isEmpty()) {
            // no counter row yet: create/advance it atomically and use that value
            return (int) sequence.incrementColumnValue(Bytes.toBytes("sequence"), Bytes.toBytes("id"), Bytes.toBytes(""), 1, true);
        } else {
            byte[] val = result.getValue(Bytes.toBytes("id"), Bytes.toBytes(""));
            return (int) Bytes.toLong(val);
        }
    } catch (IOException e) {
        // TODO: route through a logger instead of printing to stderr
        e.printStackTrace();
    } finally {
        // release the pooled table handle; sibling methods do this in finally,
        // but this method previously leaked it
        SolbaseUtil.releaseTable(sequence);
    }
    return -1;
}
Also used : Get(org.apache.hadoop.hbase.client.Get) IOException(java.io.IOException) HTableInterface(org.apache.hadoop.hbase.client.HTableInterface) Result(org.apache.hadoop.hbase.client.Result)

Example 25 with HTableInterface

use of org.apache.hadoop.hbase.client.HTableInterface in project Solbase by Photobucket.

the class SolbaseCoreContainer method readSchemaXMLBytes.

/**
 * Fetches the raw schema XML stored for the given index from the schema
 * info table. Any "~" shard suffix in the index name is stripped before
 * the lookup, since the schema row is keyed by the bare index name.
 *
 * @param indexName index name, optionally carrying a "~shard" suffix
 * @return the schema XML bytes, or {@code null} if the cell is absent
 * @throws IOException on HBase access failure
 */
public static byte[] readSchemaXMLBytes(String indexName) throws IOException {
    HTableInterface table = SolbaseUtil.getSchemaInfoTable();
    try {
        // drop everything from the first "~" on; it is not part of the row key
        int tildePos = indexName.indexOf("~");
        String rowKey = (tildePos >= 0) ? indexName.substring(0, tildePos) : indexName;
        Result row = table.get(new Get(Bytes.toBytes(rowKey)));
        return row.getValue(Bytes.toBytes("info"), Bytes.toBytes("schema"));
    } finally {
        SolbaseUtil.releaseTable(table);
    }
}
Also used : Get(org.apache.hadoop.hbase.client.Get) HTableInterface(org.apache.hadoop.hbase.client.HTableInterface) Result(org.apache.hadoop.hbase.client.Result)

Aggregations

HTableInterface (org.apache.hadoop.hbase.client.HTableInterface)117 Result (org.apache.hadoop.hbase.client.Result)43 Put (org.apache.hadoop.hbase.client.Put)41 IOException (java.io.IOException)36 ArrayList (java.util.ArrayList)26 PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection)23 Get (org.apache.hadoop.hbase.client.Get)21 Scan (org.apache.hadoop.hbase.client.Scan)21 Test (org.junit.Test)20 SQLException (java.sql.SQLException)19 ResultScanner (org.apache.hadoop.hbase.client.ResultScanner)17 Connection (java.sql.Connection)15 HashMap (java.util.HashMap)15 HBaseAdmin (org.apache.hadoop.hbase.client.HBaseAdmin)13 Delete (org.apache.hadoop.hbase.client.Delete)12 Mutation (org.apache.hadoop.hbase.client.Mutation)12 PhoenixIOException (org.apache.phoenix.exception.PhoenixIOException)11 ResultSet (java.sql.ResultSet)10 Configuration (org.apache.hadoop.conf.Configuration)9 ConnectionQueryServices (org.apache.phoenix.query.ConnectionQueryServices)9