Example 76 with HTableInterface

Use of org.apache.hadoop.hbase.client.HTableInterface in project Solbase by Photobucket.

From the class SolbaseUtil, method generateDocId.

// sequence generator for document ids
public static int generateDocId(String key) throws IOException {
    HTableInterface sequence = SolbaseUtil.getSequenceTable();
    HTableInterface docIdKeyMap = SolbaseUtil.getDocKeyIdMapTable();
    try {
        // atomically bump the shared counter row in the sequence table to obtain the next doc id
        int docId = (int) sequence.incrementColumnValue(Bytes.toBytes("sequence"), Bytes.toBytes("id"), Bytes.toBytes(""), 1, true);
        // record the key -> docId mapping so later lookups by unique key resolve to this id
        Put mapping = new Put(Bytes.toBytes(key));
        mapping.add(Bytes.toBytes("docId"), Bytes.toBytes(""), Bytes.toBytes(docId));
        docIdKeyMap.put(mapping);
        return docId;
    } finally {
        SolbaseUtil.releaseTable(sequence);
        SolbaseUtil.releaseTable(docIdKeyMap);
    }
}
Also used: HTableInterface (org.apache.hadoop.hbase.client.HTableInterface), Put (org.apache.hadoop.hbase.client.Put)
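
A minimal caller sketch for the method above; the SolbaseUtil import path and the sample key are assumptions, not taken from the example.

import java.io.IOException;
// package assumed; only generateDocId(String) comes from the example above
import org.solbase.SolbaseUtil;

public class GenerateDocIdSketch {
    public static void main(String[] args) throws IOException {
        // hypothetical unique document key; in Solbase this would typically be the document's global_uniq_id
        String key = "photo:12345";
        // increments the shared "sequence" counter row and records the key -> docId mapping
        int docId = SolbaseUtil.generateDocId(key);
        System.out.println("assigned docId " + docId + " to key " + key);
    }
}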

Example 77 with HTableInterface

Use of org.apache.hadoop.hbase.client.HTableInterface in project Solbase by Photobucket.

From the class IndexWriter, method deleteDocument.

public void deleteDocument(Put documentPut) {
    HTableInterface docTable = SolbaseUtil.getDocTable();
    try {
        // logical delete: flag the document row as tombstoned instead of physically removing it
        documentPut.add(SolbaseUtil.timestampColumnFamilyName, SolbaseUtil.tombstonedColumnFamilyQualifierBytes, Bytes.toBytes(1));
        docTable.put(documentPut);
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        SolbaseUtil.releaseTable(docTable);
    }
}
Also used: IOException (java.io.IOException), HTableInterface (org.apache.hadoop.hbase.client.HTableInterface)

Example 78 with HTableInterface

Use of org.apache.hadoop.hbase.client.HTableInterface in project Solbase by Photobucket.

From the class IndexWriter, method updateTermVectorVersionId.

public void updateTermVectorVersionId(TermDocMetadata termDocMeta, int startDocId, int endDocId) {
    HTableInterface versionIdTable = SolbaseUtil.getTermVectorVersionIDTable();
    Term term = termDocMeta.getTerm();
    byte[] fieldTermKey = SolbaseUtil.generateTermKey(term);
    Put updatePut = new Put(Bytes.add(fieldTermKey, Bytes.toBytes(startDocId), Bytes.toBytes(endDocId)));
    if (termDocMeta.versionIdentifier == null) {
        // this term's metadata hasn't been loaded into the cache yet, so fetch its current version identifier before writing the update
        try {
            TermDocMetadataVersionIdentifier versionIdentifier = TermDocMetadataLoader.getStaticVersionIdentifier(term, startDocId, endDocId);
            updatePut.add(SolbaseUtil.timestampColumnFamilyName, Bytes.toBytes(""), Bytes.toBytes(versionIdentifier.getVersionIdentifier()));
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    } else {
        updatePut.add(SolbaseUtil.timestampColumnFamilyName, Bytes.toBytes(""), Bytes.toBytes(termDocMeta.versionIdentifier.getVersionIdentifier()));
    }
    try {
        versionIdTable.put(updatePut);
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        SolbaseUtil.releaseTable(versionIdTable);
    }
}
Also used: Term (org.apache.lucene.index.Term), IOException (java.io.IOException), HTableInterface (org.apache.hadoop.hbase.client.HTableInterface), Put (org.apache.hadoop.hbase.client.Put)
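
The row key written above is the term key with the start and end doc ids appended. The following self-contained sketch illustrates that layout; the literal term-key bytes are a stand-in, since the real output of SolbaseUtil.generateTermKey is not shown here.

import org.apache.hadoop.hbase.util.Bytes;

public class VersionIdKeySketch {
    public static void main(String[] args) {
        // stand-in for SolbaseUtil.generateTermKey(term); the real encoding is project-specific
        byte[] fieldTermKey = Bytes.toBytes("contents~solbase");
        int startDocId = 0;
        int endDocId = 16384;
        // same composition as updateTermVectorVersionId: termKey + startDocId + endDocId
        byte[] rowKey = Bytes.add(fieldTermKey, Bytes.toBytes(startDocId), Bytes.toBytes(endDocId));
        // the doc id range can be read back from fixed offsets relative to the term key length
        int decodedStart = Bytes.toInt(rowKey, fieldTermKey.length);
        int decodedEnd = Bytes.toInt(rowKey, fieldTermKey.length + Bytes.SIZEOF_INT);
        System.out.println("row key covers doc ids " + decodedStart + " to " + decodedEnd);
    }
}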

Example 79 with HTableInterface

Use of org.apache.hadoop.hbase.client.HTableInterface in project Solbase by Photobucket.

From the class IndexWriter, method deleteTermVectorVersionId.

public void deleteTermVectorVersionId(TermDocMetadata termDocMeta) {
    HTableInterface versionIdTable = SolbaseUtil.getTermVectorVersionIDTable();
    Term term = termDocMeta.getTerm();
    byte[] fieldTermKey = SolbaseUtil.generateTermKey(term);
    Delete delete = new Delete(fieldTermKey);
    try {
        versionIdTable.delete(delete);
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        SolbaseUtil.releaseTable(versionIdTable);
    }
}
Also used: Delete (org.apache.hadoop.hbase.client.Delete), Term (org.apache.lucene.index.Term), IOException (java.io.IOException), HTableInterface (org.apache.hadoop.hbase.client.HTableInterface)

Example 80 with HTableInterface

Use of org.apache.hadoop.hbase.client.HTableInterface in project Solbase by Photobucket.

From the class IndexWriter, method addDocument.

public void addDocument(Put documentPut, Document doc) {
    byte[] docId = documentPut.getRow();
    String uniqId = doc.get("global_uniq_id");
    if (uniqId != null && docId != null) {
        // for remote server updates via the Solr update handler we want getDocTable();
        // for now, map/reduce jobs can use a local HTable
        HTableInterface docTable = SolbaseUtil.getDocTable();
        // insert document to doctable
        try {
            documentPut.add(SolbaseUtil.timestampColumnFamilyName, SolbaseUtil.tombstonedColumnFamilyQualifierBytes, Bytes.toBytes(0));
            docTable.put(documentPut);
        } catch (IOException e) {
            throw new SolbaseException(SolbaseException.ErrorCode.SERVER_ERROR, e.getMessage());
        } finally {
            SolbaseUtil.releaseTable(docTable);
        }
        // need to insert to docKeyIdMap
        Put mapping = new Put(Bytes.toBytes(uniqId));
        mapping.add(Bytes.toBytes("docId"), Bytes.toBytes(""), SolbaseUtil.randomize(docId));
        mapping.add(SolbaseUtil.docIdColumnFamilyName, SolbaseUtil.tombstonedColumnFamilyQualifierBytes, Bytes.toBytes(0));
        updateDocKeyIdMap(mapping);
        logger.info("adding document: " + Bytes.toInt(SolbaseUtil.randomize(docId)) + " uniqId: " + uniqId);
    } else {
        // log which identifier was missing so the skipped document can be traced
        if (uniqId == null && docId == null) {
            logger.info("both uniqId and docId are null: " + doc.toString());
        } else if (uniqId == null) {
            logger.info("uniqId is null: " + doc.toString());
        } else {
            logger.info("docId is null: " + doc.toString());
        }
    }
    }
}
Also used: SolbaseException (org.solbase.common.SolbaseException), IOException (java.io.IOException), HTableInterface (org.apache.hadoop.hbase.client.HTableInterface), Put (org.apache.hadoop.hbase.client.Put)
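
A hedged sketch of calling addDocument; the IndexWriter package and constructor, the Lucene field setup, and the doc id value are illustrative assumptions, while the global_uniq_id field name and the doc-id row key come from the example above.

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
// package and no-argument constructor assumed for the IndexWriter shown above
import org.solbase.lucenehbase.IndexWriter;

public class AddDocumentSketch {
    public static void main(String[] args) {
        // the doc id would normally come from SolbaseUtil.generateDocId; 42 is a placeholder
        int docId = 42;
        // addDocument reads the doc id back from the Put's row key via documentPut.getRow()
        Put documentPut = new Put(Bytes.toBytes(docId));
        // Lucene document carrying the unique key that addDocument reads as "global_uniq_id";
        // this Field constructor assumes the pre-4.x Lucene API that Solbase builds on
        Document doc = new Document();
        doc.add(new Field("global_uniq_id", "photo:12345", Field.Store.YES, Field.Index.NOT_ANALYZED));
        IndexWriter writer = new IndexWriter();
        writer.addDocument(documentPut, doc);
    }
}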

Aggregations

HTableInterface (org.apache.hadoop.hbase.client.HTableInterface) 117
Result (org.apache.hadoop.hbase.client.Result) 43
Put (org.apache.hadoop.hbase.client.Put) 41
IOException (java.io.IOException) 36
ArrayList (java.util.ArrayList) 26
PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection) 23
Get (org.apache.hadoop.hbase.client.Get) 21
Scan (org.apache.hadoop.hbase.client.Scan) 21
Test (org.junit.Test) 20
SQLException (java.sql.SQLException) 19
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner) 17
Connection (java.sql.Connection) 15
HashMap (java.util.HashMap) 15
HBaseAdmin (org.apache.hadoop.hbase.client.HBaseAdmin) 13
Delete (org.apache.hadoop.hbase.client.Delete) 12
Mutation (org.apache.hadoop.hbase.client.Mutation) 12
PhoenixIOException (org.apache.phoenix.exception.PhoenixIOException) 11
ResultSet (java.sql.ResultSet) 10
Configuration (org.apache.hadoop.conf.Configuration) 9
ConnectionQueryServices (org.apache.phoenix.query.ConnectionQueryServices) 9