
Example 1 with MemcachedException

Use of net.rubyeye.xmemcached.exception.MemcachedException in project java-docs-samples by GoogleCloudPlatform.

The class MemcacheServlet, method doGet:

@Override
public void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException, ServletException {
    String addr = System.getenv().containsKey("GAE_MEMCACHE_HOST") ? System.getenv("GAE_MEMCACHE_HOST") : "localhost";
    String port = System.getenv().containsKey("GAE_MEMCACHE_PORT") ? System.getenv("GAE_MEMCACHE_PORT") : "11211";
    String key = "count";
    MemcachedClientBuilder builder = new XMemcachedClientBuilder(AddrUtil.getAddresses(addr + ":" + port));
    MemcachedClient client = builder.build();
    long count = 0L;
    try {
        count = client.incr(key, 1L, 0L);
    } catch (TimeoutException | InterruptedException | MemcachedException e) {
        throw new ServletException("Memcache error", e);
    }
    resp.setContentType("text/plain");
    resp.getWriter().print("Value is " + count + "\n");
}
Also used: ServletException(javax.servlet.ServletException) XMemcachedClientBuilder(net.rubyeye.xmemcached.XMemcachedClientBuilder) MemcachedClientBuilder(net.rubyeye.xmemcached.MemcachedClientBuilder) MemcachedClient(net.rubyeye.xmemcached.MemcachedClient) TimeoutException(java.util.concurrent.TimeoutException) MemcachedException(net.rubyeye.xmemcached.exception.MemcachedException)
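
The servlet above builds a new MemcachedClient on every request and never shuts it down; since xmemcached clients hold live connections, a longer-lived client is usually preferable. Below is a minimal sketch of that variant, assuming the same xmemcached 2.x API as the example (the class name and the init/destroy wiring are illustrative, and the imports match the Also used list plus javax.servlet.http):

// Sketch only: build the client once in init() and release it in destroy().
// Uses the same xmemcached calls as the example above; the servlet plumbing is illustrative.
public class CountingServlet extends HttpServlet {

    private MemcachedClient client;

    @Override
    public void init() throws ServletException {
        String addr = System.getenv().getOrDefault("GAE_MEMCACHE_HOST", "localhost");
        String port = System.getenv().getOrDefault("GAE_MEMCACHE_PORT", "11211");
        try {
            client = new XMemcachedClientBuilder(AddrUtil.getAddresses(addr + ":" + port)).build();
        } catch (IOException e) {
            throw new ServletException("Could not create memcache client", e);
        }
    }

    @Override
    public void doGet(HttpServletRequest req, HttpServletResponse resp)
            throws IOException, ServletException {
        try {
            // incr(key, delta, initialValue) creates the counter on first use.
            long count = client.incr("count", 1L, 0L);
            resp.setContentType("text/plain");
            resp.getWriter().print("Value is " + count + "\n");
        } catch (TimeoutException | InterruptedException | MemcachedException e) {
            throw new ServletException("Memcache error", e);
        }
    }

    @Override
    public void destroy() {
        try {
            client.shutdown();
        } catch (IOException e) {
            // best effort: nothing useful to do if shutdown fails
        }
    }
}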

Example 2 with MemcachedException

Use of net.rubyeye.xmemcached.exception.MemcachedException in project Solbase by Photobucket.

The class DocumentLoader, method processDocument:

private ParsedDoc processDocument(Document newDoc, Document oldDoc, String indexName, int docNumber, SolbaseIndexUtil indexUtil, IndexWriter writer, boolean updateStore) {
    try {
        @SuppressWarnings("unchecked") List<Fieldable> newFields = newDoc.getFields();
        boolean termVectorChanged = false;
        for (Fieldable field : newFields) {
            if (field.isIndexed() || field instanceof EmbeddedSortField) {
                termVectorChanged = true;
                break;
            }
        }
        // do diff on terms
        if (termVectorChanged) {
            Field docIdField = oldDoc.getField("docId");
            // cloning old doc, so it won't conflict with read
            oldDoc = new Document(oldDoc);
            oldDoc.removeField("docId");
            // parsing old doc to get all terms
            try {
                ParsedDoc oldParsedDoc = writer.parseDoc(oldDoc, schema.getAnalyzer(), indexName, docNumber, indexUtil.getSortFieldNames());
                List<Term> oldTerms = oldParsedDoc.getAllTerms();
                List<TermDocMetadata> oldTermDocMetas = oldParsedDoc.getTermDocMetadatas();
                Document mergedDoc = mergeOldAndNew(oldDoc, newDoc);
                ParsedDoc parsedDoc = writer.parseDoc(mergedDoc, schema.getAnalyzer(), indexName, docNumber, indexUtil.getSortFieldNames());
                List<TermDocMetadata> newTermDocMetas = parsedDoc.getTermDocMetadatas();
                List<Term> newTerms = parsedDoc.getAllTerms();
                List<Term> updateList = new ArrayList<Term>(oldTerms);
                List<Term> deleteList = new ArrayList<Term>(oldTerms);
                List<Term> addList = new ArrayList<Term>(newTerms);
                Collections.copy(updateList, oldTerms);
                Collections.copy(deleteList, oldTerms);
                Collections.copy(addList, newTerms);
                updateList.retainAll(newTerms);
                deleteList.removeAll(newTerms);
                addList.removeAll(oldTerms);
                int shardNum = SolbaseShardUtil.getShardNum(indexName);
                int startDocId = SolbaseShardUtil.getStartDocId(shardNum);
                int endDocId = SolbaseShardUtil.getEndDocId(shardNum);
                // updating tv first
                for (TermDocMetadata termDocMeta : newTermDocMetas) {
                    Term term = termDocMeta.getTerm();
                    if (updateList.contains(term)) {
                        logger.debug("updating this term: " + term.toString());
                        ReaderCache.updateTermDocsMetadata(term, termDocMeta, indexName, writer, LayeredCache.ModificationType.UPDATE, updateStore, startDocId, endDocId);
                    } else if (addList.contains(term)) {
                        ReaderCache.updateTermDocsMetadata(term, termDocMeta, indexName, writer, LayeredCache.ModificationType.ADD, updateStore, startDocId, endDocId);
                    }
                }
                // clean up deletes
                if (deleteList.size() > 0) {
                    for (TermDocMetadata termDocMeta : oldTermDocMetas) {
                        Term term = termDocMeta.getTerm();
                        if (deleteList.contains(term)) {
                            ReaderCache.updateTermDocsMetadata(term, termDocMeta, indexName, writer, LayeredCache.ModificationType.DELETE, updateStore, startDocId, endDocId);
                        }
                    }
                }
                parsedDoc.getDocument().add(docIdField);
                return parsedDoc;
            } catch (NullPointerException e) {
                return null;
            }
        } else {
            Document mergedDoc = mergeOldAndNew(oldDoc, newDoc);
            ParsedDoc parsedDoc = writer.parseDoc(mergedDoc, schema.getAnalyzer(), indexName, docNumber, indexUtil.getSortFieldNames());
            return parsedDoc;
        }
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (InterruptedException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (MemcachedException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (TimeoutException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    return null;
}
Also used: ArrayList(java.util.ArrayList) EmbeddedSortField(org.apache.lucene.document.EmbeddedSortField) Term(org.apache.lucene.index.Term) IOException(java.io.IOException) Document(org.apache.lucene.document.Document) SolrInputDocument(org.apache.solr.common.SolrInputDocument) EmbeddedIndexedIntField(org.apache.solr.schema.EmbeddedIndexedIntField) SchemaField(org.apache.solr.schema.SchemaField) Field(org.apache.lucene.document.Field) ParsedDoc(org.solbase.indexer.ParsedDoc) Fieldable(org.apache.lucene.document.Fieldable) MemcachedException(net.rubyeye.xmemcached.exception.MemcachedException) TimeoutException(java.util.concurrent.TimeoutException)
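
The heart of processDocument is the three-way split of terms: terms present in both the old and new documents become updates, terms only in the old document become deletes, and terms only in the new document become adds. Below is a self-contained sketch of that set arithmetic, with String standing in for Lucene's Term (the class name and sample data are illustrative):

import java.util.ArrayList;
import java.util.List;

// Sketch of the update/add/delete split used above, with String standing in for Term.
public final class TermDiff {

    public static void main(String[] args) {
        List<String> oldTerms = List.of("title:foo", "body:bar", "body:baz");
        List<String> newTerms = List.of("title:foo", "body:qux");

        // Terms in both old and new docs: their posting metadata must be updated.
        List<String> updateList = new ArrayList<>(oldTerms);
        updateList.retainAll(newTerms);

        // Terms only in the old doc: their postings must be deleted.
        List<String> deleteList = new ArrayList<>(oldTerms);
        deleteList.removeAll(newTerms);

        // Terms only in the new doc: their postings must be added.
        List<String> addList = new ArrayList<>(newTerms);
        addList.removeAll(oldTerms);

        System.out.println("update: " + updateList);  // [title:foo]
        System.out.println("delete: " + deleteList);  // [body:bar, body:baz]
        System.out.println("add:    " + addList);     // [body:qux]
    }
}

Note that the Collections.copy calls in the original method are redundant: the ArrayList copy constructors have already populated the three lists before the copy is made.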

Example 3 with MemcachedException

Use of net.rubyeye.xmemcached.exception.MemcachedException in project Solbase by Photobucket.

The class SolbaseIndexWriter, method delete:

public void delete(DeleteUpdateCommand cmd) throws IOException {
    deleteByIdCommands.incrementAndGet();
    deleteByIdCommandsCumulative.incrementAndGet();
    if (!cmd.fromPending && !cmd.fromCommitted) {
        numErrors.incrementAndGet();
        numErrorsCumulative.incrementAndGet();
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "meaningless command: " + cmd);
    }
    if (!cmd.fromPending || !cmd.fromCommitted) {
        numErrors.incrementAndGet();
        numErrorsCumulative.incrementAndGet();
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "operation not supported" + cmd);
    }
    // Delete all terms/fields/etc
    String indexName = core.getName();
    writer.setIndexName(indexName);
    writer.setIndexUtil(indexUtil);
    int docId = Integer.parseInt(cmd.id);
    logger.info("deleting doc: " + docId);
    try {
        CachedObjectWrapper<Document, Long> wrapper = ReaderCache.getDocument(docId, null, indexName, 0, 0);
        boolean updateStore = cmd.getUpdateStore();
        ParsedDoc parsedDoc = new ParsedDoc();
        parsedDoc.setIndexName(indexName);
        parsedDoc.setIndexUtil(indexUtil);
        parsedDoc.setIndexWriter(writer);
        parsedDoc.setUpdateStore(updateStore);
        int shardNum = SolbaseShardUtil.getShardNum(indexName);
        int startDocId = SolbaseShardUtil.getStartDocId(shardNum);
        int endDocId = SolbaseShardUtil.getEndDocId(shardNum);
        ReaderCache.updateDocument(docId, parsedDoc, indexName, writer, LayeredCache.ModificationType.DELETE, updateStore, startDocId, endDocId);
    } catch (InterruptedException e) {
        logger.info("delete doc failed: " + docId);
        logger.info(e.toString());
    } catch (MemcachedException e) {
        logger.info("delete doc failed: " + docId);
        logger.info(e.toString());
    } catch (TimeoutException e) {
        logger.info("delete doc failed: " + docId);
        logger.info(e.toString());
    } catch (SolbaseException e) {
        logger.info("delete doc failed: " + docId);
        logger.info(e.toString());
    }
}
Also used: ParsedDoc(org.solbase.indexer.ParsedDoc) SolbaseException(org.solbase.common.SolbaseException) AtomicLong(java.util.concurrent.atomic.AtomicLong) Document(org.apache.lucene.document.Document) SolrException(org.apache.solr.common.SolrException) MemcachedException(net.rubyeye.xmemcached.exception.MemcachedException) TimeoutException(java.util.concurrent.TimeoutException)
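
The four catch blocks above handle every failure the same way; since Java 7 they can be collapsed into a single multi-catch, as Example 1 already does. A sketch of the equivalent clause, written as it would sit inside the method above (fragment only, surrounding code unchanged):

    try {
        // ... same ReaderCache lookup and updateDocument(...) call as above ...
    } catch (InterruptedException | MemcachedException | TimeoutException | SolbaseException e) {
        // One handler replaces the four identical blocks.
        logger.info("delete doc failed: " + docId);
        logger.info(e.toString());
    }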

Example 4 with MemcachedException

Use of net.rubyeye.xmemcached.exception.MemcachedException in project Solbase by Photobucket.

The class SolbaseIndexWriter, method editDoc:

/**
 * Doing edit logic here: instead of blindly inserting, we need to compare the new doc with the old doc and apply the
 * appropriate modifications to the term vectors and the stored document.
 * @param newDoc
 * @param indexName
 * @return
 */
public boolean editDoc(Document newDoc, String indexName, int docNumber, boolean updateStore) {
    try {
        CachedObjectWrapper<Document, Long> cachedObj = ReaderCache.getDocument(docNumber, null, indexName, 0, 0);
        if (cachedObj == null || cachedObj.getValue() == null) {
            // document doesn't exist, so let's just bail out here
            return true;
        }
        ParsedDoc parsedDoc = new ParsedDoc(newDoc);
        parsedDoc.setIndexName(indexName);
        parsedDoc.setIndexUtil(indexUtil);
        parsedDoc.setIndexWriter(writer);
        parsedDoc.setUpdateStore(updateStore);
        int shardNum = SolbaseShardUtil.getShardNum(indexName);
        int startDocId = SolbaseShardUtil.getStartDocId(shardNum);
        int endDocId = SolbaseShardUtil.getEndDocId(shardNum);
        ReaderCache.updateDocument(docNumber, parsedDoc, indexName, writer, LayeredCache.ModificationType.UPDATE, updateStore, startDocId, endDocId);
        return true;
    } catch (IOException e) {
        logger.info("edit doc failed: " + docNumber);
        logger.info(e.toString());
    } catch (InterruptedException e) {
        logger.info("edit doc failed: " + docNumber);
        logger.info(e.toString());
    } catch (MemcachedException e) {
        logger.info("edit doc failed: " + docNumber);
        logger.info(e.toString());
    } catch (TimeoutException e) {
        logger.info("edit doc failed: " + docNumber);
        logger.info(e.toString());
    } catch (SolbaseException e) {
        logger.info("edit doc failed: " + docNumber);
        logger.info(e.toString());
    }
    return false;
}
Also used: ParsedDoc(org.solbase.indexer.ParsedDoc) SolbaseException(org.solbase.common.SolbaseException) AtomicLong(java.util.concurrent.atomic.AtomicLong) IOException(java.io.IOException) Document(org.apache.lucene.document.Document) MemcachedException(net.rubyeye.xmemcached.exception.MemcachedException) TimeoutException(java.util.concurrent.TimeoutException)

Example 5 with MemcachedException

Use of net.rubyeye.xmemcached.exception.MemcachedException in project dubbo by alibaba.

The class MemcachedProtocol, method protocolBindingRefer:

@Override
public <T> Invoker<T> protocolBindingRefer(final Class<T> type, final URL url) throws RpcException {
    try {
        String address = url.getAddress();
        String backup = url.getParameter(RemotingConstants.BACKUP_KEY);
        if (backup != null && backup.length() > 0) {
            address += "," + backup;
        }
        MemcachedClientBuilder builder = new XMemcachedClientBuilder(AddrUtil.getAddresses(address));
        final MemcachedClient memcachedClient = builder.build();
        final int expiry = url.getParameter("expiry", 0);
        final String get = url.getParameter("get", "get");
        final String set = url.getParameter("set", Map.class.equals(type) ? "put" : "set");
        final String delete = url.getParameter("delete", Map.class.equals(type) ? "remove" : "delete");
        return new AbstractInvoker<T>(type, url) {

            @Override
            protected Result doInvoke(Invocation invocation) throws Throwable {
                try {
                    Object value = null;
                    if (get.equals(invocation.getMethodName())) {
                        if (invocation.getArguments().length != 1) {
                            throw new IllegalArgumentException("The memcached get method arguments mismatch, must only one arguments. interface: " + type.getName() + ", method: " + invocation.getMethodName() + ", url: " + url);
                        }
                        value = memcachedClient.get(String.valueOf(invocation.getArguments()[0]));
                    } else if (set.equals(invocation.getMethodName())) {
                        if (invocation.getArguments().length != 2) {
                            throw new IllegalArgumentException("The memcached set method arguments mismatch, must be two arguments. interface: " + type.getName() + ", method: " + invocation.getMethodName() + ", url: " + url);
                        }
                        memcachedClient.set(String.valueOf(invocation.getArguments()[0]), expiry, invocation.getArguments()[1]);
                    } else if (delete.equals(invocation.getMethodName())) {
                        if (invocation.getArguments().length != 1) {
                            throw new IllegalArgumentException("The memcached delete method arguments mismatch, must only one arguments. interface: " + type.getName() + ", method: " + invocation.getMethodName() + ", url: " + url);
                        }
                        memcachedClient.delete(String.valueOf(invocation.getArguments()[0]));
                    } else {
                        throw new UnsupportedOperationException("Unsupported method " + invocation.getMethodName() + " in memcached service.");
                    }
                    return AsyncRpcResult.newDefaultAsyncResult(value, invocation);
                } catch (Throwable t) {
                    RpcException re = new RpcException("Failed to invoke memcached service method. interface: " + type.getName() + ", method: " + invocation.getMethodName() + ", url: " + url + ", cause: " + t.getMessage(), t);
                    if (t instanceof TimeoutException || t instanceof SocketTimeoutException) {
                        re.setCode(RpcException.TIMEOUT_EXCEPTION);
                    } else if (t instanceof MemcachedException || t instanceof IOException) {
                        re.setCode(RpcException.NETWORK_EXCEPTION);
                    }
                    throw re;
                }
            }

            @Override
            public void destroy() {
                super.destroy();
                try {
                    memcachedClient.shutdown();
                } catch (Throwable e) {
                    logger.warn(e.getMessage(), e);
                }
            }
        };
    } catch (Throwable t) {
        throw new RpcException("Failed to refer memcached service. interface: " + type.getName() + ", url: " + url + ", cause: " + t.getMessage(), t);
    }
}
Also used: XMemcachedClientBuilder(net.rubyeye.xmemcached.XMemcachedClientBuilder) MemcachedClientBuilder(net.rubyeye.xmemcached.MemcachedClientBuilder) Invocation(org.apache.dubbo.rpc.Invocation) AbstractInvoker(org.apache.dubbo.rpc.protocol.AbstractInvoker) IOException(java.io.IOException) SocketTimeoutException(java.net.SocketTimeoutException) MemcachedClient(net.rubyeye.xmemcached.MemcachedClient) RpcException(org.apache.dubbo.rpc.RpcException) TimeoutException(java.util.concurrent.TimeoutException) MemcachedException(net.rubyeye.xmemcached.exception.MemcachedException)
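
For reference, the parameters this invoker reads from the URL are backup, expiry, get, set, and delete. Below is a hypothetical reference URL assembled as a Java string to show where each parameter ends up; the addresses and values are placeholders, not taken from the Dubbo documentation:

// Hypothetical example only: the parameter names come from the code above,
// the host addresses and values are placeholders.
String referenceUrl =
    "memcached://10.20.153.10:11211"
        + "?backup=10.20.153.11:11211"        // appended to the address list before building the client
        + "&expiry=3600"                      // passed as the expiry argument of memcachedClient.set(...)
        + "&get=get&set=set&delete=delete";   // method names mapped to the get/set/delete cache operations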

Aggregations

TimeoutException (java.util.concurrent.TimeoutException): 9
MemcachedException (net.rubyeye.xmemcached.exception.MemcachedException): 9
IOException (java.io.IOException): 7
Document (org.apache.lucene.document.Document): 5
ParsedDoc (org.solbase.indexer.ParsedDoc): 5
MemcachedClient (net.rubyeye.xmemcached.MemcachedClient): 4
XMemcachedClientBuilder (net.rubyeye.xmemcached.XMemcachedClientBuilder): 4
MemcachedClientBuilder (net.rubyeye.xmemcached.MemcachedClientBuilder): 3
SolbaseException (org.solbase.common.SolbaseException): 3
SocketTimeoutException (java.net.SocketTimeoutException): 2
AtomicLong (java.util.concurrent.atomic.AtomicLong): 2
SolrInputDocument (org.apache.solr.common.SolrInputDocument): 2
SchemaField (org.apache.solr.schema.SchemaField): 2
Invocation (com.alibaba.dubbo.rpc.Invocation): 1
RpcException (com.alibaba.dubbo.rpc.RpcException): 1
RpcResult (com.alibaba.dubbo.rpc.RpcResult): 1
AbstractInvoker (com.alibaba.dubbo.rpc.protocol.AbstractInvoker): 1
Gson (com.google.gson.Gson): 1
ArrayList (java.util.ArrayList): 1
ServletException (javax.servlet.ServletException): 1