Example use of org.apache.solr.common.SolrDocument from the Apache lucene-solr project:
class SolrDocumentTest, method testUnsupportedStuff.
/**
 * Verifies that the {@code Map} views returned by {@link SolrDocument#getFieldValueMap()}
 * and {@link SolrDocument#getFieldValuesMap()} are read-only: every mutating or
 * unsupported operation must throw {@link UnsupportedOperationException}, while
 * {@code get} still reflects fields set on the document.
 */
public void testUnsupportedStuff() {
  SolrDocument doc = new SolrDocument();

  // getFieldValueMap(): a read-only single-value view over the document's fields.
  assertUnsupported(() -> doc.getFieldValueMap().clear());
  assertUnsupported(() -> doc.getFieldValueMap().containsValue(null));
  assertUnsupported(() -> doc.getFieldValueMap().entrySet());
  assertUnsupported(() -> doc.getFieldValueMap().putAll(null));
  assertUnsupported(() -> doc.getFieldValueMap().values());
  assertUnsupported(() -> doc.getFieldValueMap().remove("key"));
  assertUnsupported(() -> doc.getFieldValueMap().put("key", "value"));

  // getFieldValuesMap(): a read-only multi-value view over the document's fields.
  assertUnsupported(() -> doc.getFieldValuesMap().clear());
  assertUnsupported(() -> doc.getFieldValuesMap().containsValue(null));
  assertUnsupported(() -> doc.getFieldValuesMap().entrySet());
  assertUnsupported(() -> doc.getFieldValuesMap().putAll(null));
  assertUnsupported(() -> doc.getFieldValuesMap().values());
  assertUnsupported(() -> doc.getFieldValuesMap().remove("key"));
  // BUG FIX: originally this repeated the getFieldValueMap() put-test; the
  // getFieldValuesMap() put operation was the one left uncovered.
  assertUnsupported(() -> doc.getFieldValuesMap().put("key", Collections.EMPTY_LIST));

  // Reads still work: absent field yields null, a set field is visible.
  assertEquals(null, doc.getFieldValueMap().get("aaa"));
  doc.setField("aaa", "bbb");
  assertEquals("bbb", doc.getFieldValueMap().get("aaa"));
}

/** Runs {@code op} and fails unless it throws {@link UnsupportedOperationException}. */
private static void assertUnsupported(Runnable op) {
  try {
    op.run();
    fail("should be unsupported!");
  } catch (UnsupportedOperationException expected) {
    // expected: the map view must reject this operation
  }
}
Example use of org.apache.solr.common.SolrDocument from the Apache lucene-solr project:
class TestJavaBinCodec, method generateSolrDocumentWithChildDocs.
/**
 * Builds a small three-level document tree for codec round-trip tests:
 * parent (id=1) -> {child (id=2) -> grandchild (id=3), second child (id=22)}.
 *
 * @return the root document with both children attached
 */
private SolrDocument generateSolrDocumentWithChildDocs() {
  // Deepest level first: the grandchild carries only an id.
  SolrDocument grandChild = new SolrDocument();
  grandChild.addField("id", "3");

  // First child holds the grandchild.
  SolrDocument firstChild = new SolrDocument();
  firstChild.addField("id", "2");
  firstChild.addField("cat", "foo");
  firstChild.addChildDocument(grandChild);

  // Second child is a leaf.
  SolrDocument secondChild = new SolrDocument();
  secondChild.addField("id", "22");
  secondChild.addField("cat", "bar");

  // Root document; children are attached in the same order as before.
  SolrDocument parent = new SolrDocument();
  parent.addField("id", "1");
  parent.addField("subject", "parentDocument");
  parent.addChildDocument(firstChild);
  parent.addChildDocument(secondChild);
  return parent;
}
Example use of org.apache.solr.common.SolrDocument from the Apache lucene-solr project:
class TestJavaBinCodec, method doDecode.
/**
 * Repeatedly unmarshals the pre-serialized buffers, cycling through them round-robin,
 * for {@code iter} iterations (used for decode benchmarking).
 *
 * @param buffers     pre-encoded javabin payloads to decode
 * @param iter        number of decode iterations to perform
 * @param stringCache optional string cache handed to each codec instance
 * @return accumulated size of every decoded {@link SolrDocument} (keeps the JIT
 *         from eliminating the work)
 * @throws IOException if unmarshalling fails
 */
public static int doDecode(byte[][] buffers, long iter, JavaBinCodec.StringCache stringCache) throws IOException {
  // Backing stream that is always at EOF; each FastInputStream reads purely
  // from the supplied byte[] buffer.
  InputStream atEof = new InputStream() {
    @Override
    public int read() throws IOException {
      return -1;
    }
  };
  int total = 0;
  int idx = 0;
  for (long remaining = iter; remaining > 0; remaining--) {
    byte[] payload = buffers[idx];
    idx = (idx + 1) % buffers.length; // round-robin over the buffers
    JavaBinCodec codec = new JavaBinCodec(null, stringCache);
    Object decoded = codec.unmarshal(new FastInputStream(atEof, payload, 0, payload.length));
    if (decoded instanceof SolrDocument) {
      total += ((SolrDocument) decoded).size();
    }
  }
  return total;
}
Example use of org.apache.solr.common.SolrDocument from the Apache lucene-solr project:
class TestJavaBinCodec, method genBinaryFiles.
/**
 * Regenerates the reference javabin files used by backward-compatibility tests:
 * one file covering all supported data types and one containing a document with
 * child documents.
 *
 * @throws IOException if serialization or writing either file fails
 */
public void genBinaryFiles() throws IOException {
  // Binary file with all data types.
  Object data = generateAllDataTypes();
  byte[] out = getBytes(data);
  // try-with-resources: the original leaked the stream if write() threw.
  try (BufferedOutputStream bos =
      new BufferedOutputStream(new FileOutputStream(new File(BIN_FILE_LOCATION)))) {
    bos.write(out);
  }

  // Binary file with child documents.
  SolrDocument sdoc = generateSolrDocumentWithChildDocs();
  try (BufferedOutputStream bos =
      new BufferedOutputStream(new FileOutputStream(new File(BIN_FILE_LOCATION_CHILD_DOCS)))) {
    bos.write(getBytes(sdoc));
  }
}
Example use of org.apache.solr.common.SolrDocument from the Apache lucene-solr project:
class DistribCursorPagingTest, method assertFullWalkNoDups.
/**
 * <p>
 * Given a set of params, executes a cursor query using {@link CursorMarkParams#CURSOR_MARK_START}
 * and then continuously walks the results using {@link CursorMarkParams#CURSOR_MARK_NEXT} as long
 * as a non-0 number of docs are returned. This method records the set of all id's
 * (must be positive ints) encountered and throws an assertion failure if any id is
 * encountered more than once, or if the set grows above maxSize
 * </p>
 *
 * <p>
 * Note that this method explicitly uses the "cloudClient" for executing the queries,
 * instead of relying on the test infrastructure to execute the queries redundantly
 * against both the cloud client as well as a control client. This is because term stat
 * differences in a sharded setup can result in different scores for documents compared
 * to the control index -- which can affect the sorting in some cases and cause false
 * negatives in the response comparisons (even if we don't include "score" in the "fl")
 * </p>
 */
public SentinelIntSet assertFullWalkNoDups(int maxSize, SolrParams params) throws Exception {
SentinelIntSet ids = new SentinelIntSet(maxSize, -1);
String cursorMark = CURSOR_MARK_START;
// Sentinel: forces at least one iteration; updated with the real page size each pass.
int docsOnThisPage = Integer.MAX_VALUE;
while (0 < docsOnThisPage) {
final SolrParams p = p(params, CURSOR_MARK_PARAM, cursorMark);
QueryResponse rsp = cloudClient.query(p);
String nextCursorMark = assertHashNextCursorMark(rsp);
SolrDocumentList docs = extractDocList(rsp);
docsOnThisPage = docs.size();
// If the caller capped rows, no page may exceed that cap.
if (null != params.getInt(CommonParams.ROWS)) {
int rows = params.getInt(CommonParams.ROWS);
assertTrue("Too many docs on this page: " + rows + " < " + docsOnThisPage, docsOnThisPage <= rows);
}
// An empty page must return the same cursor mark we sent (end-of-results contract).
if (0 == docsOnThisPage) {
assertEquals("no more docs, but " + CURSOR_MARK_NEXT + " isn't same", cursorMark, nextCursorMark);
}
for (SolrDocument doc : docs) {
int id = ((Integer) doc.get("id")).intValue();
if (ids.exists(id)) {
// Duplicate id: first check whether a shard inconsistency explains it,
// otherwise report the duplicate with the raw query response for diagnosis.
String msg = "(" + p + ") walk already seen: " + id;
try {
queryAndCompareShards(params("distrib", "false", "q", "id:" + id));
} catch (AssertionError ae) {
throw new AssertionError(msg + ", found shard inconsistency that would explain it...", ae);
}
rsp = cloudClient.query(params("q", "id:" + id));
throw new AssertionError(msg + ", don't know why; q=id:" + id + " gives: " + rsp.toString());
}
ids.put(id);
assertFalse("id set bigger then max allowed (" + maxSize + "): " + ids.size(), maxSize < ids.size());
}
// Advance the walk to the next page.
cursorMark = nextCursorMark;
}
return ids;
}
Aggregations