Use of com.torodb.core.d2r.CollectionData in the torodb project (torodb/torodb):
class DefaultToBackendFunction, method apply.
/**
 * Builds the ordered sequence of backend jobs required to persist the given collection data.
 *
 * <p>For each doc part, DDL jobs (create the doc part, declare scalars and fields) are always
 * appended before the corresponding insert job, so the backend structures exist before data is
 * written into them.
 *
 * @param collectionData the batch data to persist; every meta doc part it references must be a
 *     {@link BatchMetaDocPart} (enforced with an assertion)
 * @return the jobs in the exact order they must be executed
 */
public Iterable<BackendTransactionJob> apply(CollectionData collectionData) {
    ArrayList<BackendTransactionJob> jobs = new ArrayList<>();
    for (DocPartData docPartData : collectionData.orderedDocPartData()) {
        assert docPartData.getMetaDocPart() instanceof BatchMetaDocPart
            : "This function can only use inputs whose meta doc part information is an instance of "
                + BatchMetaDocPart.class;
        BatchMetaDocPart metaDocPart = (BatchMetaDocPart) docPartData.getMetaDocPart();
        if (metaDocPart.isCreatedOnCurrentBatch()) {
            // Brand-new doc part: create it, then declare every scalar and field it has.
            jobs.add(factory.createAddDocPartDdlJob(database, collection, metaDocPart));
            metaDocPart.streamScalars()
                .map((scalar) -> factory.createAddScalarDdlJob(database, collection, metaDocPart, scalar))
                .forEachOrdered(jobs::add);
            metaDocPart.streamFields()
                .map((field) -> factory.createAddFieldDdlJob(database, collection, metaDocPart, field))
                .forEachOrdered(jobs::add);
        } else {
            //it already exists, we only need to add the new scalars and fields
            for (ImmutableMetaScalar newScalar : metaDocPart.getOnBatchModifiedMetaScalars()) {
                jobs.add(factory.createAddScalarDdlJob(database, collection, metaDocPart, newScalar));
            }
            for (ImmutableMetaField newField : metaDocPart.getOnBatchModifiedMetaFields()) {
                jobs.add(factory.createAddFieldDdlJob(database, collection, metaDocPart, newField));
            }
        }
        // The insert job always follows the DDL jobs of the same doc part.
        jobs.add(factory.insert(database, collection, docPartData));
    }
    if (LOGGER.isTraceEnabled()) {
        LOGGER.trace("Executing the following jobs: {}", jobs);
    }
    return jobs;
}
Use of com.torodb.core.d2r.CollectionData in the torodb project (torodb/torodb):
class D2RTranslationBatchFunctionTest, method testApply.
@Test
public void testApply() {
    //given: a factory that yields a mocked translator whose accumulator is a known mock
    D2RTranslator d2rTranslator = mock(D2RTranslator.class);
    CollectionData accumulated = mock(CollectionData.class);
    KvDocument firstDoc = mock(KvDocument.class);
    KvDocument secondDoc = mock(KvDocument.class);
    given(d2rTranslator.getCollectionDataAccumulator()).willReturn(accumulated);
    given(translatorFactory.createTranslator(metaDb, batchMetaCol)).willReturn(d2rTranslator);
    List<KvDocument> documents = Lists.newArrayList(firstDoc, secondDoc);
    //when
    CollectionData result = fun.apply(documents);
    //then: a new batch is opened, each document is translated once, and nothing else happens
    verify(batchMetaCol).newBatch();
    verify(d2rTranslator).translate(firstDoc);
    verify(d2rTranslator).translate(secondDoc);
    verify(d2rTranslator).getCollectionDataAccumulator();
    verifyNoMoreInteractions(d2rTranslator);
    verify(translatorFactory).createTranslator(metaDb, batchMetaCol);
    verifyNoMoreInteractions(translatorFactory);
    //then: the accumulator of the translator is what the function returns
    assertEquals(accumulated, result);
}
Use of com.torodb.core.d2r.CollectionData in the torodb project (torodb/torodb):
class DefaultToBackendFunctionTest, method testApply_newField.
/**
 * Checks that a doc part that already existed before the batch but gained a new field
 * produces exactly one add-field DDL job plus one insert job, with the DDL job first.
 */
@Test
public void testApply_newField() {
    // Mocks fail fast on any call that was not explicitly stubbed below.
    MockSettings settings = new MockSettingsImpl().defaultAnswer((t) -> {
        throw new AssertionError("Method " + t.getMethod() + " was not expected to be called");
    });
    BatchMetaDocPart withNewFieldsDocPart = mock(BatchMetaDocPart.class, settings);
    doReturn(false).when(withNewFieldsDocPart).isCreatedOnCurrentBatch();
    doReturn(Lists.newArrayList(new ImmutableMetaField("newFieldName", "newFieldId", FieldType.INTEGER)))
        .when(withNewFieldsDocPart).getOnBatchModifiedMetaFields();
    doReturn(Collections.emptyList()).when(withNewFieldsDocPart).getOnBatchModifiedMetaScalars();
    DocPartData withNewData = mock(DocPartData.class);
    given(withNewData.getMetaDocPart()).willReturn(withNewFieldsDocPart);
    CollectionData collectionData = mock(CollectionData.class);
    given(collectionData.orderedDocPartData()).willReturn(Lists.<DocPartData>newArrayList(withNewData));
    //when
    Iterable<BackendTransactionJob> result = fun.apply(collectionData);
    ArrayList<BackendTransactionJob> resultList = Lists.newArrayList(result);
    //then
    // Fixed typo in the failure message: "recived" -> "received".
    assertEquals("Expected 2 jobs to do, but " + resultList.size() + " were received",
        2, resultList.size());
    {
        Optional<BackendTransactionJob> insertJob = resultList.stream()
            .filter((job) -> job instanceof InsertBackendJob
                && ((InsertBackendJob) job).getDataToInsert().equals(withNewData))
            .findAny();
        assertTrue(insertJob.isPresent());
        Optional<BackendTransactionJob> addFieldJob = resultList.stream().filter((job) -> {
            if (!(job instanceof AddFieldDdlJob)) {
                return false;
            }
            AddFieldDdlJob castedJob = (AddFieldDdlJob) job;
            return castedJob.getDocPart().equals(withNewFieldsDocPart)
                && castedJob.getField().getName().equals("newFieldName")
                && castedJob.getField().getIdentifier().equals("newFieldId");
        }).findAny();
        assertTrue(addFieldJob.isPresent());
        int addFieldIndex = resultList.indexOf(addFieldJob.get());
        int insertIndex = resultList.indexOf(insertJob.get());
        assert addFieldIndex >= 0;
        assert insertIndex >= 0;
        // DDL must precede data: the add-field job has to come before the insert job.
        assertTrue("For a given doc part, all related add fields jobs must be executed before insert "
            + "jobs, but in this case the add field job has index " + addFieldIndex
            + " and the insert job has index " + insertIndex,
            addFieldIndex < insertIndex);
    }
}
Use of com.torodb.core.d2r.CollectionData in the torodb project (torodb/torodb):
class SameThreadInsertPipeline, method insert.
/**
 * Translates the given documents and executes the resulting backend jobs on the current
 * connection, processing the stream in batches of {@code docBatchSize} documents.
 *
 * @param docs the documents to insert; the stream is fully consumed
 * @throws UserException if a backend job fails with a user-level error
 * @throws RollbackException if the backend requires the transaction to be retried
 */
@Override
public void insert(Stream<KvDocument> docs) throws RollbackException, UserException {
    D2RTranslationBatchFunction d2rFun =
        new D2RTranslationBatchFunction(translatorFactory, metaDb, mutableMetaCollection);
    DefaultToBackendFunction r2BackendFun =
        new DefaultToBackendFunction(jobFactory, metaDb, mutableMetaCollection);
    try {
        Iterators.partition(docs.iterator(), docBatchSize).forEachRemaining(list -> {
            CollectionData collData = d2rFun.apply(list);
            Iterable<BackendTransactionJob> jobs = r2BackendFun.apply(collData);
            // Unchecked.consumer wraps checked exceptions thrown by job.execute into an
            // UncheckedException, which is unwrapped below.
            jobs.forEach(Unchecked.consumer(job -> job.execute(backendConnection)));
        });
    } catch (UncheckedException ex) {
        Throwable cause = ex.getCause();
        // instanceof is already false for null, so no separate null check is needed.
        if (cause instanceof UserException) {
            throw (UserException) cause;
        }
        throw ex;
    }
}
Use of com.torodb.core.d2r.CollectionData in the torodb project (torodb/torodb):
class DefaultToBackendFunctionTest, method testNoBatchMetaDocPart.
/**
 * Checks that the function rejects a meta doc part that is not a {@link BatchMetaDocPart}.
 *
 * <p>Bug fix: the original version called {@code fail(...)} inside the try block, but
 * {@code fail} throws an {@link AssertionError}, which was then swallowed by the
 * {@code catch (AssertionError | ClassCastException ex)} clause — so the test could never
 * fail. The expectation is now recorded in a flag and asserted after the try/catch.
 */
@Test
public void testNoBatchMetaDocPart() {
    CollectionData collectionData = mock(CollectionData.class);
    DocPartData data1 = mock(DocPartData.class);
    given(data1.getMetaDocPart()).willReturn(new WrapperMutableMetaDocPart(
        new ImmutableMetaDocPart(tableRefFactory.createRoot(), "aDocPartName"), (o) -> {
        }));
    given(collectionData.orderedDocPartData()).willReturn(Collections.singleton(data1));
    //when
    boolean exceptionThrown = false;
    try {
        fun.apply(collectionData);
    } catch (AssertionError | ClassCastException ex) {
        // expected: the function only accepts BatchMetaDocPart meta doc parts
        exceptionThrown = true;
    }
    //then
    assertTrue("An exception was expected when a metadoc part which is not a "
        + BatchMetaDocPart.class + " is used", exceptionThrown);
}
Aggregations