Usage of com.torodb.core.dsl.backend.BackendTransactionJob in the torodb/torodb project.
Class DefaultToBackendFunction, method apply:
/**
 * Translates the given {@link CollectionData} into the ordered sequence of backend jobs required
 * to persist it: for each doc part, the DDL jobs (create doc part, add scalar, add field) are
 * emitted first, followed by the insert job for that doc part's data.
 *
 * @param collectionData the data to translate; every meta doc part it contains must be an
 *        instance of {@link BatchMetaDocPart}
 * @return the jobs in the exact order in which they must be executed
 */
public Iterable<BackendTransactionJob> apply(CollectionData collectionData) {
    ArrayList<BackendTransactionJob> jobs = new ArrayList<>();
    for (DocPartData docPartData : collectionData.orderedDocPartData()) {
        assert docPartData.getMetaDocPart() instanceof BatchMetaDocPart
            : "This function can only use inputs whose meta doc part information is an instance of "
                + BatchMetaDocPart.class;
        BatchMetaDocPart metaDocPart = (BatchMetaDocPart) docPartData.getMetaDocPart();
        if (metaDocPart.isCreatedOnCurrentBatch()) {
            // Brand-new doc part: create it, then declare every scalar and field it carries.
            jobs.add(factory.createAddDocPartDdlJob(database, collection, metaDocPart));
            metaDocPart.streamScalars()
                .map((scalar) -> factory.createAddScalarDdlJob(database, collection, metaDocPart, scalar))
                .forEachOrdered(jobs::add);
            metaDocPart.streamFields()
                .map((field) -> factory.createAddFieldDdlJob(database, collection, metaDocPart, field))
                .forEachOrdered(jobs::add);
        } else {
            // It already exists, so we only need to add the new scalars and fields.
            for (ImmutableMetaScalar newScalar : metaDocPart.getOnBatchModifiedMetaScalars()) {
                jobs.add(factory.createAddScalarDdlJob(database, collection, metaDocPart, newScalar));
            }
            for (ImmutableMetaField newField : metaDocPart.getOnBatchModifiedMetaFields()) {
                jobs.add(factory.createAddFieldDdlJob(database, collection, metaDocPart, newField));
            }
        }
        // The insert must always come after all DDL jobs of the same doc part.
        jobs.add(factory.insert(database, collection, docPartData));
    }
    if (LOGGER.isTraceEnabled()) {
        LOGGER.trace("Executing the following jobs: {}", jobs);
    }
    return jobs;
}
Usage of com.torodb.core.dsl.backend.BackendTransactionJob in the torodb/torodb project.
Class DefaultToBackendFunctionTest, method testApply_newField:
/**
 * When a doc part already existed before the current batch but gained a new field in it,
 * {@code apply} must emit exactly two jobs: one add-field DDL job and one insert job, with the
 * DDL job ordered before the insert.
 */
@Test
public void testApply_newField() {
    // Strict mock: any interaction that is not explicitly stubbed below fails the test.
    MockSettings settings = new MockSettingsImpl().defaultAnswer((t) -> {
        throw new AssertionError("Method " + t.getMethod() + " was not expected to be called");
    });
    BatchMetaDocPart withNewFieldsDocPart = mock(BatchMetaDocPart.class, settings);
    doReturn(false).when(withNewFieldsDocPart).isCreatedOnCurrentBatch();
    doReturn(Lists.newArrayList(new ImmutableMetaField("newFieldName", "newFieldId", FieldType.INTEGER)))
        .when(withNewFieldsDocPart).getOnBatchModifiedMetaFields();
    doReturn(Collections.emptyList()).when(withNewFieldsDocPart).getOnBatchModifiedMetaScalars();
    DocPartData withNewData = mock(DocPartData.class);
    given(withNewData.getMetaDocPart()).willReturn(withNewFieldsDocPart);
    CollectionData collectionData = mock(CollectionData.class);
    given(collectionData.orderedDocPartData()).willReturn(Lists.<DocPartData>newArrayList(withNewData));
    //when
    Iterable<BackendTransactionJob> result = fun.apply(collectionData);
    ArrayList<BackendTransactionJob> resultList = Lists.newArrayList(result);
    //then
    assertEquals("Expected 2 jobs to do, but " + resultList.size() + " were received", 2, resultList.size());
    {
        // The insert job must target the doc part data we handed in.
        Optional<BackendTransactionJob> insertJob = resultList.stream()
            .filter((job) -> job instanceof InsertBackendJob
                && ((InsertBackendJob) job).getDataToInsert().equals(withNewData))
            .findAny();
        assertTrue(insertJob.isPresent());
        // The add-field job must reference the new field's name and identifier.
        Optional<BackendTransactionJob> addFieldJob = resultList.stream().filter((job) -> {
            if (!(job instanceof AddFieldDdlJob)) {
                return false;
            }
            AddFieldDdlJob castedJob = (AddFieldDdlJob) job;
            return castedJob.getDocPart().equals(withNewFieldsDocPart)
                && castedJob.getField().getName().equals("newFieldName")
                && castedJob.getField().getIdentifier().equals("newFieldId");
        }).findAny();
        assertTrue(addFieldJob.isPresent());
        int addFieldIndex = resultList.indexOf(addFieldJob.get());
        int insertIndex = resultList.indexOf(insertJob.get());
        assert addFieldIndex >= 0;
        assert insertIndex >= 0;
        // Ordering contract: DDL before data.
        assertTrue("For a given doc part, all related add fields jobs must be executed before insert "
            + "jobs, but in this case the add field job has index " + addFieldIndex
            + " and the insert job has index " + insertIndex, addFieldIndex < insertIndex);
    }
}
Usage of com.torodb.core.dsl.backend.BackendTransactionJob in the torodb/torodb project.
Class SameThreadInsertPipeline, method insert:
/**
 * Inserts the given documents synchronously on the calling thread: documents are partitioned
 * into batches of {@code docBatchSize}, each batch is translated to relational form and then to
 * backend jobs, which are executed against the backend connection in order.
 *
 * @param docs the documents to insert
 * @throws RollbackException if the backend requires the operation to be retried
 * @throws UserException if a user-level error (e.g. a constraint violation) occurs; user
 *         exceptions wrapped by the jOOλ {@link Unchecked} consumer are unwrapped and rethrown
 */
@Override
public void insert(Stream<KvDocument> docs) throws RollbackException, UserException {
    D2RTranslationBatchFunction d2rFun =
        new D2RTranslationBatchFunction(translatorFactory, metaDb, mutableMetaCollection);
    DefaultToBackendFunction r2BackendFun =
        new DefaultToBackendFunction(jobFactory, metaDb, mutableMetaCollection);
    try {
        Iterators.partition(docs.iterator(), docBatchSize).forEachRemaining(list -> {
            CollectionData collData = d2rFun.apply(list);
            Iterable<BackendTransactionJob> jobs = r2BackendFun.apply(collData);
            // Unchecked.consumer wraps checked exceptions thrown by execute into an
            // UncheckedException, which is unwrapped below.
            jobs.forEach(Unchecked.consumer(job -> job.execute(backendConnection)));
        });
    } catch (UncheckedException ex) {
        Throwable cause = ex.getCause();
        // instanceof is false for null, so no explicit null check is needed.
        if (cause instanceof UserException) {
            throw (UserException) cause;
        }
        throw ex;
    }
}
Usage of com.torodb.core.dsl.backend.BackendTransactionJob in the torodb/torodb project.
Class DefaultToBackendFunctionTest, method testApplyEmpty:
/**
 * Applying the function to collection data with no doc parts must yield no jobs at all.
 */
@Test
public void testApplyEmpty() {
    // given: collection data that exposes no doc part data
    CollectionData emptyCollectionData = mock(CollectionData.class);
    given(emptyCollectionData.orderedDocPartData())
        .willReturn(Collections.<DocPartData>emptyList());
    // when
    Iterable<BackendTransactionJob> jobs = fun.apply(emptyCollectionData);
    // then
    assertTrue("An empty iterator was expected", Iterables.isEmpty(jobs));
}
Usage of com.torodb.core.dsl.backend.BackendTransactionJob in the torodb/torodb project.
Class DefaultToBackendFunctionTest, method testApply_newDocPart:
/**
 * When a doc part is created on the current batch, {@code apply} must emit four jobs —
 * create doc part, add scalar, add field and insert — and order them so that the create doc part
 * job precedes the add field job, and both DDL jobs precede the insert.
 */
@Test
public void testApply_newDocPart() {
    // Strict mock: any interaction that is not explicitly stubbed below fails the test.
    MockSettings settings = new MockSettingsImpl().defaultAnswer((t) -> {
        throw new AssertionError("Method " + t.getMethod() + " was not expected to be called");
    });
    BatchMetaDocPart allNewDocPart = mock(BatchMetaDocPart.class, settings);
    doReturn(true).when(allNewDocPart).isCreatedOnCurrentBatch();
    doReturn(Lists.newArrayList(Lists.newArrayList(
        new ImmutableMetaField("newFieldName", "newFieldId", FieldType.BOOLEAN))).stream())
        .when(allNewDocPart).streamFields();
    doReturn(Lists.newArrayList(new ImmutableMetaScalar("newScalarId", FieldType.BOOLEAN)).stream())
        .when(allNewDocPart).streamScalars();
    DocPartData allNewData = mock(DocPartData.class);
    given(allNewData.getMetaDocPart()).willReturn(allNewDocPart);
    CollectionData collectionData = mock(CollectionData.class);
    given(collectionData.orderedDocPartData()).willReturn(Lists.<DocPartData>newArrayList(allNewData));
    //when
    Iterable<BackendTransactionJob> result = fun.apply(collectionData);
    ArrayList<BackendTransactionJob> resultList = Lists.newArrayList(result);
    //then
    assertEquals("Expected 4 jobs to do, but " + resultList.size() + " were received", 4, resultList.size());
    {
        // The insert job must target the doc part data we handed in.
        Optional<BackendTransactionJob> insertJob = resultList.stream()
            .filter((job) -> job instanceof InsertBackendJob
                && ((InsertBackendJob) job).getDataToInsert().equals(allNewData))
            .findAny();
        assertTrue(insertJob.isPresent());
        // The add-field job must reference the new field's name and identifier.
        Optional<BackendTransactionJob> addFieldJob = resultList.stream().filter((job) -> {
            if (!(job instanceof AddFieldDdlJob)) {
                return false;
            }
            AddFieldDdlJob castedJob = (AddFieldDdlJob) job;
            return castedJob.getDocPart().equals(allNewDocPart)
                && castedJob.getField().getName().equals("newFieldName")
                && castedJob.getField().getIdentifier().equals("newFieldId");
        }).findAny();
        assertTrue(addFieldJob.isPresent());
        // The add-scalar job must reference the new scalar's identifier and type.
        Optional<BackendTransactionJob> addScalarJob = resultList.stream().filter((job) -> {
            if (!(job instanceof AddScalarDddlJob)) {
                return false;
            }
            AddScalarDddlJob castedJob = (AddScalarDddlJob) job;
            return castedJob.getDocPart().equals(allNewDocPart)
                && castedJob.getScalar().getIdentifier().equals("newScalarId")
                && castedJob.getScalar().getType().equals(FieldType.BOOLEAN);
        }).findAny();
        assertTrue(addScalarJob.isPresent());
        // The create-doc-part job must reference the new doc part.
        Optional<BackendTransactionJob> createDocPartJob = resultList.stream().filter((job) -> {
            if (!(job instanceof AddDocPartDdlJob)) {
                return false;
            }
            AddDocPartDdlJob castedJob = (AddDocPartDdlJob) job;
            return castedJob.getDocPart().equals(allNewDocPart);
        }).findAny();
        assertTrue(createDocPartJob.isPresent());
        int createDocPartIndex = resultList.indexOf(createDocPartJob.get());
        int addFieldIndex = resultList.indexOf(addFieldJob.get());
        int addScalarIndex = resultList.indexOf(addScalarJob.get());
        int insertIndex = resultList.indexOf(insertJob.get());
        assert createDocPartIndex >= 0;
        assert addFieldIndex >= 0;
        assert addScalarIndex >= 0;
        assert insertIndex >= 0;
        // Ordering contract: create doc part < add field/scalar < insert.
        assertTrue("For a given doc part, all related add fields jobs must be executed before insert "
            + "jobs, but in this case the add field job has index " + addFieldIndex
            + " and the insert job has index " + insertIndex, addFieldIndex < insertIndex);
        assertTrue("For a given doc part, all related add scalar jobs must be executed before insert "
            + "jobs, but in this case the add scalar job has index " + addScalarIndex
            + " and the insert job has index " + insertIndex, addScalarIndex < insertIndex);
        assertTrue("For a given doc part, all related create doc part jobs must be executed "
            + "before add field jobs, but in this case the create doc part job has index "
            + createDocPartIndex + " and " + "the add field job has index " + addFieldIndex,
            createDocPartIndex < addFieldIndex);
    }
}
Aggregations.