Example use of org.teiid.language.Insert in the teiid/teiid project: class TestJDBCUpdateExecution, method testBulkUpdate.
/**
 * Verifies batched execution of a prepared INSERT: 8 parameter rows with a
 * max prepared-insert batch size of 2 must be flushed as 4 batches, and the
 * per-row update counts aggregated into a single array of 8 ones.
 */
@Test
public void testBulkUpdate() throws Exception {
    Insert command = (Insert) TranslationHelper.helpTranslate(TranslationHelper.BQT_VDB, "insert into BQT1.SmallA (IntKey) values (1)"); //$NON-NLS-1$
    // Replace the literal value source with a single positional parameter so the
    // insert is executed as a prepared statement with batched parameter values.
    Parameter param = new Parameter();
    param.setType(Integer.class);
    param.setValueIndex(0);
    ExpressionValueSource evs = new ExpressionValueSource(Arrays.asList((Expression) param));
    command.setValueSource(evs);
    List<List<?>> vals = new ArrayList<List<?>>();
    for (int i = 0; i < 8; i++) {
        vals.add(Arrays.asList(i));
    }
    command.setParameterValues(vals.iterator());
    Connection connection = Mockito.mock(Connection.class);
    PreparedStatement p = Mockito.mock(PreparedStatement.class);
    // Each flushed batch reports two successfully inserted rows.
    Mockito.when(p.executeBatch()).thenReturn(new int[] { 1, 1 });
    Mockito.when(connection.prepareStatement("INSERT INTO SmallA (IntKey) VALUES (?)")).thenReturn(p); //$NON-NLS-1$
    JDBCExecutionFactory config = new JDBCExecutionFactory();
    config.setMaxPreparedInsertBatchSize(2);
    ResultSet r = Mockito.mock(ResultSet.class);
    ResultSetMetaData rs = Mockito.mock(ResultSetMetaData.class);
    Mockito.when(r.getMetaData()).thenReturn(rs);
    Mockito.when(p.getGeneratedKeys()).thenReturn(r);
    FakeExecutionContextImpl context = new FakeExecutionContextImpl();
    // Request generated keys so the getGeneratedKeys path is exercised as well.
    ((org.teiid.query.util.CommandContext) context.getCommandContext()).setReturnAutoGeneratedKeys(Collections.emptyList());
    JDBCUpdateExecution updateExecution = new JDBCUpdateExecution(command, connection, context, config);
    updateExecution.execute();
    // 8 rows / batch size 2 = 4 executeBatch calls of { 1, 1 } each.
    assertArrayEquals(new int[] { 1, 1, 1, 1, 1, 1, 1, 1 }, updateExecution.getUpdateCounts());
}
Example use of org.teiid.language.Insert in the teiid/teiid project: class MongoDBUpdateExecution, method executeInternal.
/**
 * Dispatches the translated command (Insert, Update, or the remaining Delete case)
 * to MongoDB, accumulating each {@link WriteResult} so the affected-document counts
 * can be summed into a single update count in {@code this.results}.
 * <p>
 * Tables "merged" into a parent document are written via {@code $push} (MANY
 * association) or {@code $set} (ONE association) updates against the parent
 * collection rather than direct inserts.
 *
 * @throws TranslatorException if a delete would orphan rows embedded in a
 *         referencing parent document (TEIID18010)
 */
private void executeInternal() throws TranslatorException {
    DBCollection collection = getCollection(this.visitor.mongoDoc.getTargetTable());
    MongoDocument mongoDoc = this.visitor.mongoDoc;
    AggregationOptions options = this.executionFactory.getOptions(this.executionContext.getBatchSize());
    List<WriteResult> executionResults = new ArrayList<WriteResult>();
    if (this.command instanceof Insert) {
        // get pull key based documents to embed
        LinkedHashMap<String, DBObject> embeddedDocuments = fetchEmbeddedDocuments();
        // check if this document need to be embedded in any other document
        if (mongoDoc.isMerged()) {
            DBObject match = getInsertMatch(mongoDoc, this.visitor.columnValues);
            BasicDBObject insert = this.visitor.getInsert(embeddedDocuments);
            if (mongoDoc.getMergeKey().getAssociation() == Association.MANY) {
                // MANY association: append the child as a new element of the parent's
                // array field; the parent key is redundant inside the embedded copy.
                removeParentKey(mongoDoc, insert);
                BasicDBObject insertDoc = new BasicDBObject(mongoDoc.getQualifiedName(true), insert);
                LogManager.logDetail(LogConstants.CTX_CONNECTOR, "update - {\"$match\": {" + match + "}}"); //$NON-NLS-1$ //$NON-NLS-2$
                LogManager.logDetail(LogConstants.CTX_CONNECTOR, "update - {\"$push\": {" + insertDoc + "}}"); //$NON-NLS-1$ //$NON-NLS-2$
                executionResults.add(collection.update(match, new BasicDBObject("$push", insertDoc), false, true, WriteConcern.ACKNOWLEDGED)); //$NON-NLS-1$
            } else {
                // ONE association: store the child as a single nested sub-document via $set.
                insert.remove("_id"); //$NON-NLS-1$
                BasicDBObject insertDoc = new BasicDBObject(mongoDoc.getQualifiedName(true), insert);
                LogManager.logDetail(LogConstants.CTX_CONNECTOR, "update - {\"$match\": {" + match + "}}"); //$NON-NLS-1$ //$NON-NLS-2$
                LogManager.logDetail(LogConstants.CTX_CONNECTOR, "update - {\"$set\": {" + insertDoc + "}}"); //$NON-NLS-1$ //$NON-NLS-2$
                executionResults.add(collection.update(match, new BasicDBObject("$set", insertDoc), false, true, WriteConcern.ACKNOWLEDGED)); //$NON-NLS-1$
            }
        } else {
            // Strip the _id from each pulled embedded document so it does not clash
            // with the owning document's identifier.
            for (String docName : embeddedDocuments.keySet()) {
                DBObject embeddedDoc = embeddedDocuments.get(docName);
                embeddedDoc.removeField("_id"); //$NON-NLS-1$
            }
            // gets its own collection
            BasicDBObject in = this.visitor.getInsert(embeddedDocuments);
            LogManager.logDetail(LogConstants.CTX_CONNECTOR, "{\"insert\": {" + in + "}}"); //$NON-NLS-1$ //$NON-NLS-2$
            executionResults.add(collection.insert(in, WriteConcern.ACKNOWLEDGED));
        }
    } else if (this.command instanceof Update) {
        // get pull key based documents to embed
        LinkedHashMap<String, DBObject> embeddedDocuments = fetchEmbeddedDocuments();
        // Default to an empty match (all documents) when there is no WHERE clause.
        DBObject match = new BasicDBObject();
        if (this.visitor.match != null) {
            match = this.visitor.match;
        }
        if (mongoDoc.isMerged()) {
            // multi items in array update not available, http://jira.mongodb.org/browse/SERVER-1243
            // this work-around for above issue
            List<String> parentKeyNames = parentKeyNames(mongoDoc);
            DBObject documentMatch = new BasicDBObject("$match", match); //$NON-NLS-1$
            DBObject projection = new BasicDBObject("$project", buildProjectForUpdate(mongoDoc)); //$NON-NLS-1$
            // NOTE(review): this Cursor (and the others below) is never closed — confirm
            // whether the driver requires an explicit close here.
            Cursor output = collection.aggregate(Arrays.asList(documentMatch, projection), options);
            while (output.hasNext()) {
                BasicDBObject row = (BasicDBObject) output.next();
                buildUpdate(mongoDoc, collection, row, parentKeyNames, 0, null, executionResults, new UpdateOperationImpl());
            }
        } else {
            for (String docName : embeddedDocuments.keySet()) {
                DBObject embeddedDoc = embeddedDocuments.get(docName);
                embeddedDoc.removeField("_id"); //$NON-NLS-1$
            }
            BasicDBObject u = this.visitor.getUpdate(embeddedDocuments);
            LogManager.logDetail(LogConstants.CTX_CONNECTOR, "update - {\"$match\": {" + match + "}}"); //$NON-NLS-1$ //$NON-NLS-2$
            LogManager.logDetail(LogConstants.CTX_CONNECTOR, "update - {\"$set\": {" + u + "}}"); //$NON-NLS-1$ //$NON-NLS-2$
            executionResults.add(collection.update(match, new BasicDBObject("$set", u), false, true, WriteConcern.ACKNOWLEDGED)); //$NON-NLS-1$
        }
        // how else to handle it.
        if (mongoDoc.isEmbeddable()) {
            // Propagate the change to every parent document that embeds a copy of this table.
            updateReferenceTables(collection, mongoDoc, match, options);
        }
    } else {
        // Delete
        DBObject match = new BasicDBObject();
        if (this.visitor.match != null) {
            match = this.visitor.match;
        }
        if (mongoDoc.isEmbeddable()) {
            // Reject the delete if any matched row is still embedded in a referencing
            // parent document (would leave a dangling embedded copy).
            DBObject m = new BasicDBObject("$match", match); //$NON-NLS-1$
            Cursor output = collection.aggregate(Arrays.asList(m), options);
            while (output.hasNext()) {
                DBObject row = output.next();
                if (row != null) {
                    for (MergeDetails ref : mongoDoc.getEmbeddedIntoReferences()) {
                        DBCollection parent = getCollection(ref.getParentTable());
                        DBObject parentMatch = buildParentMatch(row, ref);
                        DBObject refMatch = new BasicDBObject("$match", parentMatch); //$NON-NLS-1$
                        Cursor referenceOutput = parent.aggregate(Arrays.asList(refMatch), options);
                        if (referenceOutput.hasNext()) {
                            throw new TranslatorException(MongoDBPlugin.Util.gs(MongoDBPlugin.Event.TEIID18010, this.visitor.mongoDoc.getTargetTable().getName(), ref.getParentTable()));
                        }
                    }
                }
            }
        }
        if (mongoDoc.isMerged()) {
            // Same SERVER-1243 work-around as the merged Update path, but applying a
            // delete operation to each matched parent row.
            List<String> parentKeyNames = parentKeyNames(mongoDoc);
            DBObject documentMatch = new BasicDBObject("$match", match); //$NON-NLS-1$
            DBObject projection = new BasicDBObject("$project", buildProjectForUpdate(mongoDoc)); //$NON-NLS-1$
            Cursor output = collection.aggregate(Arrays.asList(documentMatch, projection), options);
            while (output.hasNext()) {
                BasicDBObject row = (BasicDBObject) output.next();
                buildUpdate(mongoDoc, collection, row, parentKeyNames, 0, null, executionResults, new DeleteOperationImpl(match));
            }
        } else {
            LogManager.logDetail(LogConstants.CTX_CONNECTOR, "remove - {\"$match\": {" + match + "}}"); //$NON-NLS-1$ //$NON-NLS-2$
            executionResults.add(collection.remove(match, WriteConcern.ACKNOWLEDGED));
        }
    }
    if (!executionResults.isEmpty()) {
        if (this.command instanceof Insert) {
            if (this.executionContext.getCommandContext().isReturnAutoGeneratedKeys()) {
                addAutoGeneretedKeys(executionResults.get(0));
            }
        }
        // Sum the affected-document counts of all writes into a single update count.
        int updated = 0;
        for (WriteResult result : executionResults) {
            updated += result.getN();
        }
        this.results = new int[1];
        this.results[0] = updated;
    }
}
Example use of org.teiid.language.Insert in the teiid/teiid project: class TestMongoDBUpdateVisitor, method helpExecute.
/**
 * Parses {@code query}, runs it through {@link MongoDBUpdateVisitor}, and asserts
 * the generated MongoDB artifacts against the expected values.
 *
 * @param query      source SQL to translate
 * @param collection expected target collection (table) name
 * @param expected   expected insert/update document as a string (unused for DELETE)
 * @param match      expected match (WHERE) document, checked only when a match was produced
 * @param pushKey    expected merge key, checked only when the document is merged
 * @param pullKeys   expected embedded (pull) references, checked only when present
 * @throws Exception the first translation exception raised by the visitor, if any
 */
private void helpExecute(String query, String collection, String expected, String match, MergeDetails pushKey, List<MergeDetails> pullKeys) throws Exception {
    Command cmd = this.utility.parseCommand(query);
    MongoDBUpdateVisitor visitor = new MongoDBUpdateVisitor(this.translator, this.utility.createRuntimeMetadata(), Mockito.mock(DB.class));
    visitor.visitNode(cmd);
    if (!visitor.exceptions.isEmpty()) {
        throw visitor.exceptions.get(0);
    }
    assertEquals(collection, visitor.mongoDoc.getTargetTable().getName());
    if (cmd instanceof Insert) {
        assertEquals("wrong insert", expected, visitor.getInsert(this.docs).toString()); //$NON-NLS-1$
    } else if (cmd instanceof Update) {
        assertEquals("wrong update", expected, visitor.getUpdate(this.docs).toString()); //$NON-NLS-1$
    } else if (cmd instanceof Delete) {
        // DELETE produces no insert/update document; only the match clause below applies.
    }
    if (visitor.match != null) {
        assertEquals("match wrong", match, visitor.match.toString()); //$NON-NLS-1$
    }
    MongoDocument doc = visitor.mongoDoc;
    if (doc.isMerged()) {
        assertEquals("Wrong PushKey", pushKey.toString(), visitor.mongoDoc.getMergeKey().toString()); //$NON-NLS-1$
    }
    if (!visitor.mongoDoc.getEmbeddedReferences().isEmpty()) {
        // Fixed argument order: expected value (pullKeys) first, actual second, so
        // JUnit failure messages report expected/actual correctly.
        assertEquals("Wrong PullKeys", pullKeys.toString(), visitor.mongoDoc.getEmbeddedReferences().toString()); //$NON-NLS-1$
    }
    // Reset the shared fixture so the next helpExecute call starts clean.
    this.docs = null;
}
Example use of org.teiid.language.Insert in the teiid/teiid project: class InsertExecutionImpl, method execute.
/**
 * Executes the Salesforce INSERT (or upsert) held in {@code command}.
 * <p>
 * Single-row inserts (no parameter values) use the synchronous create/upsert API.
 * Parameterized (bulk) inserts stage all rows into an async bulk job — created
 * lazily on first invocation — and then collect per-row results, recording 1 for a
 * created row, {@link Statement#EXECUTE_FAILED} plus a SQLWarning for a failed row,
 * and {@link Statement#SUCCESS_NO_INFO} otherwise.
 *
 * @throws TranslatorException wrapping any {@link ResourceException} from the connection
 */
@Override
public void execute() throws TranslatorException {
    try {
        Insert insert = (Insert) command;
        if (insert.getParameterValues() == null) {
            // Single-row path: the values are literals embedded in the command.
            DataPayload data = new DataPayload();
            data.setType(this.objectName);
            buildSingleRowInsertPayload(insert, data);
            if (insert.isUpsert()) {
                result = getConnection().upsert(data);
            } else {
                result = getConnection().create(data);
            }
        } else {
            // Bulk path: create the async job and the counts accumulator once.
            if (this.activeJob == null) {
                this.activeJob = getConnection().createBulkJob(this.objectName, insert.isUpsert() ? OperationEnum.upsert : OperationEnum.insert, false);
                counts = new ArrayList<Integer>();
            }
            if (this.activeJob.getState() == JobStateEnum.Open) {
                // Drain all remaining rows into batches of at most
                // maxBulkInsertBatchSize rows each, then close the job for processing.
                while (this.rowIter.hasNext()) {
                    List<SObject> rows = buildBulkRowPayload(insert, this.rowIter, this.executionFactory.getMaxBulkInsertBatchSize());
                    batches.add(getConnection().addBatch(rows, activeJob));
                }
                this.activeJob = getConnection().closeJob(this.activeJob.getId());
            }
            // NOTE(review): getBulkResults presumably throws DataNotAvailableException
            // while the job is still processing, causing the engine to re-invoke
            // execute() with the job state preserved in fields — confirm against callers.
            BatchResult[] batchResult = getConnection().getBulkResults(this.activeJob, batches);
            for (BatchResult br : batchResult) {
                for (Result r : br.getResult()) {
                    if (r.isSuccess() && r.isCreated()) {
                        counts.add(1);
                    } else if (r.getErrors().length > 0) {
                        // Failed row: record EXECUTE_FAILED and surface the first
                        // error as a SQLWarning with its Salesforce status code.
                        counts.add(Statement.EXECUTE_FAILED);
                        this.context.addWarning(new SQLWarning(r.getErrors()[0].getMessage(), r.getErrors()[0].getStatusCode().name()));
                    } else {
                        counts.add(Statement.SUCCESS_NO_INFO);
                    }
                }
            }
            // now process the next set of batch rows
            this.activeJob = null;
        }
    } catch (ResourceException e) {
        throw new TranslatorException(e);
    }
}
Example use of org.teiid.language.Insert in the teiid/teiid project: class TestBulkInsertExecution, method testFlowAndErrorReturn.
/**
 * Exercises the bulk-insert flow end to end with mocked Salesforce results:
 * two created rows and one failed row must yield update counts { 1, 1, -3 }
 * (-3 == Statement.EXECUTE_FAILED), with exactly one job created and one
 * results fetch performed.
 */
@Test
public void testFlowAndErrorReturn() throws Exception {
    // Build an INSERT into temp(one, two) whose values are positional parameters.
    NamedTable targetTable = new NamedTable("temp", null, Mockito.mock(Table.class));
    ArrayList<ColumnReference> columns = new ArrayList<ColumnReference>();
    columns.add(new ColumnReference(targetTable, "one", Mockito.mock(Column.class), Integer.class));
    columns.add(new ColumnReference(targetTable, "two", Mockito.mock(Column.class), String.class));
    Parameter intParam = new Parameter();
    intParam.setType(DataTypeManager.DefaultDataClasses.INTEGER);
    intParam.setValueIndex(0);
    Parameter stringParam = new Parameter();
    stringParam.setType(DataTypeManager.DefaultDataClasses.STRING);
    stringParam.setValueIndex(1);
    List<Expression> parameters = new ArrayList<Expression>();
    parameters.add(intParam);
    parameters.add(stringParam);
    Insert insert = new Insert(targetTable, columns, new ExpressionValueSource(parameters));
    insert.setParameterValues(Arrays.asList(Arrays.asList(2, '2'), Arrays.asList(2, '2'), Arrays.asList(3, '3')).iterator());
    // Two successful row results followed by one failure carrying an error detail.
    Result firstOk = Mockito.mock(Result.class);
    Mockito.when(firstOk.isSuccess()).thenReturn(true);
    Mockito.when(firstOk.isCreated()).thenReturn(true);
    Result secondOk = Mockito.mock(Result.class);
    Mockito.when(secondOk.isSuccess()).thenReturn(true);
    Mockito.when(secondOk.isCreated()).thenReturn(true);
    Result failed = Mockito.mock(Result.class);
    Mockito.when(failed.isSuccess()).thenReturn(false);
    Mockito.when(failed.isCreated()).thenReturn(false);
    com.sforce.async.Error error = new com.sforce.async.Error();
    error.setMessage("failed, check your data");
    error.setStatusCode(StatusCode.CANNOT_DISABLE_LAST_ADMIN);
    Mockito.when(failed.getErrors()).thenReturn(new Error[] { error });
    // Each getResult() call hands back the next single-row result.
    BatchResult batchResult = Mockito.mock(BatchResult.class);
    Mockito.when(batchResult.getResult()).thenReturn(new Result[] { firstOk }).thenReturn(new Result[] { secondOk }).thenReturn(new Result[] { failed });
    SalesforceConnection connection = Mockito.mock(SalesforceConnection.class);
    JobInfo jobInfo = Mockito.mock(JobInfo.class);
    Mockito.when(connection.createBulkJob(Mockito.anyString(), Mockito.eq(OperationEnum.insert), Mockito.eq(false))).thenReturn(jobInfo);
    Mockito.when(connection.getBulkResults(Mockito.any(JobInfo.class), Mockito.anyList())).thenReturn(new BatchResult[] { batchResult, batchResult, batchResult });
    SalesForceExecutionFactory config = new SalesForceExecutionFactory();
    // A batch size of 1 forces one bulk batch per input row.
    config.setMaxBulkInsertBatchSize(1);
    InsertExecutionImpl updateExecution = new InsertExecutionImpl(config, insert, connection, Mockito.mock(RuntimeMetadata.class), Mockito.mock(ExecutionContext.class));
    boolean finished = false;
    while (!finished) {
        try {
            updateExecution.execute();
            org.junit.Assert.assertArrayEquals(new int[] { 1, 1, -3 }, updateExecution.getUpdateCounts());
            finished = true;
        } catch (DataNotAvailableException e) {
            // results not ready yet; poll again
        }
    }
    Mockito.verify(connection, Mockito.times(1)).createBulkJob(Mockito.anyString(), Mockito.eq(OperationEnum.insert), Mockito.eq(false));
    Mockito.verify(connection, Mockito.times(1)).getBulkResults(Mockito.any(JobInfo.class), Mockito.anyList());
}
Aggregations