Use of org.craftercms.studio.api.v2.dal.RepoOperation in project studio by craftercms.
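The examples below exercise only a handful of RepoOperation accessors (getAction, getPath, getMoveToPath, getAuthor, getDateTime, getCommitId). Inferred from those call sites, the value object has roughly the following shape; this is an illustrative sketch, not the actual DAL class, and the field types are assumptions.

    import java.time.ZonedDateTime;

    // Rough shape of RepoOperation as implied by the usages on this page (illustrative only).
    public class RepoOperation {

        public enum Action { CREATE, COPY, UPDATE, DELETE, MOVE }

        private Action action;          // what happened to the item in the commit range
        private String path;            // repository path of the affected item
        private String moveToPath;      // destination path, populated for MOVE operations
        private String author;          // commit author
        private ZonedDateTime dateTime; // commit timestamp (type assumed)
        private String commitId;        // commit that produced the operation

        public Action getAction() { return action; }
        public String getPath() { return path; }
        public String getMoveToPath() { return moveToPath; }
        public String getAuthor() { return author; }
        public ZonedDateTime getDateTime() { return dateTime; }
        public String getCommitId() { return commitId; }
    }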
Class GitContentRepository, method getOperationsFromDelta:
@Override
public List<RepoOperation> getOperationsFromDelta(String site, String commitIdFrom, String commitIdTo) {
    List<RepoOperation> operations = new ArrayList<>();
    try {
        GitRepositoryHelper helper = GitRepositoryHelper.getHelper(studioConfiguration, securityService,
                userServiceInternal, encryptor, generalLockService, retryingRepositoryOperationFacade);
        Repository repository = helper.getRepository(site, StringUtils.isEmpty(site) ? GLOBAL : SANDBOX);
        if (repository != null) {
            try {
                // Get the sandbox repo, and then get a reference to the commitId we received and another for head
                boolean fromEmptyRepo = StringUtils.isEmpty(commitIdFrom);
                String firstCommitId = getRepoFirstCommitId(site);
                if (fromEmptyRepo) {
                    commitIdFrom = firstCommitId;
                }
                Repository repo = helper.getRepository(site, SANDBOX);
                ObjectId objCommitIdFrom = repo.resolve(commitIdFrom);
                ObjectId objCommitIdTo = repo.resolve(commitIdTo);
                if (Objects.nonNull(objCommitIdFrom) && Objects.nonNull(objCommitIdTo)) {
                    ObjectId objFirstCommitId = repo.resolve(firstCommitId);
                    try (Git git = new Git(repo)) {
                        if (fromEmptyRepo) {
                            try (RevWalk walk = new RevWalk(repo)) {
                                RevCommit firstCommit = walk.parseCommit(objFirstCommitId);
                                try (ObjectReader reader = repo.newObjectReader()) {
                                    CanonicalTreeParser firstCommitTreeParser = new CanonicalTreeParser();
                                    // reset(reader, firstCommitTree.getId());
                                    firstCommitTreeParser.reset();
                                    // Diff the two commit Ids
                                    long startDiffMark1 = logger.isDebugEnabled() ? System.currentTimeMillis() : 0;
                                    List<DiffEntry> diffEntries = git.diff()
                                            .setOldTree(firstCommitTreeParser)
                                            .setNewTree(null)
                                            .call();
                                    if (logger.isDebugEnabled()) {
                                        logger.debug("Diff from " + objFirstCommitId.getName() + " to null "
                                                + "finished in " + ((System.currentTimeMillis() - startDiffMark1) / 1000)
                                                + " seconds");
                                        logger.debug("Number of diff entries " + diffEntries.size());
                                    }
                                    // Now that we have a diff, let's itemize the file changes, pack them into a TO
                                    // and add them to the list of RepoOperations to return to the caller
                                    // also include date/time of commit by taking number of seconds and multiply by 1000 and
                                    // convert to java date before sending over
                                    operations.addAll(processDiffEntry(git, diffEntries, firstCommit.getId()));
                                }
                            }
                        }
                        // let's do it
                        if (!objCommitIdFrom.equals(objCommitIdTo)) {
                            // Compare HEAD with commitId we're given
                            // Get list of commits between commitId and HEAD in chronological order
                            RevTree fromTree = helper.getTreeForCommit(repo, objCommitIdFrom.getName());
                            RevTree toTree = helper.getTreeForCommit(repo, objCommitIdTo.getName());
                            if (fromTree != null && toTree != null) {
                                try (ObjectReader reader = repo.newObjectReader()) {
                                    CanonicalTreeParser fromCommitTreeParser = new CanonicalTreeParser();
                                    CanonicalTreeParser toCommitTreeParser = new CanonicalTreeParser();
                                    fromCommitTreeParser.reset(reader, fromTree.getId());
                                    toCommitTreeParser.reset(reader, toTree.getId());
                                    // Diff the two commit Ids
                                    long startDiffMark2 = logger.isDebugEnabled() ? System.currentTimeMillis() : 0;
                                    List<DiffEntry> diffEntries = git.diff()
                                            .setOldTree(fromCommitTreeParser)
                                            .setNewTree(toCommitTreeParser)
                                            .call();
                                    if (logger.isDebugEnabled()) {
                                        logger.debug("Diff from " + objCommitIdFrom.getName() + " to " + objCommitIdTo.getName()
                                                + " finished in " + ((System.currentTimeMillis() - startDiffMark2) / 1000)
                                                + " seconds");
                                        logger.debug("Number of diff entries " + diffEntries.size());
                                    }
                                    // Now that we have a diff, let's itemize the file changes, pack them into a TO
                                    // and add them to the list of RepoOperations to return to the caller
                                    // also include date/time of commit by taking number of seconds and multiply by 1000 and
                                    // convert to java date before sending over
                                    operations.addAll(processDiffEntry(git, diffEntries, objCommitIdTo));
                                }
                            }
                        }
                    } catch (GitAPIException e) {
                        logger.error("Error getting operations for site " + site + " from commit ID: " + commitIdFrom
                                + " to commit ID: " + commitIdTo, e);
                    }
                }
            } catch (IOException e) {
                logger.error("Error getting operations for site " + site + " from commit ID: " + commitIdFrom
                        + " to commit ID: " + commitIdTo, e);
            }
        }
    } catch (CryptoException e) {
        logger.error("Error getting operations for site " + site + " from commit ID: " + commitIdFrom
                + " to commit ID: " + commitIdTo, e);
    }
    return operations;
}
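The heart of the method is JGit's two-tree diff. Reduced to a standalone sketch (plain JGit API; the class and helper name are illustrative, not Studio code), comparing two commit IDs looks like this:

    import java.util.List;

    import org.eclipse.jgit.api.Git;
    import org.eclipse.jgit.diff.DiffEntry;
    import org.eclipse.jgit.lib.ObjectId;
    import org.eclipse.jgit.lib.ObjectReader;
    import org.eclipse.jgit.lib.Repository;
    import org.eclipse.jgit.revwalk.RevWalk;
    import org.eclipse.jgit.treewalk.CanonicalTreeParser;

    public class CommitDiffSketch {

        // Diff two commits of an already-open repository and return the changed entries.
        public static List<DiffEntry> diffCommits(Repository repo, String fromCommitId, String toCommitId)
                throws Exception {
            ObjectId from = repo.resolve(fromCommitId);
            ObjectId to = repo.resolve(toCommitId);
            try (Git git = new Git(repo);
                 RevWalk walk = new RevWalk(repo);
                 ObjectReader reader = repo.newObjectReader()) {
                CanonicalTreeParser oldTree = new CanonicalTreeParser();
                CanonicalTreeParser newTree = new CanonicalTreeParser();
                oldTree.reset(reader, walk.parseCommit(from).getTree().getId());
                newTree.reset(reader, walk.parseCommit(to).getTree().getId());
                return git.diff().setOldTree(oldTree).setNewTree(newTree).call();
            }
        }
    }

In JGit, leaving the new tree unset makes DiffCommand fall back to the working tree, which is what the empty-repo branch above relies on when it calls setNewTree(null) against an empty old-tree parser.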
Use of org.craftercms.studio.api.v2.dal.RepoOperation in project studio by craftercms.
Class StudioAuditLogProcessingTask, method processAuditLogFromRepo:
private void processAuditLogFromRepo(String siteId, int batchSize) throws SiteNotFoundException {
    List<GitLog> unauditedGitlogs = contentRepository.getUnauditedCommits(siteId, batchSize);
    if (unauditedGitlogs != null) {
        SiteFeed siteFeed = siteService.getSite(siteId);
        for (GitLog gl : unauditedGitlogs) {
            if (contentRepository.commitIdExists(siteId, gl.getCommitId())) {
                String prevCommitId = gl.getCommitId() + PREVIOUS_COMMIT_SUFFIX;
                List<RepoOperation> operations =
                        contentRepository.getOperationsFromDelta(siteId, prevCommitId, gl.getCommitId());
                for (RepoOperation repoOperation : operations) {
                    Map<String, String> activityInfo = new HashMap<String, String>();
                    String contentClass;
                    AuditLog auditLog;
                    switch (repoOperation.getAction()) {
                        case CREATE:
                        case COPY:
                            contentClass = contentService.getContentTypeClass(siteId, repoOperation.getPath());
                            if (repoOperation.getPath().endsWith(DmConstants.XML_PATTERN)) {
                                activityInfo.put(DmConstants.KEY_CONTENT_TYPE, contentClass);
                            }
                            logger.debug("Insert audit log for site: " + siteId + " path: " + repoOperation.getPath());
                            auditLog = auditServiceInternal.createAuditLogEntry();
                            auditLog.setOperation(OPERATION_CREATE);
                            auditLog.setOperationTimestamp(repoOperation.getDateTime());
                            auditLog.setSiteId(siteFeed.getId());
                            auditLog.setActorId(repoOperation.getAuthor());
                            auditLog.setActorDetails(repoOperation.getAuthor());
                            auditLog.setPrimaryTargetId(siteId + ":" + repoOperation.getPath());
                            auditLog.setPrimaryTargetType(TARGET_TYPE_CONTENT_ITEM);
                            auditLog.setPrimaryTargetValue(repoOperation.getPath());
                            auditLog.setPrimaryTargetSubtype(
                                    contentService.getContentTypeClass(siteId, repoOperation.getPath()));
                            auditLog.setOrigin(ORIGIN_GIT);
                            auditServiceInternal.insertAuditLog(auditLog);
                            break;
                        case UPDATE:
                            contentClass = contentService.getContentTypeClass(siteId, repoOperation.getPath());
                            if (repoOperation.getPath().endsWith(DmConstants.XML_PATTERN)) {
                                activityInfo.put(DmConstants.KEY_CONTENT_TYPE, contentClass);
                            }
                            logger.debug("Insert audit log for site: " + siteId + " path: " + repoOperation.getPath());
                            auditLog = auditServiceInternal.createAuditLogEntry();
                            auditLog.setOperation(OPERATION_UPDATE);
                            auditLog.setOperationTimestamp(repoOperation.getDateTime());
                            auditLog.setSiteId(siteFeed.getId());
                            auditLog.setActorId(repoOperation.getAuthor());
                            auditLog.setActorDetails(repoOperation.getAuthor());
                            auditLog.setOrigin(ORIGIN_GIT);
                            auditLog.setPrimaryTargetId(siteId + ":" + repoOperation.getPath());
                            auditLog.setPrimaryTargetType(TARGET_TYPE_CONTENT_ITEM);
                            auditLog.setPrimaryTargetValue(repoOperation.getPath());
                            auditLog.setPrimaryTargetSubtype(
                                    contentService.getContentTypeClass(siteId, repoOperation.getPath()));
                            auditServiceInternal.insertAuditLog(auditLog);
                            break;
                        case DELETE:
                            contentClass = contentService.getContentTypeClass(siteId, repoOperation.getPath());
                            if (repoOperation.getPath().endsWith(DmConstants.XML_PATTERN)) {
                                activityInfo.put(DmConstants.KEY_CONTENT_TYPE, contentClass);
                            }
                            logger.debug("Insert audit log for site: " + siteId + " path: " + repoOperation.getPath());
                            auditLog = auditServiceInternal.createAuditLogEntry();
                            auditLog.setOperation(OPERATION_DELETE);
                            auditLog.setOperationTimestamp(repoOperation.getDateTime());
                            auditLog.setSiteId(siteFeed.getId());
                            auditLog.setOrigin(ORIGIN_GIT);
                            auditLog.setActorId(repoOperation.getAuthor());
                            auditLog.setActorDetails(repoOperation.getAuthor());
                            auditLog.setPrimaryTargetId(siteId + ":" + repoOperation.getPath());
                            auditLog.setPrimaryTargetType(TARGET_TYPE_CONTENT_ITEM);
                            auditLog.setPrimaryTargetValue(repoOperation.getPath());
                            auditLog.setPrimaryTargetSubtype(
                                    contentService.getContentTypeClass(siteId, repoOperation.getPath()));
                            auditServiceInternal.insertAuditLog(auditLog);
                            break;
                        case MOVE:
                            contentClass = contentService.getContentTypeClass(siteId, repoOperation.getMoveToPath());
                            if (repoOperation.getMoveToPath().endsWith(DmConstants.XML_PATTERN)) {
                                activityInfo.put(DmConstants.KEY_CONTENT_TYPE, contentClass);
                            }
                            logger.debug("Insert audit log for site: " + siteId + " path: " + repoOperation.getMoveToPath());
                            auditLog = auditServiceInternal.createAuditLogEntry();
                            auditLog.setOperation(OPERATION_MOVE);
                            auditLog.setOperationTimestamp(repoOperation.getDateTime());
                            auditLog.setSiteId(siteFeed.getId());
                            auditLog.setActorId(repoOperation.getAuthor());
                            auditLog.setActorDetails(repoOperation.getAuthor());
                            auditLog.setOrigin(ORIGIN_GIT);
                            auditLog.setPrimaryTargetId(siteId + ":" + repoOperation.getMoveToPath());
                            auditLog.setPrimaryTargetType(TARGET_TYPE_CONTENT_ITEM);
                            auditLog.setPrimaryTargetValue(repoOperation.getMoveToPath());
                            auditLog.setPrimaryTargetSubtype(
                                    contentService.getContentTypeClass(siteId, repoOperation.getMoveToPath()));
                            auditServiceInternal.insertAuditLog(auditLog);
                            break;
                        default:
                            logger.error("Error: Unknown repo operation for site " + siteId
                                    + " operation: " + repoOperation.getAction());
                            break;
                    }
                }
            }
            contentRepository.markGitLogAudited(siteId, gl.getCommitId());
        }
    }
}
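Every branch of the switch above fills the same AuditLog fields and only varies the operation constant and the path. A small helper along these lines (illustrative only, not part of the Studio codebase, and assuming it sits in StudioAuditLogProcessingTask next to the method above so it can reuse auditServiceInternal and contentService) would remove most of the duplication:

    // Illustrative helper: builds the audit log entry that every case branch above constructs by hand.
    private AuditLog buildRepoAuditLog(String siteId, SiteFeed siteFeed, String operation,
                                       RepoOperation repoOperation, String path) {
        AuditLog auditLog = auditServiceInternal.createAuditLogEntry();
        auditLog.setOperation(operation);
        auditLog.setOperationTimestamp(repoOperation.getDateTime());
        auditLog.setSiteId(siteFeed.getId());
        auditLog.setActorId(repoOperation.getAuthor());
        auditLog.setActorDetails(repoOperation.getAuthor());
        auditLog.setOrigin(ORIGIN_GIT);
        auditLog.setPrimaryTargetId(siteId + ":" + path);
        auditLog.setPrimaryTargetType(TARGET_TYPE_CONTENT_ITEM);
        auditLog.setPrimaryTargetValue(path);
        auditLog.setPrimaryTargetSubtype(contentService.getContentTypeClass(siteId, path));
        return auditLog;
    }

With that helper, the CREATE/COPY branch would reduce to auditServiceInternal.insertAuditLog(buildRepoAuditLog(siteId, siteFeed, OPERATION_CREATE, repoOperation, repoOperation.getPath())), and the MOVE branch would pass repoOperation.getMoveToPath() instead of the path.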
Use of org.craftercms.studio.api.v2.dal.RepoOperation in project studio by craftercms.
Class SiteServiceImpl, method processRepoOperations:
private boolean processRepoOperations(String siteId, List<RepoOperation> repoOperations, Path file)
        throws IOException {
    boolean toReturn = true;
    long startProcessRepoOperationMark = logger.isDebugEnabled() ? System.currentTimeMillis() : 0;
    for (RepoOperation repoOperation : repoOperations) {
        switch (repoOperation.getAction()) {
            case CREATE:
            case COPY:
                Files.write(file, upsertItemStateRow(siteId, repoOperation.getPath())
                        .getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);
                Files.write(file, "\n\n".getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);
                Files.write(file, upsertItemMetadataRow(siteId, repoOperation.getPath(), repoOperation.getAuthor(),
                        repoOperation.getDateTime(), repoOperation.getCommitId())
                        .getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);
                Files.write(file, "\n\n".getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);
                logger.debug("Extract dependencies for site: " + siteId + " path: " + repoOperation.getPath());
                addDependenciesScriptSnippets(siteId, repoOperation.getPath(), null, file);
                break;
            case UPDATE:
                Files.write(file, transitionSaveItemStateRow(siteId, repoOperation.getPath())
                        .getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);
                Files.write(file, "\n\n".getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);
                Files.write(file, updateItemMetadataRow(siteId, repoOperation.getPath(), repoOperation.getAuthor(),
                        repoOperation.getDateTime(), repoOperation.getCommitId())
                        .getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);
                Files.write(file, "\n\n".getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);
                logger.debug("Extract dependencies for site: " + siteId + " path: " + repoOperation.getPath());
                addDependenciesScriptSnippets(siteId, repoOperation.getPath(), null, file);
                break;
            case DELETE:
                Files.write(file, deleteItemStateRow(siteId, repoOperation.getPath())
                        .getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);
                Files.write(file, "\n\n".getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);
                Files.write(file, deleteItemMetadataRow(siteId, repoOperation.getPath())
                        .getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);
                Files.write(file, "\n\n".getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);
                Files.write(file, deleteDependencyRows(siteId, repoOperation.getPath())
                        .getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);
                Files.write(file, "\n\n".getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);
                break;
            case MOVE:
                Files.write(file, moveItemStateRow(siteId, repoOperation.getPath(), repoOperation.getMoveToPath())
                        .getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);
                Files.write(file, "\n\n".getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);
                Files.write(file, transitionSaveItemStateRow(siteId, repoOperation.getMoveToPath())
                        .getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);
                Files.write(file, "\n\n".getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);
                Files.write(file, moveItemMetadataRow(siteId, repoOperation.getPath(), repoOperation.getMoveToPath())
                        .getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);
                Files.write(file, "\n\n".getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);
                Files.write(file, updateItemMetadataRow(siteId, repoOperation.getMoveToPath(), repoOperation.getAuthor(),
                        repoOperation.getDateTime(), repoOperation.getCommitId())
                        .getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);
                Files.write(file, "\n\n".getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);
                addDependenciesScriptSnippets(siteId, repoOperation.getMoveToPath(), repoOperation.getPath(), file);
                break;
            default:
                logger.error("Error: Unknown repo operation for site " + siteId
                        + " operation: " + repoOperation.getAction());
                toReturn = false;
                break;
        }
    }
    if (logger.isDebugEnabled()) {
        logger.debug("Process Repo operations finished in "
                + (System.currentTimeMillis() - startProcessRepoOperationMark) + " milliseconds");
    }
    return toReturn;
}
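Each branch above appends a SQL snippet followed by the same blank-line separator, several times over. A tiny helper (illustrative only, not part of the Studio codebase, using the same java.nio calls as the method above) captures that repeated pattern:

    // Illustrative helper: append one SQL snippet plus the blank-line separator
    // that processRepoOperations writes after every row.
    private static void appendSnippet(Path file, String sql) throws IOException {
        Files.write(file, sql.getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);
        Files.write(file, "\n\n".getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);
    }

The CREATE/COPY branch would then read appendSnippet(file, upsertItemStateRow(siteId, repoOperation.getPath())) followed by appendSnippet(file, upsertItemMetadataRow(...)), and similarly for the other branches.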
Use of org.craftercms.studio.api.v2.dal.RepoOperation in project studio by craftercms.
Class SiteServiceImpl, method syncDatabaseWithRepo:
@Override
@ValidateParams
public boolean syncDatabaseWithRepo(@ValidateStringParam(name = "site") String site,
                                    @ValidateStringParam(name = "fromCommitId") String fromCommitId,
                                    boolean generateAuditLog) throws SiteNotFoundException {
    boolean toReturn = true;
    String repoLastCommitId = contentRepository.getRepoLastCommitId(site);
    long startGetOperationsFromDeltaMark = logger.isDebugEnabled() ? System.currentTimeMillis() : 0;
    List<RepoOperation> repoOperationsDelta =
            contentRepositoryV2.getOperationsFromDelta(site, fromCommitId, repoLastCommitId);
    if (logger.isDebugEnabled()) {
        logger.debug("Get Repo Operations from Delta finished in "
                + (System.currentTimeMillis() - startGetOperationsFromDeltaMark) + " milliseconds");
        logger.debug("Number of Repo operations from delta " + repoOperationsDelta.size());
    }
    if (CollectionUtils.isEmpty(repoOperationsDelta)) {
        logger.debug("Database is up to date with repository for site: " + site);
        contentRepositoryV2.markGitLogVerifiedProcessed(site, fromCommitId);
        updateLastCommitId(site, repoLastCommitId);
        updateLastVerifiedGitlogCommitId(site, repoLastCommitId);
        return toReturn;
    }
    logger.info("Syncing database with repository for site: " + site + " fromCommitId = "
            + (StringUtils.isEmpty(fromCommitId) ? "Empty repo" : fromCommitId));
    logger.debug("Operations to sync: ");
    for (RepoOperation repoOperation : repoOperationsDelta) {
        logger.debug("\tOperation: " + repoOperation.getAction().toString() + " " + repoOperation.getPath());
    }
    long startUpdateDBMark = logger.isDebugEnabled() ? System.currentTimeMillis() : 0;
    StudioDBScriptRunner studioDBScriptRunner = studioDBScriptRunnerFactory.getDBScriptRunner();
    try {
        String scriptFilename = "repoOperations_" + UUID.randomUUID();
        Path scriptPath = Files.createTempFile(scriptFilename, ".sql");
        toReturn = processRepoOperations(site, repoOperationsDelta, scriptPath);
        studioDBScriptRunner.execute(scriptPath.toFile());
    } catch (IOException e) {
        logger.error("Error while creating db script file for processing created files for site " + site);
    }
    if (logger.isDebugEnabled()) {
        logger.debug("Update DB finished in " + (System.currentTimeMillis() - startUpdateDBMark) + " milliseconds");
    }
    // At this point we have attempted to process all operations, some may have failed
    // We will update the lastCommitId of the database ignoring errors if any
    logger.debug("Done syncing operations with a result of: " + toReturn);
    logger.debug("Syncing database lastCommitId for site: " + site);
    // Update database
    logger.debug("Update last commit id " + repoLastCommitId + " for site " + site);
    updateLastCommitId(site, repoLastCommitId);
    updateLastVerifiedGitlogCommitId(site, repoLastCommitId);
    if (logger.isDebugEnabled()) {
        logger.debug("Update DB finished in " + (System.currentTimeMillis() - startUpdateDBMark) + " milliseconds");
    }
    logger.info("Done syncing database with repository for site: " + site + " fromCommitId = "
            + (StringUtils.isEmpty(fromCommitId) ? "Empty repo" : fromCommitId)
            + " with a final result of: " + toReturn);
    logger.info("Last commit ID for site: " + site + " is " + repoLastCommitId);
    if (!toReturn) {
        // Some operations failed during sync database from repo
        // Must log and make some noise here, this isn't great
        logger.error("Some operations failed to sync to database for site: " + site + " see previous error logs");
    }
    return toReturn;
}
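The generated SQL script goes to a temp file that the method leaves in place after the runner executes it. A sketch of the same lifecycle with best-effort cleanup added (an illustrative variation, not the Studio implementation; variable names match the try block above):

    // Illustrative variation of the try block above: same temp-file naming,
    // with the generated script removed once the runner has executed it.
    Path scriptPath = null;
    try {
        scriptPath = Files.createTempFile("repoOperations_" + UUID.randomUUID(), ".sql");
        toReturn = processRepoOperations(site, repoOperationsDelta, scriptPath);
        studioDBScriptRunner.execute(scriptPath.toFile());
    } catch (IOException e) {
        logger.error("Error while creating db script file for processing created files for site " + site);
    } finally {
        if (scriptPath != null) {
            // best-effort cleanup; File.delete() does not throw a checked exception
            scriptPath.toFile().delete();
        }
    }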
Use of org.craftercms.studio.api.v2.dal.RepoOperation in project studio by craftercms.
Class DeploymentServiceImpl, method createCommitItems:
private List<PublishRequest> createCommitItems(String site, String environment, List<String> commitIds,
                                               ZonedDateTime scheduledDate, String approver, String comment) {
    List<PublishRequest> newItems = new ArrayList<PublishRequest>(commitIds.size());
    String packageId = UUID.randomUUID().toString();
    logger.debug("Get repository operations for each commit id and create publish request items");
    for (String commitId : commitIds) {
        logger.debug("Get repository operations for commit " + commitId);
        List<RepoOperation> operations =
                contentRepositoryV2.getOperationsFromDelta(site, commitId + PREVIOUS_COMMIT_SUFFIX, commitId);
        for (RepoOperation op : operations) {
            logger.debug("Creating publish request item: ");
            PublishRequest item = new PublishRequest();
            item.setId(++CTED_AUTOINCREMENT);
            item.setSite(site);
            item.setEnvironment(environment);
            item.setScheduledDate(scheduledDate);
            item.setState(PublishRequest.State.READY_FOR_LIVE);
            item.setCommitId(commitId);
            item.setUser(approver);
            item.setPackageId(packageId);
            item.setSubmissionComment(comment);
            switch (op.getAction()) {
                case CREATE:
                case COPY:
                    item.setPath(op.getPath());
                    item.setAction(PublishRequest.Action.NEW);
                    item.setContentTypeClass(contentService.getContentTypeClass(site, op.getPath()));
                    break;
                case UPDATE:
                    item.setPath(op.getPath());
                    item.setAction(PublishRequest.Action.UPDATE);
                    item.setContentTypeClass(contentService.getContentTypeClass(site, op.getPath()));
                    break;
                case DELETE:
                    item.setPath(op.getPath());
                    item.setAction(PublishRequest.Action.DELETE);
                    item.setContentTypeClass(contentService.getContentTypeClass(site, op.getPath()));
                    break;
                case MOVE:
                    item.setPath(op.getMoveToPath());
                    item.setOldPath(op.getPath());
                    item.setAction(PublishRequest.Action.MOVE);
                    item.setContentTypeClass(contentService.getContentTypeClass(site, op.getPath()));
                    break;
                default:
                    logger.error("Error: Unknown repo operation for site " + site + " operation: " + op.getAction());
                    continue;
            }
            logger.debug("\tPath: " + item.getPath() + " operation: " + item.getAction());
            newItems.add(item);
        }
    }
    logger.debug("Created " + newItems.size() + " publish request items for queue");
    return newItems;
}
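The switch above boils down to a one-to-one mapping from repo operation actions to publish request actions, with CREATE and COPY both becoming NEW. Expressed as a standalone helper (illustrative only, assuming the same RepoOperation.Action and PublishRequest.Action enums used above):

    // Illustrative mapping helper, not Studio code; mirrors the switch in createCommitItems.
    private static PublishRequest.Action toPublishAction(RepoOperation.Action action) {
        switch (action) {
            case CREATE:
            case COPY:
                return PublishRequest.Action.NEW;
            case UPDATE:
                return PublishRequest.Action.UPDATE;
            case DELETE:
                return PublishRequest.Action.DELETE;
            case MOVE:
                return PublishRequest.Action.MOVE;
            default:
                throw new IllegalArgumentException("Unknown repo operation: " + action);
        }
    }

Note that the MOVE branch above also sets the old path and the destination path, so the helper covers only the action field, not the per-action path handling.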