Use of org.apache.commons.lang3.StringUtils.isEmpty in project intellij-plugins by JetBrains.
The class DartServerCompletionContributor, method createLookupElement:
private static LookupElement createLookupElement(@NotNull final Project project, @NotNull final CompletionSuggestion suggestion) {
final Element element = suggestion.getElement();
final Location location = element == null ? null : element.getLocation();
final DartLookupObject lookupObject = new DartLookupObject(project, location);
final String lookupString = suggestion.getCompletion();
LookupElementBuilder lookup = LookupElementBuilder.create(lookupObject, lookupString);
// keywords are bold
if (suggestion.getKind().equals(CompletionSuggestionKind.KEYWORD)) {
lookup = lookup.bold();
}
final int dotIndex = lookupString.indexOf('.');
if (dotIndex > 0 && dotIndex < lookupString.length() - 1 && StringUtil.isJavaIdentifier(lookupString.substring(0, dotIndex)) && StringUtil.isJavaIdentifier(lookupString.substring(dotIndex + 1))) {
// 'path.Context' should match 'Conte' prefix
lookup = lookup.withLookupString(lookupString.substring(dotIndex + 1));
}
boolean shouldSetSelection = true;
if (element != null) {
// @deprecated
if (element.isDeprecated()) {
lookup = lookup.strikeout();
}
// append type parameters
final String typeParameters = element.getTypeParameters();
if (typeParameters != null) {
lookup = lookup.appendTailText(typeParameters, false);
}
// append parameters
final String parameters = element.getParameters();
if (parameters != null) {
lookup = lookup.appendTailText(parameters, false);
}
// append return type
final String returnType = element.getReturnType();
if (!StringUtils.isEmpty(returnType)) {
lookup = lookup.withTypeText(returnType, true);
}
// icon
Icon icon = getBaseImage(element);
if (icon != null) {
icon = applyVisibility(icon, element.isPrivate());
icon = applyOverlay(icon, element.isFinal(), AllIcons.Nodes.FinalMark);
icon = applyOverlay(icon, element.isConst(), AllIcons.Nodes.FinalMark);
lookup = lookup.withIcon(icon);
}
// Prepare for typing arguments, if any.
if (CompletionSuggestionKind.INVOCATION.equals(suggestion.getKind())) {
shouldSetSelection = false;
final List<String> parameterNames = suggestion.getParameterNames();
if (parameterNames != null) {
lookup = lookup.withInsertHandler((context, item) -> {
// like in JavaCompletionUtil.insertParentheses()
final boolean needRightParenth = CodeInsightSettings.getInstance().AUTOINSERT_PAIR_BRACKET || parameterNames.isEmpty() && context.getCompletionChar() != '(';
if (parameterNames.isEmpty()) {
final ParenthesesInsertHandler<LookupElement> handler = ParenthesesInsertHandler.getInstance(false, false, false, needRightParenth, false);
handler.handleInsert(context, item);
} else {
final ParenthesesInsertHandler<LookupElement> handler = ParenthesesInsertHandler.getInstance(true, false, false, needRightParenth, false);
handler.handleInsert(context, item);
// Show parameters popup.
final Editor editor = context.getEditor();
final PsiElement psiElement = lookupObject.getElement();
if (DartCodeInsightSettings.getInstance().INSERT_DEFAULT_ARG_VALUES) {
// Insert argument defaults if provided.
final String argumentListString = suggestion.getDefaultArgumentListString();
if (argumentListString != null) {
final Document document = editor.getDocument();
int offset = editor.getCaretModel().getOffset();
// At this point caret is expected to be right after the opening paren.
// But if the user was completing with Tab over an existing method call that already had arguments, the old arguments are still there;
// in that case, skip inserting argumentListString.
final CharSequence text = document.getCharsSequence();
if (text.charAt(offset - 1) == '(' && text.charAt(offset) == ')') {
document.insertString(offset, argumentListString);
PsiDocumentManager.getInstance(project).commitDocument(document);
final TemplateBuilderImpl builder = (TemplateBuilderImpl) TemplateBuilderFactory.getInstance().createTemplateBuilder(context.getFile());
final int[] ranges = suggestion.getDefaultArgumentListTextRanges();
// Only proceed if ranges are provided and well-formed.
if (ranges != null && (ranges.length & 1) == 0) {
int index = 0;
while (index < ranges.length) {
final int start = ranges[index];
final int length = ranges[index + 1];
final String arg = argumentListString.substring(start, start + length);
final TextExpression expression = new TextExpression(arg);
final TextRange range = new TextRange(offset + start, offset + start + length);
index += 2;
builder.replaceRange(range, "group_" + (index - 1), expression, true);
}
builder.run(editor, true);
}
}
}
}
AutoPopupController.getInstance(project).autoPopupParameterInfo(editor, psiElement);
}
});
}
}
}
// Use selection offset / length.
if (shouldSetSelection) {
lookup = lookup.withInsertHandler((context, item) -> {
final Editor editor = context.getEditor();
final int startOffset = context.getStartOffset() + suggestion.getSelectionOffset();
final int endOffset = startOffset + suggestion.getSelectionLength();
editor.getCaretModel().moveToOffset(startOffset);
if (endOffset > startOffset) {
editor.getSelectionModel().setSelection(startOffset, endOffset);
}
});
}
return PrioritizedLookupElement.withPriority(lookup, suggestion.getRelevance());
}
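The commons-lang3 call in this excerpt is the StringUtils.isEmpty(returnType) guard: a return type is appended as type text only when it is neither null nor an empty string. A minimal sketch (illustration only, not part of the plugin) of how that check behaves for the kinds of values an element may report:

    import org.apache.commons.lang3.StringUtils;

    public class IsEmptyDemo {
        public static void main(String[] args) {
            // null and "" are the two cases isEmpty treats as "no return type"
            System.out.println(StringUtils.isEmpty(null));     // true
            System.out.println(StringUtils.isEmpty(""));       // true
            // whitespace is NOT empty, and a real type name is not empty either
            System.out.println(StringUtils.isEmpty(" "));      // false
            System.out.println(StringUtils.isEmpty("String")); // false
        }
    }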
Use of org.apache.commons.lang3.StringUtils.isEmpty in project nifi by apache.
The class QueryDatabaseTable, method onTrigger:
@Override
public void onTrigger(final ProcessContext context, final ProcessSessionFactory sessionFactory) throws ProcessException {
// Fetch the column/table info once
if (!setupComplete.get()) {
super.setup(context);
}
ProcessSession session = sessionFactory.createSession();
final List<FlowFile> resultSetFlowFiles = new ArrayList<>();
final ComponentLog logger = getLogger();
final DBCPService dbcpService = context.getProperty(DBCP_SERVICE).asControllerService(DBCPService.class);
final DatabaseAdapter dbAdapter = dbAdapters.get(context.getProperty(DB_TYPE).getValue());
final String tableName = context.getProperty(TABLE_NAME).evaluateAttributeExpressions().getValue();
final String columnNames = context.getProperty(COLUMN_NAMES).evaluateAttributeExpressions().getValue();
final String maxValueColumnNames = context.getProperty(MAX_VALUE_COLUMN_NAMES).evaluateAttributeExpressions().getValue();
final String customWhereClause = context.getProperty(WHERE_CLAUSE).evaluateAttributeExpressions().getValue();
final Integer fetchSize = context.getProperty(FETCH_SIZE).evaluateAttributeExpressions().asInteger();
final Integer maxRowsPerFlowFile = context.getProperty(MAX_ROWS_PER_FLOW_FILE).evaluateAttributeExpressions().asInteger();
final Integer outputBatchSizeField = context.getProperty(OUTPUT_BATCH_SIZE).evaluateAttributeExpressions().asInteger();
final int outputBatchSize = outputBatchSizeField == null ? 0 : outputBatchSizeField;
final Integer maxFragments = context.getProperty(MAX_FRAGMENTS).isSet() ? context.getProperty(MAX_FRAGMENTS).evaluateAttributeExpressions().asInteger() : 0;
final JdbcCommon.AvroConversionOptions options = JdbcCommon.AvroConversionOptions.builder().recordName(tableName).maxRows(maxRowsPerFlowFile).convertNames(context.getProperty(NORMALIZE_NAMES_FOR_AVRO).asBoolean()).useLogicalTypes(context.getProperty(USE_AVRO_LOGICAL_TYPES).asBoolean()).defaultPrecision(context.getProperty(DEFAULT_PRECISION).evaluateAttributeExpressions().asInteger()).defaultScale(context.getProperty(DEFAULT_SCALE).evaluateAttributeExpressions().asInteger()).build();
final StateManager stateManager = context.getStateManager();
final StateMap stateMap;
try {
stateMap = stateManager.getState(Scope.CLUSTER);
} catch (final IOException ioe) {
getLogger().error("Failed to retrieve observed maximum values from the State Manager. Will not perform " + "query until this is accomplished.", ioe);
context.yield();
return;
}
// Make a mutable copy of the current state property map. This will be updated by the result row callback, and eventually
// set as the current state map (after the session has been committed)
final Map<String, String> statePropertyMap = new HashMap<>(stateMap.toMap());
// If an initial max value for column(s) has been specified using properties, and this column is not in the state manager, sync them to the state property map
for (final Map.Entry<String, String> maxProp : maxValueProperties.entrySet()) {
String maxPropKey = maxProp.getKey().toLowerCase();
String fullyQualifiedMaxPropKey = getStateKey(tableName, maxPropKey);
if (!statePropertyMap.containsKey(fullyQualifiedMaxPropKey)) {
String newMaxPropValue;
// If there is no value under the fully-qualified key, fall back to a value stored under the plain column name (an older state format), but store the new initial max value under the fully-qualified key.
if (statePropertyMap.containsKey(maxPropKey)) {
newMaxPropValue = statePropertyMap.get(maxPropKey);
} else {
newMaxPropValue = maxProp.getValue();
}
statePropertyMap.put(fullyQualifiedMaxPropKey, newMaxPropValue);
}
}
List<String> maxValueColumnNameList = StringUtils.isEmpty(maxValueColumnNames) ? null : Arrays.asList(maxValueColumnNames.split("\\s*,\\s*"));
final String selectQuery = getQuery(dbAdapter, tableName, columnNames, maxValueColumnNameList, customWhereClause, statePropertyMap);
final StopWatch stopWatch = new StopWatch(true);
final String fragmentIdentifier = UUID.randomUUID().toString();
try (final Connection con = dbcpService.getConnection();
final Statement st = con.createStatement()) {
if (fetchSize != null && fetchSize > 0) {
try {
st.setFetchSize(fetchSize);
} catch (SQLException se) {
// Not all drivers support this, just log the error (at debug level) and move on
logger.debug("Cannot set fetch size to {} due to {}", new Object[] { fetchSize, se.getLocalizedMessage() }, se);
}
}
String jdbcURL = "DBCPService";
try {
DatabaseMetaData databaseMetaData = con.getMetaData();
if (databaseMetaData != null) {
jdbcURL = databaseMetaData.getURL();
}
} catch (SQLException se) {
// Ignore and use default JDBC URL. This shouldn't happen unless the driver doesn't implement getMetaData() properly
}
final Integer queryTimeout = context.getProperty(QUERY_TIMEOUT).evaluateAttributeExpressions().asTimePeriod(TimeUnit.SECONDS).intValue();
// timeout in seconds
st.setQueryTimeout(queryTimeout);
try {
logger.debug("Executing query {}", new Object[] { selectQuery });
final ResultSet resultSet = st.executeQuery(selectQuery);
int fragmentIndex = 0;
while (true) {
final AtomicLong nrOfRows = new AtomicLong(0L);
FlowFile fileToProcess = session.create();
try {
fileToProcess = session.write(fileToProcess, out -> {
// Max values will be updated in the state property map by the callback
final MaxValueResultSetRowCollector maxValCollector = new MaxValueResultSetRowCollector(tableName, statePropertyMap, dbAdapter);
try {
nrOfRows.set(JdbcCommon.convertToAvroStream(resultSet, out, options, maxValCollector));
} catch (SQLException | RuntimeException e) {
throw new ProcessException("Error during database query or conversion of records to Avro.", e);
}
});
} catch (ProcessException e) {
// Add flowfile to results before rethrowing so it will be removed from session in outer catch
resultSetFlowFiles.add(fileToProcess);
throw e;
}
if (nrOfRows.get() > 0) {
// set attribute how many rows were selected
fileToProcess = session.putAttribute(fileToProcess, RESULT_ROW_COUNT, String.valueOf(nrOfRows.get()));
fileToProcess = session.putAttribute(fileToProcess, RESULT_TABLENAME, tableName);
fileToProcess = session.putAttribute(fileToProcess, CoreAttributes.MIME_TYPE.key(), JdbcCommon.MIME_TYPE_AVRO_BINARY);
if (maxRowsPerFlowFile > 0) {
fileToProcess = session.putAttribute(fileToProcess, "fragment.identifier", fragmentIdentifier);
fileToProcess = session.putAttribute(fileToProcess, "fragment.index", String.valueOf(fragmentIndex));
}
logger.info("{} contains {} Avro records; transferring to 'success'", new Object[] { fileToProcess, nrOfRows.get() });
session.getProvenanceReporter().receive(fileToProcess, jdbcURL, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
resultSetFlowFiles.add(fileToProcess);
// If we've reached the batch size, send out the flow files
if (outputBatchSize > 0 && resultSetFlowFiles.size() >= outputBatchSize) {
session.transfer(resultSetFlowFiles, REL_SUCCESS);
session.commit();
resultSetFlowFiles.clear();
}
} else {
// If there were no rows returned, don't send the flowfile
session.remove(fileToProcess);
context.yield();
break;
}
fragmentIndex++;
if (maxFragments > 0 && fragmentIndex >= maxFragments) {
break;
}
}
// Even though the maximum value and total count are known at this point, to maintain consistent behavior if Output Batch Size is set, do not store the attributes
if (outputBatchSize == 0) {
for (int i = 0; i < resultSetFlowFiles.size(); i++) {
// Add maximum values as attributes
for (Map.Entry<String, String> entry : statePropertyMap.entrySet()) {
// Get just the column name from the key
String key = entry.getKey();
String colName = key.substring(key.lastIndexOf(NAMESPACE_DELIMITER) + NAMESPACE_DELIMITER.length());
resultSetFlowFiles.set(i, session.putAttribute(resultSetFlowFiles.get(i), "maxvalue." + colName, entry.getValue()));
}
// set count on all FlowFiles
if (maxRowsPerFlowFile > 0) {
resultSetFlowFiles.set(i, session.putAttribute(resultSetFlowFiles.get(i), "fragment.count", Integer.toString(fragmentIndex)));
}
}
}
} catch (final SQLException e) {
throw e;
}
session.transfer(resultSetFlowFiles, REL_SUCCESS);
} catch (final ProcessException | SQLException e) {
logger.error("Unable to execute SQL select query {} due to {}", new Object[] { selectQuery, e });
if (!resultSetFlowFiles.isEmpty()) {
session.remove(resultSetFlowFiles);
}
context.yield();
} finally {
session.commit();
try {
// Update the state
stateManager.setState(statePropertyMap, Scope.CLUSTER);
} catch (IOException ioe) {
getLogger().error("{} failed to update State Manager, maximum observed values will not be recorded", new Object[] { this, ioe });
}
}
}
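The isEmpty check here decides whether max-value column tracking is in play at all: an unset or empty "Maximum-value Columns" property yields a null list, otherwise the comma-separated names are split on commas with surrounding whitespace. A small, self-contained sketch of that guard (the helper name is made up for illustration, not part of the processor):

    import java.util.Arrays;
    import java.util.List;
    import org.apache.commons.lang3.StringUtils;

    public class MaxValueColumns {
        // Null means "no max-value columns configured"; otherwise split and keep the names.
        static List<String> parse(String maxValueColumnNames) {
            return StringUtils.isEmpty(maxValueColumnNames)
                    ? null
                    : Arrays.asList(maxValueColumnNames.split("\\s*,\\s*"));
        }

        public static void main(String[] args) {
            System.out.println(parse(null));             // null - property not set
            System.out.println(parse("id, updated_at")); // [id, updated_at]
        }
    }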
Use of org.apache.commons.lang3.StringUtils.isEmpty in project nifi by apache.
The class Notify, method onTrigger:
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
final ComponentLog logger = getLogger();
final PropertyValue signalIdProperty = context.getProperty(RELEASE_SIGNAL_IDENTIFIER);
final PropertyValue counterNameProperty = context.getProperty(SIGNAL_COUNTER_NAME);
final PropertyValue deltaProperty = context.getProperty(SIGNAL_COUNTER_DELTA);
final String attributeCacheRegex = context.getProperty(ATTRIBUTE_CACHE_REGEX).getValue();
final Integer bufferCount = context.getProperty(SIGNAL_BUFFER_COUNT).asInteger();
// the cache client used to interact with the distributed cache.
final AtomicDistributedMapCacheClient cache = context.getProperty(DISTRIBUTED_CACHE_SERVICE).asControllerService(AtomicDistributedMapCacheClient.class);
final WaitNotifyProtocol protocol = new WaitNotifyProtocol(cache);
final Map<String, SignalBuffer> signalBuffers = new HashMap<>();
for (int i = 0; i < bufferCount; i++) {
final FlowFile flowFile = session.get();
if (flowFile == null) {
break;
}
// Signal id is computed from attribute 'RELEASE_SIGNAL_IDENTIFIER' with expression language support
final String signalId = signalIdProperty.evaluateAttributeExpressions(flowFile).getValue();
// If the computed value is blank (null, empty, or whitespace only), transfer the flow file to the failure relationship.
if (StringUtils.isBlank(signalId)) {
logger.error("FlowFile {} has no attribute for given Release Signal Identifier", new Object[] { flowFile });
// set 'notified' attribute
session.transfer(session.putAttribute(flowFile, NOTIFIED_ATTRIBUTE_NAME, String.valueOf(false)), REL_FAILURE);
continue;
}
String counterName = counterNameProperty.evaluateAttributeExpressions(flowFile).getValue();
if (StringUtils.isEmpty(counterName)) {
counterName = WaitNotifyProtocol.DEFAULT_COUNT_NAME;
}
int delta = 1;
if (deltaProperty.isSet()) {
final String deltaStr = deltaProperty.evaluateAttributeExpressions(flowFile).getValue();
try {
delta = Integer.parseInt(deltaStr);
} catch (final NumberFormatException e) {
logger.error("Failed to calculate delta for FlowFile {} due to {}", new Object[] { flowFile, e }, e);
session.transfer(session.putAttribute(flowFile, NOTIFIED_ATTRIBUTE_NAME, String.valueOf(false)), REL_FAILURE);
continue;
}
}
if (!signalBuffers.containsKey(signalId)) {
signalBuffers.put(signalId, new SignalBuffer());
}
final SignalBuffer signalBuffer = signalBuffers.get(signalId);
if (StringUtils.isNotEmpty(attributeCacheRegex)) {
flowFile.getAttributes().entrySet().stream().filter(e -> (!e.getKey().equals("uuid") && e.getKey().matches(attributeCacheRegex))).forEach(e -> signalBuffer.attributesToCache.put(e.getKey(), e.getValue()));
}
signalBuffer.incrementDelta(counterName, delta);
signalBuffer.flowFiles.add(flowFile);
if (logger.isDebugEnabled()) {
logger.debug("Cached release signal identifier {} counterName {} from FlowFile {}", new Object[] { signalId, counterName, flowFile });
}
}
signalBuffers.forEach((signalId, signalBuffer) -> {
// If the notification fails, throw the exception so that the processor can retry after yielding for a while.
try {
protocol.notify(signalId, signalBuffer.deltas, signalBuffer.attributesToCache);
signalBuffer.flowFiles.forEach(flowFile -> session.transfer(session.putAttribute(flowFile, NOTIFIED_ATTRIBUTE_NAME, String.valueOf(true)), REL_SUCCESS));
} catch (IOException e) {
throw new RuntimeException(String.format("Unable to communicate with cache when processing %s due to %s", signalId, e), e);
}
});
}
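Three related checks appear in this processor: StringUtils.isBlank rejects a whitespace-only release-signal id, StringUtils.isEmpty falls back to the default counter name only for null or "", and StringUtils.isNotEmpty gates the optional attribute-cache regex. A short sketch (illustration only) of the difference:

    import org.apache.commons.lang3.StringUtils;

    public class NotifyChecks {
        public static void main(String[] args) {
            System.out.println(StringUtils.isBlank("   "));    // true  - such a signal id is routed to failure
            System.out.println(StringUtils.isEmpty("   "));    // false - "   " would be kept as a counter name
            System.out.println(StringUtils.isNotEmpty(".*"));  // true  - the regex is applied to attributes
        }
    }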
Use of org.apache.commons.lang3.StringUtils.isEmpty in project nifi by apache.
The class ExecuteSparkInteractive, method onTrigger:
@Override
public void onTrigger(ProcessContext context, final ProcessSession session) throws ProcessException {
FlowFile flowFile = session.get();
if (flowFile == null) {
return;
}
final ComponentLog log = getLogger();
final LivySessionService livySessionService = context.getProperty(LIVY_CONTROLLER_SERVICE).asControllerService(LivySessionService.class);
final Map<String, String> livyController = livySessionService.getSession();
if (livyController == null || livyController.isEmpty()) {
log.debug("No Spark session available (yet), routing flowfile to wait");
session.transfer(flowFile, REL_WAIT);
return;
}
final long statusCheckInterval = context.getProperty(STATUS_CHECK_INTERVAL).evaluateAttributeExpressions(flowFile).asTimePeriod(TimeUnit.MILLISECONDS);
Charset charset;
try {
charset = Charset.forName(context.getProperty(CHARSET).evaluateAttributeExpressions(flowFile).getValue());
} catch (Exception e) {
log.warn("Illegal character set name specified, defaulting to UTF-8");
charset = StandardCharsets.UTF_8;
}
String sessionId = livyController.get("sessionId");
String livyUrl = livyController.get("livyUrl");
String code = context.getProperty(CODE).evaluateAttributeExpressions(flowFile).getValue();
if (StringUtils.isEmpty(code)) {
try (InputStream inputStream = session.read(flowFile)) {
// If no code was provided, assume it is in the content of the incoming flow file
code = IOUtils.toString(inputStream, charset);
} catch (IOException ioe) {
log.error("Error reading input flowfile, penalizing and routing to failure", new Object[] { flowFile, ioe.getMessage() }, ioe);
flowFile = session.penalize(flowFile);
session.transfer(flowFile, REL_FAILURE);
return;
}
}
code = StringEscapeUtils.escapeJavaScript(code);
String payload = "{\"code\":\"" + code + "\"}";
try {
final JSONObject result = submitAndHandleJob(livyUrl, livySessionService, sessionId, payload, statusCheckInterval);
log.debug("ExecuteSparkInteractive Result of Job Submit: " + result);
if (result == null) {
session.transfer(flowFile, REL_FAILURE);
} else {
try {
final JSONObject output = result.getJSONObject("data");
flowFile = session.write(flowFile, out -> out.write(output.toString().getBytes()));
flowFile = session.putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(), LivySessionService.APPLICATION_JSON);
session.transfer(flowFile, REL_SUCCESS);
} catch (JSONException je) {
// The result doesn't contain the data, just send the output object as the flow file content to failure (after penalizing)
log.error("Spark Session returned an error, sending the output JSON object as the flow file content to failure (after penalizing)");
flowFile = session.write(flowFile, out -> out.write(result.toString().getBytes()));
flowFile = session.putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(), LivySessionService.APPLICATION_JSON);
flowFile = session.penalize(flowFile);
session.transfer(flowFile, REL_FAILURE);
}
}
} catch (IOException ioe) {
log.error("Failure processing flowfile {} due to {}, penalizing and routing to failure", new Object[] { flowFile, ioe.getMessage() }, ioe);
flowFile = session.penalize(flowFile);
session.transfer(flowFile, REL_FAILURE);
}
}
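Here isEmpty implements a fallback: if the Code property resolves to null or an empty string, the script is read from the flow file content instead. A plain-Java sketch of that decision, with hypothetical names and no NiFi API:

    import org.apache.commons.lang3.StringUtils;

    public class CodeFallback {
        // Prefer the configured property; only fall back to the flow file content when it is empty.
        static String resolveCode(String codeProperty, String flowFileContent) {
            return StringUtils.isEmpty(codeProperty) ? flowFileContent : codeProperty;
        }

        public static void main(String[] args) {
            System.out.println(resolveCode(null, "print(1 + 1)"));       // falls back to the content
            System.out.println(resolveCode("spark.version", "ignored")); // the property wins
        }
    }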
Use of org.apache.commons.lang3.StringUtils.isEmpty in project CzechIdMng by bcvsolutions.
The class SchedulerController, method find:
/**
* Finds scheduled tasks
*
* @return all tasks
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
@ResponseBody
@RequestMapping(method = RequestMethod.GET)
@PreAuthorize("hasAuthority('" + CoreGroupPermission.SCHEDULER_READ + "')")
@ApiOperation(value = "Search scheduled tasks", nickname = "searchSchedulerTasks", tags = { SchedulerController.TAG }, authorizations = { @Authorization(value = SwaggerConfig.AUTHENTICATION_BASIC, scopes = { @AuthorizationScope(scope = CoreGroupPermission.SCHEDULER_READ, description = "") }), @Authorization(value = SwaggerConfig.AUTHENTICATION_CIDMST, scopes = { @AuthorizationScope(scope = CoreGroupPermission.SCHEDULER_READ, description = "") }) })
@ApiImplicitParams({ @ApiImplicitParam(name = "page", dataType = "string", paramType = "query", value = "Results page you want to retrieve (0..N)"), @ApiImplicitParam(name = "size", dataType = "string", paramType = "query", value = "Number of records per page."), @ApiImplicitParam(name = "sort", allowMultiple = true, dataType = "string", paramType = "query", value = "Sorting criteria in the format: property(,asc|desc). " + "Default sort order is ascending. " + "Multiple sort criteria are supported.") })
public Resources<Task> find(@RequestParam(required = false) MultiValueMap<String, Object> parameters, @PageableDefault Pageable pageable) {
String text = getParameterConverter().toString(parameters, DataFilter.PARAMETER_TEXT);
List<Task> tasks = schedulerService.getAllTasks().stream().filter(task -> {
// filter - match task type simple name or description only
return StringUtils.isEmpty(text) || task.getTaskType().getSimpleName().toLowerCase().contains(text.toLowerCase()) || (task.getDescription() != null && task.getDescription().toLowerCase().contains(text.toLowerCase()));
}).sorted((taskOne, taskTwo) -> {
Sort sort = pageable.getSort();
if (pageable.getSort() == null) {
return 0;
}
int compareAscValue = 0;
boolean asc = true;
// "naive" sort implementation
if (sort.getOrderFor(PROPERTY_TASK_TYPE) != null) {
asc = sort.getOrderFor(PROPERTY_TASK_TYPE).isAscending();
compareAscValue = taskOne.getTaskType().getSimpleName().compareTo(taskTwo.getTaskType().getSimpleName());
}
if (sort.getOrderFor(PROPERTY_DESCRIPTION) != null) {
asc = sort.getOrderFor(PROPERTY_DESCRIPTION).isAscending();
compareAscValue = taskOne.getDescription().compareTo(taskTwo.getDescription());
}
if (sort.getOrderFor(PROPERTY_INSTANCE_ID) != null) {
asc = sort.getOrderFor(PROPERTY_INSTANCE_ID).isAscending();
compareAscValue = taskOne.getInstanceId().compareTo(taskTwo.getInstanceId());
}
return asc ? compareAscValue : compareAscValue * -1;
}).collect(Collectors.toList());
// "naive" pagination
int first = pageable.getPageNumber() * pageable.getPageSize();
int last = pageable.getPageSize() + first;
List<Task> taskPage = tasks.subList(first < tasks.size() ? first : tasks.size() > 0 ? tasks.size() - 1 : 0, last < tasks.size() ? last : tasks.size());
//
return pageToResources(new PageImpl(taskPage, pageable, tasks.size()), Task.class);
}
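The isEmpty call makes the text filter optional: when no text parameter is supplied, every scheduled task passes; otherwise the task type's simple name or its description must contain the text, case-insensitively. A minimal sketch of the same predicate, using hypothetical stand-ins for the Task fields:

    import org.apache.commons.lang3.StringUtils;

    public class TaskFilter {
        // Empty filter text matches everything; otherwise match name or (nullable) description.
        static boolean matches(String text, String simpleName, String description) {
            return StringUtils.isEmpty(text)
                    || simpleName.toLowerCase().contains(text.toLowerCase())
                    || (description != null && description.toLowerCase().contains(text.toLowerCase()));
        }

        public static void main(String[] args) {
            System.out.println(matches(null, "HrSynchronizationTask", null)); // true - no filter
            System.out.println(matches("hr", "HrSynchronizationTask", null)); // true - name match
            System.out.println(matches("ldap", "HrSynchronizationTask", null)); // false
        }
    }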