Example usage of org.apache.druid.java.util.common.IAE from the Apache Druid project (druid-io): class ArrayOfStringsNullSafeSerdeTest, method testIllegalStrLength.
@Test
public void testIllegalStrLength() {
  // Little-endian encoding of the int -2, which is an invalid string-length marker.
  final byte[] invalidLengthBytes = { -2, -1, -1, -1 };

  final IAE exception = Assert.assertThrows(
      IAE.class,
      () -> serde.deserializeFromMemory(Memory.wrap(invalidLengthBytes), 1)
  );
  Assert.assertEquals(
      "Illegal strLength [-2] at offset [4]. Must be -1, 0 or a positive integer.",
      exception.getMessage()
  );
}
Example usage of org.apache.druid.java.util.common.IAE from the Apache Druid project (druid-io): class DataSourceAnalysis, method flattenJoin.
/**
 * Flatten a datasource into two parts: the left-hand side datasource (the 'base' datasource), and a list of join
 * clauses, if any.
 *
 * @throws IllegalArgumentException if dataSource cannot be fully flattened.
 */
private static Triple<DataSource, DimFilter, List<PreJoinableClause>> flattenJoin(final JoinDataSource dataSource) {
  final List<PreJoinableClause> clauses = new ArrayList<>();
  DimFilter leftFilter = null;
  DataSource base = dataSource;

  // Walk down the left spine of the join tree, collecting one clause per level.
  while (base instanceof JoinDataSource) {
    final JoinDataSource join = (JoinDataSource) base;
    base = join.getLeft();

    // A left filter from a previous (outer) level means the left child there was not direct table access.
    if (leftFilter != null) {
      throw new IAE("Left filters are only allowed when left child is direct table access");
    }
    leftFilter = join.getLeftFilter();

    clauses.add(
        new PreJoinableClause(join.getRightPrefix(), join.getRight(), join.getJoinType(), join.getConditionAnalysis())
    );
  }

  // Clauses were collected while traversing down, but must be applied in going-up order, so reverse them.
  Collections.reverse(clauses);
  return Triple.of(base, leftFilter, clauses);
}
Example usage of org.apache.druid.java.util.common.IAE from the Apache Druid project (druid-io): class BroadcastSegmentIndexedTableTest, method checkNonIndexedReader.
/**
 * Verifies that reading a non-key column through the broadcast table's reader yields the same values as
 * a plain column value selector, and that the column exposes no key index.
 */
private void checkNonIndexedReader(String columnName) {
  checkColumnSelectorFactory(columnName);

  try (final Closer closer = Closer.create()) {
    final int columnNumber = columnNames.indexOf(columnName);
    final int rowCount = backingSegment.asStorageAdapter().getNumRows();

    final IndexedTable.Reader reader = broadcastTable.columnReader(columnNumber);
    closer.register(reader);

    final SimpleAscendingOffset offset = new SimpleAscendingOffset(rowCount);
    final BaseColumn column = backingSegment.asQueryableIndex().getColumnHolder(columnName).getColumn();
    closer.register(column);
    final BaseObjectColumnValueSelector<?> selector = column.makeColumnValueSelector(offset);

    // Compare with the selector to make sure the reader produces the correct value for every row.
    for (int rowNum = 0; rowNum < rowCount; rowNum++) {
      offset.setCurrentOffset(rowNum);
      Assert.assertEquals(selector.getObject(), reader.read(rowNum));
    }

    // A non-key column must not have an index: either a null index or an IAE is acceptable here.
    try {
      Assert.assertNull(broadcastTable.columnIndex(columnNumber));
    } catch (IAE iae) {
      Assert.assertEquals(StringUtils.format("Column[%d] is not a key column", columnNumber), iae.getMessage());
    }
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
Example usage of org.apache.druid.java.util.common.IAE from the Apache Druid project (druid-io): class SqlSegmentsMetadataQuery, method markSegments.
/**
 * Marks the provided segments as either used or unused.
 *
 * All segments must belong to the same datasource; an empty collection is a no-op.
 *
 * @param segmentIds ids of the segments to update; must all share one datasource
 * @param used       the new value of the "used" flag
 * @return the number of segments actually modified
 * @throws IAE if the segments span more than one datasource
 */
public int markSegments(final Collection<SegmentId> segmentIds, final boolean used) {
  if (segmentIds.isEmpty()) {
    return 0;
  }

  final String dataSource = segmentIds.iterator().next().getDataSource();
  if (segmentIds.stream().anyMatch(segment -> !dataSource.equals(segment.getDataSource()))) {
    // Fixed message: it previously said "drop", but this method also marks segments as used.
    throw new IAE("Segments to mark must all be part of the same datasource");
  }

  final PreparedBatch batch = handle.prepareBatch(
      StringUtils.format("UPDATE %s SET used = ? WHERE datasource = ? AND id = ?", dbTables.getSegmentsTable())
  );
  for (SegmentId segmentId : segmentIds) {
    batch.add(used, dataSource, segmentId.toString());
  }

  final int[] segmentChanges = batch.execute();
  return computeNumChangedSegments(
      segmentIds.stream().map(SegmentId::toString).collect(Collectors.toList()),
      segmentChanges
  );
}
Example usage of org.apache.druid.java.util.common.IAE from the Apache Druid project (druid-io): class LDAPRoleProvider, method getRoles.
/**
 * Resolves the set of role names for the given authenticated identity: roles mapped to the user's LDAP
 * groups plus roles assigned directly to the user in the metastore.
 */
@Override
public Set<String> getRoles(String authorizerPrefix, AuthenticationResult authenticationResult) {
  final Map<String, BasicAuthorizerGroupMapping> groupMappingMap = cacheManager.getGroupMappingMap(authorizerPrefix);
  if (groupMappingMap == null) {
    throw new IAE("Could not load groupMappingMap for authorizer [%s]", authorizerPrefix);
  }

  final Map<String, BasicAuthorizerUser> userMap = cacheManager.getUserMap(authorizerPrefix);
  if (userMap == null) {
    throw new IAE("Could not load userMap for authorizer [%s]", authorizerPrefix);
  }

  final Set<String> roleNames = new HashSet<>();

  // Pull the LDAP search result (if one was stashed there) out of the authentication context.
  final SearchResult searchResult = Optional.ofNullable(authenticationResult.getContext())
      .map(contextMap -> contextMap.get(BasicAuthUtils.SEARCH_RESULT_CONTEXT_KEY))
      .map(candidate -> candidate instanceof SearchResult ? (SearchResult) candidate : null)
      .orElse(null);

  if (searchResult != null) {
    try {
      final Set<LdapName> groupNamesFromLdap = getGroupsFromLdap(searchResult);
      if (groupNamesFromLdap.isEmpty()) {
        LOG.debug("User %s is not mapped to any groups", authenticationResult.getIdentity());
      } else {
        // Translate the user's LDAP groups into roles via the metastore mappings, so group
        // membership alone can grant authorization.
        roleNames.addAll(getRoles(groupMappingMap, groupNamesFromLdap));
      }
    } catch (NamingException e) {
      LOG.error(e, "Exception in looking up groups for user %s", authenticationResult.getIdentity());
    }
  }

  // Also include roles assigned directly to the user, independent of any LDAP group membership.
  final BasicAuthorizerUser user = userMap.get(authenticationResult.getIdentity());
  if (user != null) {
    roleNames.addAll(user.getRoles());
  }

  return roleNames;
}
Aggregations