Example 1 with MapReduceInfoResponseList

Use of datawave.webservice.results.mr.MapReduceInfoResponseList in the project datawave by NationalSecurityAgency.

From the class MapReduceStatePersisterBean, the method find:

/**
 * Returns all MapReduce jobs for the current user
 *
 * @return list of map reduce information
 */
public MapReduceInfoResponseList find() {
    // Find out who/what called this method
    Principal p = ctx.getCallerPrincipal();
    String sid = p.getName();
    Set<Authorizations> auths = new HashSet<>();
    if (p instanceof DatawavePrincipal) {
        DatawavePrincipal dp = (DatawavePrincipal) p;
        sid = dp.getShortName();
        for (Collection<String> cbAuths : dp.getAuthorizations()) {
            auths.add(new Authorizations(cbAuths.toArray(new String[cbAuths.size()])));
        }
    }
    log.trace(sid + " has authorizations " + auths);
    MapReduceInfoResponseList result = new MapReduceInfoResponseList();
    Connector c = null;
    try {
        Map<String, String> trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace());
        c = connectionFactory.getConnection(AccumuloConnectionFactory.Priority.ADMIN, trackingMap);
        tableCheck(c);
        try (Scanner scanner = ScannerHelper.createScanner(c, TABLE_NAME, auths)) {
            scanner.fetchColumnFamily(new Text(sid));
            // We need to create a response for each job
            String previousRow = sid;
            Map<Key, Value> batch = new HashMap<>();
            for (Entry<Key, Value> entry : scanner) {
                if (!previousRow.equals(entry.getKey().getRow().toString()) && !batch.isEmpty()) {
                    MapReduceInfoResponse response = populateResponse(batch.entrySet());
                    if (null != response)
                        result.getResults().add(response);
                    batch.clear();
                } else {
                    batch.put(entry.getKey(), entry.getValue());
                }
                previousRow = entry.getKey().getRow().toString();
            }
            if (!batch.isEmpty()) {
                MapReduceInfoResponse response = populateResponse(batch.entrySet());
                if (null != response)
                    result.getResults().add(response);
                batch.clear();
            }
            return result;
        }
    } catch (IOException ioe) {
        QueryException qe = new QueryException(DatawaveErrorCode.RESPONSE_POPULATION_ERROR, ioe);
        log.error(qe);
        result.addException(qe);
        return result;
    } catch (Exception e) {
        QueryException qe = new QueryException(DatawaveErrorCode.QUERY_SETUP_ERROR, e);
        log.error(qe);
        result.addException(qe.getBottomQueryException());
        return result;
    } finally {
        try {
            connectionFactory.returnConnection(c);
        } catch (Exception e) {
            log.error("Error returning connection to connection pool", e);
        }
    }
}
Also used: Connector (org.apache.accumulo.core.client.Connector), Scanner (org.apache.accumulo.core.client.Scanner), Authorizations (org.apache.accumulo.core.security.Authorizations), HashMap (java.util.HashMap), Text (org.apache.hadoop.io.Text), MapReduceInfoResponseList (datawave.webservice.results.mr.MapReduceInfoResponseList), IOException (java.io.IOException), DatawavePrincipal (datawave.security.authorization.DatawavePrincipal), TableNotFoundException (org.apache.accumulo.core.client.TableNotFoundException), NotFoundQueryException (datawave.webservice.query.exception.NotFoundQueryException), TableExistsException (org.apache.accumulo.core.client.TableExistsException), AccumuloSecurityException (org.apache.accumulo.core.client.AccumuloSecurityException), AccumuloException (org.apache.accumulo.core.client.AccumuloException), QueryException (datawave.webservice.query.exception.QueryException), MapReduceInfoResponse (datawave.webservice.results.mr.MapReduceInfoResponse), Value (org.apache.accumulo.core.data.Value), Principal (java.security.Principal), Key (org.apache.accumulo.core.data.Key), HashSet (java.util.HashSet)
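
For orientation, a caller might consume find() as in the sketch below. This is not part of the DataWave source: the MapReduceJobLister class and its constructor injection are hypothetical, the import of MapReduceStatePersisterBean is omitted because its package is not shown on this page, and only find(), getResults(), and MapReduceInfoResponse.getId() are taken from the examples here.

// Hypothetical caller sketch (not from the DataWave source). Assumes
// MapReduceStatePersisterBean is importable or in the same package.
import datawave.webservice.results.mr.MapReduceInfoResponse;
import datawave.webservice.results.mr.MapReduceInfoResponseList;

public class MapReduceJobLister {

    private final MapReduceStatePersisterBean persister; // normally container-injected

    public MapReduceJobLister(MapReduceStatePersisterBean persister) {
        this.persister = persister;
    }

    /** Logs the id of every MapReduce job visible to the calling principal and returns how many there were. */
    public int listJobs() {
        // find() never returns null; failures are attached to the list via addException(...)
        MapReduceInfoResponseList jobs = persister.find();
        for (MapReduceInfoResponse job : jobs.getResults()) {
            System.out.println("map reduce id: " + job.getId());
        }
        return jobs.getResults().size();
    }
}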

Example 2 with MapReduceInfoResponseList

Use of datawave.webservice.results.mr.MapReduceInfoResponseList in the project datawave by NationalSecurityAgency.

From the class MapReduceStatePersisterBean, the method findById:

/**
 * Information for a specific map reduce id
 *
 * @param id
 *            map reduce id
 * @return list of map reduce information
 */
public MapReduceInfoResponseList findById(String id) {
    // Find out who/what called this method
    Principal p = ctx.getCallerPrincipal();
    String sid = p.getName();
    Set<Authorizations> auths = new HashSet<>();
    if (p instanceof DatawavePrincipal) {
        DatawavePrincipal dp = (DatawavePrincipal) p;
        sid = dp.getShortName();
        for (Collection<String> cbAuths : dp.getAuthorizations()) {
            auths.add(new Authorizations(cbAuths.toArray(new String[cbAuths.size()])));
        }
    }
    log.trace(sid + " has authorizations " + auths);
    MapReduceInfoResponseList result = new MapReduceInfoResponseList();
    Connector c = null;
    try {
        Map<String, String> trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace());
        c = connectionFactory.getConnection(AccumuloConnectionFactory.Priority.ADMIN, trackingMap);
        tableCheck(c);
        try (Scanner scanner = ScannerHelper.createScanner(c, TABLE_NAME, auths)) {
            Range range = new Range(id);
            scanner.setRange(range);
            scanner.fetchColumnFamily(new Text(sid));
            MapReduceInfoResponse response = populateResponse(scanner);
            if (null != response)
                result.getResults().add(response);
            return result;
        }
    } catch (IOException ioe) {
        QueryException qe = new QueryException(DatawaveErrorCode.RESPONSE_POPULATION_ERROR, ioe);
        log.error(qe);
        result.addException(qe);
        return result;
    } catch (Exception e) {
        QueryException qe = new QueryException(DatawaveErrorCode.QUERY_SETUP_ERROR, e);
        log.error(qe);
        result.addException(qe.getBottomQueryException());
        return result;
    } finally {
        try {
            connectionFactory.returnConnection(c);
        } catch (Exception e) {
            log.error("Error returning connection to connection pool", e);
        }
    }
}
Also used: Connector (org.apache.accumulo.core.client.Connector), Scanner (org.apache.accumulo.core.client.Scanner), Authorizations (org.apache.accumulo.core.security.Authorizations), Text (org.apache.hadoop.io.Text), MapReduceInfoResponseList (datawave.webservice.results.mr.MapReduceInfoResponseList), IOException (java.io.IOException), Range (org.apache.accumulo.core.data.Range), DatawavePrincipal (datawave.security.authorization.DatawavePrincipal), TableNotFoundException (org.apache.accumulo.core.client.TableNotFoundException), NotFoundQueryException (datawave.webservice.query.exception.NotFoundQueryException), TableExistsException (org.apache.accumulo.core.client.TableExistsException), AccumuloSecurityException (org.apache.accumulo.core.client.AccumuloSecurityException), AccumuloException (org.apache.accumulo.core.client.AccumuloException), QueryException (datawave.webservice.query.exception.QueryException), MapReduceInfoResponse (datawave.webservice.results.mr.MapReduceInfoResponse), Principal (java.security.Principal), HashSet (java.util.HashSet)
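
A hedged sketch of a caller that looks up a single job by id. The class and method names are illustrative, the MapReduceStatePersisterBean import is again omitted, and only accessors already visible in the examples on this page are used: findById(String), getResults(), getId(), getJobExecutions(), and JobExecution.getMapReduceJobId().

// Hypothetical caller sketch (not from the DataWave source).
import java.util.List;

import datawave.webservice.results.mr.JobExecution;
import datawave.webservice.results.mr.MapReduceInfoResponse;
import datawave.webservice.results.mr.MapReduceInfoResponseList;

public class MapReduceJobLookup {

    private final MapReduceStatePersisterBean persister; // normally container-injected

    public MapReduceJobLookup(MapReduceStatePersisterBean persister) {
        this.persister = persister;
    }

    /** Prints the job executions recorded under the given map reduce id, if any are visible to the caller. */
    public void printJobExecutions(String id) {
        MapReduceInfoResponseList list = persister.findById(id);
        List<MapReduceInfoResponse> results = list.getResults();
        if (results.isEmpty()) {
            System.out.println("No visible map reduce job with id " + id);
            return;
        }
        // findById scans a single row, so at most one response is expected
        MapReduceInfoResponse response = results.get(0);
        for (JobExecution job : response.getJobExecutions()) {
            System.out.println(response.getId() + " -> " + job.getMapReduceJobId());
        }
    }
}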

Example 3 with MapReduceInfoResponseList

Use of datawave.webservice.results.mr.MapReduceInfoResponseList in the project datawave by NationalSecurityAgency.

From the class MapReduceStatePersisterBean, the method remove:

/**
 * Removes Bulk Results information and related directory in HDFS for the given job id.
 *
 * @param id
 *            bulk results id
 */
public void remove(String id) throws QueryException {
    // Find out who/what called this method
    Principal p = ctx.getCallerPrincipal();
    String sid = p.getName();
    if (p instanceof DatawavePrincipal) {
        DatawavePrincipal dp = (DatawavePrincipal) p;
        sid = dp.getShortName();
    }
    MapReduceInfoResponseList results = findById(id);
    if (null == results)
        throw new NotFoundQueryException(DatawaveErrorCode.NO_QUERY_OBJECT_MATCH);
    if (results.getResults().size() > 1)
        throw new NotFoundQueryException(DatawaveErrorCode.TOO_MANY_QUERY_OBJECT_MATCHES);
    else {
        MapReduceInfoResponse r = results.getResults().get(0);
        List<Mutation> indexEntries = new ArrayList<>();
        Mutation m = new Mutation(r.getId());
        m.putDelete(sid, WORKING_DIRECTORY);
        m.putDelete(sid, HDFS);
        m.putDelete(sid, JT);
        m.putDelete(sid, NAME);
        m.putDelete(sid, RESULTS_LOCATION);
        m.putDelete(sid, PARAMS);
        for (JobExecution job : r.getJobExecutions()) {
            m.putDelete(sid, STATE + NULL + job.getMapReduceJobId());
            Mutation i = new Mutation(job.getMapReduceJobId());
            i.putDelete(sid, r.getId());
            indexEntries.add(i);
        }
        Connector c = null;
        try {
            Map<String, String> trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace());
            c = connectionFactory.getConnection(AccumuloConnectionFactory.Priority.ADMIN, trackingMap);
            tableCheck(c);
            // using BatchWriter instead of MultiTableBatchWriter because Mock CB does not support
            // MultiTableBatchWriter
            BatchWriterConfig bwCfg = new BatchWriterConfig().setMaxLatency(10, TimeUnit.SECONDS).setMaxMemory(10240L).setMaxWriteThreads(1);
            try (BatchWriter tableWriter = c.createBatchWriter(TABLE_NAME, bwCfg);
                BatchWriter indexWriter = c.createBatchWriter(INDEX_TABLE_NAME, bwCfg)) {
                tableWriter.addMutation(m);
                for (Mutation i : indexEntries) indexWriter.addMutation(i);
            }
        } catch (RuntimeException re) {
            throw re;
        } catch (Exception e) {
            QueryException qe = new QueryException(DatawaveErrorCode.JOB_STATE_UPDATE_ERROR, e, MessageFormat.format("job_id: {0}", id));
            log.error(qe);
            throw new QueryException(qe);
        } finally {
            try {
                connectionFactory.returnConnection(c);
            } catch (Exception e) {
                log.error("Error creating query", e);
            }
        }
    }
}
Also used: Connector (org.apache.accumulo.core.client.Connector), ArrayList (java.util.ArrayList), MapReduceInfoResponseList (datawave.webservice.results.mr.MapReduceInfoResponseList), DatawavePrincipal (datawave.security.authorization.DatawavePrincipal), NotFoundQueryException (datawave.webservice.query.exception.NotFoundQueryException), TableNotFoundException (org.apache.accumulo.core.client.TableNotFoundException), TableExistsException (org.apache.accumulo.core.client.TableExistsException), AccumuloSecurityException (org.apache.accumulo.core.client.AccumuloSecurityException), IOException (java.io.IOException), AccumuloException (org.apache.accumulo.core.client.AccumuloException), QueryException (datawave.webservice.query.exception.QueryException), JobExecution (datawave.webservice.results.mr.JobExecution), MapReduceInfoResponse (datawave.webservice.results.mr.MapReduceInfoResponse), BatchWriterConfig (org.apache.accumulo.core.client.BatchWriterConfig), Mutation (org.apache.accumulo.core.data.Mutation), BatchWriter (org.apache.accumulo.core.client.BatchWriter), Principal (java.security.Principal)
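
A hedged sketch of a caller that removes a job's persisted state. The class and method names are illustrative and the MapReduceStatePersisterBean import is omitted; remove(String) and QueryException come from the example above.

// Hypothetical caller sketch (not from the DataWave source); remove(String) is
// declared above to throw QueryException.
import datawave.webservice.query.exception.QueryException;

public class MapReduceJobCleanup {

    private final MapReduceStatePersisterBean persister; // normally container-injected

    public MapReduceJobCleanup(MapReduceStatePersisterBean persister) {
        this.persister = persister;
    }

    /** Attempts to delete the persisted state for the given id; returns true if the delete succeeded. */
    public boolean tryRemove(String id) {
        try {
            persister.remove(id);
            return true;
        } catch (QueryException e) {
            // Expected to cover NO_QUERY_OBJECT_MATCH / TOO_MANY_QUERY_OBJECT_MATCHES as well as
            // JOB_STATE_UPDATE_ERROR wrapping Accumulo write failures (see the method above).
            return false;
        }
    }
}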

Example 4 with MapReduceInfoResponseList

Use of datawave.webservice.results.mr.MapReduceInfoResponseList in the project datawave by NationalSecurityAgency.

From the test class MapReduceStatePersisterTest, the method testDontFindSomeoneElsesJob:

@Test
public void testDontFindSomeoneElsesJob() throws Exception {
    // create some entries
    testPersistentCreate();
    PowerMock.resetAll();
    DatawaveUser user = new DatawaveUser(SubjectIssuerDNPair.of("CN=Gal Some Other sogal, OU=acme", "CN=ca, OU=acme"), UserType.USER, Arrays.asList(auths), null, null, 0L);
    principal = new DatawavePrincipal(Collections.singletonList(user));
    EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal);
    HashMap<String, String> trackingMap = new HashMap<>();
    expect(connectionFactory.getTrackingMap(EasyMock.anyObject())).andReturn(trackingMap);
    expect(connectionFactory.getConnection(EasyMock.eq(AccumuloConnectionFactory.Priority.ADMIN), EasyMock.eq(trackingMap))).andReturn(connection);
    connectionFactory.returnConnection(connection);
    replayAll();
    MapReduceInfoResponseList result = bean.findById(id);
    verifyAll();
    assertEquals(0, result.getResults().size());
}
Also used: HashMap (java.util.HashMap), DatawaveUser (datawave.security.authorization.DatawaveUser), MapReduceInfoResponseList (datawave.webservice.results.mr.MapReduceInfoResponseList), DatawavePrincipal (datawave.security.authorization.DatawavePrincipal), Test (org.junit.Test)
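
The test relies on EasyMock's record/replay/verify lifecycle (via PowerMock's resetAll()/replayAll()/verifyAll() helpers). Below is a minimal, self-contained sketch of that lifecycle, substituting a plain java.util.List for the mocked DataWave collaborators; it is illustrative only and not part of the DataWave test.

// Self-contained EasyMock lifecycle sketch; the mocked List stands in for
// collaborators such as ctx and connectionFactory in the test above.
import static org.junit.Assert.assertEquals;

import java.util.List;

import org.easymock.EasyMock;
import org.junit.Test;

public class EasyMockLifecycleExample {

    @Test
    public void expectReplayVerify() {
        @SuppressWarnings("unchecked")
        List<String> mock = EasyMock.createMock(List.class);

        // 1. Record expectations before exercising the code under test.
        EasyMock.expect(mock.size()).andReturn(0);

        // 2. Switch the mock into replay mode.
        EasyMock.replay(mock);

        // 3. Exercise the code under test (here, the mock directly).
        assertEquals(0, mock.size());

        // 4. Verify that every recorded expectation was satisfied.
        EasyMock.verify(mock);
    }
}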

Example 5 with MapReduceInfoResponseList

Use of datawave.webservice.results.mr.MapReduceInfoResponseList in the project datawave by NationalSecurityAgency.

From the test class MapReduceStatePersisterTest, the method testDontFindSomeoneElsesResults:

@Test
public void testDontFindSomeoneElsesResults() throws Exception {
    // create some entries
    testPersistentCreate();
    PowerMock.resetAll();
    id = UUID.randomUUID().toString();
    testPersistentCreate();
    PowerMock.resetAll();
    id = UUID.randomUUID().toString();
    testPersistentCreate();
    PowerMock.resetAll();
    DatawaveUser user = new DatawaveUser(SubjectIssuerDNPair.of("CN=Gal Some Other sogal, OU=acme", "CN=ca, OU=acme"), UserType.USER, Arrays.asList(auths), null, null, 0L);
    principal = new DatawavePrincipal(Collections.singletonList(user));
    EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal);
    HashMap<String, String> trackingMap = new HashMap<>();
    expect(connectionFactory.getTrackingMap(EasyMock.anyObject())).andReturn(trackingMap);
    expect(connectionFactory.getConnection(EasyMock.eq(AccumuloConnectionFactory.Priority.ADMIN), EasyMock.eq(trackingMap))).andReturn(connection);
    connectionFactory.returnConnection(connection);
    replayAll();
    MapReduceInfoResponseList result = bean.find();
    verifyAll();
    assertEquals(0, result.getResults().size());
}
Also used: HashMap (java.util.HashMap), DatawaveUser (datawave.security.authorization.DatawaveUser), MapReduceInfoResponseList (datawave.webservice.results.mr.MapReduceInfoResponseList), DatawavePrincipal (datawave.security.authorization.DatawavePrincipal), Test (org.junit.Test)
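
Both tests build the "other user" principal the same way. The sketch below isolates that construction: the constructor call mirrors the tests above, the authorization strings are placeholders, and the import paths for UserType and SubjectIssuerDNPair are assumed from the package shown for DatawaveUser and DatawavePrincipal.

// Sketch of the test principal construction mirrored from the tests above.
// Authorization strings are illustrative; UserType and SubjectIssuerDNPair
// import paths are assumptions.
import java.util.Arrays;
import java.util.Collections;

import datawave.security.authorization.DatawavePrincipal;
import datawave.security.authorization.DatawaveUser;
import datawave.security.authorization.DatawaveUser.UserType;
import datawave.security.authorization.SubjectIssuerDNPair;

public class TestPrincipalFactory {

    /** Builds a single-user DatawavePrincipal for the "other" caller used in the tests. */
    public static DatawavePrincipal otherUserPrincipal() {
        String[] auths = {"PUB"}; // placeholder authorizations
        DatawaveUser user = new DatawaveUser(SubjectIssuerDNPair.of("CN=Gal Some Other sogal, OU=acme", "CN=ca, OU=acme"),
                        UserType.USER, Arrays.asList(auths), null, null, 0L);
        return new DatawavePrincipal(Collections.singletonList(user));
    }
}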

Aggregations

MapReduceInfoResponseList (datawave.webservice.results.mr.MapReduceInfoResponseList): 14 usages
NotFoundQueryException (datawave.webservice.query.exception.NotFoundQueryException): 9 usages
MapReduceInfoResponse (datawave.webservice.results.mr.MapReduceInfoResponse): 9 usages
QueryException (datawave.webservice.query.exception.QueryException): 7 usages
IOException (java.io.IOException): 7 usages
HashMap (java.util.HashMap): 7 usages
DatawavePrincipal (datawave.security.authorization.DatawavePrincipal): 6 usages
NotFoundException (datawave.webservice.common.exception.NotFoundException): 6 usages
Produces (javax.ws.rs.Produces): 6 usages
GZIP (org.jboss.resteasy.annotations.GZIP): 6 usages
Test (org.junit.Test): 5 usages
DatawaveWebApplicationException (datawave.webservice.common.exception.DatawaveWebApplicationException): 4 usages
BadRequestQueryException (datawave.webservice.query.exception.BadRequestQueryException): 4 usages
UnauthorizedQueryException (datawave.webservice.query.exception.UnauthorizedQueryException): 4 usages
GenericResponse (datawave.webservice.result.GenericResponse): 3 usages
JobExecution (datawave.webservice.results.mr.JobExecution): 3 usages
Principal (java.security.Principal): 3 usages
ArrayList (java.util.ArrayList): 3 usages
PUT (javax.ws.rs.PUT): 3 usages
AccumuloException (org.apache.accumulo.core.client.AccumuloException): 3 usages