Search in sources:

Example 1 with ScanJob

Use of org.janusgraph.diskstorage.keycolumnvalue.scan.ScanJob in the project janusgraph by JanusGraph.

From the class CassandraScanJobIT, method testSimpleScan:

@Test
public void testSimpleScan() throws InterruptedException, ExecutionException, IOException, BackendException {
    final int keyCount = 1000;
    final int colCount = 40;
    String[][] data = KeyValueStoreUtil.generateData(keyCount, colCount);
    // Truncate every even-indexed key to half the columns so the scan sees
    // rows of two different widths.
    for (int k = 0; k < data.length; k += 2) {
        data[k] = Arrays.copyOf(data[k], colCount / 2);
    }
    log.debug("Loading values: " + keyCount + "x" + colCount);
    KeyColumnValueStoreManager manager = new CassandraThriftStoreManager(GraphDatabaseConfiguration.buildGraphConfiguration());
    KeyColumnValueStore edgeStore = manager.openDatabase("edgestore");
    StoreTransaction txn = manager.beginTransaction(StandardBaseTransactionConfig.of(TimestampProviders.MICRO));
    KeyColumnValueStoreUtil.loadValues(edgeStore, txn, data);
    // Commit is a no-op on Cassandra, but harmless.
    txn.commit();
    // Runner delegates each ScanJob to a Hadoop-based scan over the Thrift store.
    SimpleScanJobRunner runner = (job, jobConf, rootNSName) -> {
        try {
            return new CassandraHadoopScanRunner(job)
                    .scanJobConf(jobConf)
                    .scanJobConfRoot(rootNSName)
                    .partitionerOverride("org.apache.cassandra.dht.Murmur3Partitioner")
                    .run();
        } catch (ClassNotFoundException e) {
            throw new RuntimeException(e);
        }
    };
    SimpleScanJob.runBasicTests(keyCount, colCount, runner);
}
Also used : KeyColumnValueStore(org.janusgraph.diskstorage.keycolumnvalue.KeyColumnValueStore) JanusGraphHadoopConfiguration(org.janusgraph.hadoop.config.JanusGraphHadoopConfiguration) GraphDatabaseConfiguration(org.janusgraph.graphdb.configuration.GraphDatabaseConfiguration) CassandraThriftStoreManager(org.janusgraph.diskstorage.cassandra.thrift.CassandraThriftStoreManager) StoreTransaction(org.janusgraph.diskstorage.keycolumnvalue.StoreTransaction) KeyColumnValueStoreManager(org.janusgraph.diskstorage.keycolumnvalue.KeyColumnValueStoreManager) CassandraHadoopScanRunner(org.janusgraph.hadoop.scan.CassandraHadoopScanRunner) ScanJob(org.janusgraph.diskstorage.keycolumnvalue.scan.ScanJob) Test(org.junit.Test) JanusGraphBaseTest(org.janusgraph.graphdb.JanusGraphBaseTest)

Example 2 with ScanJob

Use of org.janusgraph.diskstorage.keycolumnvalue.scan.ScanJob in the project janusgraph by JanusGraph.

From the class CQLScanJobIT, method testSimpleScan:

@Test
public void testSimpleScan() throws InterruptedException, ExecutionException, IOException, BackendException {
    final int numKeys = 1000;
    final int numCols = 40;
    String[][] rows = KeyValueStoreUtil.generateData(numKeys, numCols);
    // Shrink every even-indexed key to half the columns so keys have
    // two distinct column counts.
    for (int idx = 0; idx < rows.length; idx += 2) {
        rows[idx] = Arrays.copyOf(rows[idx], numCols / 2);
    }
    log.debug("Loading values: " + numKeys + "x" + numCols);
    KeyColumnValueStoreManager storeManager = new CQLStoreManager(GraphDatabaseConfiguration.buildGraphConfiguration());
    KeyColumnValueStore edgeStore = storeManager.openDatabase("edgestore");
    StoreTransaction txn = storeManager.beginTransaction(StandardBaseTransactionConfig.of(TimestampProviders.MICRO));
    KeyColumnValueStoreUtil.loadValues(edgeStore, txn, rows);
    // Commit is a no-op on Cassandra, but harmless.
    txn.commit();
    // Each ScanJob is executed through a Hadoop-based scan over the CQL store.
    SimpleScanJobRunner runner = (job, jobConf, rootNSName) -> {
        try {
            return new CQLHadoopScanRunner(job)
                    .scanJobConf(jobConf)
                    .scanJobConfRoot(rootNSName)
                    .partitionerOverride("org.apache.cassandra.dht.Murmur3Partitioner")
                    .run();
        } catch (ClassNotFoundException e) {
            throw new RuntimeException(e);
        }
    };
    SimpleScanJob.runBasicTests(numKeys, numCols, runner);
}
Also used : KeyColumnValueStore(org.janusgraph.diskstorage.keycolumnvalue.KeyColumnValueStore) WriteConfiguration(org.janusgraph.diskstorage.configuration.WriteConfiguration) JanusGraphHadoopConfiguration(org.janusgraph.hadoop.config.JanusGraphHadoopConfiguration) Configuration(org.janusgraph.diskstorage.configuration.Configuration) GraphDatabaseConfiguration(org.janusgraph.graphdb.configuration.GraphDatabaseConfiguration) StoreTransaction(org.janusgraph.diskstorage.keycolumnvalue.StoreTransaction) CQLHadoopScanRunner(org.janusgraph.hadoop.scan.CQLHadoopScanRunner) CQLStoreManager(org.janusgraph.diskstorage.cql.CQLStoreManager) SimpleScanJobRunner(org.janusgraph.diskstorage.SimpleScanJobRunner) KeyColumnValueStoreManager(org.janusgraph.diskstorage.keycolumnvalue.KeyColumnValueStoreManager) SimpleScanJob(org.janusgraph.diskstorage.SimpleScanJob) ScanJob(org.janusgraph.diskstorage.keycolumnvalue.scan.ScanJob) Test(org.junit.jupiter.api.Test) JanusGraphBaseTest(org.janusgraph.graphdb.JanusGraphBaseTest)

Example 3 with ScanJob

Use of org.janusgraph.diskstorage.keycolumnvalue.scan.ScanJob in the project janusgraph by JanusGraph.

From the class KeyColumnValueStoreTest, method scanTestWithSimpleJob:

@Test
public void scanTestWithSimpleJob() throws Exception {
    final int keyCount = 1000;
    final int colCount = 40;
    String[][] data = KeyValueStoreUtil.generateData(keyCount, colCount);
    // Halve the column count of every even-indexed key so the scan covers
    // rows of unequal width.
    for (int k = 0; k < data.length; k += 2) {
        data[k] = Arrays.copyOf(data[k], colCount / 2);
    }
    log.debug("Loading values: " + keyCount + "x" + colCount);
    loadValues(data);
    clopen();
    StandardScanner scanner = new StandardScanner(manager);
    // Runner executes each ScanJob directly via the in-process scanner.
    SimpleScanJobRunner runner =
            (job, jobConf, rootNSName) -> runSimpleJob(scanner, job, jobConf);
    SimpleScanJob.runBasicTests(keyCount, colCount, runner);
}
Also used : StandardScanner(org.janusgraph.diskstorage.keycolumnvalue.scan.StandardScanner) Configuration(org.janusgraph.diskstorage.configuration.Configuration) ScanJob(org.janusgraph.diskstorage.keycolumnvalue.scan.ScanJob) Test(org.junit.jupiter.api.Test) JanusGraphBaseStoreFeaturesTest(org.janusgraph.JanusGraphBaseStoreFeaturesTest)

Example 4 with ScanJob

Use of org.janusgraph.diskstorage.keycolumnvalue.scan.ScanJob in the project janusgraph by JanusGraph.

From the class HadoopScanRunner, method runJob:

/**
 * Configures and synchronously runs a map-only MapReduce job and returns its counters.
 *
 * <p>The job emits no records: both map output and final output key/value types are
 * {@link NullWritable} and the output format is {@link NullOutputFormat}.
 *
 * @param hadoopConf  Hadoop configuration used to instantiate the job
 * @param inputFormat input format class supplying the splits/records to scan
 * @param jobName     human-readable job name shown in the MapReduce UI
 * @param mapperClass mapper implementation; also used to locate the job jar
 * @return the completed job's counters wrapped as a {@code CountersScanMetrics}
 * @throws IOException            if the job terminates abnormally, or on I/O failure
 * @throws InterruptedException   if the waiting thread is interrupted
 * @throws ClassNotFoundException if a job class cannot be resolved at submission
 */
public static ScanMetrics runJob(org.apache.hadoop.conf.Configuration hadoopConf, Class<? extends InputFormat> inputFormat, String jobName, Class<? extends Mapper> mapperClass) throws IOException, InterruptedException, ClassNotFoundException {
    Job job = Job.getInstance(hadoopConf);
    job.setJarByClass(mapperClass);
    job.setJobName(jobName);
    // Map-only job: zero reducers, and no key/value output is produced.
    job.setOutputKeyClass(NullWritable.class);
    job.setOutputValueClass(NullWritable.class);
    job.setMapOutputKeyClass(NullWritable.class);
    job.setMapOutputValueClass(NullWritable.class);
    job.setNumReduceTasks(0);
    job.setMapperClass(mapperClass);
    job.setOutputFormatClass(NullOutputFormat.class);
    job.setInputFormatClass(inputFormat);
    // Blocks until the job finishes; 'true' enables progress logging.
    boolean success = job.waitForCompletion(true);
    if (!success) {
        String failureMessage;
        try {
            // Job's status accessors can themselves throw; build the detailed
            // message best-effort and fall back to a generic one below.
            failureMessage = String.format("MapReduce JobID %s terminated abnormally: %s",
                    job.getJobID(), getJobFailureString(job));
        } catch (RuntimeException ignored) {
            // Deliberate best-effort: status was unreadable, report generically.
            failureMessage = "Job failed (unable to read job status programmatically -- see MapReduce logs for information)";
        }
        throw new IOException(failureMessage);
    }
    return new CountersScanMetrics(job.getCounters());
}
Also used : CountersScanMetrics(org.janusgraph.hadoop.CountersScanMetrics) IOException(java.io.IOException) VertexScanJob(org.janusgraph.graphdb.olap.VertexScanJob) Job(org.apache.hadoop.mapreduce.Job) ScanJob(org.janusgraph.diskstorage.keycolumnvalue.scan.ScanJob)

Aggregations

ScanJob (org.janusgraph.diskstorage.keycolumnvalue.scan.ScanJob)4 Configuration (org.janusgraph.diskstorage.configuration.Configuration)2 KeyColumnValueStore (org.janusgraph.diskstorage.keycolumnvalue.KeyColumnValueStore)2 KeyColumnValueStoreManager (org.janusgraph.diskstorage.keycolumnvalue.KeyColumnValueStoreManager)2 StoreTransaction (org.janusgraph.diskstorage.keycolumnvalue.StoreTransaction)2 JanusGraphBaseTest (org.janusgraph.graphdb.JanusGraphBaseTest)2 GraphDatabaseConfiguration (org.janusgraph.graphdb.configuration.GraphDatabaseConfiguration)2 JanusGraphHadoopConfiguration (org.janusgraph.hadoop.config.JanusGraphHadoopConfiguration)2 Test (org.junit.jupiter.api.Test)2 IOException (java.io.IOException)1 Job (org.apache.hadoop.mapreduce.Job)1 JanusGraphBaseStoreFeaturesTest (org.janusgraph.JanusGraphBaseStoreFeaturesTest)1 SimpleScanJob (org.janusgraph.diskstorage.SimpleScanJob)1 SimpleScanJobRunner (org.janusgraph.diskstorage.SimpleScanJobRunner)1 CassandraThriftStoreManager (org.janusgraph.diskstorage.cassandra.thrift.CassandraThriftStoreManager)1 WriteConfiguration (org.janusgraph.diskstorage.configuration.WriteConfiguration)1 CQLStoreManager (org.janusgraph.diskstorage.cql.CQLStoreManager)1 StandardScanner (org.janusgraph.diskstorage.keycolumnvalue.scan.StandardScanner)1 VertexScanJob (org.janusgraph.graphdb.olap.VertexScanJob)1 CountersScanMetrics (org.janusgraph.hadoop.CountersScanMetrics)1