
Example 1 with AnnotatedClass

Use of org.nd4j.shade.jackson.databind.introspect.AnnotatedClass in project deeplearning4j (by deeplearning4j).

From class TestCustomLayers, method testCustomOutputLayerMLN:

@Test
public void testCustomOutputLayerMLN() {
    //First: Ensure that the CustomOutputLayer class is registered
    ObjectMapper mapper = NeuralNetConfiguration.mapper();
    AnnotatedClass ac = AnnotatedClass.construct(Layer.class, mapper.getSerializationConfig().getAnnotationIntrospector(), null);
    Collection<NamedType> types = mapper.getSubtypeResolver().collectAndResolveSubtypes(ac, mapper.getSerializationConfig(), mapper.getSerializationConfig().getAnnotationIntrospector());
    Set<Class<?>> registeredSubtypes = new HashSet<>();
    boolean found = false;
    for (NamedType nt : types) {
        System.out.println(nt);
        //            registeredSubtypes.add(nt.getType());
        if (nt.getType() == CustomOutputLayer.class)
            found = true;
    }
    assertTrue("CustomOutputLayer: not registered with NeuralNetConfiguration mapper", found);
    //Second: let's create a MultiLayerConfiguration with one, and check that JSON and YAML config actually work...
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345).learningRate(0.1).list()
                    .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                    .layer(1, new CustomOutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(10).nOut(10).build())
                    .pretrain(false).backprop(true).build();
    String json = conf.toJson();
    String yaml = conf.toYaml();
    System.out.println(json);
    MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(json);
    assertEquals(conf, confFromJson);
    MultiLayerConfiguration confFromYaml = MultiLayerConfiguration.fromYaml(yaml);
    assertEquals(conf, confFromYaml);
    //Third: check initialization
    Nd4j.getRandom().setSeed(12345);
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    assertTrue(net.getLayer(1) instanceof CustomOutputLayerImpl);
    //Fourth: compare to an equivalent standard output layer (should be identical)
    MultiLayerConfiguration conf2 = new NeuralNetConfiguration.Builder().seed(12345).learningRate(0.1).weightInit(WeightInit.XAVIER).list()
                    .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                    .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(10).nOut(10).build())
                    .pretrain(false).backprop(true).build();
    Nd4j.getRandom().setSeed(12345);
    MultiLayerNetwork net2 = new MultiLayerNetwork(conf2);
    net2.init();
    assertEquals(net2.params(), net.params());
    INDArray testFeatures = Nd4j.rand(1, 10);
    INDArray testLabels = Nd4j.zeros(1, 10);
    testLabels.putScalar(0, 3, 1.0);
    DataSet ds = new DataSet(testFeatures, testLabels);
    assertEquals(net2.output(testFeatures), net.output(testFeatures));
    assertEquals(net2.score(ds), net.score(ds), 1e-6);
}
Also used : OutputLayer(org.deeplearning4j.nn.conf.layers.OutputLayer) CustomOutputLayer(org.deeplearning4j.nn.layers.custom.testclasses.CustomOutputLayer) DataSet(org.nd4j.linalg.dataset.DataSet) NamedType(org.nd4j.shade.jackson.databind.jsontype.NamedType) CustomOutputLayerImpl(org.deeplearning4j.nn.layers.custom.testclasses.CustomOutputLayerImpl) NeuralNetConfiguration(org.deeplearning4j.nn.conf.NeuralNetConfiguration) MultiLayerConfiguration(org.deeplearning4j.nn.conf.MultiLayerConfiguration) INDArray(org.nd4j.linalg.api.ndarray.INDArray) AnnotatedClass(org.nd4j.shade.jackson.databind.introspect.AnnotatedClass) MultiLayerNetwork(org.deeplearning4j.nn.multilayer.MultiLayerNetwork) ObjectMapper(org.nd4j.shade.jackson.databind.ObjectMapper) HashSet(java.util.HashSet) Test(org.junit.Test)
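
The registration check at the top of this test (and of every example below) follows the same pattern: build an AnnotatedClass for the base type, ask the mapper's SubtypeResolver for all registered subtypes, and scan for the expected class. As a rough illustration only (not part of the DL4J sources; the class and method names are placeholders), it could be factored into a helper that reuses exactly the calls shown above:

import java.util.Collection;

import org.nd4j.shade.jackson.databind.ObjectMapper;
import org.nd4j.shade.jackson.databind.introspect.AnnotatedClass;
import org.nd4j.shade.jackson.databind.jsontype.NamedType;

public final class SubtypeChecks {

    private SubtypeChecks() {
    }

    /** Returns true if {@code subtype} is registered with {@code mapper} as a JSON subtype of {@code baseType}. */
    public static boolean isSubtypeRegistered(ObjectMapper mapper, Class<?> baseType, Class<?> subtype) {
        //Build the annotated view of the base type, as in the test above
        AnnotatedClass ac = AnnotatedClass.construct(baseType,
                        mapper.getSerializationConfig().getAnnotationIntrospector(), null);
        //Collect every subtype the mapper currently knows about for that base type
        Collection<NamedType> types = mapper.getSubtypeResolver().collectAndResolveSubtypes(ac,
                        mapper.getSerializationConfig(), mapper.getSerializationConfig().getAnnotationIntrospector());
        for (NamedType nt : types) {
            if (nt.getType() == subtype) {
                return true;
            }
        }
        return false;
    }
}

With such a helper, the first assertion in the test above would reduce to a single line, e.g. assertTrue(SubtypeChecks.isSubtypeRegistered(NeuralNetConfiguration.mapper(), Layer.class, CustomOutputLayer.class)).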

Example 2 with AnnotatedClass

Use of org.nd4j.shade.jackson.databind.introspect.AnnotatedClass in project deeplearning4j (by deeplearning4j).

From class TestCustomLayers, method testJsonMultiLayerNetwork:

@Test
public void testJsonMultiLayerNetwork() {
    //First: Ensure that the CustomLayer class is registered
    ObjectMapper mapper = NeuralNetConfiguration.mapper();
    AnnotatedClass ac = AnnotatedClass.construct(Layer.class, mapper.getSerializationConfig().getAnnotationIntrospector(), null);
    Collection<NamedType> types = mapper.getSubtypeResolver().collectAndResolveSubtypes(ac, mapper.getSerializationConfig(), mapper.getSerializationConfig().getAnnotationIntrospector());
    Set<Class<?>> registeredSubtypes = new HashSet<>();
    boolean found = false;
    for (NamedType nt : types) {
        System.out.println(nt);
        //            registeredSubtypes.add(nt.getType());
        if (nt.getType() == CustomLayer.class)
            found = true;
    }
    assertTrue("CustomLayer: not registered with NeuralNetConfiguration mapper", found);
    //Second: let's create a MultiLayerConfiguration with one, and check that JSON and YAML config actually work...
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().learningRate(0.1).list()
                    .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                    .layer(1, new CustomLayer(3.14159))
                    .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(10).nOut(10).build())
                    .pretrain(false).backprop(true).build();
    String json = conf.toJson();
    String yaml = conf.toYaml();
    System.out.println(json);
    MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(json);
    assertEquals(conf, confFromJson);
    MultiLayerConfiguration confFromYaml = MultiLayerConfiguration.fromYaml(yaml);
    assertEquals(conf, confFromYaml);
}
Also used : OutputLayer(org.deeplearning4j.nn.conf.layers.OutputLayer) CustomOutputLayer(org.deeplearning4j.nn.layers.custom.testclasses.CustomOutputLayer) CustomLayer(org.deeplearning4j.nn.layers.custom.testclasses.CustomLayer) NamedType(org.nd4j.shade.jackson.databind.jsontype.NamedType) MultiLayerConfiguration(org.deeplearning4j.nn.conf.MultiLayerConfiguration) DenseLayer(org.deeplearning4j.nn.conf.layers.DenseLayer) AnnotatedClass(org.nd4j.shade.jackson.databind.introspect.AnnotatedClass) ObjectMapper(org.nd4j.shade.jackson.databind.ObjectMapper) HashSet(java.util.HashSet) Test(org.junit.Test)
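
Every test in this listing finishes with the same JSON/YAML round-trip assertions. As a small sketch (again, not part of the DL4J sources; the class name is a placeholder), those lines could be pulled into a shared test utility built only from the calls already shown above:

import static org.junit.Assert.assertEquals;

import org.deeplearning4j.nn.conf.MultiLayerConfiguration;

public final class ConfigRoundTrip {

    private ConfigRoundTrip() {
    }

    /** Serializes the configuration to JSON and YAML and asserts that both deserialize back to an equal object. */
    public static void assertRoundTrips(MultiLayerConfiguration conf) {
        String json = conf.toJson();
        String yaml = conf.toYaml();
        assertEquals(conf, MultiLayerConfiguration.fromJson(json));
        assertEquals(conf, MultiLayerConfiguration.fromYaml(yaml));
    }
}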

Example 3 with AnnotatedClass

Use of org.nd4j.shade.jackson.databind.introspect.AnnotatedClass in project deeplearning4j (by deeplearning4j).

From class CustomPreprocessorTest, method testCustomPreprocessor:

@Test
public void testCustomPreprocessor() {
    //First: Ensure that the MyCustomPreprocessor class is registered
    ObjectMapper mapper = NeuralNetConfiguration.mapper();
    AnnotatedClass ac = AnnotatedClass.construct(InputPreProcessor.class, mapper.getSerializationConfig().getAnnotationIntrospector(), null);
    Collection<NamedType> types = mapper.getSubtypeResolver().collectAndResolveSubtypes(ac, mapper.getSerializationConfig(), mapper.getSerializationConfig().getAnnotationIntrospector());
    boolean found = false;
    for (NamedType nt : types) {
        //            System.out.println(nt);
        if (nt.getType() == MyCustomPreprocessor.class) {
            found = true;
            break;
        }
    }
    assertTrue("MyCustomPreprocessor: not registered with NeuralNetConfiguration mapper", found);
    //Second: let's create a MultiLayerConfiguration with one, and check that JSON and YAML config actually work...
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().learningRate(0.1).list()
                    .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                    .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(10).nOut(10).build())
                    .inputPreProcessor(0, new MyCustomPreprocessor())
                    .pretrain(false).backprop(true).build();
    String json = conf.toJson();
    String yaml = conf.toYaml();
    System.out.println(json);
    MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(json);
    assertEquals(conf, confFromJson);
    MultiLayerConfiguration confFromYaml = MultiLayerConfiguration.fromYaml(yaml);
    assertEquals(conf, confFromYaml);
    assertTrue(confFromJson.getInputPreProcess(0) instanceof MyCustomPreprocessor);
}
Also used : MultiLayerConfiguration(org.deeplearning4j.nn.conf.MultiLayerConfiguration) DenseLayer(org.deeplearning4j.nn.conf.layers.DenseLayer) AnnotatedClass(org.nd4j.shade.jackson.databind.introspect.AnnotatedClass) NamedType(org.nd4j.shade.jackson.databind.jsontype.NamedType) MyCustomPreprocessor(org.deeplearning4j.nn.conf.preprocessor.custom.MyCustomPreprocessor) ObjectMapper(org.nd4j.shade.jackson.databind.ObjectMapper) Test(org.junit.Test)

Example 4 with AnnotatedClass

Use of org.nd4j.shade.jackson.databind.introspect.AnnotatedClass in project deeplearning4j (by deeplearning4j).

From class NeuralNetConfiguration, method registerSubtypes:

private static synchronized void registerSubtypes(ObjectMapper mapper) {
    //Register concrete subtypes for JSON serialization
    List<Class<?>> classes = Arrays.<Class<?>>asList(InputPreProcessor.class, ILossFunction.class, IActivation.class, Layer.class, GraphVertex.class, ReconstructionDistribution.class);
    List<String> classNames = new ArrayList<>(6);
    for (Class<?> c : classes) classNames.add(c.getName());
    // First: scan the classpath and find all instances of the 'baseClasses' classes
    if (subtypesClassCache == null) {
        //Check system property:
        String prop = System.getProperty(CUSTOM_FUNCTIONALITY);
        if (prop != null && !Boolean.parseBoolean(prop)) {
            subtypesClassCache = Collections.emptySet();
        } else {
            List<Class<?>> interfaces = Arrays.<Class<?>>asList(InputPreProcessor.class, ILossFunction.class, IActivation.class, ReconstructionDistribution.class);
            List<Class<?>> classesList = Arrays.<Class<?>>asList(Layer.class, GraphVertex.class);
            Collection<URL> urls = ClasspathHelper.forClassLoader();
            List<URL> scanUrls = new ArrayList<>();
            for (URL u : urls) {
                String path = u.getPath();
                if (!path.matches(".*/jre/lib/.*jar")) {
                    //Skip JRE/JDK JARs
                    scanUrls.add(u);
                }
            }
            Reflections reflections = new Reflections(new ConfigurationBuilder()
                            .filterInputsBy(new FilterBuilder()
                                            .exclude("^(?!.*\\.class$).*$") //Consider only .class files (to avoid debug messages etc. on .dlls, etc.)
                                            .exclude("^org.nd4j.*").exclude("^org.datavec.*")
                                            .exclude("^org.bytedeco.*") //JavaCPP
                                            .exclude("^com.fasterxml.*") //Jackson
                                            .exclude("^org.apache.*") //Apache commons, Spark, log4j etc.
                                            .exclude("^org.projectlombok.*").exclude("^com.twelvemonkeys.*")
                                            .exclude("^org.joda.*").exclude("^org.slf4j.*")
                                            .exclude("^com.google.*").exclude("^org.reflections.*")
                                            .exclude("^ch.qos.*")) //Logback
                            .addUrls(scanUrls).setScanners(new DL4JSubTypesScanner(interfaces, classesList)));
            org.reflections.Store store = reflections.getStore();
            Iterable<String> subtypesByName = store.getAll(DL4JSubTypesScanner.class.getSimpleName(), classNames);
            Set<? extends Class<?>> subtypeClasses = Sets.newHashSet(ReflectionUtils.forNames(subtypesByName));
            subtypesClassCache = new HashSet<>();
            for (Class<?> c : subtypeClasses) {
                if (Modifier.isAbstract(c.getModifiers()) || Modifier.isInterface(c.getModifiers())) {
                    //log.info("Skipping abstract/interface: {}",c);
                    continue;
                }
                subtypesClassCache.add(c);
            }
        }
    }
    //Second: get all currently registered subtypes for this mapper
    Set<Class<?>> registeredSubtypes = new HashSet<>();
    for (Class<?> c : classes) {
        AnnotatedClass ac = AnnotatedClass.construct(c, mapper.getSerializationConfig().getAnnotationIntrospector(), null);
        Collection<NamedType> types = mapper.getSubtypeResolver().collectAndResolveSubtypes(ac, mapper.getSerializationConfig(), mapper.getSerializationConfig().getAnnotationIntrospector());
        for (NamedType nt : types) {
            registeredSubtypes.add(nt.getType());
        }
    }
    //Third: register all _concrete_ subtypes that are not already registered
    List<NamedType> toRegister = new ArrayList<>();
    for (Class<?> c : subtypesClassCache) {
        //Check if it's concrete or abstract...
        if (Modifier.isAbstract(c.getModifiers()) || Modifier.isInterface(c.getModifiers())) {
            //log.info("Skipping abstract/interface: {}",c);
            continue;
        }
        if (!registeredSubtypes.contains(c)) {
            String name;
            if (ClassUtils.isInnerClass(c)) {
                Class<?> c2 = c.getDeclaringClass();
                name = c2.getSimpleName() + "$" + c.getSimpleName();
            } else {
                name = c.getSimpleName();
            }
            toRegister.add(new NamedType(c, name));
            if (log.isDebugEnabled()) {
                for (Class<?> baseClass : classes) {
                    if (baseClass.isAssignableFrom(c)) {
                        log.debug("Registering class for JSON serialization: {} as subtype of {}", c.getName(), baseClass.getName());
                        break;
                    }
                }
            }
        }
    }
    mapper.registerSubtypes(toRegister.toArray(new NamedType[toRegister.size()]));
}
Also used : ConfigurationBuilder(org.reflections.util.ConfigurationBuilder) NamedType(org.nd4j.shade.jackson.databind.jsontype.NamedType) URL(java.net.URL) AnnotatedClass(org.nd4j.shade.jackson.databind.introspect.AnnotatedClass) FilterBuilder(org.reflections.util.FilterBuilder) DL4JSubTypesScanner(org.deeplearning4j.util.reflections.DL4JSubTypesScanner) Reflections(org.reflections.Reflections)
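
The method above automates, via a classpath scan, what can also be done by hand for a single class: check whether the subtype is already known to the mapper and, if not, register it under its simple name. A minimal sketch of that manual path, assuming a hypothetical custom loss function class MyCustomLoss (not part of the DL4J sources) and the same imports as the method above:

ObjectMapper mapper = NeuralNetConfiguration.mapper();

//Collect the subtypes currently registered for ILossFunction (same calls as in the method above)
AnnotatedClass ac = AnnotatedClass.construct(ILossFunction.class,
                mapper.getSerializationConfig().getAnnotationIntrospector(), null);
Collection<NamedType> registered = mapper.getSubtypeResolver().collectAndResolveSubtypes(ac,
                mapper.getSerializationConfig(), mapper.getSerializationConfig().getAnnotationIntrospector());

//Register MyCustomLoss (placeholder) under its simple name if it is not already present
boolean present = false;
for (NamedType nt : registered) {
    if (nt.getType() == MyCustomLoss.class) {
        present = true;
        break;
    }
}
if (!present) {
    mapper.registerSubtypes(new NamedType(MyCustomLoss.class, MyCustomLoss.class.getSimpleName()));
}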

Example 5 with AnnotatedClass

Use of org.nd4j.shade.jackson.databind.introspect.AnnotatedClass in project deeplearning4j (by deeplearning4j).

From class TestCustomActivation, method testCustomActivationFn:

@Test
public void testCustomActivationFn() {
    //First: Ensure that the CustomActivation class is registered
    ObjectMapper mapper = NeuralNetConfiguration.mapper();
    AnnotatedClass ac = AnnotatedClass.construct(IActivation.class, mapper.getSerializationConfig().getAnnotationIntrospector(), null);
    Collection<NamedType> types = mapper.getSubtypeResolver().collectAndResolveSubtypes(ac, mapper.getSerializationConfig(), mapper.getSerializationConfig().getAnnotationIntrospector());
    boolean found = false;
    for (NamedType nt : types) {
        System.out.println(nt);
        if (nt.getType() == CustomActivation.class)
            found = true;
    }
    assertTrue("CustomActivation: not registered with NeuralNetConfiguration mapper", found);
    //Second: let's create a MultiLayerConfiguration with one, and check that JSON and YAML config actually work...
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().learningRate(0.1).list()
                    .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).activation(new CustomActivation()).build())
                    .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(10).nOut(10).build())
                    .pretrain(false).backprop(true).build();
    String json = conf.toJson();
    String yaml = conf.toYaml();
    System.out.println(json);
    MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(json);
    assertEquals(conf, confFromJson);
    MultiLayerConfiguration confFromYaml = MultiLayerConfiguration.fromYaml(yaml);
    assertEquals(conf, confFromYaml);
}
Also used : OutputLayer(org.deeplearning4j.nn.conf.layers.OutputLayer) NamedType(org.nd4j.shade.jackson.databind.jsontype.NamedType) NeuralNetConfiguration(org.deeplearning4j.nn.conf.NeuralNetConfiguration) MultiLayerConfiguration(org.deeplearning4j.nn.conf.MultiLayerConfiguration) DenseLayer(org.deeplearning4j.nn.conf.layers.DenseLayer) AnnotatedClass(org.nd4j.shade.jackson.databind.introspect.AnnotatedClass) ObjectMapper(org.nd4j.shade.jackson.databind.ObjectMapper) CustomActivation(org.deeplearning4j.nn.layers.custom.testclasses.CustomActivation) Test(org.junit.Test)
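
With the hypothetical SubtypeChecks helper sketched after Example 1, the registration check at the start of this test would collapse to a single assertion, for instance:

assertTrue("CustomActivation: not registered with NeuralNetConfiguration mapper",
                SubtypeChecks.isSubtypeRegistered(NeuralNetConfiguration.mapper(), IActivation.class, CustomActivation.class));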

Aggregations

AnnotatedClass (org.nd4j.shade.jackson.databind.introspect.AnnotatedClass): 5
NamedType (org.nd4j.shade.jackson.databind.jsontype.NamedType): 5
MultiLayerConfiguration (org.deeplearning4j.nn.conf.MultiLayerConfiguration): 4
Test (org.junit.Test): 4
ObjectMapper (org.nd4j.shade.jackson.databind.ObjectMapper): 4
DenseLayer (org.deeplearning4j.nn.conf.layers.DenseLayer): 3
OutputLayer (org.deeplearning4j.nn.conf.layers.OutputLayer): 3
HashSet (java.util.HashSet): 2
NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration): 2
CustomOutputLayer (org.deeplearning4j.nn.layers.custom.testclasses.CustomOutputLayer): 2
URL (java.net.URL): 1
MyCustomPreprocessor (org.deeplearning4j.nn.conf.preprocessor.custom.MyCustomPreprocessor): 1
CustomActivation (org.deeplearning4j.nn.layers.custom.testclasses.CustomActivation): 1
CustomLayer (org.deeplearning4j.nn.layers.custom.testclasses.CustomLayer): 1
CustomOutputLayerImpl (org.deeplearning4j.nn.layers.custom.testclasses.CustomOutputLayerImpl): 1
MultiLayerNetwork (org.deeplearning4j.nn.multilayer.MultiLayerNetwork): 1
DL4JSubTypesScanner (org.deeplearning4j.util.reflections.DL4JSubTypesScanner): 1
INDArray (org.nd4j.linalg.api.ndarray.INDArray): 1
DataSet (org.nd4j.linalg.dataset.DataSet): 1
Reflections (org.reflections.Reflections): 1