use of org.nd4j.shade.jackson.databind.jsontype.NamedType in project deeplearning4j by deeplearning4j.
the class CustomPreprocessorTest method testCustomPreprocessor.
@Test
public void testCustomPreprocessor() {
//First: Ensure that the MyCustomPreprocessor class is registered
ObjectMapper mapper = NeuralNetConfiguration.mapper();
AnnotatedClass ac = AnnotatedClass.construct(InputPreProcessor.class, mapper.getSerializationConfig().getAnnotationIntrospector(), null);
Collection<NamedType> types = mapper.getSubtypeResolver().collectAndResolveSubtypes(ac, mapper.getSerializationConfig(), mapper.getSerializationConfig().getAnnotationIntrospector());
boolean found = false;
for (NamedType nt : types) {
// System.out.println(nt);
if (nt.getType() == MyCustomPreprocessor.class) {
found = true;
break;
}
}
assertTrue("MyCustomPreprocessor: not registered with NeuralNetConfiguration mapper", found);
//Second: let's create a MultiLayerConfiguration with one, and check that the JSON and YAML configuration actually works...
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().learningRate(0.1).list()
    .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
    .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(10).nOut(10).build())
    .inputPreProcessor(0, new MyCustomPreprocessor())
    .pretrain(false).backprop(true).build();
String json = conf.toJson();
String yaml = conf.toYaml();
System.out.println(json);
MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(json);
assertEquals(conf, confFromJson);
MultiLayerConfiguration confFromYaml = MultiLayerConfiguration.fromYaml(yaml);
assertEquals(conf, confFromYaml);
assertTrue(confFromJson.getInputPreProcess(0) instanceof MyCustomPreprocessor);
}
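This test presupposes a MyCustomPreprocessor class on the test classpath. A minimal identity preprocessor along the following lines would satisfy it; this is a sketch against the 0.x-era InputPreProcessor interface (later releases add further methods, e.g. mask handling and workspace arguments), and the value-based equals is what allows the assertEquals comparisons to pass after deserialization:

import org.deeplearning4j.nn.conf.InputPreProcessor;
import org.deeplearning4j.nn.conf.inputs.InputType;
import org.nd4j.linalg.api.ndarray.INDArray;

//Identity preprocessor: passes activations and epsilons through unchanged
public class MyCustomPreprocessor implements InputPreProcessor {

    @Override
    public INDArray preProcess(INDArray input, int miniBatchSize) {
        return input;
    }

    @Override
    public INDArray backprop(INDArray output, int miniBatchSize) {
        return output;
    }

    @Override
    public InputPreProcessor clone() {
        return new MyCustomPreprocessor();
    }

    @Override
    public InputType getOutputType(InputType inputType) {
        return inputType;
    }

    //Stateless, so any two instances are equal - required for conf.equals(confFromJson)
    @Override
    public boolean equals(Object o) {
        return o instanceof MyCustomPreprocessor;
    }

    @Override
    public int hashCode() {
        return getClass().hashCode();
    }
}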
use of org.nd4j.shade.jackson.databind.jsontype.NamedType in project deeplearning4j by deeplearning4j.
the class NeuralNetConfiguration method registerSubtypes.
private static synchronized void registerSubtypes(ObjectMapper mapper) {
//Register concrete subtypes for JSON serialization
List<Class<?>> classes = Arrays.<Class<?>>asList(InputPreProcessor.class, ILossFunction.class, IActivation.class, Layer.class, GraphVertex.class, ReconstructionDistribution.class);
List<String> classNames = new ArrayList<>(6);
for (Class<?> c : classes) classNames.add(c.getName());
// First: scan the classpath and find all instances of the 'baseClasses' classes
if (subtypesClassCache == null) {
//Check system property:
String prop = System.getProperty(CUSTOM_FUNCTIONALITY);
if (prop != null && !Boolean.parseBoolean(prop)) {
subtypesClassCache = Collections.emptySet();
} else {
List<Class<?>> interfaces = Arrays.<Class<?>>asList(InputPreProcessor.class, ILossFunction.class, IActivation.class, ReconstructionDistribution.class);
List<Class<?>> classesList = Arrays.<Class<?>>asList(Layer.class, GraphVertex.class);
Collection<URL> urls = ClasspathHelper.forClassLoader();
List<URL> scanUrls = new ArrayList<>();
for (URL u : urls) {
String path = u.getPath();
if (!path.matches(".*/jre/lib/.*jar")) {
//Skip JRE/JDK JARs
scanUrls.add(u);
}
}
Reflections reflections = new Reflections(new ConfigurationBuilder()
    .filterInputsBy(new FilterBuilder()
        .exclude("^(?!.*\\.class$).*$") //Consider only .class files (to avoid debug messages etc. on .dlls, etc.)
        .exclude("^org.nd4j.*")
        .exclude("^org.datavec.*")
        .exclude("^org.bytedeco.*") //JavaCPP
        .exclude("^com.fasterxml.*") //Jackson
        .exclude("^org.apache.*") //Apache commons, Spark, log4j etc.
        .exclude("^org.projectlombok.*")
        .exclude("^com.twelvemonkeys.*")
        .exclude("^org.joda.*")
        .exclude("^org.slf4j.*")
        .exclude("^com.google.*")
        .exclude("^org.reflections.*")
        .exclude("^ch.qos.*")) //Logback
    .addUrls(scanUrls)
    .setScanners(new DL4JSubTypesScanner(interfaces, classesList)));
org.reflections.Store store = reflections.getStore();
Iterable<String> subtypesByName = store.getAll(DL4JSubTypesScanner.class.getSimpleName(), classNames);
Set<? extends Class<?>> subtypeClasses = Sets.newHashSet(ReflectionUtils.forNames(subtypesByName));
subtypesClassCache = new HashSet<>();
for (Class<?> c : subtypeClasses) {
if (Modifier.isAbstract(c.getModifiers()) || Modifier.isInterface(c.getModifiers())) {
//log.info("Skipping abstract/interface: {}",c);
continue;
}
subtypesClassCache.add(c);
}
}
}
//Second: get all currently registered subtypes for this mapper
Set<Class<?>> registeredSubtypes = new HashSet<>();
for (Class<?> c : classes) {
AnnotatedClass ac = AnnotatedClass.construct(c, mapper.getSerializationConfig().getAnnotationIntrospector(), null);
Collection<NamedType> types = mapper.getSubtypeResolver().collectAndResolveSubtypes(ac, mapper.getSerializationConfig(), mapper.getSerializationConfig().getAnnotationIntrospector());
for (NamedType nt : types) {
registeredSubtypes.add(nt.getType());
}
}
//Third: register all _concrete_ subtypes that are not already registered
List<NamedType> toRegister = new ArrayList<>();
for (Class<?> c : subtypesClassCache) {
//Check if it's concrete or abstract...
if (Modifier.isAbstract(c.getModifiers()) || Modifier.isInterface(c.getModifiers())) {
//log.info("Skipping abstract/interface: {}",c);
continue;
}
if (!registeredSubtypes.contains(c)) {
String name;
if (ClassUtils.isInnerClass(c)) {
Class<?> c2 = c.getDeclaringClass();
name = c2.getSimpleName() + "$" + c.getSimpleName();
} else {
name = c.getSimpleName();
}
toRegister.add(new NamedType(c, name));
if (log.isDebugEnabled()) {
for (Class<?> baseClass : classes) {
if (baseClass.isAssignableFrom(c)) {
log.debug("Registering class for JSON serialization: {} as subtype of {}", c.getName(), baseClass.getName());
break;
}
}
}
}
}
mapper.registerSubtypes(toRegister.toArray(new NamedType[toRegister.size()]));
}
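The classpath scan above is just automation: once the candidate classes have been found, registration reduces to the same ObjectMapper.registerSubtypes call any Jackson user would make. A minimal sketch of doing it by hand with the shaded Jackson API (the class being registered is the illustrative MyCustomPreprocessor from above, not a DL4J library class):

import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.nd4j.shade.jackson.databind.ObjectMapper;
import org.nd4j.shade.jackson.databind.jsontype.NamedType;

public class ManualSubtypeRegistration {
    public static void register() {
        ObjectMapper mapper = NeuralNetConfiguration.mapper();
        //Register the subtype under the simple name that appears as the JSON type label
        mapper.registerSubtypes(new NamedType(MyCustomPreprocessor.class, "MyCustomPreprocessor"));
    }
}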
use of org.nd4j.shade.jackson.databind.jsontype.NamedType in project deeplearning4j by deeplearning4j.
the class TestCustomLayers method testCustomOutputLayerMLN.
@Test
public void testCustomOutputLayerMLN() {
//First: Ensure that the CustomOutputLayer class is registered
ObjectMapper mapper = NeuralNetConfiguration.mapper();
AnnotatedClass ac = AnnotatedClass.construct(Layer.class, mapper.getSerializationConfig().getAnnotationIntrospector(), null);
Collection<NamedType> types = mapper.getSubtypeResolver().collectAndResolveSubtypes(ac, mapper.getSerializationConfig(), mapper.getSerializationConfig().getAnnotationIntrospector());
Set<Class<?>> registeredSubtypes = new HashSet<>();
boolean found = false;
for (NamedType nt : types) {
System.out.println(nt);
// registeredSubtypes.add(nt.getType());
if (nt.getType() == CustomOutputLayer.class)
found = true;
}
assertTrue("CustomOutputLayer: not registered with NeuralNetConfiguration mapper", found);
//Second: let's create a MultiLayerConfiguration with one, and check that the JSON and YAML configuration actually works...
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345).learningRate(0.1).list()
    .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
    .layer(1, new CustomOutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(10).nOut(10).build())
    .pretrain(false).backprop(true).build();
String json = conf.toJson();
String yaml = conf.toYaml();
System.out.println(json);
MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(json);
assertEquals(conf, confFromJson);
MultiLayerConfiguration confFromYaml = MultiLayerConfiguration.fromYaml(yaml);
assertEquals(conf, confFromYaml);
//Third: check initialization
Nd4j.getRandom().setSeed(12345);
MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
assertTrue(net.getLayer(1) instanceof CustomOutputLayerImpl);
//Fourth: compare to an equivalent standard output layer (should be identical)
MultiLayerConfiguration conf2 = new NeuralNetConfiguration.Builder().seed(12345).learningRate(0.1).weightInit(WeightInit.XAVIER).list()
    .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
    .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(10).nOut(10).build())
    .pretrain(false).backprop(true).build();
Nd4j.getRandom().setSeed(12345);
MultiLayerNetwork net2 = new MultiLayerNetwork(conf2);
net2.init();
assertEquals(net2.params(), net.params());
INDArray testFeatures = Nd4j.rand(1, 10);
INDArray testLabels = Nd4j.zeros(1, 10);
testLabels.putScalar(0, 3, 1.0);
DataSet ds = new DataSet(testFeatures, testLabels);
assertEquals(net2.output(testFeatures), net.output(testFeatures));
assertEquals(net2.score(ds), net.score(ds), 1e-6);
}
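CustomOutputLayer (the configuration class) and CustomOutputLayerImpl (the runtime layer) are test fixtures, not library classes. One plausible shape for the configuration side, sketched against the 0.x layer API - the exact instantiate signature and the initializer plumbing vary between releases, so treat this as an outline rather than the actual fixture:

import java.util.Collection;
import org.deeplearning4j.nn.api.Layer;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.BaseOutputLayer;
import org.deeplearning4j.optimize.api.IterationListener;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class CustomOutputLayer extends BaseOutputLayer {

    protected CustomOutputLayer(Builder builder) {
        super(builder);
    }

    @Override
    public Layer instantiate(NeuralNetConfiguration conf, Collection<IterationListener> listeners,
                             int layerIndex, INDArray layerParamsView, boolean initializeParams) {
        //Create the runtime layer and wire up its parameter view, mirroring what OutputLayer does
        CustomOutputLayerImpl ret = new CustomOutputLayerImpl(conf);
        ret.setListeners(listeners);
        ret.setIndex(layerIndex);
        ret.setParamsViewArray(layerParamsView);
        ret.setParamTable(initializer().init(conf, layerParamsView, initializeParams));
        ret.setConf(conf);
        return ret;
    }

    public static class Builder extends BaseOutputLayer.Builder<Builder> {
        public Builder(LossFunctions.LossFunction lossFunction) {
            super.lossFunction(lossFunction);
        }

        @Override
        public CustomOutputLayer build() {
            return new CustomOutputLayer(this);
        }
    }
}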
use of org.nd4j.shade.jackson.databind.jsontype.NamedType in project deeplearning4j by deeplearning4j.
the class TestCustomLayers method testJsonMultiLayerNetwork.
@Test
public void testJsonMultiLayerNetwork() {
//First: Ensure that the CustomLayer class is registered
ObjectMapper mapper = NeuralNetConfiguration.mapper();
AnnotatedClass ac = AnnotatedClass.construct(Layer.class, mapper.getSerializationConfig().getAnnotationIntrospector(), null);
Collection<NamedType> types = mapper.getSubtypeResolver().collectAndResolveSubtypes(ac, mapper.getSerializationConfig(), mapper.getSerializationConfig().getAnnotationIntrospector());
Set<Class<?>> registeredSubtypes = new HashSet<>();
boolean found = false;
for (NamedType nt : types) {
System.out.println(nt);
// registeredSubtypes.add(nt.getType());
if (nt.getType() == CustomLayer.class)
found = true;
}
assertTrue("CustomLayer: not registered with NeuralNetConfiguration mapper", found);
//Second: let's create a MultiLayerConfiguration with one, and check that the JSON and YAML configuration actually works...
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().learningRate(0.1).list()
    .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
    .layer(1, new CustomLayer(3.14159))
    .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(10).nOut(10).build())
    .pretrain(false).backprop(true).build();
String json = conf.toJson();
String yaml = conf.toYaml();
System.out.println(json);
MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(json);
assertEquals(conf, confFromJson);
MultiLayerConfiguration confFromYaml = MultiLayerConfiguration.fromYaml(yaml);
assertEquals(conf, confFromYaml);
}
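The round trip only succeeds because the custom layer's scalar hyperparameter (3.14159) is a bean-style property and its equality is value-based: Jackson needs a no-arg constructor (or an annotated creator) plus a getter/setter to rebuild the object, and assertEquals needs equals/hashCode over the field. That requirement is generic Jackson behavior, visible in isolation with the shaded mapper - the POJO below is illustrative, not a DL4J class:

import org.nd4j.shade.jackson.databind.ObjectMapper;

public class RoundTripDemo {

    //Illustrative POJO standing in for a custom layer's serializable state
    public static class MyBean {
        private double someCustomParameter;

        public MyBean() { } //no-arg constructor for Jackson

        public MyBean(double v) { this.someCustomParameter = v; }

        public double getSomeCustomParameter() { return someCustomParameter; }

        public void setSomeCustomParameter(double v) { this.someCustomParameter = v; }

        @Override
        public boolean equals(Object o) {
            return o instanceof MyBean && ((MyBean) o).someCustomParameter == someCustomParameter;
        }

        @Override
        public int hashCode() {
            return Double.hashCode(someCustomParameter);
        }
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        MyBean original = new MyBean(3.14159);
        String json = mapper.writeValueAsString(original); //{"someCustomParameter":3.14159}
        MyBean restored = mapper.readValue(json, MyBean.class);
        System.out.println(original.equals(restored)); //true - value-based equality
    }
}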
use of org.nd4j.shade.jackson.databind.jsontype.NamedType in project deeplearning4j by deeplearning4j.
the class TestCustomActivation method testCustomActivationFn.
@Test
public void testCustomActivationFn() {
//First: Ensure that the CustomActivation class is registered
ObjectMapper mapper = NeuralNetConfiguration.mapper();
AnnotatedClass ac = AnnotatedClass.construct(IActivation.class, mapper.getSerializationConfig().getAnnotationIntrospector(), null);
Collection<NamedType> types = mapper.getSubtypeResolver().collectAndResolveSubtypes(ac, mapper.getSerializationConfig(), mapper.getSerializationConfig().getAnnotationIntrospector());
boolean found = false;
for (NamedType nt : types) {
System.out.println(nt);
if (nt.getType() == CustomActivation.class)
found = true;
}
assertTrue("CustomActivation: not registered with NeuralNetConfiguration mapper", found);
//Second: let's create a MultiLayerConfiguration with one, and check that the JSON and YAML configuration actually works...
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().learningRate(0.1).list()
    .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).activation(new CustomActivation()).build())
    .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(10).nOut(10).build())
    .pretrain(false).backprop(true).build();
String json = conf.toJson();
String yaml = conf.toYaml();
System.out.println(json);
MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(json);
assertEquals(conf, confFromJson);
MultiLayerConfiguration confFromYaml = MultiLayerConfiguration.fromYaml(yaml);
assertEquals(conf, confFromYaml);
}
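CustomActivation itself is a small fixture. A sketch of an identity activation against the 0.x IActivation contract, extending BaseActivationFunction so the remaining interface methods get defaults; note that the Pair type's package moved between ND4J versions, and value-based equals is again what lets the configuration comparisons succeed:

import org.nd4j.linalg.activations.BaseActivationFunction;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.primitives.Pair; //older releases used a different Pair package

//Identity activation: f(x) = x, f'(x) = 1
public class CustomActivation extends BaseActivationFunction {

    @Override
    public INDArray getActivation(INDArray in, boolean training) {
        return in; //real activations typically transform 'in' in place
    }

    @Override
    public Pair<INDArray, INDArray> backprop(INDArray in, INDArray epsilon) {
        //Gradient wrt input is epsilon .* f'(in) = epsilon; no activation parameters, so second element is null
        return new Pair<>(epsilon, null);
    }

    //Stateless: any two instances are interchangeable
    @Override
    public boolean equals(Object o) {
        return o instanceof CustomActivation;
    }

    @Override
    public int hashCode() {
        return getClass().hashCode();
    }
}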