Use of com.simiacryptus.mindseye.network.PipelineNetwork in project MindsEye by SimiaCryptus:
class DeepDream, method processStats.
/**
 * Builds the neural setup for a style transfer run by measuring the content
 * image's activations at each configured layer.
 *
 * @param style the style configuration, including the content image
 * @return the populated neural setup whose {@code contentTarget} holds one
 *         target activation tensor per layer type
 */
public NeuralSetup processStats(final StyleSetup<T> style) {
NeuralSetup<T> self = new NeuralSetup<>(style);
Tensor contentInput = Tensor.fromRGB(style.contentImage);
self.contentTarget = new ContentTarget();
for (final T layerType : getLayerTypes()) {
// Encourage release of native/GPU buffers from the previous iteration.
System.gc();
final PipelineNetwork network = layerType.texture();
// Evaluate the content image through this layer's network; getDataAndFree /
// getAndFree transfer ownership of the result tensor to us.
final Tensor target = network.eval(contentInput).getDataAndFree().getAndFree(0);
self.contentTarget.content.put(layerType, target);
logger.info(String.format("target_content_%s=%s", layerType.name(), target.prettyPrint()));
}
return self;
}
Use of com.simiacryptus.mindseye.network.PipelineNetwork in project MindsEye by SimiaCryptus:
class ImageClassifier, method deepDream.
/**
 * Deep dream: iteratively optimizes the input image so that the (frozen)
 * network assigns {@code targetValue} to the chosen category, logging the
 * starting image and plotting the optimization history.
 *
 * @param log the notebook log that receives images and code output
 * @param image the image tensor to optimize (modified in place via the trainable mask)
 * @param targetCategoryIndex the index of the category to amplify
 * @param totalCategories the total number of categories (size of the one-hot target)
 * @param config hook to customize the {@link IterativeTrainer} before it runs
 * @param network the classification network; frozen so only the image is trained
 * @param lossLayer the loss layer comparing network output to the target vector
 * @param targetValue the target score for the chosen category
 */
public void deepDream(@Nonnull final NotebookOutput log, final Tensor image, final int targetCategoryIndex, final int totalCategories, Function<IterativeTrainer, IterativeTrainer> config, final Layer network, final Layer lossLayer, final double targetValue) {
// Single training row: [input image, one-hot-ish target vector of size totalCategories].
@Nonnull List<Tensor[]> data = Arrays.<Tensor[]>asList(new Tensor[] { image, new Tensor(1, 1, totalCategories).set(targetCategoryIndex, targetValue) });
log.code(() -> {
for (Tensor[] tensors : data) {
ImageClassifier.log.info(log.image(tensors[0].toImage(), "") + tensors[1]);
}
});
log.code(() -> {
@Nonnull ArrayList<StepRecord> history = new ArrayList<>();
// Clamp sub-network: relu then (255 - x), applied twice, computes
// min(max(x, 0), 255) element-wise — i.e. clamps pixels to [0, 255].
@Nonnull PipelineNetwork clamp = new PipelineNetwork(1);
clamp.add(new ActivationLayer(ActivationLayer.Mode.RELU));
clamp.add(new LinearActivationLayer().setBias(255).setScale(-1).freeze());
clamp.add(new ActivationLayer(ActivationLayer.Mode.RELU));
clamp.add(new LinearActivationLayer().setBias(255).setScale(-1).freeze());
// Supervised graph: loss(network(clamp(input0)), input1). The network is
// frozen, so gradients flow only into the image input.
@Nonnull PipelineNetwork supervised = new PipelineNetwork(2);
supervised.wrap(lossLayer, supervised.add(network.freeze(), supervised.wrap(clamp, supervised.getInput(0))), supervised.getInput(1));
// TensorList[] gpuInput = data.stream().map(data1 -> {
// return CudnnHandle.apply(gpu -> {
// Precision precision = Precision.Float;
// return CudaTensorList.wrap(gpu.getPtr(TensorArray.wrap(data1), precision, MemoryType.Managed), 1, image.getDimensions(), precision);
// });
// }).toArray(i -> new TensorList[i]);
// @Nonnull Trainable trainable = new TensorListTrainable(supervised, gpuInput).setVerbosity(1).setMask(true);
// Mask (true, false): optimize the image input, keep the target vector fixed.
@Nonnull Trainable trainable = new ArrayTrainable(supervised, 1).setVerbose(true).setMask(true, false).setData(data);
// config may override monitor/orientation/line-search/timeout defaults set here;
// terminate threshold of -inf means only the timeout (or config) stops training.
config.apply(new IterativeTrainer(trainable).setMonitor(getTrainingMonitor(history, supervised)).setOrientation(new QQN()).setLineSearchFactory(name -> new ArmijoWolfeSearch()).setTimeout(60, TimeUnit.MINUTES)).setTerminateThreshold(Double.NEGATIVE_INFINITY).runAndFree();
return TestUtil.plot(history);
});
}
Use of com.simiacryptus.mindseye.network.PipelineNetwork in project MindsEye by SimiaCryptus:
class StyleTransfer, method fitnessNetwork.
/**
 * Assembles the fitness (objective) pipeline network for the given setup.
 *
 * <p>Resolves each registered layer's {@link UUID} to its {@link DAGNode} in the
 * base network, builds the loss network over those nodes, and applies the
 * configured numeric precision before returning it.
 *
 * @param setup the measured neural setup
 * @return the fitness pipeline network
 */
@Nonnull
public PipelineNetwork fitnessNetwork(NeuralSetup setup) {
PipelineNetwork basePipeline = getInstance().getNetwork();
Map<T, UUID> layerIds = getInstance().getNodes();
Map<T, DAGNode> layerNodes = new HashMap<>();
for (Map.Entry<T, UUID> entry : layerIds.entrySet()) {
layerNodes.put(entry.getKey(), basePipeline.getChildNode(entry.getValue()));
}
PipelineNetwork fitness = buildNetwork(setup, layerNodes, basePipeline);
// network = withClamp(network);
ArtistryUtil.setPrecision(fitness, setup.style.precision);
return fitness;
}
Use of com.simiacryptus.mindseye.network.PipelineNetwork in project MindsEye by SimiaCryptus:
class StyleTransfer, method getContentComponents.
/**
 * Builds the weighted content-loss components of the style-transfer objective.
 *
 * <p>For every layer type with a non-zero content coefficient, wraps a
 * mean-squared-error loss (normalized by the content target's RMS) comparing
 * the layer's activation node against the pre-measured content target.
 *
 * @param setup the measured neural setup holding content targets and coefficients
 * @param nodeMap map from layer type to its node in the fitness network
 * @return the list of (coefficient, loss-node) pairs; empty if no layer has a
 *         non-zero content coefficient
 */
@Nonnull
public ArrayList<Tuple2<Double, DAGNode>> getContentComponents(NeuralSetup<T> setup, final Map<T, DAGNode> nodeMap) {
ArrayList<Tuple2<Double, DAGNode>> contentComponents = new ArrayList<>();
for (final T layerType : getLayerTypes()) {
// Single lookup; absent layers default to a zero coefficient.
final double coeff_content = setup.style.content.params.getOrDefault(layerType, 0.0);
if (coeff_content != 0) {
final DAGNode node = nodeMap.get(layerType);
final PipelineNetwork network1 = (PipelineNetwork) node.getNetwork();
final Tensor content = setup.contentTarget.content.get(layerType);
// Alpha 1/rms normalizes the loss by the content activation's magnitude.
contentComponents.add(new Tuple2<>(coeff_content, network1.wrap(new MeanSqLossLayer().setAlpha(1.0 / content.rms()), node, network1.wrap(new ValueLayer(content), new DAGNode[] {}))));
}
}
return contentComponents;
}
Use of com.simiacryptus.mindseye.network.PipelineNetwork in project MindsEye by SimiaCryptus:
class ArtistryUtil, method getClamp.
/**
 * Builds a single-input pipeline that clamps each element to {@code [0, max]}.
 *
 * <p>One pass of (relu, then {@code y = max - x}) maps x to {@code max - max(x, 0)};
 * applying the pair twice yields {@code min(max(x, 0), max)}.
 *
 * @param max the upper clamp bound
 * @return the clamping pipeline network
 */
@Nonnull
public static PipelineNetwork getClamp(final int max) {
@Nonnull PipelineNetwork clamp = new PipelineNetwork(1);
for (int pass = 0; pass < 2; pass++) {
clamp.add(new ActivationLayer(ActivationLayer.Mode.RELU));
clamp.add(new LinearActivationLayer().setBias(max).setScale(-1).freeze());
}
return clamp;
}
Aggregations