Use of boofcv.struct.feature.TupleDesc_F64 in project BoofCV by lessthanoptimal: class UtilFeature, method combine.
/**
 * Concatenates the list of tuples together into one big feature. The combined feature must be large
 * enough to store all the inputs.
 *
 * @param inputs List of tuples.
 * @param combined Storage for the combined output. If null a new instance will be declared.
 * @return The combined descriptor.
 */
public static TupleDesc_F64 combine(List<TupleDesc_F64> inputs, TupleDesc_F64 combined) {
    int N = 0;
    for (int i = 0; i < inputs.size(); i++) {
        N += inputs.get(i).size();
    }
    if (combined == null) {
        combined = new TupleDesc_F64(N);
    } else {
        if (N != combined.size())
            throw new RuntimeException("The combined feature needs to be " + N + " not " + combined.size());
    }
    int start = 0;
    for (int i = 0; i < inputs.size(); i++) {
        double[] v = inputs.get(i).value;
        System.arraycopy(v, 0, combined.value, start, v.length);
        start += v.length;
    }
    return combined;
}
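A minimal usage sketch of combine, assuming two SURF-sized (64-element) descriptors that would be filled in elsewhere; the variable names are invented for illustration:

// Hypothetical sketch: merge two 64-element descriptors into one 128-element feature.
TupleDesc_F64 descA = new TupleDesc_F64(64);
TupleDesc_F64 descB = new TupleDesc_F64(64);
// ... descA.value and descB.value would be filled in by a describer ...

List<TupleDesc_F64> parts = new ArrayList<>();
parts.add(descA);
parts.add(descB);

// Passing null makes combine() allocate a new 128-element descriptor
TupleDesc_F64 joined = UtilFeature.combine(parts, null);

// The storage can be reused on later calls as long as its size still matches
joined = UtilFeature.combine(parts, joined);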
Use of boofcv.struct.feature.TupleDesc_F64 in project BoofCV by lessthanoptimal: class CompareConvertedDescriptionsApp, method main.
public static void main(String[] args) {
    String file1 = UtilIO.pathExample("stitch/kayak_01.jpg");
    String file2 = UtilIO.pathExample("stitch/kayak_02.jpg");

    InterestPointDetector<GrayF32> detector = FactoryInterestPoint.fastHessian(new ConfigFastHessian(1, 10, -1, 2, 9, 4, 4));

    // describeA produces the standard real-valued (double) SURF descriptor
    DescribeRegionPoint<GrayF32, TupleDesc_F64> describeA = (DescribeRegionPoint) FactoryDescribeRegionPoint.surfStable(null, GrayF32.class);

    // describeB wraps describeA and converts its output into a signed-byte descriptor
    ConvertTupleDesc<TupleDesc_F64, TupleDesc_S8> converter = FactoryConvertTupleDesc.real_F64_S8(describeA.createDescription().size());
    DescribeRegionPoint<GrayF32, TupleDesc_S8> describeB = new DescribeRegionPointConvert<>(describeA, converter);

    // associate features using sum-of-absolute-difference error for each descriptor type
    ScoreAssociation<TupleDesc_F64> scoreA = FactoryAssociation.scoreSad(TupleDesc_F64.class);
    ScoreAssociation<TupleDesc_S8> scoreB = FactoryAssociation.scoreSad(TupleDesc_S8.class);

    BufferedImage image1 = UtilImageIO.loadImage(file1);
    BufferedImage image2 = UtilImageIO.loadImage(file2);

    visualize("Original", image1, image2, detector, describeA, scoreA);
    visualize("Modified", image1, image2, detector, describeB, scoreB);

    System.out.println("Done");
}
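For reference, the core idea behind converting a real-valued descriptor into a signed-byte one is simple scaling and rounding. The sketch below is only an illustration of that idea, assuming the descriptor values lie roughly in [-1,1] (as normalized SURF values do) and that TupleDesc_S8 exposes its storage the same way TupleDesc_F64 does above; FactoryConvertTupleDesc.real_F64_S8 may use a different scale factor internally.

// Illustration only: quantize a real-valued descriptor (values assumed to be
// roughly in [-1,1]) into a signed-byte descriptor. Not necessarily the exact
// formula used by FactoryConvertTupleDesc.real_F64_S8.
public static TupleDesc_S8 quantize(TupleDesc_F64 input) {
    TupleDesc_S8 output = new TupleDesc_S8(input.size());
    for (int i = 0; i < input.size(); i++) {
        output.value[i] = (byte) Math.round(input.value[i] * 127.0);
    }
    return output;
}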
Use of boofcv.struct.feature.TupleDesc_F64 in project BoofCV by lessthanoptimal: class ExampleClassifySceneKnn, method computeHistograms.
/**
 * Computes a {@link HistogramScene} for each image in the training set. That data structure
 * contains the word histogram and the scene that the histogram belongs to.
 */
private List<HistogramScene> computeHistograms(FeatureToWordHistogram_F64 featuresToHistogram) {
    List<String> scenes = getScenes();

    // Processed results which will be passed into the k-NN algorithm
    List<HistogramScene> memory = new ArrayList<>();

    for (int sceneIndex = 0; sceneIndex < scenes.size(); sceneIndex++) {
        String scene = scenes.get(sceneIndex);
        System.out.println(" " + scene);
        List<String> imagePaths = train.get(scene);

        for (String path : imagePaths) {
            GrayU8 image = UtilImageIO.loadImage(path, GrayU8.class);

            // reset before processing a new image
            featuresToHistogram.reset();
            describeImage.process(image);
            for (TupleDesc_F64 d : describeImage.getDescriptions()) {
                featuresToHistogram.addFeature(d);
            }
            featuresToHistogram.process();

            // The histogram is already normalized so that it sums up to 1. This provides invariance
            // against the overall number of features changing.
            double[] histogram = featuresToHistogram.getHistogram();

            // Create the data structure used by the KNN classifier
            HistogramScene imageHist = new HistogramScene(NUMBER_OF_WORDS);
            imageHist.setHistogram(histogram);
            imageHist.type = sceneIndex;

            memory.add(imageHist);
        }
    }
    return memory;
}
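The memory list produced here is what the k-NN classifier later searches. As a rough, self-contained illustration of what that search amounts to, the sketch below classifies a query histogram with a brute-force 1-nearest-neighbor scan and squared Euclidean distance; the actual example uses BoofCV's k-NN classifier rather than this loop, and the sketch assumes HistogramScene exposes its histogram as a public field, mirroring how type is set above.

// Hedged sketch: brute-force 1-NN classification of a query histogram against
// the training memory. Returns the scene index of the closest training image.
public static int classifyNearest(List<HistogramScene> memory, double[] query) {
    int bestType = -1;
    double bestDistance = Double.MAX_VALUE;
    for (HistogramScene m : memory) {
        double distance = 0;
        for (int i = 0; i < query.length; i++) {
            double d = query[i] - m.histogram[i];
            distance += d * d;
        }
        if (distance < bestDistance) {
            bestDistance = distance;
            bestType = m.type;
        }
    }
    return bestType;
}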
Use of boofcv.struct.feature.TupleDesc_F64 in project BoofCV by lessthanoptimal: class ExampleClassifySceneKnn, method computeClusters.
/**
 * Extracts dense features across the training set, then finds clusters within those features.
 */
private AssignCluster<double[]> computeClusters() {
    System.out.println("Image Features");

    // computes features in the training image set
    List<TupleDesc_F64> features = new ArrayList<>();

    for (String scene : train.keySet()) {
        List<String> imagePaths = train.get(scene);
        System.out.println(" " + scene);

        for (String path : imagePaths) {
            GrayU8 image = UtilImageIO.loadImage(path, GrayU8.class);
            describeImage.process(image);

            // the descriptions will get recycled on the next call, so create a copy
            for (TupleDesc_F64 d : describeImage.getDescriptions()) {
                features.add(d.copy());
            }
        }
    }

    // pass all the features to the clustering algorithm; clusters will be found within this set
    for (int i = 0; i < features.size(); i++) {
        cluster.addReference(features.get(i));
    }

    System.out.println("Clustering");
    // Find the clusters. This can take a bit
    cluster.process(NUMBER_OF_WORDS);

    UtilIO.save(cluster.getAssignment(), CLUSTER_FILE_NAME);

    return cluster.getAssignment();
}
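Because the assignment is written to disk here, later runs can skip the slow clustering step entirely. A minimal sketch, assuming UtilIO.load is the deserializing counterpart of UtilIO.save and that java.io.File is imported:

// Hedged sketch: reload a previously saved cluster assignment if it exists,
// otherwise fall back to recomputing the clusters from scratch.
private AssignCluster<double[]> loadOrComputeClusters() {
    if (!new File(CLUSTER_FILE_NAME).exists())
        return computeClusters();
    return UtilIO.load(CLUSTER_FILE_NAME);
}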
Use of boofcv.struct.feature.TupleDesc_F64 in project BoofCV by lessthanoptimal: class ExampleColorHistogramLookup, method histogramGray.
/**
 * Computes a histogram from the grayscale intensity image alone. Probably the least effective approach
 * for looking up similar images.
 */
public static List<double[]> histogramGray(List<File> images) {
    List<double[]> points = new ArrayList<>();

    GrayU8 gray = new GrayU8(1, 1);
    for (File f : images) {
        BufferedImage buffered = UtilImageIO.loadImage(f.getPath());
        if (buffered == null)
            throw new RuntimeException("Can't load image!");

        gray.reshape(buffered.getWidth(), buffered.getHeight());
        ConvertBufferedImage.convertFrom(buffered, gray, true);

        TupleDesc_F64 imageHist = new TupleDesc_F64(150);
        HistogramFeatureOps.histogram(gray, 255, imageHist);

        // normalize so that image size doesn't matter
        UtilFeature.normalizeL2(imageHist);

        points.add(imageHist.value);
    }

    return points;
}
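Because each histogram is L2-normalized, the dot product between two of them behaves like a cosine similarity. The original example looks up similar images with a nearest-neighbor search over Euclidean distance; the sketch below is only a simple alternative illustration of how the returned list of histograms could be queried:

// Hedged sketch: return the index of the image whose (L2-normalized) histogram
// has the highest dot product with the query image's histogram.
public static int mostSimilar(List<double[]> points, int queryIndex) {
    double[] query = points.get(queryIndex);
    int bestIndex = -1;
    double bestScore = -Double.MAX_VALUE;
    for (int i = 0; i < points.size(); i++) {
        if (i == queryIndex)
            continue;
        double[] candidate = points.get(i);
        double score = 0;
        for (int j = 0; j < query.length; j++) {
            score += query[j] * candidate[j];
        }
        if (score > bestScore) {
            bestScore = score;
            bestIndex = i;
        }
    }
    return bestIndex;
}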