Commit e7e3cfd3 authored by Chris Coughlin's avatar Chris Coughlin

Initial implementation of Otsu's image segmentation algorithm, Canny bug...

Initial implementation of Otsu's image segmentation algorithm, Canny bug fixes, and dependency updates
parent 0c1dab4d
......@@ -2,7 +2,7 @@
<!--
~ C:/Users/chris/IdeaProjects/myriad/core/pom.xml
~
~ Copyright (c) 2017 Emphysic LLC.
~ Copyright (c) 2018 Emphysic LLC.
~
~ Licensed under the Apache License, Version 2.0 (the "License");
~ you may not use this file except in compliance with the License.
......@@ -41,7 +41,7 @@
<dependency>
<groupId>com.github.haifengl</groupId>
<artifactId>smile-core</artifactId>
<version>1.3.0</version>
<version>1.5.0</version>
</dependency>
<dependency>
<groupId>com.github</groupId>
......@@ -56,7 +56,7 @@
<dependency>
<groupId>com.aparapi</groupId>
<artifactId>aparapi</artifactId>
<version>1.3.4</version>
<version>1.4.1</version>
</dependency>
<dependency>
<groupId>com.twelvemonkeys.imageio</groupId>
......@@ -66,7 +66,7 @@
<dependency>
<groupId>org.apache.mahout</groupId>
<artifactId>mahout-mr</artifactId>
<version>0.10.0</version>
<version>0.13.0</version>
</dependency>
<dependency>
<groupId>com.mashape.unirest</groupId>
......
/*
* com.emphysic.myriad.core.data.ops.CannyOperation
*
* Copyright (c) 2017 Emphysic LLC.
* Copyright (c) 2018 Emphysic LLC.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
......@@ -65,10 +65,9 @@ public class CannyOperation implements DatasetOperation {
* NONE - do not attempt to automatically calculate
* MEAN - use the mean of the input data to calculate lower and upper thresholds
* MEDIAN - use the median of the input data to calculate lower and upper thresholds
* OTSU - use Otsu's Method (https://en.wikipedia.org/wiki/Otsu%27s_method ) to calculate lower and upper thresholds
* (not yet implemented)
* OTSU1D - use Otsu's Method (https://en.wikipedia.org/wiki/Otsu%27s_method ) to calculate lower and upper thresholds
*/
public enum AutoThreshold {NONE, MEAN, MEDIAN, OTSU}
public enum AutoThreshold {NONE, MEAN, MEDIAN, OTSU1D}
/**
* Whether and how to automatically calculate thresholds
......@@ -127,24 +126,33 @@ public class CannyOperation implements DatasetOperation {
@Override
public Dataset run(Dataset input) {
if (input != null) {
Dataset result = new Dataset(input.getWidth(), input.getHeight());
switch (autoThreshold) {
case MEAN:
// Set thresholds w. mean
// (1-sigma)*[mean value] and set the high threshold to (1+sigma)*[mean value]
Double mean = Math.mean(result.getData());
Double mean = Math.mean(input.getData());
setLowerThreshold((1 - sigmaThreshold) * mean);
setUpperThreshold((1 + sigmaThreshold) * mean);
break;
case MEDIAN:
// Set thresholds w. median
// (1-sigma)*[median value] and (1+sigma)*[median value]
Double median = Math.median(result.getData());
Double median = Math.median(input.getData());
setLowerThreshold((1 - sigmaThreshold) * median);
setUpperThreshold((1 + sigmaThreshold) * median);
break;
case OTSU:
// Not yet implemented
case OTSU1D:
// Sets upper threshold to Otsu's 1D threshold and
// lower to one half this value
int t = Otsu1dOperation.calcThreshold(input, true);
// Thresholds are calculated w. normalized grayscale data between 0-255
// so we need to scale back to our original input
Double max = Math.max(input.getData());
Double min = Math.min(input.getData());
Double threshold = (t / 255) * (max - min) + min;
setLowerThreshold(threshold / 2);
setUpperThreshold(threshold);
break;
default:
break;
}
......@@ -160,6 +168,7 @@ public class CannyOperation implements DatasetOperation {
}
Dataset roundedAngles = new Dataset(angles, origAngles.getWidth(), origAngles.getHeight());
Dataset origMags = G.getMagnitudes();
Dataset result = new Dataset(input.getWidth(), input.getHeight());
// Non-Maximum Suppression - thin edges by only keeping the local maximum along the gradient direction
for (int i=0; i<origMags.getWidth(); i++) {
for (int j=0; j<origMags.getHeight(); j++) {
......
/*
* com.emphysic.myriad.core.data.ops.Otsu1dOperation
*
* Copyright (c) 2018 Emphysic LLC.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.emphysic.myriad.core.data.ops;
import com.emphysic.myriad.core.data.io.Dataset;
import com.emphysic.myriad.core.data.ops.math.Histogram;
import java.util.Map;
/**
* Otsu's global thresholding algorithm (1D). Based on Balarini and Nesmachnow,
* "A C++ Implementation of Otsu’s Image Segmentation Method, Image Processing On Line, 6 (2016), pp. 155–164"
* https://doi.org/10.5201/ipol.2016.158
*/
public class Otsu1dOperation implements DatasetOperation {
    private static final long serialVersionUID = 1L; // try never to change - indicates backwards compatibility is broken
    private static final int VERSION = 1; // current implementation version

    /**
     * Segments the input Dataset with Otsu's 1D global threshold: converts the
     * data to a 0-255 gray scale, calculates the threshold, then maps every
     * element to 1 (above threshold) or 0 (at or below threshold).
     * @param input Dataset to segment
     * @return binary Dataset of the same dimensions, or null if input is null or empty
     */
    @Override
    public Dataset run(Dataset input) {
        if (input != null && input.getSize() > 0) {
            Dataset copy = Histogram.grayScale(input);
            Dataset result = new Dataset(copy.getWidth(), copy.getHeight());
            // Data was already normalized to gray scale above, so no further
            // normalization is requested from calcThreshold
            int threshold = calcThreshold(copy);
            for (int i = 0; i < copy.getWidth(); i++) {
                for (int j = 0; j < copy.getHeight(); j++) {
                    double val = copy.get(i, j);
                    double out = 0;
                    if (val > threshold) {
                        out = 1;
                    }
                    result.set(i, j, out);
                }
            }
            return result;
        }
        return null;
    }

    @Override
    public long getSerializationVersion() {
        return serialVersionUID;
    }

    @Override
    public int getVersion() {
        return VERSION;
    }

    @Override
    public void initCurrentVersion(Map<String, Object> objectMap) {}

    /**
     * Calculates Otsu's threshold for a given Dataset. The input is assumed to
     * already contain gray scale (0-255) values.
     * @param input Dataset to threshold
     * @return threshold value between 0 and 255
     */
    public static int calcThreshold(Dataset input) {
        return calcThreshold(input, false);
    }

    /**
     * Calculates Otsu's threshold for a given Dataset by maximizing the
     * between-class variance over all candidate thresholds.
     * @param input Dataset to threshold
     * @param normalize if true, convert input data to gray scale prior to calculating the threshold
     * @return threshold value between 0 and 255
     */
    public static int calcThreshold(Dataset input, boolean normalize) {
        int threshold = 0;
        int[] hist = Histogram.hist1d(input, normalize);
        // Weighted sum of all intensities.  Accumulated in a long: an int
        // overflows once 255 * pixel count exceeds Integer.MAX_VALUE
        // (images larger than roughly 8.4M pixels).
        long suma = 0;
        for (int i = 0; i < hist.length; i++) {
            suma += (long) i * hist[i];
        }
        long q1 = 0;        // pixel count of the background class
        long sumb = 0;      // weighted intensity sum of the background class
        double var_max = 0; // largest between-class variance seen so far
        for (int j = 0; j < hist.length; j++) {
            q1 += hist[j];
            if (q1 == 0) {
                // No pixels at or below this level yet - background class empty
                continue;
            }
            long q2 = input.getSize() - q1; // pixel count of the foreground class
            if (q2 == 0) {
                // All pixels are in the background class - nothing left to split
                break;
            }
            sumb += (long) j * hist[j];
            // Class means.  Explicit floating-point division: integer division
            // here truncates the means and skews the selected threshold.
            double u1 = (double) sumb / q1;
            double u2 = (double) (suma - sumb) / q2;
            // Between-class variance.  Promote q1 * q2 to double first -
            // the integer product overflows for inputs above ~92k pixels.
            double sigma = (double) q1 * q2 * Math.pow(u1 - u2, 2);
            if (sigma > var_max) {
                threshold = j;
                var_max = sigma;
            }
        }
        return threshold;
    }
}
/*
* com.emphysic.myriad.core.data.ops.math.Histogram
*
* Copyright (c) 2018 Emphysic LLC.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.emphysic.myriad.core.data.ops.math;
import com.emphysic.myriad.core.data.io.Dataset;
import com.emphysic.myriad.core.data.ops.NormalizeSignalOperation;
/**
* Simple histogram functions.
*/
public class Histogram {
    /** Utility class - static methods only, not instantiable. */
    private Histogram() {}

    /**
     * Calculates the one-dimensional histogram for a given Dataset.
     * Values are truncated to integers to select a bin; values outside
     * [0, max_intensity] are clamped into the nearest valid bin rather than
     * throwing an ArrayIndexOutOfBoundsException (relevant when
     * normalize is false and the raw data range is unknown).
     * @param input Dataset to examine
     * @param normalize if true, input is normalized prior to calculating the histogram
     * @param max_intensity maximum intensity to use
     * @return array of size [max_intensity + 1] with a count of the number of elements in input with that value
     */
    public static int[] hist1d(Dataset input, boolean normalize, int max_intensity) {
        Dataset normalized;
        if (normalize) {
            normalized = grayScale(input, max_intensity);
        } else {
            normalized = new Dataset(input);
        }
        int[] hist = new int[max_intensity + 1];
        for (int i = 0; i < normalized.getSize(); i++) {
            int val = (int) normalized.getData()[i];
            // Clamp out-of-range samples into the first/last bin
            if (val < 0) {
                val = 0;
            } else if (val > max_intensity) {
                val = max_intensity;
            }
            hist[val] += 1;
        }
        return hist;
    }

    /**
     * Converts a Dataset into a gray scale representation by normalizing
     * between 0 and a maximum value.
     * @param input input to normalize
     * @param max_intensity maximum value to use
     * @return Dataset with scaled amplitudes
     */
    public static Dataset grayScale(Dataset input, int max_intensity) {
        // NormalizeSignalOperation scales the data to [0, 1]; rescale to
        // [0, max_intensity] in place on the returned copy
        Dataset normalized = new NormalizeSignalOperation().run(input);
        for (int i = 0; i < normalized.getWidth(); i++) {
            for (int j = 0; j < normalized.getHeight(); j++) {
                normalized.set(i, j, max_intensity * normalized.get(i, j));
            }
        }
        return normalized;
    }

    /**
     * Converts a Dataset into a gray scale representation by normalizing between 0 and 255.
     * @param input input to normalize
     * @return Dataset with values between 0 and 255.
     */
    public static Dataset grayScale(Dataset input) {
        return grayScale(input, 255);
    }

    /**
     * Calculates the histogram for a given Dataset, normalizing to 0-255 first.
     * @param input Dataset to examine
     * @return 255-level histogram
     */
    public static int[] hist1d(Dataset input) {
        return hist1d(input, true, 255);
    }

    /**
     * Calculates the histogram for a given Dataset.
     * @param input Dataset to examine
     * @param normalize if true, normalize the data between 0-255 prior to calculating the histogram
     * @return histogram of data
     */
    public static int[] hist1d(Dataset input, boolean normalize) {
        return hist1d(input, normalize, 255);
    }
}
......@@ -2,7 +2,7 @@
<!--
~ C:/Users/chris/IdeaProjects/myriad/network/pom.xml
~
~ Copyright (c) 2017 Emphysic LLC.
~ Copyright (c) 2018 Emphysic LLC.
~
~ Licensed under the Apache License, Version 2.0 (the "License");
~ you may not use this file except in compliance with the License.
......@@ -32,67 +32,67 @@
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-actor_2.11</artifactId>
<version>2.4.17</version>
<version>2.5.8</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-agent_2.11</artifactId>
<version>2.4.17</version>
<version>2.5.8</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-persistence_2.11</artifactId>
<version>2.4.17</version>
<version>2.5.8</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-persistence-tck_2.11</artifactId>
<version>2.4.17</version>
<version>2.5.8</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-slf4j_2.11</artifactId>
<version>2.4.17</version>
<version>2.5.8</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-testkit_2.11</artifactId>
<version>2.4.17</version>
<version>2.5.8</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-cluster_2.11</artifactId>
<version>2.4.17</version>
<version>2.5.8</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-cluster-metrics_2.11</artifactId>
<version>2.4.17</version>
<version>2.5.8</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-cluster-sharding_2.11</artifactId>
<version>2.4.17</version>
<version>2.5.8</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-cluster-tools_2.11</artifactId>
<version>2.4.17</version>
<version>2.5.8</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-remote_2.11</artifactId>
<version>2.4.17</version>
<version>2.5.8</version>
</dependency>
<dependency>
<groupId>com.emphysic.myriad</groupId>
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment