OpenCVSharp4で勾配構造テンソルを使った二値化

OpenCVチュートリアルのこの章をOpenCVSharp4仕様に書き換えた。

構造テンソルを使うと、勾配の方向や勾配の一貫性などがわかる。

解説↓

 

negizoku.hatenablog.jp

 

 

プログラム

using OpenCvSharp;  

namespace Anisotropic_image_segmentation_by_a_gradient_structure_tensor {  
    class Program {  
        static void Main(string[] args) {  
            // Load the input image as single-channel grayscale.
            Mat src = new Mat(@"D:\gst_input.jpg", ImreadModes.Grayscale);  

            // Compute the gradient structure tensor: coherency (anisotropy
            // measure) and gradient orientation.
            Mat imgCoherency;
            Mat imgOrientation;
            int windowWidth = 52; // side length (px) of the tensor-averaging window
            calcGST(src, out imgCoherency, out imgOrientation, windowWidth);  

            // Binarize by coherency: keep strongly anisotropic regions.
            Mat imgCoherencyBin = new Mat();  
            double cThresh = 0.43;  
            Cv2.Threshold(imgCoherency, imgCoherencyBin, cThresh, 255, ThresholdTypes.Binary);  

            // Binarize by orientation: keep a band of gradient directions.
            // NOTE(review): calcGST returns the DOUBLED angle 2*theta in
            // [0, 360) degrees (the OpenCV tutorial multiplies by 0.5
            // afterwards), so these thresholds are doubled-angle degrees.
            Mat imgOrientationBin = new Mat();  
            double lowThresh = 35,  
                highThresh = 114;  
            Cv2.InRange(imgOrientation, new Scalar(lowThresh), new Scalar(highThresh), imgOrientationBin);  

            // AND the two masks. Both are 0/255 CV_8UC1 after conversion, so
            // the saturating per-element product acts as a logical AND.
            imgOrientationBin.ConvertTo(imgOrientationBin, MatType.CV_8UC1);  
            imgCoherencyBin.ConvertTo(imgCoherencyBin, MatType.CV_8UC1);  
            Mat imgBin = imgOrientationBin.Mul(imgCoherencyBin);  

            // Prepare visualizations: scale coherency [0,1] -> [0,255] and
            // bring the orientation into the displayable 8-bit range.
            imgCoherency *= 255;  
            // BUG FIX: the fluent Mat.Normalize(...) returns a NEW Mat and does
            // not modify the source; the original code discarded that result,
            // so orientation values above 255 were clipped by the ConvertTo
            // below. Normalize in place via the Cv2 overload instead.
            Cv2.Normalize(imgOrientation, imgOrientation, 0, 255, NormTypes.MinMax);  
            imgCoherency.ConvertTo(imgCoherency, MatType.CV_8UC1);  
            imgOrientation.ConvertTo(imgOrientation, MatType.CV_8UC1);  

            Cv2.ApplyColorMap(imgCoherency, imgCoherency, ColormapTypes.Viridis);  
            Cv2.ApplyColorMap(imgOrientation, imgOrientation, ColormapTypes.Viridis);  

            // Overlay the binary mask on the source image and show all results.
            Cv2.ImShow("result", src + imgBin);  
            Cv2.ImShow("cohency", imgCoherency);  
            Cv2.ImShow("orientation", imgOrientation);  

            Cv2.WaitKey();  
        }  

        // Compute the gradient structure tensor (GST) of a grayscale image.
        //   inputImg       : source image; converted to CV_32FC1 internally
        //                    (ref removed: the method never reassigns it)
        //   imgCoherency   : out, (lambda1-lambda2)/(lambda1+lambda2) in [0,1]
        //   imgOrientation : out, atan2(2*J12, J22-J11) in degrees [0,360),
        //                    i.e. the doubled gradient angle 2*theta
        //   windowWidth    : side length of the box-filter averaging window
        static void calcGST(Mat inputImg, out Mat imgCoherency, out Mat imgOrientation, int windowWidth) {  
            // Work in 32-bit float so the gradient products below cannot overflow.
            Mat img = new Mat();  
            inputImg.ConvertTo(img, MatType.CV_32FC1);  

            // First-order image gradients (Sobel).
            Mat imgDiffX = new Mat(),  
                imgDiffY = new Mat();  
            Cv2.Sobel(img, imgDiffX, MatType.CV_32FC1, 1, 0);  
            Cv2.Sobel(img, imgDiffY, MatType.CV_32FC1, 0, 1);  

            // Per-pixel products of the gradients.
            Mat imgDiffXX = imgDiffX.Mul(imgDiffX);  
            Mat imgDiffYY = imgDiffY.Mul(imgDiffY);  
            Mat imgDiffXY = imgDiffX.Mul(imgDiffY);  

            // Window-average the products to get the tensor components
            // J = [[J11, J12], [J12, J22]].
            Mat J11 = new Mat(),  
                J22 = new Mat(),  
                J12 = new Mat();  
            Cv2.BoxFilter(imgDiffXX, J11, MatType.CV_32FC1, new Size(windowWidth, windowWidth));  
            Cv2.BoxFilter(imgDiffYY, J22, MatType.CV_32FC1, new Size(windowWidth, windowWidth));  
            Cv2.BoxFilter(imgDiffXY, J12, MatType.CV_32FC1, new Size(windowWidth, windowWidth));  

            // Eigenvalues of the 2x2 tensor, up to a common factor of 0.5
            // that cancels in the coherency ratio below.
            Mat D = (J11 - J22).Mul(J11 - J22) + 4 * (J12.Mul(J12));  
            Mat rootD = new Mat();  
            Cv2.Sqrt(D, rootD);  
            Mat eigenVal1 = J11 + J22 + rootD; // larger eigenvalue
            Mat eigenVal2 = J11 + J22 - rootD; // smaller eigenvalue

            // Coherency = (lambda1 - lambda2) / (lambda1 + lambda2) in [0, 1];
            // ~1 for strongly oriented structures, ~0 for isotropic regions.
            imgCoherency = new Mat();  
            Cv2.Divide(eigenVal1 - eigenVal2, eigenVal1 + eigenVal2, imgCoherency);  

            // Orientation = atan2(2*J12, J22 - J11) in degrees — the DOUBLED
            // angle 2*theta. The OpenCV tutorial halves this value; here the
            // caller's thresholds are expressed in doubled-angle degrees.
            imgOrientation = new Mat();  
            Cv2.Phase(J22 - J11, 2 * J12, imgOrientation, true);  
        }  
    }  
}  

結果

入力画像

f:id:negizoku:20200920233143j:plain




出力画像
f:id:negizoku:20200920233155p:plain