Introduction
VisionMaster (hereafter VM) is a powerful industrial image-processing package. It can process the wide variety of images encountered in industrial applications, ships with strong operators and a comprehensive set of sample programs, and works very well as the main image-processing application. VM currently offers four development modes to suit different groups of developers: the VM algorithm platform, VM SDK development, operator SDK development, and custom algorithm module development. The first problem developers usually face when working with a general-purpose algorithm library is how to feed images into it correctly. Many image formats are in common use, such as Bitmap, Mat and the Halcon image types, and VM's development modes each use their own image types as well. This article shows how to convert between these image types. Except for the custom algorithm module examples, which are written in C++, all sample code below is C#.
The image types used by VM are:
- Camera: image data stream (here the type is MyCamera.MV_FRAME_OUT from MvCameraControl.Net.dll, i.e. the Hikrobot industrial camera SDK; this dll ships with both the MVS SDK and the operator SDK. The operator SDK's MVDCamera.Net.dll can also grab frames; it is a wrapper around MvCameraControl.Net.dll, and when it is used the image type is CMvdImage);
- VM: script input image (type ImageData);
- VM SDK: procedure input image (ImageBaseData_V2, new in the VM4.2 SDK), Group input image (ImageBaseData_V2, new in the VM4.2 SDK), image source SDK input image (ImageBaseData), module input image (InputImageData), and procedure output image (the way the output image is obtained differs between the VM4.0 SDK and the VM4.2 SDK; in VM4.2 the procedure output image type is ImageBaseData_V2);
- Operator SDK: input image (CMvdImage in C#, IMvdImage in C++);
- Algorithm module: input image (HKA_IMAGE, a C++ image type).
Note that for three-channel images the channel order differs: Bitmap and Mat store pixels as BGR, while VM and its secondary-development interfaces use RGB. The sections below provide sample conversions for the common scenarios; each conversion is written as a function that takes one image type as input and returns the converted image type.
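As a standalone illustration of that channel handling (this helper is not part of any SDK, it is only shown for reference), a packed three-channel buffer can be reordered in place as follows; the same loop appears inline in several of the samples below.
// Illustrative helper only: swaps the R and B bytes of a packed 3-channel buffer in place (BGR <-> RGB).
public static void SwapRedAndBlue(byte[] packedPixels)
{
for (int i = 0; i + 2 < packedPixels.Length; i += 3)
{
byte temp = packedPixels[i];
packedPixels[i] = packedPixels[i + 2];
packedPixels[i + 2] = temp;
}
}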
Image Conversions
1. Converting camera frames to the VM image types
A grabbed camera frame has the type MyCamera.MV_FRAME_OUT. The samples below convert it to the procedure input image, Group input image, image source SDK input image, module input image, and operator input image respectively.
1.1 Camera frame to procedure input and Group input (ImageBaseData_V2)
public ImageBaseData_V2 CCDToImageBaseDataV2(MyCamera.MV_FRAME_OUT frameOut)
{
ImageBaseData_V2 imageBaseDataV2 = new ImageBaseData_V2();
if (frameOut.stFrameInfo.enPixelType == MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8)
{
imageBaseDataV2 = new ImageBaseData_V2(frameOut.pBufAddr, frameOut.stFrameInfo.nFrameLen, frameOut.stFrameInfo.nWidth, frameOut.stFrameInfo.nHeight, VMPixelFormat.VM_PIXEL_MONO_08);
}
else if (frameOut.stFrameInfo.enPixelType == MyCamera.MvGvspPixelType.PixelType_Gvsp_RGB8_Packed)
{
imageBaseDataV2 = new ImageBaseData_V2(frameOut.pBufAddr, frameOut.stFrameInfo.nFrameLen, frameOut.stFrameInfo.nWidth, frameOut.stFrameInfo.nHeight, VMPixelFormat.VM_PIXEL_RGB24_C3);
}
return imageBaseDataV2;
}
var image = CCDToImageBaseDataV2(stFrameOut);
var procedure = VmSolution.Instance["流程1"] as VmProcedure;
procedure.ModuParams.SetInputImage_V2("ImageData", image);
var group = VmSolution.Instance["流程1.组合模块1"] as IMVSGroupTool;
group.ModuParams.SetInputImage_V2("ImageData", image);
1.2 Camera frame to image source SDK input (ImageBaseData)
public ImageBaseData CCDToImageBaseData(MyCamera.MV_FRAME_OUT frameOut)
{
ImageBaseData imageBaseData = new ImageBaseData();
imageBaseData.Width = frameOut.stFrameInfo.nWidth;
imageBaseData.Height = frameOut.stFrameInfo.nHeight;
imageBaseData.DataLen = frameOut.stFrameInfo.nFrameLen;
if (frameOut.stFrameInfo.enPixelType == MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8)
{
imageBaseData.Pixelformat = (int)VMPixelFormat.VM_PIXEL_MONO_08;
}
else if (frameOut.stFrameInfo.enPixelType == MyCamera.MvGvspPixelType.PixelType_Gvsp_RGB8_Packed)
{
imageBaseData.Pixelformat = (int)VMPixelFormat.VM_PIXEL_RGB24_C3;
}
imageBaseData.ImageData = new byte[frameOut.stFrameInfo.nFrameLen];
Marshal.Copy(frameOut.pBufAddr, imageBaseData.ImageData, 0, (int)frameOut.stFrameInfo.nFrameLen);
return imageBaseData;
}
var image = CCDToImageBaseData(stFrameOut);
ImageSourceModuleTool imageSourceModuleTool = (ImageSourceModuleTool)VmSolution.Instance["流程1.图像源1"];
imageSourceModuleTool.SetImageData(image);
1.3 Camera frame to module input (InputImageData)
public InputImageData CCDToInputImageData(MyCamera.MV_FRAME_OUT frameOut)
{
InputImageData inputImageData = new InputImageData();
inputImageData.Names.DataName = "InImage";
inputImageData.Names.HeightName = "InImageHeight";
inputImageData.Names.WidthName = "InImageWidth";
inputImageData.Names.PixelFormatName = "InImagePixelFormat";
inputImageData.Width = frameOut.stFrameInfo.nWidth;
inputImageData.Height = frameOut.stFrameInfo.nHeight;
inputImageData.DataLen = frameOut.stFrameInfo.nFrameLen;
if (frameOut.stFrameInfo.enPixelType == MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8)
{
inputImageData.Pixelformat = ImagePixelFormat.IMAGE_PIXEL_FORMAT_MONO8;
}
else if (frameOut.stFrameInfo.enPixelType == MyCamera.MvGvspPixelType.PixelType_Gvsp_RGB8_Packed)
{
inputImageData.Pixelformat = ImagePixelFormat.IMAGE_PIXEL_FORMAT_RGB24;
}
inputImageData.Data = Marshal.AllocHGlobal((int)frameOut.stFrameInfo.nFrameLen);
byte[] imagedataBuffer = new byte[(int)frameOut.stFrameInfo.nFrameLen];
Marshal.Copy(frameOut.pBufAddr, imagedataBuffer, 0, (int)frameOut.stFrameInfo.nFrameLen);
Marshal.Copy(imagedataBuffer, 0, inputImageData.Data, (int)frameOut.stFrameInfo.nFrameLen);
return inputImageData;
}
var image= CCDToInputImageData(stFrameOut);
var circlefindTool = VmSolution.Instance["流程1.圆查找1"] as IMVSImageEnhanceModuCs.IMVSImageEnhanceModuTool;
circlefindTool.ModuParams.SetInputImage(image);
1.4 Camera frame to operator input (CMvdImage)
public CMvdImage CCDToCMvdImage(MyCamera.MV_FRAME_OUT frameOut)
{
VisionDesigner.CMvdImage cMvdImage = new VisionDesigner.CMvdImage();
VisionDesigner.MVD_IMAGE_DATA_INFO stImageData = new VisionDesigner.MVD_IMAGE_DATA_INFO();
stImageData.stDataChannel[0].nLen = (uint)(frameOut.stFrameInfo.nFrameLen);
stImageData.stDataChannel[0].nSize = (uint)(frameOut.stFrameInfo.nFrameLen);
byte[] m_BufForDriver1 = new byte[frameOut.stFrameInfo.nFrameLen];
Marshal.Copy(frameOut.pBufAddr, m_BufForDriver1, 0, (int)frameOut.stFrameInfo.nFrameLen);
stImageData.stDataChannel[0].arrDataBytes = m_BufForDriver1;
if (frameOut.stFrameInfo.enPixelType == MyCamera.MvGvspPixelType.PixelType_Gvsp_Mono8)
{
stImageData.stDataChannel[0].nRowStep = (uint)frameOut.stFrameInfo.nWidth;
cMvdImage.InitImage((uint)frameOut.stFrameInfo.nWidth, (uint)frameOut.stFrameInfo.nHeight, MVD_PIXEL_FORMAT.MVD_PIXEL_MONO_08, stImageData);
}
else if (frameOut.stFrameInfo.enPixelType == MyCamera.MvGvspPixelType.PixelType_Gvsp_RGB8_Packed)
{
stImageData.stDataChannel[0].nRowStep = (uint)frameOut.stFrameInfo.nWidth * 3;
cMvdImage.InitImage((uint)frameOut.stFrameInfo.nWidth, (uint)frameOut.stFrameInfo.nHeight, MVD_PIXEL_FORMAT.MVD_PIXEL_RGB_RGB24_C3, stImageData);
}
return cMvdImage;
}
2. Converting between Bitmap and the VM image types
The samples below convert between Bitmap and the procedure input image, Group input image, image source SDK input image, module input image, operator input image, and procedure output image.
2.1 Bitmap to procedure input and Group input (ImageBaseData_V2)
public ImageBaseData_V2 BitmapToImageBaseData_V2(Bitmap bmpInputImg)
{
ImageBaseData_V2 imageBaseData_V2 = null;
System.Drawing.Imaging.PixelFormat bitPixelFormat = bmpInputImg.PixelFormat;
BitmapData bmData = bmpInputImg.LockBits(new Rectangle(0, 0, bmpInputImg.Width, bmpInputImg.Height), ImageLockMode.ReadOnly, bitPixelFormat);
if (bitPixelFormat == System.Drawing.Imaging.PixelFormat.Format8bppIndexed)
{
Int32 bitmapDataSize = bmData.Stride * bmData.Height;
int offset = bmData.Stride - bmData.Width;
Int32 ImageBaseDataSize = bmData.Width * bmData.Height;
byte[] _BitImageBufferBytes = new byte[bitmapDataSize];
byte[] _ImageBaseDataBufferBytes = new byte[ImageBaseDataSize];
Marshal.Copy(bmData.Scan0, _BitImageBufferBytes, 0, bitmapDataSize);
int bitmapIndex = 0;
int ImageBaseDataIndex = 0;
for (int i = 0; i < bmData.Height; i++)
{
for (int j = 0; j < bmData.Width; j++)
{
_ImageBaseDataBufferBytes[ImageBaseDataIndex++] = _BitImageBufferBytes[bitmapIndex++];
}
bitmapIndex += offset;
}
IntPtr _ImageBaseDataIntptr = Marshal.AllocHGlobal(ImageBaseDataSize);
Marshal.Copy(_ImageBaseDataBufferBytes, 0, _ImageBaseDataIntptr, ImageBaseDataSize);
imageBaseData_V2 = new ImageBaseData_V2(_ImageBaseDataIntptr, (uint)ImageBaseDataSize, bmData.Width, bmData.Height, VMPixelFormat.VM_PIXEL_MONO_08);
Marshal.FreeHGlobal(_ImageBaseDataIntptr);
}
else if (bitPixelFormat == System.Drawing.Imaging.PixelFormat.Format24bppRgb)
{
Int32 bitmapDataSize = bmData.Stride * bmData.Height;
int offset = bmData.Stride - bmData.Width * 3;
Int32 ImageBaseDataSize = bmData.Width * bmData.Height * 3;
byte[] _BitImageBufferBytes = new byte[bitmapDataSize];
byte[] _ImageBaseDataBufferBytes = new byte[ImageBaseDataSize];
Marshal.Copy(bmData.Scan0, _BitImageBufferBytes, 0, bitmapDataSize);
int bitmapIndex = 0;
int ImageBaseDataIndex = 0;
for (int i = 0; i < bmData.Height; i++)
{
for (int j = 0; j < bmData.Width; j++)
{
_ImageBaseDataBufferBytes[ImageBaseDataIndex++] = _BitImageBufferBytes[bitmapIndex + 2];
_ImageBaseDataBufferBytes[ImageBaseDataIndex++] = _BitImageBufferBytes[bitmapIndex + 1];
_ImageBaseDataBufferBytes[ImageBaseDataIndex++] = _BitImageBufferBytes[bitmapIndex];
bitmapIndex += 3;
}
bitmapIndex += offset;
}
IntPtr _ImageBaseDataIntptr = Marshal.AllocHGlobal(ImageBaseDataSize);
Marshal.Copy(_ImageBaseDataBufferBytes, 0, _ImageBaseDataIntptr, ImageBaseDataSize);
imageBaseData_V2 = new ImageBaseData_V2(_ImageBaseDataIntptr, (uint)ImageBaseDataSize, bmData.Width, bmData.Height, VMPixelFormat.VM_PIXEL_RGB24_C3);
Marshal.FreeHGlobal(_ImageBaseDataIntptr);
}
bmpInputImg.UnlockBits(bmData);
return imageBaseData_V2;
}
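For reference, a possible way to call it, mirroring the procedure call from section 1.1 (the file path and the solution path "流程1" are just examples):
// Example usage: load a Bitmap from disk and feed it to the procedure
Bitmap bmp = new Bitmap("test.bmp");
ImageBaseData_V2 image = BitmapToImageBaseData_V2(bmp);
var procedure = VmSolution.Instance["流程1"] as VmProcedure;
procedure.ModuParams.SetInputImage_V2("ImageData", image);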
2.2 Bitmap to image source SDK input (ImageBaseData)
public ImageBaseData BitmapToImageBaseData(Bitmap bmpInputImg)
{
ImageBaseData imageBaseData = null;
System.Drawing.Imaging.PixelFormat bitPixelFormat = bmpInputImg.PixelFormat;
BitmapData bmData = bmpInputImg.LockBits(new Rectangle(0, 0, bmpInputImg.Width, bmpInputImg.Height), ImageLockMode.ReadOnly, bitPixelFormat);
if (bitPixelFormat == System.Drawing.Imaging.PixelFormat.Format8bppIndexed)
{
Int32 bitmapDataSize = bmData.Stride * bmData.Height;
int offset = bmData.Stride - bmData.Width;
Int32 ImageBaseDataSize = bmData.Width * bmData.Height;
byte[] _BitImageBufferBytes = new byte[bitmapDataSize];
byte[] _ImageBaseDataBufferBytes = new byte[ImageBaseDataSize];
Marshal.Copy(bmData.Scan0, _BitImageBufferBytes, 0, bitmapDataSize);
int bitmapIndex = 0;
int ImageBaseDataIndex = 0;
for (int i = 0; i < bmData.Height; i++)
{
for (int j = 0; j < bmData.Width; j++)
{
_ImageBaseDataBufferBytes[ImageBaseDataIndex++] = _BitImageBufferBytes[bitmapIndex++];
}
bitmapIndex += offset;
}
imageBaseData = new ImageBaseData(_ImageBaseDataBufferBytes, (uint)ImageBaseDataSize, bmData.Width, bmData.Height, (int)VMPixelFormat.VM_PIXEL_MONO_08);
}
else if (bitPixelFormat == System.Drawing.Imaging.PixelFormat.Format24bppRgb)
{
Int32 bitmapDataSize = bmData.Stride * bmData.Height;
int offset = bmData.Stride - bmData.Width * 3;
Int32 ImageBaseDataSize = bmData.Width * bmData.Height * 3;
byte[] _BitImageBufferBytes = new byte[bitmapDataSize];
byte[] _ImageBaseDataBufferBytes = new byte[ImageBaseDataSize];
Marshal.Copy(bmData.Scan0, _BitImageBufferBytes, 0, bitmapDataSize);
int bitmapIndex = 0;
int ImageBaseDataIndex = 0;
for (int i = 0; i < bmData.Height; i++)
{
for (int j = 0; j < bmData.Width; j++)
{
_ImageBaseDataBufferBytes[ImageBaseDataIndex++] = _BitImageBufferBytes[bitmapIndex + 2];
_ImageBaseDataBufferBytes[ImageBaseDataIndex++] = _BitImageBufferBytes[bitmapIndex + 1];
_ImageBaseDataBufferBytes[ImageBaseDataIndex++] = _BitImageBufferBytes[bitmapIndex];
bitmapIndex += 3;
}
bitmapIndex += offset;
}
imageBaseData = new ImageBaseData(_ImageBaseDataBufferBytes, (uint)ImageBaseDataSize, bmData.Width, bmData.Height, (int)VMPixelFormat.VM_PIXEL_RGB24_C3);
}
bmpInputImg.UnlockBits(bmData);
return imageBaseData;
}
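Usage mirrors the image source call from section 1.2 (the file path and module path are examples):
Bitmap bmp = new Bitmap("test.bmp");
ImageBaseData image = BitmapToImageBaseData(bmp);
ImageSourceModuleTool imageSourceModuleTool = (ImageSourceModuleTool)VmSolution.Instance["流程1.图像源1"];
imageSourceModuleTool.SetImageData(image);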
2.3 Bitmap to module input (InputImageData)
public InputImageData BitmapToInputImageData(Bitmap bmpInputImg)
{
InputImageData inputImageData = new InputImageData();
System.Drawing.Imaging.PixelFormat bitPixelFormat = bmpInputImg.PixelFormat;
BitmapData bmData = bmpInputImg.LockBits(new Rectangle(0, 0, bmpInputImg.Width, bmpInputImg.Height), ImageLockMode.ReadOnly, bitPixelFormat);
inputImageData.Names.DataName = "InImage";
inputImageData.Names.HeightName = "InImageHeight";
inputImageData.Names.WidthName = "InImageWidth";
inputImageData.Names.PixelFormatName = "InImagePixelFormat";
inputImageData.Width = bmData.Width;
inputImageData.Height = bmData.Height;
if (bitPixelFormat == System.Drawing.Imaging.PixelFormat.Format8bppIndexed)
{
Int32 bitmapDataSize = bmData.Stride * bmData.Height;
int offset = bmData.Stride - bmData.Width;
Int32 ImageBaseDataSize = bmData.Width * bmData.Height;
byte[] _BitImageBufferBytes = new byte[bitmapDataSize];
byte[] _ImageBaseDataBufferBytes = new byte[ImageBaseDataSize];
Marshal.Copy(bmData.Scan0, _BitImageBufferBytes, 0, bitmapDataSize);
int bitmapIndex = 0;
int ImageBaseDataIndex = 0;
for (int i = 0; i < bmData.Height; i++)
{
for (int j = 0; j < bmData.Width; j++)
{
_ImageBaseDataBufferBytes[ImageBaseDataIndex++] = _BitImageBufferBytes[bitmapIndex++];
}
bitmapIndex += offset;
}
inputImageData.Pixelformat = ImagePixelFormat.IMAGE_PIXEL_FORMAT_MONO8;
inputImageData.DataLen = (uint)ImageBaseDataSize;
inputImageData.Data = Marshal.AllocHGlobal(ImageBaseDataSize);
Marshal.Copy(_ImageBaseDataBufferBytes, 0, inputImageData.Data, _ImageBaseDataBufferBytes.Length);
}
else if (bitPixelFormat == System.Drawing.Imaging.PixelFormat.Format24bppRgb)
{
Int32 bitmapDataSize = bmData.Stride * bmData.Height;
int offset = bmData.Stride - bmData.Width * 3;
Int32 ImageBaseDataSize = bmData.Width * bmData.Height * 3;
byte[] _BitImageBufferBytes = new byte[bitmapDataSize];
byte[] _ImageBaseDataBufferBytes = new byte[ImageBaseDataSize];
Marshal.Copy(bmData.Scan0, _BitImageBufferBytes, 0, bitmapDataSize);
int bitmapIndex = 0;
int ImageBaseDataIndex = 0;
for (int i = 0; i < bmData.Height; i++)
{
for (int j = 0; j < bmData.Width; j++)
{
_ImageBaseDataBufferBytes[ImageBaseDataIndex++] = _BitImageBufferBytes[bitmapIndex + 2];
_ImageBaseDataBufferBytes[ImageBaseDataIndex++] = _BitImageBufferBytes[bitmapIndex + 1];
_ImageBaseDataBufferBytes[ImageBaseDataIndex++] = _BitImageBufferBytes[bitmapIndex];
bitmapIndex += 3;
}
bitmapIndex += offset;
}
inputImageData.Pixelformat = ImagePixelFormat.IMAGE_PIXEL_FORMAT_RGB24;
inputImageData.DataLen = (uint)ImageBaseDataSize;
inputImageData.Data = Marshal.AllocHGlobal(ImageBaseDataSize);
Marshal.Copy(_ImageBaseDataBufferBytes, 0, inputImageData.Data, _ImageBaseDataBufferBytes.Length);
}
bmpInputImg.UnlockBits(bmData);
return inputImageData;
}
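Usage mirrors the module call from section 1.3 (the module path and tool type are examples; use the tool type that matches your own module):
Bitmap bmp = new Bitmap("test.bmp");
InputImageData image = BitmapToInputImageData(bmp);
var moduleTool = VmSolution.Instance["流程1.圆查找1"] as IMVSImageEnhanceModuCs.IMVSImageEnhanceModuTool;
moduleTool.ModuParams.SetInputImage(image);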
2.4 Bitmap to operator input (CMvdImage)
public CMvdImage BitmapToCMvdImage(Bitmap bmpInputImg)
{
CMvdImage cMvdImage = new CMvdImage();
System.Drawing.Imaging.PixelFormat bitPixelFormat = bmpInputImg.PixelFormat;
BitmapData bmData = bmpInputImg.LockBits(new Rectangle(0, 0, bmpInputImg.Width, bmpInputImg.Height), ImageLockMode.ReadOnly, bitPixelFormat);
if (bitPixelFormat == System.Drawing.Imaging.PixelFormat.Format8bppIndexed)
{
Int32 bitmapDataSize = bmData.Stride * bmData.Height;
int offset = bmData.Stride - bmData.Width;
Int32 ImageBaseDataSize = bmData.Width * bmData.Height;
byte[] _BitImageBufferBytes = new byte[bitmapDataSize];
byte[] _ImageBaseDataBufferBytes = new byte[ImageBaseDataSize];
Marshal.Copy(bmData.Scan0, _BitImageBufferBytes, 0, bitmapDataSize);
int bitmapIndex = 0;
int ImageBaseDataIndex = 0;
for (int i = 0; i < bmData.Height; i++)
{
for (int j = 0; j < bmData.Width; j++)
{
_ImageBaseDataBufferBytes[ImageBaseDataIndex++] = _BitImageBufferBytes[bitmapIndex++];
}
bitmapIndex += offset;
}
MVD_IMAGE_DATA_INFO stImageData = new MVD_IMAGE_DATA_INFO();
stImageData.stDataChannel[0].nRowStep = (uint)bmData.Width;
stImageData.stDataChannel[0].nLen = (uint)ImageBaseDataSize;
stImageData.stDataChannel[0].nSize = (uint)ImageBaseDataSize;
stImageData.stDataChannel[0].arrDataBytes = _ImageBaseDataBufferBytes;
cMvdImage.InitImage((uint)bmData.Width, (uint)bmData.Height, MVD_PIXEL_FORMAT.MVD_PIXEL_MONO_08, stImageData);
}
else if (bitPixelFormat == System.Drawing.Imaging.PixelFormat.Format24bppRgb)
{
Int32 bitmapDataSize = bmData.Stride * bmData.Height;
int offset = bmData.Stride - bmData.Width * 3;
Int32 ImageBaseDataSize = bmData.Width * bmData.Height * 3;
byte[] _BitImageBufferBytes = new byte[bitmapDataSize];
byte[] _ImageBaseDataBufferBytes = new byte[ImageBaseDataSize];
Marshal.Copy(bmData.Scan0, _BitImageBufferBytes, 0, bitmapDataSize);
int bitmapIndex = 0;
int ImageBaseDataIndex = 0;
for (int i = 0; i < bmData.Height; i++)
{
for (int j = 0; j < bmData.Width; j++)
{
_ImageBaseDataBufferBytes[ImageBaseDataIndex++] = _BitImageBufferBytes[bitmapIndex + 2];
_ImageBaseDataBufferBytes[ImageBaseDataIndex++] = _BitImageBufferBytes[bitmapIndex + 1];
_ImageBaseDataBufferBytes[ImageBaseDataIndex++] = _BitImageBufferBytes[bitmapIndex];
bitmapIndex += 3;
}
bitmapIndex += offset;
}
MVD_IMAGE_DATA_INFO stImageData = new MVD_IMAGE_DATA_INFO();
stImageData.stDataChannel[0].nRowStep = (uint)bmData.Width * 3;
stImageData.stDataChannel[0].nLen = (uint)ImageBaseDataSize;
stImageData.stDataChannel[0].nSize = (uint)ImageBaseDataSize;
stImageData.stDataChannel[0].arrDataBytes = _ImageBaseDataBufferBytes;
cMvdImage.InitImage((uint)bmData.Width, (uint)bmData.Height, MVD_PIXEL_FORMAT.MVD_PIXEL_RGB_RGB24_C3, stImageData);
}
bmpInputImg.UnlockBits(bmData);
return cMvdImage;
}
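A quick usage sketch (the file path is an example); the resulting CMvdImage can then be used as the input image of an operator-SDK algorithm:
Bitmap bmp = new Bitmap("test.bmp");
CMvdImage mvdImage = BitmapToCMvdImage(bmp);
// mvdImage can now be handed to an operator-SDK tool as its input image
Console.WriteLine(mvdImage.Width + " x " + mvdImage.Height + ", " + mvdImage.PixelFormat);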
2.5 Procedure output (ImageBaseData_V2) to Bitmap
public Bitmap ImageBaseData_V2ToBitmap(ImageBaseData_V2 imageBaseData_V2)
{
Bitmap bmpInputImg = null;
byte[] buffer = new byte[imageBaseData_V2.DataLen];
Marshal.Copy(imageBaseData_V2.ImageData, buffer, 0, buffer.Length);
if (VMPixelFormat.VM_PIXEL_MONO_08 == imageBaseData_V2.Pixelformat)
{
Int32 imageWidth = Convert.ToInt32(imageBaseData_V2.Width);
Int32 imageHeight = Convert.ToInt32(imageBaseData_V2.Height);
System.Drawing.Imaging.PixelFormat bitMaPixelFormat = System.Drawing.Imaging.PixelFormat.Format8bppIndexed;
bmpInputImg = new Bitmap(imageWidth, imageHeight, bitMaPixelFormat);
int offset = imageWidth % 4 != 0 ? (4 - imageWidth % 4) : 0;
int strid = imageWidth + offset;
int bitmapBytesLenth = strid * imageHeight;
byte[] bitmapDataBytes = new byte[bitmapBytesLenth];
for (int i = 0; i < imageHeight; i++)
{
for (int j = 0; j < strid; j++)
{
int bitIndex = i * strid + j;
int mvdIndex = i * imageWidth + j;
if (j >= imageWidth)
{
bitmapDataBytes[bitIndex] = 0;
}
else
{
bitmapDataBytes[bitIndex] = buffer[mvdIndex];
}
}
}
BitmapData bitmapData = bmpInputImg.LockBits(new Rectangle(0, 0, imageWidth, imageHeight), ImageLockMode.WriteOnly, bitMaPixelFormat);
IntPtr imageBufferPtr = bitmapData.Scan0;
Marshal.Copy(bitmapDataBytes, 0, imageBufferPtr, bitmapBytesLenth);
bmpInputImg.UnlockBits(bitmapData);
var colorPalettes = bmpInputImg.Palette;
for (int j = 0; j < 256; j++)
{
colorPalettes.Entries[j] = Color.FromArgb(j, j, j);
}
bmpInputImg.Palette = colorPalettes;
}
else if (VMPixelFormat.VM_PIXEL_RGB24_C3 == imageBaseData_V2.Pixelformat)
{
Int32 imageWidth = Convert.ToInt32(imageBaseData_V2.Width);
Int32 imageHeight = Convert.ToInt32(imageBaseData_V2.Height);
System.Drawing.Imaging.PixelFormat bitMaPixelFormat = System.Drawing.Imaging.PixelFormat.Format24bppRgb;
bmpInputImg = new Bitmap(imageWidth, imageHeight, bitMaPixelFormat);
int offset = imageWidth % 4 != 0 ? (4 - (imageWidth * 3) % 4) : 0;
int strid = imageWidth * 3 + offset;
int bitmapBytesLenth = strid * imageHeight;
byte[] bitmapDataBytes = new byte[bitmapBytesLenth];
for (int i = 0; i < imageHeight; i++)
{
for (int j = 0; j < imageWidth; j++)
{
int mvdIndex = i * imageWidth * 3 + j * 3;
int bitIndex = i * strid + j * 3;
bitmapDataBytes[bitIndex] = buffer[mvdIndex + 2];
bitmapDataBytes[bitIndex + 1] = buffer[mvdIndex + 1];
bitmapDataBytes[bitIndex + 2] = buffer[mvdIndex];
}
for (int k = 0; k < offset; k++)
{
bitmapDataBytes[i * strid + imageWidth * 3 + k] = 0;
}
}
BitmapData bitmapData = bmpInputImg.LockBits(new Rectangle(0, 0, imageWidth, imageHeight), ImageLockMode.WriteOnly, bitMaPixelFormat);
IntPtr imageBufferPtr = bitmapData.Scan0;
Marshal.Copy(bitmapDataBytes, 0, imageBufferPtr, bitmapBytesLenth);
bmpInputImg.UnlockBits(bitmapData);
}
return bmpInputImg;
}
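A usage sketch, assuming procedureOutputImage is an ImageBaseData_V2 that has already been obtained from the procedure output through the VM4.2 SDK:
// procedureOutputImage: ImageBaseData_V2 retrieved from the procedure result (VM4.2 SDK)
Bitmap resultBmp = ImageBaseData_V2ToBitmap(procedureOutputImage);
resultBmp.Save("result.bmp", System.Drawing.Imaging.ImageFormat.Bmp);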
3. Converting between Mat and the VM image types
The samples below convert between Mat and the procedure input, Group input, image source SDK input, module input, operator input/output, procedure output, and script image types:
3.1 Mat to procedure input and Group input (ImageBaseData_V2)
public ImageBaseData_V2 MatToImageBaseData_V2(Mat matInputImg)
{
ImageBaseData_V2 imageBaseData_V2 = null;
uint dataLen = (uint)(matInputImg.Width * matInputImg.Height * matInputImg.Channels());
if (1 == matInputImg.Channels())
{
imageBaseData_V2 = new ImageBaseData_V2(matInputImg.Ptr(0), dataLen, matInputImg.Width, matInputImg.Height, VMPixelFormat.VM_PIXEL_MONO_08);
}
else if (3 == matInputImg.Channels())
{
Cv2.CvtColor(matInputImg, matInputImg, ColorConversionCodes.BGR2RGB);
imageBaseData_V2 = new ImageBaseData_V2(matInputImg.Ptr(0), dataLen, matInputImg.Width, matInputImg.Height, VMPixelFormat.VM_PIXEL_RGB24_C3);
}
return imageBaseData_V2;
}
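Usage sketch, assuming OpenCvSharp is referenced (file path and solution path as in section 1.1 are examples):
Mat mat = Cv2.ImRead("test.bmp", ImreadModes.Color);
ImageBaseData_V2 image = MatToImageBaseData_V2(mat);
var procedure = VmSolution.Instance["流程1"] as VmProcedure;
procedure.ModuParams.SetInputImage_V2("ImageData", image);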
3.2 Mat to image source SDK input (ImageBaseData)
public ImageBaseData MatToImageBaseData(Mat matInputImg)
{
ImageBaseData imageBaseData = null;
uint dataLen = (uint)(matInputImg.Width * matInputImg.Height * matInputImg.Channels());
byte[] buffer = new byte[dataLen];
Marshal.Copy(matInputImg.Ptr(0), buffer, 0, buffer.Length);
if (1 == matInputImg.Channels())
{
imageBaseData = new ImageBaseData(buffer, dataLen, matInputImg.Width, matInputImg.Height, (int)VMPixelFormat.VM_PIXEL_MONO_08);
}
else if (3 == matInputImg.Channels())
{
for (int i = 0; i < buffer.Length - 2; i += 3)
{
byte temp = buffer[i];
buffer[i] = buffer[i + 2];
buffer[i + 2] = temp;
}
imageBaseData = new ImageBaseData(buffer, dataLen, matInputImg.Width, matInputImg.Height, (int)VMPixelFormat.VM_PIXEL_RGB24_C3);
}
return imageBaseData;
}
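Usage sketch, mirroring the image source call from section 1.2:
Mat mat = Cv2.ImRead("test.bmp", ImreadModes.Color);
ImageBaseData image = MatToImageBaseData(mat);
ImageSourceModuleTool imageSourceModuleTool = (ImageSourceModuleTool)VmSolution.Instance["流程1.图像源1"];
imageSourceModuleTool.SetImageData(image);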
3.3 Mat to module input (InputImageData)
public InputImageData MatToInputImageData(Mat matInputImg)
{
InputImageData inputImageData = new InputImageData();
uint dataLen = (uint)(matInputImg.Width * matInputImg.Height * matInputImg.Channels());
byte[] buffer = new byte[dataLen];
Marshal.Copy(matInputImg.Ptr(0), buffer, 0, buffer.Length);
inputImageData.Names.DataName = "InImage";
inputImageData.Names.HeightName = "InImageHeight";
inputImageData.Names.WidthName = "InImageWidth";
inputImageData.Names.PixelFormatName = "InImagePixelFormat";
inputImageData.Width = matInputImg.Width;
inputImageData.Height = matInputImg.Height;
inputImageData.DataLen = dataLen;
inputImageData.Data = Marshal.AllocHGlobal((int)dataLen);
if (1 == matInputImg.Channels())
{
inputImageData.Pixelformat = ImagePixelFormat.IMAGE_PIXEL_FORMAT_MONO8;
Marshal.Copy(buffer, 0, inputImageData.Data, buffer.Length);
}
else if (3 == matInputImg.Channels())
{
for (int i = 0; i < buffer.Length - 2; i += 3)
{
byte temp = buffer[i];
buffer[i] = buffer[i + 2];
buffer[i + 2] = temp;
}
inputImageData.Pixelformat = ImagePixelFormat.IMAGE_PIXEL_FORMAT_RGB24;
Marshal.Copy(buffer, 0, inputImageData.Data, buffer.Length);
}
return inputImageData;
}
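Usage sketch, mirroring the module call from section 1.3 (module path and tool type are examples):
Mat mat = Cv2.ImRead("test.bmp", ImreadModes.Grayscale);
InputImageData image = MatToInputImageData(mat);
var moduleTool = VmSolution.Instance["流程1.圆查找1"] as IMVSImageEnhanceModuCs.IMVSImageEnhanceModuTool;
moduleTool.ModuParams.SetInputImage(image);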
3.4 Converting between Mat and the operator image (CMvdImage)
public CMvdImage MatToCMvdImage(Mat matInputImg)
{
CMvdImage cMvdImage = new CMvdImage();
uint dataLen = (uint)(matInputImg.Width * matInputImg.Height * matInputImg.Channels());
byte[] buffer = new byte[dataLen];
Marshal.Copy(matInputImg.Ptr(0), buffer, 0, buffer.Length);
if (1 == matInputImg.Channels())
{
MVD_IMAGE_DATA_INFO stImageData = new MVD_IMAGE_DATA_INFO();
stImageData.stDataChannel[0].nRowStep = (uint)matInputImg.Width;
stImageData.stDataChannel[0].nLen = dataLen;
stImageData.stDataChannel[0].nSize = dataLen;
stImageData.stDataChannel[0].arrDataBytes = buffer;
cMvdImage.InitImage((uint)matInputImg.Width, (uint)matInputImg.Height, MVD_PIXEL_FORMAT.MVD_PIXEL_MONO_08, stImageData);
}
else if (3 == matInputImg.Channels())
{
for (int i = 0; i < buffer.Length - 2; i += 3)
{
byte temp = buffer[i];
buffer[i] = buffer[i + 2];
buffer[i + 2] = temp;
}
MVD_IMAGE_DATA_INFO stImageData = new MVD_IMAGE_DATA_INFO();
stImageData.stDataChannel[0].nRowStep = (uint)matInputImg.Width * 3;
stImageData.stDataChannel[0].nLen = dataLen;
stImageData.stDataChannel[0].nSize = dataLen;
stImageData.stDataChannel[0].arrDataBytes = buffer;
cMvdImage.InitImage((uint)matInputImg.Width, (uint)matInputImg.Height, MVD_PIXEL_FORMAT.MVD_PIXEL_RGB_RGB24_C3, stImageData);
}
return cMvdImage;
}
public Mat CMvdImageToMat(CMvdImage cMvdImage)
{
Mat matInputImg = new Mat();
byte[] buffer = cMvdImage.GetImageData(0).arrDataBytes;
if (MVD_PIXEL_FORMAT.MVD_PIXEL_MONO_08 == cMvdImage.PixelFormat)
{
matInputImg.Create((int)cMvdImage.Height, (int)cMvdImage.Width, MatType.CV_8UC1);
Marshal.Copy(buffer, 0, matInputImg.Ptr(0), buffer.Length);
}
else if (MVD_PIXEL_FORMAT.MVD_PIXEL_RGB_RGB24_C3 == cMvdImage.PixelFormat)
{
matInputImg.Create((int)cMvdImage.Height, (int)cMvdImage.Width, MatType.CV_8UC3);
for (int i = 0; i < buffer.Length - 2; i += 3)
{
byte temp = buffer[i];
buffer[i] = buffer[i + 2];
buffer[i + 2] = temp;
}
Marshal.Copy(buffer, 0, matInputImg.Ptr(0), buffer.Length);
}
return matInputImg;
}
3.5 Procedure output (ImageBaseData_V2) to Mat
public Mat ImageBaseData_V2ToMat(ImageBaseData_V2 imageBaseData_V2)
{
Mat matInputImg = new Mat();
byte[] buffer = new byte[imageBaseData_V2.DataLen];
Marshal.Copy(imageBaseData_V2.ImageData, buffer, 0, buffer.Length);
if (VMPixelFormat.VM_PIXEL_MONO_08 == imageBaseData_V2.Pixelformat)
{
matInputImg.Create(imageBaseData_V2.Height, imageBaseData_V2.Width, MatType.CV_8UC1);
Marshal.Copy(buffer, 0, matInputImg.Ptr(0), buffer.Length);
}
else if (VMPixelFormat.VM_PIXEL_RGB24_C3 == imageBaseData_V2.Pixelformat)
{
matInputImg.Create(imageBaseData_V2.Height, imageBaseData_V2.Width, MatType.CV_8UC3);
// VM data is RGB while Mat expects BGR, so swap the R and B bytes of every pixel
for (int i = 0; i < buffer.Length - 2; i += 3)
{
byte temp = buffer[i];
buffer[i] = buffer[i + 2];
buffer[i + 2] = temp;
}
Marshal.Copy(buffer, 0, matInputImg.Ptr(0), buffer.Length);
}
return matInputImg;
}
3.6 Converting between Mat and the script image (ImageData)
public ImageData MatToImageData(Mat matImage)
{
ImageData imgOut = new ImageData();
byte[] buffer = new Byte[matImage.Width * matImage.Height * matImage.Channels()];
Marshal.Copy(matImage.Ptr(0), buffer, 0, buffer.Length);
if (1 == matImage.Channels())
{
imgOut.Buffer = buffer;
imgOut.Width = matImage.Width;
imgOut.Heigth = matImage.Height;
imgOut.PixelFormat = ImagePixelFormate.MONO8;
}
else if (3 == matImage.Channels())
{
for (int i = 0; i < buffer.Length - 2; i += 3)
{
byte temp = buffer[i];
buffer[i] = buffer[i + 2];
buffer[i + 2] = temp;
}
imgOut.Buffer = buffer;
imgOut.Width = matImage.Width;
imgOut.Heigth = matImage.Height;
imgOut.PixelFormat = ImagePixelFormate.RGB24;
}
return imgOut;
}
public Mat ImageDataToMat(ImageData img)
{
Mat matInputImg = new Mat();
if (ImagePixelFormate.MONO8 == img.PixelFormat)
{
matInputImg = Mat.Zeros(img.Heigth, img.Width, MatType.CV_8UC1);
Marshal.Copy(img.Buffer, 0, matInputImg.Ptr(0), img.Buffer.Length);
}
else if (ImagePixelFormate.RGB24 == img.PixelFormat)
{
matInputImg = Mat.Zeros(img.Heigth, img.Width, MatType.CV_8UC3);
Marshal.Copy(img.Buffer, 0, matInputImg.Ptr(0), img.Buffer.Length);
// the script image data is RGB, Mat expects BGR
Cv2.CvtColor(matInputImg, matInputImg, ColorConversionCodes.RGB2BGR);
}
return matInputImg;
}
4. Converting between Halcon images and the VM image types
The samples below convert between Halcon images and the procedure input, Group input, image source SDK input, module input, operator input/output, procedure output, and script image types:
4.1 Halcon image to procedure input and Group input (ImageBaseData_V2)
public static ImageBaseData_V2 HalconImageToImageBaseDataV2(HObject hImageObj)
{
try
{
ImageBaseData_V2 imageBaseData = new ImageBaseData_V2();
HTuple imageWidth = 0;
HTuple imageHeight = 0;
HTuple objClass = hImageObj.GetObjClass();
if (objClass.S.Equals("image"))
{
HTuple imageType;
HOperatorSet.GetImageType(hImageObj, out imageType);
if (imageType.S.Equals("byte"))
{
HTuple channels = 0;
HOperatorSet.CountChannels(hImageObj, out channels);
if (channels.I == 1)
{
HTuple imagePointer;
HOperatorSet.GetImagePointer1(hImageObj, out imagePointer, out imageType, out imageWidth, out imageHeight);
imageBaseData.Width = imageWidth.I;
imageBaseData.Height = imageHeight.I;
imageBaseData.Pixelformat = VMPixelFormat.VM_PIXEL_MONO_08;
int stride = imageWidth.I;
if (stride % 4 != 0)
{
stride += 4 - stride % 4;
}
imageBaseData.DataLen = (uint)(stride * imageBaseData.Height);
imageBaseData.ImageData = imagePointer;
}
else if (channels.I == 3)
{
HTuple redChannel;
HTuple greenChannel;
HTuple blueChannel;
HOperatorSet.GetImagePointer3(hImageObj, out redChannel, out greenChannel, out blueChannel, out imageType, out imageWidth, out imageHeight);
imageBaseData.Width = imageWidth.I;
imageBaseData.Height = imageHeight.I;
imageBaseData.Pixelformat = VMPixelFormat.VM_PIXEL_RGB24_C3;
int stride = imageWidth.I;
if (stride % 4 != 0)
{
stride += 4 - stride % 4;
}
imageBaseData.DataLen = (uint)(stride * imageBaseData.Height * 3);
imageBaseData.ImageData = redChannel;
}
else
{
hImageObj?.Dispose();
throw new Exception("不支持单通道,三通道以外的图像");
}
}
else
{
hImageObj?.Dispose();
throw new Exception("不支持8bit以外的位深度图像");
}
}
else
{
hImageObj?.Dispose();
throw new Exception("HObject非图像类型对象");
}
return imageBaseData;
}
catch (Exception ex)
{
hImageObj?.Dispose();
throw new Exception(ex.Message);
}
}
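Usage sketch: read a Halcon image from disk and feed it to the procedure as in section 1.1 (the paths are examples):
HObject hImage;
HOperatorSet.ReadImage(out hImage, "test.bmp");
ImageBaseData_V2 image = HalconImageToImageBaseDataV2(hImage);
var procedure = VmSolution.Instance["流程1"] as VmProcedure;
procedure.ModuParams.SetInputImage_V2("ImageData", image);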
4.2 Halcon image to image source SDK input (ImageBaseData)
public static ImageBaseData HalconImageToImageBaseData(HObject hImageObj)
{
try
{
ImageBaseData imageBaseData = new ImageBaseData();
HTuple imageWidth = 0;
HTuple imageHeight = 0;
HTuple objClass = hImageObj.GetObjClass();
if (objClass.S.Equals("image"))
{
HTuple imageType;
HOperatorSet.GetImageType(hImageObj, out imageType);
if (imageType.S.Equals("byte"))
{
HTuple channels = 0;
HOperatorSet.CountChannels(hImageObj, out channels);
if (channels.I == 1)
{
HTuple imagePointer;
HOperatorSet.GetImagePointer1(hImageObj, out imagePointer, out imageType, out imageWidth, out imageHeight);
imageBaseData.Width = imageWidth.I;
imageBaseData.Height = imageHeight.I;
imageBaseData.Pixelformat = (int)VMPixelFormat.VM_PIXEL_MONO_08;
int stride = imageWidth.I;
if (stride % 4 != 0)
{
stride += 4 - stride % 4;
}
imageBaseData.DataLen = (uint)(stride * imageBaseData.Height);
imageBaseData.ImageData = new byte[stride * imageHeight.I];
Marshal.Copy(imagePointer, imageBaseData.ImageData, 0, stride * imageHeight.I);
}
else if (channels.I == 3)
{
HTuple redChannel;
HTuple greenChannel;
HTuple blueChannel;
HOperatorSet.GetImagePointer3(hImageObj, out redChannel, out greenChannel, out blueChannel, out imageType, out imageWidth, out imageHeight);
imageBaseData.Width = imageWidth.I;
imageBaseData.Height = imageHeight.I;
imageBaseData.Pixelformat = (int)VMPixelFormat.VM_PIXEL_RGB24_C3;
int stride = imageWidth.I;
if (stride % 4 != 0)
{
stride += 4 - stride % 4;
}
imageBaseData.DataLen = (uint)(stride * imageBaseData.Height * 3);
imageBaseData.ImageData = new byte[stride * imageHeight.I * 3];
Marshal.Copy(redChannel, imageBaseData.ImageData, 0, stride * imageHeight.I * 3);
}
else
{
hImageObj?.Dispose();
throw new Exception("不支持单通道,三通道以外的图像");
}
}
else
{
hImageObj?.Dispose();
throw new Exception("不支持8bit以外的位深度图像");
}
}
else
{
hImageObj?.Dispose();
throw new Exception("HObject非图像类型对象");
}
return imageBaseData;
}
catch (Exception ex)
{
hImageObj?.Dispose();
throw new Exception(ex.Message);
}
}
4.3 Halcon image to module input (InputImageData)
public static InputImageData HalconImageToModuleInputImage(HObject hImageObj)
{
try
{
InputImageData inputImageData = new InputImageData();
HTuple imageWidth = 0;
HTuple imageHeight = 0;
HTuple objClass = hImageObj.GetObjClass();
if (objClass.S.Equals("image"))
{
HTuple imageType;
HOperatorSet.GetImageType(hImageObj, out imageType);
if (imageType.S.Equals("byte"))
{
HTuple channels = 0;
HOperatorSet.CountChannels(hImageObj, out channels);
if (channels.I == 1)
{
HTuple imagePointer;
HOperatorSet.GetImagePointer1(hImageObj, out imagePointer, out imageType, out imageWidth, out imageHeight);
inputImageData.Width = imageWidth.I;
inputImageData.Height = imageHeight.I;
inputImageData.Pixelformat = ImagePixelFormat.IMAGE_PIXEL_FORMAT_MONO8;
int stride = imageWidth.I;
if (stride % 4 != 0)
{
stride += 4 - stride % 4;
}
inputImageData.DataLen = (uint)(stride * inputImageData.Height);
inputImageData.Data = imagePointer;
inputImageData.Names.DataName = "InImage";
inputImageData.Names.WidthName = "InImageWidth";
inputImageData.Names.HeightName = "InImageHeight";
inputImageData.Names.PixelFormatName = "InImagePixelFormat";
}
else if (channels.I == 3)
{
HTuple redChannel;
HTuple greenChannel;
HTuple blueChannel;
HOperatorSet.GetImagePointer3(hImageObj, out redChannel, out greenChannel, out blueChannel, out imageType, out imageWidth, out imageHeight);
inputImageData.Width = imageWidth.I;
inputImageData.Height = imageHeight.I;
inputImageData.Pixelformat = ImagePixelFormat.IMAGE_PIXEL_FORMAT_RGB24;
int stride = imageWidth.I;
if (stride % 4 != 0)
{
stride += 4 - stride % 4;
}
inputImageData.DataLen = (uint)(stride * inputImageData.Height * 3);
inputImageData.Data = redChannel;
inputImageData.Names.DataName = "InImage";
inputImageData.Names.WidthName = "InImageWidth";
inputImageData.Names.HeightName = "InImageHeight";
inputImageData.Names.PixelFormatName = "InImagePixelFormat";
}
else
{
hImageObj?.Dispose();
throw new Exception("不支持单通道,三通道以外的图像");
}
}
else
{
hImageObj?.Dispose();
throw new Exception("不支持8bit以外的位深度图像");
}
}
else
{
hImageObj?.Dispose();
throw new Exception("HObject非图像类型对象");
}
return inputImageData;
}
catch (Exception ex)
{
hImageObj?.Dispose();
throw new Exception(ex.Message);
}
}
4.4 Converting between Halcon images and the operator image (CMvdImage)
public static CMvdImage HalconImageToCMvdImage(HObject hImageObj)
{
try
{
CMvdImage image = new CMvdImage();
HTuple imageWidth = 0;
HTuple imageHeight = 0;
HTuple objClass = hImageObj.GetObjClass();
if (objClass.S.Equals("image"))
{
HTuple imageType;
HOperatorSet.GetImageType(hImageObj, out imageType);
if (imageType.S.Equals("byte"))
{
HTuple channels = 0;
HOperatorSet.CountChannels(hImageObj, out channels);
if (channels.I == 1)
{
HTuple imagePointer;
HOperatorSet.GetImagePointer1(hImageObj, out imagePointer, out imageType, out imageWidth, out imageHeight);
int stride = imageWidth.I;
if (stride % 4 != 0)
{
stride += 4 - stride % 4;
}
MVD_IMAGE_DATA_INFO imageDataInfo = new MVD_IMAGE_DATA_INFO();
imageDataInfo.stDataChannel = new MVD_DATA_CHANNEL_INFO[4];
imageDataInfo.stDataChannel[0] = new MVD_DATA_CHANNEL_INFO();
imageDataInfo.stDataChannel[0].arrDataBytes = new byte[stride * imageHeight.I];
imageDataInfo.stDataChannel[0].nRowStep = (uint)stride;
imageDataInfo.stDataChannel[0].nSize = (uint)(stride * imageHeight.I);
imageDataInfo.stDataChannel[0].nLen = (uint)(imageWidth.I * imageHeight.I);
Marshal.Copy(imagePointer, imageDataInfo.stDataChannel[0].arrDataBytes, 0, stride * imageHeight.I);
image.InitImage((uint)imageWidth.I, (uint)imageHeight.I, MVD_PIXEL_FORMAT.MVD_PIXEL_MONO_08, imageDataInfo);
}
else if (channels.I == 3)
{
HTuple redChannel;
HTuple greenChannel;
HTuple blueChannel;
HOperatorSet.GetImagePointer3(hImageObj, out redChannel, out greenChannel, out blueChannel, out imageType, out imageWidth, out imageHeight);
int stride = imageWidth.I;
if (stride % 4 != 0)
{
stride += 4 - stride % 4;
}
MVD_IMAGE_DATA_INFO imageDataInfo = new MVD_IMAGE_DATA_INFO();
imageDataInfo.stDataChannel = new MVD_DATA_CHANNEL_INFO[4];
imageDataInfo.stDataChannel[0] = new MVD_DATA_CHANNEL_INFO();
imageDataInfo.stDataChannel[0].arrDataBytes = new byte[stride * imageHeight.I];
imageDataInfo.stDataChannel[0].nRowStep = (uint)stride;
imageDataInfo.stDataChannel[0].nSize = (uint)(stride * imageHeight.I);
imageDataInfo.stDataChannel[0].nLen = (uint)(imageWidth.I * imageHeight.I);
Marshal.Copy(redChannel, imageDataInfo.stDataChannel[0].arrDataBytes, 0, stride * imageHeight.I);
imageDataInfo.stDataChannel[1] = new MVD_DATA_CHANNEL_INFO();
imageDataInfo.stDataChannel[1].arrDataBytes = new byte[stride * imageHeight.I];
imageDataInfo.stDataChannel[1].nRowStep = (uint)stride;
imageDataInfo.stDataChannel[1].nSize = (uint)(stride * imageHeight.I);
imageDataInfo.stDataChannel[1].nLen = (uint)(imageWidth.I * imageHeight.I);
Marshal.Copy(greenChannel, imageDataInfo.stDataChannel[1].arrDataBytes, 0, stride * imageHeight.I);
imageDataInfo.stDataChannel[2] = new MVD_DATA_CHANNEL_INFO();
imageDataInfo.stDataChannel[2].arrDataBytes = new byte[stride * imageHeight.I];
imageDataInfo.stDataChannel[2].nRowStep = (uint)stride;
imageDataInfo.stDataChannel[2].nSize = (uint)(stride * imageHeight.I);
imageDataInfo.stDataChannel[2].nLen = (uint)(imageWidth.I * imageHeight.I);
Marshal.Copy(blueChannel, imageDataInfo.stDataChannel[2].arrDataBytes, 0, stride * imageHeight.I);
image.InitImage((uint)imageWidth.I, (uint)imageHeight.I, MVD_PIXEL_FORMAT.MVD_PIXEL_RGB_RGB24_C3, imageDataInfo);
}
else
{
hImageObj?.Dispose();
throw new Exception("不支持单通道,三通道以外的图像");
}
}
else
{
hImageObj?.Dispose();
throw new Exception("不支持8bit以外的位深度图像");
}
}
else
{
hImageObj?.Dispose();
throw new Exception("HObject非图像类型对象");
}
return image;
}
catch (Exception ex)
{
hImageObj?.Dispose();
throw new Exception(ex.Message);
}
}
public static HObject CMvdImageToHalconImage(CMvdImage image)
{
IntPtr imagePointer = IntPtr.Zero;
IntPtr redChannel = IntPtr.Zero;
IntPtr greenChannel = IntPtr.Zero;
IntPtr blueChannel = IntPtr.Zero;
try
{
HObject imageObj = new HObject();
HTuple width = image.Width;
HTuple height = image.Height;
if (image.PixelFormat == MVD_PIXEL_FORMAT.MVD_PIXEL_MONO_08)
{
imagePointer = Marshal.AllocHGlobal(image.GetImageData(0).arrDataBytes.Length);
Marshal.Copy(image.GetImageData(0).arrDataBytes, 0, imagePointer, image.GetImageData(0).arrDataBytes.Length);
HOperatorSet.GenImage1(out imageObj, "byte", width, height, imagePointer);
}
else if (image.PixelFormat == MVD_PIXEL_FORMAT.MVD_PIXEL_RGB_RGB24_C3)
{
redChannel = Marshal.AllocHGlobal(image.GetImageData(0).arrDataBytes.Length);
greenChannel = Marshal.AllocHGlobal(image.GetImageData(1).arrDataBytes.Length);
blueChannel = Marshal.AllocHGlobal(image.GetImageData(2).arrDataBytes.Length);
Marshal.Copy(image.GetImageData(0).arrDataBytes, 0, redChannel, image.GetImageData(0).arrDataBytes.Length);
Marshal.Copy(image.GetImageData(1).arrDataBytes, 0, greenChannel, image.GetImageData(1).arrDataBytes.Length);
Marshal.Copy(image.GetImageData(2).arrDataBytes, 0, blueChannel, image.GetImageData(2).arrDataBytes.Length);
HOperatorSet.GenImage3(out imageObj, "byte", width, height, redChannel, greenChannel, blueChannel);
}
return imageObj;
}
catch (Exception ex)
{
Marshal.FreeHGlobal(imagePointer);
Marshal.FreeHGlobal(redChannel);
Marshal.FreeHGlobal(greenChannel);
Marshal.FreeHGlobal(blueChannel);
throw new Exception(ex.Message);
}
}
4.5 Procedure output (ImageBaseData_V2) to Halcon image
public static HObject ImageBaseDataV2ToHalconImage(ImageBaseData_V2 image)
{
try
{
HObject imageObj = new HObject();
HTuple width = image.Width;
HTuple height = image.Height;
if (image.Pixelformat == VMPixelFormat.VM_PIXEL_MONO_08)
{
HOperatorSet.GenImage1(out imageObj, "byte", width, height, image.ImageData);
}
else if (image.Pixelformat == VMPixelFormat.VM_PIXEL_RGB24_C3)
{
// GenImage3 expects three planar channel pointers, so split the packed RGB data into R, G and B planes first
byte[] packedBuffer = new byte[image.DataLen];
Marshal.Copy(image.ImageData, packedBuffer, 0, packedBuffer.Length);
byte[] redBuffer = new byte[packedBuffer.Length / 3];
byte[] greenBuffer = new byte[packedBuffer.Length / 3];
byte[] blueBuffer = new byte[packedBuffer.Length / 3];
for (int i = 0, index = 0; i < packedBuffer.Length - 2; i += 3, index++)
{
redBuffer[index] = packedBuffer[i];
greenBuffer[index] = packedBuffer[i + 1];
blueBuffer[index] = packedBuffer[i + 2];
}
IntPtr redChannel = Marshal.AllocHGlobal(redBuffer.Length);
IntPtr greenChannel = Marshal.AllocHGlobal(greenBuffer.Length);
IntPtr blueChannel = Marshal.AllocHGlobal(blueBuffer.Length);
Marshal.Copy(redBuffer, 0, redChannel, redBuffer.Length);
Marshal.Copy(greenBuffer, 0, greenChannel, greenBuffer.Length);
Marshal.Copy(blueBuffer, 0, blueChannel, blueBuffer.Length);
HOperatorSet.GenImage3(out imageObj, "byte", width, height, redChannel, greenChannel, blueChannel);
}
return imageObj;
}
catch (Exception ex)
{
throw new Exception(ex.Message);
}
}
4.6 Converting between Halcon images and the script image (ImageData)
public static ImageData HalconImageToImageData(HObject hImageObj)
{
try
{
ImageData imageData = new ImageData();
HTuple imageWidth = 0;
HTuple imageHeight = 0;
HTuple objClass = hImageObj.GetObjClass();
if (objClass.S.Equals("image"))
{
HTuple imageType;
HOperatorSet.GetImageType(hImageObj, out imageType);
if (imageType.S.Equals("byte"))
{
HTuple channels = 0;
HOperatorSet.CountChannels(hImageObj, out channels);
if (channels.I == 1)
{
HTuple imagePointer;
HOperatorSet.GetImagePointer1(hImageObj, out imagePointer, out imageType, out imageWidth, out imageHeight);
imageData.Width = imageWidth.I;
imageData.Heigth = imageHeight.I;
imageData.PixelFormat = ImagePixelFormate.MONO8;
int stride = imageWidth.I;
if (stride % 4 != 0)
{
stride += 4 - stride % 4;
}
imageData.Buffer = new byte[stride * imageHeight.I];
Marshal.Copy(imagePointer, imageData.Buffer, 0, stride * imageHeight.I);
}
else if (channels.I == 3)
{
HTuple redChannel;
HTuple greenChannel;
HTuple blueChannel;
HOperatorSet.GetImagePointer3(hImageObj, out redChannel, out greenChannel, out blueChannel, out imageType, out imageWidth, out imageHeight);
imageData.Width = imageWidth.I;
imageData.Heigth = imageHeight.I;
imageData.PixelFormat = ImagePixelFormate.RGB24;
int stride = imageWidth.I;
if (stride % 4 != 0)
{
stride += 4 - stride % 4;
}
imageData.Buffer = new byte[stride * imageHeight.I * 3];
Marshal.Copy(redChannel, imageData.Buffer, 0, stride * imageHeight.I * 3);
}
else
{
hImageObj?.Dispose();
throw new Exception("不支持单通道,三通道以外的图像");
}
}
else
{
hImageObj?.Dispose();
throw new Exception("不支持8bit以外的位深度图像");
}
}
else
{
hImageObj?.Dispose();
throw new Exception("HObject非图像类型对象");
}
return imageData;
}
catch (Exception ex)
{
hImageObj?.Dispose();
throw new Exception(ex.Message);
}
}
public static HObject ImageDataToHalconImage(ImageData image)
{
IntPtr imagePointer = IntPtr.Zero;
IntPtr redChannel = IntPtr.Zero;
IntPtr greenChannel = IntPtr.Zero;
IntPtr blueChannel = IntPtr.Zero;
try
{
HObject imageObj = new HObject();
HTuple width = image.Width;
HTuple height = image.Heigth;
if (image.PixelFormat == ImagePixelFormate.MONO8)
{
imagePointer = Marshal.AllocHGlobal(image.Buffer.Length);
Marshal.Copy(image.Buffer, 0, imagePointer, image.Buffer.Length);
HOperatorSet.GenImage1(out imageObj, "byte", width, height, imagePointer);
}
else if (image.PixelFormat == ImagePixelFormate.RGB24)
{
byte[] imageRedBuffer = new byte[image.Buffer.Length / 3];
byte[] imageGreBuffer = new byte[image.Buffer.Length / 3];
byte[] imageBluBuffer = new byte[image.Buffer.Length / 3];
int index = 0;
for (int i = 0; i < image.Buffer.Length; index++, i += 3)
{
imageRedBuffer[index] = image.Buffer[i];
imageGreBuffer[index] = image.Buffer[i + 1];
imageBluBuffer[index] = image.Buffer[i + 2];
}
redChannel = Marshal.AllocHGlobal(imageRedBuffer.Length);
greenChannel = Marshal.AllocHGlobal(imageGreBuffer.Length);
blueChannel = Marshal.AllocHGlobal(imageBluBuffer.Length);
Marshal.Copy(imageRedBuffer, 0, redChannel, imageRedBuffer.Length);
Marshal.Copy(imageGreBuffer, 0, greenChannel, imageGreBuffer.Length);
Marshal.Copy(imageBluBuffer, 0, blueChannel, imageBluBuffer.Length);
HOperatorSet.GenImage3(out imageObj, "byte", width, height, redChannel, greenChannel, blueChannel);
}
return imageObj;
}
catch (Exception ex)
{
Marshal.FreeHGlobal(imagePointer);
Marshal.FreeHGlobal(redChannel);
Marshal.FreeHGlobal(greenChannel);
Marshal.FreeHGlobal(blueChannel);
throw new Exception(ex.Message);
}
}
5. Procedure images and operator images
In VM SDK development the procedure input and output images are ImageBaseData_V2, while in operator SDK development the operator input and output images are CMvdImage; the two can be converted into each other.
5.1 Procedure image to operator image
public CMvdImage ImageBaseData_V2ToCMvdImage(ImageBaseData_V2 ImageBaseDataV2)
{
VisionDesigner.CMvdImage cmvdImage = new VisionDesigner.CMvdImage();
VisionDesigner.MVD_IMAGE_DATA_INFO stImageData = new VisionDesigner.MVD_IMAGE_DATA_INFO();
if (VMPixelFormat.VM_PIXEL_MONO_08 == ImageBaseDataV2.Pixelformat)
{
stImageData.stDataChannel[0].nRowStep = (uint)ImageBaseDataV2.Width;
stImageData.stDataChannel[0].nLen = (uint)(ImageBaseDataV2.Width * ImageBaseDataV2.Height);
stImageData.stDataChannel[0].nSize = (uint)(ImageBaseDataV2.Width * ImageBaseDataV2.Height);
byte[] m_BufForDriver1 = new byte[ImageBaseDataV2.Width * ImageBaseDataV2.Height];
Marshal.Copy(ImageBaseDataV2.ImageData, m_BufForDriver1, 0, ((int)ImageBaseDataV2.Width * ImageBaseDataV2.Height));
stImageData.stDataChannel[0].arrDataBytes = m_BufForDriver1;
cmvdImage.InitImage((uint)ImageBaseDataV2.Width, (uint)ImageBaseDataV2.Height, MVD_PIXEL_FORMAT.MVD_PIXEL_MONO_08, stImageData);
}
else if (VMPixelFormat.VM_PIXEL_RGB24_C3 == ImageBaseDataV2.Pixelformat)
{
stImageData.stDataChannel[0].nRowStep = (uint)ImageBaseDataV2.Width * 3;
stImageData.stDataChannel[0].nLen = (uint)(ImageBaseDataV2.Width * ImageBaseDataV2.Height * 3);
stImageData.stDataChannel[0].nSize = (uint)(ImageBaseDataV2.Width * ImageBaseDataV2.Height * 3);
byte[] m_BufForDriver1 = new byte[3 * (ImageBaseDataV2.Width * ImageBaseDataV2.Height)];
Marshal.Copy(ImageBaseDataV2.ImageData, m_BufForDriver1, 0, ((int)(ImageBaseDataV2.Width * ImageBaseDataV2.Height) * 3));
stImageData.stDataChannel[0].arrDataBytes = m_BufForDriver1;
cmvdImage.InitImage((uint)ImageBaseDataV2.Width, (uint)ImageBaseDataV2.Height, MVD_PIXEL_FORMAT.MVD_PIXEL_RGB_RGB24_C3, stImageData);
}
return cmvdImage;
}
5.2 Operator image to procedure image
public ImageBaseData_V2 CMvdImageToImageBaseData_V2(CMvdImage cmvdImage)
{
VM.PlatformSDKCS.ImageBaseData_V2 ImageBaseDataV2 = null;
if (MVD_PIXEL_FORMAT.MVD_PIXEL_MONO_08 == cmvdImage.PixelFormat)
{
var cmvdImageData = cmvdImage.GetImageData();
IntPtr imagedata = Marshal.AllocHGlobal(cmvdImageData.stDataChannel[0].arrDataBytes.Length);
Marshal.Copy(cmvdImageData.stDataChannel[0].arrDataBytes, 0, imagedata, cmvdImageData.stDataChannel[0].arrDataBytes.Length);
ImageBaseDataV2 = new ImageBaseData_V2(imagedata, (uint)cmvdImageData.stDataChannel[0].arrDataBytes.Length, (int)cmvdImage.Width, (int)cmvdImage.Height, VMPixelFormat.VM_PIXEL_MONO_08);
}
else if (MVD_PIXEL_FORMAT.MVD_PIXEL_RGB_RGB24_C3 == cmvdImage.PixelFormat)
{
var cmvdImageData = cmvdImage.GetImageData();
IntPtr imagedata = Marshal.AllocHGlobal(cmvdImageData.stDataChannel[0].arrDataBytes.Length);
Marshal.Copy(cmvdImageData.stDataChannel[0].arrDataBytes, 0, imagedata, cmvdImageData.stDataChannel[0].arrDataBytes.Length);
ImageBaseDataV2 = new ImageBaseData_V2(imagedata, (uint)cmvdImageData.stDataChannel[0].arrDataBytes.Length, (int)cmvdImage.Width, (int)cmvdImage.Height, VMPixelFormat.VM_PIXEL_RGB24_C3);
}
return ImageBaseDataV2;
}
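A round-trip sketch, assuming flowImage is an ImageBaseData_V2 obtained from a procedure: convert it to a CMvdImage, run operator-SDK processing on it, then convert the result back:
// flowImage: ImageBaseData_V2 obtained from the VM procedure (VM4.2 SDK)
CMvdImage operatorImage = ImageBaseData_V2ToCMvdImage(flowImage);
// ... run operator-SDK algorithms on operatorImage here ...
ImageBaseData_V2 backToFlow = CMvdImageToImageBaseData_V2(operatorImage);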
6. Converting the algorithm module image to/from Mat, Halcon, and operator images (C++)
When developing a custom algorithm module, the algorithm's image type in the C++ project is HKA_IMAGE; the samples below convert it to and from Mat, Halcon images, and the operator image.
6.1 Converting between HKA_IMAGE and Mat
Mat CAlgorithmModule::HKAImageToMat(HKA_IMAGE inputimage)
{
Mat mat, mat1;
if (inputimage.format == HKA_IMG_MONO_08)
{
mat = Mat(inputimage.height, inputimage.width, CV_8UC1, inputimage.data[0]);
}
else if (inputimage.format == HKA_IMG_RGB_RGB24_C3)
{
mat1 = Mat(inputimage.height, inputimage.width, CV_8UC3, inputimage.data[0]);
cvtColor(mat1, mat, COLOR_RGB2BGR);
}
return mat;
}
HKA_IMAGE CAlgorithmModule::MatToHKAImage(Mat mat)
{
HKA_IMAGE inputimage;
Mat mat1;
if (mat.channels() == 1)
{
inputimage = { HKA_IMG_MONO_08, 0 };
inputimage.width = mat.cols;
inputimage.height = mat.rows;
inputimage.format = HKA_IMG_MONO_08;
inputimage.step[0] = mat.cols;
inputimage.data[0] = (char*)malloc(inputimage.width * inputimage.height);
if (inputimage.data[0] != NULL)
{
memcpy(inputimage.data[0], mat.data, inputimage.width * inputimage.height);
}
}
else if (mat.channels() == 3)
{
cvtColor(mat, mat1, COLOR_BGR2RGB);
inputimage = { HKA_IMG_RGB_RGB24_C3, 0 };
inputimage.width = mat1.cols;
inputimage.height = mat1.rows;
inputimage.format = HKA_IMG_RGB_RGB24_C3;
inputimage.step[0] = mat1.cols * 3;
inputimage.data[0] = (char*)malloc(inputimage.width * inputimage.height * 3);
if (inputimage.data[0] != NULL)
{
memcpy(inputimage.data[0], mat1.data, inputimage.width * inputimage.height * 3);
}
}
return inputimage;
}
6.2 Converting between HKA_IMAGE and Halcon images
HObject CAlgorithmModule::HKAImageToHImage(HKA_IMAGE hka_image)
{
HObject h_image;
GenEmptyObj(&h_image);
if (HKA_IMG_MONO_08 == hka_image.format)
{
unsigned char* GrayData = new unsigned char[static_cast<size_t>(hka_image.height * hka_image.width)];
memcpy(GrayData, hka_image.data[0], static_cast<size_t>(hka_image.height * hka_image.width));
GenImage1(&h_image, "byte", hka_image.width, hka_image.height, reinterpret_cast<Hlong>(GrayData));
delete[] GrayData;
}
if (HKA_IMG_RGB_RGB24_C3 == hka_image.format)
{
unsigned char* dataRed = new unsigned char[hka_image.width * hka_image.height];
unsigned char* dataGreen = new unsigned char[hka_image.width * hka_image.height];
unsigned char* dataBlue = new unsigned char[hka_image.width * hka_image.height];
memcpy(dataRed, hka_image.data[0], static_cast<size_t>(hka_image.height * hka_image.width));
memcpy(dataGreen, hka_image.data[1], static_cast<size_t>(hka_image.height * hka_image.width));
memcpy(dataBlue, hka_image.data[2], static_cast<size_t>(hka_image.height * hka_image.width));
GenImage3(&h_image, "byte", hka_image.width, hka_image.height, reinterpret_cast<Hlong>(dataRed), reinterpret_cast<Hlong>(dataGreen), reinterpret_cast<Hlong>(dataBlue));
delete[] dataRed;
delete[] dataGreen;
delete[] dataBlue;
}
return h_image;
}
HKA_IMAGE CAlgorithmModule::HImageToHKAImage(HalconCpp::HObject h_image)
{
HKA_IMAGE image;
HTuple h_channels;
HTuple h_row{ 0 }, h_col{ 0 };
ConvertImageType(h_image, &h_image, "byte");
CountChannels(h_image, &h_channels);
HTuple h_gray_value;
if (h_channels.I() == 1)
{
HTuple imagePointer, imageType, imageWidth, imageHeight;
image = { HKA_IMG_MONO_08, 0 };
GetImagePointer1(h_image, &imagePointer, &imageType, &imageWidth, &imageHeight);
image.width = imageWidth.I();
image.height = imageHeight.I();
image.step[0] = image.width;
image.data[0] = (unsigned char*)malloc(image.width * image.height);
if (nullptr != image.data[0])
{
memset(image.data[0], 0, image.width * image.height);
memcpy_s(image.data[0], image.width * image.height, (byte*)imagePointer[0].L(), imageWidth.I() * imageHeight.I());
}
}
if (h_channels.I() == 3)
{
HTuple imagePointer, imageType, imageWidth, imageHeight;
image = { HKA_IMG_RGB_RGB24_C3, 0 };
HTuple imageRedPointer;
HTuple imageGreenPointer;
HTuple imageBluePointer;
GetImagePointer3(h_image, &imageRedPointer, &imageGreenPointer, &imageBluePointer, &imageType, &imageWidth, &imageHeight);
image.width = imageWidth.I();
image.height = imageHeight.I();
image.step[0] = imageWidth.I();
image.step[1] = imageWidth.I();
image.step[2] = imageWidth.I();
image.data[0] = (unsigned char*)malloc(image.width * image.height);
image.data[1] = (unsigned char*)malloc(image.width * image.height);
image.data[2] = (unsigned char*)malloc(image.width * image.height);
if (nullptr != image.data[0])
{
memset(image.data[0], 0, image.width * image.height);
memcpy_s(image.data[0], image.width * image.height, (byte*)imageRedPointer[0].L(), imageWidth.I() * imageHeight.I());
}
if (nullptr != image.data[1])
{
memset(image.data[1], 0, image.width * image.height);
memcpy_s(image.data[1], image.width * image.height, (byte*)imageGreenPointer[0].L(), imageWidth.I() * imageHeight.I());
}
if (nullptr != image.data[2])
{
memset(image.data[2], 0, image.width * image.height);
memcpy_s(image.data[2], image.width * image.height, (byte*)imageBluePointer[0].L(), imageWidth.I() * imageHeight.I());
}
}
return image;
}
6.3 Converting between HKA_IMAGE and the operator image (IMvdImage)
IMvdImage* CAlgorithmModule::HKAImageToIMvdImage(HKA_IMAGE inputimage)
{
IMvdImage* iMvdImage = NULL;
CreateImageInstance(&iMvdImage);
MVD_IMAGE_DATA_INFO stImageData;
if (inputimage.format == HKA_IMG_MONO_08)
{
uint dataLen = (uint)(inputimage.width * inputimage.height);
stImageData.stDataChannel[0].nRowStep = inputimage.width;
stImageData.stDataChannel[0].nLen = dataLen;
stImageData.stDataChannel[0].nSize = dataLen;
stImageData.stDataChannel[0].pData = (unsigned char*)malloc(inputimage.width * inputimage.height);
memcpy(stImageData.stDataChannel[0].pData, inputimage.data[0], inputimage.width * inputimage.height);
iMvdImage->InitImage(inputimage.width, inputimage.height, MVD_PIXEL_MONO_08, stImageData);
}
else if (inputimage.format == HKA_IMG_RGB_RGB24_C3)
{
uint dataLen = (uint)(inputimage.width * inputimage.height * 3);
stImageData.stDataChannel[0].nRowStep = inputimage.width * 3;
stImageData.stDataChannel[0].nLen = dataLen;
stImageData.stDataChannel[0].nSize = dataLen;
stImageData.stDataChannel[0].pData = (unsigned char*)malloc(inputimage.width * inputimage.height * 3);
memcpy(stImageData.stDataChannel[0].pData, inputimage.data[0], inputimage.width * inputimage.height * 3);
iMvdImage->InitImage(inputimage.width, inputimage.height, MVD_PIXEL_RGB_RGB24_C3, stImageData);
}
return iMvdImage;
}
HKA_IMAGE CAlgorithmModule::IMvdImageToHKA_IMAGE(IMvdImage* iMvdImage)
{
HKA_IMAGE inputimage;
if (iMvdImage->GetPixelFormat() == MVD_PIXEL_MONO_08)
{
inputimage = { HKA_IMG_MONO_08, 0 };
inputimage.width = iMvdImage->GetWidth();
inputimage.height = iMvdImage->GetHeight();
inputimage.format = HKA_IMG_MONO_08;
inputimage.step[0] = iMvdImage->GetWidth();
inputimage.data[0] = (char*)malloc(inputimage.width * inputimage.height);
if (inputimage.data[0] != NULL)
{
memcpy(inputimage.data[0], iMvdImage->GetImageData()->stDataChannel[0].pData, inputimage.width * inputimage.height);
}
}
}
else if (iMvdImage->GetPixelFormat() == MVD_PIXEL_RGB_RGB24_C3)
{
inputimage = { HKA_IMG_RGB_RGB24_C3, 0 };
inputimage.width = iMvdImage->GetWidth();
inputimage.height = iMvdImage->GetHeight();
inputimage.format = HKA_IMG_RGB_RGB24_C3;
inputimage.step[0] = iMvdImage->GetWidth() * 3;
inputimage.data[0] = (char*)malloc(inputimage.width * inputimage.height * 3);
if (inputimage.data[0] != NULL)
{
memcpy(inputimage.data[0], iMvdImage->GetImageData()->stDataChannel[0].pData, inputimage.width * inputimage.height * 3);
}
}
}
return inputimage;
}
Summary
The examples above cover most conversions between common image types and the VM image types. At bottom they are all the same operation: assigning or copying the image data (a pointer or a byte array) together with the image width, height, and pixel format. Conversions for other image types can be implemented following the same pattern.