Пишу на Objective-C, с OpenCV работаю впервые, многого не знаю. Через cv::Mat не получается — вылетает приложение, поэтому всё делаю через IplImage. Хотелось бы добиться такого же результата, как в этом коде:
int main()
{
// Load test images
Mat a;
Mat b;
if (a.empty() || b.empty())
return -1;
bool isFirstFrame = true;
VideoCapture stream(-1);
while (true) {
if(isFirstFrame){
stream.read(a);
isFirstFrame = false;
} else {
if(!(stream.read(b ) //get one frame from video
break;
Mat a_blurred, b_blurred;
blur(a, a_blurred, Size(4,4));
blur(b, b_blurred, Size(4,4));
// Get absolute difference image
Mat c;
absdiff(b_blurred, a_blurred, c);
// Split image to each channels
vector<Mat> channels;
split(c, channels);
// Apply threshold to each channel and combine the results
Mat d = Mat::zeros(c.size(), CV_8UC1);
for (int i = 0; i < channels.size(); i++)
{
Mat thresh;
threshold(channels[i], thresh, 45, 255, CV_THRESH_BINARY);
d |= thresh;
}
// Perform morphological close operation to filling in the gaps
Mat kernel, e;
getStructuringElement(MORPH_RECT, Size(10,10));
morphologyEx(d, e, MORPH_CLOSE, kernel, Point(-1,-1), 5);
// Find all contours
vector<vector<Point> > contours;
findContours(e.clone(), contours, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE);
// Select only large enough contours
vector<vector<Point> > intruders;
for (int i = 0; i < contours.size(); i++)
{
double area = contourArea(contours[i]);
if (area > 10000)
intruders.push_back(contours[i]);
}
// Use the filtered blobs above to create a mask image to
// extract the foreground object
Mat mask = Mat::zeros(b.size(), CV_8UC3);
drawContours(mask, intruders, -1, CV_RGB(255,255,255), -1);
// Highlight the foreground object by darken the rest of the image
if (intruders.size())
{
b = (b/4 & ~mask) + (b & mask);
drawContours(b, intruders, -1, CV_RGB(255,255,255), 2);
}
imshow("b", b );
waitKey(27);
}
}
return 0;
}
Подскажите, пожалуйста, в чём разница между этими двумя вариантами и почему второй не даёт такого же результата?
int main(){
bool isFirstFrame = true;
VideoCapture stream(-1);
IplImage *imggray1 = nullptr;
Mat a, b;
while (true) {
if(isFirstFrame){
stream.read(a);
IplImage img1 = a;
imggray1 = cvCreateImage(cvGetSize(&img1), 8, 3);
cvConvertImage(&img1, imggray1, CV_GRAY2BGR);
cvSmooth(imggray1, imggray1, CV_BLUR,4,4);
isFirstFrame = false;
} else {
if(!(stream.read(b ) //get one frame from video
break;
IplImage img2 = b;
IplImage *imggray2 = cvCreateImage( cvGetSize( &img2 ), IPL_DEPTH_8U, 3);
cvConvertImage(&img2, imggray2, CV_GRAY2BGR);
cvSmooth(imggray2, imggray2, CV_BLUR, 4, 4);
IplImage *imggray3 = cvCreateImage( cvGetSize( &img2 ), IPL_DEPTH_8U, 3);
cvAbsDiff(imggray2, imggray1, imggray3);
IplImage *r = cvCreateImage(cvGetSize(imggray3), 8, 1);
IplImage *g = cvCreateImage(cvGetSize(imggray3), 8, 1);
IplImage *b = cvCreateImage(cvGetSize(imggray3), 8, 1);
IplImage *diff = cvCreateImage(cvGetSize(imggray3), 8, 3);
cvSplit(imggray3, b, g, r, 0);
IplImage *tr, *tg, *tb; // для промежуточного хранения
tr = cvCreateImage(cvGetSize(imggray3), IPL_DEPTH_8U, 1);
tg = cvCreateImage(cvGetSize(imggray3), IPL_DEPTH_8U, 1);
tb = cvCreateImage(cvGetSize(imggray3), IPL_DEPTH_8U, 1);
cvThreshold(r, tr, 45, 255, CV_THRESH_BINARY);
cvThreshold(g, tg, 45, 255, CV_THRESH_BINARY);
cvThreshold(b, tb, 45, 255, CV_THRESH_BINARY);
cvMerge(tb, tg, tr, 0, diff);
IplImage *clearedImage = cvCreateImage(cvGetSize(diff), IPL_DEPTH_8U, 3);
IplImage *temp = cvCreateImage(cvGetSize(diff), IPL_DEPTH_8U, 3);
IplConvKernel *element = cvCreateStructuringElementEx(10,10,1,1,CV_SHAPE_RECT);
cvMorphologyEx(clearedImage, clearedImage, temp, element, 5);
cvErode(diff, clearedImage, element);
cvDilate(clearedImage, clearedImage, element);
IplImage *dest = cvCreateImage(cvGetSize(diff), IPL_DEPTH_8U, 1);
cvCvtColor(diff, dest, CV_RGB2GRAY);
cvEqualizeHist(dest, dest);
cvShowImage("Dest", dest);
//find contours
CvMemStorage *storage = cvCreateMemStorage(0);
CvSeq *contours = 0;
double area = 0;
cvFindContours( dest, storage, &contours, sizeof(CvContour), CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE );
if(contours != 0){
for(CvSeq *c = contours; c != 0; c = c->h_next) {
area = cvContourArea(c, CV_WHOLE_SEQ);
if(fabs(area) > 1000) {
cvApproxPoly(c, sizeof(CvContour), storage, CV_POLY_APPROX_DP,3,1);
cvDrawContours(&img2, c, CV_RGB(0,255,0), CV_RGB(0,255,0),0,2,8);
}
}
}
cvShowImage("Original2", &img2);
cvReleaseImage(&temp);
cvReleaseImage(&r);
cvReleaseImage(&g);
cvReleaseImage(&b );
cvReleaseImage(&tr);
cvReleaseImage(&tg);
cvReleaseImage(&tb);
cvReleaseImage(&clearedImage);
cvReleaseImage(&dest);
cvReleaseImage(&diff);
cvReleaseImage(&imggray2);
cvReleaseImage(&imggray3);
cvReleaseMemStorage(&storage);
waitKey(27);
}
}
return 0;
}