I was wondering how to find the average intensity of a grayscale ROI. I currently use the code below; however, when I try to print
roi_gray_mean
I get the output in the console
<built-in method mean of numpy.ndarray object at 0x181c95b530>
<built-in method mean of numpy.ndarray object at 0x181c95b670>
<built-in method mean of numpy.ndarray object at 0x181c95b4e0>
......
Below is my code.
import cv2
import numpy as np

# Load the image, downscale by half, and convert to grayscale.
img = cv2.imread("/Users/2020shatgiskessell/Desktop/roomimage.jpg")
roomimg = cv2.resize(img, (0,0), fx=0.5, fy=0.5)
gray = cv2.cvtColor(roomimg, cv2.COLOR_BGR2GRAY)

# Edge detection.
ret, thresh = cv2.threshold(gray, 127, 255, cv2.THRESH_BINARY_INV)
edge = cv2.Canny(thresh, 100, 200)

# Create nodes by iterating through 10x10 blocks; (i, j) is the block's
# top-left corner in (x, y) pixel coordinates.
height, width, channels = roomimg.shape
for i in range(0, width, 10):
    for j in range(0, height, 10):
        # NumPy indexes images as [row, col] == [y, x], so slice rows with j
        # and columns with i. The original `gray[width: i+10, height: j+10]`
        # used width/height as start indices, yielding empty arrays.
        roi_gray = gray[j:j + 10, i:i + 10]
        cv2.rectangle(edge, (i, j), (i + 10, j + 10), (128, 128, 128), 1)
        # `.mean` without parentheses is the bound method object — printing it
        # gives "<built-in method mean of numpy.ndarray ...>". Call it to get
        # the scalar average intensity of the ROI.
        roi_gray_mean = roi_gray.mean()
        print(roi_gray_mean)

cv2.imshow('Image Edges', edge)
# Wait for a keypress; close the window on Esc (keycode 27).
if cv2.waitKey(0) & 0xff == 27:
    cv2.destroyAllWindows()