forked from XPUB/si_7-IRIS
motion detector sensitivity corrected; some comments added
parent
b99136a813
commit
e0ff1654dd
@ -1,87 +1,76 @@
|
|||||||
#!/usr/bin/env python
# coding=utf-8

# Trigger the espeak colophon by covering the camera with a finger:
# when the captured frames stay dark for long enough, play colophon.wav.

# import dependencies
# sudo pip install PiCamera[array]
# sudo pip install aplay
import imutils
from imutils.video import VideoStream
from io import BytesIO
import datetime
from PIL import Image
import time, sys
from time import sleep
import cv2
from datetime import datetime
from subprocess import call

# check OpenCV version
print(cv2.__version__)

# choose video source from the Pi camera (picamera is the default)
vs = VideoStream(usePiCamera=True).start()
# let the camera warm up
sleep(2.0)

image = vs.read()
count = 0
success = True
buffer = []       # rolling window of per-frame darkness results
playing = False   # set to True once the colophon has been played

while success:
    # save the live frame as a JPEG file, then grab the next one
    cv2.imwrite("check_frame.jpg", image)
    image = vs.read()

    # open the saved frame
    img = Image.open("check_frame.jpg")
    # resize the frame to a single pixel: its color averages the whole frame
    img2 = img.resize((1, 1))
    # get the color of that pixel
    color = img2.getpixel((0, 0))
    # print the color of the pixel as a hex triplet
    print('#{:02x}{:02x}{:02x}'.format(*color))

    # sum of the 3 channels that constitute the value
    # (named channel_sum so the builtin `sum` is not shadowed)
    channel_sum = color[0] + color[1] + color[2]

    # this accepts not only complete black but also dark shades;
    # raising this threshold increases the sensitivity
    if channel_sum < 10:
        buffer.append(True)
    else:
        # unique datetime.now() values keep set(buffer) larger than 1,
        # preventing the colophon from triggering unless every recent
        # frame was dark
        buffer.append(datetime.now())

    # keep at most 300 values, discarding the oldest (first) entry
    if len(buffer) > 300:
        buffer.pop(0)

    print(buffer)

    # For calling colophon.wav some conditions must be met, in order:
    # - all the buffered values must be the same (i.e. all True = all dark),
    # - at least 200 values must have been collected, so a single dark
    #   frame cannot trigger it,
    # - it must not be playing/played already.
    # (`and` replaces the original bitwise `&` so evaluation short-circuits.)
    if len(set(buffer)) == 1 and len(buffer) > 200 and not playing:
        print("All elements in list are same")
        call(["aplay", "/home/pi/colophon/colophon.wav"])
        # to record the espeak sentence into a .wav file ->
        # -> espeak "sentence goes here" -ven+whisper -s 150 --stdout > colophon.wav
        # espeak 'Iris Version 0.5 Contributors: Gill Baldwin, Simon Browne,
        # Tancredi Di Giovanni, Paloma García, Rita Graça, Artemis Gryllaki,
        # Pedro Sá Couto, Biyi Wen, Bohye Woo, Silvio Lorusso, Aymeric Mansoux,
        # André Castro, Steve Rushton, Michael Murtaugh, Leslie Robbins.
        # Produced and published by the Experimental Publishing (XPUB) program
        # of the Piet Zwart Institute, Rotterdam, December 2018. A collaboration
        # between the Research Department of Het Nieuwe Instituut and XPUB.'
        playing = True
    else:
        print("All elements in list are not same")
|
@ -1,9 +1,10 @@
|
|||||||
#!/bin/sh
# Run the motion detector and the guru-pirate script in an endless loop,
# restarting them whenever either exits.

# location of the scripts directory
DIR=/var/www/html/lifeHackAgent/

# `while true` is the idiomatic endless loop (same behavior as `while [ 1 ]`)
while true
do
    # -a sets the minimum area threshold of the motion detector
    python "$DIR"motion_detector_2.py -a 6000
    python "$DIR"guru-pirate.py
    sleep 5
done
|
||||||
|
Loading…
Reference in New Issue