...
 
Commits (2)
@@ -98,6 +98,9 @@ instance/
 # Sphinx documentation
 docs/_build/
 resources/csv
+resources/img_data/models/
 # PyBuilder
 target/
...
@@ -29,9 +29,9 @@ parser.add_argument('-5', action='append_const', dest='emotions', const='disgust
 parser.add_argument('-6', action='append_const', dest='emotions', const='anger', help='anger')
 parser.add_argument('-d', '--dataset', action='store', dest='dataset', default='resources/img_data/dataset/',
                     help='path to dataset')
-parser.add_argument('-i' '--iterations', action='store', dest='iterations', type=int, default=30,
+parser.add_argument('-i' '--iterations', action='store', dest='iterations', type=int, default=1,
                     help='number of iterations')
-parser.add_argument('-p', '--properties', nargs='+', dest='properties', help='pre-processing steps for logging')
+parser.add_argument('-p', '--properties', nargs='*', dest='properties', help='pre-processing steps for logging')
 parser.add_argument('-t', '--test', action='store_true', dest='test', help='prevent writing new model to classifier')
 parser.add_argument('-c', '--csv', action='store_true', dest='csv', help='activate csv output')
 parser.add_argument('-x', '--email', action='store_true', dest='email', help='activate email notifications')
@@ -44,9 +44,6 @@ if not arguments.emotions:
 logging.info('Fisherface training started')
-if arguments.email:
-    sendMail('Fisherface training started')
 def _get_faces_from_emotion(emotion):
     """
     Get all the files to an emotion from the dataset, mix them, and split them into a training and test set.
@@ -70,9 +67,9 @@ def image_preprocessing(image):
     """
     img = cv2.imread(image)  # open image
     gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)  # convert to grayscale
-    clahe = cv2.createCLAHE(2.0, (8, 8))
-    norm = clahe.apply(gray)
-    return norm
+    clahe = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8, 8))
+    face = clahe.apply(gray)
+    return face
 def make_sets():
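For quick reference, a minimal standalone sketch of the CLAHE pre-processing step touched above (assumes OpenCV is installed; the function name is illustrative and not part of the diff):

import cv2

def preprocess(image_path):
    """Grayscale + CLAHE contrast normalisation, mirroring image_preprocessing."""
    img = cv2.imread(image_path)                  # load image from disk
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)  # convert to grayscale
    clahe = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8, 8))
    return clahe.apply(gray)                      # contrast-limited adaptive histogram equalisation

# face = preprocess('resources/img_data/dataset/neutral/0.jpg')  # example path, dataset layout as in the diff

Passing clipLimit and tileGridSize as keyword arguments, as the new version does, avoids relying on the positional order of cv2.createCLAHE.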
@@ -137,9 +134,6 @@ for i in range(1, arguments.iterations + 1):
     logging.info("{} : {}%".format(i, int(correct)))
     metascore.append(correct)
-    if arguments.email and i % (int(arguments.iterations / 4)) == 0:
-        sendMail(str(i) + ' iterations done', body='up-to-date average: {}%'.format(np.mean(metascore)))
 # Argument parser
 if arguments.csv:
     file = open("resources/csv/{}.csv".format('_'.join(arguments.properties).lower()), "w")
@@ -153,8 +147,7 @@ logging.info("Fisherface training finished - {}% average\n".format(np.mean(metas
 # Argument parser
 if not arguments.test:
     fishface.write('resources/models/detection_model.xml')
-    logging.info('saved trained classifier')
 # Argument parser
 if arguments.email:
-    sendMail('Fisherface training finished', filepath='resources/models/detection_model.xml')
+    sendMail('Fisherface training finished', body=str(arguments), filepath='resources/models/detection_model.xml')
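For consumers of the model written above, a minimal sketch of loading it back for prediction (assumes the opencv-contrib face module; the input variable is illustrative and must match the training image size):

import cv2

fishface = cv2.face.FisherFaceRecognizer_create()
fishface.read('resources/models/detection_model.xml')  # model path used by the training script

# gray_face: a grayscale face image with the same dimensions as the training data
# label, confidence = fishface.predict(gray_face)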
@@ -11,9 +11,17 @@ from face_detect import extract_faces
 from image_commons import nparray_as_image, draw_with_alpha, draw_img
 parser = argparse.ArgumentParser(description='ProjectMood Emotion Detection')
+parser.add_argument('-0', action='append_const', dest='emotions', const='neutral', help='neutral')
+parser.add_argument('-1', action='append_const', dest='emotions', const='happy', help='happy')
+parser.add_argument('-2', action='append_const', dest='emotions', const='sadness', help='sadness')
+parser.add_argument('-3', action='append_const', dest='emotions', const='surprise', help='surprise')
+parser.add_argument('-4', action='append_const', dest='emotions', const='fear', help='fear')
+parser.add_argument('-5', action='append_const', dest='emotions', const='disgust', help='disgust')
+parser.add_argument('-6', action='append_const', dest='emotions', const='anger', help='anger')
 parser.add_argument('-b', '--buffer', action='store', dest='buffer', default=12, type=int, help='size of ringbuffer')
 parser.add_argument('-m', '--model', action='store', dest='model', default='resources/models/detection_model.xml',
                     help='path to model')
+parser.add_argument('-r', '--resize', action='store', type=int, dest='resizefactor')
 arguments = parser.parse_args()
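To illustrate how the newly added flags combine (the argument values here are examples only): each numeric switch appends its emotion to arguments.emotions via append_const, and -r bypasses the dataset lookup for the resize factor:

args = parser.parse_args(['-0', '-1', '-6', '-r', '350'])
# args.emotions     -> ['neutral', 'happy', 'anger']
# args.resizefactor -> 350
# args.buffer       -> 12 (default), args.model -> 'resources/models/detection_model.xml'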
@@ -26,7 +34,6 @@ def _load_emoticons(emotions):
     return [nparray_as_image(cv2.imread('resources/emojis/{}.png'.format(emotion), -1), mode=None) for emotion in
             emotions]
 def show_webcam_and_run(model, emoticons, window_size=(600, 600), window_name=parser.description, update_time=1):
     """
     Shows a webcam image, recognizes faces and emotions in real time and draws emoticons next to the faces.
@@ -42,9 +49,12 @@ def show_webcam_and_run(model, emoticons, window_size=(600, 600), window_name=pa
     vc = WebcamVideoStream().start()
-    # a random image from the dataset to determine the image format (important for Fisherface)
-    random = cv2.imread('resources/img_data/dataset/{}/0.jpg'.format(emotions[0]))
-    resizefactor = np.size(random, 0)
+    if not arguments.resizefactor:
+        # a random image from the dataset to determine the image format (important for Fisherface)
+        random = cv2.imread('resources/img_data/dataset/{}/0.jpg'.format(arguments.emotions[0]))
+        resizefactor = np.size(random, 0)
+    else:
+        resizefactor = arguments.resizefactor
     # The RingBuffer stores the last x Predictions
     buffer = RingBuffer(arguments.buffer)
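The RingBuffer implementation itself is not part of this diff; as a hint at the smoothing it provides, a minimal sketch with a compatible get() interface (the class layout and append method are assumptions, not the project's actual code):

from collections import Counter, deque

class RingBuffer:
    """Keeps only the most recent `size` predictions (illustrative sketch)."""
    def __init__(self, size):
        self._data = deque(maxlen=size)  # oldest entries drop out automatically

    def append(self, prediction):
        self._data.append(prediction)

    def get(self):
        return list(self._data)

# smoothing idea: display the emotion that dominates the buffered predictions
# dominant = Counter(buffer.get()).most_common(1)[0][0]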
@@ -61,6 +71,8 @@ def show_webcam_and_run(model, emoticons, window_size=(600, 600), window_name=pa
         # Get the entries as an array
         predictions = buffer.get()
+        print(predictions)
         # Read the processed input image
         processed_image = nparray_as_image(normalized_face[:, :], mode='L')
         w, h = vc.size()
@@ -89,10 +101,7 @@ def show_webcam_and_run(model, emoticons, window_size=(600, 600), window_name=pa
 if __name__ == '__main__':
-    # The emotions in the Dataset folder should also be loaded in the application
-    _, emotions, _ = next(os.walk('resources/img_data/dataset'), (None, [], None))
-    emoticons = _load_emoticons(emotions)
+    emoticons = _load_emoticons(arguments.emotions)
     fisher_face = cv2.face.FisherFaceRecognizer_create()
...