Commit 0ab70fd8 authored by tihmels's avatar tihmels
Browse files

Process Model für Cluster-Einsatz vorbereitet

parent c2e98706
""" """
Diese Klasse macht das Training des Models möglich Diese Klasse macht das Training des Models möglich
""" """
import argparse
import cv2 import cv2
import glob import glob
...@@ -9,21 +10,47 @@ import numpy as np ...@@ -9,21 +10,47 @@ import numpy as np
import sys import sys
import logging import logging
from email_service import sendMail
# Log file for this training run.
logfile = 'logs/process_model.log'

# Ensure the log directory exists before logging.basicConfig opens the file,
# otherwise the script dies with FileNotFoundError on a fresh checkout.
import os
os.makedirs(os.path.dirname(logfile), exist_ok=True)

# Create and configure the log file (root logger, all levels pass through).
logging.basicConfig(level=logging.NOTSET,
                    format='%(asctime)s %(levelname)-8s %(message)s',
                    datefmt='%m-%d %H:%M',
                    filename=logfile)
""" """
Liest Input Parameter Argument Parser erlaubt Parameter für die Verarbeitung anzugeben.
""" """
args = sys.argv
logging.debug('Fisherface training initialized')
file = open("{}.csv".format('_'.join(args[1:]).lower()), "w") parser = argparse.ArgumentParser(description='Process Model Application')
parser.add_argument('--dataset', action='store', dest='dataset', default='resources/img_data/dataset/', help='path to dataset')
parser.add_argument('-i', action='store', dest='iterations', type=int, default=30, help='declare processing iterations')
parser.add_argument('-e', action='append', dest='emotions', default=['happy', 'neutral', 'surprise'], help='declare emotions that should be processed')
parser.add_argument('-p', action='append', dest='properties', help='pre-processing steps')
parser.add_argument('--test', action='store_true', help='prevent writing new model to file system')
parser.add_argument('--csv', action='store_true', help='activate csv processing')
parser.add_argument('--email', action='store_true', help='activate email notifications')
arguments = parser.parse_args()
logging.debug(arguments)
dataset_path = arguments.dataset
iterations = arguments.iterations
emotions = arguments.emotions
properties = arguments.properties
csv = arguments.csv
email = arguments.email
test = arguments.test
"""
Liest Input Parameter
"""
logging.info('Fisherface training started')
if email:
sendMail('Fisherface training started')
def _get_faces_from_emotion(emotion): def _get_faces_from_emotion(emotion):
""" """
...@@ -31,7 +58,7 @@ def _get_faces_from_emotion(emotion): ...@@ -31,7 +58,7 @@ def _get_faces_from_emotion(emotion):
:param emotion: Die Emotion :param emotion: Die Emotion
:return: training, prediction :return: training, prediction
""" """
files = glob.glob('img_data/dataset/{}/*'.format(emotion)) files = glob.glob(dataset_path + '{}/*'.format(emotion))
random.shuffle(files) random.shuffle(files)
""" """
...@@ -95,27 +122,31 @@ def run_recognizer(): ...@@ -95,27 +122,31 @@ def run_recognizer():
cnt += 1 cnt += 1
return ((100 * correct) / (correct + incorrect)) return ((100 * correct) / (correct + incorrect))
# Train the Fisherface recognizer for the requested number of iterations
# and collect the per-iteration recognition accuracy in metascore.
fishface = cv2.face.FisherFaceRecognizer_create()

metascore = []
for i in range(1, iterations + 1):
    correct = run_recognizer()
    logging.info("{} : {}%".format(i, int(correct)))
    metascore.append(correct)
    # Send a progress mail roughly every quarter of the run.
    # max(..., 1) guards against ZeroDivisionError when iterations < 4.
    if i % max(iterations // 4, 1) == 0 and email:
        sendMail(str(i) + ' iterations done',
                 body='up-to-date average: {}%'.format(np.mean(metascore)))

if csv:
    # Persist per-iteration scores; the file name is derived from the
    # pre-processing properties. `with` guarantees the handle is closed.
    with open("resources/csv/{}.csv".format('_'.join(properties).lower()), "w") as csv_file:
        for entry in metascore:
            csv_file.write("{}\n".format(int(entry)))

logging.info("Fisherface training finished - {}% average\n".format(np.mean(metascore)))

# --test prevents overwriting the model on the file system.
if not test:
    fishface.write('img_data/models/detection_model.xml')
if email:
    sendMail('Fisherface training finished')
...@@ -10,9 +10,11 @@ import os ...@@ -10,9 +10,11 @@ import os
from email_service import sendMail from email_service import sendMail
from face_detect import locate_faces from face_detect import locate_faces
# Log file for this face-detector run.
logfile = 'logs/sorted_set_facedetector.log'

# Ensure the log directory exists before basicConfig opens the file,
# otherwise the script dies with FileNotFoundError on a fresh checkout.
os.makedirs(os.path.dirname(logfile), exist_ok=True)

# Root logging configuration; filemode='w' truncates the log on every run.
logging.basicConfig(level=logging.NOTSET,
                    format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
                    datefmt='%m-%d %H:%M',
                    filename=logfile,
                    filemode='w')

# Argument parser: lets the caller configure the processing run.
...@@ -23,39 +25,45 @@ parser.add_argument('--source', action='store', dest='img_source', default='reso ...@@ -23,39 +25,45 @@ parser.add_argument('--source', action='store', dest='img_source', default='reso
help='path to image source') help='path to image source')
parser.add_argument('--dataset', action='store', dest='dataset',
                    default='resources/img_data/dataset/', help='path to dataset')
parser.add_argument('--r', action='store', dest='resize', default=150, type=int,
                    help='resize factor')
# NOTE: with action='append' a non-empty default list is appended to, not
# replaced (documented argparse pitfall); the default is applied after parsing.
parser.add_argument('-e', action='append', dest='emotions', default=None,
                    help='declare emotions that should be processed')
parser.add_argument('-c', action='store', dest='scaleFactor', default=1.1, type=float,
                    help='scale factor - haar')
parser.add_argument('-n', action='store', dest='minNeighbors', default=6, type=int,
                    help='min neighbors - haar')
parser.add_argument('-s', action='store', dest='minSize', default=40, type=int,
                    help='min size - haar')
parser.add_argument('--email', action='store_true', help='activate email notifications')

arguments = parser.parse_args()
logging.debug(arguments)

# Unpack the parsed options into module-level configuration.
source_path = arguments.img_source
dataset_path = arguments.dataset
resizeFactor = arguments.resize
emotions = arguments.emotions if arguments.emotions else ['happy', 'neutral', 'surprise']
scaleFactor = arguments.scaleFactor
minNeighbors = arguments.minNeighbors
minSize = arguments.minSize
email = arguments.email
# If the dataset directory already contains entries, ask the user before
# wiping them; abort when the answer is anything but 'y'.
existing = glob.glob(dataset_path + '*')
if existing:
    deleteDataset = input(
        'Im Dataset befinden sich Dateien. Durch diesen Vorgang werden die existierenden Daten gelöscht. Fortfahren (y/n): ')
    if deleteDataset == 'y':
        for entry in existing:
            # shutil.rmtree only handles directories; plain files would
            # raise NotADirectoryError, so remove them explicitly.
            if os.path.isdir(entry):
                shutil.rmtree(entry)
            else:
                os.remove(entry)
    else:
        logging.info('Execution canceled from user')
        sys.exit()

# Global processing statistics, updated by detect_faces().
totalFiles: int = 0
totalFaces: int = 0
undetected: list = []
img_source = arguments.img_source
def detect_faces(emotion): def detect_faces(emotion):
""" """
Holt alle Dateien zu einer Emotion aus dem sorted_set Holt alle Dateien zu einer Emotion aus dem sorted_set
""" """
files = glob.glob(img_source + '{}/*'.format(emotion)) files = glob.glob(source_path + '{}/*'.format(emotion))
global undetected global undetected
global totalFaces global totalFaces
...@@ -68,42 +76,41 @@ def detect_faces(emotion): ...@@ -68,42 +76,41 @@ def detect_faces(emotion):
frame = cv2.imread(f) # Open image frame = cv2.imread(f) # Open image
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY) # Convert image to grayscale gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY) # Convert image to grayscale
facefeatures = locate_faces(gray, arguments.scaleFactor, arguments.minNeighbors, (arguments.minSize, arguments.minSize)) facefeatures = locate_faces(gray, scaleFactor, minNeighbors, (minSize, minSize))
if facefeatures is '': if facefeatures is '':
logging.info('No face detected ' + f) logging.info('No face detected ' + f)
undetected.append(f) undetected.append(f)
if len(undetected) % 200 == 0 and email:
if len(undetected) == 150 and arguments.x: sendMail('Already ' + str(len(undetected)) + ' not detected faces', filepath=logfile)
sendMail('Already 150 not detected faces', filepath='logs/sorted_set_facedetector.log')
else: else:
# Cut and save face # Cut and save face
for (x, y, w, h) in facefeatures: # get coordinates and size of rectangle containing face for (x, y, w, h) in facefeatures: # get coordinates and size of rectangle containing face
totalFaces += 1 totalFaces += 1
gray = gray[y:y + h, x:x + w] # Cut the frame to size gray = gray[y:y + h, x:x + w] # Cut the frame to size
try: try:
out = cv2.resize(gray, (arguments.resize, arguments.resize)) # Resize face so all images have same size out = cv2.resize(gray, (resizeFactor, resizeFactor)) # Resize face so all images have same size
success = cv2.imwrite(datasetPath + '{}/{}.jpg'.format(emotion, fileNumber), out) # Write image success = cv2.imwrite(dataset_path + '{}/{}.jpg'.format(emotion, fileNumber), out) # Write image
if not success: if not success:
logging.error('Problem while writing file occurred...') logging.error('Problem while writing file ' + f + ' occurred...' )
if arguments.x: if email:
sendMail('Problem while writing file', body=f + ' to ' + datasetPath + '{}/{}.jpg'.format(emotion, fileNumber)) sendMail('Problem while writing file', body=f + ' to ' + datasetPath + '{}/{}.jpg'.format(emotion, fileNumber))
except: except:
logging.error('Some error with ' + f) logging.error('Some error with ' + f)
if arguments.x: if email:
sendMail('Problem while writing file', body=f) sendMail('Problem while writing file', body=f)
pass # If error, pass file pass # If error, pass file
totalFiles += 1 # Increment image number totalFiles += 1 # Increment image number
fileNumber += 1 fileNumber += 1
# Kick off processing: optionally announce the run by mail, then make sure
# each emotion has a dataset sub-directory and run face detection on it.
if email:
    sendMail('Facedetector started notification')

for emotion in emotions:
    # exist_ok avoids the separate os.path.exists check (race-free).
    os.makedirs(dataset_path + emotion, exist_ok=True)
    detect_faces(emotion)  # Call functional
...@@ -113,5 +120,5 @@ logging.info('In {} files no face could be detected'.format(totalFiles - totalFa ...@@ -113,5 +120,5 @@ logging.info('In {} files no face could be detected'.format(totalFiles - totalFa
# List every file in which no face could be detected, then send the final
# notification (with the log attached) when email reporting is enabled.
for f in undetected:
    logging.info(f)

if email:
    sendMail('Facedetector finished notification', filepath=logfile)
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment