diff --git a/Jarvis.py b/Jarvis.py
index c20efd09..836abb7a 100644
--- a/Jarvis.py
+++ b/Jarvis.py
@@ -1,9 +1,11 @@
 from database.mongodb import create_connection, login_user
 from database.localStorageServer import server
 from src.utils.functions import application
+from src.emotion_recognition import run_emotion_detection  # Import the emotion detection logic
 from datetime import datetime, timedelta
 import streamlit as st
 
+# Initialize session state if not present
 if "user" not in st.session_state:
     st.session_state.update({
         'password': None,
@@ -12,6 +14,7 @@
         'verified': False,
     })
 
+# Fetch stored credentials from local storage
 def get_credentials():
     conn = server()
     return (
@@ -21,16 +24,26 @@ def get_credentials():
         conn.getLocalStorageVal("verified"),
     )
 
+# Main execution block
 if __name__ == "__main__":
     today = datetime.now()
 
+    # Fetch credentials from local storage if the password is not yet loaded
     if st.session_state['password'] is None:
         st.session_state['password'], st.session_state['user'], st.session_state['expiration_date'], st.session_state['verified'] = get_credentials()
 
+    # Re-authenticate if the stored session has not expired but is not yet verified
     if st.session_state['expiration_date'] > today.isoformat() and not st.session_state['verified']:
         conn = create_connection()
         if login_user(conn, st.session_state['user'][0], st.session_state['password']):
             st.session_state['verified'] = True
             server().setLocalStorageVal("verified", True)
 
+    # Run emotion detection once the user is verified
+    if st.session_state['verified']:
+        # Optionally run emotion detection here; this could be made interactive in the UI
+        run_emotion_detection()
+
+    # Run the application logic based on the verified status
     application(st.session_state['verified']).run()
+
diff --git a/assets/emotion_model.h5 b/assets/emotion_model.h5
new file mode 100644
index 00000000..530224fe
Binary files /dev/null and b/assets/emotion_model.h5 differ
diff --git a/src/emotion_recognition.py b/src/emotion_recognition.py
new file mode 100644
index 00000000..6eb47b2a
--- /dev/null
+++ b/src/emotion_recognition.py
@@ -0,0 +1,62 @@
+import numpy as np
+import cv2
+from keras.models import load_model
+
+# Map model output indices to emotion labels
+emotion_dict = {0: "Angry", 1: "Disgust", 2: "Fear", 3: "Happy", 4: "Sad", 5: "Surprise", 6: "Neutral"}
+
+def preprocess_image(image):
+    if image.ndim == 3:  # Convert to grayscale only if the crop still has color channels
+        image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
+    image = cv2.resize(image, (48, 48))  # Resize to the model's expected input size
+    image = image.astype('float32') / 255.0  # Normalize pixel values to [0, 1]
+    image = np.reshape(image, (1, 48, 48, 1))  # Add batch and channel dimensions
+    return image
+
+def load_emotion_model():
+    # Load the pre-trained model
+    model = load_model('assets/emotion_model.h5')
+    return model
+
+def detect_emotion(frame, model):
+    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
+    face_cascade = cv2.CascadeClassifier(cv2.data.haarcascades + 'haarcascade_frontalface_default.xml')
+
+    # Detect faces
+    faces = face_cascade.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=5, minSize=(30, 30))
+
+    for (x, y, w, h) in faces:
+        face = gray[y:y+h, x:x+w]
+        face = preprocess_image(face)  # Preprocess the face crop for emotion prediction
+        emotion_pred = model.predict(face)  # Get the prediction
+        max_index = np.argmax(emotion_pred[0])  # Index of the emotion with the highest probability
+        emotion = emotion_dict[max_index]  # Look up the emotion name
+        color = (0, 255, 0)  # Green bounding box
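+        # Assumption: the model ends in a 7-way softmax, so model.predict on the
+        # (1, 48, 48, 1) input returns a (1, 7) array of class probabilities.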
+
+        # Draw the bounding box and emotion label on the frame
+        cv2.rectangle(frame, (x, y), (x+w, y+h), color, 2)
+        cv2.putText(frame, emotion, (x, y-10), cv2.FONT_HERSHEY_SIMPLEX, 1, color, 2)
+
+    return frame
+
+def run_emotion_detection():
+    model = load_emotion_model()
+    cap = cv2.VideoCapture(0)  # Start the webcam
+
+    while True:
+        ret, frame = cap.read()
+        if not ret:
+            break
+        frame = detect_emotion(frame, model)
+        cv2.imshow('Emotion Recognition', frame)
+
+        if cv2.waitKey(1) & 0xFF == ord('q'):  # Exit when 'q' is pressed
+            break
+
+    cap.release()
+    cv2.destroyAllWindows()
+
+if __name__ == '__main__':
+    run_emotion_detection()  # Run emotion detection when this script is executed directly
diff --git a/requirements.txt b/requirements.txt
index ef5b47b9..c97351ff 100644
Binary files a/requirements.txt and b/requirements.txt differ
diff --git a/streamlit_app.py b/streamlit_app.py
new file mode 100644
index 00000000..938466c6
--- /dev/null
+++ b/streamlit_app.py
@@ -0,0 +1,12 @@
+import streamlit as st
+from src.emotion_recognition import run_emotion_detection
+
+st.title("Emotion Recognition with Jarvis")
+st.write("Click the button to start emotion detection.")
+
+if st.button('Start Emotion Recognition'):
+    run_emotion_detection()
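+
+# Note (assumption): run_emotion_detection() renders frames with cv2.imshow, which opens a
+# native window on the machine running Streamlit rather than in the browser; streaming each
+# frame to the page with st.image would be the browser-friendly alternative.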