Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions ISSR_AI4MH_Yixing_Fan/pipeline/fetch_reddit.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
"anxiety attack", "feeling empty", "burnout", "mental exhaustion", "can't sleep",
"constant worry", "feeling worthless", "no motivation", "social isolation", "panic disorder",
"intrusive thoughts", "emotional pain", "mental fog", "dissociation", "feeling trapped",
"racing thoughts", "mood swings", "emotional breakdown", "mental health crisis", "therapy needed"
"racing thoughts", "mood swings", "emotional breakdown", "mental health crisis", "therapy needed" , "feel like a burden" ,"nobody cares about me"
]

subreddits = [
Expand All @@ -41,7 +41,7 @@
"mentalhealthawareness", "bipolar", "traumatoolbox", "mentalhealthmemes", "mentalhealthart",
"mentalhealthvideos", "mentalhealthresources", "mentalhealthadvice", "mentalhealthstories",
"mentalhealthchat", "mentalhealthhelp", "mentalhealthcommunity",
"mentalhealthmatters", "mentalhealthwarriors"
"mentalhealthmatters", "mentalhealthwarriors","healthanxiety" ,"KindVoice"
]

# ========== Preprocessing function ==========
Expand Down
42 changes: 25 additions & 17 deletions ISSR_Communication_Analysis_Tool_Samuel_Kalu/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@

import base64
import io
import traceback
import warnings

import gradio as gr
Expand All @@ -21,12 +22,13 @@


def get_image_base64(path, width=300):
    """Read an image file and return an HTML ``<img>`` tag with the data inlined as base64.

    Args:
        path: Filesystem path to the image asset.
        width: Display width in pixels for the rendered tag (default 300,
            matching the previously hard-coded value).

    Returns:
        An ``<img>`` HTML snippet whose ``src`` is a ``data:`` URI, centered
        via inline CSS.

    Raises:
        FileNotFoundError: If ``path`` does not exist.
    """
    import mimetypes

    if not os.path.exists(path):
        raise FileNotFoundError(f"Image asset not found at '{path}'")
    # Label the data URI with the file's real MIME type instead of always
    # claiming PNG; fall back to image/png (the original behavior) when the
    # type cannot be guessed, so existing PNG callers see identical output.
    mime, _ = mimetypes.guess_type(path)
    mime = mime or "image/png"
    with open(path, "rb") as image_file:
        encoded = base64.b64encode(image_file.read()).decode("utf-8")
    return (
        f'<img src="data:{mime};base64,{encoded}" width="{width}" '
        f'style="display: block; margin: 0 auto;"/>'
    )



def process_multiple_videos(
folder_path=None,
video_files=None,
Expand Down Expand Up @@ -61,7 +63,6 @@ def process_multiple_videos(

if folder_path and os.path.isdir(folder_path):
logger.info(f"Processing videos from folder: {folder_path}")
# Pass checkbox states to process_all_videos_from_path
process_all_videos_from_path(
folder_path,
output_dir,
Expand All @@ -78,6 +79,16 @@ def process_multiple_videos(
f"Finished processing videos from folder. Found {len(processed_csvs)} CSVs."
)

elif folder_path and not os.path.isdir(folder_path):
logger.warning(f"Provided folder path does not exist: '{folder_path}'")
return (
f"Error: Folder path '{folder_path}' does not exist or is not a directory. Please check the path and try again.",
gr.update(visible=False, value=None),
gr.update(visible=False, value=None),
gr.update(visible=False, value=None),
gr.update(open=False),
)

elif video_files:
logger.info(f"Processing uploaded video files: {video_files}")
for idx, video_path in enumerate(video_files):
Expand All @@ -95,7 +106,6 @@ def process_multiple_videos(
)

original_filename = os.path.basename(video_path)
# Ensure proper CSV filename generation from video filename
output_csv_filename = os.path.splitext(original_filename)[0] + ".csv"
output_csv_path = os.path.join(output_dir, output_csv_filename)

Expand All @@ -104,8 +114,6 @@ def process_multiple_videos(
)

try:
# Call the process_video function from src.pipeline
# Pass checkbox states to process_video
process_video(
video_path,
output_csv_path,
Expand All @@ -118,8 +126,6 @@ def process_multiple_videos(
f"Successfully processed uploaded video: {original_filename}"
)
except Exception as e:
import traceback

logger.error(
f"Exception while processing {original_filename}: {str(e)}"
)
Expand Down Expand Up @@ -147,10 +153,8 @@ def process_multiple_videos(

if processed_csvs:
try:
# Display the first processed CSV in the DataFrame
df = pd.read_csv(processed_csvs[0])
logger.info(f"Displaying data from: {os.path.basename(processed_csvs[0])}")
# Generate plots and convert to base64 HTML
plot_img = plot_speaker_charts(df)
return (
f"Successfully processed {len(processed_csvs)} video(s). Displaying data from {os.path.basename(processed_csvs[0])}.",
Expand All @@ -160,8 +164,6 @@ def process_multiple_videos(
gr.update(open=True),
)
except Exception as e:
import traceback

logger.error(
f"Error reading processed CSV {os.path.basename(processed_csvs[0])}: {str(e)}"
)
Expand Down Expand Up @@ -214,12 +216,18 @@ def create_interface():
}
"""

logo_path = "assets/trip_lab_logo.png"
if not os.path.exists(logo_path):
logger.warning(f"Logo asset not found at '{logo_path}'. Displaying without logo.")
html_img = ""
else:
html_img = get_image_base64(logo_path)

with gr.Blocks(
title="Driving Simulator Communication Analysis Tool",
theme=gr.themes.Monochrome(),
css=custom_css, # Added custom CSS
css=custom_css,
) as demo:
html_img = get_image_base64("assets/trip_lab_logo.png")
gr.HTML(
f"""
<div style="text-align: center;">
Expand All @@ -239,12 +247,12 @@ def create_interface():
video_files = gr.File(
label="Upload Videos (Optional)",
file_count="multiple",
type="filepath", # Ensures a temporary file path is provided
type="filepath",
file_types=[".mp4", ".mov", ".avi", ".webm"],
)

with gr.Group():
with gr.Row(): # Make checkboxes inline
with gr.Row():
checkbox_ner = gr.Checkbox(
label="NER (Named Entity Recognition)",
value=True,
Expand Down Expand Up @@ -279,7 +287,7 @@ def create_interface():
download_csv = gr.File(
label="Download Processed CSV",
visible=False,
type="filepath", # This is for download, not upload
type="filepath",
file_count="single",
)
with gr.Accordion("Show Plots", open=True) as plot_accordion:
Expand All @@ -301,4 +309,4 @@ def create_interface():


# Build the app at module import so hosted runners can still find `demo`,
# but only start a local server when this file is executed as a script —
# previously `launch()` ran unconditionally, even on `import app`.
demo = create_interface()

if __name__ == "__main__":
    demo.launch()
Original file line number Diff line number Diff line change
Expand Up @@ -12,10 +12,20 @@
warnings.filterwarnings("ignore")

analyzer = SentimentIntensityAnalyzer()
hf_analyzer = None


def analyze_sentiment(text, use_hf=False):
    """Score the sentiment of ``text`` on a scale from -1 (negative) to +1 (positive).

    By default the VADER compound score is returned.  When ``use_hf`` is
    True, a Hugging Face ``sentiment-analysis`` pipeline is used instead:
    the model's confidence is returned as-is for a POSITIVE label and
    negated for anything else.

    Args:
        text: The text to analyze.
        use_hf: If True, use the lazily constructed transformers pipeline
            instead of VADER.

    Returns:
        A float in [-1.0, 1.0].
    """
    global hf_analyzer
    if use_hf:
        if hf_analyzer is None:
            # Lazy import + construction: transformers is a heavy optional
            # dependency, only loaded when the HF path is actually requested.
            from transformers import pipeline

            hf_analyzer = pipeline("sentiment-analysis")
        result = hf_analyzer(text)[0]
        label = result["label"].upper()
        confidence = float(result["score"])
        # NOTE(review): any non-POSITIVE label (including NEUTRAL on some
        # models) is scored as negative — confirm this is the intended
        # mapping for the chosen default model.
        return confidence if label == "POSITIVE" else -confidence

    scores = analyzer.polarity_scores(text)
    return scores["compound"]
Loading