This commit is contained in:
Leon
2025-07-15 22:54:35 +02:00
commit f7eda17284
89 changed files with 18535 additions and 0 deletions

View File

@@ -0,0 +1 @@
"""Core application modules."""

View File

@@ -0,0 +1,32 @@
from pydantic import AliasChoices, Field
from pydantic_settings import BaseSettings, SettingsConfigDict
"""Configuration settings for the Letterfeed application."""
class Settings(BaseSettings):
    """Application settings, loaded from environment variables or .env file."""

    # Pydantic-settings behaviour: read ".env", ignore unknown keys, accept
    # the LETTERFEED_ prefix, and freeze the object after construction.
    model_config = SettingsConfigDict(
        env_file=".env", extra="ignore", env_prefix="LETTERFEED_", frozen=True
    )

    # SQLAlchemy connection URL; also honours the unprefixed DATABASE_URL
    # env var in addition to LETTERFEED_DATABASE_URL.
    database_url: str = Field(
        "sqlite:////data/letterfeed.db",
        validation_alias=AliasChoices("DATABASE_URL", "LETTERFEED_DATABASE_URL"),
    )
    # Base URL of this app; also honours the unprefixed APP_BASE_URL.
    # (Presumably used to build absolute links — confirm with callers.)
    app_base_url: str = Field(
        "http://localhost:8000",
        validation_alias=AliasChoices("APP_BASE_URL", "LETTERFEED_APP_BASE_URL"),
    )
    # IMAP credentials; empty by default, expected to be set via environment.
    imap_server: str = ""
    imap_username: str = ""
    imap_password: str = ""
    # Mailbox that is searched for incoming mail.
    search_folder: str = "INBOX"
    # Optional mailbox to move processed mail into; None leaves mail in place.
    move_to_folder: str | None = None
    mark_as_read: bool = False
    # Minutes between scheduled email checks (see scheduler module).
    email_check_interval: int = 15
    auto_add_new_senders: bool = False


# Module-level singleton shared across the application.
settings = Settings()

View File

@@ -0,0 +1,25 @@
from sqlalchemy import create_engine
from sqlalchemy.orm import declarative_base, sessionmaker
from app.core.config import settings
from app.core.logging import get_logger
"""Database connection and session management."""
logger = get_logger(__name__)

# NOTE(review): check_same_thread=False is a SQLite-only connect argument;
# a non-SQLite database_url would reject it — confirm deployments only use
# the default SQLite URL.
engine = create_engine(settings.database_url, connect_args={"check_same_thread": False})
# Session factory: explicit commits, no autoflush, bound to the one engine.
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
# Declarative base class for all ORM models.
Base = declarative_base()
def get_db():
    """Yield a fresh database session and guarantee it is closed afterwards.

    Intended as a dependency: callers iterate/`Depends` on it, and the
    `finally` clause closes the session even if the request handler raises.
    """
    logger.debug("Creating new database session")
    session = SessionLocal()
    try:
        yield session
    finally:
        logger.debug("Closing database session")
        session.close()

42
backend/app/core/imap.py Normal file
View File

@@ -0,0 +1,42 @@
import imaplib
import re

from app.core.logging import get_logger
"""IMAP utility functions for connecting to mail servers and fetching folders."""
# Module-level logger for the IMAP utilities.
logger = get_logger(__name__)
def _test_imap_connection(server, username, password):
    """Attempt an SSL IMAP login with the given credentials.

    Returns a ``(success, message)`` tuple; the message is either a fixed
    success string or the stringified connection error.
    """
    logger.info(f"Testing IMAP connection to {server} for user {username}")
    try:
        connection = imaplib.IMAP4_SSL(server)
        connection.login(username, password)
        connection.logout()
    except Exception as exc:
        logger.error(f"IMAP connection failed: {exc}")
        return False, str(exc)
    logger.info("IMAP connection successful")
    return True, "Connection successful"
def get_folders(server, username, password):
"""Fetch a list of IMAP folders from the mail server."""
logger.info(f"Fetching IMAP folders from {server} for user {username}")
try:
mail = imaplib.IMAP4_SSL(server)
mail.login(username, password)
status, folders = mail.list()
mail.logout()
if status == "OK":
folder_list = [
folder.decode().split(' "/" ')[1].strip('"') for folder in folders
]
logger.info(f"Found {len(folder_list)} folders")
return folder_list
logger.warning(f"Failed to list IMAP folders, status: {status}")
return []
except Exception as e:
logger.error(f"Error fetching IMAP folders: {e}")
return []

View File

@@ -0,0 +1,51 @@
import logging
from logging.config import dictConfig
"""Logging configuration for the application."""
def setup_logging():
    """Install the application's logging configuration via ``dictConfig``.

    Routes the ``app`` and ``uvicorn`` logger trees through a single
    stdout stream handler at INFO level.
    """
    stdout_handler = {
        "formatter": "default",
        "class": "logging.StreamHandler",
        "stream": "ext://sys.stdout",
    }
    dictConfig(
        {
            "version": 1,
            "disable_existing_loggers": False,
            "formatters": {
                "default": {
                    "()": "logging.Formatter",
                    "fmt": "%(asctime)s - %(name)s - %(levelname)s - %(message)s",
                },
            },
            "handlers": {"default": stdout_handler},
            "loggers": {
                "app": {
                    "handlers": ["default"],
                    "level": "INFO",
                    "propagate": True,
                },
                "uvicorn": {
                    "handlers": ["default"],
                    "level": "INFO",
                    "propagate": False,
                },
                # Inherits the uvicorn handler via propagation.
                "uvicorn.error": {"level": "INFO"},
                "uvicorn.access": {
                    "handlers": ["default"],
                    "level": "INFO",
                    "propagate": False,
                },
            },
        }
    )
def get_logger(name: str):
    """Return the logger registered under ``name`` (created on first use)."""
    return logging.getLogger(name)

View File

@@ -0,0 +1,54 @@
from apscheduler.schedulers.background import BackgroundScheduler
from app.core.database import SessionLocal
from app.core.logging import get_logger
from app.crud.settings import get_settings
from app.services.email_processor import process_emails
"""Scheduler for background tasks like email processing."""
# Module-level logger for the scheduler.
logger = get_logger(__name__)
def job():
    """Run one email-processing pass inside its own database session.

    Any exception is logged (with traceback) rather than propagated, so a
    single failed run cannot kill the scheduler.
    """
    logger.info("Scheduler job starting: process_emails")
    session = SessionLocal()
    try:
        process_emails(session)
    except Exception as exc:
        logger.error(f"Error in scheduled job process_emails: {exc}", exc_info=True)
    else:
        logger.info("Scheduler job finished: process_emails")
    finally:
        session.close()
# Shared scheduler instance; jobs are registered by start_scheduler_with_interval().
scheduler = BackgroundScheduler()
def start_scheduler_with_interval():
    """(Re)register the email-check job and start the scheduler if needed.

    The interval is read from stored settings, falling back to 15 minutes
    when no settings row exists. When the scheduler is started for the
    first time, one job run is executed immediately. All failures are
    logged rather than raised.
    """
    logger.info("Attempting to start scheduler...")
    db = SessionLocal()
    try:
        app_settings = get_settings(db)
        if app_settings:
            interval = app_settings.email_check_interval
        else:
            interval = 15
        logger.info(f"Setting scheduler interval to {interval} minutes")
        scheduler.add_job(
            job,
            "interval",
            minutes=interval,
            id="email_check_job",
            replace_existing=True,
        )
        if scheduler.running:
            logger.info("Scheduler is already running.")
        else:
            # Kick off one immediate pass before the interval timer takes over.
            job()
            scheduler.start()
            logger.info("Scheduler started.")
    except Exception as exc:
        logger.error(f"Failed to start scheduler: {exc}", exc_info=True)
    finally:
        db.close()