"""Logger view.""" import csv import io import datetime from flask import Blueprint, render_template, request, send_file, jsonify from ..db import db from ..models import ActivityLog, ActivityCategory bp = Blueprint("logger", __name__, url_prefix="/logs") @bp.route("/") def list_logs(): # For the new modern view, we only need to provide initial filter values and categories # The actual data loading will be handled by JavaScript via the API endpoint # Get filter parameters for initial state category = request.args.get("category") start_date = request.args.get("start_date") end_date = request.args.get("end_date") search_term = request.args.get("search_term") if search_term == "None": search_term = None categories = [e.value for e in ActivityCategory] return render_template( "logs.html.jinja", categories=categories, category=category, start_date=start_date, end_date=end_date, search_term=search_term, app_title="PaperScraper", ) @bp.route("/download") def download_logs(): # Filters - reuse logic from list_logs category = request.args.get("category") start_date = request.args.get("start_date") end_date = request.args.get("end_date") search_term = request.args.get("search_term") query = ActivityLog.query if category: query = query.filter(ActivityLog.category == category) if start_date: start_date_dt = datetime.datetime.strptime(start_date, "%Y-%m-%d") query = query.filter(ActivityLog.timestamp >= start_date_dt) if end_date: end_date_dt = datetime.datetime.strptime(end_date, "%Y-%m-%d") + datetime.timedelta(days=1) query = query.filter(ActivityLog.timestamp <= end_date_dt) if search_term: query = query.filter(db.or_( ActivityLog.action.contains(search_term), ActivityLog.description.contains(search_term) )) logs = query.order_by(ActivityLog.timestamp.desc()).all() # Prepare CSV data csv_data = io.StringIO() csv_writer = csv.writer(csv_data) csv_writer.writerow(["Timestamp", "Category", "Action", "Description", "Extra Data"]) # Header for log in logs: csv_writer.writerow([ log.timestamp, log.category, log.action, log.description, log.extra_data # Consider formatting this better ]) # Create response filename = f"logs_{datetime.datetime.now().strftime('%Y%m%d_%H%M%S')}.csv" csv_data.seek(0) output = io.BytesIO(csv_data.getvalue().encode('utf-8')) output.seek(0) return send_file( output, mimetype="text/csv", as_attachment=True, download_name=filename ) @bp.route("//detail") def log_detail(log_id): log = ActivityLog.query.get_or_404(log_id) return render_template("partials/log_detail_modal.html.jinja", log=log) @bp.route("/api") def get_logs_api(): """Unified API endpoint for getting activity logs with filtering and pagination support.""" try: # Pagination parameters page = request.args.get('page', 1, type=int) per_page = request.args.get('per_page', 50, type=int) # Legacy limit parameter for backward compatibility limit = request.args.get('limit', type=int) if limit and not request.args.get('page'): # Legacy mode: use limit without pagination query = ActivityLog.query # Apply filters categories = request.args.getlist('category') if categories: query = query.filter(ActivityLog.category.in_(categories)) status = request.args.get('status') if status: query = query.filter(ActivityLog.status == status) start_date = request.args.get('start_date') if start_date: start_date_dt = datetime.datetime.strptime(start_date, "%Y-%m-%d") query = query.filter(ActivityLog.timestamp >= start_date_dt) end_date = request.args.get('end_date') if end_date: end_date_dt = datetime.datetime.strptime(end_date, "%Y-%m-%d") + 
datetime.timedelta(days=1) query = query.filter(ActivityLog.timestamp <= end_date_dt) search_term = request.args.get('search_term') if search_term and search_term != "None": query = query.filter(db.or_( ActivityLog.action.contains(search_term), ActivityLog.description.contains(search_term) )) logs = query.order_by(ActivityLog.timestamp.desc()).limit(limit).all() return jsonify({ "success": True, "logs": [{ "id": log.id, "timestamp": log.timestamp.isoformat(), "action": log.action, "status": log.status, "description": log.description, "category": log.category, "paper_id": log.paper_id, "extra_data": log.extra_data } for log in logs] }) # Ensure reasonable per_page limits per_page = min(per_page, 100) # Cap at 100 items per page # Build query with filtering query = ActivityLog.query # Filter by categories if specified categories = request.args.getlist('category') if categories: query = query.filter(ActivityLog.category.in_(categories)) # Filter by status if specified status = request.args.get('status') if status: query = query.filter(ActivityLog.status == status) # Date filters start_date = request.args.get('start_date') if start_date: start_date_dt = datetime.datetime.strptime(start_date, "%Y-%m-%d") query = query.filter(ActivityLog.timestamp >= start_date_dt) end_date = request.args.get('end_date') if end_date: end_date_dt = datetime.datetime.strptime(end_date, "%Y-%m-%d") + datetime.timedelta(days=1) query = query.filter(ActivityLog.timestamp <= end_date_dt) # Search term filter search_term = request.args.get('search_term') if search_term and search_term != "None": query = query.filter(db.or_( ActivityLog.action.contains(search_term), ActivityLog.description.contains(search_term) )) # Order by most recent first and paginate pagination = query.order_by(ActivityLog.timestamp.desc()).paginate( page=page, per_page=per_page, error_out=False ) return jsonify({ "success": True, "logs": [{ "id": log.id, "timestamp": log.timestamp.isoformat(), "action": log.action, "status": log.status, "description": log.description, "category": log.category, "paper_id": log.paper_id, "extra_data": log.extra_data } for log in pagination.items], "pagination": { "page": pagination.page, "pages": pagination.pages, "per_page": pagination.per_page, "total": pagination.total, "has_next": pagination.has_next, "has_prev": pagination.has_prev, "next_num": pagination.next_num if pagination.has_next else None, "prev_num": pagination.prev_num if pagination.has_prev else None } }) except Exception as e: return jsonify({ "success": False, "message": f"Error getting logs: {str(e)}" }), 500
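

# Illustrative usage sketch (not part of the view logic): assuming this blueprint
# is registered on the app with its /logs prefix, clients would hit the API like
# this; the concrete dates, search terms, and counts below are made-up examples.
#
#   GET /logs/api?page=1&per_page=25&start_date=2024-01-01&search_term=download
#
# responds with JSON shaped roughly like:
#
#   {"success": true,
#    "logs": [{"id": 1, "timestamp": "...", "action": "...", "status": "...",
#              "description": "...", "category": "...", "paper_id": null,
#              "extra_data": null}],
#    "pagination": {"page": 1, "pages": 3, "per_page": 25, "total": 75,
#                   "has_next": true, "has_prev": false,
#                   "next_num": 2, "prev_num": null}}
#
#   GET /logs/api?limit=20   # legacy mode: newest 20 logs, no pagination block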