From 012163ba3f96c1cf790caf6de376d7d259c5daf6 Mon Sep 17 00:00:00 2001 From: Michael Beck Date: Fri, 23 May 2025 16:13:25 +0200 Subject: [PATCH] fixes dummy and single paper processing --- scipaperloader/blueprints/api.py | 92 +++- scipaperloader/blueprints/config.py | 36 +- scipaperloader/blueprints/scraper.py | 151 +++++- scipaperloader/scrapers/dummy.py | 43 +- .../templates/config/general.html.jinja | 25 + scipaperloader/templates/scraper.html.jinja | 430 +++++++++++++++++- 6 files changed, 741 insertions(+), 36 deletions(-) diff --git a/scipaperloader/blueprints/api.py b/scipaperloader/blueprints/api.py index e0c3d8f..435793a 100644 --- a/scipaperloader/blueprints/api.py +++ b/scipaperloader/blueprints/api.py @@ -1,6 +1,7 @@ from datetime import datetime from flask import Blueprint, jsonify, request -from ..models import ActivityLog, ActivityCategory +from ..models import ActivityLog, ActivityCategory, PaperMetadata +from .. import db bp = Blueprint("api", __name__, url_prefix="/api") @@ -47,4 +48,91 @@ def get_activity_logs(): } result.append(log_data) - return jsonify(result) \ No newline at end of file + return jsonify(result) + +@bp.route("/papers") +def search_papers(): + """ + Search for papers by title, DOI, or ID. 
+ + Query parameters: + - query: Search term (required) + - limit: Maximum number of results (default: 10) + """ + query = request.args.get('query', '') + limit = int(request.args.get('limit', 10)) + + if not query: + return jsonify({ + "success": False, + "message": "Search query is required", + "papers": [] + }) + + # Try to parse query as an ID first + try: + paper_id = int(query) + paper_by_id = PaperMetadata.query.get(paper_id) + if paper_by_id: + return jsonify({ + "success": True, + "papers": [{ + "id": paper_by_id.id, + "title": paper_by_id.title, + "doi": paper_by_id.doi, + "journal": paper_by_id.journal, + "status": paper_by_id.status, + "created_at": paper_by_id.created_at.isoformat() if paper_by_id.created_at else None, + "updated_at": paper_by_id.updated_at.isoformat() if paper_by_id.updated_at else None + }] + }) + except ValueError: + pass # Not an ID, continue with text search + + # Search in title and DOI + search_term = f"%{query}%" + papers = PaperMetadata.query.filter( + db.or_( + PaperMetadata.title.ilike(search_term), + PaperMetadata.doi.ilike(search_term) + ) + ).limit(limit).all() + + return jsonify({ + "success": True, + "papers": [{ + "id": paper.id, + "title": paper.title, + "doi": paper.doi, + "journal": paper.journal, + "status": paper.status, + "created_at": paper.created_at.isoformat() if paper.created_at else None, + "updated_at": paper.updated_at.isoformat() if paper.updated_at else None + } for paper in papers] + }) + +@bp.route("/papers/<int:paper_id>") +def get_paper(paper_id): + """Get details of a single paper by ID.""" + paper = PaperMetadata.query.get(paper_id) + + if not paper: + return jsonify({ + "success": False, + "message": f"Paper with ID {paper_id} not found" + }) + + return jsonify({ + "success": True, + "paper": { + "id": paper.id, + "title": paper.title, + "doi": paper.doi, + "journal": paper.journal, + "status": paper.status, + "error_msg": paper.error_msg, + "file_path": paper.file_path, + "created_at": 
paper.created_at.isoformat() if paper.created_at else None, + "updated_at": paper.updated_at.isoformat() if paper.updated_at else None + } + }) \ No newline at end of file diff --git a/scipaperloader/blueprints/config.py b/scipaperloader/blueprints/config.py index 1a6aa8c..4808cce 100644 --- a/scipaperloader/blueprints/config.py +++ b/scipaperloader/blueprints/config.py @@ -2,7 +2,7 @@ from flask import Blueprint, render_template, redirect, url_for, request, flash, jsonify, current_app from ..db import db # Import the new model -from ..models import VolumeConfig, ScheduleConfig, ActivityLog, DownloadPathConfig +from ..models import VolumeConfig, ScheduleConfig, ActivityLog, DownloadPathConfig, PaperMetadata from ..defaults import MAX_VOLUME import os # Import os for path validation from scipaperloader.scrapers import __path__ as scrapers_path @@ -402,4 +402,36 @@ def api_update_config(): return jsonify({ "success": False, "message": f"Unexpected error: {str(e)}" - }) \ No newline at end of file + }) + + +@bp.route("/delete_all_papers", methods=["POST"]) +def delete_all_papers(): + """Delete all paper records from the database.""" + try: + # Count papers before deletion for logging purposes + paper_count = PaperMetadata.query.count() + + # Delete all records from the PaperMetadata table + PaperMetadata.query.delete() + db.session.commit() + + # Log the action + ActivityLog.log_config_change( + config_key="database", + old_value=f"{paper_count} papers", + new_value="0 papers", + description=f"Deleted all {paper_count} papers from the database" + ) + + flash(f"Successfully deleted all {paper_count} papers from the database.", "success") + except Exception as e: + db.session.rollback() + flash(f"Failed to delete papers: {str(e)}", "error") + ActivityLog.log_error( + error_message=f"Failed to delete all papers: {str(e)}", + exception=e, + source="config.delete_all_papers" + ) + + return redirect(url_for("config.general")) \ No newline at end of file diff --git 
a/scipaperloader/blueprints/scraper.py b/scipaperloader/blueprints/scraper.py index 7592c01..902a6dc 100644 --- a/scipaperloader/blueprints/scraper.py +++ b/scipaperloader/blueprints/scraper.py @@ -6,13 +6,13 @@ import os # Import os for path joining from datetime import datetime, timedelta from flask import Blueprint, jsonify, render_template, request, current_app, flash # Import the new model -from ..models import VolumeConfig, ActivityLog, PaperMetadata, ActivityCategory, ScheduleConfig, ScraperState, DownloadPathConfig +from ..models import VolumeConfig, ActivityLog, PaperMetadata, ActivityCategory, ScheduleConfig, ScraperState, DownloadPathConfig, ScraperModuleConfig from ..db import db from ..celery import celery from ..defaults import MAX_VOLUME from celery.schedules import crontab from sqlalchemy import func -from scipaperloader.scrapers.factory import get_scraper +from scipaperloader.scrapers.factory import get_scraper, get_available_scrapers bp = Blueprint("scraper", __name__, url_prefix="/scraper") @@ -767,3 +767,149 @@ def process_paper(self, paper_id): "status": result.status, "message": result.message } + + +@celery.task(bind=True) +def process_paper_with_scraper(self, paper_id, scraper_module): + """Process a paper using a specific scraper module.""" + from scipaperloader.models import PaperMetadata + import importlib + from ..scrapers.base import BaseScraper + + paper = PaperMetadata.query.get(paper_id) + if not paper: + return {"status": "error", "message": f"Paper with ID {paper_id} not found"} + + try: + # Import the specified scraper module + module = importlib.import_module(f"scipaperloader.scrapers.{scraper_module}") + cls = getattr(module, "Scraper") + + # Validate that it's a BaseScraper + if not issubclass(cls, BaseScraper): + error_msg = f"Scraper class in module '{scraper_module}' does not inherit from BaseScraper" + ActivityLog.log_error( + error_message=error_msg, + source="process_paper_with_scraper" + ) + 
return {"status": "error", "message": error_msg} + + # Instantiate and use the scraper + scraper = cls() + result = scraper.scrape(paper.doi) + + return { + "paper_id": paper_id, + "status": result.status, + "message": result.message, + "scraper": scraper_module + } + + except (ImportError, AttributeError) as e: + error_msg = f"Failed to load scraper module '{scraper_module}': {str(e)}" + ActivityLog.log_error( + error_message=error_msg, + source="process_paper_with_scraper" + ) + return {"status": "error", "message": error_msg} + except Exception as e: + error_msg = f"Error processing paper with scraper '{scraper_module}': {str(e)}" + ActivityLog.log_error( + error_message=error_msg, + source="process_paper_with_scraper", + exception=e + ) + return {"status": "error", "message": error_msg} + + +@bp.route("/process_single/", methods=["POST"]) +def process_single_paper(paper_id): + """Process a single paper by ID.""" + try: + # Check if paper exists + paper = PaperMetadata.query.get(paper_id) + if not paper: + return jsonify({ + "success": False, + "message": f"Paper with ID {paper_id} not found" + }) + + # Get the scraper module name from the request + scraper_module = None + if request.is_json and request.json: + scraper_module = request.json.get('scraper_module') + + # Update status to Pending + old_status = paper.status + paper.status = "Pending" + paper.updated_at = datetime.utcnow() + db.session.commit() + + # Log that we're processing this paper + ActivityLog.log_scraper_activity( + action="manual_process_paper", + paper_id=paper_id, + status="pending", + description=f"Manual processing initiated for paper: {paper.title}" + + (f" using {scraper_module} scraper" if scraper_module else "") + ) + + # Start the task (without delay since it's manual) + if scraper_module: + task = process_paper_with_scraper.delay(paper_id, scraper_module) + else: + task = process_paper.delay(paper_id) + + return jsonify({ + "success": True, + "task_id": task.id, + "message": 
f"Processing paper '{paper.title}' (ID: {paper_id})" + + (f" using {scraper_module} scraper" if scraper_module else "") + + f". Previous status: {old_status}" + }) + + except Exception as e: + db.session.rollback() + ActivityLog.log_error( + error_message=f"Failed to process paper {paper_id}: {str(e)}", + exception=e, + source="process_single_paper" + ) + return jsonify({ + "success": False, + "message": f"Error: {str(e)}" + }) + + +@bp.route("/available_scrapers") +def available_scrapers(): + """Get list of available scraper modules.""" + from scipaperloader.scrapers.factory import get_available_scrapers + from ..models import ScraperModuleConfig + + try: + scrapers = get_available_scrapers() + current_module = ScraperModuleConfig.get_current_module() + + return jsonify({ + "success": True, + "scrapers": [ + { + "name": s["name"], + "description": s["description"], + "is_current": s["name"] == current_module + } for s in scrapers + ], + "current": current_module + }) + + except Exception as e: + ActivityLog.log_error( + error_message=f"Failed to get available scrapers: {str(e)}", + source="available_scrapers" + ) + return jsonify({ + "success": False, + "message": f"Error: {str(e)}", + "scrapers": [] + }) diff --git a/scipaperloader/scrapers/dummy.py b/scipaperloader/scrapers/dummy.py index df60354..6ab671b 100644 --- a/scipaperloader/scrapers/dummy.py +++ b/scipaperloader/scrapers/dummy.py @@ -1,5 +1,6 @@ import time import random +import os from datetime import datetime from .base import BaseScraper, ScrapeResult from flask import current_app @@ -31,15 +32,47 @@ class Scraper(BaseScraper): success = random.random() < 0.8 if success: - # Get download path and simulate file creation + # Get download path and create an actual dummy file download_path = DownloadPathConfig.get_path() file_name = f"{doi.replace('/', '_')}.pdf" file_path = f"{download_path}/{file_name}" - # Update paper status - paper.status = "Done" - paper.file_path = file_path - paper.error_msg = 
None + # Create directory if it doesn't exist + os.makedirs(download_path, exist_ok=True) + + # Create a simple dummy PDF file + try: + with open(file_path, 'w') as f: + f.write(f"Dummy PDF file for paper with DOI: {doi}\n") + f.write(f"Title: {paper.title}\n") + f.write(f"Journal: {paper.journal}\n") + f.write(f"Generated: {datetime.utcnow().isoformat()}\n") + f.write("\nThis is a dummy file created by the SciPaperLoader dummy scraper.\n") + + # Update paper status + paper.status = "Done" + paper.file_path = file_path + paper.error_msg = None + except Exception as e: + # Handle file creation errors + error_msg = f"Failed to create dummy file: {str(e)}" + paper.status = "Failed" + paper.error_msg = error_msg + + ActivityLog.log_scraper_activity( + action="dummy_scrape_file_error", + status="error", + description=error_msg, + paper_id=paper.id + ) + + return ScrapeResult( + status="error", + message=error_msg, + data={"error_code": "file_creation_error"}, + duration=time.time() - start_time, + timestamp=datetime.utcnow() + ) # Log success ActivityLog.log_scraper_activity( diff --git a/scipaperloader/templates/config/general.html.jinja b/scipaperloader/templates/config/general.html.jinja index a474561..24324ce 100644 --- a/scipaperloader/templates/config/general.html.jinja +++ b/scipaperloader/templates/config/general.html.jinja @@ -90,6 +90,31 @@ + + +
+
+
+
+
Database Management
+
+
+
+
Delete All Papers
+

This action will permanently delete all paper records from the + database. This cannot be undone.

+ +
+ +
+
+
+
+
+
diff --git a/scipaperloader/templates/scraper.html.jinja b/scipaperloader/templates/scraper.html.jinja index 750b15f..388a7ee 100644 --- a/scipaperloader/templates/scraper.html.jinja +++ b/scipaperloader/templates/scraper.html.jinja @@ -36,6 +36,28 @@ max-width: 350px; z-index: 1050; } + + .search-results-container { + max-height: 300px; + overflow-y: auto; + } + + /* Paper status badges */ + .badge-new { + background-color: #17a2b8; + } + + .badge-pending { + background-color: #ffc107; + } + + .badge-done { + background-color: #28a745; + } + + .badge-failed { + background-color: #dc3545; + } {% endblock styles %} @@ -89,6 +111,61 @@ + +
+
+
+
+
Process Single Paper
+
+
+
+
+
+
+ + +
+
+
+
+
+ + +
+ Select which scraper to use for processing the paper +
+
+
+
+ +
+ + + + + + + + + + + + + +
IDTitleDOIStatusActions
+
+ +
+
+
+
+
+
@@ -164,12 +241,19 @@ const resetButton = document.getElementById('resetButton'); const notificationsToggle = document.getElementById('notificationsToggle'); const activityLog = document.getElementById('activityLog'); + const searchForm = document.getElementById('searchPaperForm'); + const searchInput = document.getElementById('paperSearchInput'); + const searchResults = document.getElementById('searchResults'); + const processingStatus = document.getElementById('processingStatus'); + const paperSearchResults = document.getElementById('paperSearchResults'); + const scraperSelect = document.getElementById('scraperSelect'); // Initialize the page document.addEventListener('DOMContentLoaded', function () { initStatusPolling(); loadActivityStats(currentTimeRange); loadRecentActivity(); + loadAvailableScrapers(); // Initialize event listeners startButton.addEventListener('click', startScraper); @@ -177,6 +261,10 @@ stopButton.addEventListener('click', stopScraper); resetButton.addEventListener('click', resetScraper); notificationsToggle.addEventListener('click', toggleNotifications); + searchForm.addEventListener('submit', function (e) { + e.preventDefault(); + searchPapers(); + }); document.getElementById('volumeForm').addEventListener('submit', function (e) { e.preventDefault(); @@ -193,6 +281,185 @@ }); }); + // Load available scraper modules + function loadAvailableScrapers() { + fetch('/scraper/available_scrapers') + .then(response => response.json()) + .then(data => { + if (data.success && data.scrapers && data.scrapers.length > 0) { + // Clear previous options except the default one + while (scraperSelect.options.length > 1) { + scraperSelect.remove(1); + } + + // Add each scraper as an option + data.scrapers.forEach(scraper => { + const option = document.createElement('option'); + option.value = scraper.name; + option.textContent = `${scraper.name} - ${scraper.description.substring(0, 50)}${scraper.description.length > 50 ? '...' 
: ''}`; + if (scraper.is_current) { + option.textContent += ' (system default)'; + } + scraperSelect.appendChild(option); + }); + } else { + // If no scrapers or error, add a note + const option = document.createElement('option'); + option.disabled = true; + option.textContent = 'No scrapers available'; + scraperSelect.appendChild(option); + } + }) + .catch(error => { + console.error('Error loading scrapers:', error); + const option = document.createElement('option'); + option.disabled = true; + option.textContent = 'Error loading scrapers'; + scraperSelect.appendChild(option); + }); + } + + // Search papers function + function searchPapers() { + const query = searchInput.value.trim(); + + if (!query) { + showFlashMessage('Please enter a search term', 'warning'); + return; + } + + // Show loading message + paperSearchResults.innerHTML = 'Searching papers...'; + searchResults.classList.remove('d-none'); + + // Fetch papers from API + fetch(`/api/papers?query=${encodeURIComponent(query)}`) + .then(response => response.json()) + .then(data => { + if (!data.papers || data.papers.length === 0) { + paperSearchResults.innerHTML = 'No papers found matching your search'; + return; + } + + paperSearchResults.innerHTML = ''; + + data.papers.forEach(paper => { + const row = document.createElement('tr'); + + // Create status badge + let statusBadge = ''; + if (paper.status === 'New') { + statusBadge = 'New'; + } else if (paper.status === 'Pending') { + statusBadge = 'Pending'; + } else if (paper.status === 'Done') { + statusBadge = 'Done'; + } else if (paper.status === 'Failed') { + statusBadge = 'Failed'; + } else { + statusBadge = `${paper.status}`; + } + + // Create process button (enabled only for papers not in 'Pending' status) + const processButtonDisabled = paper.status === 'Pending' ? 'disabled' : ''; + + // Truncate title if too long + const truncatedTitle = paper.title.length > 70 ? paper.title.substring(0, 70) + '...' 
: paper.title; + + row.innerHTML = ` + ${paper.id} + ${truncatedTitle} + ${paper.doi || 'N/A'} + ${statusBadge} + + + + `; + + paperSearchResults.appendChild(row); + }); + + // Add event listeners to the process buttons + document.querySelectorAll('.process-paper-btn').forEach(btn => { + btn.addEventListener('click', function () { + processSinglePaper(this.getAttribute('data-paper-id')); + }); + }); + }) + .catch(error => { + console.error('Error searching papers:', error); + paperSearchResults.innerHTML = 'Error searching papers'; + }); + } + + // Process a single paper + function processSinglePaper(paperId) { + // Disable all process buttons to prevent multiple clicks + document.querySelectorAll('.process-paper-btn').forEach(btn => { + btn.disabled = true; + }); + + // Show processing status + processingStatus.textContent = 'Processing paper...'; + processingStatus.classList.remove('d-none'); + + // Get selected scraper + const selectedScraper = scraperSelect.value; + + // Send request to process the paper + fetch(`/scraper/process_single/${paperId}`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + scraper_module: selectedScraper + }) + }) + .then(response => response.json()) + .then(data => { + if (data.success) { + processingStatus.textContent = data.message; + processingStatus.className = 'alert alert-success mt-3'; + + // Update status in the search results + const row = document.querySelector(`.process-paper-btn[data-paper-id="${paperId}"]`).closest('tr'); + const statusCell = row.querySelector('td:nth-child(4)'); + statusCell.innerHTML = 'Pending'; + + // Show notification + showFlashMessage(data.message, 'success'); + + // Set up polling to check paper status and refresh activity + pollPaperStatus(paperId, 3000, 20); + } else { + processingStatus.textContent = data.message; + processingStatus.className = 'alert alert-danger mt-3'; + showFlashMessage(data.message, 'error'); + } + }) + .catch(error => { + 
console.error('Error processing paper:', error); + processingStatus.textContent = 'Error: Could not process paper'; + processingStatus.className = 'alert alert-danger mt-3'; + showFlashMessage('Error processing paper', 'error'); + }) + .finally(() => { + // Re-enable the process buttons after a short delay + setTimeout(() => { + document.querySelectorAll('.process-paper-btn').forEach(btn => { + if (btn.getAttribute('data-paper-id') !== paperId) { + btn.disabled = false; + } + }); + }, 1000); + }); + } + // Status polling function initStatusPolling() { updateStatus(); @@ -285,39 +552,39 @@ if (confirm("Are you sure you want to reset the scraper? This will stop all current tasks, optionally clear non-pending papers, and restart the scraper.")) { // Disable button to prevent multiple clicks resetButton.disabled = true; - + // Show a loading message showFlashMessage('Resetting scraper, please wait...', 'info'); - - fetch('/scraper/reset', { + + fetch('/scraper/reset', { method: 'POST', headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ + body: JSON.stringify({ clear_papers: true // You could make this configurable with a checkbox }) }) - .then(response => response.json()) - .then(data => { - if (data.success) { - showFlashMessage('Scraper has been completely reset and restarted', 'success'); - // Update everything - updateStatus(); - loadActivityStats(currentTimeRange); - setTimeout(() => { loadRecentActivity(); }, 1000); - } else { - showFlashMessage(data.message || 'Error resetting scraper', 'error'); - } - // Re-enable button - resetButton.disabled = false; - }) - .catch(error => { - console.error("Error resetting scraper:", error); - showFlashMessage('Error resetting scraper: ' + error.message, 'error'); - // Re-enable button - resetButton.disabled = false; - }); + .then(response => response.json()) + .then(data => { + if (data.success) { + showFlashMessage('Scraper has been completely reset and restarted', 'success'); + // Update everything 
+ updateStatus(); + loadActivityStats(currentTimeRange); + setTimeout(() => { loadRecentActivity(); }, 1000); + } else { + showFlashMessage(data.message || 'Error resetting scraper', 'error'); + } + // Re-enable button + resetButton.disabled = false; + }) + .catch(error => { + console.error("Error resetting scraper:", error); + showFlashMessage('Error resetting scraper: ' + error.message, 'error'); + // Re-enable button + resetButton.disabled = false; + }); } } @@ -345,6 +612,97 @@ notificationsEnabled = notificationsToggle.checked; } + // Poll paper status until it changes from Pending + function pollPaperStatus(paperId, interval = 3000, maxAttempts = 20) { + let attempts = 0; + + // Immediately refresh activity log to show the initial pending status + loadRecentActivity(); + + const checkStatus = () => { + attempts++; + console.log(`Checking status of paper ${paperId}, attempt ${attempts}/${maxAttempts}`); + + // Fetch the current paper status + fetch(`/api/papers/${paperId}`) + .then(response => response.json()) + .then(data => { + if (data && data.paper) { + const paper = data.paper; + console.log(`Paper status: ${paper.status}`); + + // Update the UI with the current status + const row = document.querySelector(`.process-paper-btn[data-paper-id="${paperId}"]`).closest('tr'); + if (row) { + const statusCell = row.querySelector('td:nth-child(4)'); + let statusBadge = ''; + + if (paper.status === 'New') { + statusBadge = 'New'; + } else if (paper.status === 'Pending') { + statusBadge = 'Pending'; + } else if (paper.status === 'Done') { + statusBadge = 'Done'; + } else if (paper.status === 'Failed') { + statusBadge = 'Failed'; + } else { + statusBadge = `${paper.status}`; + } + + statusCell.innerHTML = statusBadge; + + // Update processing status message if status changed + if (paper.status !== 'Pending') { + if (paper.status === 'Done') { + processingStatus.textContent = `Paper processed successfully: ${paper.title}`; + processingStatus.className = 'alert 
alert-success mt-3'; + } else if (paper.status === 'Failed') { + processingStatus.textContent = `Paper processing failed: ${paper.error_msg || 'Unknown error'}`; + processingStatus.className = 'alert alert-danger mt-3'; + } + } + } + + // Always refresh activity log + loadRecentActivity(); + + // If status is still pending and we haven't reached max attempts, check again + if (paper.status === 'Pending' && attempts < maxAttempts) { + setTimeout(checkStatus, interval); + } else { + // If status changed or we reached max attempts, refresh chart data too + loadActivityStats(currentTimeRange); + + // Show notification if status changed + if (paper.status !== 'Pending') { + const status = paper.status === 'Done' ? 'success' : 'error'; + const message = paper.status === 'Done' + ? `Paper processed successfully: ${paper.title}` + : `Paper processing failed: ${paper.error_msg || 'Unknown error'}`; + showFlashMessage(message, status); + } + + // If we hit max attempts but status is still pending, show a message + if (paper.status === 'Pending' && attempts >= maxAttempts) { + processingStatus.textContent = 'Paper is still being processed. 
Check the activity log for updates.'; + processingStatus.className = 'alert alert-info mt-3'; + } + } + } + }) + .catch(error => { + console.error(`Error polling paper status: ${error}`); + // If there's an error, we can still try again if under max attempts + if (attempts < maxAttempts) { + setTimeout(checkStatus, interval); + } + }); + }; + + // Start checking + setTimeout(checkStatus, interval); + } + // Load data functions function loadActivityStats(hours) { fetch(`/scraper/stats?hours=${hours}`) @@ -359,8 +717,10 @@ .then(response => response.json()) .then(data => { renderActivityLog(data); + console.log("Activity log refreshed with latest data"); }) - .catch(() => { + .catch((error) => { + console.error("Failed to load activity logs:", error); // If the API endpoint doesn't exist, just show a message activityLog.innerHTML = 'Activity log API not available'; }); @@ -467,6 +827,26 @@ }); } + // Flash message function + function showFlashMessage(message, type) { + const flashContainer = document.createElement('div'); + flashContainer.className = `alert alert-${type === 'error' ? 'danger' : type} alert-dismissible fade show notification`; + flashContainer.innerHTML = ` + ${message} + + `; + + document.body.appendChild(flashContainer); + + // Auto dismiss after 5 seconds + setTimeout(() => { + flashContainer.classList.remove('show'); + setTimeout(() => { + flashContainer.remove(); + }, 150); // Remove after fade out animation + }, 5000); + } + // WebSocket for real-time notifications function setupWebSocket() { // If WebSocket is available, implement it here