- Created blueprints/it_audit/ with 5 routes: - /it-audit/form (it_audit_form) - /it-audit/save (it_audit_save) - /api/it-audit/matches/<company_id> - /api/it-audit/history/<company_id> - /api/it-audit/export - Added endpoint aliases for backward compatibility - Removed ~600 lines from app.py (8750 -> 8150) Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
8151 lines
300 KiB
Python
8151 lines
300 KiB
Python
#!/usr/bin/env python3
|
|
"""
|
|
Norda Biznes Partner - Flask Application
|
|
====================================
|
|
|
|
Main Flask application for Norda Biznes company directory with AI chat.
|
|
|
|
Features:
|
|
- User authentication with email confirmation
|
|
- Company directory with advanced search
|
|
- AI chat assistant powered by Google Gemini
|
|
- PostgreSQL database integration
|
|
- Analytics dashboard for chat insights
|
|
|
|
Author: Norda Biznes Development Team
|
|
Created: 2025-11-23
|
|
"""
|
|
|
|
import os
|
|
import logging
|
|
import secrets
|
|
import re
|
|
import json
|
|
import time
|
|
from collections import deque
|
|
from pathlib import Path
|
|
from datetime import datetime, timedelta, date
|
|
from flask import Flask, render_template, request, jsonify, redirect, url_for, flash, session, Response, send_file
|
|
from flask_login import login_user, logout_user, login_required, current_user
|
|
# Note: CSRFProtect, Limiter, LoginManager are imported from extensions.py (line ~250)
|
|
from werkzeug.security import generate_password_hash, check_password_hash
|
|
from dotenv import load_dotenv
|
|
from user_agents import parse as parse_user_agent
|
|
import uuid
|
|
import traceback as tb_module
|
|
|
|
# Load environment variables (override any existing env vars).
# Try .env first, then nordabiz_config.txt for production flexibility,
# finally fall back to python-dotenv's default discovery.
# Fix: removed a redundant `import os` -- os is already imported at the
# top of the file.
if os.path.exists('.env'):
    load_dotenv('.env', override=True)
elif os.path.exists('nordabiz_config.txt'):
    load_dotenv('nordabiz_config.txt', override=True)
else:
    load_dotenv(override=True)
|
|
|
|
# ============================================================
# GLOBAL CONSTANTS - MARKETING
# ============================================================
# Number of business entities (NORDA Chamber marketing target).
# Used everywhere the company count is displayed.
COMPANY_COUNT_MARKETING = 150
|
|
|
|
# Configure logging with in-memory buffer for debug panel
|
|
class DebugLogHandler(logging.Handler):
    """Custom handler that stores logs in memory for real-time viewing.

    Keeps the most recent ``max_logs`` records in a bounded deque so the
    admin debug panel can display them without unbounded memory growth.
    """

    def __init__(self, max_logs=500):
        super().__init__()
        # deque(maxlen=...) silently drops the oldest entry once full.
        self.logs = deque(maxlen=max_logs)

    def emit(self, record):
        """Append a formatted record (as a dict) to the in-memory buffer.

        Per the logging.Handler contract, emit() must never let an
        exception escape into application code; failures are routed to
        self.handleError() instead.
        """
        try:
            log_entry = {
                'timestamp': datetime.now().isoformat(),
                'level': record.levelname,
                'logger': record.name,
                'message': self.format(record),
                'module': record.module,
                'funcName': record.funcName,
                'lineno': record.lineno
            }
            self.logs.append(log_entry)
        except Exception:
            # Standard logging convention: report via handleError, never raise.
            self.handleError(record)
|
|
|
|
# Create debug handler that feeds the in-memory buffer for the debug panel.
debug_handler = DebugLogHandler(max_logs=500)
debug_handler.setFormatter(logging.Formatter('%(message)s'))

# Root logging configuration for the whole application (console output).
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)

# Add debug handler to root logger so every module's records reach the panel.
logging.getLogger().addHandler(debug_handler)

# Module-level logger for this file.
logger = logging.getLogger(__name__)
|
|
|
|
# Security logger for fail2ban integration.
# Logs to /var/log/nordabiznes/security.log in production. When that
# directory does not exist (e.g. local development) no file handler is
# attached and WARNING+ records simply propagate to the root logger.
security_logger = logging.getLogger('security')
security_logger.setLevel(logging.WARNING)
_security_log_path = '/var/log/nordabiznes/security.log'
if os.path.exists('/var/log/nordabiznes'):
    _security_handler = logging.FileHandler(_security_log_path)
    _security_handler.setFormatter(logging.Formatter(
        '%(asctime)s [%(levelname)s] %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S'
    ))
    security_logger.addHandler(_security_handler)
|
|
|
|
# Import database models
|
|
from database import (
|
|
init_db,
|
|
SessionLocal,
|
|
User,
|
|
Company,
|
|
Category,
|
|
Service,
|
|
Competency,
|
|
CompanyDigitalMaturity,
|
|
CompanyWebsiteAnalysis,
|
|
CompanyQualityTracking,
|
|
CompanyWebsiteContent,
|
|
CompanyAIInsights,
|
|
CompanyEvent,
|
|
CompanySocialMedia,
|
|
CompanyContact,
|
|
AIChatConversation,
|
|
AIChatMessage,
|
|
AIChatFeedback,
|
|
AIAPICostLog,
|
|
ForumTopic,
|
|
ForumReply,
|
|
ForumAttachment,
|
|
NordaEvent,
|
|
EventAttendee,
|
|
PrivateMessage,
|
|
Classified,
|
|
UserNotification,
|
|
CompanyRecommendation,
|
|
MembershipFee,
|
|
MembershipFeeConfig,
|
|
Person,
|
|
CompanyPerson,
|
|
GBPAudit,
|
|
ITAudit,
|
|
KRSAudit,
|
|
CompanyPKD,
|
|
CompanyFinancialReport,
|
|
UserSession,
|
|
UserBlock,
|
|
PageView,
|
|
UserClick,
|
|
AnalyticsDaily,
|
|
PopularPagesDaily,
|
|
SearchQuery,
|
|
ConversionEvent,
|
|
JSError,
|
|
PopularSearchesDaily,
|
|
HourlyActivity,
|
|
AuditLog,
|
|
SecurityAlert,
|
|
ZOPKNews
|
|
)
|
|
|
|
# Import services
|
|
import gemini_service
|
|
from nordabiz_chat import NordaBizChatEngine
|
|
from search_service import search_companies
|
|
import krs_api_service
|
|
from file_upload_service import FileUploadService
|
|
|
|
# Security service for audit log, alerting, GeoIP, 2FA.
# Optional dependency: when the module is missing the app still boots, and
# security-dependent code paths must check SECURITY_SERVICE_AVAILABLE first.
try:
    from security_service import (
        log_audit, create_security_alert, get_client_ip,
        is_ip_allowed, geoip_check, init_security_service,
        generate_totp_secret, get_totp_uri, verify_totp,
        generate_backup_codes, verify_backup_code, requires_2fa
    )
    SECURITY_SERVICE_AVAILABLE = True
except ImportError as e:
    SECURITY_SERVICE_AVAILABLE = False
    logger.warning(f"Security service not available: {e}")

# News service for fetching company news (optional, same guard pattern).
try:
    from news_service import NewsService, get_news_service, init_news_service
    NEWS_SERVICE_AVAILABLE = True
except ImportError:
    NEWS_SERVICE_AVAILABLE = False
    logger.warning("News service not available")

# SEO audit components for triggering audits via API.
# The audit scripts live in scripts/ (not a package), so the directory is
# added to sys.path before the import is attempted.
import sys
_scripts_path = os.path.join(os.path.dirname(__file__), 'scripts')
if _scripts_path not in sys.path:
    sys.path.insert(0, _scripts_path)

try:
    from seo_audit import SEOAuditor, SEO_AUDIT_VERSION
    SEO_AUDIT_AVAILABLE = True
except ImportError as e:
    SEO_AUDIT_AVAILABLE = False
    logger.warning(f"SEO audit service not available: {e}")

# GBP (Google Business Profile) audit service (optional).
try:
    from gbp_audit_service import (
        GBPAuditService,
        audit_company as gbp_audit_company,
        get_company_audit as gbp_get_company_audit,
        fetch_google_business_data as gbp_fetch_google_data
    )
    GBP_AUDIT_AVAILABLE = True
    GBP_AUDIT_VERSION = '1.0'
except ImportError as e:
    GBP_AUDIT_AVAILABLE = False
    GBP_AUDIT_VERSION = None
    logger.warning(f"GBP audit service not available: {e}")

# KRS (Polish National Court Register) audit service (optional).
try:
    from krs_audit_service import parse_krs_pdf, parse_krs_pdf_full
    KRS_AUDIT_AVAILABLE = True
    KRS_AUDIT_VERSION = '1.0'
except ImportError as e:
    KRS_AUDIT_AVAILABLE = False
    KRS_AUDIT_VERSION = None
    logger.warning(f"KRS audit service not available: {e}")
|
|
|
|
# Initialize Flask app
app = Flask(__name__)

# Security: Require strong SECRET_KEY (no default value allowed).
# Failing fast here prevents the app from ever running with a weak or
# missing session-signing key.
SECRET_KEY = os.getenv('SECRET_KEY')
if not SECRET_KEY or len(SECRET_KEY) < 32:
    raise ValueError("SECRET_KEY must be set in environment variables and be at least 32 characters long")
app.config['SECRET_KEY'] = SECRET_KEY

# Permanent (remember-me style) sessions live for one week.
app.config['PERMANENT_SESSION_LIFETIME'] = timedelta(days=7)

# Security configurations
app.config['WTF_CSRF_ENABLED'] = True
app.config['WTF_CSRF_TIME_LIMIT'] = None  # No time limit for CSRF tokens
app.config['SESSION_COOKIE_SECURE'] = os.getenv('FLASK_ENV') != 'development'  # HTTPS only in production
app.config['SESSION_COOKIE_HTTPONLY'] = True  # session cookie unreadable from JS
app.config['SESSION_COOKIE_SAMESITE'] = 'Lax'  # cross-site request mitigation
|
|
|
# Template filters
|
|
@app.template_filter('ensure_url')
def ensure_url_filter(url):
    """Jinja filter: prefix a bare domain with https:// when no scheme is present."""
    if not url:
        # None / empty string pass through untouched.
        return url
    if url.startswith(('http://', 'https://')):
        return url
    return f'https://{url}'
|
|
|
|
# Initialize extensions from centralized extensions.py
from extensions import csrf, limiter, login_manager

csrf.init_app(app)

# Initialize rate limiter with Redis storage (persistent across restarts).
# Falls back to in-memory storage if Redis is unreachable.
_redis_available = False
try:
    import redis
    _redis_client = redis.Redis(host='localhost', port=6379, db=0)
    _redis_client.ping()  # raises if the server is down
    _redis_available = True
    logger.info("Rate limiter using Redis storage")
except Exception:
    logger.warning("Redis unavailable, rate limiter using memory storage")

# Note: default_limits are set in extensions.py; here we only configure storage.
# NOTE(review): _storage_uri is a private Flask-Limiter attribute; this relies
# on library internals and may break on upgrade -- verify against the
# installed flask-limiter version.
if _redis_available:
    limiter._storage_uri = "redis://localhost:6379/0"
else:
    limiter._storage_uri = "memory://"
limiter.init_app(app)
|
|
|
|
@limiter.request_filter
def is_admin_exempt():
    """Exempt logged-in admins from rate limiting."""
    from flask_login import current_user
    try:
        # Anonymous users (or any attribute error) are never exempt.
        if not current_user.is_authenticated:
            return False
        return bool(current_user.is_admin)
    except Exception:
        return False
|
|
|
|
# Initialize database (see database.init_db for table creation).
init_db()

# Initialize Login Manager (imported from extensions.py)
login_manager.init_app(app)
login_manager.login_view = 'login'  # Will change to 'auth.login' after full migration
login_manager.login_message = 'Zaloguj się, aby uzyskać dostęp do tej strony.'

# Initialize Gemini service; the app still starts if the AI backend fails.
try:
    gemini_service.init_gemini_service(model='3-flash')  # Gemini 3 Flash Preview - newest model, much stronger reasoning
    logger.info("Gemini service initialized successfully")
except Exception as e:
    logger.error(f"Failed to initialize Gemini service: {e}")

# Register blueprints (Phase 1: reports, community)
from blueprints import register_blueprints
register_blueprints(app)
logger.info("Blueprints registered")
|
|
|
|
@login_manager.user_loader
def load_user(user_id):
    """Resolve a session user id back to a User row (Flask-Login hook)."""
    db_session = SessionLocal()
    try:
        return db_session.query(User).filter_by(id=int(user_id)).first()
    finally:
        # Always release the connection, even when the lookup raises.
        db_session.close()
|
|
|
|
|
|
# ============================================================
|
|
# TEMPLATE CONTEXT PROCESSORS
|
|
# ============================================================
|
|
|
|
@app.context_processor
def inject_globals():
    """Expose site-wide template variables (year, current time, company count)."""
    current_time = datetime.now()
    return {
        'current_year': current_time.year,
        # Templates call now.strftime(), so this must be a datetime value,
        # not a callable.
        'now': current_time,
        # Marketing headcount shown wherever the company total appears.
        'COMPANY_COUNT': COMPANY_COUNT_MARKETING,
    }
|
|
|
|
|
|
@app.context_processor
def inject_notifications():
    """Expose the unread-notification badge count to every template."""
    # Guard clause: anonymous visitors always see a zero badge.
    if not current_user.is_authenticated:
        return {'unread_notifications_count': 0}

    db_session = SessionLocal()
    try:
        pending = db_session.query(UserNotification).filter(
            UserNotification.user_id == current_user.id,
            UserNotification.is_read == False
        ).count()
        return {'unread_notifications_count': pending}
    finally:
        db_session.close()
|
|
|
|
|
|
# ============================================================
|
|
# NOTIFICATION HELPERS
|
|
# ============================================================
|
|
|
|
def create_notification(user_id, title, message, notification_type='info',
                        related_type=None, related_id=None, action_url=None):
    """Persist a single UserNotification row for one user.

    Args:
        user_id: ID of the user to notify
        title: Notification title
        message: Notification message/body
        notification_type: Type of notification (news, system, message, event, alert)
        related_type: Type of related entity (company_news, event, message, etc.)
        related_id: ID of the related entity
        action_url: URL to navigate when notification is clicked

    Returns:
        The persisted UserNotification, or None when the insert fails.
    """
    db_session = SessionLocal()
    try:
        new_notification = UserNotification(
            user_id=user_id,
            title=title,
            message=message,
            notification_type=notification_type,
            related_type=related_type,
            related_id=related_id,
            action_url=action_url
        )
        db_session.add(new_notification)
        db_session.commit()
        # Refresh so the generated primary key is populated on the object.
        db_session.refresh(new_notification)
        logger.info(f"Created notification for user {user_id}: {title}")
        return new_notification
    except Exception as e:
        logger.error(f"Error creating notification: {e}")
        db_session.rollback()
        return None
    finally:
        db_session.close()
|
|
|
|
|
|
def create_news_notification(company_id, news_id, news_title):
    """Notify every active user of a company that their news item was approved.

    Args:
        company_id: ID of the company
        news_id: ID of the approved news
        news_title: Title of the news
    """
    db_session = SessionLocal()
    try:
        # All active users linked to this company receive the notification.
        recipients = db_session.query(User).filter(
            User.company_id == company_id,
            User.is_active == True
        ).all()

        for recipient in recipients:
            create_notification(
                user_id=recipient.id,
                title="Nowa aktualnosc o Twojej firmie",
                message=f"Aktualnosc '{news_title}' zostala zatwierdzona i jest widoczna na profilu firmy.",
                notification_type='news',
                related_type='company_news',
                related_id=news_id,
                action_url=f"/company/{company_id}"
            )
    finally:
        db_session.close()
|
|
|
|
|
|
# ============================================================
# USER ANALYTICS - TRACKING HELPERS
# ============================================================

# Global variable to store current page_view_id for templates.
# NOTE(review): entries are keyed by id(request); `request` is Flask's
# module-level LocalProxy, so id(request) is the id of that shared proxy
# object and is identical for every request in the process. Under a
# threaded server concurrent requests will overwrite each other's entry.
# Consider flask.g instead -- verify under concurrency.
_current_page_view_id = {}
|
|
|
|
def get_or_create_analytics_session():
    """
    Get existing analytics session or create new one.
    Returns the database session ID (integer), or None on error.
    Includes GeoIP lookup and UTM parameter parsing for new sessions.
    """
    # Browser-side identifier stored in the Flask session cookie.
    analytics_session_id = session.get('analytics_session_id')

    if not analytics_session_id:
        analytics_session_id = str(uuid.uuid4())
        session['analytics_session_id'] = analytics_session_id

    db = SessionLocal()
    try:
        user_session = db.query(UserSession).filter_by(session_id=analytics_session_id).first()

        if not user_session:
            # Parse user agent; fall back to generic values on parser errors.
            ua_string = request.headers.get('User-Agent', '')
            try:
                ua = parse_user_agent(ua_string)
                device_type = 'mobile' if ua.is_mobile else ('tablet' if ua.is_tablet else 'desktop')
                browser = ua.browser.family
                browser_version = ua.browser.version_string
                os_name = ua.os.family
                os_version = ua.os.version_string
            except Exception:
                device_type = 'desktop'
                browser = 'Unknown'
                browser_version = ''
                os_name = 'Unknown'
                os_version = ''

            # GeoIP lookup (best-effort; all three stay None on failure).
            country, city, region = None, None, None
            # First entry of X-Forwarded-For is the originating client.
            ip_address = request.headers.get('X-Forwarded-For', request.remote_addr)
            if ip_address:
                ip_address = ip_address.split(',')[0].strip()
                try:
                    from security_service import get_geoip_info
                    geo_info = get_geoip_info(ip_address)
                    if geo_info:
                        country = geo_info.get('country')
                        city = geo_info.get('city')
                        region = geo_info.get('region')
                except Exception as e:
                    logger.debug(f"GeoIP lookup failed for {ip_address}: {e}")

            # UTM parameters (captured from the session's first request);
            # truncated to 255 chars, empty strings normalized to None.
            utm_source = request.args.get('utm_source', '')[:255] or None
            utm_medium = request.args.get('utm_medium', '')[:255] or None
            utm_campaign = request.args.get('utm_campaign', '')[:255] or None
            utm_term = request.args.get('utm_term', '')[:255] or None
            utm_content = request.args.get('utm_content', '')[:255] or None

            user_session = UserSession(
                session_id=analytics_session_id,
                user_id=current_user.id if current_user.is_authenticated else None,
                ip_address=ip_address,
                user_agent=ua_string[:2000] if ua_string else None,
                device_type=device_type,
                browser=browser[:50] if browser else None,
                browser_version=browser_version[:20] if browser_version else None,
                os=os_name[:50] if os_name else None,
                os_version=os_version[:20] if os_version else None,
                # GeoIP
                country=country,
                city=city,
                region=region,
                # UTM
                utm_source=utm_source,
                utm_medium=utm_medium,
                utm_campaign=utm_campaign,
                utm_term=utm_term,
                utm_content=utm_content
            )
            db.add(user_session)
            db.commit()
            db.refresh(user_session)
        else:
            # Existing session: refresh activity timestamp and total duration.
            user_session.last_activity_at = datetime.now()
            user_session.duration_seconds = int(
                (datetime.now() - user_session.started_at).total_seconds()
            )
            # Attach the user id once the visitor logs in mid-session.
            if current_user.is_authenticated and not user_session.user_id:
                user_session.user_id = current_user.id
            db.commit()

        return user_session.id
    except Exception as e:
        logger.error(f"Analytics session error: {e}")
        db.rollback()
        return None
    finally:
        db.close()
|
|
|
|
|
|
def track_conversion(event_type: str, company_id: int = None, target_type: str = None,
                     target_value: str = None, metadata: dict = None):
    """
    Track conversion event.

    Args:
        event_type: Type of conversion (register, login, contact_click, rsvp, message, classified)
        company_id: Related company ID (for contact_click)
        target_type: What was clicked (email, phone, website)
        target_value: The value (email address, phone number, etc.)
        metadata: Additional data as dict

    All errors are logged and swallowed so tracking never breaks a request.
    """
    try:
        analytics_session_id = session.get('analytics_session_id')
        session_db_id = None

        db = SessionLocal()
        try:
            # Resolve the cookie UUID to the UserSession primary key, if any.
            if analytics_session_id:
                user_session = db.query(UserSession).filter_by(session_id=analytics_session_id).first()
                if user_session:
                    session_db_id = user_session.id

            # Map the event type to a funnel-stage category.
            category_map = {
                'register': 'acquisition',
                'login': 'activation',
                'contact_click': 'engagement',
                'rsvp': 'engagement',
                'message': 'engagement',
                'classified': 'engagement'
            }

            conversion = ConversionEvent(
                session_id=session_db_id,
                user_id=current_user.id if current_user.is_authenticated else None,
                event_type=event_type,
                event_category=category_map.get(event_type, 'other'),
                company_id=company_id,
                target_type=target_type,
                # Free-text fields are truncated to fit their columns.
                target_value=target_value[:500] if target_value else None,
                source_page=request.url[:500] if request.url else None,
                referrer=request.referrer[:500] if request.referrer else None,
                event_metadata=metadata
            )
            db.add(conversion)
            db.commit()
            logger.info(f"Conversion tracked: {event_type} company={company_id} target={target_type}")
        except Exception as e:
            logger.error(f"Conversion tracking error: {e}")
            db.rollback()
        finally:
            db.close()
    except Exception as e:
        logger.error(f"Conversion tracking outer error: {e}")
|
|
|
|
|
|
@app.before_request
def check_geoip():
    """Block requests from high-risk countries (RU, CN, KP, IR, BY, SY, VE, CU).

    Runs before every request; static assets and the health probe are
    skipped. When the security service rejects the client IP, a
    low-severity security alert is recorded (best-effort) and the request
    is aborted with HTTP 403.
    """
    # Bug fix: `abort` is not in the module-level flask import, so the
    # original code raised NameError (-> 500) instead of returning 403.
    from flask import abort

    # Skip static files and health checks
    if request.path.startswith('/static') or request.path == '/health':
        return

    # Bug fix: is_ip_allowed only exists when the optional security
    # service imported successfully; without this guard every request
    # raised NameError when the service was missing.
    if not SECURITY_SERVICE_AVAILABLE:
        return

    if not is_ip_allowed():
        # X-Forwarded-For may contain a proxy chain; first hop is the client.
        ip = request.headers.get('X-Forwarded-For', request.remote_addr)
        if ip:
            ip = ip.split(',')[0].strip()
        from security_service import get_country_code
        country = get_country_code(ip)
        logger.warning(f"GEOIP_BLOCKED ip={ip} country={country} path={request.path}")

        # Create alert for blocked access (best-effort; never blocks the 403).
        db = None
        try:
            db = SessionLocal()
            from security_service import create_security_alert
            create_security_alert(
                db, 'geo_blocked', 'low',
                ip_address=ip,
                details={'country': country, 'path': request.path, 'user_agent': request.user_agent.string[:200]}
            )
            db.commit()
        except Exception as e:
            logger.error(f"Failed to create geo block alert: {e}")
        finally:
            # Bug fix: the session previously leaked when alert creation failed.
            if db is not None:
                db.close()

        abort(403)
|
|
|
|
|
|
@app.before_request
def track_page_view():
    """Track page views (excluding static files, API calls, health, favicon).

    Best-effort: any tracking failure is logged and the request proceeds.
    """
    # Skip static files
    if request.path.startswith('/static'):
        return

    # Skip API calls except selected ones
    if request.path.startswith('/api'):
        return

    # Skip analytics tracking endpoints (currently unreachable after the
    # /api check above; kept in case that rule changes)
    if request.path in ['/api/analytics/track', '/api/analytics/heartbeat']:
        return

    # Skip health checks
    if request.path == '/health':
        return

    # Skip favicon
    if request.path == '/favicon.ico':
        return

    try:
        session_db_id = get_or_create_analytics_session()
        if not session_db_id:
            return

        db = SessionLocal()
        try:
            page_view = PageView(
                session_id=session_db_id,
                user_id=current_user.id if current_user.is_authenticated else None,
                url=request.url[:2000] if request.url else '',
                path=request.path[:500] if request.path else '/',
                referrer=request.referrer[:2000] if request.referrer else None
            )

            # Extract company_id from path if on a company profile page.
            if request.path.startswith('/company/'):
                try:
                    slug = request.path.split('/')[2].split('?')[0]
                    company = db.query(Company).filter_by(slug=slug).first()
                    if company:
                        page_view.company_id = company.id
                except Exception:
                    pass

            db.add(page_view)

            # Update session page count
            user_session = db.query(UserSession).filter_by(id=session_db_id).first()
            if user_session:
                user_session.page_views_count = (user_session.page_views_count or 0) + 1

            db.commit()

            # Store page_view_id for click tracking (in request context).
            # NOTE(review): id(request) is the id of Flask's shared LocalProxy,
            # identical across requests -- concurrent requests overwrite each
            # other's entry here; verify / consider flask.g.
            _current_page_view_id[id(request)] = page_view.id

        except Exception as e:
            logger.error(f"Page view tracking error: {e}")
            db.rollback()
        finally:
            db.close()
    except Exception as e:
        logger.error(f"Page view tracking outer error: {e}")
|
|
|
|
|
|
@app.context_processor
def inject_page_view_id():
    """Inject page_view_id into all templates for JS click tracking.

    NOTE(review): the lookup key id(request) is the id of Flask's shared
    LocalProxy object, so it is the same for every request -- concurrent
    requests may read each other's page view id. Verify / migrate to flask.g.
    """
    page_view_id = _current_page_view_id.get(id(request), '')
    return {'page_view_id': page_view_id}
|
|
|
|
|
|
@app.teardown_request
def cleanup_page_view_id(exception=None):
    """Clean up page_view_id from the module-level dict after each request.

    NOTE(review): keyed by id(request) -- the id of Flask's shared
    LocalProxy, identical for all requests -- so under concurrency this may
    pop an entry written by a different request. Verify / consider flask.g.
    """
    _current_page_view_id.pop(id(request), None)
|
|
|
|
|
|
# ============================================================
|
|
# SECURITY MIDDLEWARE & HELPERS
|
|
# ============================================================
|
|
|
|
@app.after_request
def set_security_headers(response):
    """Attach standard security headers plus a CSP to every response."""
    fixed_headers = {
        'X-Content-Type-Options': 'nosniff',
        'X-Frame-Options': 'SAMEORIGIN',
        'X-XSS-Protection': '1; mode=block',
        'Strict-Transport-Security': 'max-age=31536000; includeSubDomains',
        'Referrer-Policy': 'strict-origin-when-cross-origin',
    }
    for header_name, header_value in fixed_headers.items():
        response.headers[header_name] = header_value

    # Content Security Policy: self-hosted by default, jsDelivr CDN for
    # scripts/styles/fonts, any HTTPS origin for images.
    response.headers['Content-Security-Policy'] = (
        "default-src 'self'; "
        "script-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net; "
        "style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com; "
        "img-src 'self' data: https:; "
        "font-src 'self' https://cdn.jsdelivr.net https://fonts.gstatic.com; "
        "connect-src 'self'"
    )

    return response
|
|
|
|
|
|
def validate_email(email):
    """Check that *email* matches a simplified RFC 5322 address pattern.

    Empty values and addresses longer than 255 characters are rejected.
    Returns a bool.
    """
    if not email or len(email) > 255:
        return False

    # RFC 5322 compliant email regex (simplified)
    pattern = r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$'
    return bool(re.match(pattern, email))
|
|
|
|
|
|
def validate_password(password):
    """Validate password strength.

    Requirements: minimum 8 characters, at least one uppercase letter,
    one lowercase letter, and one digit.

    Returns:
        (bool, str): validity flag and a Polish user-facing message
        ("OK" when the password passes all checks).
    """
    if not password or len(password) < 8:
        return False, "Hasło musi mieć minimum 8 znaków"

    # Each rule pairs a character-class pattern with its error message;
    # the first failing rule short-circuits.
    rules = (
        (r'[A-Z]', "Hasło musi zawierać przynajmniej jedną wielką literę"),
        (r'[a-z]', "Hasło musi zawierać przynajmniej jedną małą literę"),
        (r'\d', "Hasło musi zawierać przynajmniej jedną cyfrę"),
    )
    for pattern, error_message in rules:
        if not re.search(pattern, password):
            return False, error_message

    return True, "OK"
|
|
|
|
|
|
def sanitize_input(text, max_length=1000):
    """Normalize untrusted text: drop NUL bytes, cap length, trim whitespace."""
    if not text:
        return ""

    # NUL bytes can corrupt downstream writes; strip them before the
    # length cap so truncation applies to the cleaned text.
    cleaned = text.replace('\x00', '')[:max_length]
    return cleaned.strip()
|
|
|
|
|
|
def get_free_tier_usage():
    """
    Get today's Gemini API usage for free tier tracking.

    Returns:
        Dict with requests_today and tokens_today (zeros on query failure)
    """
    from datetime import date
    from sqlalchemy import func

    db = SessionLocal()
    try:
        today = date.today()
        # Single aggregate query: request count + token sum for today.
        result = db.query(
            func.count(AIAPICostLog.id).label('requests'),
            func.coalesce(func.sum(AIAPICostLog.total_tokens), 0).label('tokens')
        ).filter(
            func.date(AIAPICostLog.timestamp) == today,
            AIAPICostLog.api_provider == 'gemini'
        ).first()

        return {
            'requests_today': result.requests or 0,
            'tokens_today': int(result.tokens or 0)
        }
    except Exception as e:
        logger.warning(f"Failed to get free tier usage: {e}")
        return {'requests_today': 0, 'tokens_today': 0}
    finally:
        db.close()
|
|
|
|
|
|
def get_brave_api_usage():
    """
    Get Brave Search API usage for current month.

    Brave free tier: 2000 requests/month

    Returns:
        Dict with usage stats and limits (safe zero defaults on query failure)
    """
    from datetime import date
    from sqlalchemy import func, extract

    db = SessionLocal()
    try:
        today = date.today()
        current_month = today.month
        current_year = today.year

        # Monthly usage (calendar month containing `today`).
        monthly_result = db.query(
            func.count(AIAPICostLog.id).label('requests')
        ).filter(
            extract('month', AIAPICostLog.timestamp) == current_month,
            extract('year', AIAPICostLog.timestamp) == current_year,
            AIAPICostLog.api_provider == 'brave'
        ).first()

        # Today's usage
        daily_result = db.query(
            func.count(AIAPICostLog.id).label('requests')
        ).filter(
            func.date(AIAPICostLog.timestamp) == today,
            AIAPICostLog.api_provider == 'brave'
        ).first()

        monthly_used = monthly_result.requests or 0
        daily_used = daily_result.requests or 0
        monthly_limit = 2000  # Brave free tier

        return {
            'requests_today': daily_used,
            'requests_this_month': monthly_used,
            'monthly_limit': monthly_limit,
            'remaining': max(0, monthly_limit - monthly_used),
            'usage_percent': round((monthly_used / monthly_limit) * 100, 1) if monthly_limit > 0 else 0,
            'tier': 'free',
            'is_limit_reached': monthly_used >= monthly_limit
        }
    except Exception as e:
        logger.warning(f"Failed to get Brave API usage: {e}")
        # Conservative defaults so callers can still render the usage widget.
        return {
            'requests_today': 0,
            'requests_this_month': 0,
            'monthly_limit': 2000,
            'remaining': 2000,
            'usage_percent': 0,
            'tier': 'free',
            'is_limit_reached': False
        }
    finally:
        db.close()
|
|
|
|
|
|
def log_brave_api_call(user_id=None, feature='news_search', company_name=None):
    """Record one Brave Search API call in AIAPICostLog for quota tracking.

    Args:
        user_id: User who triggered the call (optional)
        feature: Feature name (news_search, etc.)
        company_name: Company being searched (used only in the debug log)
    """
    db_session = SessionLocal()
    try:
        # Brave calls have no token accounting, so all token fields are zero.
        db_session.add(AIAPICostLog(
            api_provider='brave',
            model_name='search_api',
            feature=feature,
            user_id=user_id,
            input_tokens=0,
            output_tokens=0,
            total_tokens=0
        ))
        db_session.commit()
        logger.debug(f"Logged Brave API call: {feature} for {company_name}")
    except Exception as e:
        logger.error(f"Failed to log Brave API call: {e}")
        db_session.rollback()
    finally:
        db_session.close()
|
|
|
|
|
|
# ============================================================
|
|
# HEALTH CHECK
|
|
# ============================================================
|
|
|
|
@app.route('/health')
def health():
    """Lightweight liveness probe for external monitoring."""
    status_payload = {'status': 'ok'}
    return status_payload, 200
|
|
|
|
|
|
@app.route('/test-error-500')
@login_required
def test_error_500():
    """Test endpoint to trigger 500 error for notification testing. Admin only.

    Deliberately raises so the error-email notification pipeline can be
    verified end to end; non-admins are redirected to the index page.
    """
    if not current_user.is_admin:
        flash('Brak uprawnień', 'error')
        return redirect(url_for('index'))
    # Intentionally raise an error to test error notification
    raise Exception("TEST ERROR 500 - Celowy błąd testowy do sprawdzenia powiadomień email")
|
|
|
|
|
|
@app.route('/health/full')
def health_full():
    """
    Extended health check - probes every critical endpoint via the test client.

    Returns a JSON document with a per-endpoint status list plus a summary.
    HTTP 200 when every endpoint passes, 503 when at least one fails.
    Access: /health/full
    """
    results = []

    # List of ALL endpoints to check (path, display name).
    # Comprehensive list updated 2026-01-17.
    endpoints = [
        # ========== PUBLIC PAGES ==========
        ('/', 'Strona główna'),
        ('/login', 'Logowanie'),
        ('/register', 'Rejestracja'),
        ('/release-notes', 'Historia zmian'),
        ('/search?q=test', 'Wyszukiwarka'),
        ('/aktualnosci', 'Aktualności'),
        ('/forum', 'Forum'),
        ('/kalendarz', 'Kalendarz wydarzeń'),
        ('/tablica', 'Tablica ogłoszeń'),
        ('/nowi-czlonkowie', 'Nowi członkowie'),
        ('/mapa-polaczen', 'Mapa połączeń'),
        ('/forgot-password', 'Reset hasła'),

        # ========== REPORTS ==========
        ('/raporty/', 'Raporty'),
        ('/raporty/staz-czlonkostwa', 'Raport: Staż członkostwa'),
        ('/raporty/social-media', 'Raport: Social Media'),
        ('/raporty/struktura-branzowa', 'Raport: Struktura branżowa'),

        # ========== ZOPK PUBLIC ==========
        ('/zopk', 'ZOPK: Strona główna'),
        ('/zopk/aktualnosci', 'ZOPK: Aktualności'),

        # ========== CHAT ==========
        ('/chat', 'NordaGPT Chat'),

        # ========== IT AUDIT ==========
        ('/it-audit/form', 'IT Audit: Formularz'),

        # ========== PUBLIC API ==========
        ('/api/companies', 'API: Lista firm'),
        ('/api/model-info', 'API: Model info'),
        ('/api/gbp/audit/health', 'API: GBP health'),

        # ========== ADMIN: CORE ==========
        ('/admin/security', 'Admin: Bezpieczeństwo'),
        ('/admin/analytics', 'Admin: Analityka'),
        ('/admin/status', 'Admin: Status systemu'),
        ('/admin/health', 'Admin: Health dashboard'),
        ('/admin/debug', 'Admin: Debug'),
        ('/admin/ai-usage', 'Admin: AI Usage'),
        ('/admin/chat-analytics', 'Admin: Chat analytics'),
        ('/admin/users', 'Admin: Użytkownicy'),
        ('/admin/recommendations', 'Admin: Rekomendacje'),
        ('/admin/fees', 'Admin: Składki'),

        # ========== ADMIN: AUDITS ==========
        ('/admin/seo', 'Admin: SEO Audit'),
        ('/admin/gbp-audit', 'Admin: GBP Audit'),
        ('/admin/social-media', 'Admin: Social Media'),
        ('/admin/social-audit', 'Admin: Social Audit'),
        ('/admin/it-audit', 'Admin: IT Audit'),
        ('/admin/digital-maturity', 'Admin: Digital Maturity'),
        ('/admin/krs-audit', 'Admin: KRS Audit'),

        # ========== ADMIN: COMMUNITY ==========
        ('/admin/forum', 'Admin: Forum'),
        ('/admin/kalendarz', 'Admin: Kalendarz'),

        # ========== ADMIN: ZOPK ==========
        ('/admin/zopk', 'Admin: ZOPK Panel'),
        ('/admin/zopk/news', 'Admin: ZOPK News'),
        ('/admin/zopk/knowledge', 'Admin: ZOPK Knowledge'),
        ('/admin/zopk/knowledge/chunks', 'Admin: ZOPK Chunks'),
        ('/admin/zopk/knowledge/facts', 'Admin: ZOPK Facts'),
        ('/admin/zopk/knowledge/entities', 'Admin: ZOPK Entities'),
        ('/admin/zopk/knowledge/duplicates', 'Admin: ZOPK Duplikaty'),
        ('/admin/zopk/knowledge/fact-duplicates', 'Admin: ZOPK Fact Duplicates'),
        ('/admin/zopk/knowledge/graph', 'Admin: ZOPK Graf'),
        ('/admin/zopk/timeline', 'Admin: ZOPK Timeline'),

        # ========== ZOPK API ==========
        ('/api/zopk/milestones', 'API: ZOPK Milestones'),
        ('/api/zopk/knowledge/dashboard-stats', 'API: ZOPK Dashboard stats'),

        # ========== USER SETTINGS (v1.19.0) ==========
        ('/settings/privacy', 'Ustawienia: Prywatność'),
        ('/settings/blocks', 'Ustawienia: Blokady'),
        ('/settings/2fa', 'Ustawienia: 2FA'),

        # ========== MESSAGES ==========
        ('/wiadomosci', 'Wiadomości: Odebrane'),
        ('/wiadomosci/wyslane', 'Wiadomości: Wysłane'),
        ('/wiadomosci/nowa', 'Wiadomości: Nowa'),

        # ========== EDUCATION ==========
        ('/edukacja', 'Edukacja: Strona główna'),

        # ========== ADMIN: INSIGHTS ==========
        ('/admin/insights', 'Admin: Insights'),
    ]

    # Add one real company profile page to the check list.
    db = SessionLocal()
    try:
        random_company = db.query(Company).first()
        if random_company:
            endpoints.append((f'/company/{random_company.slug}', f'Profil: {random_company.name[:25]}'))
    finally:
        db.close()

    # Probe each endpoint in-process with the Flask test client.
    with app.test_client() as client:
        for path, name in endpoints:
            try:
                response = client.get(path, follow_redirects=False)
                status = response.status_code
                # 200 = OK, 302/304 = redirect (e.g. to login) / not modified = OK
                # 429 = rate limited (endpoint works, merely throttled)
                # anything else (500, 404, ...) counts as a failure
                entry = {
                    'endpoint': path,
                    'name': name,
                    'status': status,
                    'ok': status in (200, 302, 304, 429),
                }
            except Exception as e:
                # An exception from the test client means the route crashed.
                entry = {
                    'endpoint': path,
                    'name': name,
                    'status': 500,
                    'ok': False,
                    'error': str(e)[:100],
                }
            results.append(entry)

    # Summary
    passed = sum(1 for r in results if r['ok'])
    failed = len(results) - passed
    all_ok = failed == 0

    return {
        'status': 'ok' if all_ok else 'degraded',
        'summary': {
            'total': len(results),
            'passed': passed,
            'failed': failed
        },
        'endpoints': results,
        'timestamp': datetime.now().isoformat()
    }, 200 if all_ok else 503
|
|
|
|
|
|
# ============================================================
|
|
# PUBLIC ROUTES - MOVED TO blueprints/public/routes.py
|
|
# ============================================================
|
|
# The routes below have been migrated to the public blueprint.
|
|
# They are commented out but preserved for reference.
|
|
# See: blueprints/public/routes.py
|
|
# ============================================================
|
|
# RECOMMENDATIONS ADMIN ROUTES - MOVED TO: blueprints/admin/routes.py
|
|
# ============================================================
|
|
|
|
# ============================================================
|
|
# USER MANAGEMENT ADMIN ROUTES
|
|
# Moved to: blueprints/admin/routes.py
|
|
# NOTE: AI-parse routes remain below
|
|
# ============================================================
|
|
|
|
# admin_users, admin_user_add - MOVED TO: blueprints/admin/routes.py
|
|
|
|
|
|
# AI-ASSISTED USER CREATION - MOVED TO blueprints/admin/routes_users_api.py
|
|
# Routes: /admin/users-api/ai-parse, /admin/users-api/bulk-create
|
|
|
|
|
|
# ============================================================
|
|
# USER ANALYTICS API ROUTES
|
|
# ============================================================
|
|
|
|
@app.route('/api/analytics/track', methods=['POST'])
@csrf.exempt
def api_analytics_track():
    """Track clicks and interactions reported by the frontend tracker."""
    payload = request.get_json()
    if not payload:
        return jsonify({'error': 'No data'}), 400

    analytics_session_id = session.get('analytics_session_id')
    if not analytics_session_id:
        return jsonify({'error': 'No session'}), 400

    def _clip(key, limit):
        # Truncate an optional string field; falsy values become NULL.
        value = payload.get(key)
        return value[:limit] if value else None

    db = SessionLocal()
    try:
        user_session = db.query(UserSession).filter_by(session_id=analytics_session_id).first()
        if not user_session:
            return jsonify({'error': 'Session not found'}), 404

        event_type = payload.get('type')

        if event_type == 'click':
            # Persist the click and bump the session's click counter.
            click = UserClick(
                session_id=user_session.id,
                page_view_id=payload.get('page_view_id'),
                user_id=current_user.id if current_user.is_authenticated else None,
                element_type=_clip('element_type', 50),
                element_id=_clip('element_id', 100),
                element_text=(payload.get('element_text') or '')[:255],
                element_class=(payload.get('element_class') or '')[:500],
                target_url=_clip('target_url', 2000),
                x_position=payload.get('x'),
                y_position=payload.get('y'),
            )
            db.add(click)
            user_session.clicks_count = (user_session.clicks_count or 0) + 1
            db.commit()

        elif event_type == 'page_time':
            # Update the dwell time recorded for an existing page view.
            page_view_id = payload.get('page_view_id')
            time_seconds = payload.get('time_seconds')
            if page_view_id and time_seconds:
                page_view = db.query(PageView).filter_by(id=page_view_id).first()
                if page_view:
                    # Cap at 24h to ignore tabs left open indefinitely.
                    page_view.time_on_page_seconds = min(time_seconds, 86400)
                    db.commit()

        return jsonify({'success': True}), 200

    except Exception as e:
        logger.error(f"Analytics track error: {e}")
        db.rollback()
        return jsonify({'error': 'Internal error'}), 500
    finally:
        db.close()
|
|
|
|
|
|
@app.route('/api/analytics/heartbeat', methods=['POST'])
@csrf.exempt
def api_analytics_heartbeat():
    """Keep the analytics session alive and refresh its duration.

    Always answers HTTP 200 so the frontend beacon never surfaces errors;
    the JSON 'success' flag indicates whether an update happened.
    """
    analytics_session_id = session.get('analytics_session_id')
    if not analytics_session_id:
        return jsonify({'success': False}), 200

    db = SessionLocal()
    try:
        user_session = db.query(UserSession).filter_by(session_id=analytics_session_id).first()
        if user_session:
            # Capture one timestamp so last_activity_at and duration_seconds
            # are mutually consistent (previously datetime.now() was called
            # twice, yielding slightly different instants).
            now = datetime.now()
            user_session.last_activity_at = now
            user_session.duration_seconds = int(
                (now - user_session.started_at).total_seconds()
            )
            db.commit()
        return jsonify({'success': True}), 200
    except Exception as e:
        logger.error(f"Analytics heartbeat error: {e}")
        db.rollback()
        return jsonify({'success': False}), 200
    finally:
        db.close()
|
|
|
|
|
|
@app.route('/api/analytics/scroll', methods=['POST'])
@csrf.exempt
def api_analytics_scroll():
    """Track the maximum scroll depth reported by the frontend."""
    payload = request.get_json()
    if not payload:
        return jsonify({'error': 'No data'}), 400

    page_view_id = payload.get('page_view_id')
    scroll_depth = payload.get('scroll_depth')
    if not page_view_id or scroll_depth is None:
        return jsonify({'error': 'Missing data'}), 400

    db = SessionLocal()
    try:
        page_view = db.query(PageView).filter_by(id=page_view_id).first()
        if page_view:
            # Only keep the high-water mark; ignore shallower reports.
            previous = page_view.scroll_depth_percent or 0
            if scroll_depth > previous:
                page_view.scroll_depth_percent = min(scroll_depth, 100)
                db.commit()
        return jsonify({'success': True}), 200
    except Exception as e:
        logger.error(f"Analytics scroll error: {e}")
        db.rollback()
        return jsonify({'success': False}), 200
    finally:
        db.close()
|
|
|
|
|
|
@app.route('/api/analytics/error', methods=['POST'])
@csrf.exempt
def api_analytics_error():
    """Track JavaScript errors reported by the frontend."""
    import hashlib

    payload = request.get_json()
    if not payload:
        return jsonify({'error': 'No data'}), 400

    message = payload.get('message', '')[:2000]
    if not message:
        return jsonify({'error': 'No message'}), 400

    analytics_session_id = session.get('analytics_session_id')

    db = SessionLocal()
    try:
        # Resolve the analytics session (if any) to its DB primary key.
        session_db_id = None
        if analytics_session_id:
            user_session = db.query(UserSession).filter_by(session_id=analytics_session_id).first()
            if user_session:
                session_db_id = user_session.id

        # Hash message+source+line so identical errors can be aggregated.
        error_key = f"{message}|{payload.get('source', '')}|{payload.get('lineno', '')}"
        error_hash = hashlib.sha256(error_key.encode()).hexdigest()

        def _clip(key, limit):
            # Truncate an optional string field; falsy values become NULL.
            value = payload.get(key)
            return value[:limit] if value else None

        js_error = JSError(
            session_id=session_db_id,
            message=message,
            source=_clip('source', 500),
            lineno=payload.get('lineno'),
            colno=payload.get('colno'),
            stack=_clip('stack', 5000),
            url=_clip('url', 2000),
            user_agent=request.headers.get('User-Agent', '')[:500],
            error_hash=error_hash,
        )
        db.add(js_error)
        db.commit()
        return jsonify({'success': True}), 200
    except Exception as e:
        logger.error(f"Analytics error tracking error: {e}")
        db.rollback()
        return jsonify({'success': False}), 200
    finally:
        db.close()
|
|
|
|
|
|
@app.route('/api/analytics/performance', methods=['POST'])
@csrf.exempt
def api_analytics_performance():
    """Track page performance metrics reported by the frontend."""
    payload = request.get_json()
    if not payload:
        return jsonify({'error': 'No data'}), 400

    page_view_id = payload.get('page_view_id')
    if not page_view_id:
        return jsonify({'error': 'Missing page_view_id'}), 400

    db = SessionLocal()
    try:
        page_view = db.query(PageView).filter_by(id=page_view_id).first()
        if page_view:
            # Each metric is write-once: only fill columns still unset.
            metric_fields = (
                'dom_content_loaded_ms',
                'load_time_ms',
                'first_paint_ms',
                'first_contentful_paint_ms',
            )
            for field in metric_fields:
                if getattr(page_view, field) is None:
                    setattr(page_view, field, payload.get(field))
            db.commit()
        return jsonify({'success': True}), 200
    except Exception as e:
        logger.error(f"Analytics performance error: {e}")
        db.rollback()
        return jsonify({'success': False}), 200
    finally:
        db.close()
|
|
|
|
|
|
@app.route('/api/analytics/conversion', methods=['POST'])
@csrf.exempt
def api_analytics_conversion():
    """Track conversion events from the frontend (e.g. contact clicks)."""
    payload = request.get_json()
    if not payload:
        return jsonify({'error': 'No data'}), 400

    event_type = payload.get('event_type')
    if not event_type:
        return jsonify({'error': 'Missing event_type'}), 400

    # Delegate persistence to the shared conversion tracker.
    track_conversion(
        event_type=event_type,
        company_id=payload.get('company_id'),
        target_type=payload.get('target_type'),
        target_value=payload.get('target_value'),
        metadata=payload.get('metadata'),
    )

    return jsonify({'success': True}), 200
|
|
|
|
|
|
# ============================================================
|
|
# RECOMMENDATIONS API ROUTES
|
|
# ============================================================
|
|
|
|
@app.route('/api/recommendations/<int:company_id>', methods=['GET'])
@login_required
def api_get_recommendations(company_id):
    """API: Get all approved recommendations for a company.

    Returns a JSON payload with the company's approved recommendations,
    newest first, each annotated with the recommender's details (contact
    data only when the recommender opted in via show_contact).
    """
    db = SessionLocal()
    try:
        # Verify company exists
        company = db.query(Company).filter_by(id=company_id).first()
        if not company:
            return jsonify({
                'success': False,
                'error': 'Firma nie znaleziona'
            }), 404

        # Fetch recommendations together with their authors in ONE query.
        # (Previously the User row was re-queried per recommendation: N+1.)
        rows = (
            db.query(CompanyRecommendation, User)
            .join(User, CompanyRecommendation.user_id == User.id)
            .filter(
                CompanyRecommendation.company_id == company_id,
                CompanyRecommendation.status == 'approved'
            )
            .order_by(CompanyRecommendation.created_at.desc())
            .all()
        )

        # Build response with recommender details
        result = []
        for rec, recommender in rows:
            recommender_company = None
            if recommender and recommender.company_id:
                recommender_company = db.query(Company).filter_by(id=recommender.company_id).first()

            rec_data = {
                'id': rec.id,
                'recommendation_text': rec.recommendation_text,
                'service_category': rec.service_category,
                'created_at': rec.created_at.isoformat() if rec.created_at else None,
                'updated_at': rec.updated_at.isoformat() if rec.updated_at else None,
                'recommender': {
                    'name': recommender.full_name if recommender else '[Użytkownik usunięty]',
                    # Contact details are exposed only with explicit consent.
                    'email': recommender.email if (recommender and rec.show_contact) else None,
                    'phone': recommender.phone if (recommender and rec.show_contact) else None,
                    'company_id': recommender_company.id if recommender_company else None,
                    'company_name': recommender_company.name if recommender_company else None,
                    'company_slug': recommender_company.slug if recommender_company else None
                }
            }
            result.append(rec_data)

        return jsonify({
            'success': True,
            'company_id': company_id,
            'company_name': company.name,
            'recommendations': result,
            'count': len(result)
        })

    except Exception as e:
        logger.error(f"Error fetching recommendations for company {company_id}: {e}")
        return jsonify({
            'success': False,
            'error': 'Wystąpił błąd podczas pobierania rekomendacji'
        }), 500
    finally:
        db.close()
|
|
|
|
|
|
@app.route('/api/recommendations/create', methods=['POST'])
@login_required
def api_create_recommendation():
    """API: Create a new recommendation.

    Expects JSON: company_id (int, required), recommendation_text
    (str, 50-2000 chars, required), service_category (str, optional),
    show_contact (bool, default True).

    Validation order matters: missing fields (400) -> length (400) ->
    user verified (403) -> company active (404) -> self-recommendation
    (400) -> duplicate (400). New recommendations start as 'pending'
    and await moderation; company members get an in-app notification.
    """
    db = SessionLocal()
    try:
        # Get JSON data
        data = request.get_json()
        if not data:
            return jsonify({
                'success': False,
                'error': 'Brak danych'
            }), 400

        company_id = data.get('company_id')
        recommendation_text = data.get('recommendation_text', '').strip()
        # Empty string collapses to None so the column stays NULL.
        service_category = data.get('service_category', '').strip() or None
        show_contact = data.get('show_contact', True)

        # Validate required fields
        if not company_id:
            return jsonify({
                'success': False,
                'error': 'Brak ID firmy'
            }), 400

        if not recommendation_text:
            return jsonify({
                'success': False,
                'error': 'Treść rekomendacji jest wymagana'
            }), 400

        # Validate text length (50-2000 characters)
        if len(recommendation_text) < 50:
            return jsonify({
                'success': False,
                'error': 'Rekomendacja musi mieć co najmniej 50 znaków'
            }), 400

        if len(recommendation_text) > 2000:
            return jsonify({
                'success': False,
                'error': 'Rekomendacja nie może przekraczać 2000 znaków'
            }), 400

        # Check if user is verified (only verified users may recommend)
        if not current_user.is_verified:
            return jsonify({
                'success': False,
                'error': 'Tylko zweryfikowani użytkownicy mogą dodawać rekomendacje'
            }), 403

        # Verify company exists (inactive companies cannot be recommended)
        company = db.query(Company).filter_by(id=company_id, status='active').first()
        if not company:
            return jsonify({
                'success': False,
                'error': 'Firma nie znaleziona'
            }), 404

        # Prevent self-recommendation
        if current_user.company_id and current_user.company_id == company_id:
            return jsonify({
                'success': False,
                'error': 'Nie możesz polecać własnej firmy'
            }), 400

        # Check for duplicate recommendation (user can only have one recommendation per company)
        existing_rec = db.query(CompanyRecommendation).filter_by(
            user_id=current_user.id,
            company_id=company_id
        ).first()

        if existing_rec:
            return jsonify({
                'success': False,
                'error': 'Już poleciłeś tę firmę. Możesz edytować swoją istniejącą rekomendację.'
            }), 400

        # Create recommendation
        recommendation = CompanyRecommendation(
            company_id=company_id,
            user_id=current_user.id,
            recommendation_text=recommendation_text,
            service_category=service_category,
            show_contact=show_contact,
            status='pending'  # Start as pending for moderation
        )

        db.add(recommendation)
        db.commit()
        # Refresh to obtain the autogenerated id for the notification links.
        db.refresh(recommendation)

        # Create notification for company owner (if exists)
        # Find users associated with this company
        company_users = db.query(User).filter_by(company_id=company_id, is_active=True).all()
        for company_user in company_users:
            # Skip the author to avoid self-notification.
            if company_user.id != current_user.id:
                notification = UserNotification(
                    user_id=company_user.id,
                    notification_type='new_recommendation',
                    title='Nowa rekomendacja',
                    message=f'{current_user.name or current_user.email} polecił Twoją firmę: {company.name}',
                    action_url=f'/company/{company.slug}#recommendations',
                    related_id=recommendation.id
                )
                db.add(notification)
        # Second commit persists all notifications in one batch.
        db.commit()

        logger.info(f"Recommendation created: user {current_user.id} -> company {company_id}, ID {recommendation.id}")

        return jsonify({
            'success': True,
            'message': 'Rekomendacja została utworzona i oczekuje na moderację',
            'recommendation_id': recommendation.id,
            'status': recommendation.status
        }), 201

    except Exception as e:
        logger.error(f"Error creating recommendation: {e}")
        db.rollback()
        return jsonify({
            'success': False,
            'error': 'Wystąpił błąd podczas tworzenia rekomendacji'
        }), 500
    finally:
        db.close()
|
|
|
|
|
|
@app.route('/api/recommendations/<int:rec_id>/edit', methods=['POST'])
@login_required
def api_edit_recommendation(rec_id):
    """API: Edit an existing recommendation (owner or admin only)."""
    db = SessionLocal()
    try:
        rec = db.query(CompanyRecommendation).filter_by(id=rec_id).first()
        if not rec:
            return jsonify({
                'success': False,
                'error': 'Rekomendacja nie znaleziona'
            }), 404

        # Only the author or an administrator may edit.
        is_owner = rec.user_id == current_user.id
        if not (is_owner or current_user.is_admin):
            return jsonify({
                'success': False,
                'error': 'Brak uprawnień do edycji tej rekomendacji'
            }), 403

        payload = request.get_json()
        if not payload:
            return jsonify({
                'success': False,
                'error': 'Brak danych'
            }), 400

        new_text = payload.get('recommendation_text', '').strip()
        new_category = payload.get('service_category', '').strip() or None
        new_show_contact = payload.get('show_contact', rec.show_contact)

        # Text is only replaced when a non-empty value was sent,
        # and then it must pass the 50-2000 character bounds.
        if new_text:
            if len(new_text) < 50:
                return jsonify({
                    'success': False,
                    'error': 'Rekomendacja musi mieć co najmniej 50 znaków'
                }), 400
            if len(new_text) > 2000:
                return jsonify({
                    'success': False,
                    'error': 'Rekomendacja nie może przekraczać 2000 znaków'
                }), 400
            rec.recommendation_text = new_text

        # Presence of the key in the payload decides whether to update.
        if 'service_category' in payload:
            rec.service_category = new_category
        if 'show_contact' in payload:
            rec.show_contact = new_show_contact

        rec.updated_at = datetime.now()
        db.commit()

        logger.info(f"Recommendation edited: ID {rec_id} by user {current_user.id}")

        return jsonify({
            'success': True,
            'message': 'Rekomendacja została zaktualizowana',
            'recommendation_id': rec.id
        })

    except Exception as e:
        logger.error(f"Error editing recommendation {rec_id}: {e}")
        db.rollback()
        return jsonify({
            'success': False,
            'error': 'Wystąpił błąd podczas edycji rekomendacji'
        }), 500
    finally:
        db.close()
|
|
|
|
|
|
@app.route('/api/recommendations/<int:rec_id>/delete', methods=['POST'])
@login_required
def api_delete_recommendation(rec_id):
    """API: Delete a recommendation (owner or admin only)."""
    db = SessionLocal()
    try:
        rec = db.query(CompanyRecommendation).filter_by(id=rec_id).first()
        if not rec:
            return jsonify({
                'success': False,
                'error': 'Rekomendacja nie znaleziona'
            }), 404

        # Only the author or an administrator may delete.
        is_owner = rec.user_id == current_user.id
        if not (is_owner or current_user.is_admin):
            return jsonify({
                'success': False,
                'error': 'Brak uprawnień do usunięcia tej rekomendacji'
            }), 403

        # Capture identifiers before the row disappears (for the audit log).
        company_id = rec.company_id
        user_id = rec.user_id

        db.delete(rec)
        db.commit()

        logger.info(f"Recommendation deleted: ID {rec_id} (company {company_id}, user {user_id}) by user {current_user.id}")

        return jsonify({
            'success': True,
            'message': 'Rekomendacja została usunięta'
        })

    except Exception as e:
        logger.error(f"Error deleting recommendation {rec_id}: {e}")
        db.rollback()
        return jsonify({
            'success': False,
            'error': 'Wystąpił błąd podczas usuwania rekomendacji'
        }), 500
    finally:
        db.close()
|
|
|
|
|
|
# ============================================================
|
|
# B2B CLASSIFIEDS ROUTES - MIGRATED TO blueprints/community/classifieds/
|
|
# ============================================================
|
|
# Routes: /tablica, /tablica/nowe, /tablica/<id>, /tablica/<id>/zakoncz
|
|
|
|
# ============================================================
|
|
# NEW MEMBERS ROUTE - MOVED TO blueprints/public/routes.py
|
|
# ============================================================
|
|
# AUTHENTICATION ROUTES - MOVED TO blueprints/auth/routes.py
|
|
# ============================================================
|
|
# The routes below have been migrated to the auth blueprint.
|
|
# They are commented out but preserved for reference.
|
|
# See: blueprints/auth/routes.py
|
|
# ============================================================
|
|
# TWO-FACTOR AUTHENTICATION - MOVED TO blueprints/auth/routes.py
|
|
# ============================================================
|
|
# MOJE KONTO - MOVED TO blueprints/auth/routes.py
|
|
# ============================================================
|
|
# USER DASHBOARD - MOVED TO blueprints/public/routes.py
|
|
# ============================================================
|
|
# API ROUTES (for frontend)
|
|
# ============================================================
|
|
|
|
@app.route('/api/companies')
def api_companies():
    """API: Get all companies."""
    db = SessionLocal()
    try:
        active = db.query(Company).filter_by(status='active').all()

        def serialize(company):
            # Flat summary consumed by frontend directory widgets.
            return {
                'id': company.id,
                'name': company.name,
                'category': company.category.name if company.category else None,
                'description': company.description_short,
                'website': company.website,
                'phone': company.phone,
                'email': company.email
            }

        return jsonify({
            'success': True,
            'companies': [serialize(c) for c in active]
        })
    finally:
        db.close()
|
|
|
|
|
|
@app.route('/api/connections')
def api_connections():
    """
    API: Get company-person connections for D3.js visualization.
    Returns nodes (companies and people) and links (relationships).
    """
    db = SessionLocal()
    try:
        active_companies = db.query(Company).filter_by(status='active').all()
        # People that have at least one company relationship.
        linked_people = db.query(Person).join(CompanyPerson).distinct().all()

        # Company nodes
        nodes = [
            {
                'id': f'company_{c.id}',
                'name': c.name,
                'type': 'company',
                'category': c.category.name if c.category else 'Other',
                'slug': c.slug,
                'has_krs': bool(c.krs),
                'city': c.address_city or ''
            }
            for c in active_companies
        ]

        # Person nodes: count UNIQUE active companies per person (not roles).
        for person in linked_people:
            active_company_ids = {
                r.company_id
                for r in person.company_roles
                if r.company and r.company.status == 'active'
            }
            nodes.append({
                'id': f'person_{person.id}',
                'name': f'{person.imiona} {person.nazwisko}',
                'type': 'person',
                'company_count': len(active_company_ids)
            })

        # Links: one edge per role at an active company.
        links = [
            {
                'source': f'person_{person.id}',
                'target': f'company_{role.company_id}',
                'role': role.role,
                'category': role.role_category
            }
            for person in linked_people
            for role in person.company_roles
            if role.company and role.company.status == 'active'
        ]

        return jsonify({
            'success': True,
            'nodes': nodes,
            'links': links,
            'stats': {
                'companies': sum(1 for n in nodes if n['type'] == 'company'),
                'people': sum(1 for n in nodes if n['type'] == 'person'),
                'connections': len(links)
            }
        })
    finally:
        db.close()
|
|
def _build_seo_audit_response(company, analysis):
    """
    Build the SEO audit response JSON for a company.

    Used by both /api/seo/audit and /api/seo/audit/<slug> endpoints.
    Collects a flat issues list (severity/message/category) from the
    individual checks, then assembles the grouped audit payload.
    """
    issues = []

    def add_issue(severity, message, category):
        issues.append({
            'severity': severity,
            'message': message,
            'category': category
        })

    # Images missing alt attributes
    if analysis.images_without_alt and analysis.images_without_alt > 0:
        add_issue('warning', f'{analysis.images_without_alt} obrazów nie ma atrybutu alt', 'accessibility')

    # Missing meta description
    if not analysis.meta_description:
        add_issue('warning', 'Brak meta description', 'on_page')

    # H1 count - exactly one is expected
    if analysis.h1_count is not None:
        if analysis.h1_count == 0:
            add_issue('error', 'Brak nagłówka H1 na stronie', 'on_page')
        elif analysis.h1_count > 1:
            add_issue('warning', f'Strona zawiera {analysis.h1_count} nagłówków H1 (zalecany: 1)', 'on_page')

    # HTTPS / SSL certificate (explicit False only; None means "unknown")
    if analysis.has_ssl is False:
        add_issue('error', 'Strona nie używa HTTPS (brak certyfikatu SSL)', 'security')

    # robots.txt
    if analysis.has_robots_txt is False:
        add_issue('info', 'Brak pliku robots.txt', 'technical')

    # sitemap.xml
    if analysis.has_sitemap is False:
        add_issue('info', 'Brak pliku sitemap.xml', 'technical')

    # Indexability
    if analysis.is_indexable is False:
        add_issue('error', f'Strona nie jest indeksowalna: {analysis.noindex_reason or "nieznana przyczyna"}', 'technical')

    # Structured data (Schema.org)
    if analysis.has_structured_data is False:
        add_issue('info', 'Brak danych strukturalnych (Schema.org)', 'on_page')

    # Open Graph tags
    if analysis.has_og_tags is False:
        add_issue('info', 'Brak tagów Open Graph (ważne dla udostępniania w social media)', 'social')

    # Mobile friendliness
    if analysis.is_mobile_friendly is False:
        add_issue('warning', 'Strona nie jest przyjazna dla urządzeń mobilnych', 'technical')

    # Merge in any issues persisted in the seo_issues JSONB column.
    if analysis.seo_issues:
        stored = analysis.seo_issues if isinstance(analysis.seo_issues, list) else []
        issues.extend(item for item in stored if isinstance(item, dict))

    # Assemble the grouped response payload.
    return {
        'success': True,
        'company_id': company.id,
        'company_name': company.name,
        'website': company.website,
        'seo_audit': {
            'audited_at': analysis.seo_audited_at.isoformat() if analysis.seo_audited_at else None,
            'audit_version': analysis.seo_audit_version,
            'overall_score': analysis.seo_overall_score,
            'pagespeed': {
                'seo_score': analysis.pagespeed_seo_score,
                'performance_score': analysis.pagespeed_performance_score,
                'accessibility_score': analysis.pagespeed_accessibility_score,
                'best_practices_score': analysis.pagespeed_best_practices_score
            },
            'on_page': {
                'meta_title': analysis.meta_title,
                'meta_description': analysis.meta_description,
                'h1_count': analysis.h1_count,
                'h1_text': analysis.h1_text,
                'h2_count': analysis.h2_count,
                'h3_count': analysis.h3_count,
                'total_images': analysis.total_images,
                'images_without_alt': analysis.images_without_alt,
                'images_with_alt': analysis.images_with_alt,
                'internal_links_count': analysis.internal_links_count,
                'external_links_count': analysis.external_links_count,
                'has_structured_data': analysis.has_structured_data,
                'structured_data_types': analysis.structured_data_types
            },
            'technical': {
                'has_ssl': analysis.has_ssl,
                'ssl_issuer': analysis.ssl_issuer,
                'ssl_expires_at': analysis.ssl_expires_at.isoformat() if analysis.ssl_expires_at else None,
                'has_sitemap': analysis.has_sitemap,
                'has_robots_txt': analysis.has_robots_txt,
                'has_canonical': analysis.has_canonical,
                'canonical_url': analysis.canonical_url,
                'is_indexable': analysis.is_indexable,
                'noindex_reason': analysis.noindex_reason,
                'is_mobile_friendly': analysis.is_mobile_friendly,
                'viewport_configured': analysis.viewport_configured,
                'load_time_ms': analysis.load_time_ms,
                'http_status_code': analysis.http_status_code
            },
            'core_web_vitals': {
                'largest_contentful_paint_ms': analysis.largest_contentful_paint_ms,
                'first_input_delay_ms': analysis.first_input_delay_ms,
                'cumulative_layout_shift': float(analysis.cumulative_layout_shift) if analysis.cumulative_layout_shift else None
            },
            'social': {
                'has_og_tags': analysis.has_og_tags,
                'og_title': analysis.og_title,
                'og_description': analysis.og_description,
                'og_image': analysis.og_image,
                'has_twitter_cards': analysis.has_twitter_cards
            },
            'language': {
                'html_lang': analysis.html_lang,
                'has_hreflang': analysis.has_hreflang
            },
            'issues': issues
        }
    }
|
|
|
|
|
|
def _get_seo_audit_for_company(db, company):
    """
    Fetch the most recent SEO audit payload for *company*.

    Returns a tuple of (response_dict, status_code). When no analysis row
    exists, or a row exists but the SEO audit pass has not run yet
    (``seo_audited_at`` unset), the response carries ``seo_audit: None``
    plus an explanatory message — still HTTP 200.
    """
    # Most recent website-analysis row for this company.
    latest = (
        db.query(CompanyWebsiteAnalysis)
        .filter_by(company_id=company.id)
        .order_by(CompanyWebsiteAnalysis.analyzed_at.desc())
        .first()
    )

    def _no_audit(message):
        # Shared shape for the two "no audit data yet" responses.
        return {
            'success': True,
            'company_id': company.id,
            'company_name': company.name,
            'website': company.website,
            'seo_audit': None,
            'message': message
        }, 200

    if latest is None:
        return _no_audit('Brak danych SEO dla tej firmy. Audyt nie został jeszcze przeprowadzony.')

    # An analysis row can exist without a completed SEO audit.
    if not latest.seo_audited_at:
        return _no_audit('Audyt SEO nie został jeszcze przeprowadzony dla tej firmy.')

    # Full audit data available — delegate response assembly.
    return _build_seo_audit_response(company, latest), 200
|
|
|
|
|
|
@app.route('/api/seo/audit')
def api_seo_audit():
    """
    API: Get SEO audit results for a company.

    Query parameters:
    - company_id: Company ID (integer)
    - slug: Company slug (string)

    At least one of company_id or slug must be provided; company_id wins
    when both are present.

    Returns JSON with:
    - pagespeed scores (seo, performance, accessibility, best_practices)
    - on_page metrics (meta tags, headings, images, links, structured data)
    - technical checks (ssl, sitemap, robots.txt, mobile-friendly)
    - issues list with severity levels
    """
    company_id = request.args.get('company_id', type=int)
    slug = request.args.get('slug', type=str)

    # Reject requests that identify no company at all.
    if not (company_id or slug):
        return jsonify({
            'success': False,
            'error': 'Podaj company_id lub slug firmy'
        }), 400

    db = SessionLocal()
    try:
        # company_id takes precedence over slug when both are supplied.
        lookup = {'id': company_id} if company_id else {'slug': slug}
        company = db.query(Company).filter_by(status='active', **lookup).first()

        if company is None:
            return jsonify({
                'success': False,
                'error': 'Firma nie znaleziona'
            }), 404

        payload, status = _get_seo_audit_for_company(db, company)
        return jsonify(payload), status
    finally:
        db.close()
|
|
|
|
|
|
@app.route('/api/seo/audit/<slug>')
def api_seo_audit_by_slug(slug):
    """
    API: Get SEO audit results for a company identified by URL slug.

    Convenience variant of /api/seo/audit that takes the slug from the
    URL path instead of a query parameter.

    Example: GET /api/seo/audit/pixlab-sp-z-o-o
    """
    db = SessionLocal()
    try:
        # Resolve the active company behind this slug.
        company = (
            db.query(Company)
            .filter_by(slug=slug, status='active')
            .first()
        )

        if company is None:
            return jsonify({
                'success': False,
                'error': 'Firma nie znaleziona'
            }), 404

        payload, status = _get_seo_audit_for_company(db, company)
        return jsonify(payload), status
    finally:
        db.close()
|
|
|
|
|
|
@app.route('/api/seo/audit', methods=['POST'])
@login_required
@limiter.limit("200 per hour")
def api_seo_audit_trigger():
    """
    API: Trigger SEO audit for a company (admin-only).

    This endpoint runs a full SEO audit including:
    - Google PageSpeed Insights analysis
    - On-page SEO analysis (meta tags, headings, images, links)
    - Technical SEO checks (robots.txt, sitemap, canonical URLs)

    Request JSON body:
    - company_id: Company ID (integer) OR
    - slug: Company slug (string)

    Returns:
    - Success: Full SEO audit results saved to database
    - Error: Error message with status code

    Rate limited to 200 requests per hour per user to prevent API abuse
    (must stay in sync with the @limiter.limit decorator above).
    """
    # Admin-only check — members cannot trigger SEO audits.
    if not current_user.is_admin:
        return jsonify({
            'success': False,
            'error': 'Brak uprawnień. Tylko administrator może uruchamiać audyty SEO.'
        }), 403

    # Check if SEO audit service is available (import-time flag).
    if not SEO_AUDIT_AVAILABLE:
        return jsonify({
            'success': False,
            'error': 'Usługa audytu SEO jest niedostępna. Sprawdź konfigurację serwera.'
        }), 503

    # Parse request data
    data = request.get_json()
    if not data:
        return jsonify({
            'success': False,
            'error': 'Brak danych w żądaniu. Podaj company_id lub slug.'
        }), 400

    company_id = data.get('company_id')
    slug = data.get('slug')

    if not company_id and not slug:
        return jsonify({
            'success': False,
            'error': 'Podaj company_id lub slug firmy do audytu.'
        }), 400

    db = SessionLocal()
    try:
        # Find company by ID or slug (ID takes precedence).
        if company_id:
            company = db.query(Company).filter_by(id=company_id, status='active').first()
        else:
            company = db.query(Company).filter_by(slug=slug, status='active').first()

        if not company:
            return jsonify({
                'success': False,
                'error': 'Firma nie znaleziona lub nieaktywna.'
            }), 404

        # A website is mandatory — there is nothing to audit without one.
        if not company.website:
            return jsonify({
                'success': False,
                'error': f'Firma "{company.name}" nie ma zdefiniowanej strony internetowej.',
                'company_id': company.id,
                'company_name': company.name
            }), 400

        logger.info(f"SEO audit triggered by admin {current_user.email} for company: {company.name} (ID: {company.id})")

        # Initialize SEO auditor and run audit
        try:
            auditor = SEOAuditor()

            # Auditor consumes a plain dict, not the ORM object.
            company_dict = {
                'id': company.id,
                'name': company.name,
                'slug': company.slug,
                'website': company.website,
                'address_city': company.address_city
            }

            # Run the audit
            audit_result = auditor.audit_company(company_dict)

            # Total failure only when errors exist AND neither the on-page
            # nor the PageSpeed stage produced any data.
            if audit_result.get('errors') and not audit_result.get('onpage') and not audit_result.get('pagespeed'):
                return jsonify({
                    'success': False,
                    'error': f'Audyt nie powiódł się: {", ".join(audit_result["errors"])}',
                    'company_id': company.id,
                    'company_name': company.name,
                    'website': company.website
                }), 422

            # Persist the audit result.
            saved = auditor.save_audit_result(audit_result)

            if not saved:
                return jsonify({
                    'success': False,
                    'error': 'Audyt został wykonany, ale nie udało się zapisać wyników do bazy danych.',
                    'company_id': company.id,
                    'company_name': company.name
                }), 500

            # Re-read the freshest analysis row; expire_all() drops stale
            # identity-map state so the write above becomes visible.
            db.expire_all()
            analysis = db.query(CompanyWebsiteAnalysis).filter_by(
                company_id=company.id
            ).order_by(CompanyWebsiteAnalysis.analyzed_at.desc()).first()

            # Build response using the existing helper function
            response = _build_seo_audit_response(company, analysis)

            return jsonify({
                'success': True,
                'message': f'Audyt SEO dla firmy "{company.name}" został zakończony pomyślnie.',
                'audit_version': SEO_AUDIT_VERSION,
                'triggered_by': current_user.email,
                'triggered_at': datetime.now().isoformat(),
                **response
            }), 200

        except Exception as e:
            logger.error(f"SEO audit error for company {company.id}: {e}")
            return jsonify({
                'success': False,
                'error': f'Błąd podczas wykonywania audytu: {str(e)}',
                'company_id': company.id,
                'company_name': company.name
            }), 500

    finally:
        db.close()
|
|
|
|
|
|
# ============================================================
|
|
# SEO & GBP AUDIT DASHBOARDS - MOVED TO: blueprints/admin/routes_audits.py
|
|
# ============================================================
|
|
|
|
# ============================================================
|
|
# GBP (GOOGLE BUSINESS PROFILE) AUDIT API
|
|
# ============================================================
|
|
|
|
@app.route('/api/gbp/audit/health')
def api_gbp_audit_health():
    """
    API: Health check for GBP audit service.

    Returns service status and version information.
    Used by monitoring systems to verify service availability.
    """
    # Guard clause: report the degraded state first.
    if not GBP_AUDIT_AVAILABLE:
        return jsonify({
            'status': 'unavailable',
            'service': 'gbp_audit',
            'available': False,
            'error': 'GBP audit service not loaded'
        }), 503

    return jsonify({
        'status': 'ok',
        'service': 'gbp_audit',
        'version': GBP_AUDIT_VERSION,
        'available': True
    }), 200
|
|
|
|
|
|
@app.route('/api/gbp/audit', methods=['GET'])
def api_gbp_audit_get():
    """
    API: Get GBP audit results for a company.

    Query parameters:
    - company_id: Company ID (integer) OR
    - slug: Company slug (string)

    Returns:
    - Latest audit results with completeness score and recommendations
    - 400 if neither company_id nor slug is given
    - 404 if company not found, or no audit exists for the company
    - 503 if the GBP audit service failed to load at startup

    Example: GET /api/gbp/audit?company_id=26
    Example: GET /api/gbp/audit?slug=pixlab-sp-z-o-o
    """
    # Service availability is decided at import time; fail fast here.
    if not GBP_AUDIT_AVAILABLE:
        return jsonify({
            'success': False,
            'error': 'Usługa audytu GBP jest niedostępna.'
        }), 503

    company_id = request.args.get('company_id', type=int)
    slug = request.args.get('slug')

    # At least one identifier is required.
    if not company_id and not slug:
        return jsonify({
            'success': False,
            'error': 'Podaj company_id lub slug firmy.'
        }), 400

    db = SessionLocal()
    try:
        # Find company — company_id takes precedence over slug.
        if company_id:
            company = db.query(Company).filter_by(id=company_id, status='active').first()
        else:
            company = db.query(Company).filter_by(slug=slug, status='active').first()

        if not company:
            return jsonify({
                'success': False,
                'error': 'Firma nie znaleziona lub nieaktywna.'
            }), 404

        # Get latest audit (helper returns the newest persisted audit row).
        audit = gbp_get_company_audit(db, company.id)

        if not audit:
            # No audit stored yet — tell the caller how to create one.
            return jsonify({
                'success': False,
                'error': f'Brak wyników audytu GBP dla firmy "{company.name}". Uruchom audyt używając POST /api/gbp/audit.',
                'company_id': company.id,
                'company_name': company.name
            }), 404

        # Build response — full field-level breakdown of the stored audit.
        return jsonify({
            'success': True,
            'company_id': company.id,
            'company_name': company.name,
            'company_slug': company.slug,
            'audit': {
                'id': audit.id,
                # Dates serialized as ISO-8601 strings (None-safe).
                'audit_date': audit.audit_date.isoformat() if audit.audit_date else None,
                'completeness_score': audit.completeness_score,
                'score_category': audit.score_category,
                'fields_status': audit.fields_status,
                'recommendations': audit.recommendations,
                'has_name': audit.has_name,
                'has_address': audit.has_address,
                'has_phone': audit.has_phone,
                'has_website': audit.has_website,
                'has_hours': audit.has_hours,
                'has_categories': audit.has_categories,
                'has_photos': audit.has_photos,
                'has_description': audit.has_description,
                'has_services': audit.has_services,
                'has_reviews': audit.has_reviews,
                'photo_count': audit.photo_count,
                'review_count': audit.review_count,
                # Cast Decimal to float for JSON; NOTE: a 0 rating also maps
                # to None here (falsy check), not 0.0.
                'average_rating': float(audit.average_rating) if audit.average_rating else None,
                'google_place_id': audit.google_place_id,
                'audit_source': audit.audit_source,
                'audit_version': audit.audit_version
            }
        }), 200

    except Exception as e:
        # Broad catch: any DB/serialization failure becomes a JSON 500.
        logger.error(f"Error fetching GBP audit: {e}")
        return jsonify({
            'success': False,
            'error': f'Błąd podczas pobierania audytu: {str(e)}'
        }), 500
    finally:
        db.close()
|
|
|
|
|
|
@app.route('/api/gbp/audit/<slug>')
def api_gbp_audit_by_slug(slug):
    """
    API: Get GBP audit results for a company identified by URL slug.

    Convenience variant of GET /api/gbp/audit; returns a condensed audit
    summary (score, recommendations, photo/review counts).

    Example: GET /api/gbp/audit/pixlab-sp-z-o-o
    """
    # Service availability is decided at import time; fail fast.
    if not GBP_AUDIT_AVAILABLE:
        return jsonify({
            'success': False,
            'error': 'Usługa audytu GBP jest niedostępna.'
        }), 503

    db = SessionLocal()
    try:
        company = (
            db.query(Company)
            .filter_by(slug=slug, status='active')
            .first()
        )

        if company is None:
            return jsonify({
                'success': False,
                'error': f'Firma o slug "{slug}" nie znaleziona.'
            }), 404

        audit = gbp_get_company_audit(db, company.id)

        if audit is None:
            return jsonify({
                'success': False,
                'error': f'Brak wyników audytu GBP dla firmy "{company.name}".',
                'company_id': company.id,
                'company_name': company.name
            }), 404

        # Condensed audit payload (JSON-safe: ISO dates, float rating).
        rating = audit.average_rating
        audit_payload = {
            'id': audit.id,
            'audit_date': audit.audit_date.isoformat() if audit.audit_date else None,
            'completeness_score': audit.completeness_score,
            'score_category': audit.score_category,
            'fields_status': audit.fields_status,
            'recommendations': audit.recommendations,
            'photo_count': audit.photo_count,
            'review_count': audit.review_count,
            'average_rating': float(rating) if rating else None
        }

        return jsonify({
            'success': True,
            'company_id': company.id,
            'company_name': company.name,
            'company_slug': company.slug,
            'audit': audit_payload
        }), 200

    finally:
        db.close()
|
|
|
|
|
|
@app.route('/api/gbp/audit', methods=['POST'])
@login_required
@limiter.limit("20 per hour")
def api_gbp_audit_trigger():
    """
    API: Run GBP audit for a company.

    This endpoint runs a completeness audit for Google Business Profile data,
    checking fields like name, address, phone, website, hours, categories,
    photos, description, services, and reviews.

    Request JSON body:
    - company_id: Company ID (integer) OR
    - slug: Company slug (string)
    - save: Whether to save results to database (default: true)
    - fetch_google: Refresh Google Business data before auditing (default: true)
    - force_refresh: Bypass the Google data cache (default: false)

    Returns:
    - Success: Audit results with completeness score and recommendations
    - Error: Error message with status code

    Access:
    - Members can audit their own company
    - Admins can audit any company

    Rate limited to 20 requests per hour per user.
    """
    # Service availability is decided at import time; fail fast.
    if not GBP_AUDIT_AVAILABLE:
        return jsonify({
            'success': False,
            'error': 'Usługa audytu GBP jest niedostępna. Sprawdź konfigurację serwera.'
        }), 503

    # Parse request data
    data = request.get_json()
    if not data:
        return jsonify({
            'success': False,
            'error': 'Brak danych w żądaniu. Podaj company_id lub slug.'
        }), 400

    company_id = data.get('company_id')
    slug = data.get('slug')
    save_result = data.get('save', True)  # persist audit by default

    if not company_id and not slug:
        return jsonify({
            'success': False,
            'error': 'Podaj company_id lub slug firmy do audytu.'
        }), 400

    db = SessionLocal()
    try:
        # Find company by ID or slug (ID takes precedence).
        if company_id:
            company = db.query(Company).filter_by(id=company_id, status='active').first()
        else:
            company = db.query(Company).filter_by(slug=slug, status='active').first()

        if not company:
            return jsonify({
                'success': False,
                'error': 'Firma nie znaleziona lub nieaktywna.'
            }), 404

        # Check access: admin can audit any company, member only their own
        if not current_user.is_admin:
            # Check if user is associated with this company
            if current_user.company_id != company.id:
                return jsonify({
                    'success': False,
                    'error': 'Brak uprawnień. Możesz audytować tylko własną firmę.'
                }), 403

        logger.info(f"GBP audit triggered by {current_user.email} for company: {company.name} (ID: {company.id})")

        # Option to fetch fresh Google data before audit
        fetch_google = data.get('fetch_google', True)
        force_refresh = data.get('force_refresh', False)

        try:
            # Step 1: Fetch fresh Google Business data (if enabled).
            # Fetch failures are non-fatal: the audit still runs on
            # whatever data is already stored.
            fetch_result = None
            if fetch_google:
                logger.info(f"Fetching Google Business data for company {company.id}...")
                fetch_result = gbp_fetch_google_data(db, company.id, force_refresh=force_refresh)
                if not fetch_result.get('success') and not fetch_result.get('data', {}).get('cached'):
                    # Log warning but continue with audit
                    logger.warning(f"Google fetch warning for company {company.id}: {fetch_result.get('error')}")

            # Step 2: Run the audit
            result = gbp_audit_company(db, company.id, save=save_result)

            # Build per-field status for the response (stringify values so
            # the payload is JSON-safe regardless of field type).
            fields_response = {}
            for field_name, field_status in result.fields.items():
                fields_response[field_name] = {
                    'status': field_status.status,
                    'value': str(field_status.value) if field_status.value is not None else None,
                    'score': field_status.score,
                    'max_score': field_status.max_score,
                    'recommendation': field_status.recommendation
                }

            # Map numeric score to a coarse category (>=90 / >=70 / >=50).
            score = result.completeness_score
            if score >= 90:
                score_category = 'excellent'
            elif score >= 70:
                score_category = 'good'
            elif score >= 50:
                score_category = 'needs_work'
            else:
                score_category = 'poor'

            response_data = {
                'success': True,
                'message': f'Audyt GBP dla firmy "{company.name}" został zakończony pomyślnie.',
                'company_id': company.id,
                'company_name': company.name,
                'company_slug': company.slug,
                'audit_version': GBP_AUDIT_VERSION,
                'triggered_by': current_user.email,
                'triggered_at': datetime.now().isoformat(),
                'saved': save_result,
                'audit': {
                    'completeness_score': result.completeness_score,
                    'score_category': score_category,
                    'fields_status': fields_response,
                    'recommendations': result.recommendations,
                    'photo_count': result.photo_count,
                    'logo_present': result.logo_present,
                    'cover_photo_present': result.cover_photo_present,
                    'review_count': result.review_count,
                    # NOTE: a 0 rating serializes as None (falsy check).
                    'average_rating': float(result.average_rating) if result.average_rating else None,
                    'google_place_id': result.google_place_id
                }
            }

            # Include Google fetch results if performed
            if fetch_result:
                response_data['google_fetch'] = {
                    'success': fetch_result.get('success', False),
                    'steps': fetch_result.get('steps', []),
                    'data': fetch_result.get('data', {}),
                    'error': fetch_result.get('error')
                }

            return jsonify(response_data), 200

        except ValueError as e:
            # gbp_audit_company signals invalid input via ValueError → 400.
            return jsonify({
                'success': False,
                'error': str(e),
                'company_id': company.id if company else None
            }), 400
        except Exception as e:
            logger.error(f"GBP audit error for company {company.id}: {e}")
            return jsonify({
                'success': False,
                'error': f'Błąd podczas wykonywania audytu: {str(e)}',
                'company_id': company.id,
                'company_name': company.name
            }), 500

    finally:
        db.close()
|
|
|
|
|
|
# ============================================================
|
|
# SEO AUDIT USER-FACING DASHBOARD
|
|
# ============================================================
|
|
|
|
@app.route('/audit/seo/<slug>')
@login_required
def seo_audit_dashboard(slug):
    """
    User-facing SEO audit dashboard for a specific company.

    Displays SEO audit results with:
    - PageSpeed Insights scores (SEO, Performance, Accessibility, Best Practices)
    - Website analysis data
    - Improvement recommendations

    Access control:
    - Admin users can view audit for any company
    - Regular users can only view audit for their own company

    Args:
        slug: Company slug identifier

    Returns:
        Rendered seo_audit.html template with company and audit data
    """
    db = SessionLocal()
    try:
        company = db.query(Company).filter_by(slug=slug, status='active').first()
        if company is None:
            flash('Firma nie została znaleziona.', 'error')
            return redirect(url_for('dashboard'))

        # Non-admins may only view their own company's audit.
        if not (current_user.is_admin or current_user.company_id == company.id):
            flash('Brak uprawnień. Możesz przeglądać audyt tylko własnej firmy.', 'error')
            return redirect(url_for('dashboard'))

        # Newest analysis row by SEO-audit timestamp.
        latest = (
            db.query(CompanyWebsiteAnalysis)
            .filter(CompanyWebsiteAnalysis.company_id == company.id)
            .order_by(CompanyWebsiteAnalysis.seo_audited_at.desc())
            .first()
        )

        # Only expose data when an SEO audit pass actually completed.
        seo_data = None
        if latest is not None and latest.seo_audited_at:
            seo_data = {
                'seo_score': latest.pagespeed_seo_score,
                'performance_score': latest.pagespeed_performance_score,
                'accessibility_score': latest.pagespeed_accessibility_score,
                'best_practices_score': latest.pagespeed_best_practices_score,
                'audited_at': latest.seo_audited_at,
                'audit_version': latest.seo_audit_version,
                'url': latest.website_url
            }

        # Viewer already passed the guard above, so they may also run audits.
        can_audit = current_user.is_admin or current_user.company_id == company.id

        logger.info(f"SEO audit dashboard viewed by {current_user.email} for company: {company.name}")

        return render_template(
            'seo_audit.html',
            company=company,
            seo_data=seo_data,
            can_audit=can_audit,
        )
    finally:
        db.close()
|
|
|
|
|
|
# ============================================================
|
|
# SOCIAL MEDIA AUDIT USER-FACING DASHBOARD
|
|
# ============================================================
|
|
|
|
@app.route('/audit/social/<slug>')
@login_required
def social_audit_dashboard(slug):
    """
    User-facing Social Media audit dashboard for a specific company.

    Displays social media presence audit with:
    - Overall presence score (platforms found / total platforms)
    - Platform-by-platform status
    - Profile validation status
    - Recommendations for missing platforms

    Access control:
    - Admins: Can view all companies
    - Regular users: Can only view their own company

    Args:
        slug: Company URL slug

    Returns:
        Rendered social_audit.html template with company and social data
    """
    db = SessionLocal()
    try:
        company = db.query(Company).filter_by(slug=slug, status='active').first()
        if company is None:
            flash('Firma nie została znaleziona.', 'error')
            return redirect(url_for('dashboard'))

        # Non-admins may only view their own company's audit.
        if not (current_user.is_admin or current_user.company_id == company.id):
            flash('Brak uprawnień do wyświetlenia audytu social media tej firmy.', 'error')
            return redirect(url_for('dashboard'))

        # All stored social profiles for this company, keyed by platform.
        social_profiles = db.query(CompanySocialMedia).filter(
            CompanySocialMedia.company_id == company.id
        ).all()

        profiles_dict = {
            profile.platform: {
                'url': profile.url,
                'is_valid': profile.is_valid,
                'check_status': profile.check_status,
                'page_name': profile.page_name,
                'followers_count': profile.followers_count,
                'verified_at': profile.verified_at,
                'last_checked_at': profile.last_checked_at,
            }
            for profile in social_profiles
        }

        # The fixed set of platforms tracked by the audit.
        all_platforms = ['facebook', 'instagram', 'linkedin', 'youtube', 'twitter', 'tiktok']

        # Presence score = covered platforms / tracked platforms, as a percent.
        covered = sum(1 for platform in all_platforms if platform in profiles_dict)
        tracked = len(all_platforms)
        score = int((covered / tracked) * 100) if tracked > 0 else 0

        social_data = {
            'profiles': profiles_dict,
            'all_platforms': all_platforms,
            'platforms_count': covered,
            'total_platforms': tracked,
            'score': score,
        }

        # Viewer already passed the guard above, so they may also run audits.
        can_audit = current_user.is_admin or current_user.company_id == company.id

        logger.info(f"Social Media audit dashboard viewed by {current_user.email} for company: {company.name}")

        return render_template(
            'social_audit.html',
            company=company,
            social_data=social_data,
            can_audit=can_audit,
        )
    finally:
        db.close()
|
|
|
|
|
|
@app.route('/api/social/audit', methods=['POST'])
@login_required
@limiter.limit("10 per hour")
def api_social_audit_trigger():
    """
    API: Trigger Social Media audit for a company.

    This endpoint performs a comprehensive social media audit:
    - Scans company website for social media links
    - Searches for profiles via Brave Search API (if configured)
    - Fetches Google Business Profile data
    - Updates database with discovered profiles

    Request JSON body:
    - company_id: Company ID (integer) OR
    - slug: Company slug (string)

    Returns:
    - Success: Updated social media audit results
    - Error: Error message with status code

    Rate limited to 10 requests per hour per user.
    """
    # Import the SocialMediaAuditor from scripts/ lazily, on each request,
    # so a missing script degrades to 503 instead of breaking app startup.
    try:
        import sys
        from pathlib import Path
        scripts_dir = Path(__file__).parent / 'scripts'
        # Make scripts/ importable (idempotent: only inserted once).
        if str(scripts_dir) not in sys.path:
            sys.path.insert(0, str(scripts_dir))
        from social_media_audit import SocialMediaAuditor
    except ImportError as e:
        logger.error(f"Failed to import SocialMediaAuditor: {e}")
        return jsonify({
            'success': False,
            'error': 'Usługa audytu Social Media jest niedostępna. Sprawdź konfigurację serwera.'
        }), 503

    # Parse request data
    data = request.get_json()
    if not data:
        return jsonify({
            'success': False,
            'error': 'Brak danych w żądaniu. Podaj company_id lub slug.'
        }), 400

    company_id = data.get('company_id')
    slug = data.get('slug')

    if not company_id and not slug:
        return jsonify({
            'success': False,
            'error': 'Podaj company_id lub slug firmy do audytu.'
        }), 400

    db = SessionLocal()
    try:
        # Find company by ID or slug (ID takes precedence).
        if company_id:
            company = db.query(Company).filter_by(id=company_id, status='active').first()
        else:
            company = db.query(Company).filter_by(slug=slug, status='active').first()

        if not company:
            return jsonify({
                'success': False,
                'error': 'Firma nie znaleziona lub nieaktywna.'
            }), 404

        # Access control - admin can audit all, users only their company
        if not current_user.is_admin:
            if current_user.company_id != company.id:
                return jsonify({
                    'success': False,
                    'error': 'Brak uprawnień do audytu social media tej firmy.'
                }), 403

        logger.info(f"Social Media audit triggered by {current_user.email} for company: {company.name} (ID: {company.id})")

        # Auditor consumes a plain dict; city falls back to 'Wejherowo'
        # when the company record has none (used for search queries).
        company_dict = {
            'id': company.id,
            'name': company.name,
            'slug': company.slug,
            'website': company.website,
            'address_city': company.address_city or 'Wejherowo'
        }

        # Initialize auditor and run audit
        try:
            auditor = SocialMediaAuditor()
            audit_result = auditor.audit_company(company_dict)

            # Total failure only when errors exist AND neither social-media
            # nor website data was produced; partial results still proceed.
            if audit_result.get('errors') and not audit_result.get('social_media') and not audit_result.get('website'):
                return jsonify({
                    'success': False,
                    'error': f'Audyt nie powiódł się: {", ".join(audit_result["errors"][:3])}',
                    'company_id': company.id,
                    'company_name': company.name
                }), 422

            # Save result to database
            saved = auditor.save_audit_result(audit_result)

            if not saved:
                return jsonify({
                    'success': False,
                    'error': 'Audyt został wykonany, ale nie udało się zapisać wyników do bazy danych.',
                    'company_id': company.id,
                    'company_name': company.name
                }), 500

            # Get count of social media profiles found
            social_media_found = audit_result.get('social_media', {})
            platforms_count = len(social_media_found)

            # Score = found platforms / tracked platforms, as a percent.
            all_platforms = ['facebook', 'instagram', 'linkedin', 'youtube', 'twitter', 'tiktok']
            score = int((platforms_count / len(all_platforms)) * 100)

            return jsonify({
                'success': True,
                'message': f'Audyt Social Media zakończony. Znaleziono {platforms_count} profili.',
                'company_id': company.id,
                'company_name': company.name,
                'profiles_found': platforms_count,
                'platforms': list(social_media_found.keys()),
                'score': score,
                'google_reviews': audit_result.get('google_reviews', {}),
                'errors': audit_result.get('errors') if audit_result.get('errors') else None
            }), 200

        except Exception as e:
            # Audit-stage failure: company is known here, log with its ID.
            logger.error(f"Social Media audit error for company {company.id}: {e}")
            return jsonify({
                'success': False,
                'error': f'Błąd podczas audytu: {str(e)}'
            }), 500

    except Exception as e:
        # Outer failure (e.g. DB lookup): company may not be resolved,
        # so log with the raw identifier and roll back the session.
        logger.error(f"Social Media audit error for company {slug or company_id}: {e}")
        db.rollback()
        return jsonify({
            'success': False,
            'error': f'Błąd podczas audytu: {str(e)}'
        }), 500

    finally:
        db.close()
|
|
|
|
|
|
# ============================================================
|
|
# GBP AUDIT USER-FACING DASHBOARD
|
|
# ============================================================
|
|
|
|
@app.route('/audit/gbp/<slug>')
@login_required
def gbp_audit_dashboard(slug):
    """
    User-facing GBP audit dashboard for a specific company.

    Displays Google Business Profile completeness audit results with:
    - Overall completeness score (0-100)
    - Field-by-field status breakdown
    - AI-generated improvement recommendations
    - Historical audit data

    Access control:
    - Admin users can view audit for any company
    - Regular users can only view audit for their own company

    Args:
        slug: Company slug identifier

    Returns:
        Rendered gbp_audit.html template with company and audit data
    """
    # Service availability is decided at import time; bail out early.
    if not GBP_AUDIT_AVAILABLE:
        flash('Usługa audytu Google Business Profile jest tymczasowo niedostępna.', 'error')
        return redirect(url_for('dashboard'))

    db = SessionLocal()
    try:
        company = db.query(Company).filter_by(slug=slug, status='active').first()
        if company is None:
            flash('Firma nie została znaleziona.', 'error')
            return redirect(url_for('dashboard'))

        # Non-admins may only view their own company's audit.
        if not (current_user.is_admin or current_user.company_id == company.id):
            flash('Brak uprawnień. Możesz przeglądać audyt tylko własnej firmy.', 'error')
            return redirect(url_for('dashboard'))

        # Latest audit, or None — the template renders a "run audit" state
        # when no audit exists yet.
        latest_audit = gbp_get_company_audit(db, company.id)

        # Viewer already passed the guard above, so they may also run audits.
        can_audit = current_user.is_admin or current_user.company_id == company.id

        logger.info(f"GBP audit dashboard viewed by {current_user.email} for company: {company.name}")

        return render_template(
            'gbp_audit.html',
            company=company,
            audit=latest_audit,
            can_audit=can_audit,
            gbp_audit_available=GBP_AUDIT_AVAILABLE,
            gbp_audit_version=GBP_AUDIT_VERSION,
        )
    finally:
        db.close()
|
|
|
|
|
|
# ============================================================
|
|
# IT AUDIT USER-FACING DASHBOARD
|
|
# ============================================================
|
|
|
|
# Human-readable (Polish) labels for ITAudit.maturity_level values.
_IT_AUDIT_MATURITY_LABELS = {
    'basic': 'Podstawowy',
    'developing': 'Rozwijający się',
    'established': 'Ugruntowany',
    'advanced': 'Zaawansowany'
}


def _it_audit_to_dict(audit):
    """
    Serialize an ITAudit ORM row into the flat dict consumed by it_audit.html.

    Args:
        audit: ITAudit model instance (must not be None).

    Returns:
        dict with scores, maturity info, technology/collaboration flags,
        solution names and AI recommendations.
    """
    return {
        'id': audit.id,
        # Scores and maturity
        'overall_score': audit.overall_score,
        'security_score': audit.security_score,
        'collaboration_score': audit.collaboration_score,
        'completeness_score': audit.completeness_score,
        'maturity_level': audit.maturity_level,
        # Unknown/legacy levels fall back to 'Nieznany'
        'maturity_label': _IT_AUDIT_MATURITY_LABELS.get(audit.maturity_level, 'Nieznany'),
        'audit_date': audit.audit_date,
        'audit_source': audit.audit_source,
        # Technology flags
        'has_azure_ad': audit.has_azure_ad,
        'has_m365': audit.has_m365,
        'has_google_workspace': audit.has_google_workspace,
        'has_local_ad': audit.has_local_ad,
        'has_edr': audit.has_edr,
        'has_mfa': audit.has_mfa,
        'has_vpn': audit.has_vpn,
        'has_proxmox_pbs': audit.has_proxmox_pbs,
        'has_dr_plan': audit.has_dr_plan,
        'has_mdm': audit.has_mdm,
        # Solutions
        'antivirus_solution': audit.antivirus_solution,
        'backup_solution': audit.backup_solution,
        'monitoring_solution': audit.monitoring_solution,
        'virtualization_platform': audit.virtualization_platform,
        # Collaboration flags
        'open_to_shared_licensing': audit.open_to_shared_licensing,
        'open_to_backup_replication': audit.open_to_backup_replication,
        'open_to_teams_federation': audit.open_to_teams_federation,
        'open_to_shared_monitoring': audit.open_to_shared_monitoring,
        'open_to_collective_purchasing': audit.open_to_collective_purchasing,
        'open_to_knowledge_sharing': audit.open_to_knowledge_sharing,
        # Recommendations
        'recommendations': audit.recommendations
    }


@app.route('/audit/it/<slug>')
@login_required
def it_audit_dashboard(slug):
    """
    User-facing IT infrastructure audit dashboard for a specific company.

    Displays IT audit results with:
    - Overall score and maturity level
    - Security, collaboration, and completeness sub-scores
    - Technology stack summary (Azure AD, M365, backup, monitoring)
    - AI-generated recommendations

    Access control:
    - Admin users can view audit for any company
    - Regular users can only view audit for their own company

    Args:
        slug: Company slug identifier

    Returns:
        Rendered it_audit.html template with company and audit data
    """
    db = SessionLocal()
    try:
        # Local import keeps the optional IT-audit model out of module import time
        from database import ITAudit

        # Find company by slug (active companies only)
        company = db.query(Company).filter_by(slug=slug, status='active').first()

        if not company:
            flash('Firma nie została znaleziona.', 'error')
            return redirect(url_for('dashboard'))

        # Access control: admin can view any company, member only their own
        if not current_user.is_admin and current_user.company_id != company.id:
            flash('Brak uprawnień. Możesz przeglądać audyt tylko własnej firmy.', 'error')
            return redirect(url_for('dashboard'))

        # Get latest IT audit for this company (newest audit_date wins)
        audit = db.query(ITAudit).filter(
            ITAudit.company_id == company.id
        ).order_by(ITAudit.audit_date.desc()).first()

        # If no audit exists the template handles audit_data=None;
        # the user can trigger an audit from the dashboard.
        audit_data = _it_audit_to_dict(audit) if audit else None

        # Determine if user can edit audit (admin or company owner)
        can_edit = current_user.is_admin or current_user.company_id == company.id

        logger.info(f"IT audit dashboard viewed by {current_user.email} for company: {company.name}")

        return render_template('it_audit.html',
                               company=company,
                               audit_data=audit_data,
                               can_edit=can_edit
                               )

    finally:
        db.close()
|
|
|
|
|
|
@app.route('/api/check-email', methods=['POST'])
def api_check_email():
    """
    API: Check if email is available for registration.

    Expects JSON body: {"email": "<address>"}

    Returns:
        200 with {'available': bool, 'email': str} on success,
        400 with {'available': False, 'error': ...} for a missing/invalid email.
    """
    # silent=True: a missing or malformed JSON body yields None (coerced to {})
    # instead of Flask raising a 500, so validation below answers with a clean 400.
    data = request.get_json(silent=True) or {}
    email = data.get('email', '').strip().lower()

    # Validate email format before touching the database
    if not email or not validate_email(email):
        return jsonify({
            'available': False,
            'error': 'Nieprawidłowy format email'
        }), 400

    db = SessionLocal()
    try:
        # Email is available only when no existing user already holds it
        existing_user = db.query(User).filter_by(email=email).first()

        return jsonify({
            'available': existing_user is None,
            'email': email
        })
    finally:
        db.close()
|
|
|
|
|
|
@app.route('/api/verify-nip', methods=['POST'])
def api_verify_nip():
    """
    API: Verify NIP and check if company is a NORDA member.

    Expects JSON body: {"nip": "<10 digits>"}

    Returns:
        200 with membership info ('is_member', 'company_name', 'company_id'),
        400 with an error message for a missing/invalid NIP.
    """
    # silent=True: malformed/missing JSON becomes {} and fails validation with
    # a 400 below instead of crashing with a 500 on data.get().
    data = request.get_json(silent=True) or {}
    nip = data.get('nip', '').strip()

    # Validate NIP format: exactly 10 digits
    if not nip or not re.match(r'^\d{10}$', nip):
        return jsonify({
            'success': False,
            'error': 'Nieprawidłowy format NIP'
        }), 400

    db = SessionLocal()
    try:
        # Check if NIP exists in companies database (active companies only)
        company = db.query(Company).filter_by(nip=nip, status='active').first()

        if company:
            return jsonify({
                'success': True,
                'is_member': True,
                'company_name': company.name,
                'company_id': company.id
            })
        else:
            return jsonify({
                'success': True,
                'is_member': False,
                'company_name': None,
                'company_id': None
            })
    finally:
        db.close()
|
|
|
|
|
|
@app.route('/api/verify-krs', methods=['GET', 'POST'])
def api_verify_krs():
    """
    API: Verify company data from KRS Open API (prs.ms.gov.pl).

    GET /api/verify-krs?krs=0000817317
    POST /api/verify-krs with JSON body: {"krs": "0000817317"}

    Returns official KRS data including:
    - Company name, NIP, REGON
    - Address
    - Capital
    - Registration date
    - Management board (anonymized in Open API)
    - Shareholders (anonymized in Open API)
    """
    # Get KRS from query params (GET) or JSON body (POST)
    if request.method == 'GET':
        krs = request.args.get('krs', '').strip()
    else:
        data = request.get_json(silent=True) or {}
        krs = data.get('krs', '').strip()

    # Validate KRS format (7-10 digits)
    if not krs or not re.match(r'^\d{7,10}$', krs):
        return jsonify({
            'success': False,
            'error': 'Nieprawidłowy format KRS (wymagane 7-10 cyfr)'
        }), 400

    # Normalize to 10 digits (KRS numbers are zero-padded)
    krs_normalized = krs.zfill(10)

    try:
        # Fetch data from KRS Open API
        krs_data = krs_api_service.get_company_from_krs(krs_normalized)

        if krs_data is None:
            return jsonify({
                'success': False,
                'error': f'Nie znaleziono podmiotu o KRS {krs_normalized} w rejestrze',
                'krs': krs_normalized
            }), 404

        # Check if company exists in our database (NORDA membership)
        db = SessionLocal()
        try:
            our_company = db.query(Company).filter_by(krs=krs_normalized).first()
            is_member = our_company is not None
            company_id = our_company.id if our_company else None
        finally:
            db.close()

        return jsonify({
            'success': True,
            'krs': krs_normalized,
            'is_norda_member': is_member,
            'company_id': company_id,
            'data': krs_data.to_dict(),
            'formatted_address': krs_api_service.format_address(krs_data),
            'source': 'KRS Open API (prs.ms.gov.pl)',
            'note': 'Dane osobowe (imiona, nazwiska) są zanonimizowane w Open API'
        })

    except Exception as e:
        # Log before returning 500 so registry/API outages are visible in logs
        # (the other endpoints in this file log their failures too).
        logger.error(f"KRS verification error for {krs_normalized}: {e}")
        return jsonify({
            'success': False,
            'error': f'Błąd podczas pobierania danych z KRS: {str(e)}'
        }), 500
|
|
|
|
|
|
@app.route('/api/company/<int:company_id>/refresh-krs', methods=['POST'])
@login_required
def api_refresh_company_krs(company_id):
    """
    API: Refresh company data from KRS Open API.

    Fetches the official registry record for the company's KRS number and
    updates the local row (NIP, REGON, address, city). Only non-personal
    data is touched. Requires login.

    Returns:
        200 with the list of updated fields and the raw KRS payload,
        400 if the company has no KRS number,
        404 if the company or the KRS record is not found,
        500 on registry/database errors (transaction rolled back).
    """
    db = SessionLocal()
    try:
        company = db.query(Company).filter_by(id=company_id).first()

        if not company:
            return jsonify({
                'success': False,
                'error': 'Firma nie znaleziona'
            }), 404

        if not company.krs:
            return jsonify({
                'success': False,
                'error': 'Firma nie ma numeru KRS'
            }), 400

        # Fetch the official record from KRS Open API
        krs_data = krs_api_service.get_company_from_krs(company.krs)

        if krs_data is None:
            return jsonify({
                'success': False,
                'error': f'Nie znaleziono podmiotu o KRS {company.krs} w rejestrze'
            }), 404

        # Update company data (only non-personal data); 'updates' records
        # what changed for the API response.
        updates = {}

        if krs_data.nip and krs_data.nip != company.nip:
            updates['nip'] = krs_data.nip
            company.nip = krs_data.nip

        if krs_data.regon:
            # KRS may return a 14-digit REGON; we store the 9-digit base
            regon_9 = krs_data.regon[:9]
            if regon_9 != company.regon:
                updates['regon'] = regon_9
                company.regon = regon_9

        # Update address if it differs from the registry-formatted one
        new_address = krs_api_service.format_address(krs_data)
        if new_address and new_address != company.address:
            updates['address'] = new_address
            company.address = new_address

        if krs_data.miejscowosc and krs_data.miejscowosc != company.city:
            updates['city'] = krs_data.miejscowosc
            company.city = krs_data.miejscowosc

        if krs_data.kapital_zakladowy:
            updates['kapital_zakladowy'] = krs_data.kapital_zakladowy
            # NOTE(review): this value is reported in 'updates' but never
            # assigned to the Company row — the model likely lacks the field.
            # Confirm and either add the column or drop it from 'updates'.

        # Record when this company was last verified against KRS
        company.krs_verified_at = datetime.utcnow()

        db.commit()

        return jsonify({
            'success': True,
            'company_id': company_id,
            'updates': updates,
            'krs_data': krs_data.to_dict(),
            'message': f'Zaktualizowano {len(updates)} pól' if updates else 'Dane są aktualne'
        })

    except Exception as e:
        db.rollback()
        # Log the failure so registry outages / data errors are traceable
        # (previously the exception was returned to the client but not logged).
        logger.error(f"KRS refresh error for company {company_id}: {e}")
        return jsonify({
            'success': False,
            'error': f'Błąd podczas aktualizacji: {str(e)}'
        }), 500

    finally:
        db.close()
|
|
|
|
|
|
def _search_brave_for_company(company_name: str, city: str = None) -> dict:
    """
    Search Brave API for company information.

    Queries both the news and web search endpoints for the exact company
    name (optionally narrowed by city), limited to Polish results.

    Args:
        company_name: Company name, quoted verbatim in the search query.
        city: Optional city to narrow the query.

    Returns:
        dict with 'news' and 'web' lists (up to 5 items each); both lists
        are empty when BRAVE_API_KEY is unset or the request fails.
    """
    import requests

    brave_api_key = os.getenv('BRAVE_API_KEY')
    if not brave_api_key:
        logger.warning("BRAVE_API_KEY not configured, skipping web search")
        return {'news': [], 'web': []}

    results = {'news': [], 'web': []}

    # Build search query: exact-phrase match on the name, plus city if given
    query = f'"{company_name}"'
    if city:
        query += f' {city}'

    try:
        headers = {
            'Accept': 'application/json',
            'X-Subscription-Token': brave_api_key
        }

        # Search news (past year, Polish locale)
        news_params = {
            'q': query,
            'count': 5,
            'freshness': 'py',  # past year
            'country': 'pl',
            'search_lang': 'pl'
        }

        news_response = requests.get(
            'https://api.search.brave.com/res/v1/news/search',
            headers=headers,
            params=news_params,
            timeout=10
        )

        if news_response.status_code == 200:
            news_data = news_response.json()
            for item in news_data.get('results', [])[:5]:
                results['news'].append({
                    'title': item.get('title', ''),
                    'description': item.get('description', ''),
                    'url': item.get('url', ''),
                    'source': item.get('meta_url', {}).get('hostname', '')
                })
            logger.info(f"Brave News: found {len(results['news'])} items for '{company_name}'")
        else:
            # Surface API errors (e.g. 429 rate limits) instead of failing silently
            logger.warning(f"Brave News search returned HTTP {news_response.status_code} for '{company_name}'")

        # Search web
        web_params = {
            'q': query,
            'count': 5,
            'country': 'pl',
            'search_lang': 'pl'
        }

        web_response = requests.get(
            'https://api.search.brave.com/res/v1/web/search',
            headers=headers,
            params=web_params,
            timeout=10
        )

        if web_response.status_code == 200:
            web_data = web_response.json()
            for item in web_data.get('web', {}).get('results', [])[:5]:
                results['web'].append({
                    'title': item.get('title', ''),
                    'description': item.get('description', ''),
                    'url': item.get('url', '')
                })
            logger.info(f"Brave Web: found {len(results['web'])} items for '{company_name}'")
        else:
            logger.warning(f"Brave Web search returned HTTP {web_response.status_code} for '{company_name}'")

    except Exception as e:
        # Best-effort helper: log and return whatever was gathered so far
        logger.error(f"Brave search error for '{company_name}': {e}")

    return results
|
|
|
|
|
|
def _fetch_website_content(url: str) -> str:
    """
    Fetch and extract text content from company website.

    Strips scripts, styles and layout chrome (nav/footer/header), then
    collapses whitespace.

    Args:
        url: Website URL; 'https://' is prepended when no scheme is present.

    Returns:
        Up to the first 3000 characters of visible text, or '' when the URL
        is empty, the request fails, or the server does not return HTTP 200.
    """
    import requests
    from bs4 import BeautifulSoup

    if not url:
        return ''

    try:
        # Ensure URL has protocol
        if not url.startswith('http'):
            url = 'https://' + url

        response = requests.get(url, timeout=10, headers={
            'User-Agent': 'Mozilla/5.0 (compatible; NordaBizBot/1.0)'
        })

        if response.status_code == 200:
            soup = BeautifulSoup(response.text, 'html.parser')

            # Remove scripts and styles (and layout chrome) before text extraction
            for tag in soup(['script', 'style', 'nav', 'footer', 'header']):
                tag.decompose()

            # Get text content
            text = soup.get_text(separator=' ', strip=True)

            # Clean up whitespace (collapse runs of spaces/newlines)
            text = ' '.join(text.split())

            logger.info(f"Fetched {len(text)} chars from {url}")
            return text[:3000]  # Limit to 3000 chars

    except Exception as e:
        # Best-effort fetch: any network/parse failure degrades to ''
        logger.warning(f"Failed to fetch website content from {url}: {e}")

    return ''
|
|
|
|
|
|
@app.route('/api/company/<int:company_id>/enrich-ai', methods=['POST'])
@login_required
@limiter.limit("5 per hour")
def api_enrich_company_ai(company_id):
    """
    API: Enrich company data using AI (Gemini) with web search.

    Process:
    1. Search Brave API for company news and web results
    2. Fetch content from company website
    3. Combine with existing database data
    4. Send to Gemini for AI-powered enrichment

    Generates AI insights including:
    - Business summary
    - Services list
    - Target market
    - Unique selling points
    - Company values
    - Certifications
    - Industry tags

    The parsed insights are upserted into CompanyAIInsights (one row per
    company) and echoed back in the JSON response.

    Requires: Admin or company owner permissions.
    Rate limited to 5 requests per hour per user.
    """
    import json

    db = SessionLocal()
    try:
        # Get company
        company = db.query(Company).filter_by(id=company_id).first()
        if not company:
            return jsonify({
                'success': False,
                'error': 'Firma nie znaleziona'
            }), 404

        # Check permissions: admin or company owner
        logger.info(f"Permission check: user={current_user.email}, is_admin={current_user.is_admin}, user_company_id={current_user.company_id}, target_company_id={company.id}")
        if not current_user.is_admin and current_user.company_id != company.id:
            return jsonify({
                'success': False,
                'error': 'Brak uprawnien. Tylko administrator lub wlasciciel firmy moze wzbogacac dane.'
            }), 403

        # Get Gemini service (503 when the AI backend is not configured)
        service = gemini_service.get_gemini_service()
        if not service:
            return jsonify({
                'success': False,
                'error': 'Usluga AI jest niedostepna. Skontaktuj sie z administratorem.'
            }), 503

        logger.info(f"AI enrichment triggered by {current_user.email} for company: {company.name} (ID: {company.id})")

        # ============================================
        # STEP 1: Search the web for company info
        # ============================================
        brave_results = _search_brave_for_company(company.name, company.address_city)

        # Format news for prompt (top 3, descriptions truncated to 200 chars)
        news_text = ""
        if brave_results['news']:
            news_text = "\n".join([
                f"- {item['title']}: {item['description'][:200]}"
                for item in brave_results['news'][:3]
            ])

        # Format web results for prompt (same truncation as news)
        web_text = ""
        if brave_results['web']:
            web_text = "\n".join([
                f"- {item['title']}: {item['description'][:200]}"
                for item in brave_results['web'][:3]
            ])

        # ============================================
        # STEP 2: Fetch company website content
        # ============================================
        website_content = ""
        if company.website:
            website_content = _fetch_website_content(company.website)

        # ============================================
        # STEP 3: Collect existing company data
        # ============================================
        # Prefer structured service relations; fall back to the free-text field
        services_list = []
        if company.services:
            services_list = [cs.service.name for cs in company.services if cs.service]
        elif company.services_offered:
            services_list = [company.services_offered]

        competencies_list = []
        if company.competencies:
            competencies_list = [cc.competency.name for cc in company.competencies if cc.competency]

        # Polish keys match the prompt section labels below
        existing_data = {
            'nazwa': company.name,
            'opis_krotki': company.description_short or '',
            'opis_pelny': company.description_full or '',
            'kategoria': company.category.name if company.category else '',
            'uslugi': ', '.join(services_list) if services_list else '',
            'kompetencje': ', '.join(competencies_list) if competencies_list else '',
            'wartosci': company.core_values or '',
            'strona_www': company.website or '',
            'miasto': company.address_city or '',
            'branza': company.pkd_description or ''
        }

        # ============================================
        # STEP 4: Build comprehensive prompt for AI
        # ============================================
        # NOTE: the prompt text is part of the API contract with the model —
        # it instructs Gemini to reply with raw JSON only (no markdown fences).
        prompt = f"""Przeanalizuj wszystkie dostepne dane o polskiej firmie i wygeneruj wzbogacone informacje.

=== DANE Z BAZY DANYCH ===
Nazwa: {existing_data['nazwa']}
Kategoria: {existing_data['kategoria']}
Opis krotki: {existing_data['opis_krotki']}
Opis pelny: {existing_data['opis_pelny']}
Uslugi: {existing_data['uslugi']}
Kompetencje: {existing_data['kompetencje']}
Wartosci firmy: {existing_data['wartosci']}
Strona WWW: {existing_data['strona_www']}
Miasto: {existing_data['miasto']}
Branza (PKD): {existing_data['branza']}

=== INFORMACJE Z INTERNETU (Brave Search) ===
Newsy o firmie:
{news_text if news_text else '(brak znalezionych newsow)'}

Wyniki wyszukiwania:
{web_text if web_text else '(brak wynikow)'}

=== TRESC ZE STRONY WWW FIRMY ===
{website_content[:2000] if website_content else '(nie udalo sie pobrac tresci strony)'}

=== ZADANIE ===
Na podstawie WSZYSTKICH powyzszych danych (baza danych, wyszukiwarka, strona WWW) wygeneruj wzbogacone informacje o firmie.
Wykorzystaj informacje z internetu do uzupelnienia brakujacych danych.
Jesli znalazles nowe uslugi, certyfikaty lub informacje - dodaj je do odpowiedzi.

Odpowiedz WYLACZNIE w formacie JSON (bez dodatkowego tekstu):
{{
"business_summary": "Zwiezly opis dzialalnosci firmy (2-3 zdania) na podstawie wszystkich zrodel",
"services_list": ["usluga1", "usluga2", "usluga3", "usluga4", "usluga5"],
"target_market": "Opis grupy docelowej klientow",
"unique_selling_points": ["wyroznik1", "wyroznik2", "wyroznik3"],
"company_values": ["wartosc1", "wartosc2", "wartosc3"],
"certifications": ["certyfikat1", "certyfikat2"],
"industry_tags": ["tag1", "tag2", "tag3", "tag4", "tag5"],
"recent_news": "Krotkie podsumowanie ostatnich newsow o firmie (jesli sa)",
"suggested_category": "Sugerowana kategoria glowna",
"category_confidence": 0.85,
"data_sources_used": ["database", "brave_search", "website"]
}}

WAZNE:
- Odpowiedz TYLKO JSON, bez markdown, bez ```json
- Wszystkie teksty po polsku
- Listy powinny zawierac 3-5 elementow
- category_confidence to liczba od 0 do 1
- Wykorzystaj maksymalnie informacje z internetu
"""

        # Call Gemini API (usage is attributed to this user/company for tracking)

        start_time = time.time()
        response_text = service.generate_text(
            prompt=prompt,
            temperature=0.7,
            feature='ai_enrichment',
            user_id=current_user.id,
            company_id=company.id,
            related_entity_type='company',
            related_entity_id=company.id
        )
        processing_time = int((time.time() - start_time) * 1000)

        # Parse JSON response
        try:
            # Clean response - remove markdown code blocks if present
            # (models sometimes wrap output in ``` / ```json despite instructions)
            clean_response = response_text.strip()
            if clean_response.startswith('```'):
                clean_response = clean_response.split('```')[1]
                if clean_response.startswith('json'):
                    clean_response = clean_response[4:]
                clean_response = clean_response.strip()

            ai_data = json.loads(clean_response)
        except json.JSONDecodeError as e:
            logger.error(f"Failed to parse AI response: {e}\nResponse: {response_text[:500]}")
            return jsonify({
                'success': False,
                'error': 'Blad parsowania odpowiedzi AI. Sprobuj ponownie.'
            }), 500

        # Save or update AI insights (upsert: one CompanyAIInsights row per company)
        existing_insights = db.query(CompanyAIInsights).filter_by(company_id=company.id).first()

        if existing_insights:
            # Update existing row in place
            existing_insights.business_summary = ai_data.get('business_summary')
            existing_insights.services_list = ai_data.get('services_list', [])
            existing_insights.target_market = ai_data.get('target_market')
            existing_insights.unique_selling_points = ai_data.get('unique_selling_points', [])
            existing_insights.company_values = ai_data.get('company_values', [])
            existing_insights.certifications = ai_data.get('certifications', [])
            existing_insights.industry_tags = ai_data.get('industry_tags', [])
            existing_insights.suggested_category = ai_data.get('suggested_category')
            existing_insights.category_confidence = ai_data.get('category_confidence')
            existing_insights.ai_confidence_score = 0.85  # Default confidence
            existing_insights.processing_time_ms = processing_time
            existing_insights.analyzed_at = datetime.utcnow()
        else:
            # Create new row
            new_insights = CompanyAIInsights(
                company_id=company.id,
                business_summary=ai_data.get('business_summary'),
                services_list=ai_data.get('services_list', []),
                target_market=ai_data.get('target_market'),
                unique_selling_points=ai_data.get('unique_selling_points', []),
                company_values=ai_data.get('company_values', []),
                certifications=ai_data.get('certifications', []),
                industry_tags=ai_data.get('industry_tags', []),
                suggested_category=ai_data.get('suggested_category'),
                category_confidence=ai_data.get('category_confidence'),
                ai_confidence_score=0.85,
                processing_time_ms=processing_time,
                analyzed_at=datetime.utcnow()
            )
            db.add(new_insights)

        db.commit()

        # Count sources used (reported back to the client for transparency)
        sources_used = ['database']
        if brave_results['news'] or brave_results['web']:
            sources_used.append('brave_search')
        if website_content:
            sources_used.append('website')

        logger.info(f"AI enrichment completed for {company.name}. Processing time: {processing_time}ms. Sources: {sources_used}")

        return jsonify({
            'success': True,
            'message': f'Dane firmy "{company.name}" zostaly wzbogacone przez AI',
            'processing_time_ms': processing_time,
            'sources_used': sources_used,
            'brave_results_count': len(brave_results['news']) + len(brave_results['web']),
            'website_content_length': len(website_content),
            'insights': ai_data
        })

    except Exception as e:
        # Roll back any partial insight writes before reporting the failure
        db.rollback()
        logger.error(f"AI enrichment error for company {company_id}: {str(e)}")
        return jsonify({
            'success': False,
            'error': f'Blad podczas wzbogacania danych: {str(e)}'
        }), 500
    finally:
        db.close()
|
|
|
|
|
|
@app.route('/api/model-info', methods=['GET'])
def api_model_info():
    """API: Report which AI model currently backs the assistant."""
    svc = gemini_service.get_gemini_service()

    # Guard clause: no initialized service means no model to report
    if not svc:
        return jsonify({
            'success': False,
            'error': 'AI service not initialized'
        }), 500

    return jsonify({
        'success': True,
        'model': svc.model_name,
        'provider': 'Google Gemini'
    })
|
|
|
|
|
|
@app.route('/api/admin/test-sanitization', methods=['POST'])
@login_required
def test_sanitization():
    """
    Admin API: Test sensitive data detection without saving.

    Allows admins to verify what data would be sanitized.

    Expects JSON body: {"text": "<message to scan>"}

    Returns:
        200 with the sanitized text and every match (type, original value,
        masked value, confidence),
        400 when 'text' is missing,
        403 for non-admins,
        500 when the sanitization service is unavailable or fails.
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Admin access required'}), 403

    try:
        from sensitive_data_service import sanitize_message
        # silent=True: malformed/missing JSON becomes {} and is rejected below
        # with a clean 400 instead of bubbling up as an unhandled 500.
        data = request.get_json(silent=True) or {}
        text = data.get('text', '')

        if not text:
            return jsonify({'success': False, 'error': 'Text is required'}), 400

        sanitized, matches = sanitize_message(text)

        return jsonify({
            'success': True,
            'original': text,
            'sanitized': sanitized,
            'matches': [
                {
                    'type': m.data_type.value,
                    'original': m.original,
                    'masked': m.masked,
                    'confidence': m.confidence
                }
                for m in matches
            ],
            'has_sensitive_data': len(matches) > 0
        })
    except ImportError:
        return jsonify({
            'success': False,
            'error': 'Sensitive data service not available'
        }), 500
    except Exception as e:
        logger.error(f"Error testing sanitization: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
|
|
|
|
|
|
# ============================================================
|
|
# DEVELOPMENT INSIGHTS (Roadmap from user feedback)
|
|
# ============================================================
|
|
|
|
# @app.route('/admin/insights') # MOVED TO admin.admin_insights
# @login_required
def _old_admin_insights():
    """Legacy admin dashboard for development insights (superseded by admin.admin_insights)."""
    # Admins get the dashboard; everyone else is bounced back with a flash.
    if current_user.is_admin:
        return render_template('admin/insights.html')

    flash('Brak uprawnień do tej strony.', 'error')
    return redirect(url_for('dashboard'))
|
|
|
|
|
|
# @app.route('/api/admin/insights', methods=['GET']) # MOVED TO admin.api_get_insights
# @login_required
def _old_api_get_insights():
    """Legacy endpoint: list development insights for roadmap planning (admin only)."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Admin access required'}), 403

    try:
        from norda_knowledge_service import get_knowledge_service
        knowledge = get_knowledge_service()

        # Optional ?status=... filter narrows the insight list
        status_filter = request.args.get('status')
        insights = knowledge.get_development_insights(status=status_filter)

        return jsonify({
            'success': True,
            'insights': insights,
            'count': len(insights)
        })
    except ImportError:
        return jsonify({
            'success': False,
            'error': 'Knowledge service not available'
        }), 500
    except Exception as e:
        logger.error(f"Error getting insights: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
|
|
|
|
|
|
# @app.route('/api/admin/insights/<int:insight_id>/status', methods=['PUT']) # MOVED TO admin.api_update_insight_status
# @login_required
def _old_api_update_insight_status(insight_id):
    """Legacy endpoint: update an insight's status for roadmap planning (admin only)."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Admin access required'}), 403

    try:
        from norda_knowledge_service import get_knowledge_service
        knowledge = get_knowledge_service()

        payload = request.get_json()
        new_status = payload.get('status')
        note = payload.get('note')

        # Status is mandatory; note is optional
        if not new_status:
            return jsonify({'success': False, 'error': 'Status is required'}), 400

        updated = knowledge.update_insight_status(insight_id, new_status, note)
        return jsonify({'success': updated})
    except Exception as e:
        logger.error(f"Error updating insight status: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
|
|
|
|
|
|
# @app.route('/api/admin/insights/sync', methods=['POST']) # MOVED TO admin.api_sync_insights
# @login_required
def _old_api_sync_insights():
    """Legacy endpoint: manually trigger knowledge sync from forum and chat (admin only)."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Admin access required'}), 403

    try:
        from norda_knowledge_service import get_knowledge_service
        knowledge = get_knowledge_service()

        payload = request.get_json() or {}
        days_back = payload.get('days_back', 30)

        # Run all three sync passes over the same window and report them together
        sync_results = {
            'forum': knowledge.sync_forum_knowledge(days_back),
            'chat': knowledge.sync_chat_knowledge(days_back),
            'questions': knowledge.analyze_user_questions(days_back)
        }

        return jsonify({
            'success': True,
            'results': sync_results
        })
    except ImportError:
        return jsonify({
            'success': False,
            'error': 'Knowledge service not available'
        }), 500
    except Exception as e:
        logger.error(f"Error syncing insights: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
|
|
|
|
|
|
# @app.route('/api/admin/insights/stats', methods=['GET']) # MOVED TO admin.api_insights_stats
# @login_required
def _old_api_insights_stats():
    """Legacy endpoint: knowledge base statistics (admin only)."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Admin access required'}), 403

    try:
        from norda_knowledge_service import get_knowledge_service
        knowledge = get_knowledge_service()

        return jsonify({
            'success': True,
            'stats': knowledge.get_knowledge_stats()
        })
    except ImportError:
        return jsonify({
            'success': False,
            'error': 'Knowledge service not available'
        }), 500
    except Exception as e:
        logger.error(f"Error getting stats: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
|
|
|
|
|
|
# @app.route('/admin/analytics') # MOVED TO admin.admin_analytics
|
|
# @login_required
|
|
def _old_admin_analytics():
    """Admin dashboard for user analytics - sessions, page views, clicks.

    Legacy handler kept after the route moved to the admin blueprint
    (admin.admin_analytics); the route decorator is commented out, so
    this function is no longer registered.

    Query args:
        period: 'day' | 'week' | 'month' | anything else = all time.
        user_id: optional int; when given, a per-user drill-down is
            added to the rendered page.

    Renders admin/analytics_dashboard.html, or redirects to
    'admin_users' on any error.
    """
    if not current_user.is_admin:
        flash('Brak uprawnien do tej strony.', 'error')
        return redirect(url_for('dashboard'))

    from sqlalchemy import func, desc
    from sqlalchemy.orm import joinedload
    from datetime import date, timedelta

    period = request.args.get('period', 'week')
    user_id = request.args.get('user_id', type=int)

    # Period calculation: start_date=None means "all time" (no filter).
    today = date.today()
    if period == 'day':
        start_date = today
    elif period == 'week':
        start_date = today - timedelta(days=7)
    elif period == 'month':
        start_date = today - timedelta(days=30)
    else:
        start_date = None

    db = SessionLocal()
    try:
        # Base query for sessions in period; reused below for counts
        # and for the bounce-rate calculation.
        sessions_query = db.query(UserSession)
        if start_date:
            sessions_query = sessions_query.filter(
                func.date(UserSession.started_at) >= start_date
            )

        # Overall stats
        total_sessions = sessions_query.count()
        unique_users = sessions_query.filter(
            UserSession.user_id.isnot(None)
        ).distinct(UserSession.user_id).count()

        # NOTE(review): the `... if start_date else True` pattern passes a
        # bare Python True into filter() for the all-time case; SQLAlchemy
        # accepts it but the explicit-branch style used above is cleaner.
        total_page_views = db.query(func.sum(UserSession.page_views_count)).filter(
            func.date(UserSession.started_at) >= start_date if start_date else True
        ).scalar() or 0

        total_clicks = db.query(func.sum(UserSession.clicks_count)).filter(
            func.date(UserSession.started_at) >= start_date if start_date else True
        ).scalar() or 0

        avg_duration = db.query(func.avg(UserSession.duration_seconds)).filter(
            func.date(UserSession.started_at) >= start_date if start_date else True,
            UserSession.duration_seconds.isnot(None)
        ).scalar() or 0

        stats = {
            'total_sessions': total_sessions,
            'unique_users': unique_users,
            'total_page_views': int(total_page_views),
            'total_clicks': int(total_clicks),
            'avg_duration': float(avg_duration)
        }

        # Device breakdown (device_type -> session count)
        device_query = db.query(
            UserSession.device_type,
            func.count(UserSession.id)
        )
        if start_date:
            device_query = device_query.filter(
                func.date(UserSession.started_at) >= start_date
            )
        device_stats = dict(device_query.group_by(UserSession.device_type).all())

        # Top users by engagement (ordered by total page views, top 20)
        user_query = db.query(
            User.id,
            User.name,
            User.email,
            func.count(UserSession.id).label('sessions'),
            func.sum(UserSession.page_views_count).label('page_views'),
            func.sum(UserSession.clicks_count).label('clicks'),
            func.sum(UserSession.duration_seconds).label('total_time')
        ).join(UserSession, User.id == UserSession.user_id)

        if start_date:
            user_query = user_query.filter(
                func.date(UserSession.started_at) >= start_date
            )

        user_rankings = user_query.group_by(User.id).order_by(
            desc('page_views')
        ).limit(20).all()

        # Popular pages (views, unique users, avg time per path, top 20)
        page_query = db.query(
            PageView.path,
            func.count(PageView.id).label('views'),
            func.count(func.distinct(PageView.user_id)).label('unique_users'),
            func.avg(PageView.time_on_page_seconds).label('avg_time')
        )
        if start_date:
            page_query = page_query.filter(
                func.date(PageView.viewed_at) >= start_date
            )
        popular_pages = page_query.group_by(PageView.path).order_by(
            desc('views')
        ).limit(20).all()

        # Recent sessions (last 50) - note: intentionally NOT period-filtered
        recent_sessions = db.query(UserSession).options(
            joinedload(UserSession.user)
        ).order_by(UserSession.started_at.desc()).limit(50).all()

        # Single user detail (if requested via ?user_id=...)
        user_detail = None
        if user_id:
            user_obj = db.query(User).filter_by(id=user_id).first()
            user_sessions = db.query(UserSession).filter_by(user_id=user_id).order_by(
                UserSession.started_at.desc()
            ).limit(20).all()
            user_pages = db.query(PageView).filter_by(user_id=user_id).order_by(
                PageView.viewed_at.desc()
            ).limit(50).all()

            user_detail = {
                'user': user_obj,
                'sessions': user_sessions,
                'pages': user_pages
            }

        # ============================================================
        # NEW METRICS (Analytics Expansion 2026-01-30)
        # ============================================================

        # Bounce rate: sessions with <= 1 pageview OR duration < 10s
        bounced_sessions = sessions_query.filter(
            (UserSession.page_views_count <= 1) |
            ((UserSession.duration_seconds.isnot(None)) & (UserSession.duration_seconds < 10))
        ).count()
        bounce_rate = round((bounced_sessions / total_sessions * 100), 1) if total_sessions > 0 else 0

        # Geolocation - top 10 countries by session count
        country_query = db.query(
            UserSession.country,
            func.count(UserSession.id).label('count')
        ).filter(UserSession.country.isnot(None))
        if start_date:
            country_query = country_query.filter(func.date(UserSession.started_at) >= start_date)
        country_stats = dict(country_query.group_by(UserSession.country).order_by(desc('count')).limit(10).all())

        # UTM sources - top 10 traffic sources
        utm_query = db.query(
            UserSession.utm_source,
            func.count(UserSession.id).label('count')
        ).filter(UserSession.utm_source.isnot(None))
        if start_date:
            utm_query = utm_query.filter(func.date(UserSession.started_at) >= start_date)
        utm_stats = dict(utm_query.group_by(UserSession.utm_source).order_by(desc('count')).limit(10).all())

        # Top searches (normalized query, count, avg result count; top 15)
        search_query = db.query(
            SearchQuery.query_normalized,
            func.count(SearchQuery.id).label('count'),
            func.avg(SearchQuery.results_count).label('avg_results')
        )
        if start_date:
            search_query = search_query.filter(func.date(SearchQuery.searched_at) >= start_date)
        top_searches = search_query.group_by(SearchQuery.query_normalized).order_by(desc('count')).limit(15).all()

        # Searches with no results (content-gap signal; top 10)
        # `== False` is intentional: SQLAlchemy column comparison, not identity.
        no_results_query = db.query(
            SearchQuery.query_normalized,
            func.count(SearchQuery.id).label('count')
        ).filter(SearchQuery.has_results == False)
        if start_date:
            no_results_query = no_results_query.filter(func.date(SearchQuery.searched_at) >= start_date)
        searches_no_results = no_results_query.group_by(SearchQuery.query_normalized).order_by(desc('count')).limit(10).all()

        # Conversions (event_type -> count)
        conversion_query = db.query(
            ConversionEvent.event_type,
            func.count(ConversionEvent.id).label('count')
        )
        if start_date:
            conversion_query = conversion_query.filter(func.date(ConversionEvent.converted_at) >= start_date)
        conversion_stats = dict(conversion_query.group_by(ConversionEvent.event_type).all())

        # JS errors (aggregated by error_hash; top 10 most frequent)
        error_query = db.query(
            JSError.message,
            JSError.source,
            func.count(JSError.id).label('count')
        )
        if start_date:
            error_query = error_query.filter(func.date(JSError.occurred_at) >= start_date)
        js_errors = error_query.group_by(JSError.error_hash, JSError.message, JSError.source).order_by(desc('count')).limit(10).all()

        # Average scroll depth across all page views
        avg_scroll = db.query(func.avg(PageView.scroll_depth_percent)).filter(
            PageView.scroll_depth_percent.isnot(None)
        )
        if start_date:
            avg_scroll = avg_scroll.filter(func.date(PageView.viewed_at) >= start_date)
        avg_scroll_depth = round(avg_scroll.scalar() or 0, 1)

        # Time patterns - session activity by hour of day (0-23 -> count)
        hourly_query = db.query(
            func.extract('hour', UserSession.started_at).label('hour'),
            func.count(UserSession.id).label('count')
        )
        if start_date:
            hourly_query = hourly_query.filter(func.date(UserSession.started_at) >= start_date)
        hourly_activity = dict(hourly_query.group_by('hour').all())

        # Merge the new metrics into the headline stats dict
        stats['bounce_rate'] = bounce_rate
        stats['avg_scroll_depth'] = avg_scroll_depth

        return render_template(
            'admin/analytics_dashboard.html',
            stats=stats,
            device_stats=device_stats,
            user_rankings=user_rankings,
            popular_pages=popular_pages,
            recent_sessions=recent_sessions,
            user_detail=user_detail,
            current_period=period,
            # Expansion metrics (2026-01-30)
            country_stats=country_stats,
            utm_stats=utm_stats,
            top_searches=top_searches,
            searches_no_results=searches_no_results,
            conversion_stats=conversion_stats,
            js_errors=js_errors,
            hourly_activity=hourly_activity
        )
    except Exception as e:
        logger.error(f"Admin analytics error: {e}")
        flash('Blad podczas ladowania analityki.', 'error')
        return redirect(url_for('admin_users'))
    finally:
        db.close()
|
|
|
|
|
|
# @app.route('/admin/analytics/export') # MOVED TO admin.admin_analytics_export
|
|
# @login_required
|
|
def _old_admin_analytics_export():
    """Export analytics data as CSV (admin only).

    Legacy handler kept after the route moved to the admin blueprint
    (admin.admin_analytics_export); the route decorator is commented
    out, so this function is no longer registered.

    Query args:
        type: 'sessions' | 'pageviews' | 'searches' | 'conversions'.
        period: 'day' | 'week' | 'month' | anything else = last year.

    Returns a text/csv attachment, or redirects with a flash message
    on auth failure / error.
    """
    import csv
    import io
    # Fix: `func` was used below without being in scope. Sibling handlers
    # (_old_admin_analytics, _old_admin_ai_usage) import it locally, so it
    # is not a module-level name; import it here the same way.
    from sqlalchemy import func

    if not current_user.is_admin:
        flash('Brak uprawnien.', 'error')
        return redirect(url_for('dashboard'))

    export_type = request.args.get('type', 'sessions')
    period = request.args.get('period', 'month')

    from datetime import date, timedelta
    today = date.today()

    # Unlike the dashboard, export has no "all time" option - it caps
    # unknown periods at one year to bound the result size.
    if period == 'day':
        start_date = today
    elif period == 'week':
        start_date = today - timedelta(days=7)
    elif period == 'month':
        start_date = today - timedelta(days=30)
    else:
        start_date = today - timedelta(days=365)  # year

    db = SessionLocal()
    try:
        # Build the CSV entirely in memory; exports are capped (10k rows
        # for the large tables) so this stays small.
        output = io.StringIO()
        writer = csv.writer(output)

        if export_type == 'sessions':
            writer.writerow(['ID', 'User ID', 'Started At', 'Duration (s)', 'Page Views', 'Clicks',
                             'Device', 'Browser', 'OS', 'Country', 'UTM Source', 'UTM Campaign'])

            sessions = db.query(UserSession).filter(
                func.date(UserSession.started_at) >= start_date
            ).order_by(UserSession.started_at.desc()).all()

            for s in sessions:
                writer.writerow([
                    s.id, s.user_id, s.started_at.isoformat() if s.started_at else '',
                    s.duration_seconds or 0, s.page_views_count or 0, s.clicks_count or 0,
                    s.device_type or '', s.browser or '', s.os or '',
                    s.country or '', s.utm_source or '', s.utm_campaign or ''
                ])

        elif export_type == 'pageviews':
            writer.writerow(['ID', 'Session ID', 'User ID', 'Path', 'Viewed At', 'Time on Page (s)',
                             'Scroll Depth (%)', 'Company ID'])

            # Capped at 10k rows to keep the response bounded.
            views = db.query(PageView).filter(
                func.date(PageView.viewed_at) >= start_date
            ).order_by(PageView.viewed_at.desc()).limit(10000).all()

            for v in views:
                writer.writerow([
                    v.id, v.session_id, v.user_id, v.path,
                    v.viewed_at.isoformat() if v.viewed_at else '',
                    v.time_on_page_seconds or 0, v.scroll_depth_percent or 0, v.company_id or ''
                ])

        elif export_type == 'searches':
            writer.writerow(['ID', 'User ID', 'Query', 'Results Count', 'Has Results', 'Clicked Company',
                             'Search Type', 'Searched At'])

            searches = db.query(SearchQuery).filter(
                func.date(SearchQuery.searched_at) >= start_date
            ).order_by(SearchQuery.searched_at.desc()).limit(10000).all()

            for s in searches:
                writer.writerow([
                    s.id, s.user_id, s.query, s.results_count, s.has_results,
                    s.clicked_company_id or '', s.search_type,
                    s.searched_at.isoformat() if s.searched_at else ''
                ])

        elif export_type == 'conversions':
            writer.writerow(['ID', 'User ID', 'Event Type', 'Event Category', 'Company ID',
                             'Target Type', 'Converted At'])

            conversions = db.query(ConversionEvent).filter(
                func.date(ConversionEvent.converted_at) >= start_date
            ).order_by(ConversionEvent.converted_at.desc()).all()

            for c in conversions:
                writer.writerow([
                    c.id, c.user_id, c.event_type, c.event_category or '',
                    c.company_id or '', c.target_type or '',
                    c.converted_at.isoformat() if c.converted_at else ''
                ])

        # Unknown export_type falls through and yields an empty CSV.
        output.seek(0)
        return Response(
            output.getvalue(),
            mimetype='text/csv',
            headers={'Content-Disposition': f'attachment; filename=analytics_{export_type}_{period}.csv'}
        )

    except Exception as e:
        logger.error(f"Export error: {e}")
        flash('Blad podczas eksportu.', 'error')
        return redirect(url_for('admin_analytics'))
    finally:
        db.close()
|
|
|
|
|
|
# @app.route('/api/admin/ai-learning-status') # MOVED TO admin.api_ai_learning_status
|
|
# @login_required
|
|
def _old_api_ai_learning_status():
    """API: Get AI feedback learning status and examples"""
    # Only admins may inspect the feedback-learning internals.
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Not authorized'}), 403

    try:
        # Lazy import: the learning service is an optional module.
        from feedback_learning_service import get_feedback_learning_service

        learning_ctx = get_feedback_learning_service().get_learning_context()

        # Shape stored examples into JSON-safe dicts. Positive responses
        # are truncated at 300 chars to keep the payload compact;
        # negative ones are returned in full with the user comment.
        positive_examples = [
            {
                'query': ex.query,
                'response': ex.response[:300] + '...' if len(ex.response) > 300 else ex.response,
                'companies': ex.companies_mentioned or []
            }
            for ex in learning_ctx.get('positive_examples', [])
        ]
        negative_examples = [
            {
                'query': ex.query,
                'response': ex.response,
                'comment': ex.feedback_comment
            }
            for ex in learning_ctx.get('negative_examples', [])
        ]

        learning_stats = learning_ctx.get('stats', {})
        return jsonify({
            'success': True,
            'learning_active': True,
            'stats': learning_stats,
            'using_seed_examples': learning_stats.get('using_seed_examples', False),
            'positive_examples_count': len(positive_examples),
            'negative_examples_count': len(negative_examples),
            'positive_examples': positive_examples,
            'negative_examples': negative_examples,
            'negative_patterns': learning_ctx.get('negative_patterns', []),
            'generated_at': learning_ctx.get('generated_at')
        })
    except ImportError:
        # Missing service is a normal deployment state, not an error:
        # report learning as inactive with HTTP 200.
        return jsonify({
            'success': True,
            'learning_active': False,
            'message': 'Feedback learning service not available'
        })
    except Exception as e:
        logger.error(f"Error getting AI learning status: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
|
|
|
|
|
|
# ============================================================
|
|
# MODEL COMPARISON - Porównanie modeli AI
|
|
# ============================================================
|
|
|
|
# @app.route('/admin/model-comparison') # MOVED TO admin.admin_model_comparison
|
|
# @login_required
|
|
def _old_admin_model_comparison():
    """Admin page for comparing AI model responses"""
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))

    # The "run" endpoint persists its output to this /tmp file; render
    # it when present, otherwise show an empty comparison page.
    results_file = '/tmp/nordabiz_model_comparison_results.json'
    results, generated_at = None, None

    if os.path.exists(results_file):
        try:
            with open(results_file, 'r', encoding='utf-8') as fh:
                saved = json.load(fh)
            results = saved.get('results', {})
            generated_at = saved.get('generated_at', 'Nieznana data')
        except Exception as e:
            # A corrupt or unreadable file is non-fatal: log and fall
            # back to rendering without results.
            logger.error(f"Error loading model comparison results: {e}")

    return render_template(
        'admin/model_comparison.html',
        results=results,
        generated_at=generated_at
    )
|
|
|
|
|
|
# @app.route('/admin/model-comparison/run', methods=['POST']) # MOVED TO admin.admin_model_comparison_run
|
|
# @login_required
|
|
def _old_admin_model_comparison_run():
    """Run model comparison simulation (admin only).

    Legacy handler kept after the route moved to the admin blueprint
    (admin.admin_model_comparison_run); the route decorator is
    commented out, so this function is no longer registered.

    Replays a fixed set of real user questions (with their recorded
    old-model answers) against the currently configured Gemini model,
    using a full NordaGPT context rebuilt from the database, then
    persists the side-by-side results to /tmp for the comparison page.

    Returns JSON {'success': True} on completion, 403 for non-admins,
    500 on failure.
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403

    try:
        # Questions to compare (hard-coded from real conversations).
        # 'old_answer' is the recorded historical response; 'new_answer'
        # is filled in below by the current model.
        comparison_questions = {
            'artur': {
                'user_id': 3,
                'user_name': 'Artur Wiertel',
                'original_conversation_id': 63,
                'comparisons': [
                    {
                        'question_id': 1,
                        'question': 'ilu członków ma izba norda?',
                        'type': 'simple',
                        'old_model': 'gemini-2.5-flash-lite',
                        'old_answer': 'Izba Norda Biznes zrzesza obecnie 111 firm.',
                        'new_model': 'gemini-3-flash-preview',
                        'new_answer': None
                    },
                    {
                        'question_id': 2,
                        'question': 'czy w izbie są osoby gotowe do współpracy przy wspólnych projektach inwestycyjnych?',
                        'type': 'complex',
                        'old_model': 'gemini-2.5-flash-lite',
                        'old_answer': 'W Norda Biznes posiadamy firmy otwarte na współpracę przy projektach inwestycyjnych. Szczególnie interesujące mogą być podmioty z kategorii Budownictwo, Nieruchomości oraz Usługi Biznesowe. Czy szukasz konkretnego rodzaju partnera lub projektu?',
                        'new_model': 'gemini-3-flash-preview',
                        'new_answer': None
                    },
                    {
                        'question_id': 3,
                        'question': 'Mam pomysł i projekt ciekawego przedsięwzięcia inwestycyjnego - budowa nieruchomości komercyjnej. Szukam partnerów - 3 osoby, wkład na poziomie 1,5 mln netto',
                        'type': 'business',
                        'old_model': 'gemini-2.5-flash-lite',
                        'old_answer': 'Poszukujesz partnerów do inwestycji w nieruchomość komercyjną. W naszej bazie znajdują się firmy, które mogą być zainteresowane takim przedsięwzięciem. Warto zwrócić uwagę na: ARD Invest, Progress Optima, Orlex MG...',
                        'new_model': 'gemini-3-flash-preview',
                        'new_answer': None
                    },
                    {
                        'question_id': 4,
                        'question': 'a co sądzisz o firmie sigma?',
                        'type': 'company',
                        'old_model': 'gemini-2.5-flash-lite',
                        'old_answer': 'Firma Sigma Budownictwo to generalny wykonawca robót budowlanych, który ma na swoim koncie portfolio ponad 250 tys. m² powierzchni użytkowej, w tym hale przemysłowe. Jest to rodzinna firma z Wejherowa, zarządzana przez Romana i Tomasza Wiercińskich.',
                        'new_model': 'gemini-3-flash-preview',
                        'new_answer': None
                    },
                    {
                        'question_id': 5,
                        'question': 'co myślisz o pomysłach Jacka Pomieczyńskiego na forum?',
                        'type': 'analysis',
                        'old_model': 'gemini-2.5-flash-lite',
                        'old_answer': 'Chętnie pomogę! Aby odpowiedzieć na Twoje pytanie o pomysły Jacka Pomieczyńskiego, potrzebuję więcej informacji. Czy możesz sprecyzować, o jakie konkretnie pomysły chodzi?',
                        'new_model': 'gemini-3-flash-preview',
                        'new_answer': None
                    }
                ]
            },
            'maciej': {
                'user_id': 1,
                'user_name': 'Maciej Pienczyn',
                'original_conversation_id': 62,
                'comparisons': [
                    {
                        'question_id': 1,
                        'question': 'Kto jest prezesem PIXLAB?',
                        'type': 'simple',
                        'old_model': 'gemini-2.5-flash-lite',
                        'old_answer': 'Prezesem firmy Pixlab Softwarehouse jest Michał Bogdan Roszman.',
                        'new_model': 'gemini-3-flash-preview',
                        'new_answer': None
                    },
                    {
                        'question_id': 2,
                        'question': 'jakie znasz inne firmy IT w Norda?',
                        'type': 'listing',
                        'old_model': 'gemini-2.5-flash-lite',
                        'old_answer': 'W Norda Biznes działa kilka firm z branży IT i Telekomunikacja. Są to między innymi VENCODE, oferujące dedykowane aplikacje oparte na sztucznej inteligencji i technologii blockchain, oraz Delkom, specjalizujący się w usługach IT, cyberbezpieczeństwie i doradztwie informatycznym.',
                        'new_model': 'gemini-3-flash-preview',
                        'new_answer': None
                    },
                    {
                        'question_id': 3,
                        'question': 'podaj osoby kontaktowe - będę budował sieć komputerową',
                        'type': 'contact',
                        'old_model': 'gemini-2.5-flash-lite',
                        'old_answer': 'Wśród firm z branży IT i telekomunikacja, które mogą pomóc w budowie sieci komputerowej, znajdują się: Chopin Telewizja Kablowa: Bogdan Łaga (Prezes), Delkom: Seweryn Kędziora (Członek Zarządu), INPI Sp. z o.o.',
                        'new_model': 'gemini-3-flash-preview',
                        'new_answer': None
                    }
                ]
            }
        }

        # Use gemini_service directly with FULL NordaGPT context
        gs = gemini_service.get_gemini_service()
        logger.info(f"Running model comparison with: {gs.model_name}")

        # Build FULL context from database (mirrors
        # NordaBizChatEngine._build_conversation_context). Each section
        # below produces a human-readable text block that is later
        # interpolated into the system prompt.
        db = SessionLocal()
        try:
            # === ALL COMPANIES in compact format (one line per company) ===
            all_companies = db.query(Company).filter_by(status='active').all()
            companies_list = []
            for c in all_companies:
                info = f"- {c.name}"
                if c.category:
                    info += f" [{c.category.name}]"
                if c.description_short:
                    info += f": {c.description_short}"
                if c.founding_history:
                    # Truncate history to keep the prompt bounded.
                    info += f" Historia: {c.founding_history[:200]}"
                services = [cs.service.name for cs in c.services if cs.service] if c.services else []
                if services:
                    info += f" Usługi: {', '.join(services[:5])}"
                if c.website:
                    info += f" WWW: {c.website}"
                if c.phone:
                    info += f" Tel: {c.phone}"
                if c.email:
                    info += f" Email: {c.email}"
                if c.address_city:
                    info += f" Miasto: {c.address_city}"
                info += f" Profil: https://nordabiznes.pl/company/{c.slug}"
                companies_list.append(info)

            companies_context = "\n".join(companies_list)
            total_count = len(all_companies)

            # === CATEGORIES with per-category active-company counts ===
            # NOTE(review): issues one COUNT query per category (N+1);
            # acceptable for an admin-only, on-demand tool.
            categories = db.query(Category).all()
            categories_context = "Kategorie firm:\n" + "\n".join([
                f"- {cat.name}: {db.query(Company).filter_by(category_id=cat.id, status='active').count()} firm"
                for cat in categories
            ])

            # === COMPANY PEOPLE (board members, shareholders) ===
            from sqlalchemy.orm import joinedload
            company_people = db.query(CompanyPerson).options(
                joinedload(CompanyPerson.person),
                joinedload(CompanyPerson.company)
            ).all()

            people_lines = []
            for cp in company_people:
                if cp.company and cp.person:
                    line = f"- {cp.company.name}: {cp.person.full_name()}"
                    if cp.role:
                        line += f" ({cp.role})"
                    if cp.shares_percent:
                        line += f" - {cp.shares_percent}% udziałów"
                    people_lines.append(line)
            people_context = "Osoby w firmach (zarząd, wspólnicy):\n" + "\n".join(people_lines) if people_lines else ""

            # === RECOMMENDATIONS (approved, newest 20) ===
            recommendations = db.query(CompanyRecommendation).filter_by(
                status='approved'
            ).order_by(CompanyRecommendation.created_at.desc()).limit(20).all()

            recs_lines = []
            for rec in recommendations:
                if rec.company:
                    line = f"- {rec.company.name}: {rec.recommendation_text[:150] if rec.recommendation_text else ''}"
                    recs_lines.append(line)
            recommendations_context = "Rekomendacje firm:\n" + "\n".join(recs_lines) if recs_lines else ""

            # === FORUM TOPICS (non-test, newest 15) ===
            forum_topics = db.query(ForumTopic).filter(
                ForumTopic.category != 'test'
            ).order_by(ForumTopic.created_at.desc()).limit(15).all()

            forum_lines = []
            for topic in forum_topics:
                line = f"- [{topic.category_label}] {topic.title}"
                if topic.reply_count:
                    line += f" ({topic.reply_count} odpowiedzi)"
                forum_lines.append(line)
            forum_context = "Tematy na forum:\n" + "\n".join(forum_lines) if forum_lines else ""

            # === UPCOMING EVENTS (next 60 days, max 15) ===
            from datetime import date, timedelta
            event_cutoff = date.today() + timedelta(days=60)
            upcoming_events = db.query(NordaEvent).filter(
                NordaEvent.event_date >= date.today(),
                NordaEvent.event_date <= event_cutoff
            ).order_by(NordaEvent.event_date).limit(15).all()

            events_lines = []
            for event in upcoming_events:
                line = f"- {event.event_date.strftime('%Y-%m-%d')}: {event.title}"
                if event.location:
                    line += f" ({event.location})"
                events_lines.append(line)
            events_context = "Nadchodzące wydarzenia:\n" + "\n".join(events_lines) if events_lines else ""

            # === B2B CLASSIFIEDS (active, non-test, newest 20) ===
            active_classifieds = db.query(Classified).filter(
                Classified.is_active == True,
                Classified.is_test == False
            ).order_by(Classified.created_at.desc()).limit(20).all()

            classifieds_lines = []
            for c in active_classifieds:
                line = f"- [{c.listing_type}] {c.title}"
                if c.company:
                    line += f" - {c.company.name}"
                classifieds_lines.append(line)
            classifieds_context = "Ogłoszenia B2B:\n" + "\n".join(classifieds_lines) if classifieds_lines else ""

            # === RECENT NEWS (ZOPK, approved, last 30 days, max 10) ===
            news_cutoff = datetime.now() - timedelta(days=30)
            recent_news = db.query(ZOPKNews).filter(
                ZOPKNews.status.in_(['approved', 'auto_approved']),
                ZOPKNews.published_at >= news_cutoff
            ).order_by(ZOPKNews.published_at.desc()).limit(10).all()

            news_lines = []
            for news in recent_news:
                line = f"- {news.published_at.strftime('%Y-%m-%d') if news.published_at else ''}: {news.title}"
                news_lines.append(line)
            news_context = "Ostatnie aktualności:\n" + "\n".join(news_lines) if news_lines else ""

            # === SOCIAL MEDIA (valid profiles, first 30 lines used) ===
            social_media = db.query(CompanySocialMedia).filter(
                CompanySocialMedia.is_valid == True
            ).options(joinedload(CompanySocialMedia.company)).all()

            social_lines = []
            for sm in social_media:
                if sm.company:
                    line = f"- {sm.company.name}: {sm.platform}"
                    if sm.followers_count:
                        line += f" ({sm.followers_count} obserwujących)"
                    social_lines.append(line)
            social_context = "Social media firm:\n" + "\n".join(social_lines[:30]) if social_lines else ""

            # === GBP AUDITS (Google Business Profile) ===
            from sqlalchemy import func
            # Latest audit per company via a (company_id, max(audit_date)) subquery
            latest_audit_subq = db.query(
                GBPAudit.company_id,
                func.max(GBPAudit.audit_date).label('max_date')
            ).group_by(GBPAudit.company_id).subquery()

            latest_audits = db.query(GBPAudit).join(
                latest_audit_subq,
                (GBPAudit.company_id == latest_audit_subq.c.company_id) &
                (GBPAudit.audit_date == latest_audit_subq.c.max_date)
            ).options(joinedload(GBPAudit.company)).all()

            gbp_lines = []
            for audit in latest_audits:
                if audit.company:
                    line = f"- {audit.company.name}: Kompletność {audit.completeness_score or 0}%"
                    if audit.review_count:
                        line += f", {audit.review_count} recenzji"
                    if audit.average_rating:
                        line += f", ocena {float(audit.average_rating):.1f}/5"
                    if audit.google_maps_url:
                        line += f" Maps: {audit.google_maps_url}"
                    gbp_lines.append(line)
            gbp_context = "Audyty Google Business Profile:\n" + "\n".join(gbp_lines) if gbp_lines else ""

            # === SEO AUDITS (PageSpeed scores per company website) ===
            seo_audits = db.query(CompanyWebsiteAnalysis).filter(
                CompanyWebsiteAnalysis.pagespeed_seo_score.isnot(None)
            ).options(joinedload(CompanyWebsiteAnalysis.company)).all()

            seo_lines = []
            for audit in seo_audits:
                if audit.company:
                    line = f"- {audit.company.name}: SEO {audit.pagespeed_seo_score or 0}/100"
                    if audit.pagespeed_performance_score:
                        line += f", Wydajność {audit.pagespeed_performance_score}/100"
                    if audit.pagespeed_accessibility_score:
                        line += f", Dostępność {audit.pagespeed_accessibility_score}/100"
                    if audit.seo_overall_score:
                        line += f", Ogólnie {audit.seo_overall_score}/100"
                    seo_lines.append(line)
            seo_context = "Audyty SEO stron WWW:\n" + "\n".join(seo_lines) if seo_lines else ""

            # === ZOPK KNOWLEDGE BASE (static regional-investment facts) ===
            zopk_knowledge = """Baza wiedzy ZOPK (Zielony Okręg Przemysłowy Kaszubia):

ELEKTROWNIA JĄDROWA:
- Lokalizacja: Lubiatowo-Kopalino (gmina Choczewo)
- Inwestor: Polskie Elektrownie Jądrowe (PEJ)
- Partner technologiczny: Westinghouse (reaktory AP1000)
- Moc: 2 reaktory po 1150 MW (łącznie 2300 MW)
- Harmonogram: Budowa 2028-2035, uruchomienie 2035-2037
- Zatrudnienie: 3000 miejsc pracy (budowa), 900 stałych (eksploatacja)

OFFSHORE WIND (Morskie Farmy Wiatrowe):
- Baltic Power (Orlen + Northland): 1.2 GW, 76 turbin, ~25 km od Łeby
- Baltica 2 (PGE + Ørsted): 1.5 GW, na wschód od Łeby
- Baltica 3 (PGE + Ørsted): 1 GW
- Łączna moc planowana: 5.9 GW do 2030, 11 GW do 2040
- Port serwisowy: Ustka, Łeba (rozbudowa)

INFRASTRUKTURA TRANSPORTOWA:
- Via Pomerania: Droga S6 Szczecin-Gdańsk (w budowie)
- Droga Czerwona: S7 Gdańsk-Elbląg z Obwodnicą Metropolitalną
- PKM (Pomorska Kolej Metropolitalna): Rozwój sieci

INWESTYCJE PRZEMYSŁOWE:
- Kongsberg Maritime: Fabryka w Rumi (automatyzacja morska)
- Bałtycki Port Nowych Technologii: Gdynia
- Pomorska Specjalna Strefa Ekonomiczna: Ulgi podatkowe

IZBA NORDA BIZNES:
- Siedziba: Wejherowo
- Członkowie: 150 firm
- Cel: Networking, współpraca biznesowa, rozwój regionu"""

        finally:
            # Close the session before the (slow) model calls below; the
            # context variables built above remain in scope.
            db.close()

        # Build comprehensive system prompt with ALL context sections.
        system_prompt = f"""Jesteś NordaGPT - inteligentnym asystentem portalu Norda Biznes, katalogu {total_count} firm zrzeszonych w stowarzyszeniu Norda Biznes z Wejherowa (Polska).

TWOJE MOŻLIWOŚCI:
- Znasz WSZYSTKIE firmy członkowskie, ich dane kontaktowe, usługi, historię
- Znasz osoby zarządzające firmami (prezesi, wspólnicy, udziałowcy)
- Śledzisz aktualności, wydarzenia i ogłoszenia B2B
- Możesz polecić firmy do współpracy na podstawie potrzeb użytkownika
- Śledzisz dyskusje na forum członków
- Znasz wyniki audytów Google Business Profile (oceny, recenzje)
- Znasz wyniki audytów SEO stron WWW firm
- Masz wiedzę o ZOPK (Zielony Okręg Przemysłowy Kaszubia) - elektrownia jądrowa, offshore wind, infrastruktura

=== BAZA FIRM ({total_count} aktywnych) ===
{companies_context}

=== {categories_context} ===

{people_context}

{recommendations_context}

{forum_context}

{events_context}

{classifieds_context}

{news_context}

{social_context}

{gbp_context}

{seo_context}

{zopk_knowledge}

=== ZASADY ODPOWIEDZI ===
- Odpowiadaj konkretnie, podając nazwy firm i dane kontaktowe
- Linkuj do profili firm na portalu: https://nordabiznes.pl/company/[slug]
- Jeśli pytanie dotyczy konkretnej firmy - podaj szczegóły z bazy
- Przy pytaniach o osoby - podaj stanowisko i firmę
- NIE podawaj danych kontaktowych osób, które je ukryły w ustawieniach prywatności
- Bądź pomocny i profesjonalny"""

        # Generate a new-model answer for every question; failures are
        # recorded per-question instead of aborting the whole run.
        for user_key, user_data in comparison_questions.items():
            for comp in user_data['comparisons']:
                try:
                    prompt = f"{system_prompt}\n\nPytanie użytkownika: {comp['question']}"
                    response_text = gs.generate_text(prompt=prompt, temperature=0.7)
                    comp['new_answer'] = response_text if response_text else 'Brak odpowiedzi'
                    logger.info(f"Generated response for {user_key} Q{comp['question_id']}")
                except Exception as e:
                    comp['new_answer'] = f'Błąd: {str(e)}'
                    logger.error(f"Error generating response for {user_key} Q{comp['question_id']}: {e}")

        # Save results to /tmp (always writable); read back by the
        # comparison dashboard page.
        results_file = '/tmp/nordabiz_model_comparison_results.json'

        with open(results_file, 'w', encoding='utf-8') as f:
            json.dump({
                'generated_at': datetime.now().strftime('%Y-%m-%d %H:%M'),
                'old_model': 'gemini-2.5-flash-lite',
                'new_model': 'gemini-3-flash-preview',
                'results': comparison_questions
            }, f, ensure_ascii=False, indent=2)

        return jsonify({'success': True})

    except Exception as e:
        logger.error(f"Error running model comparison: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
|
|
|
|
|
|
# @app.route('/admin/ai-usage') # MOVED TO admin.admin_ai_usage
|
|
# @login_required
|
|
def _old_admin_ai_usage():
    """Admin dashboard for AI (Gemini) API usage monitoring.

    DEPRECATED: route moved to the admin blueprint (admin.admin_ai_usage);
    kept here for reference only — the @app.route decorator is commented out.

    Renders 'admin/ai_usage_dashboard.html' with aggregate request/token/cost
    stats, per-type breakdown, top-20 user and company rankings, the 20 most
    recent log rows, and a 14-day daily history. Costs are stored in cents
    and converted to USD (value / 100) for display. The ?period= query param
    (day/week/month/all, default 'month') scopes the type/user/company
    aggregations; the headline stats always cover fixed windows.
    """
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))

    from database import AIUsageLog, AIUsageDaily, User, Company
    from sqlalchemy import func, desc, case
    from datetime import timedelta

    # Get period filter from query params
    period = request.args.get('period', 'month')  # day, week, month, all

    db = SessionLocal()
    try:
        now = datetime.now()
        today = now.date()
        week_ago = today - timedelta(days=7)
        month_ago = today - timedelta(days=30)
        day_ago = now - timedelta(hours=24)

        # Determine date filter based on period.
        # Each entry maps period key -> (Polish UI label, start date or None).
        period_labels = {
            'day': ('Dzisiaj', today),
            'week': ('Ten tydzień', week_ago),
            'month': ('Ten miesiąc', month_ago),
            'all': ('Od początku', None)
        }
        period_label, period_start = period_labels.get(period, period_labels['month'])

        # Base query filter for period; 'all' (period_start=None) applies no filter.
        def period_filter(query):
            if period_start:
                return query.filter(func.date(AIUsageLog.created_at) >= period_start)
            return query

        # Today's stats (always show)
        today_stats = db.query(
            func.count(AIUsageLog.id).label('requests'),
            func.coalesce(func.sum(AIUsageLog.tokens_input), 0).label('tokens_input'),
            func.coalesce(func.sum(AIUsageLog.tokens_output), 0).label('tokens_output'),
            func.coalesce(func.sum(AIUsageLog.cost_cents), 0).label('cost_cents')
        ).filter(
            func.date(AIUsageLog.created_at) == today
        ).first()

        # Week stats
        week_requests = db.query(func.count(AIUsageLog.id)).filter(
            func.date(AIUsageLog.created_at) >= week_ago
        ).scalar() or 0

        # Month stats
        month_stats = db.query(
            func.count(AIUsageLog.id).label('requests'),
            func.coalesce(func.sum(AIUsageLog.cost_cents), 0).label('cost_cents')
        ).filter(
            func.date(AIUsageLog.created_at) >= month_ago
        ).first()

        # All-time stats
        all_time_stats = db.query(
            func.count(AIUsageLog.id).label('requests'),
            func.coalesce(func.sum(AIUsageLog.cost_cents), 0).label('cost_cents')
        ).first()

        # Error rate (last 24h)
        last_24h_total = db.query(func.count(AIUsageLog.id)).filter(
            AIUsageLog.created_at >= day_ago
        ).scalar() or 0

        last_24h_errors = db.query(func.count(AIUsageLog.id)).filter(
            AIUsageLog.created_at >= day_ago,
            AIUsageLog.success == False
        ).scalar() or 0

        error_rate = (last_24h_errors / last_24h_total * 100) if last_24h_total > 0 else 0

        # Average response time (last 24h) — successful requests only.
        avg_response_time = db.query(func.avg(AIUsageLog.response_time_ms)).filter(
            AIUsageLog.created_at >= day_ago,
            AIUsageLog.success == True
        ).scalar() or 0

        # Usage by type (filtered by period)
        type_query = db.query(
            AIUsageLog.request_type,
            func.count(AIUsageLog.id).label('count')
        )
        type_query = period_filter(type_query)
        type_stats = type_query.group_by(AIUsageLog.request_type).order_by(desc('count')).all()

        # Calculate percentages for type breakdown.
        # type_labels maps request_type -> (Polish UI label, CSS class).
        total_type_count = sum(t.count for t in type_stats) if type_stats else 0
        type_labels = {
            'ai_chat': ('Chat AI', 'chat'),
            'zopk_news_evaluation': ('Ocena newsów ZOP Kaszubia', 'news'),
            'ai_user_parse': ('Tworzenie user', 'user'),
            'gbp_audit_ai': ('Audyt GBP', 'image'),
            'general': ('Ogólne', 'other')
        }
        usage_by_type = []
        for t in type_stats:
            label, css_class = type_labels.get(t.request_type, (t.request_type, 'other'))
            percentage = (t.count / total_type_count * 100) if total_type_count > 0 else 0
            usage_by_type.append({
                'type': t.request_type,
                'type_label': label,
                'type_class': css_class,
                'count': t.count,
                'percentage': round(percentage, 1)
            })

        # ========================================
        # USER STATISTICS (filtered by period)
        # ========================================
        # Inner join on AIUsageLog drops users with no logged usage;
        # outer join keeps users without a company (company shown as '-').
        user_query = db.query(
            User.id,
            User.name.label('user_name'),
            User.email,
            Company.name.label('company_name'),
            func.count(AIUsageLog.id).label('requests'),
            func.coalesce(func.sum(AIUsageLog.tokens_input), 0).label('tokens_input'),
            func.coalesce(func.sum(AIUsageLog.tokens_output), 0).label('tokens_output'),
            func.coalesce(func.sum(AIUsageLog.cost_cents), 0).label('cost_cents')
        ).join(
            AIUsageLog, AIUsageLog.user_id == User.id
        ).outerjoin(
            Company, User.company_id == Company.id
        )
        user_query = period_filter(user_query)
        user_stats = user_query.group_by(
            User.id, User.name, User.email, Company.name
        ).order_by(desc('cost_cents')).limit(20).all()

        # Format user stats
        user_rankings = []
        for u in user_stats:
            user_rankings.append({
                'id': u.id,
                'name': u.user_name or u.email,
                'email': u.email,
                'company': u.company_name or '-',
                'requests': u.requests,
                'tokens': int(u.tokens_input) + int(u.tokens_output),
                'cost_cents': float(u.cost_cents or 0),
                'cost_usd': float(u.cost_cents or 0) / 100
            })

        # ========================================
        # COMPANY STATISTICS (filtered by period)
        # ========================================
        company_query = db.query(
            Company.id,
            Company.name,
            func.count(AIUsageLog.id).label('requests'),
            func.count(func.distinct(AIUsageLog.user_id)).label('unique_users'),
            func.coalesce(func.sum(AIUsageLog.tokens_input), 0).label('tokens_input'),
            func.coalesce(func.sum(AIUsageLog.tokens_output), 0).label('tokens_output'),
            func.coalesce(func.sum(AIUsageLog.cost_cents), 0).label('cost_cents')
        ).join(
            User, User.company_id == Company.id
        ).join(
            AIUsageLog, AIUsageLog.user_id == User.id
        )
        company_query = period_filter(company_query)
        company_stats = company_query.group_by(
            Company.id, Company.name
        ).order_by(desc('cost_cents')).limit(20).all()

        # Format company stats
        company_rankings = []
        for c in company_stats:
            company_rankings.append({
                'id': c.id,
                'name': c.name,
                'requests': c.requests,
                'unique_users': c.unique_users,
                'tokens': int(c.tokens_input) + int(c.tokens_output),
                'cost_cents': float(c.cost_cents or 0),
                'cost_usd': float(c.cost_cents or 0) / 100
            })

        # Recent logs with user info
        recent_logs = db.query(AIUsageLog).order_by(desc(AIUsageLog.created_at)).limit(20).all()

        # Enrich recent logs with user names.
        # NOTE(review): this issues one User query per log (N+1, bounded at 20);
        # attributes are attached to ORM instances for template use only.
        for log in recent_logs:
            label, _ = type_labels.get(log.request_type, (log.request_type, 'other'))
            log.type_label = label
            if log.user_id:
                user = db.query(User).filter_by(id=log.user_id).first()
                if user:
                    log.user_name = user.name or user.email
                else:
                    log.user_name = None
            else:
                log.user_name = None

        # Daily history (last 14 days)
        daily_history = db.query(AIUsageDaily).filter(
            AIUsageDaily.date >= today - timedelta(days=14)
        ).order_by(desc(AIUsageDaily.date)).all()

        stats = {
            'today_requests': today_stats.requests or 0,
            'today_tokens_input': int(today_stats.tokens_input) or 0,
            'today_tokens_output': int(today_stats.tokens_output) or 0,
            'today_cost': float(today_stats.cost_cents or 0) / 100,
            'week_requests': week_requests,
            'month_requests': month_stats.requests or 0,
            'month_cost': float(month_stats.cost_cents or 0) / 100,
            'all_requests': all_time_stats.requests or 0,
            'all_cost': float(all_time_stats.cost_cents or 0) / 100,
            'error_rate': error_rate,
            'avg_response_time': int(avg_response_time)
        }

        return render_template(
            'admin/ai_usage_dashboard.html',
            stats=stats,
            usage_by_type=usage_by_type,
            recent_logs=recent_logs,
            daily_history=daily_history,
            user_rankings=user_rankings,
            company_rankings=company_rankings,
            current_period=period,
            period_label=period_label
        )
    finally:
        db.close()
|
|
|
|
|
|
# @app.route('/admin/ai-usage/user/<int:user_id>') # MOVED TO admin.admin_ai_usage_user
|
|
# @login_required
|
|
def _old_admin_ai_usage_user(user_id):
    """Detailed AI usage for a specific user.

    DEPRECATED: route moved to the admin blueprint (admin.admin_ai_usage_user);
    kept here for reference only — the @app.route decorator is commented out.

    Renders 'admin/ai_usage_user.html' with the user's lifetime totals,
    a per-request-type breakdown and a 50-per-page paginated list of their
    AIUsageLog rows (?page= query param). Costs are stored in cents and
    converted to USD for display. Redirects non-admins to the dashboard
    and unknown user ids back to the usage overview.
    """
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))

    from database import AIUsageLog, User, Company
    from sqlalchemy import func, desc

    db = SessionLocal()
    try:
        # Get user info
        user = db.query(User).filter_by(id=user_id).first()
        if not user:
            flash('Użytkownik nie istnieje.', 'error')
            return redirect(url_for('admin_ai_usage'))

        company = None
        if user.company_id:
            company = db.query(Company).filter_by(id=user.company_id).first()

        # Get overall stats for this user.
        # nullif(success, True) turns successful rows into NULL so the
        # count() only tallies failed requests.
        stats = db.query(
            func.count(AIUsageLog.id).label('total_requests'),
            func.coalesce(func.sum(AIUsageLog.tokens_input), 0).label('tokens_input'),
            func.coalesce(func.sum(AIUsageLog.tokens_output), 0).label('tokens_output'),
            func.coalesce(func.sum(AIUsageLog.cost_cents), 0).label('cost_cents'),
            func.count(func.nullif(AIUsageLog.success, True)).label('errors')
        ).filter(AIUsageLog.user_id == user_id).first()

        # Usage by type — request_type -> Polish UI label.
        type_labels = {
            'ai_chat': 'Chat AI',
            'zopk_news_evaluation': 'Ocena newsów ZOP Kaszubia',
            'ai_user_parse': 'Tworzenie user',
            'gbp_audit_ai': 'Audyt GBP',
            'general': 'Ogólne'
        }

        type_stats = db.query(
            AIUsageLog.request_type,
            func.count(AIUsageLog.id).label('count'),
            func.coalesce(func.sum(AIUsageLog.tokens_input + AIUsageLog.tokens_output), 0).label('tokens'),
            func.coalesce(func.sum(AIUsageLog.cost_cents), 0).label('cost_cents')
        ).filter(
            AIUsageLog.user_id == user_id
        ).group_by(AIUsageLog.request_type).order_by(desc('count')).all()

        # Calculate total for percentages (defaults to 1 to avoid div-by-zero).
        total_type_count = sum(t.count for t in type_stats) if type_stats else 1

        # request_type -> CSS class for the breakdown widget.
        type_classes = {
            'ai_chat': 'chat',
            'zopk_news_evaluation': 'news_evaluation',
            'ai_user_parse': 'user_creation',
            'gbp_audit_ai': 'image_analysis',
            'general': 'other'
        }

        usage_by_type = []
        for t in type_stats:
            usage_by_type.append({
                'type': t.request_type,
                'type_label': type_labels.get(t.request_type, t.request_type),
                'type_class': type_classes.get(t.request_type, 'other'),
                'count': t.count,
                'tokens': int(t.tokens),
                'cost_usd': float(t.cost_cents) / 100,
                'percentage': round(t.count / total_type_count * 100, 1) if total_type_count > 0 else 0
            })

        # Get all requests for this user (paginated)
        page = request.args.get('page', 1, type=int)
        per_page = 50

        requests_query = db.query(AIUsageLog).filter(
            AIUsageLog.user_id == user_id
        ).order_by(desc(AIUsageLog.created_at))

        total_requests = requests_query.count()
        # Ceiling division for the page count.
        total_pages = (total_requests + per_page - 1) // per_page

        logs = requests_query.offset((page - 1) * per_page).limit(per_page).all()

        # Enrich logs with type labels and cost (attached for template use).
        for log in logs:
            log.type_label = type_labels.get(log.request_type, log.request_type)
            log.cost_usd = float(log.cost_cents or 0) / 100

        user_stats = {
            'total_requests': stats.total_requests or 0,
            'tokens_total': int(stats.tokens_input or 0) + int(stats.tokens_output or 0),
            'tokens_input': int(stats.tokens_input or 0),
            'tokens_output': int(stats.tokens_output or 0),
            'cost_usd': float(stats.cost_cents or 0) / 100,
            'errors': stats.errors or 0
        }

        return render_template(
            'admin/ai_usage_user.html',
            user=user,
            company=company,
            stats=user_stats,
            usage_by_type=usage_by_type,
            logs=logs,
            page=page,
            total_pages=total_pages,
            total_requests=total_requests
        )
    finally:
        db.close()
|
|
|
|
|
|
# @app.route('/api/admin/chat-stats') # MOVED TO admin.api_chat_stats
|
|
# @login_required
|
|
def _old_api_chat_stats():
    """API: per-day counts of user chat messages for the admin dashboard.

    DEPRECATED: moved to admin.api_chat_stats. Admin-only. Returns JSON
    {'success': True, 'daily_queries': [{'date': 'YYYY-MM-DD', 'count': N}, ...]}
    covering user-authored messages from the trailing 7 days.
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Not authorized'}), 403

    session = SessionLocal()
    try:
        from sqlalchemy import func, desc
        from datetime import timedelta

        # Only messages authored by users (role == 'user') in the last week.
        cutoff = datetime.now() - timedelta(days=7)

        rows = (
            session.query(
                func.date(AIChatMessage.created_at).label('date'),
                func.count(AIChatMessage.id).label('count'),
            )
            .filter(
                AIChatMessage.created_at >= cutoff,
                AIChatMessage.role == 'user',
            )
            .group_by(func.date(AIChatMessage.created_at))
            .order_by('date')
            .all()
        )

        daily = []
        for row in rows:
            daily.append({'date': str(row.date), 'count': row.count})

        return jsonify({'success': True, 'daily_queries': daily})
    finally:
        session.close()
|
|
|
|
|
|
# ============================================================
|
|
# SYSTEM STATUS DASHBOARD (Admin only)
|
|
# MOVED TO blueprints/admin/routes_status.py
|
|
# ============================================================
|
|
|
|
# @app.route('/admin/status') # MOVED TO admin.admin_status
|
|
# @login_required
|
|
def _old_admin_status():
    """System status dashboard with real-time metrics.

    DEPRECATED: route moved to blueprints/admin/routes_status.py
    (admin.admin_status); kept here for reference — decorator commented out.

    Collects host metrics (CPU/RAM/disk/uptime via subprocess, with macOS
    commands tried first and Linux fallbacks in the except branches),
    PostgreSQL metrics, basic app health probes via the internal test
    client, gunicorn process info and a hand-maintained technology-stack
    listing, then renders 'admin/status_dashboard.html'. Every probe is
    wrapped in try/except so a single failing command degrades to None
    instead of breaking the page.
    """
    if not current_user.is_admin:
        flash('Brak uprawnień.', 'error')
        return redirect(url_for('dashboard'))

    import subprocess
    import platform
    from sqlalchemy import func, text

    db = SessionLocal()
    try:
        # Current timestamp
        now = datetime.now()

        # ===== SYSTEM METRICS =====
        system_metrics = {
            'hostname': platform.node(),
            'os': f"{platform.system()} {platform.release()}",
            'python': platform.python_version(),
        }

        # CPU usage (via top command — macOS flags).
        # NOTE(review): on Linux `top -l` fails with a nonzero exit code but
        # subprocess.run does not raise (no check=True), so the Linux fallback
        # below only runs if the command itself is missing — confirm intended.
        try:
            result = subprocess.run(['top', '-l', '1', '-n', '0'], capture_output=True, text=True, timeout=5)
            for line in result.stdout.split('\n'):
                if 'CPU usage' in line:
                    # Parse: "CPU usage: 5.88% user, 8.82% sys, 85.29% idle"
                    parts = line.split(':')[1].strip().split(',')
                    user = float(parts[0].replace('% user', '').strip())
                    sys_cpu = float(parts[1].replace('% sys', '').strip())
                    idle = float(parts[2].replace('% idle', '').strip())
                    system_metrics['cpu_percent'] = round(user + sys_cpu, 1)
                    system_metrics['cpu_idle'] = round(idle, 1)
                    break
        except Exception:
            # Linux fallback — aggregate jiffies from /proc/stat.
            try:
                result = subprocess.run(['grep', 'cpu ', '/proc/stat'], capture_output=True, text=True, timeout=5)
                if result.returncode == 0:
                    parts = result.stdout.split()
                    idle = int(parts[4])
                    total = sum(int(x) for x in parts[1:])
                    system_metrics['cpu_percent'] = round(100 * (1 - idle / total), 1)
                    system_metrics['cpu_idle'] = round(100 * idle / total, 1)
            except Exception:
                system_metrics['cpu_percent'] = None
                system_metrics['cpu_idle'] = None

        # RAM usage
        try:
            # macOS — parse page counts from vm_stat.
            result = subprocess.run(['vm_stat'], capture_output=True, text=True, timeout=5)
            if result.returncode == 0 and 'Pages' in result.stdout:
                lines = result.stdout.strip().split('\n')
                # NOTE(review): hard-coded 16 KiB page size (Apple Silicon);
                # Intel Macs use 4 KiB — confirm if this path still matters.
                page_size = 16384  # bytes
                stats = {}
                for line in lines[1:]:
                    if ':' in line:
                        key, val = line.split(':')
                        stats[key.strip()] = int(val.strip().rstrip('.'))
                free = stats.get('Pages free', 0) * page_size
                active = stats.get('Pages active', 0) * page_size
                inactive = stats.get('Pages inactive', 0) * page_size
                wired = stats.get('Pages wired down', 0) * page_size
                total_used = active + inactive + wired
                total_mem = total_used + free
                system_metrics['ram_total_gb'] = round(total_mem / (1024**3), 1)
                system_metrics['ram_used_gb'] = round(total_used / (1024**3), 1)
                system_metrics['ram_percent'] = round(100 * total_used / total_mem, 1)
            else:
                # Force the Linux fallback below.
                raise Exception("Not macOS")
        except Exception:
            # Linux fallback — `free -b` second line: total / used columns.
            try:
                result = subprocess.run(['free', '-b'], capture_output=True, text=True, timeout=5)
                if result.returncode == 0:
                    lines = result.stdout.strip().split('\n')
                    mem_line = lines[1].split()
                    total = int(mem_line[1])
                    used = int(mem_line[2])
                    system_metrics['ram_total_gb'] = round(total / (1024**3), 1)
                    system_metrics['ram_used_gb'] = round(used / (1024**3), 1)
                    system_metrics['ram_percent'] = round(100 * used / total, 1)
            except Exception:
                system_metrics['ram_total_gb'] = None
                system_metrics['ram_used_gb'] = None
                system_metrics['ram_percent'] = None

        # Disk usage — `df -h /`, second line columns: size, used, ..., use%.
        try:
            result = subprocess.run(['df', '-h', '/'], capture_output=True, text=True, timeout=5)
            if result.returncode == 0:
                lines = result.stdout.strip().split('\n')
                parts = lines[1].split()
                system_metrics['disk_total'] = parts[1]
                system_metrics['disk_used'] = parts[2]
                system_metrics['disk_percent'] = int(parts[4].replace('%', ''))
        except Exception:
            system_metrics['disk_total'] = None
            system_metrics['disk_used'] = None
            system_metrics['disk_percent'] = None

        # System uptime — slice the "up ...," fragment out of `uptime` output.
        try:
            result = subprocess.run(['uptime'], capture_output=True, text=True, timeout=5)
            if result.returncode == 0:
                system_metrics['uptime'] = result.stdout.strip().split('up')[1].split(',')[0].strip()
        except Exception:
            system_metrics['uptime'] = None

        # ===== DATABASE METRICS =====
        db_metrics = {}

        try:
            # PostgreSQL version
            version_result = db.execute(text("SELECT version()")).scalar()
            # Extract just version number: "PostgreSQL 16.11 ..." -> "16.11"
            if version_result:
                import re
                match = re.search(r'PostgreSQL (\d+\.\d+)', version_result)
                db_metrics['version'] = match.group(1) if match else version_result.split()[1]

            # Database size
            result = db.execute(text("SELECT pg_database_size(current_database())")).scalar()
            db_metrics['size_mb'] = round(result / (1024 * 1024), 2)

            # Active connections
            result = db.execute(text("SELECT count(*) FROM pg_stat_activity WHERE state = 'active'")).scalar()
            db_metrics['active_connections'] = result

            # Total connections
            result = db.execute(text("SELECT count(*) FROM pg_stat_activity")).scalar()
            db_metrics['total_connections'] = result

            # Table counts
            db_metrics['companies'] = db.query(Company).count()
            db_metrics['users'] = db.query(User).count()

            # Get additional counts if tables exist
            try:
                from database import ChatMessage, ChatSession, CompanySocialMedia, SEOMetrics
                db_metrics['chat_messages'] = db.query(ChatMessage).count()
                db_metrics['chat_sessions'] = db.query(ChatSession).count()
                db_metrics['social_media'] = db.query(CompanySocialMedia).count()
                db_metrics['seo_audits'] = db.query(SEOMetrics).count()
            except Exception:
                pass

            db_metrics['status'] = 'ok'
        except Exception as e:
            db_metrics['status'] = 'error'
            db_metrics['error'] = str(e)[:100]

        # ===== APPLICATION METRICS =====
        app_metrics = {}

        # Health check - test key endpoints via the in-process test client.
        try:
            with app.test_client() as client:
                endpoints_ok = 0
                endpoints_total = 5
                test_endpoints = ['/', '/login', '/api/companies', '/health', '/search?q=test']
                for ep in test_endpoints:
                    try:
                        response = client.get(ep, follow_redirects=False)
                        if response.status_code in (200, 302, 304):
                            endpoints_ok += 1
                    except Exception:
                        pass
                app_metrics['endpoints_ok'] = endpoints_ok
                app_metrics['endpoints_total'] = endpoints_total
                app_metrics['endpoints_percent'] = round(100 * endpoints_ok / endpoints_total, 0)
        except Exception:
            app_metrics['endpoints_ok'] = None

        # Users statistics
        app_metrics['admins'] = db.query(User).filter(User.is_admin == True).count()
        app_metrics['users_with_2fa'] = db.query(User).filter(User.totp_enabled == True).count()

        # Recent activity (last 24h)
        yesterday = now - timedelta(days=1)
        try:
            app_metrics['logins_24h'] = db.query(AuditLog).filter(
                AuditLog.action == 'login',
                AuditLog.created_at >= yesterday
            ).count()
        except Exception:
            app_metrics['logins_24h'] = 0

        # Security alerts (last 24h)
        try:
            app_metrics['alerts_24h'] = db.query(SecurityAlert).filter(
                SecurityAlert.created_at >= yesterday
            ).count()
        except Exception:
            app_metrics['alerts_24h'] = 0

        # ===== GUNICORN/PROCESS METRICS =====
        process_metrics = {}
        try:
            result = subprocess.run(['pgrep', '-f', 'gunicorn'], capture_output=True, text=True, timeout=5)
            if result.returncode == 0:
                pids = result.stdout.strip().split('\n')
                process_metrics['gunicorn_workers'] = len(pids) - 1  # -1 for master
                process_metrics['gunicorn_status'] = 'running'
            else:
                process_metrics['gunicorn_status'] = 'not found'
        except Exception:
            process_metrics['gunicorn_status'] = 'unknown'

        # ===== TECHNOLOGY STACK =====
        import flask
        import sqlalchemy
        # Technology stack - ONLY VERIFIED VERSIONS (checked via SSH 2026-01-14)
        # Dynamic versions are fetched at runtime, static ones were verified manually
        technology_stack = {
            'programming': [
                {'name': 'Python', 'version': platform.python_version(), 'icon': '🐍', 'category': 'Backend'},
                {'name': 'Flask', 'version': flask.__version__, 'icon': '🌶️', 'category': 'Web Framework'},
                {'name': 'SQLAlchemy', 'version': sqlalchemy.__version__, 'icon': '🗃️', 'category': 'ORM'},
                {'name': 'Jinja2', 'version': '3.1.6', 'icon': '📄', 'category': 'Templating'},
                {'name': 'Werkzeug', 'version': '3.1.3', 'icon': '🔧', 'category': 'WSGI Toolkit'},
            ],
            'databases': [
                {'name': 'PostgreSQL', 'version': db_metrics.get('version', 'N/A'), 'icon': '🐘', 'category': 'Primary DB'},
            ],
            'ai': [
                {'name': 'Google Gemini', 'version': '3 Flash', 'icon': '🤖', 'category': 'AI Chat'},
                {'name': 'Brave Search API', 'version': 'v1', 'icon': '🔍', 'category': 'News Search'},
                {'name': 'Google PageSpeed', 'version': 'v5', 'icon': '⚡', 'category': 'SEO Audit'},
            ],
            'infrastructure': [
                {'name': 'Proxmox VE', 'version': '9.1.1', 'icon': '🖥️', 'category': 'Wirtualizacja'},
                {'name': 'Ubuntu Server', 'version': '24.04.3 LTS', 'icon': '🐧', 'category': 'System OS'},
                {'name': 'Nginx', 'version': '1.24.0', 'icon': '🔧', 'category': 'Web Server'},
            ],
            'network': [
                {'name': 'Fortigate 500D', 'version': None, 'icon': '🛡️', 'category': 'Firewall/VPN'},
                {'name': 'Nginx Proxy Manager', 'version': '2.12.6', 'icon': '🔀', 'category': 'Reverse Proxy'},
                {'name': 'Docker', 'version': '28.2.2', 'icon': '🐳', 'category': 'Containers'},
                {'name': "Let's Encrypt", 'version': 'ACME v2', 'icon': '🔒', 'category': 'SSL/TLS'},
            ],
            'security': [
                {'name': 'Flask-Login', 'version': '0.6.3', 'icon': '🔐', 'category': 'Autentykacja'},
                {'name': 'Flask-WTF', 'version': '1.2.2', 'icon': '🛡️', 'category': 'CSRF Protection'},
                {'name': 'Flask-Limiter', 'version': '4.0.0', 'icon': '⏱️', 'category': 'Rate Limiting'},
                {'name': 'geoip2', 'version': '5.2.0', 'icon': '🌍', 'category': 'GeoIP Blocking'},
                {'name': 'PyOTP', 'version': '2.9.0', 'icon': '📱', 'category': '2FA/TOTP'},
            ],
            'devops': [
                {'name': 'Git', 'version': '2.43.0', 'icon': '📦', 'category': 'Version Control'},
                {'name': 'Gitea', 'version': '1.22.6', 'icon': '🍵', 'category': 'Git Server'},
                {'name': 'systemd', 'version': '255', 'icon': '⚙️', 'category': 'Service Manager'},
            ],
            'servers': [
                {'name': 'NORDABIZ-01', 'ip': '10.22.68.249', 'icon': '🖥️', 'role': 'App Server (VM 249)'},
                {'name': 'R11-REVPROXY-01', 'ip': '10.22.68.250', 'icon': '🔀', 'role': 'Reverse Proxy (VM 119)'},
                {'name': 'R11-DNS-01', 'ip': '10.22.68.171', 'icon': '📡', 'role': 'DNS Server (VM 122)'},
                {'name': 'R11-GIT-INPI', 'ip': '10.22.68.180', 'icon': '📦', 'role': 'Git Server (VM 180)'},
            ],
        }

        return render_template(
            'admin/status_dashboard.html',
            system_metrics=system_metrics,
            db_metrics=db_metrics,
            app_metrics=app_metrics,
            process_metrics=process_metrics,
            technology_stack=technology_stack,
            generated_at=now
        )
    finally:
        db.close()
|
|
|
|
|
|
# @app.route('/api/admin/status') # MOVED TO admin.api_admin_status
|
|
# @login_required
|
|
def _old_api_admin_status():
    """API endpoint for status dashboard auto-refresh.

    DEPRECATED: route moved to the admin blueprint (admin.api_admin_status);
    kept here for reference — decorator commented out.

    Admin-only JSON snapshot used for periodic refresh: Linux-only
    CPU/RAM/disk probes (each degrading to None on failure), the
    active-connection count from pg_stat_activity, and the 24h security
    alert count. Shorter 2s subprocess timeouts than the full dashboard,
    since this is polled.
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Not authorized'}), 403

    import subprocess
    import platform
    from sqlalchemy import text

    db = SessionLocal()
    try:
        now = datetime.now()
        data = {'timestamp': now.isoformat()}

        # System metrics
        system = {}
        try:
            # CPU (Linux) — jiffie totals from /proc/stat.
            result = subprocess.run(['grep', 'cpu ', '/proc/stat'], capture_output=True, text=True, timeout=2)
            if result.returncode == 0:
                parts = result.stdout.split()
                idle = int(parts[4])
                total = sum(int(x) for x in parts[1:])
                system['cpu_percent'] = round(100 * (1 - idle / total), 1)
        except Exception:
            system['cpu_percent'] = None

        try:
            # RAM (Linux) — `free -b` second line: total / used.
            result = subprocess.run(['free', '-b'], capture_output=True, text=True, timeout=2)
            if result.returncode == 0:
                lines = result.stdout.strip().split('\n')
                mem_line = lines[1].split()
                total = int(mem_line[1])
                used = int(mem_line[2])
                system['ram_percent'] = round(100 * used / total, 1)
        except Exception:
            system['ram_percent'] = None

        try:
            # Disk — use% column of `df -h /`.
            result = subprocess.run(['df', '-h', '/'], capture_output=True, text=True, timeout=2)
            if result.returncode == 0:
                lines = result.stdout.strip().split('\n')
                parts = lines[1].split()
                system['disk_percent'] = int(parts[4].replace('%', ''))
        except Exception:
            system['disk_percent'] = None

        data['system'] = system

        # Database metrics
        db_data = {}
        try:
            db_data['active_connections'] = db.execute(text("SELECT count(*) FROM pg_stat_activity WHERE state = 'active'")).scalar()
            db_data['status'] = 'ok'
        except Exception as e:
            db_data['status'] = 'error'
            db_data['error'] = str(e)[:50]

        data['database'] = db_data

        # App metrics — security alerts raised in the last 24h.
        yesterday = now - timedelta(days=1)
        app_data = {
            'alerts_24h': db.query(SecurityAlert).filter(SecurityAlert.created_at >= yesterday).count()
        }
        data['app'] = app_data

        return jsonify(data)
    finally:
        db.close()
|
|
|
|
|
|
# ============================================================
|
|
# DEBUG PANEL (Admin only)
|
|
# ============================================================
|
|
|
|
# @app.route('/admin/health') # MOVED TO admin.admin_health
|
|
# @login_required
|
|
def _old_admin_health():
    """
    Graphical health check dashboard.

    Shows status of all critical endpoints with visual indicators.

    DEPRECATED: route moved to the admin blueprint (admin.admin_health);
    kept here for reference — decorator commented out.

    Probes a fixed list of routes plus selected company profiles via the
    in-process test client, classifies each response (ok / warning /
    not_found / error), groups results by category and renders
    'admin/health_dashboard.html' with a summary.
    """
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))

    from datetime import datetime

    results = []
    # Result buckets keyed by category slug; each endpoint result is
    # appended both here and to the flat `results` list for the summary.
    categories = {
        'public': {'name': 'Strony publiczne', 'icon': '🌐', 'endpoints': []},
        'auth': {'name': 'Autentykacja', 'icon': '🔐', 'endpoints': []},
        'api': {'name': 'API', 'icon': '⚡', 'endpoints': []},
        'admin': {'name': 'Panel admina', 'icon': '👨‍💼', 'endpoints': []},
        'company': {'name': 'Profile firm', 'icon': '🏢', 'endpoints': []},
    }

    # Endpoints to check (path, name, category)
    endpoints = [
        ('/', 'Strona główna', 'public'),
        ('/release-notes', 'Historia zmian', 'public'),
        ('/search?q=test', 'Wyszukiwarka', 'public'),
        ('/chat', 'NordaGPT Chat', 'public'),
        ('/raporty/', 'Raporty', 'public'),
        ('/login', 'Logowanie', 'auth'),
        ('/register', 'Rejestracja', 'auth'),
        ('/api/companies', 'Lista firm', 'api'),
        ('/health', 'Health check', 'api'),
        ('/admin/security', 'Bezpieczeństwo', 'admin'),
        ('/admin/seo', 'SEO Audit', 'admin'),
        ('/admin/social-media', 'Social Media', 'admin'),
        ('/admin/analytics', 'Analityka', 'admin'),
        ('/admin/forum', 'Forum', 'admin'),
        ('/admin/kalendarz', 'Kalendarz', 'admin'),
        ('/admin/status', 'Status systemu', 'admin'),
        ('/admin/fees', 'Składki (FIS)', 'admin'),
        ('/admin/zopk/news', 'ZOPK News', 'admin'),
        ('/admin/recommendations', 'Rekomendacje', 'admin'),
    ]

    # Add company profiles: INPI, Waterm (fixed) + 3 random
    db = SessionLocal()
    try:
        import random as rnd

        # Fixed companies to always check
        fixed_companies = db.query(Company).filter(
            Company.name.ilike('%INPI%') | Company.name.ilike('%Waterm%')
        ).all()

        for company in fixed_companies:
            endpoints.append((f'/company/{company.slug}', company.name[:30], 'company'))

        # 3 random companies (excluding fixed ones)
        fixed_ids = [c.id for c in fixed_companies]
        all_other = db.query(Company).filter(~Company.id.in_(fixed_ids)).all()
        random_companies = rnd.sample(all_other, min(3, len(all_other)))

        for company in random_companies:
            endpoints.append((f'/company/{company.slug}', f'{company.name[:25]}...', 'company'))

    finally:
        db.close()

    # Test each endpoint
    with app.test_client() as client:
        for path, name, category in endpoints:
            start_time = datetime.now()
            try:
                response = client.get(path, follow_redirects=False)
                status_code = response.status_code
                response_time = (datetime.now() - start_time).total_seconds() * 1000  # ms

                # Determine status
                # 429 = rate limited (endpoint works, just protected)
                # 403 = forbidden (endpoint works, requires auth)
                if status_code in (200, 302, 304, 429):
                    status = 'ok'
                elif status_code == 404:
                    status = 'not_found'
                elif status_code >= 500:
                    status = 'error'
                else:
                    status = 'warning'

                result = {
                    'path': path,
                    'name': name,
                    'status_code': status_code,
                    'status': status,
                    'response_time': round(response_time, 1),
                    'error': None
                }

            except Exception as e:
                result = {
                    'path': path,
                    'name': name,
                    'status_code': 500,
                    'status': 'error',
                    'response_time': None,
                    'error': str(e)[:100]
                }

            categories[category]['endpoints'].append(result)
            results.append(result)

    # Summary stats
    total = len(results)
    ok_count = sum(1 for r in results if r['status'] == 'ok')
    warning_count = sum(1 for r in results if r['status'] == 'warning')
    error_count = sum(1 for r in results if r['status'] in ('error', 'not_found'))
    # NOTE(review): entries with response_time=None are excluded from the sum
    # but still counted in `total`, slightly deflating the average — confirm.
    avg_response_time = sum(r['response_time'] for r in results if r['response_time']) / total if total else 0

    summary = {
        'total': total,
        'ok': ok_count,
        'warning': warning_count,
        'error': error_count,
        'health_percent': round(100 * ok_count / total, 1) if total else 0,
        'avg_response_time': round(avg_response_time, 1),
        'overall_status': 'ok' if error_count == 0 else ('degraded' if ok_count > error_count else 'critical')
    }

    return render_template(
        'admin/health_dashboard.html',
        categories=categories,
        summary=summary,
        generated_at=datetime.now()
    )
|
|
|
|
|
|
# @app.route('/api/admin/health') # MOVED TO admin.api_admin_health
|
|
# @login_required
|
|
def _old_api_admin_health():
    """API endpoint for health dashboard auto-refresh.

    DEPRECATED: moved to admin.api_admin_health. Admin-only. Probes a
    fixed list of routes via the in-process test client and returns a
    JSON summary of which responded with an acceptable status code.
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Not authorized'}), 403

    # (path, display name) pairs probed below.
    endpoints = [
        ('/', 'Strona główna'),
        ('/release-notes', 'Historia zmian'),
        ('/search?q=test', 'Wyszukiwarka'),
        ('/chat', 'NordaGPT Chat'),
        ('/login', 'Logowanie'),
        ('/api/companies', 'Lista firm'),
        ('/health', 'Health check'),
        ('/admin/security', 'Bezpieczeństwo'),
        ('/admin/status', 'Status systemu'),
        ('/admin/fees', 'Składki (FIS)'),
        ('/admin/zopk/news', 'ZOPK News'),
    ]

    results = []
    with app.test_client() as client:
        for path, name in endpoints:
            try:
                code = client.get(path, follow_redirects=False).status_code
                # 429 = rate limited, endpoint works
                results.append({
                    'path': path,
                    'name': name,
                    'status': code,
                    'ok': code in (200, 302, 304, 429),
                })
            except Exception as exc:
                results.append({'path': path, 'name': name, 'status': 500, 'ok': False, 'error': str(exc)[:50]})

    healthy = sum(1 for entry in results if entry['ok'])

    return jsonify({
        'success': True,
        'timestamp': datetime.now().isoformat(),
        'results': results,
        'summary': {
            'total': len(results),
            'ok': healthy,
            'failed': len(results) - healthy,
            'health_percent': round(100 * healthy / len(results), 1)
        }
    })
|
|
|
|
|
|
# @app.route('/admin/debug') # MOVED TO admin.debug_panel
# @login_required
def _old_debug_panel():
    """Render the real-time debug panel for monitoring app activity.

    Admins get the panel; everyone else is flashed an error and sent
    back to the dashboard.
    """
    if current_user.is_admin:
        return render_template('admin/debug.html')
    flash('Brak uprawnień do tej strony.', 'error')
    return redirect(url_for('dashboard'))
|
|
|
|
|
|
# @app.route('/api/admin/logs') # MOVED TO admin.api_get_logs
# @login_required
def _old_api_get_logs():
    """API: Get recent in-memory log entries (admin only).

    Query parameters:
        level: optional level filter (DEBUG, INFO, WARNING, ERROR)
        since: optional ISO timestamp; only entries newer than it are returned
        limit: max entries to return (default 100, clamped to 1..500)

    Returns JSON ``{success, logs, total}`` where ``total`` is the size of
    the whole ring buffer, not the filtered result.
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Not authorized'}), 403

    # Get optional filters
    level = request.args.get('level', '')  # DEBUG, INFO, WARNING, ERROR
    since = request.args.get('since', '')  # ISO timestamp

    # Parse defensively: a non-numeric ?limit= previously raised ValueError
    # and produced an unhandled 500.
    try:
        limit = int(request.args.get('limit', 100))
    except (TypeError, ValueError):
        limit = 100
    # Clamp to a sane range. limit <= 0 previously returned the WHOLE buffer
    # (logs[-0:] == logs[:]), silently defeating the 500-entry cap.
    limit = max(1, min(limit, 500))

    logs = list(debug_handler.logs)

    # Filter by level (case-insensitive on the query side)
    if level:
        logs = [entry for entry in logs if entry['level'] == level.upper()]

    # Filter by timestamp; ISO-8601 strings compare correctly as text
    if since:
        logs = [entry for entry in logs if entry['timestamp'] > since]

    # Return the most recent `limit` entries
    logs = logs[-limit:]

    return jsonify({
        'success': True,
        'logs': logs,
        'total': len(debug_handler.logs)
    })
|
|
|
|
|
|
# @app.route('/api/admin/logs/stream') # MOVED TO admin.api_logs_stream
# @login_required
def _old_api_logs_stream():
    """SSE endpoint streaming new log entries in real time (admin only).

    Polls the shared ring buffer every 0.5 s and emits each new entry as a
    Server-Sent Event (``data: <json>\\n\\n``). NOTE(review): if the buffer
    is a bounded deque that overflows between polls, index-based slicing may
    skip or repeat entries — acceptable for a debug panel.
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Not authorized'}), 403

    def generate():
        last_count = 0
        while True:
            current_count = len(debug_handler.logs)
            if current_count > last_count:
                # Send only the entries added since the previous poll
                new_logs = list(debug_handler.logs)[last_count:]
                for log in new_logs:
                    yield f"data: {json.dumps(log)}\n\n"
                last_count = current_count
            # `time` is imported at module level; the original re-executed
            # `import time` on every loop iteration.
            time.sleep(0.5)

    return Response(generate(), mimetype='text/event-stream')
|
|
|
|
|
|
# @app.route('/api/admin/logs/clear', methods=['POST']) # MOVED TO admin.api_clear_logs
# @login_required
def _old_api_clear_logs():
    """API: empty the in-memory log ring buffer (admin only)."""
    if current_user.is_admin:
        debug_handler.logs.clear()
        logger.info("Log buffer cleared by admin")
        return jsonify({'success': True})
    return jsonify({'success': False, 'error': 'Not authorized'}), 403
|
|
|
|
|
|
# @app.route('/api/admin/test-log', methods=['POST']) # MOVED TO admin.api_test_log
# @login_required
def _old_api_test_log():
    """API: emit one log entry per severity level to exercise the panel."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Not authorized'}), 403

    # One message per level, table-driven so the levels stay in sync.
    for emit, text in (
        (logger.debug, "Test DEBUG message"),
        (logger.info, "Test INFO message"),
        (logger.warning, "Test WARNING message"),
        (logger.error, "Test ERROR message"),
    ):
        emit(text)

    return jsonify({'success': True, 'message': 'Test logs generated'})
|
|
|
|
|
|
# @app.route('/admin/digital-maturity') # MOVED TO admin.digital_maturity_dashboard
# @login_required
def _old_digital_maturity_dashboard():
    """Admin dashboard for digital maturity assessment results.

    Joins each Company with its CompanyDigitalMaturity and
    CompanyWebsiteAnalysis rows (inner joins — companies missing either
    record are excluded), computes aggregate stats and top/bottom lists,
    and renders ``admin/digital_maturity.html``. Admin only.
    """
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))

    db = SessionLocal()
    try:
        from sqlalchemy import func, desc

        # One row per company with maturity data; sorted best-first so a
        # simple slice later yields the top performers.
        companies_query = db.query(
            Company.id,
            Company.name,
            Company.slug,
            Company.website,
            CompanyDigitalMaturity.overall_score,
            CompanyDigitalMaturity.online_presence_score,
            CompanyDigitalMaturity.sales_readiness,
            CompanyDigitalMaturity.total_opportunity_value,
            CompanyWebsiteAnalysis.opportunity_score,
            CompanyWebsiteAnalysis.has_blog,
            CompanyWebsiteAnalysis.has_portfolio,
            CompanyWebsiteAnalysis.has_contact_form,
            CompanyWebsiteAnalysis.content_richness_score,
            CompanyDigitalMaturity.critical_gaps,
            CompanyWebsiteAnalysis.missing_features
        ).join(
            CompanyDigitalMaturity, Company.id == CompanyDigitalMaturity.company_id
        ).join(
            CompanyWebsiteAnalysis, Company.id == CompanyWebsiteAnalysis.company_id
        ).filter(
            CompanyDigitalMaturity.overall_score > 0
        ).order_by(
            desc(CompanyDigitalMaturity.overall_score)
        ).all()

        # Aggregate stats; guarded against an empty result set.
        total_analyzed = len(companies_query)
        avg_score = round(sum(c.overall_score for c in companies_query) / total_analyzed, 1) if total_analyzed else 0
        # total_opportunity_value may be NULL or Decimal — coerce to float.
        total_opportunity = sum(float(c.total_opportunity_value or 0) for c in companies_query)

        warm_leads = [c for c in companies_query if c.sales_readiness == 'warm']
        cold_leads = [c for c in companies_query if c.sales_readiness == 'cold']

        # Top 10 (query is already score-descending) and bottom 10.
        top_performers = companies_query[:10]
        bottom_performers = sorted(companies_query, key=lambda c: c.overall_score)[:10]

        # Top opportunities by estimated monetary value, highest first.
        top_opportunities = sorted(
            companies_query,
            key=lambda c: float(c.total_opportunity_value or 0),
            reverse=True
        )[:10]

        return render_template('admin/digital_maturity.html',
            total_analyzed=total_analyzed,
            avg_score=avg_score,
            total_opportunity=total_opportunity,
            warm_leads_count=len(warm_leads),
            cold_leads_count=len(cold_leads),
            top_performers=top_performers,
            bottom_performers=bottom_performers,
            top_opportunities=top_opportunities,
            all_companies=companies_query
        )
    finally:
        # Always release the session, even when rendering raises.
        db.close()
|
|
|
|
|
|
# @app.route('/admin/social-media') # MOVED TO admin.admin_social_media
# @login_required
def _old_admin_social_media():
    """Admin dashboard for social media analytics.

    Builds per-platform counts, per-company platform combinations,
    single-platform and all-major-platform cohorts, a full entry list,
    and verification-freshness stats; renders
    ``admin/social_media.html``. Admin only.
    """
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))

    db = SessionLocal()
    try:
        from sqlalchemy import func, case, distinct
        from database import CompanySocialMedia

        # Total valid profile counts per platform, plus distinct companies.
        platform_stats = db.query(
            CompanySocialMedia.platform,
            func.count(CompanySocialMedia.id).label('count'),
            func.count(distinct(CompanySocialMedia.company_id)).label('companies')
        ).filter(
            CompanySocialMedia.is_valid == True
        ).group_by(CompanySocialMedia.platform).all()

        # One row per company with an aggregated array of its platforms.
        # Outer join: companies with no valid profiles get [None].
        company_platforms = db.query(
            Company.id,
            Company.name,
            Company.slug,
            func.array_agg(distinct(CompanySocialMedia.platform)).label('platforms')
        ).outerjoin(
            CompanySocialMedia,
            (Company.id == CompanySocialMedia.company_id) & (CompanySocialMedia.is_valid == True)
        ).group_by(Company.id, Company.name, Company.slug).all()

        # Split into with/without social media; array_agg over the outer
        # join yields [None] for companies with no matches.
        total_companies = len(company_platforms)
        companies_with_sm = [c for c in company_platforms if c.platforms and c.platforms[0] is not None]
        companies_without_sm = [c for c in company_platforms if not c.platforms or c.platforms[0] is None]

        # Group companies by their exact (sorted) platform combination,
        # e.g. "facebook, instagram".
        platform_combos_raw = {}
        for c in companies_with_sm:
            platforms = sorted([p for p in c.platforms if p]) if c.platforms else []
            key = ', '.join(platforms) if platforms else 'Brak'
            if key not in platform_combos_raw:
                platform_combos_raw[key] = []
            platform_combos_raw[key].append({'id': c.id, 'name': c.name, 'slug': c.slug})

        # Sort combinations by number of companies (descending).
        platform_combos = dict(sorted(platform_combos_raw.items(), key=lambda x: len(x[1]), reverse=True))

        # Single-platform cohorts (exact set match).
        only_facebook = [c for c in companies_with_sm if set(c.platforms) == {'facebook'}]
        only_linkedin = [c for c in companies_with_sm if set(c.platforms) == {'linkedin'}]
        only_instagram = [c for c in companies_with_sm if set(c.platforms) == {'instagram'}]
        # Companies covering all three major platforms (FB + LI + IG).
        has_all_major = [c for c in companies_with_sm if {'facebook', 'linkedin', 'instagram'}.issubset(set(c.platforms or []))]

        # Full entry list (including invalid ones — no is_valid filter here)
        # with company info for the detailed table.
        all_entries = db.query(
            CompanySocialMedia,
            Company.name.label('company_name'),
            Company.slug.label('company_slug')
        ).join(Company).order_by(
            Company.name, CompanySocialMedia.platform
        ).all()

        # Verification freshness: entries checked within 30 days vs. stale
        # (>90 days). Local import shadows the module-level datetime import.
        from datetime import datetime, timedelta
        now = datetime.now()
        fresh_30d = db.query(func.count(CompanySocialMedia.id)).filter(
            CompanySocialMedia.verified_at >= now - timedelta(days=30)
        ).scalar()
        stale_90d = db.query(func.count(CompanySocialMedia.id)).filter(
            CompanySocialMedia.verified_at < now - timedelta(days=90)
        ).scalar()

        # NOTE(review): companies_with_sm is passed as a count while
        # companies_without_sm is passed as a list — matches the template's
        # current expectations; confirm before "fixing".
        return render_template('admin/social_media.html',
            platform_stats=platform_stats,
            total_companies=total_companies,
            companies_with_sm=len(companies_with_sm),
            companies_without_sm=companies_without_sm,
            platform_combos=platform_combos,
            only_facebook=only_facebook,
            only_linkedin=only_linkedin,
            only_instagram=only_instagram,
            has_all_major=has_all_major,
            all_entries=all_entries,
            fresh_30d=fresh_30d,
            stale_90d=stale_90d,
            now=now
        )
    finally:
        db.close()
|
|
|
|
|
|
# ============================================================
|
|
# SOCIAL MEDIA AUDIT ADMIN DASHBOARD
|
|
# ============================================================
|
|
|
|
# @app.route('/admin/social-audit') # MOVED TO admin.admin_social_audit
# @login_required
def _old_admin_social_audit():
    """
    Admin dashboard for Social Media audit overview.

    Displays:
    - Summary stats (coverage per platform, total profiles)
    - Platform coverage with progress bars
    - Sortable table with platform icons per company
    - Followers aggregate statistics

    Access: Admin only.
    """
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))

    db = SessionLocal()
    try:
        from sqlalchemy import func, distinct
        from database import CompanySocialMedia, Category

        # Platform definitions tracked by the audit.
        platforms = ['facebook', 'instagram', 'linkedin', 'youtube', 'twitter', 'tiktok']

        # Total active companies (denominator for coverage percentages).
        total_companies = db.query(func.count(Company.id)).filter(Company.status == 'active').scalar()

        # All active companies with their category name (outer join so
        # uncategorized companies are included with category_name = None).
        companies_query = db.query(
            Company.id,
            Company.name,
            Company.slug,
            Company.website,
            Category.name.label('category_name')
        ).outerjoin(
            Category,
            Company.category_id == Category.id
        ).filter(
            Company.status == 'active'
        ).order_by(Company.name).all()

        # All valid social media profiles, fetched in one query to avoid
        # a per-company lookup.
        social_data = db.query(
            CompanySocialMedia.company_id,
            CompanySocialMedia.platform,
            CompanySocialMedia.url,
            CompanySocialMedia.followers_count,
            CompanySocialMedia.verified_at,
            CompanySocialMedia.is_valid
        ).filter(
            CompanySocialMedia.is_valid == True
        ).all()

        # Index: company_id -> {platform: {url, followers, verified_at}}.
        company_social = {}
        for sm in social_data:
            if sm.company_id not in company_social:
                company_social[sm.company_id] = {}
            company_social[sm.company_id][sm.platform] = {
                'url': sm.url,
                'followers': sm.followers_count or 0,
                'verified_at': sm.verified_at
            }

        # Build the per-company rows for the table.
        companies = []
        for row in companies_query:
            sm_data = company_social.get(row.id, {})
            total_followers = sum(p.get('followers', 0) for p in sm_data.values())
            platform_count = len(sm_data)

            # Most recent verification date across this company's platforms.
            verified_dates = [p.get('verified_at') for p in sm_data.values() if p.get('verified_at')]
            last_verified = max(verified_dates) if verified_dates else None

            companies.append({
                'id': row.id,
                'name': row.name,
                'slug': row.slug,
                'website': row.website,
                'category': row.category_name,
                'platforms': sm_data,
                'platform_count': platform_count,
                'total_followers': total_followers,
                'last_verified': last_verified,
                # Per-platform flags used by the template for icon columns.
                'has_facebook': 'facebook' in sm_data,
                'has_instagram': 'instagram' in sm_data,
                'has_linkedin': 'linkedin' in sm_data,
                'has_youtube': 'youtube' in sm_data,
                'has_twitter': 'twitter' in sm_data,
                'has_tiktok': 'tiktok' in sm_data
            })

        # Coverage per platform: distinct companies with a valid profile.
        platform_stats = {}
        for platform in platforms:
            count = db.query(func.count(distinct(CompanySocialMedia.company_id))).filter(
                CompanySocialMedia.platform == platform,
                CompanySocialMedia.is_valid == True
            ).scalar() or 0
            platform_stats[platform] = {
                'count': count,
                'percent': round(count / total_companies * 100) if total_companies > 0 else 0
            }

        # Summary stats across all companies.
        companies_with_sm = len([c for c in companies if c['platform_count'] > 0])
        companies_without_sm = total_companies - companies_with_sm
        total_profiles = sum(c['platform_count'] for c in companies)
        total_followers = sum(c['total_followers'] for c in companies)

        # Top 10 companies by total follower count (zero-follower rows excluded).
        top_followers = sorted([c for c in companies if c['total_followers'] > 0],
                               key=lambda x: x['total_followers'], reverse=True)[:10]

        stats = {
            'total_companies': total_companies,
            'companies_with_sm': companies_with_sm,
            'companies_without_sm': companies_without_sm,
            'total_profiles': total_profiles,
            'total_followers': total_followers,
            'platform_stats': platform_stats
        }

        # Unique category names for the filter dropdown.
        categories = sorted(set(c['category'] for c in companies if c['category']))

        # Lightweight attribute-access wrapper so the template can use
        # dot notation (company.name) instead of dict subscripts.
        class CompanyRow:
            def __init__(self, data):
                for key, value in data.items():
                    setattr(self, key, value)

        companies_objects = [CompanyRow(c) for c in companies]
        top_followers_objects = [CompanyRow(c) for c in top_followers]

        return render_template('admin/social_audit_dashboard.html',
            companies=companies_objects,
            stats=stats,
            categories=categories,
            platforms=platforms,
            top_followers=top_followers_objects,
            now=datetime.now()
        )
    finally:
        db.close()
|
|
|
|
|
|
# ============================================================
|
|
# IT AUDIT ADMIN DASHBOARD
|
|
# ============================================================
|
|
|
|
# @app.route('/admin/it-audit') # MOVED TO admin.admin_it_audit
# @login_required
def _old_admin_it_audit():
    """
    Admin dashboard for IT audit overview.

    Displays:
    - Summary stats (audit count, average scores, maturity distribution)
    - Technology adoption stats (Azure AD, M365, PBS, Zabbix, EDR, DR)
    - Collaboration flags distribution
    - Company table with IT audit data
    - Collaboration matches matrix

    Access: Admin only
    """
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))

    db = SessionLocal()
    try:
        from sqlalchemy import func, distinct

        # Import IT audit models and scoring/labeling helper.
        from database import ITAudit, ITCollaborationMatch
        from it_audit_service import get_maturity_level_label

        # Subquery: latest audit date per company, so the main query joins
        # only each company's most recent audit (or none).
        latest_audit_subq = db.query(
            ITAudit.company_id,
            func.max(ITAudit.audit_date).label('max_date')
        ).group_by(ITAudit.company_id).subquery()

        # All active companies, outer-joined to their latest audit row;
        # companies without any audit get NULL audit columns.
        companies_query = db.query(
            Company.id,
            Company.name,
            Company.slug,
            ITAudit.id.label('audit_id'),
            ITAudit.overall_score,
            ITAudit.security_score,
            ITAudit.collaboration_score,
            ITAudit.completeness_score,
            ITAudit.maturity_level,
            ITAudit.audit_date,
            ITAudit.has_azure_ad,
            ITAudit.has_m365,
            ITAudit.has_proxmox_pbs,
            ITAudit.monitoring_solution,
            ITAudit.has_edr,
            ITAudit.has_dr_plan
        ).outerjoin(
            latest_audit_subq,
            Company.id == latest_audit_subq.c.company_id
        ).outerjoin(
            ITAudit,
            (Company.id == ITAudit.company_id) &
            (ITAudit.audit_date == latest_audit_subq.c.max_date)
        ).filter(
            Company.status == 'active'
        ).order_by(
            Company.name
        ).all()

        # Build companies list with named attributes for the template.
        companies = []
        for row in companies_query:
            # Zabbix is not a boolean column; detect it from the free-text
            # monitoring_solution field.
            has_zabbix = row.monitoring_solution and 'zabbix' in str(row.monitoring_solution).lower()

            companies.append({
                'id': row.id,
                'name': row.name,
                'slug': row.slug,
                'audit_id': row.audit_id,
                'overall_score': row.overall_score,
                'security_score': row.security_score,
                'collaboration_score': row.collaboration_score,
                'completeness_score': row.completeness_score,
                'maturity_level': row.maturity_level,
                'maturity_label': get_maturity_level_label(row.maturity_level) if row.maturity_level else None,
                'audit_date': row.audit_date,
                'has_azure_ad': row.has_azure_ad,
                'has_m365': row.has_m365,
                'has_proxmox_pbs': row.has_proxmox_pbs,
                'has_zabbix': has_zabbix,
                'has_edr': row.has_edr,
                'has_dr_plan': row.has_dr_plan
            })

        # Split audited vs. not-yet-audited (NULL overall_score = no audit).
        audited_companies = [c for c in companies if c['overall_score'] is not None]
        not_audited = [c for c in companies if c['overall_score'] is None]

        # Maturity distribution over the four known levels; unknown levels
        # are silently skipped by the membership check below.
        maturity_counts = {
            'basic': 0,
            'developing': 0,
            'established': 0,
            'advanced': 0
        }
        for c in audited_companies:
            level = c['maturity_level']
            if level in maturity_counts:
                maturity_counts[level] += 1

        # Average scores across audited companies; sub-scores may be NULL
        # and are treated as 0 in the average.
        if audited_companies:
            avg_overall = round(sum(c['overall_score'] for c in audited_companies) / len(audited_companies))
            avg_security = round(sum(c['security_score'] or 0 for c in audited_companies) / len(audited_companies))
            avg_collaboration = round(sum(c['collaboration_score'] or 0 for c in audited_companies) / len(audited_companies))
        else:
            avg_overall = None
            avg_security = None
            avg_collaboration = None

        # Technology adoption counts among audited companies.
        tech_stats = {
            'azure_ad': len([c for c in audited_companies if c['has_azure_ad']]),
            'm365': len([c for c in audited_companies if c['has_m365']]),
            'proxmox_pbs': len([c for c in audited_companies if c['has_proxmox_pbs']]),
            'zabbix': len([c for c in audited_companies if c['has_zabbix']]),
            'edr': len([c for c in audited_companies if c['has_edr']]),
            'dr_plan': len([c for c in audited_companies if c['has_dr_plan']])
        }

        # Collaboration openness flags: one count query per flag, restricted
        # to the latest audit of each audited company.
        collab_stats = {}
        if audited_companies:
            collab_flags = [
                'open_to_shared_licensing',
                'open_to_backup_replication',
                'open_to_teams_federation',
                'open_to_shared_monitoring',
                'open_to_collective_purchasing',
                'open_to_knowledge_sharing'
            ]
            for flag in collab_flags:
                count = db.query(func.count(ITAudit.id)).filter(
                    ITAudit.id.in_([c['audit_id'] for c in audited_companies if c['audit_id']]),
                    getattr(ITAudit, flag) == True
                ).scalar()
                collab_stats[flag] = count

        # All collaboration matches, best score first.
        matches = db.query(ITCollaborationMatch).order_by(
            ITCollaborationMatch.match_score.desc()
        ).all()

        # Build a flat list of match rows with attribute access for the
        # template (company names/slugs resolved per match).
        class CollabMatchRow:
            """Helper class for template attribute access"""
            def __init__(self, **kwargs):
                for key, value in kwargs.items():
                    setattr(self, key, value)

        collaboration_matches = []
        for match in matches:
            # N+1 lookups for company A and B info; acceptable for the
            # (small) number of matches on an admin page.
            company_a = db.query(Company).filter(Company.id == match.company_a_id).first()
            company_b = db.query(Company).filter(Company.id == match.company_b_id).first()

            collaboration_matches.append(CollabMatchRow(
                id=match.id,
                match_type=match.match_type,
                company_a_id=match.company_a_id,
                company_a_name=company_a.name if company_a else 'Nieznana',
                company_a_slug=company_a.slug if company_a else '',
                company_b_id=match.company_b_id,
                company_b_name=company_b.name if company_b else 'Nieznana',
                company_b_slug=company_b.slug if company_b else '',
                match_reason=match.match_reason,
                match_score=match.match_score,
                status=match.status,
                created_at=match.created_at
            ))

        stats = {
            # Main stats
            'total_audits': len(audited_companies),
            'total_companies': len(companies),
            'companies_without_audit': len(not_audited),

            # Score averages
            'avg_overall_score': avg_overall,
            'avg_security_score': avg_security,
            'avg_collaboration_score': avg_collaboration,

            # Maturity distribution (flattened for template)
            'maturity_basic': maturity_counts['basic'],
            'maturity_developing': maturity_counts['developing'],
            'maturity_established': maturity_counts['established'],
            'maturity_advanced': maturity_counts['advanced'],

            # Technology adoption stats (matching template naming with has_* prefix)
            'has_azure_ad': tech_stats['azure_ad'],
            'has_m365': tech_stats['m365'],
            'has_proxmox_pbs': tech_stats['proxmox_pbs'],
            'has_zabbix': tech_stats['zabbix'],
            'has_edr': tech_stats['edr'],
            'has_dr_plan': tech_stats['dr_plan'],

            # Collaboration flags
            'open_to_shared_licensing': collab_stats.get('open_to_shared_licensing', 0),
            'open_to_backup_replication': collab_stats.get('open_to_backup_replication', 0),
            'open_to_teams_federation': collab_stats.get('open_to_teams_federation', 0),
            'open_to_shared_monitoring': collab_stats.get('open_to_shared_monitoring', 0),
            'open_to_collective_purchasing': collab_stats.get('open_to_collective_purchasing', 0),
            'open_to_knowledge_sharing': collab_stats.get('open_to_knowledge_sharing', 0),

            # Legacy nested structures (for any templates that still use them)
            'maturity_counts': maturity_counts,
            'tech_stats': tech_stats,
            'collab_stats': collab_stats,
            'total_matches': len(collaboration_matches)
        }

        # Convert companies list to objects with attribute access for template.
        class CompanyRow:
            def __init__(self, data):
                for key, value in data.items():
                    setattr(self, key, value)

        companies_objects = [CompanyRow(c) for c in companies]

        return render_template('admin/it_audit_dashboard.html',
            companies=companies_objects,
            stats=stats,
            collaboration_matches=collaboration_matches,
            now=datetime.now()
        )

    finally:
        db.close()
|
|
|
|
|
|
|
|
# ============================================================
|
|
# IT AUDIT FORM - MOVED TO blueprints/it_audit/
|
|
# ============================================================
|
|
# Routes: /it-audit/form, /it-audit/save, /api/it-audit/*
|
|
|
|
|
|
# ============================================================
|
|
# RAPORTY - MIGRATED TO blueprints/reports/
|
|
# ============================================================
|
|
# Routes: /raporty, /raporty/staz-czlonkostwa, /raporty/social-media, /raporty/struktura-branzowa
|
|
|
|
|
|
# RELEASE NOTES - MOVED TO blueprints/admin/routes.py (admin_notify_release)
|
|
|
|
|
|
# ============================================================
|
|
# ============================================================
|
|
# ZOPK PUBLIC ROUTES - MOVED TO blueprints/public/routes_zopk.py
|
|
# Routes: /zopk, /zopk/projekty/<slug>, /zopk/aktualnosci
|
|
# ============================================================
|
|
|
|
|
|
# ============================================================
|
|
# ZOPK ROUTES - MOVED TO BLUEPRINTS
|
|
# ============================================================
|
|
# All ZOPK routes have been migrated to:
|
|
# - blueprints/admin/routes_zopk_dashboard.py
|
|
# - blueprints/admin/routes_zopk_news.py
|
|
# - blueprints/admin/routes_zopk_knowledge.py
|
|
# - blueprints/admin/routes_zopk_timeline.py
|
|
# ============================================================
|
|
|
|
# Endpoint aliases for ZOPK are created in blueprints/__init__.py
|
|
|
|
# ============================================================
|
|
# KRS AUDIT (Krajowy Rejestr Sądowy)
|
|
# ============================================================
|
|
|
|
# @app.route('/admin/krs-audit') # MOVED TO admin.admin_krs_audit
# @login_required
def _old_admin_krs_audit():
    """
    Admin dashboard for KRS (Krajowy Rejestr Sądowy) audit.

    Displays:
    - Summary stats (with KRS, audited count, data extraction status)
    - List of companies with KRS numbers
    - Audit progress and status for each company
    - Links to source PDF files

    Access: Admin only.
    """
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))

    db = SessionLocal()
    try:
        from sqlalchemy import func

        # All active companies that have a non-empty KRS number.
        companies_query = db.query(Company).filter(
            Company.status == 'active',
            Company.krs.isnot(None),
            Company.krs != ''
        ).order_by(Company.name).all()

        # Per-company detail rows. NOTE(review): this issues three extra
        # queries per company (latest audit, PKD codes, people count) —
        # N+1 pattern, acceptable for an admin page with a small dataset.
        companies = []
        for company in companies_query:
            # Most recent KRS audit, if any.
            latest_audit = db.query(KRSAudit).filter(
                KRSAudit.company_id == company.id
            ).order_by(KRSAudit.audit_date.desc()).first()

            # All PKD codes, primary first.
            pkd_codes = db.query(CompanyPKD).filter(
                CompanyPKD.company_id == company.id
            ).order_by(CompanyPKD.is_primary.desc(), CompanyPKD.pkd_code).all()
            pkd_count = len(pkd_codes)

            # Number of registered people (board, shareholders, etc.).
            people_count = db.query(CompanyPerson).filter(
                CompanyPerson.company_id == company.id
            ).count()

            companies.append({
                'id': company.id,
                'name': company.name,
                'slug': company.slug,
                'krs': company.krs,
                'nip': company.nip,
                'capital_amount': company.capital_amount,
                'krs_last_audit_at': company.krs_last_audit_at,
                'krs_pdf_path': company.krs_pdf_path,
                'audit': latest_audit,
                'pkd_count': pkd_count,
                'pkd_codes': [{
                    'code': pkd.pkd_code,
                    'description': pkd.pkd_description,
                    'is_primary': pkd.is_primary
                } for pkd in pkd_codes],
                'people_count': people_count,
                'capital_shares_count': company.capital_shares_count
            })

        # Summary stats over companies that have a KRS number.
        total_with_krs = len(companies)
        audited_count = len([c for c in companies if c['krs_last_audit_at']])
        not_audited_count = total_with_krs - audited_count
        with_capital = len([c for c in companies if c['capital_amount']])
        with_people = len([c for c in companies if c['people_count'] > 0])
        with_pkd = len([c for c in companies if c['pkd_count'] > 0])

        # Active companies missing a KRS number entirely.
        no_krs_count = db.query(Company).filter(
            Company.status == 'active',
            (Company.krs.is_(None)) | (Company.krs == '')
        ).count()

        stats = {
            'total_with_krs': total_with_krs,
            'audited_count': audited_count,
            'not_audited_count': not_audited_count,
            'no_krs_count': no_krs_count,
            'with_capital': with_capital,
            'with_people': with_people,
            'with_pkd': with_pkd
        }

        return render_template('admin/krs_audit_dashboard.html',
            companies=companies,
            stats=stats,
            # Feature flag: whether the KRS PDF parsing service is importable.
            krs_audit_available=KRS_AUDIT_AVAILABLE,
            now=datetime.now()
        )
    finally:
        db.close()
|
|
|
|
|
|
@app.route('/api/krs/audit', methods=['POST'])
|
|
@login_required
|
|
@limiter.limit("200 per hour")
|
|
def api_krs_audit_trigger():
|
|
"""
|
|
API: Trigger KRS audit for a company (admin-only).
|
|
|
|
Parses KRS PDF file and extracts all available data:
|
|
- Basic info (KRS, NIP, REGON, name, legal form)
|
|
- Capital and shares
|
|
- Management board, shareholders, procurators
|
|
- PKD codes
|
|
- Financial reports
|
|
|
|
Request JSON body:
|
|
- company_id: Company ID (integer)
|
|
|
|
Returns:
|
|
- Success: Audit results saved to database
|
|
- Error: Error message with status code
|
|
"""
|
|
if not current_user.is_admin:
|
|
return jsonify({
|
|
'success': False,
|
|
'error': 'Brak uprawnień. Tylko administrator może uruchamiać audyty KRS.'
|
|
}), 403
|
|
|
|
if not KRS_AUDIT_AVAILABLE:
|
|
return jsonify({
|
|
'success': False,
|
|
'error': 'Usługa audytu KRS jest niedostępna.'
|
|
}), 503
|
|
|
|
data = request.get_json()
|
|
if not data or not data.get('company_id'):
|
|
return jsonify({
|
|
'success': False,
|
|
'error': 'Podaj company_id firmy do audytu.'
|
|
}), 400
|
|
|
|
company_id = data['company_id']
|
|
|
|
db = SessionLocal()
|
|
try:
|
|
company = db.query(Company).filter_by(id=company_id, status='active').first()
|
|
if not company:
|
|
return jsonify({
|
|
'success': False,
|
|
'error': 'Firma nie znaleziona.'
|
|
}), 404
|
|
|
|
if not company.krs:
|
|
return jsonify({
|
|
'success': False,
|
|
'error': f'Firma "{company.name}" nie ma numeru KRS.'
|
|
}), 400
|
|
|
|
# Find PDF file
|
|
pdf_dir = Path('data/krs_pdfs')
|
|
pdf_files = list(pdf_dir.glob(f'*{company.krs}*.pdf'))
|
|
|
|
if not pdf_files:
|
|
return jsonify({
|
|
'success': False,
|
|
'error': f'Nie znaleziono pliku PDF dla KRS {company.krs}. '
|
|
f'Pobierz odpis z ekrs.ms.gov.pl i umieść w data/krs_pdfs/'
|
|
}), 404
|
|
|
|
pdf_path = pdf_files[0]
|
|
|
|
# Create audit record
|
|
audit = KRSAudit(
|
|
company_id=company.id,
|
|
status='parsing',
|
|
progress_percent=10,
|
|
progress_message='Parsowanie pliku PDF...',
|
|
pdf_filename=pdf_path.name,
|
|
pdf_path=str(pdf_path)
|
|
)
|
|
db.add(audit)
|
|
db.commit()
|
|
|
|
# Parse PDF
|
|
try:
|
|
parsed_data = parse_krs_pdf(str(pdf_path), verbose=True)
|
|
|
|
# Update audit with parsed data
|
|
audit.status = 'completed'
|
|
audit.progress_percent = 100
|
|
audit.progress_message = 'Audyt zakończony pomyślnie'
|
|
audit.extracted_krs = parsed_data.get('krs')
|
|
audit.extracted_nazwa = parsed_data.get('nazwa')
|
|
audit.extracted_nip = parsed_data.get('nip')
|
|
audit.extracted_regon = parsed_data.get('regon')
|
|
audit.extracted_forma_prawna = parsed_data.get('forma_prawna')
|
|
audit.extracted_data_rejestracji = parse_date_str(parsed_data.get('data_rejestracji'))
|
|
audit.extracted_kapital_zakladowy = parsed_data.get('kapital_zakladowy')
|
|
audit.extracted_liczba_udzialow = parsed_data.get('liczba_udzialow')
|
|
audit.extracted_sposob_reprezentacji = parsed_data.get('sposob_reprezentacji')
|
|
audit.zarzad_count = len(parsed_data.get('zarzad', []))
|
|
audit.wspolnicy_count = len(parsed_data.get('wspolnicy', []))
|
|
audit.prokurenci_count = len(parsed_data.get('prokurenci', []))
|
|
audit.pkd_count = 1 if parsed_data.get('pkd_przewazajacy') else 0
|
|
audit.pkd_count += len(parsed_data.get('pkd_pozostale', []))
|
|
|
|
# Convert non-JSON-serializable values for JSONB storage
|
|
def make_json_serializable(obj):
|
|
from decimal import Decimal
|
|
if isinstance(obj, Decimal):
|
|
return float(obj)
|
|
elif isinstance(obj, (datetime, date)):
|
|
return obj.isoformat()
|
|
elif isinstance(obj, dict):
|
|
return {k: make_json_serializable(v) for k, v in obj.items()}
|
|
elif isinstance(obj, list):
|
|
return [make_json_serializable(i) for i in obj]
|
|
return obj
|
|
|
|
audit.parsed_data = make_json_serializable(parsed_data)
|
|
audit.pdf_downloaded_at = datetime.now()
|
|
|
|
# Update company with parsed data
|
|
if parsed_data.get('kapital_zakladowy'):
|
|
company.capital_amount = parsed_data['kapital_zakladowy']
|
|
if parsed_data.get('liczba_udzialow'):
|
|
company.capital_shares_count = parsed_data['liczba_udzialow']
|
|
if parsed_data.get('wartosc_nominalna_udzialu'):
|
|
company.capital_share_value = parsed_data['wartosc_nominalna_udzialu']
|
|
if parsed_data.get('data_rejestracji'):
|
|
company.krs_registration_date = parse_date_str(parsed_data['data_rejestracji'])
|
|
if parsed_data.get('sposob_reprezentacji'):
|
|
company.krs_representation_rules = parsed_data['sposob_reprezentacji']
|
|
if parsed_data.get('czas_trwania'):
|
|
company.krs_duration = parsed_data['czas_trwania']
|
|
company.krs_last_audit_at = datetime.now()
|
|
company.krs_pdf_path = str(pdf_path)
|
|
|
|
# Import PKD codes
|
|
pkd_main = parsed_data.get('pkd_przewazajacy')
|
|
if pkd_main:
|
|
existing = db.query(CompanyPKD).filter_by(
|
|
company_id=company.id,
|
|
pkd_code=pkd_main['kod']
|
|
).first()
|
|
if not existing:
|
|
db.add(CompanyPKD(
|
|
company_id=company.id,
|
|
pkd_code=pkd_main['kod'],
|
|
pkd_description=pkd_main['opis'],
|
|
is_primary=True,
|
|
source='ekrs'
|
|
))
|
|
# Also update Company.pkd_code
|
|
company.pkd_code = pkd_main['kod']
|
|
company.pkd_description = pkd_main['opis']
|
|
|
|
for pkd in parsed_data.get('pkd_pozostale', []):
|
|
existing = db.query(CompanyPKD).filter_by(
|
|
company_id=company.id,
|
|
pkd_code=pkd['kod']
|
|
).first()
|
|
if not existing:
|
|
db.add(CompanyPKD(
|
|
company_id=company.id,
|
|
pkd_code=pkd['kod'],
|
|
pkd_description=pkd['opis'],
|
|
is_primary=False,
|
|
source='ekrs'
|
|
))
|
|
|
|
# Import people (zarząd, wspólnicy)
|
|
for person_data in parsed_data.get('zarzad', []):
|
|
_import_krs_person(db, company.id, person_data, 'zarzad', pdf_path.name)
|
|
|
|
for person_data in parsed_data.get('wspolnicy', []):
|
|
_import_krs_person(db, company.id, person_data, 'wspolnik', pdf_path.name)
|
|
|
|
for person_data in parsed_data.get('prokurenci', []):
|
|
_import_krs_person(db, company.id, person_data, 'prokurent', pdf_path.name)
|
|
|
|
# Import financial reports
|
|
for report in parsed_data.get('sprawozdania_finansowe', []):
|
|
existing = db.query(CompanyFinancialReport).filter_by(
|
|
company_id=company.id,
|
|
period_start=parse_date_str(report.get('okres_od')),
|
|
period_end=parse_date_str(report.get('okres_do'))
|
|
).first()
|
|
if not existing:
|
|
db.add(CompanyFinancialReport(
|
|
company_id=company.id,
|
|
period_start=parse_date_str(report.get('okres_od')),
|
|
period_end=parse_date_str(report.get('okres_do')),
|
|
filed_at=parse_date_str(report.get('data_zlozenia')),
|
|
source='ekrs'
|
|
))
|
|
|
|
db.commit()
|
|
|
|
logger.info(f"KRS audit completed for {company.name} (KRS: {company.krs})")
|
|
|
|
return jsonify({
|
|
'success': True,
|
|
'message': f'Audyt KRS zakończony dla {company.name}',
|
|
'company_id': company.id,
|
|
'data': {
|
|
'krs': parsed_data.get('krs'),
|
|
'nazwa': parsed_data.get('nazwa'),
|
|
'nip': parsed_data.get('nip'),
|
|
'regon': parsed_data.get('regon'),
|
|
'kapital': float(parsed_data.get('kapital_zakladowy', 0) or 0),
|
|
'liczba_udzialow': parsed_data.get('liczba_udzialow'),
|
|
'zarzad_count': len(parsed_data.get('zarzad', [])),
|
|
'wspolnicy_count': len(parsed_data.get('wspolnicy', [])),
|
|
'prokurenci_count': len(parsed_data.get('prokurenci', [])),
|
|
'pkd_count': audit.pkd_count
|
|
}
|
|
})
|
|
|
|
except Exception as e:
|
|
audit.status = 'error'
|
|
audit.progress_percent = 0
|
|
audit.error_message = str(e)
|
|
db.commit()
|
|
logger.error(f"KRS audit failed for {company.name}: {e}")
|
|
return jsonify({
|
|
'success': False,
|
|
'error': f'Błąd parsowania PDF: {str(e)}'
|
|
}), 500
|
|
|
|
finally:
|
|
db.close()
|
|
|
|
|
|
def parse_date_str(date_val):
    """Parse a date from an ISO string (YYYY-MM-DD) or pass through a date.

    Args:
        date_val: A ``date`` (or ``datetime``) object, an ISO-formatted
            date string, or None.

    Returns:
        A ``date`` object, or None when the value is missing, of an
        unsupported type, or unparseable.
    """
    if date_val is None:
        return None
    if isinstance(date_val, date):
        # Already a date (datetime is a date subclass) - pass through.
        return date_val
    if isinstance(date_val, str):
        try:
            return datetime.strptime(date_val, '%Y-%m-%d').date()
        except ValueError:
            # Malformed date strings are treated as missing rather than
            # fatal (was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit).
            return None
    return None
|
|
|
|
|
|
def _import_krs_person(db, company_id, person_data, role_category, source_document):
    """Upsert a person from parsed KRS data and link them to the company.

    Resolves the Person by PESEL first, then by full name, creating a new
    record when neither matches. The company<->person relation is only
    added when no relation with the same role category exists yet.
    """
    pesel = person_data.get('pesel')
    surname = person_data.get('nazwisko', '')
    given_names = person_data.get('imiona', '')
    role_name = person_data.get('rola', '')

    # Resolve the Person record: PESEL is the strongest identifier,
    # full name is the fallback.
    person = db.query(Person).filter_by(pesel=pesel).first() if pesel else None
    if person is None:
        person = db.query(Person).filter_by(
            nazwisko=surname,
            imiona=given_names
        ).first()
    if person is None:
        person = Person(pesel=pesel, nazwisko=surname, imiona=given_names)
        db.add(person)
        db.flush()  # obtain person.id before creating the relation

    # Skip when this company/person/role-category combination is already recorded.
    already_linked = db.query(CompanyPerson).filter_by(
        company_id=company_id,
        person_id=person.id,
        role_category=role_category
    ).first()
    if already_linked:
        return

    link = CompanyPerson(
        company_id=company_id,
        person_id=person.id,
        role=role_name,
        role_category=role_category,
        source='ekrs.ms.gov.pl',
        source_document=source_document,
        fetched_at=datetime.now()
    )
    # Shareholders additionally carry ownership details when present.
    if role_category == 'wspolnik':
        link.shares_count = person_data.get('udzialy_liczba')
        share_value = person_data.get('udzialy_wartosc')
        if share_value:
            link.shares_value = share_value
        share_percent = person_data.get('udzialy_procent')
        if share_percent:
            link.shares_percent = share_percent
    db.add(link)
|
|
|
|
|
|
@app.route('/api/krs/audit/batch', methods=['POST'])
@login_required
@limiter.limit("5 per hour")
def api_krs_audit_batch():
    """
    API: Trigger batch KRS audit for all companies with KRS numbers.

    This runs audits sequentially to avoid overloading the system.
    Returns progress updates via the response.

    Admin-only. Responses:
        200 - JSON summary with per-company results (success/failed/skipped)
        403 - caller is not an admin
        503 - the KRS audit service (PDF parser) is unavailable
    """
    if not current_user.is_admin:
        return jsonify({
            'success': False,
            'error': 'Brak uprawnień.'
        }), 403

    if not KRS_AUDIT_AVAILABLE:
        return jsonify({
            'success': False,
            'error': 'Usługa audytu KRS jest niedostępna.'
        }), 503

    db = SessionLocal()
    try:
        # Get companies with KRS that haven't been audited recently
        companies = db.query(Company).filter(
            Company.status == 'active',
            Company.krs.isnot(None),
            Company.krs != ''
        ).order_by(Company.name).all()

        # Aggregate counters plus a per-company detail list for the response.
        results = {
            'total': len(companies),
            'success': 0,
            'failed': 0,
            'skipped': 0,
            'details': []
        }

        pdf_dir = Path('data/krs_pdfs')

        for company in companies:
            # Find PDF file (any file whose name contains the KRS number)
            pdf_files = list(pdf_dir.glob(f'*{company.krs}*.pdf'))

            if not pdf_files:
                results['skipped'] += 1
                results['details'].append({
                    'company': company.name,
                    'krs': company.krs,
                    'status': 'skipped',
                    'reason': 'Brak pliku PDF'
                })
                continue

            # When multiple files match, the first glob hit is used.
            pdf_path = pdf_files[0]

            try:
                parsed_data = parse_krs_pdf(str(pdf_path))

                # Update company with extracted capital/share data (only
                # when the parser actually returned a value).
                if parsed_data.get('kapital_zakladowy'):
                    company.capital_amount = parsed_data['kapital_zakladowy']
                if parsed_data.get('liczba_udzialow'):
                    company.capital_shares_count = parsed_data['liczba_udzialow']
                company.krs_last_audit_at = datetime.now()
                company.krs_pdf_path = str(pdf_path)

                results['success'] += 1
                results['details'].append({
                    'company': company.name,
                    'krs': company.krs,
                    'status': 'success'
                })

            except Exception as e:
                # One failed parse must not abort the whole batch; record
                # the failure and continue with the next company.
                results['failed'] += 1
                results['details'].append({
                    'company': company.name,
                    'krs': company.krs,
                    'status': 'error',
                    'reason': str(e)
                })

        # NOTE(review): a single commit at the end means a commit failure
        # would discard updates from every successfully-parsed company.
        db.commit()

        return jsonify({
            'success': True,
            'message': f'Audyt zakończony: {results["success"]} sukces, '
                       f'{results["failed"]} błędów, {results["skipped"]} pominiętych',
            'results': results
        })

    finally:
        db.close()
|
|
|
|
|
|
@app.route('/api/krs/pdf/<int:company_id>')
@login_required
def api_krs_pdf_download(company_id):
    """
    API: Download/serve KRS PDF file for a company.

    Serves the stored excerpt inline (not as an attachment); responds 404
    when the company, its stored path, or the file itself is missing.
    """
    session_db = SessionLocal()
    try:
        company = session_db.query(Company).filter_by(id=company_id).first()
        if company is None:
            return jsonify({'error': 'Firma nie znaleziona'}), 404
        if not company.krs_pdf_path:
            return jsonify({'error': 'Brak pliku PDF'}), 404

        stored_pdf = Path(company.krs_pdf_path)
        if not stored_pdf.exists():
            return jsonify({'error': 'Plik PDF nie istnieje'}), 404

        # Inline display lets browsers preview the PDF directly.
        return send_file(
            str(stored_pdf),
            mimetype='application/pdf',
            as_attachment=False,
            download_name=stored_pdf.name
        )
    finally:
        session_db.close()
|
|
|
|
|
|
# ============================================================
|
|
# ERROR HANDLERS
|
|
# ============================================================
|
|
|
|
@app.errorhandler(404)
def not_found(error):
    """Render the custom 404 page for unknown URLs."""
    page = render_template('errors/404.html')
    return page, 404
|
|
|
|
|
|
def send_registration_notification(user_info):
    """Send email notification when a new user registers.

    Args:
        user_info: dict with optional keys 'name', 'email', 'company_nip',
            'company_name' and 'is_norda_member'; missing values render as
            placeholder text in the email.

    Best-effort: every failure is logged and swallowed so the registration
    flow is never blocked by a notification problem.
    """
    try:
        from email_service import send_email, is_configured

        # Bail out quietly when the email backend is not configured.
        if not is_configured():
            logger.warning("Email service not configured - skipping registration notification")
            return

        # Admin recipient; env var overrides the hard-coded default.
        notify_email = os.getenv('ERROR_NOTIFY_EMAIL', 'maciej.pienczyn@inpi.pl')
        if not notify_email:
            return

        reg_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        is_member = "✅ TAK" if user_info.get('is_norda_member') else "❌ NIE"
        company_name = user_info.get('company_name', 'Brak przypisanej firmy')

        subject = f"👤 NordaBiz: Nowa rejestracja - {user_info.get('name', 'Nieznany')}"

        # Plain-text variant for mail clients that do not render HTML.
        body_text = f"""👤 NOWA REJESTRACJA NA NORDABIZNES.PL
{'='*50}

🕐 Czas: {reg_time}
👤 Imię: {user_info.get('name', 'N/A')}
📧 Email: {user_info.get('email', 'N/A')}
🏢 NIP: {user_info.get('company_nip', 'N/A')}
🏛️ Firma: {company_name}
🎫 Członek NORDA: {is_member}

{'='*50}
🔗 Panel użytkowników: https://nordabiznes.pl/admin/users
"""

        # Styled HTML variant with the same data plus a link to the admin panel.
        body_html = f"""<!DOCTYPE html>
<html>
<head><meta charset="UTF-8"></head>
<body style="font-family: 'Inter', Arial, sans-serif; background: #f8fafc; color: #1e293b; padding: 20px;">
<div style="max-width: 600px; margin: 0 auto;">
<div style="background: linear-gradient(135deg, #10b981, #059669); color: white; padding: 20px; border-radius: 8px 8px 0 0;">
<h1 style="margin: 0; font-size: 20px;">👤 Nowa rejestracja na NordaBiznes.pl</h1>
</div>
<div style="background: white; padding: 25px; border-radius: 0 0 8px 8px; box-shadow: 0 4px 6px rgba(0,0,0,0.1);">
<table style="width: 100%; border-collapse: collapse;">
<tr><td style="color: #64748b; padding: 8px 0; border-bottom: 1px solid #e2e8f0;">🕐 Czas:</td><td style="padding: 8px 0; border-bottom: 1px solid #e2e8f0; font-weight: 500;">{reg_time}</td></tr>
<tr><td style="color: #64748b; padding: 8px 0; border-bottom: 1px solid #e2e8f0;">👤 Imię:</td><td style="padding: 8px 0; border-bottom: 1px solid #e2e8f0; font-weight: 600; color: #1e40af;">{user_info.get('name', 'N/A')}</td></tr>
<tr><td style="color: #64748b; padding: 8px 0; border-bottom: 1px solid #e2e8f0;">📧 Email:</td><td style="padding: 8px 0; border-bottom: 1px solid #e2e8f0;"><a href="mailto:{user_info.get('email', '')}" style="color: #2563eb;">{user_info.get('email', 'N/A')}</a></td></tr>
<tr><td style="color: #64748b; padding: 8px 0; border-bottom: 1px solid #e2e8f0;">🏢 NIP:</td><td style="padding: 8px 0; border-bottom: 1px solid #e2e8f0; font-family: monospace;">{user_info.get('company_nip', 'N/A')}</td></tr>
<tr><td style="color: #64748b; padding: 8px 0; border-bottom: 1px solid #e2e8f0;">🏛️ Firma:</td><td style="padding: 8px 0; border-bottom: 1px solid #e2e8f0;">{company_name}</td></tr>
<tr><td style="color: #64748b; padding: 8px 0;">🎫 Członek NORDA:</td><td style="padding: 8px 0; font-weight: 600;">{is_member}</td></tr>
</table>
<div style="margin-top: 25px; text-align: center;">
<a href="https://nordabiznes.pl/admin/users" style="display: inline-block; padding: 12px 24px; background: #2563eb; color: white; text-decoration: none; border-radius: 6px; font-weight: 500;">Otwórz panel użytkowników</a>
</div>
</div>
</div>
</body>
</html>"""

        result = send_email(
            to=[notify_email],
            subject=subject,
            body_text=body_text,
            body_html=body_html,
            email_type='registration_notification'
        )

        if result:
            logger.info(f"Registration notification sent to {notify_email}")
        else:
            logger.error(f"Failed to send registration notification to {notify_email}")

    except Exception as e:
        # Never propagate - registration must succeed even if email fails.
        logger.error(f"Failed to send registration notification: {e}")
|
|
|
|
|
|
def send_error_notification(error, request_info):
    """Send email notification about 500 errors via Microsoft Graph.

    Args:
        error: the exception/error object from the 500 handler.
        request_info: dict with keys 'url', 'path', 'method', 'ip',
            'user_agent' and 'user' describing the failing request.

    Best-effort: failures are logged and swallowed so the error page is
    still returned to the client.
    """
    try:
        from email_service import send_email, is_configured

        if not is_configured():
            logger.warning("Email service not configured - skipping error notification")
            return

        # Recipient of error alerts; env var overrides the hard-coded default.
        error_email = os.getenv('ERROR_NOTIFY_EMAIL', 'maciej.pienczyn@inpi.pl')
        if not error_email:
            return

        # Build error details
        error_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        # NOTE: format_exc() is only meaningful while the original exception
        # is still being handled; called outside that context it yields
        # "NoneType: None".
        traceback_str = tb_module.format_exc()

        subject = f"🚨 NordaBiz ERROR 500: {request_info.get('path', 'Unknown')}"

        # Plain-text variant of the alert.
        body_text = f"""⚠️ BŁĄD 500 NA NORDABIZNES.PL
{'='*50}

🕐 Czas: {error_time}
🌐 URL: {request_info.get('url', 'N/A')}
📍 Ścieżka: {request_info.get('path', 'N/A')}
📝 Metoda: {request_info.get('method', 'N/A')}
👤 Użytkownik: {request_info.get('user', 'Anonimowy')}
🖥️ IP: {request_info.get('ip', 'N/A')}
🌍 User-Agent: {request_info.get('user_agent', 'N/A')}

{'='*50}
📋 BŁĄD:
{str(error)}

{'='*50}
📜 TRACEBACK:
{traceback_str}

{'='*50}
🔧 Sprawdź logi: ssh maciejpi@10.22.68.249 "sudo journalctl -u nordabiznes --since '10 minutes ago'"
"""

        # Dark "terminal-style" HTML variant with the same details.
        body_html = f"""<!DOCTYPE html>
<html>
<head><meta charset="UTF-8"></head>
<body style="font-family: 'Courier New', monospace; background: #1e1e1e; color: #d4d4d4; padding: 20px;">
<div style="max-width: 800px; margin: 0 auto;">
<div style="background: #dc2626; color: white; padding: 15px 20px; border-radius: 8px 8px 0 0;">
<h1 style="margin: 0; font-size: 20px;">🚨 BŁĄD 500 NA NORDABIZNES.PL</h1>
</div>
<div style="background: #2d2d2d; padding: 20px; border-radius: 0 0 8px 8px;">
<table style="width: 100%; border-collapse: collapse;">
<tr><td style="color: #9ca3af; padding: 5px 0;">🕐 Czas:</td><td style="color: #fbbf24;">{error_time}</td></tr>
<tr><td style="color: #9ca3af; padding: 5px 0;">🌐 URL:</td><td style="color: #60a5fa; word-break: break-all;">{request_info.get('url', 'N/A')}</td></tr>
<tr><td style="color: #9ca3af; padding: 5px 0;">📍 Ścieżka:</td><td style="color: #34d399;">{request_info.get('path', 'N/A')}</td></tr>
<tr><td style="color: #9ca3af; padding: 5px 0;">📝 Metoda:</td><td>{request_info.get('method', 'N/A')}</td></tr>
<tr><td style="color: #9ca3af; padding: 5px 0;">👤 Użytkownik:</td><td>{request_info.get('user', 'Anonimowy')}</td></tr>
<tr><td style="color: #9ca3af; padding: 5px 0;">🖥️ IP:</td><td>{request_info.get('ip', 'N/A')}</td></tr>
</table>
<div style="margin-top: 20px; padding: 15px; background: #1e1e1e; border-radius: 8px; border-left: 4px solid #dc2626;">
<div style="color: #f87171; font-weight: bold; margin-bottom: 10px;">📋 BŁĄD:</div>
<pre style="margin: 0; white-space: pre-wrap; color: #fca5a5;">{str(error)}</pre>
</div>
<div style="margin-top: 20px; padding: 15px; background: #1e1e1e; border-radius: 8px; border-left: 4px solid #f59e0b;">
<div style="color: #fbbf24; font-weight: bold; margin-bottom: 10px;">📜 TRACEBACK:</div>
<pre style="margin: 0; white-space: pre-wrap; font-size: 12px; color: #9ca3af; max-height: 400px; overflow: auto;">{traceback_str}</pre>
</div>
<div style="margin-top: 20px; padding: 15px; background: #1e3a5f; border-radius: 8px;">
<div style="color: #60a5fa;">🔧 <strong>Sprawdź logi:</strong></div>
<code style="display: block; margin-top: 10px; color: #34d399; word-break: break-all;">ssh maciejpi@10.22.68.249 "sudo journalctl -u nordabiznes --since '10 minutes ago'"</code>
</div>
</div>
</div>
</body>
</html>"""

        result = send_email(
            to=[error_email],
            subject=subject,
            body_text=body_text,
            body_html=body_html,
            email_type='error_notification'
        )

        if result:
            logger.info(f"Error notification sent to {error_email}")
        else:
            logger.error(f"Failed to send error notification to {error_email}")

    except Exception as e:
        # Never propagate - the 500 handler must still render its page.
        logger.error(f"Failed to send error notification: {e}")
|
|
|
|
|
|
@app.errorhandler(500)
def internal_error(error):
    """Render the 500 page and email an alert with request context."""
    # Snapshot the request context for the notification email.
    if request:
        request_info = {
            'url': request.url,
            'path': request.path,
            'method': request.method,
            'ip': request.remote_addr,
            'user_agent': request.headers.get('User-Agent', 'N/A'),
        }
    else:
        request_info = {key: 'N/A' for key in ('url', 'path', 'method', 'ip', 'user_agent')}
    if current_user and current_user.is_authenticated:
        request_info['user'] = current_user.email
    else:
        request_info['user'] = 'Anonimowy'

    # Notification is best-effort; never let it mask the error page.
    try:
        send_error_notification(error, request_info)
    except Exception as e:
        logger.error(f"Error notification failed: {e}")

    return render_template('errors/500.html'), 500
|
|
|
|
|
|
# ============================================================
|
|
# ADMIN - SECURITY DASHBOARD
|
|
# ============================================================
|
|
|
|
# @app.route('/admin/security') # MOVED TO admin.admin_security
|
|
# @login_required
|
|
def _old_admin_security():
    """Security dashboard - audit logs, alerts, GeoIP stats.

    Renders admin/security_dashboard.html with:
      - the 50 most recent audit-log entries and security alerts,
      - alert/lockout/2FA summary stats,
      - GeoIP block counters (today/month/year/total) and a per-country
        breakdown, when GeoIP blocking is enabled.

    Admin-only; non-admins are redirected to the dashboard.
    """
    if not current_user.is_admin:
        flash('Brak uprawnień.', 'error')
        return redirect(url_for('dashboard'))

    db = SessionLocal()
    try:
        from sqlalchemy import func, desc

        # Get recent audit logs (newest first, capped at 50)
        audit_logs = db.query(AuditLog).order_by(
            desc(AuditLog.created_at)
        ).limit(50).all()

        # Get security alerts (newest first, capped at 50)
        alerts = db.query(SecurityAlert).order_by(
            desc(SecurityAlert.created_at)
        ).limit(50).all()

        # Alert stats
        new_alerts_count = db.query(SecurityAlert).filter(
            SecurityAlert.status == 'new'
        ).count()

        # Recent locked accounts (lockout still in the future)
        locked_accounts = db.query(User).filter(
            User.locked_until > datetime.now()
        ).all()

        # Users with 2FA enabled
        users_with_2fa = db.query(User).filter(
            User.totp_enabled == True
        ).count()
        total_admins = db.query(User).filter(
            User.is_admin == True
        ).count()

        # Alert type breakdown (count per alert_type)
        alert_breakdown = db.query(
            SecurityAlert.alert_type,
            func.count(SecurityAlert.id).label('count')
        ).group_by(SecurityAlert.alert_type).all()

        stats = {
            'new_alerts': new_alerts_count,
            'locked_accounts': len(locked_accounts),
            'users_with_2fa': users_with_2fa,
            'total_admins': total_admins,
            'alert_breakdown': {a.alert_type: a.count for a in alert_breakdown}
        }

        # GeoIP stats
        from security_service import _get_geoip_enabled
        geoip_enabled = _get_geoip_enabled()

        geoip_stats = {'today': 0, 'this_month': 0, 'this_year': 0, 'total': 0, 'by_country': []}

        if geoip_enabled:
            today = datetime.now().date()
            first_of_month = today.replace(day=1)
            first_of_year = today.replace(month=1, day=1)

            # Count geo_blocked alerts per period
            geoip_stats['today'] = db.query(SecurityAlert).filter(
                SecurityAlert.alert_type == 'geo_blocked',
                func.date(SecurityAlert.created_at) == today
            ).count()

            geoip_stats['this_month'] = db.query(SecurityAlert).filter(
                SecurityAlert.alert_type == 'geo_blocked',
                func.date(SecurityAlert.created_at) >= first_of_month
            ).count()

            geoip_stats['this_year'] = db.query(SecurityAlert).filter(
                SecurityAlert.alert_type == 'geo_blocked',
                func.date(SecurityAlert.created_at) >= first_of_year
            ).count()

            geoip_stats['total'] = db.query(SecurityAlert).filter(
                SecurityAlert.alert_type == 'geo_blocked'
            ).count()

            # Country breakdown (from details JSON); unknown codes fall
            # back to a plain black flag with the raw code as the label.
            country_flags = {
                'RU': ('🇷🇺', 'Rosja'), 'CN': ('🇨🇳', 'Chiny'), 'KP': ('🇰🇵', 'Korea Płn.'),
                'IR': ('🇮🇷', 'Iran'), 'BY': ('🇧🇾', 'Białoruś'), 'SY': ('🇸🇾', 'Syria'),
                'VE': ('🇻🇪', 'Wenezuela'), 'CU': ('🇨🇺', 'Kuba')
            }

            geo_alerts = db.query(SecurityAlert).filter(
                SecurityAlert.alert_type == 'geo_blocked'
            ).all()

            # Tally blocked requests per country from each alert's details.
            country_counts = {}
            for alert in geo_alerts:
                if alert.details and 'country' in alert.details:
                    country = alert.details['country']
                    if country:
                        country_counts[country] = country_counts.get(country, 0) + 1

            # Sort by count descending
            sorted_countries = sorted(country_counts.items(), key=lambda x: x[1], reverse=True)
            for code, count in sorted_countries:
                flag, name = country_flags.get(code, ('🏴', code))
                geoip_stats['by_country'].append({
                    'code': code, 'flag': flag, 'name': name, 'count': count
                })

        return render_template(
            'admin/security_dashboard.html',
            audit_logs=audit_logs,
            alerts=alerts,
            locked_accounts=locked_accounts,
            stats=stats,
            geoip_enabled=geoip_enabled,
            geoip_stats=geoip_stats,
            generated_at=datetime.now()
        )
    finally:
        db.close()
|
|
|
|
|
|
# @app.route('/admin/security/alert/<int:alert_id>/acknowledge', methods=['POST']) # MOVED TO admin.acknowledge_security_alert
|
|
# @login_required
|
|
def _old_acknowledge_security_alert(alert_id):
    """Mark a security alert as acknowledged by the current admin."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Not authorized'}), 403

    session_db = SessionLocal()
    try:
        alert = session_db.query(SecurityAlert).get(alert_id)
        if alert is None:
            return jsonify({'success': False, 'error': 'Alert not found'}), 404

        # Record who acknowledged the alert, and when.
        alert.status = 'acknowledged'
        alert.acknowledged_by = current_user.id
        alert.acknowledged_at = datetime.now()

        # Audit trail, when the security service is available.
        if SECURITY_SERVICE_AVAILABLE:
            log_audit(session_db, 'alert.acknowledge', 'security_alert', alert_id,
                      details={'alert_type': alert.alert_type})

        session_db.commit()
        return jsonify({'success': True})
    finally:
        session_db.close()
|
|
|
|
|
|
# @app.route('/admin/security/alert/<int:alert_id>/resolve', methods=['POST']) # MOVED TO admin.resolve_security_alert
|
|
# @login_required
|
|
def _old_resolve_security_alert(alert_id):
    """Resolve a security alert, storing an optional resolution note."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Not authorized'}), 403

    note = request.form.get('note', '')

    session_db = SessionLocal()
    try:
        alert = session_db.query(SecurityAlert).get(alert_id)
        if alert is None:
            return jsonify({'success': False, 'error': 'Alert not found'}), 404

        alert.status = 'resolved'
        alert.resolution_note = note
        # Resolving implies acknowledgement when nobody did that yet.
        if not alert.acknowledged_by:
            alert.acknowledged_by = current_user.id
            alert.acknowledged_at = datetime.now()

        # Audit trail, when the security service is available.
        if SECURITY_SERVICE_AVAILABLE:
            log_audit(session_db, 'alert.resolve', 'security_alert', alert_id,
                      details={'alert_type': alert.alert_type, 'note': note})

        session_db.commit()
        flash('Alert został rozwiązany.', 'success')
        return redirect(url_for('admin_security'))
    finally:
        session_db.close()
|
|
|
|
|
|
# @app.route('/admin/security/unlock-account/<int:user_id>', methods=['POST']) # MOVED TO admin.unlock_account
|
|
# @login_required
|
|
def _old_unlock_account(user_id):
    """Clear the lockout timestamp and failed-login counter for a user."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Not authorized'}), 403

    session_db = SessionLocal()
    try:
        user = session_db.query(User).get(user_id)
        if user is None:
            return jsonify({'success': False, 'error': 'User not found'}), 404

        # Reset both lockout fields so the account can sign in again.
        user.locked_until = None
        user.failed_login_attempts = 0

        # Audit trail, when the security service is available.
        if SECURITY_SERVICE_AVAILABLE:
            log_audit(session_db, 'user.unlock', 'user', user_id, user.email)

        session_db.commit()
        flash(f'Konto {user.email} zostało odblokowane.', 'success')
        return redirect(url_for('admin_security'))
    finally:
        session_db.close()
|
|
|
|
|
|
# @app.route('/api/admin/security/geoip-stats') # MOVED TO admin.api_geoip_stats
|
|
# @login_required
|
|
def _old_api_geoip_stats():
    """API endpoint for GeoIP stats auto-refresh.

    Returns JSON with counts of 'geo_blocked' security alerts for
    today / this month / this year / total, plus a per-country
    breakdown sorted by count. When GeoIP blocking is disabled only
    {'enabled': False, 'timestamp': ...} is returned.

    Admin-only (403 otherwise). Mirrors the stats shown on the
    security dashboard.
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Not authorized'}), 403

    from sqlalchemy import func
    from security_service import _get_geoip_enabled

    db = SessionLocal()
    try:
        now = datetime.now()
        geoip_enabled = _get_geoip_enabled()

        # Short-circuit: nothing to report when blocking is off.
        if not geoip_enabled:
            return jsonify({
                'enabled': False,
                'timestamp': now.isoformat()
            })

        today = now.date()
        first_of_month = today.replace(day=1)
        first_of_year = today.replace(month=1, day=1)

        # Period counters over 'geo_blocked' alerts.
        stats = {
            'enabled': True,
            'timestamp': now.isoformat(),
            'today': db.query(SecurityAlert).filter(
                SecurityAlert.alert_type == 'geo_blocked',
                func.date(SecurityAlert.created_at) == today
            ).count(),
            'this_month': db.query(SecurityAlert).filter(
                SecurityAlert.alert_type == 'geo_blocked',
                func.date(SecurityAlert.created_at) >= first_of_month
            ).count(),
            'this_year': db.query(SecurityAlert).filter(
                SecurityAlert.alert_type == 'geo_blocked',
                func.date(SecurityAlert.created_at) >= first_of_year
            ).count(),
            'total': db.query(SecurityAlert).filter(
                SecurityAlert.alert_type == 'geo_blocked'
            ).count()
        }

        # Country breakdown; unknown codes fall back to a plain black
        # flag with the raw code as the label.
        country_flags = {
            'RU': ('🇷🇺', 'Rosja'), 'CN': ('🇨🇳', 'Chiny'), 'KP': ('🇰🇵', 'Korea Płn.'),
            'IR': ('🇮🇷', 'Iran'), 'BY': ('🇧🇾', 'Białoruś'), 'SY': ('🇸🇾', 'Syria'),
            'VE': ('🇻🇪', 'Wenezuela'), 'CU': ('🇨🇺', 'Kuba')
        }

        geo_alerts = db.query(SecurityAlert).filter(
            SecurityAlert.alert_type == 'geo_blocked'
        ).all()

        # Tally blocked requests per country from each alert's details JSON.
        country_counts = {}
        for alert in geo_alerts:
            if alert.details and 'country' in alert.details:
                country = alert.details['country']
                if country:
                    country_counts[country] = country_counts.get(country, 0) + 1

        by_country = []
        for code, count in sorted(country_counts.items(), key=lambda x: x[1], reverse=True):
            flag, name = country_flags.get(code, ('🏴', code))
            by_country.append({'code': code, 'flag': flag, 'name': name, 'count': count})

        stats['by_country'] = by_country

        return jsonify(stats)
    finally:
        db.close()
|
|
|
|
|
|
# ============================================================
|
|
# ANNOUNCEMENTS (Ogłoszenia dla członków)
|
|
# ============================================================
|
|
|
|
def generate_slug(title):
    """
    Generate URL-friendly slug from title.
    Uses unidecode for proper Polish character handling.
    """
    import re
    lowered = title.lower()
    try:
        from unidecode import unidecode
        text = unidecode(lowered)
    except ImportError:
        # Fallback without unidecode: transliterate Polish diacritics only.
        polish_map = str.maketrans({
            'ą': 'a', 'ć': 'c', 'ę': 'e', 'ł': 'l', 'ń': 'n',
            'ó': 'o', 'ś': 's', 'ź': 'z', 'ż': 'z'
        })
        text = lowered.translate(polish_map)

    # Drop anything that is not a word char, whitespace or hyphen,
    # then collapse whitespace/hyphen runs into single hyphens.
    text = re.sub(r'[^\w\s-]', '', text)
    text = re.sub(r'[-\s]+', '-', text).strip('-')
    # Cap the slug length to fit the database column.
    return text[:200]
|
|
|
|
|
|
# @app.route('/admin/announcements') # MOVED TO admin.admin_announcements
|
|
# @login_required
|
|
def _old_admin_announcements():
    """Admin panel - announcement list.

    Supports ?status= and ?category= query filters ('all' disables the
    filter); pinned announcements sort first, then newest-first.
    Admin-only; non-admins are redirected to the dashboard.
    """
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))

    from database import Announcement

    db = SessionLocal()
    try:
        # Filters (default 'all' leaves the query unfiltered)
        status_filter = request.args.get('status', 'all')
        category_filter = request.args.get('category', 'all')

        query = db.query(Announcement)

        if status_filter != 'all':
            query = query.filter(Announcement.status == status_filter)
        if category_filter != 'all':
            from sqlalchemy.dialects.postgresql import array as pg_array
            # Postgres '@>' containment: the categories array must contain
            # the selected category.
            query = query.filter(Announcement.categories.op('@>')(pg_array([category_filter])))

        # Sort: pinned first, then by created_at desc
        query = query.order_by(
            Announcement.is_pinned.desc(),
            Announcement.created_at.desc()
        )

        announcements = query.all()

        return render_template('admin/announcements.html',
                               announcements=announcements,
                               now=datetime.now(),
                               status_filter=status_filter,
                               category_filter=category_filter,
                               categories=Announcement.CATEGORIES,
                               category_labels=Announcement.CATEGORY_LABELS,
                               statuses=Announcement.STATUSES,
                               status_labels=Announcement.STATUS_LABELS)

    finally:
        db.close()
|
|
|
|
|
|
# @app.route('/admin/announcements/new', methods=['GET', 'POST']) # MOVED TO admin.admin_announcements_new
|
|
# @login_required
|
|
def _old_admin_announcements_new():
    """Admin panel - create a new announcement.

    GET renders an empty form; POST creates the announcement either as a
    draft or published, depending on which submit button (form field
    'action') was used. Admin-only; non-admins are redirected.
    """
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))

    from database import Announcement

    if request.method == 'POST':
        db = SessionLocal()
        try:
            title = request.form.get('title', '').strip()
            excerpt = request.form.get('excerpt', '').strip()
            content = request.form.get('content', '').strip()
            categories = request.form.getlist('categories')
            if not categories:
                categories = ['internal']  # Default category
            category = categories[0]  # Backwards compatibility
            image_url = request.form.get('image_url', '').strip() or None
            external_link = request.form.get('external_link', '').strip() or None
            is_featured = 'is_featured' in request.form
            is_pinned = 'is_pinned' in request.form

            # Handle expires_at (HTML datetime-local input format)
            expires_at_str = request.form.get('expires_at', '').strip()
            expires_at = None
            if expires_at_str:
                try:
                    expires_at = datetime.strptime(expires_at_str, '%Y-%m-%dT%H:%M')
                except ValueError:
                    # Unparseable date is silently ignored - no expiry.
                    pass

            # Generate unique slug (append -1, -2, ... on collision)
            base_slug = generate_slug(title)
            slug = base_slug
            counter = 1
            while db.query(Announcement).filter(Announcement.slug == slug).first():
                slug = f"{base_slug}-{counter}"
                counter += 1

            # Determine status based on button clicked
            action = request.form.get('action', 'draft')
            status = 'published' if action == 'publish' else 'draft'
            published_at = datetime.now() if status == 'published' else None

            announcement = Announcement(
                title=title,
                slug=slug,
                excerpt=excerpt or None,
                content=content,
                category=category,
                categories=categories,
                image_url=image_url,
                external_link=external_link,
                status=status,
                published_at=published_at,
                expires_at=expires_at,
                is_featured=is_featured,
                is_pinned=is_pinned,
                created_by=current_user.id
            )

            db.add(announcement)
            db.commit()

            flash(f'Ogłoszenie zostało {"opublikowane" if status == "published" else "zapisane jako szkic"}.', 'success')
            return redirect(url_for('admin_announcements'))

        except Exception as e:
            # Roll back the partial insert and surface the error to the admin.
            db.rollback()
            logger.error(f"Error creating announcement: {e}")
            flash(f'Błąd podczas tworzenia ogłoszenia: {e}', 'error')
        finally:
            db.close()

    # GET request - show form
    from database import Announcement
    return render_template('admin/announcements_form.html',
                           announcement=None,
                           categories=Announcement.CATEGORIES,
                           category_labels=Announcement.CATEGORY_LABELS)
|
|
|
|
|
|
# @app.route('/admin/announcements/<int:id>/edit', methods=['GET', 'POST']) # MOVED TO admin.admin_announcements_edit
# @login_required
def _old_admin_announcements_edit(id):
    """Admin panel - edit an announcement (legacy copy, moved to admin blueprint).

    GET renders the edit form; POST applies the submitted changes.
    Non-admin users are redirected to the dashboard.

    Args:
        id: Primary key of the announcement to edit.
    """
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))

    from database import Announcement

    db = SessionLocal()
    try:
        announcement = db.query(Announcement).filter(Announcement.id == id).first()
        if not announcement:
            flash('Nie znaleziono ogłoszenia.', 'error')
            return redirect(url_for('admin_announcements'))

        if request.method == 'POST':
            announcement.title = request.form.get('title', '').strip()
            announcement.excerpt = request.form.get('excerpt', '').strip() or None
            announcement.content = request.form.get('content', '').strip()
            categories = request.form.getlist('categories')
            if not categories:
                categories = ['internal']  # Default category
            announcement.categories = categories
            announcement.category = categories[0]  # Backwards compatibility
            announcement.image_url = request.form.get('image_url', '').strip() or None
            announcement.external_link = request.form.get('external_link', '').strip() or None
            announcement.is_featured = 'is_featured' in request.form
            announcement.is_pinned = 'is_pinned' in request.form

            # Handle expires_at (HTML datetime-local input). An unparsable
            # value silently keeps the previous expiry; an empty field clears it.
            expires_at_str = request.form.get('expires_at', '').strip()
            if expires_at_str:
                try:
                    announcement.expires_at = datetime.strptime(expires_at_str, '%Y-%m-%dT%H:%M')
                except ValueError:
                    pass
            else:
                announcement.expires_at = None

            # Regenerate the slug only when the title produces a new base slug.
            # BUGFIX: the previous check compared new_slug against
            # announcement.slug.split('-')[0] - i.e. only the text before the
            # FIRST hyphen - so any multi-word slug ("hello-world") looked
            # "changed" and was regenerated on every save, breaking previously
            # shared URLs. Strip only a trailing numeric uniqueness suffix
            # (e.g. "-2") before comparing.
            new_slug = generate_slug(announcement.title)
            current_base = re.sub(r'-\d+$', '', announcement.slug or '')
            if new_slug != current_base:
                base_slug = new_slug
                slug = base_slug
                counter = 1
                # Keep the slug unique among OTHER announcements.
                while db.query(Announcement).filter(
                    Announcement.slug == slug,
                    Announcement.id != id
                ).first():
                    slug = f"{base_slug}-{counter}"
                    counter += 1
                announcement.slug = slug

            # Handle status change depending on which submit button was used.
            action = request.form.get('action', 'save')
            if action == 'publish' and announcement.status != 'published':
                announcement.status = 'published'
                announcement.published_at = datetime.now()
            elif action == 'archive':
                announcement.status = 'archived'
            elif action == 'draft':
                announcement.status = 'draft'

            announcement.updated_at = datetime.now()
            db.commit()

            flash('Zmiany zostały zapisane.', 'success')
            return redirect(url_for('admin_announcements'))

        # GET request - show form
        return render_template('admin/announcements_form.html',
                               announcement=announcement,
                               categories=Announcement.CATEGORIES,
                               category_labels=Announcement.CATEGORY_LABELS)

    except Exception as e:
        db.rollback()
        logger.error(f"Error editing announcement {id}: {e}")
        flash(f'Błąd: {e}', 'error')
        return redirect(url_for('admin_announcements'))

    finally:
        db.close()
|
|
|
|
|
|
# @app.route('/admin/announcements/<int:id>/publish', methods=['POST']) # MOVED TO admin.admin_announcements_publish
# @login_required
def _old_admin_announcements_publish(id):
    """Publish an announcement and notify all users (legacy copy).

    Returns a JSON payload describing the outcome; admin access required.
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403

    from database import Announcement

    db_session = SessionLocal()
    try:
        target = db_session.query(Announcement).filter(Announcement.id == id).first()
        if target is None:
            return jsonify({'success': False, 'error': 'Nie znaleziono ogłoszenia'}), 404

        target.status = 'published'
        # Keep the original publication timestamp when re-publishing.
        if not target.published_at:
            target.published_at = datetime.now()
        target.updated_at = datetime.now()
        db_session.commit()

        # Fan out an in-app notification to every user.
        from utils.notifications import notify_all_users_announcement
        notify_count = notify_all_users_announcement(
            announcement_id=target.id,
            title=target.title,
            category=target.category
        )
        logger.info(f"Sent {notify_count} notifications for announcement: {target.title}")

        return jsonify({'success': True, 'message': f'Ogłoszenie zostało opublikowane. Wysłano {notify_count} powiadomień.'})

    except Exception as e:
        db_session.rollback()
        logger.error(f"Error publishing announcement {id}: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500

    finally:
        db_session.close()
|
|
|
|
|
|
# @app.route('/admin/announcements/<int:id>/archive', methods=['POST']) # MOVED TO admin.admin_announcements_archive
# @login_required
def _old_admin_announcements_archive(id):
    """Archive an announcement (legacy copy; JSON response, admin only)."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403

    from database import Announcement

    db_session = SessionLocal()
    try:
        record = db_session.query(Announcement).filter(Announcement.id == id).first()
        if record is None:
            return jsonify({'success': False, 'error': 'Nie znaleziono ogłoszenia'}), 404

        # Archiving is a soft state change; the record stays in the table.
        record.status = 'archived'
        record.updated_at = datetime.now()
        db_session.commit()

        return jsonify({'success': True, 'message': 'Ogłoszenie zostało zarchiwizowane'})

    except Exception as e:
        db_session.rollback()
        logger.error(f"Error archiving announcement {id}: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500

    finally:
        db_session.close()
|
|
|
|
|
|
# @app.route('/admin/announcements/<int:id>/delete', methods=['POST']) # MOVED TO admin.admin_announcements_delete
# @login_required
def _old_admin_announcements_delete(id):
    """Permanently delete an announcement (legacy copy; JSON response, admin only)."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403

    from database import Announcement

    db_session = SessionLocal()
    try:
        record = db_session.query(Announcement).filter(Announcement.id == id).first()
        if record is None:
            return jsonify({'success': False, 'error': 'Nie znaleziono ogłoszenia'}), 404

        # Hard delete - unlike archiving, the row is removed for good.
        db_session.delete(record)
        db_session.commit()

        return jsonify({'success': True, 'message': 'Ogłoszenie zostało usunięte'})

    except Exception as e:
        db_session.rollback()
        logger.error(f"Error deleting announcement {id}: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500

    finally:
        db_session.close()
|
|
|
|
|
|
# ============================================================
|
|
# PUBLIC ANNOUNCEMENTS PAGE
|
|
# ============================================================
|
|
|
|
@app.route('/ogloszenia')
@login_required
@limiter.limit("60 per minute")
def announcements_list():
    """Paginated list of published, non-expired announcements for members.

    Query params: ``page`` (1-based) and an optional ``category`` filter
    (must be one of Announcement.CATEGORIES to take effect).
    """
    from database import Announcement
    from sqlalchemy import or_, desc

    db_session = SessionLocal()
    try:
        page = request.args.get('page', 1, type=int)
        category = request.args.get('category', '')
        per_page = 12

        # Only published items whose expiry is unset or still in the future.
        visible = db_session.query(Announcement).filter(
            Announcement.status == 'published',
            or_(
                Announcement.expires_at.is_(None),
                Announcement.expires_at > datetime.now()
            )
        )

        # Optional category filter via PostgreSQL's array-contains (@>) operator,
        # so both legacy single-category and multi-category rows match.
        if category and category in Announcement.CATEGORIES:
            from sqlalchemy.dialects.postgresql import array as pg_array
            visible = visible.filter(Announcement.categories.op('@>')(pg_array([category])))

        # Pinned entries first, then newest publications.
        visible = visible.order_by(
            desc(Announcement.is_pinned),
            desc(Announcement.published_at)
        )

        # Manual pagination (ceil division for the page count).
        total = visible.count()
        total_pages = (total + per_page - 1) // per_page
        announcements = visible.offset((page - 1) * per_page).limit(per_page).all()

        return render_template('announcements/list.html',
                               announcements=announcements,
                               current_category=category,
                               categories=Announcement.CATEGORIES,
                               category_labels=Announcement.CATEGORY_LABELS,
                               page=page,
                               total_pages=total_pages,
                               total=total)

    finally:
        db_session.close()
|
|
|
|
|
|
@app.route('/ogloszenia/<slug>')
@login_required
@limiter.limit("60 per minute")
def announcement_detail(slug):
    """Announcement detail page for logged-in members.

    Looks up a published, non-expired announcement by slug, increments its
    view counter, records a one-time "read" receipt for the current user,
    then renders the detail template with read statistics and a sidebar of
    other recent announcements. Unknown/expired slugs redirect to the list.
    """
    from database import Announcement, AnnouncementRead, User
    from sqlalchemy import or_, desc, func

    db = SessionLocal()
    try:
        announcement = db.query(Announcement).filter(
            Announcement.slug == slug,
            Announcement.status == 'published',
            or_(
                Announcement.expires_at.is_(None),
                Announcement.expires_at > datetime.now()
            )
        ).first()

        if not announcement:
            flash('Nie znaleziono ogłoszenia lub zostało usunięte.', 'error')
            return redirect(url_for('announcements_list'))

        # Increment views counter (counts every visit, not unique readers).
        announcement.views_count = (announcement.views_count or 0) + 1

        # Record read by current user (if not already recorded).
        existing_read = db.query(AnnouncementRead).filter(
            AnnouncementRead.announcement_id == announcement.id,
            AnnouncementRead.user_id == current_user.id
        ).first()

        if not existing_read:
            new_read = AnnouncementRead(
                announcement_id=announcement.id,
                user_id=current_user.id
            )
            db.add(new_read)

        # Commit BEFORE querying readers so this visit's read receipt is
        # included in the statistics rendered below.
        db.commit()

        # Get readers (users who read this announcement), newest first.
        readers = db.query(AnnouncementRead).filter(
            AnnouncementRead.announcement_id == announcement.id
        ).order_by(desc(AnnouncementRead.read_at)).all()

        # Get total registered users count for percentage calculation.
        # The `or 1` fallback avoids division by zero on an empty user table.
        total_users = db.query(func.count(User.id)).filter(
            User.is_active == True,
            User.is_verified == True
        ).scalar() or 1

        readers_count = len(readers)
        read_percentage = round((readers_count / total_users) * 100, 1) if total_users > 0 else 0

        # Get other recent announcements for sidebar.
        other_announcements = db.query(Announcement).filter(
            Announcement.status == 'published',
            Announcement.id != announcement.id,
            or_(
                Announcement.expires_at.is_(None),
                Announcement.expires_at > datetime.now()
            )
        ).order_by(desc(Announcement.published_at)).limit(5).all()

        return render_template('announcements/detail.html',
                               announcement=announcement,
                               other_announcements=other_announcements,
                               category_labels=Announcement.CATEGORY_LABELS,
                               readers=readers,
                               readers_count=readers_count,
                               total_users=total_users,
                               read_percentage=read_percentage)

    finally:
        db.close()
|
|
|
|
|
|
# ============================================================
|
|
# EXTERNAL CONTACTS - PAGE ROUTES MIGRATED TO blueprints/community/contacts/
|
|
# ============================================================
|
|
# Routes: /kontakty, /kontakty/<id>, /kontakty/dodaj, /kontakty/<id>/edytuj, /kontakty/<id>/usun
|
|
# API routes remain below for backwards compatibility
|
|
|
|
# ============================================================
|
|
# AI-ASSISTED EXTERNAL CONTACT CREATION
|
|
# ============================================================
|
|
|
|
# Prompt template for extracting external-contact records from pasted text.
# Filled via str.format(input_text=...) in contacts_ai_parse; literal JSON
# braces in the expected-output schema are therefore escaped as {{ }}.
AI_CONTACT_PARSE_PROMPT = """Jesteś asystentem systemu NordaBiz pomagającym dodawać kontakty zewnętrzne.

ZADANIE:
Przeanalizuj podany tekst i wyodrębnij informacje o osobach kontaktowych z zewnętrznych organizacji
(urzędy, agencje, instytucje, firmy partnerskie - osoby spoza Norda Biznes).

DANE WEJŚCIOWE:
```
{input_text}
```

TYPY ORGANIZACJI:
- government = Urząd (np. ministerstwo, urząd gminy/powiatu)
- agency = Agencja (np. ARP, PARP, agencje rozwoju)
- company = Firma (przedsiębiorstwa, spółki)
- ngo = Organizacja pozarządowa (fundacje, stowarzyszenia)
- university = Uczelnia (uniwersytety, politechniki)
- other = Inne

INSTRUKCJE:
1. Wyodrębnij każdą osobę kontaktową z tekstu
2. Dla każdej osoby zidentyfikuj:
   - imię i nazwisko (WYMAGANE)
   - stanowisko/funkcja (jeśli dostępne)
   - telefon (jeśli dostępny)
   - email (jeśli dostępny)
   - organizacja (WYMAGANE - nazwa instytucji)
   - typ organizacji (government/agency/company/ngo/university/other)
   - projekt/kontekst (jeśli tekst wspomina o konkretnym projekcie)
   - tagi (słowa kluczowe związane z osobą/projektem)
3. Jeśli brak imienia i nazwiska - pomiń osobę
4. Jeśli brak nazwy organizacji - pomiń osobę

ZWRÓĆ TYLKO CZYSTY JSON w dokładnie takim formacie (bez żadnego tekstu przed ani po):
{{
    "analysis": "Krótki opis znalezionych kontaktów (1-2 zdania po polsku)",
    "contacts": [
        {{
            "first_name": "Imię",
            "last_name": "Nazwisko",
            "position": "Stanowisko lub null",
            "phone": "Numer telefonu lub null",
            "email": "Email lub null",
            "organization_name": "Nazwa organizacji",
            "organization_type": "government|agency|company|ngo|university|other",
            "project_name": "Nazwa projektu lub null",
            "tags": "tagi, oddzielone, przecinkami",
            "warnings": []
        }}
    ]
}}"""


# Prompt for extracting contacts from an uploaded screenshot/image.
# NOTE(review): this template is passed to analyze_image() WITHOUT .format(),
# so the model receives the literal "{{"/"}}" sequences - confirm whether the
# Gemini wrapper formats it, or whether the braces should be unescaped here.
AI_CONTACT_IMAGE_PROMPT = """Jesteś asystentem systemu NordaBiz pomagającym dodawać kontakty zewnętrzne.

ZADANIE:
Przeanalizuj ten obraz (screenshot) i wyodrębnij informacje o osobach kontaktowych.
Szukaj: imion i nazwisk, stanowisk, telefonów, emaili, nazw organizacji, projektów.

TYPY ORGANIZACJI:
- government = Urząd (np. ministerstwo, urząd gminy/powiatu)
- agency = Agencja (np. ARP, PARP, agencje rozwoju)
- company = Firma (przedsiębiorstwa, spółki)
- ngo = Organizacja pozarządowa (fundacje, stowarzyszenia)
- university = Uczelnia (uniwersytety, politechniki)
- other = Inne

INSTRUKCJE:
1. Przeczytaj cały tekst widoczny na obrazie
2. Wyodrębnij każdą osobę kontaktową
3. Dla każdej osoby zidentyfikuj:
   - imię i nazwisko (WYMAGANE)
   - stanowisko/funkcja
   - telefon
   - email
   - organizacja (WYMAGANE)
   - typ organizacji
   - projekt/kontekst
   - tagi
4. Jeśli brak imienia/nazwiska lub organizacji - pomiń osobę

ZWRÓĆ TYLKO CZYSTY JSON w dokładnie takim formacie:
{{
    "analysis": "Krótki opis znalezionych kontaktów (1-2 zdania po polsku)",
    "contacts": [
        {{
            "first_name": "Imię",
            "last_name": "Nazwisko",
            "position": "Stanowisko lub null",
            "phone": "Numer telefonu lub null",
            "email": "Email lub null",
            "organization_name": "Nazwa organizacji",
            "organization_type": "government|agency|company|ngo|university|other",
            "project_name": "Nazwa projektu lub null",
            "tags": "tagi, oddzielone, przecinkami",
            "warnings": []
        }}
    ]
}}"""
|
|
|
|
|
|
@app.route('/api/contacts/ai-parse', methods=['POST'])
@login_required
def contacts_ai_parse():
    """Parse text or image with AI to extract external contact data.

    Accepts either a JSON body with ``text`` (or legacy ``content``) or a
    multipart upload with ``input_type=image`` and a ``file``. The Gemini
    response is parsed as JSON, each proposed contact is checked against
    existing active records for potential duplicates, and the proposals are
    returned to the frontend for confirmation (nothing is persisted here).

    Returns:
        JSON: {success, analysis, contacts} on success; {success, error}
        with 400/500 status on failure.
    """
    db = SessionLocal()
    try:
        # Input type may arrive as a form field (image upload) or a JSON key.
        input_type = request.form.get('input_type') or (request.get_json() or {}).get('input_type', 'text')

        if input_type == 'image':
            # Handle image upload
            if 'file' not in request.files:
                return jsonify({'success': False, 'error': 'Brak pliku obrazu'}), 400

            file = request.files['file']
            if file.filename == '':
                return jsonify({'success': False, 'error': 'Nie wybrano pliku'}), 400

            # Validate file type by extension.
            allowed_extensions = {'png', 'jpg', 'jpeg', 'gif', 'webp'}
            ext = file.filename.rsplit('.', 1)[-1].lower() if '.' in file.filename else ''
            if ext not in allowed_extensions:
                return jsonify({'success': False, 'error': 'Dozwolone formaty: PNG, JPG, JPEG, GIF, WEBP'}), 400

            # Persist to a temp file because the Gemini client reads a path.
            import tempfile
            with tempfile.NamedTemporaryFile(delete=False, suffix=f'.{ext}') as tmp:
                file.save(tmp.name)
                temp_path = tmp.name

            try:
                # Get Gemini service and analyze image.
                service = gemini_service.get_gemini_service()
                ai_response = service.analyze_image(temp_path, AI_CONTACT_IMAGE_PROMPT)
            finally:
                # Always clean up the temp file, even if the AI call fails.
                # (Uses the module-level `os` import.)
                if os.path.exists(temp_path):
                    os.unlink(temp_path)

        else:
            # Handle text input.
            data = request.get_json() or {}
            # Support both 'text' (from frontend modal) and 'content' for
            # backwards compatibility.
            content = (data.get('text') or data.get('content', '')).strip()

            if not content:
                return jsonify({'success': False, 'error': 'Brak treści do analizy'}), 400

            # Get Gemini service and analyze text.
            service = gemini_service.get_gemini_service()
            prompt = AI_CONTACT_PARSE_PROMPT.format(input_text=content)
            ai_response = service.generate_text(
                prompt=prompt,
                feature='ai_contact_parse',
                user_id=current_user.id,
                temperature=0.3
            )

        # Extract the first {...} span - the model sometimes wraps the JSON
        # in prose despite the prompt. (Uses the module-level `re` import.)
        json_match = re.search(r'\{[\s\S]*\}', ai_response)
        if not json_match:
            logger.error(f"AI contact response not valid JSON: {ai_response[:500]}")
            return jsonify({
                'success': False,
                'error': 'AI nie zwróciło prawidłowej odpowiedzi. Spróbuj ponownie.'
            }), 500

        try:
            parsed = json.loads(json_match.group())
        except json.JSONDecodeError as e:
            logger.error(f"JSON parse error: {e}, response: {ai_response[:500]}")
            return jsonify({
                'success': False,
                'error': 'Błąd parsowania odpowiedzi AI. Spróbuj ponownie.'
            }), 500

        # Check for potential duplicates among active contacts.
        from database import ExternalContact
        proposed_contacts = parsed.get('contacts', [])

        for contact in proposed_contacts:
            # BUGFIX: the prompt tells the model to emit JSON null for unknown
            # fields. `.get(key, '')` returns that None when the key IS present,
            # and None.strip() raised AttributeError. Coalesce explicitly.
            first_name = (contact.get('first_name') or '').strip()
            last_name = (contact.get('last_name') or '').strip()
            org_name = (contact.get('organization_name') or '').strip()

            if first_name and last_name and org_name:
                # Check for existing similar contact (case-insensitive name
                # match, substring match on organization).
                existing = db.query(ExternalContact).filter(
                    ExternalContact.first_name.ilike(first_name),
                    ExternalContact.last_name.ilike(last_name),
                    ExternalContact.organization_name.ilike(f'%{org_name}%'),
                    ExternalContact.is_active == True
                ).first()

                if existing:
                    contact['warnings'] = contact.get('warnings', []) + [
                        f'Podobny kontakt może już istnieć: {existing.full_name} @ {existing.organization_name}'
                    ]
                    contact['potential_duplicate_id'] = existing.id

        logger.info(f"User {current_user.email} used AI to parse contacts: {len(proposed_contacts)} found")

        return jsonify({
            'success': True,
            'analysis': parsed.get('analysis', 'Analiza zakończona'),
            'contacts': proposed_contacts
        })

    except Exception as e:
        logger.error(f"Error in AI contact parse: {e}")
        return jsonify({'success': False, 'error': f'Błąd: {str(e)}'}), 500
    finally:
        db.close()
|
|
|
|
|
|
@app.route('/api/contacts/bulk-create', methods=['POST'])
@login_required
def contacts_bulk_create():
    """Create multiple external contacts from confirmed AI proposals.

    Expects JSON {"contacts": [...]} with the field names produced by
    contacts_ai_parse. Each contact is validated independently; failures are
    collected per item so one bad proposal does not abort the whole batch.

    Returns:
        JSON: {success, created, failed, message}; 400 when the list is empty,
        500 on an unexpected error (transaction rolled back).
    """
    from database import ExternalContact

    db = SessionLocal()
    try:
        data = request.get_json() or {}
        contacts_to_create = data.get('contacts', [])

        if not contacts_to_create:
            return jsonify({'success': False, 'error': 'Brak kontaktów do utworzenia'}), 400

        created = []
        failed = []

        def _clean(value):
            # BUGFIX helper: AI proposals use JSON null for unknown fields, so
            # `.get(key, '')` could return None and None.strip() raised,
            # pushing valid proposals into `failed`. Normalize to a stripped
            # string ('' when missing or null).
            return (value or '').strip()

        for contact_data in contacts_to_create:
            try:
                # Validate required fields.
                first_name = _clean(contact_data.get('first_name'))
                last_name = _clean(contact_data.get('last_name'))
                organization_name = _clean(contact_data.get('organization_name'))

                if not first_name or not last_name or not organization_name:
                    failed.append({
                        'name': f"{first_name} {last_name}",
                        'error': 'Brak wymaganych danych (imię, nazwisko lub organizacja)'
                    })
                    continue

                # Create contact; optional fields collapse to None when empty.
                contact = ExternalContact(
                    first_name=first_name,
                    last_name=last_name,
                    position=_clean(contact_data.get('position')) or None,
                    phone=_clean(contact_data.get('phone')) or None,
                    email=_clean(contact_data.get('email')) or None,
                    organization_name=organization_name,
                    organization_type=contact_data.get('organization_type') or 'other',
                    project_name=_clean(contact_data.get('project_name')) or None,
                    tags=_clean(contact_data.get('tags')) or None,
                    source_type='ai_import',
                    created_by=current_user.id
                )

                db.add(contact)
                # Flush so the generated primary key is available for the response.
                db.flush()

                created.append({
                    'id': contact.id,
                    'name': contact.full_name,
                    'organization': contact.organization_name
                })

            except Exception as e:
                # NOTE(review): a failed flush leaves the SQLAlchemy transaction
                # in a failed state, so subsequent items may also fail until a
                # rollback - consider per-item savepoints (begin_nested) if that
                # case matters in practice.
                failed.append({
                    'name': f"{contact_data.get('first_name', '')} {contact_data.get('last_name', '')}",
                    'error': str(e)
                })

        db.commit()

        logger.info(f"User {current_user.email} bulk created {len(created)} contacts via AI")

        return jsonify({
            'success': True,
            'created': created,
            'failed': failed,
            'message': f'Utworzono {len(created)} kontaktów' + (f', {len(failed)} błędów' if failed else '')
        })

    except Exception as e:
        db.rollback()
        logger.error(f"Error in contacts bulk create: {e}")
        return jsonify({'success': False, 'error': f'Błąd: {str(e)}'}), 500
    finally:
        db.close()
|
|
|
|
|
|
# ============================================================
|
|
# HONEYPOT ENDPOINTS (trap for malicious bots)
|
|
# ============================================================
|
|
|
|
@app.route('/wp-admin')
@app.route('/wp-admin/<path:path>')
@app.route('/wp-login.php')
@app.route('/administrator')
@app.route('/phpmyadmin')
@app.route('/phpmyadmin/<path:path>')
@app.route('/.env')
@app.route('/.git/config')
@app.route('/xmlrpc.php')
@app.route('/config.php')
@app.route('/admin.php')
def honeypot_trap(path=None):
    """
    Honeypot endpoints - log the probe and answer with a plain 404.

    These paths are the usual targets of automated scanners hunting for
    WordPress, phpMyAdmin, or exposed configuration files. Serving the
    standard 404 page avoids revealing that this is a trap.
    """
    # When behind a proxy, X-Forwarded-For holds a comma-separated hop list;
    # the first entry is the original client.
    forwarded = request.headers.get('X-Forwarded-For', request.remote_addr)
    client_ip = forwarded.split(',')[0].strip() if forwarded and ',' in forwarded else forwarded

    security_logger.warning(f"HONEYPOT ip={client_ip} path={request.path} ua={request.user_agent.string[:100]}")

    # Return 404 to not reveal this is a trap
    return render_template('errors/404.html'), 404
|
|
|
|
|
|
# ============================================================
|
|
# MAIN
|
|
# ============================================================
|
|
|
|
if __name__ == '__main__':
    # Default to port 5001 - macOS AirPlay occupies 5000.
    listen_port = int(os.getenv('PORT', 5001))
    dev_mode = os.getenv('FLASK_ENV') == 'development'

    logger.info(f"Starting Norda Biznes Partner on port {listen_port}")
    app.run(host='0.0.0.0', port=listen_port, debug=dev_mode)
|