nordabiz/app.py
Maciej Pienczyn ffc766d034 feat(contacts): Modal "Dodaj z AI" + widoki grupowanie/tabela
- Dodano modal "Dodaj z AI" z parsowaniem tekstu/obrazów przez Gemini
- API endpoints: /api/contacts/ai-parse, /api/contacts/bulk-create
- Nowy widok grupowania kontaktów po organizacji (domyślny)
- Widok tabeli dla kompaktowego przeglądu
- Przełącznik widoków z zapamiętywaniem preferencji
- Drag & drop dla zdjęć wizytówek
- Docker: PostgreSQL 16 (zgodność z produkcją)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-27 08:52:35 +01:00

14455 lines
515 KiB
Python

#!/usr/bin/env python3
"""
Norda Biznes Hub - Flask Application
====================================
Main Flask application for Norda Biznes company directory with AI chat.
Features:
- User authentication with email confirmation
- Company directory with advanced search
- AI chat assistant powered by Google Gemini
- PostgreSQL database integration
- Analytics dashboard for chat insights
Author: Norda Biznes Development Team
Created: 2025-11-23
"""
# Standard library
import json
import logging
import os
import re
import secrets
import time
import uuid
from collections import deque
from datetime import datetime, timedelta, date
from pathlib import Path

# Third-party
from dotenv import load_dotenv
from flask import (
    Flask, Response, abort, flash, jsonify, redirect,
    render_template, request, send_file, session, url_for
)
from flask_limiter import Limiter
from flask_limiter.util import get_remote_address
from flask_login import (
    LoginManager, current_user, login_required, login_user, logout_user
)
from flask_wtf.csrf import CSRFProtect
from user_agents import parse as parse_user_agent
from werkzeug.security import check_password_hash, generate_password_hash
# Load environment variables (override any existing env vars).
# Try .env first, then nordabiz_config.txt for production flexibility.
# (`os` is already imported at the top of the file; the redundant
# second `import os` that used to live here has been removed.)
if os.path.exists('.env'):
    load_dotenv('.env', override=True)
elif os.path.exists('nordabiz_config.txt'):
    load_dotenv('nordabiz_config.txt', override=True)
else:
    load_dotenv(override=True)
# Configure logging with in-memory buffer for debug panel
class DebugLogHandler(logging.Handler):
    """Logging handler that keeps the most recent records in memory.

    Backs the admin debug panel's real-time log viewer; the buffer is
    bounded, so old entries fall off automatically.
    """

    def __init__(self, max_logs=500):
        super().__init__()
        # Bounded FIFO: appending beyond maxlen silently drops the oldest entry.
        self.logs = deque(maxlen=max_logs)

    def emit(self, record):
        """Capture one log record as a plain dict (JSON-friendly for the panel)."""
        self.logs.append({
            'timestamp': datetime.now().isoformat(),
            'level': record.levelname,
            'logger': record.name,
            'message': self.format(record),
            'module': record.module,
            'funcName': record.funcName,
            'lineno': record.lineno,
        })
# Create the in-memory debug handler (consumed by the admin debug panel)
debug_handler = DebugLogHandler(max_logs=500)
debug_handler.setFormatter(logging.Formatter('%(message)s'))
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
# Attach the debug handler to the root logger so every module's logs are captured
logging.getLogger().addHandler(debug_handler)
logger = logging.getLogger(__name__)
# Security logger for fail2ban integration.
# Writes to /var/log/nordabiznes/security.log only when that directory
# exists (i.e. in production); in development it stays console-only.
security_logger = logging.getLogger('security')
security_logger.setLevel(logging.WARNING)
_security_log_path = '/var/log/nordabiznes/security.log'
if os.path.exists('/var/log/nordabiznes'):
    _security_handler = logging.FileHandler(_security_log_path)
    _security_handler.setFormatter(logging.Formatter(
        '%(asctime)s [%(levelname)s] %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S'
    ))
    security_logger.addHandler(_security_handler)
# Import database models
from database import (
init_db,
SessionLocal,
User,
Company,
Category,
Service,
Competency,
CompanyDigitalMaturity,
CompanyWebsiteAnalysis,
CompanyQualityTracking,
CompanyWebsiteContent,
CompanyAIInsights,
CompanyEvent,
CompanySocialMedia,
CompanyContact,
AIChatConversation,
AIChatMessage,
AIChatFeedback,
AIAPICostLog,
ForumTopic,
ForumReply,
ForumAttachment,
NordaEvent,
EventAttendee,
PrivateMessage,
Classified,
UserNotification,
CompanyRecommendation,
MembershipFee,
MembershipFeeConfig,
Person,
CompanyPerson,
GBPAudit,
ITAudit,
KRSAudit,
CompanyPKD,
CompanyFinancialReport,
UserSession,
PageView,
UserClick,
AnalyticsDaily,
PopularPagesDaily,
AuditLog,
SecurityAlert
)
# Import services
import gemini_service
from nordabiz_chat import NordaBizChatEngine
from search_service import search_companies
import krs_api_service
from file_upload_service import FileUploadService
# Security service for audit log, alerting, GeoIP, 2FA
try:
from security_service import (
log_audit, create_security_alert, get_client_ip,
is_ip_allowed, geoip_check, init_security_service,
generate_totp_secret, get_totp_uri, verify_totp,
generate_backup_codes, verify_backup_code, requires_2fa
)
SECURITY_SERVICE_AVAILABLE = True
except ImportError as e:
SECURITY_SERVICE_AVAILABLE = False
logger.warning(f"Security service not available: {e}")
# News service for fetching company news
try:
from news_service import NewsService, get_news_service, init_news_service
NEWS_SERVICE_AVAILABLE = True
except ImportError:
NEWS_SERVICE_AVAILABLE = False
logger.warning("News service not available")
# SEO audit components for triggering audits via API
import sys
_scripts_path = os.path.join(os.path.dirname(__file__), 'scripts')
if _scripts_path not in sys.path:
sys.path.insert(0, _scripts_path)
try:
from seo_audit import SEOAuditor, SEO_AUDIT_VERSION
SEO_AUDIT_AVAILABLE = True
except ImportError as e:
SEO_AUDIT_AVAILABLE = False
logger.warning(f"SEO audit service not available: {e}")
# GBP (Google Business Profile) audit service
try:
from gbp_audit_service import (
GBPAuditService,
audit_company as gbp_audit_company,
get_company_audit as gbp_get_company_audit,
fetch_google_business_data as gbp_fetch_google_data
)
GBP_AUDIT_AVAILABLE = True
GBP_AUDIT_VERSION = '1.0'
except ImportError as e:
GBP_AUDIT_AVAILABLE = False
GBP_AUDIT_VERSION = None
logger.warning(f"GBP audit service not available: {e}")
# KRS (Krajowy Rejestr Sądowy) audit service
try:
from krs_audit_service import parse_krs_pdf, parse_krs_pdf_full
KRS_AUDIT_AVAILABLE = True
KRS_AUDIT_VERSION = '1.0'
except ImportError as e:
KRS_AUDIT_AVAILABLE = False
KRS_AUDIT_VERSION = None
logger.warning(f"KRS audit service not available: {e}")
# Initialize Flask app
app = Flask(__name__)
# Security: require a strong SECRET_KEY (no default allowed - fail fast at startup)
SECRET_KEY = os.getenv('SECRET_KEY')
if not SECRET_KEY or len(SECRET_KEY) < 32:
    raise ValueError("SECRET_KEY must be set in environment variables and be at least 32 characters long")
app.config['SECRET_KEY'] = SECRET_KEY
app.config['PERMANENT_SESSION_LIFETIME'] = timedelta(days=7)
# Security configurations
app.config['WTF_CSRF_ENABLED'] = True
app.config['WTF_CSRF_TIME_LIMIT'] = None  # No time limit for CSRF tokens
app.config['SESSION_COOKIE_SECURE'] = os.getenv('FLASK_ENV') != 'development'  # HTTPS-only cookie outside development
app.config['SESSION_COOKIE_HTTPONLY'] = True
app.config['SESSION_COOKIE_SAMESITE'] = 'Lax'
# Template filters
@app.template_filter('ensure_url')
def ensure_url_filter(url):
    """Jinja filter: prefix scheme-less URLs with https:// so links resolve."""
    if not url or url.startswith(('http://', 'https://')):
        return url
    return f'https://{url}'
# Initialize CSRF protection
csrf = CSRFProtect(app)
# Initialize rate limiter with Redis storage (persistent across restarts).
# Falls back to in-memory storage when Redis is unreachable.
_redis_available = False
try:
    import redis
    _redis_client = redis.Redis(host='localhost', port=6379, db=0)
    _redis_client.ping()  # raises if the server is down/unreachable
    _redis_available = True
    logger.info("Rate limiter using Redis storage")
except Exception:
    logger.warning("Redis unavailable, rate limiter using memory storage")
limiter = Limiter(
    app=app,
    key_func=get_remote_address,
    default_limits=["200 per day", "50 per hour"],
    storage_uri="redis://localhost:6379/0" if _redis_available else "memory://"
)
@limiter.request_filter
def is_admin_exempt():
    """Rate-limit exemption: authenticated admins bypass all limits."""
    from flask_login import current_user
    try:
        if not current_user.is_authenticated:
            return False
        return current_user.is_admin
    except Exception:
        # Outside a request context (or any other failure) -> apply limits.
        return False
# Initialize database
init_db()
# Initialize Login Manager (redirects unauthenticated users to /login)
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'login'
login_manager.login_message = 'Zaloguj się, aby uzyskać dostęp do tej strony.'
# Initialize Gemini service (best-effort: the app still starts without AI)
try:
    gemini_service.init_gemini_service(model='flash-lite')  # Gemini 2.5 Flash-Lite (free tier, 1000 RPD)
    logger.info("Gemini service initialized successfully")
except Exception as e:
    logger.error(f"Failed to initialize Gemini service: {e}")
@login_manager.user_loader
def load_user(user_id):
    """Flask-Login callback: fetch a User row by its primary key."""
    db_session = SessionLocal()
    try:
        return db_session.query(User).filter_by(id=int(user_id)).first()
    finally:
        db_session.close()
# ============================================================
# TEMPLATE CONTEXT PROCESSORS
# ============================================================
@app.context_processor
def inject_globals():
    """Make common time values available in every template render."""
    current = datetime.now()
    return {
        'current_year': current.year,
        # Must be a value, not a method - templates call now.strftime() directly.
        'now': current,
    }
@app.context_processor
def inject_notifications():
    """Expose the current user's unread-notification count to all templates."""
    if not current_user.is_authenticated:
        return {'unread_notifications_count': 0}
    db = SessionLocal()
    try:
        count = db.query(UserNotification).filter(
            UserNotification.user_id == current_user.id,
            UserNotification.is_read == False
        ).count()
        return {'unread_notifications_count': count}
    finally:
        db.close()
# ============================================================
# NOTIFICATION HELPERS
# ============================================================
def create_notification(user_id, title, message, notification_type='info',
                        related_type=None, related_id=None, action_url=None):
    """
    Create and persist a notification for a single user.

    Args:
        user_id: ID of the user to notify
        title: Notification title
        message: Notification message/body
        notification_type: Type of notification (news, system, message, event, alert)
        related_type: Type of related entity (company_news, event, message, etc.)
        related_id: ID of the related entity
        action_url: URL to navigate to when the notification is clicked

    Returns:
        The persisted UserNotification, or None when saving failed.
    """
    db = SessionLocal()
    try:
        row = UserNotification(
            user_id=user_id,
            title=title,
            message=message,
            notification_type=notification_type,
            related_type=related_type,
            related_id=related_id,
            action_url=action_url,
        )
        db.add(row)
        db.commit()
        db.refresh(row)
        logger.info(f"Created notification for user {user_id}: {title}")
        return row
    except Exception as e:
        # Notifications are best-effort: log, roll back, never propagate.
        logger.error(f"Error creating notification: {e}")
        db.rollback()
        return None
    finally:
        db.close()
def create_news_notification(company_id, news_id, news_title):
    """
    Notify every active user linked to a company that their news was approved.

    Args:
        company_id: ID of the company
        news_id: ID of the approved news item
        news_title: Title of the news item
    """
    db = SessionLocal()
    try:
        # Every active user associated with the company receives one notification.
        recipients = db.query(User).filter(
            User.company_id == company_id,
            User.is_active == True
        ).all()
        for recipient in recipients:
            create_notification(
                user_id=recipient.id,
                title="Nowa aktualnosc o Twojej firmie",
                message=f"Aktualnosc '{news_title}' zostala zatwierdzona i jest widoczna na profilu firmy.",
                notification_type='news',
                related_type='company_news',
                related_id=news_id,
                action_url=f"/company/{company_id}"
            )
    finally:
        db.close()
# ============================================================
# USER ANALYTICS - TRACKING HELPERS
# ============================================================
# Per-request map of PageView IDs for template injection, keyed by id(request).
# Entries are created in track_page_view and removed in cleanup_page_view_id
# on teardown_request.
# NOTE(review): id(request) is only unique while the request object is alive -
# confirm this is safe under the deployed WSGI worker model.
_current_page_view_id = {}
def get_or_create_analytics_session():
    """
    Get existing analytics session or create new one.

    The browser-side identifier is a UUID stored in the Flask session cookie;
    it maps to a UserSession row in the database.

    Returns:
        The database session ID (integer), or None when tracking failed.
    """
    analytics_session_id = session.get('analytics_session_id')
    if not analytics_session_id:
        # First visit in this cookie session - mint a new identifier.
        analytics_session_id = str(uuid.uuid4())
        session['analytics_session_id'] = analytics_session_id
    db = SessionLocal()
    try:
        user_session = db.query(UserSession).filter_by(session_id=analytics_session_id).first()
        if not user_session:
            # Parse user agent to classify device/browser/OS for the new row.
            ua_string = request.headers.get('User-Agent', '')
            try:
                ua = parse_user_agent(ua_string)
                device_type = 'mobile' if ua.is_mobile else ('tablet' if ua.is_tablet else 'desktop')
                browser = ua.browser.family
                browser_version = ua.browser.version_string
                os_name = ua.os.family
                os_version = ua.os.version_string
            except Exception:
                # Unparseable UA -> generic desktop defaults.
                device_type = 'desktop'
                browser = 'Unknown'
                browser_version = ''
                os_name = 'Unknown'
                os_version = ''
            # Truncate free-form fields to their column sizes before insert.
            user_session = UserSession(
                session_id=analytics_session_id,
                user_id=current_user.id if current_user.is_authenticated else None,
                ip_address=request.remote_addr,
                user_agent=ua_string[:2000] if ua_string else None,
                device_type=device_type,
                browser=browser[:50] if browser else None,
                browser_version=browser_version[:20] if browser_version else None,
                os=os_name[:50] if os_name else None,
                os_version=os_version[:20] if os_version else None
            )
            db.add(user_session)
            db.commit()
            db.refresh(user_session)
        else:
            # Refresh last activity; attach the user if they logged in mid-session.
            user_session.last_activity_at = datetime.now()
            if current_user.is_authenticated and not user_session.user_id:
                user_session.user_id = current_user.id
            db.commit()
        return user_session.id
    except Exception as e:
        # Analytics must never break the request - log and report failure.
        logger.error(f"Analytics session error: {e}")
        db.rollback()
        return None
    finally:
        db.close()
@app.before_request
def check_geoip():
    """Block requests from high-risk countries (RU, CN, KP, IR, BY, SY, VE, CU).

    Skips static assets and the health endpoint. When the security service
    failed to import, the check is a no-op (fail open) so the app keeps
    serving traffic instead of raising NameError on every request.
    """
    # Skip static files and health checks
    if request.path.startswith('/static') or request.path == '/health':
        return
    # Fix: is_ip_allowed/geoip helpers come from the guarded security_service
    # import at the top of the file - bail out if it was unavailable.
    if not SECURITY_SERVICE_AVAILABLE:
        return
    if not is_ip_allowed():
        # Behind a proxy the real client is the first hop of X-Forwarded-For.
        ip = request.headers.get('X-Forwarded-For', request.remote_addr)
        if ip:
            ip = ip.split(',')[0].strip()
        from security_service import get_country_code
        country = get_country_code(ip)
        logger.warning(f"GEOIP_BLOCKED ip={ip} country={country} path={request.path}")
        # Create alert for blocked access (best-effort; never prevents the 403)
        try:
            db = SessionLocal()
            from security_service import create_security_alert
            create_security_alert(
                db, 'geo_blocked', 'low',
                ip_address=ip,
                details={'country': country, 'path': request.path, 'user_agent': request.user_agent.string[:200]}
            )
            db.commit()
            db.close()
        except Exception as e:
            logger.error(f"Failed to create geo block alert: {e}")
        # Fix: `abort` was used here without being imported from flask;
        # the deps update adds it to the top-of-file flask import.
        abort(403)
@app.before_request
def track_page_view():
    """Track page views (excluding static files, API calls and health checks).

    Creates/updates the analytics session, inserts a PageView row, bumps the
    session's page counter, and stashes the new page_view_id for client-side
    click tracking. All failures are logged and swallowed so analytics can
    never break a request.
    """
    # Skip static files
    if request.path.startswith('/static'):
        return
    # Skip API calls except selected ones
    if request.path.startswith('/api'):
        return
    # Skip analytics tracking endpoints
    # (already excluded by the /api check above; kept defensively)
    if request.path in ['/api/analytics/track', '/api/analytics/heartbeat']:
        return
    # Skip health checks
    if request.path == '/health':
        return
    # Skip favicon
    if request.path == '/favicon.ico':
        return
    try:
        session_db_id = get_or_create_analytics_session()
        if not session_db_id:
            # Session tracking failed upstream - nothing to attach the view to.
            return
        db = SessionLocal()
        try:
            # Truncate free-form fields to their column sizes before insert.
            page_view = PageView(
                session_id=session_db_id,
                user_id=current_user.id if current_user.is_authenticated else None,
                url=request.url[:2000] if request.url else '',
                path=request.path[:500] if request.path else '/',
                referrer=request.referrer[:2000] if request.referrer else None
            )
            # Extract company_id from path if on a company page (slug-based URL)
            if request.path.startswith('/company/'):
                try:
                    slug = request.path.split('/')[2].split('?')[0]
                    company = db.query(Company).filter_by(slug=slug).first()
                    if company:
                        page_view.company_id = company.id
                except Exception:
                    pass  # best-effort enrichment only
            db.add(page_view)
            # Update session page count
            user_session = db.query(UserSession).filter_by(id=session_db_id).first()
            if user_session:
                user_session.page_views_count = (user_session.page_views_count or 0) + 1
            db.commit()
            # Store page_view_id for click tracking (in request context)
            _current_page_view_id[id(request)] = page_view.id
        except Exception as e:
            logger.error(f"Page view tracking error: {e}")
            db.rollback()
        finally:
            db.close()
    except Exception as e:
        logger.error(f"Page view tracking outer error: {e}")
@app.context_processor
def inject_page_view_id():
    """Expose this request's page_view_id to templates for JS click tracking."""
    return {'page_view_id': _current_page_view_id.get(id(request), '')}
@app.teardown_request
def cleanup_page_view_id(exception=None):
    """Drop this request's entry from the page-view map so it cannot leak."""
    _current_page_view_id.pop(id(request), None)
# ============================================================
# SECURITY MIDDLEWARE & HELPERS
# ============================================================
@app.after_request
def set_security_headers(response):
    """Attach standard security headers (including CSP) to every response."""
    security_headers = {
        'X-Content-Type-Options': 'nosniff',
        'X-Frame-Options': 'SAMEORIGIN',
        'X-XSS-Protection': '1; mode=block',
        'Strict-Transport-Security': 'max-age=31536000; includeSubDomains',
        'Referrer-Policy': 'strict-origin-when-cross-origin',
        # Content Security Policy: self plus the CDNs the templates rely on.
        'Content-Security-Policy': (
            "default-src 'self'; "
            "script-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net; "
            "style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com; "
            "img-src 'self' data: https:; "
            "font-src 'self' https://cdn.jsdelivr.net https://fonts.gstatic.com; "
            "connect-src 'self'"
        ),
    }
    for header_name, header_value in security_headers.items():
        response.headers[header_name] = header_value
    return response
def validate_email(email):
    """Return True when *email* matches a simplified RFC 5322 address pattern."""
    if not email or len(email) > 255:
        return False
    return re.match(r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$', email) is not None
def validate_password(password):
    """
    Check password strength.

    Requirements: at least 8 characters, one uppercase letter, one lowercase
    letter and one digit.

    Returns:
        (bool, str): (is_valid, message). The message is "OK" on success,
        otherwise a Polish error description shown in the UI.
    """
    if not password or len(password) < 8:
        return False, "Hasło musi mieć minimum 8 znaków"
    rules = (
        (r'[A-Z]', "Hasło musi zawierać przynajmniej jedną wielką literę"),
        (r'[a-z]', "Hasło musi zawierać przynajmniej jedną małą literę"),
        (r'\d', "Hasło musi zawierać przynajmniej jedną cyfrę"),
    )
    for pattern, error_message in rules:
        if not re.search(pattern, password):
            return False, error_message
    return True, "OK"
def sanitize_input(text, max_length=1000):
    """Sanitize user input: drop null bytes, truncate, strip whitespace."""
    if not text:
        return ""
    # Order matters: remove NULs first, then enforce length, then trim edges.
    return text.replace('\x00', '')[:max_length].strip()
def get_free_tier_usage():
    """
    Get today's Gemini API usage for free tier tracking.

    Returns:
        Dict with requests_today and tokens_today (zeros on any error).
    """
    from datetime import date
    from sqlalchemy import func
    db = SessionLocal()
    try:
        stats = db.query(
            func.count(AIAPICostLog.id).label('requests'),
            func.coalesce(func.sum(AIAPICostLog.total_tokens), 0).label('tokens')
        ).filter(
            func.date(AIAPICostLog.timestamp) == date.today(),
            AIAPICostLog.api_provider == 'gemini'
        ).first()
        return {
            'requests_today': stats.requests or 0,
            'tokens_today': int(stats.tokens or 0)
        }
    except Exception as e:
        logger.warning(f"Failed to get free tier usage: {e}")
        return {'requests_today': 0, 'tokens_today': 0}
    finally:
        db.close()
def get_brave_api_usage():
    """
    Get Brave Search API usage for the current month and day.

    Brave free tier allows 2000 requests/month.

    Returns:
        Dict with usage counts, the monthly limit, remaining quota, usage
        percentage and a limit-reached flag. Safe zero defaults on error.
    """
    from datetime import date
    from sqlalchemy import func, extract
    monthly_limit = 2000  # Brave free tier
    db = SessionLocal()
    try:
        today = date.today()
        # Requests logged during the current calendar month
        month_row = db.query(
            func.count(AIAPICostLog.id).label('requests')
        ).filter(
            extract('month', AIAPICostLog.timestamp) == today.month,
            extract('year', AIAPICostLog.timestamp) == today.year,
            AIAPICostLog.api_provider == 'brave'
        ).first()
        # Requests logged today
        day_row = db.query(
            func.count(AIAPICostLog.id).label('requests')
        ).filter(
            func.date(AIAPICostLog.timestamp) == today,
            AIAPICostLog.api_provider == 'brave'
        ).first()
        used_month = month_row.requests or 0
        used_today = day_row.requests or 0
        return {
            'requests_today': used_today,
            'requests_this_month': used_month,
            'monthly_limit': monthly_limit,
            'remaining': max(0, monthly_limit - used_month),
            'usage_percent': round((used_month / monthly_limit) * 100, 1) if monthly_limit > 0 else 0,
            'tier': 'free',
            'is_limit_reached': used_month >= monthly_limit
        }
    except Exception as e:
        logger.warning(f"Failed to get Brave API usage: {e}")
        return {
            'requests_today': 0,
            'requests_this_month': 0,
            'monthly_limit': 2000,
            'remaining': 2000,
            'usage_percent': 0,
            'tier': 'free',
            'is_limit_reached': False
        }
    finally:
        db.close()
def log_brave_api_call(user_id=None, feature='news_search', company_name=None):
    """
    Record a single Brave API call in the cost log for usage tracking.

    Args:
        user_id: User who triggered the call (optional)
        feature: Feature name (news_search, etc.)
        company_name: Company being searched (only used in the debug log line)
    """
    db = SessionLocal()
    try:
        # Brave calls carry no token counts - only the row itself is counted.
        db.add(AIAPICostLog(
            api_provider='brave',
            model_name='search_api',
            feature=feature,
            user_id=user_id,
            input_tokens=0,
            output_tokens=0,
            total_tokens=0
        ))
        db.commit()
        logger.debug(f"Logged Brave API call: {feature} for {company_name}")
    except Exception as e:
        logger.error(f"Failed to log Brave API call: {e}")
        db.rollback()
    finally:
        db.close()
# ============================================================
# HEALTH CHECK
# ============================================================
@app.route('/health')
def health():
    """Lightweight liveness probe for external monitoring - 200 when the app is up."""
    return {'status': 'ok'}, 200
@app.route('/health/full')
def health_full():
    """
    Extended health check - verifies all critical endpoints in-process.

    Probes each route with Flask's test client (no network) and returns a
    per-endpoint report. Responds 200 when everything passes, 503 otherwise.
    Access: /health/full
    """
    results = []
    all_ok = True
    # List of ALL endpoints to check as (path, display name) pairs.
    # Comprehensive list updated 2026-01-17
    endpoints = [
        # ========== PUBLIC PAGES ==========
        ('/', 'Strona główna'),
        ('/login', 'Logowanie'),
        ('/register', 'Rejestracja'),
        ('/release-notes', 'Historia zmian'),
        ('/search?q=test', 'Wyszukiwarka'),
        ('/aktualnosci', 'Aktualności'),
        ('/forum', 'Forum'),
        ('/kalendarz', 'Kalendarz wydarzeń'),
        ('/tablica', 'Tablica ogłoszeń'),
        ('/nowi-czlonkowie', 'Nowi członkowie'),
        ('/mapa-polaczen', 'Mapa połączeń'),
        ('/forgot-password', 'Reset hasła'),
        # ========== REPORTS ==========
        ('/raporty', 'Raporty'),
        ('/raporty/staz-czlonkostwa', 'Raport: Staż członkostwa'),
        ('/raporty/social-media', 'Raport: Social Media'),
        ('/raporty/struktura-branzowa', 'Raport: Struktura branżowa'),
        # ========== ZOPK PUBLIC ==========
        ('/zopk', 'ZOPK: Strona główna'),
        ('/zopk/aktualnosci', 'ZOPK: Aktualności'),
        # ========== CHAT ==========
        ('/chat', 'NordaGPT Chat'),
        # ========== IT AUDIT ==========
        ('/it-audit/form', 'IT Audit: Formularz'),
        # ========== PUBLIC API ==========
        ('/api/companies', 'API: Lista firm'),
        ('/api/model-info', 'API: Model info'),
        ('/api/gbp/audit/health', 'API: GBP health'),
        # ========== ADMIN: CORE ==========
        ('/admin/security', 'Admin: Bezpieczeństwo'),
        ('/admin/analytics', 'Admin: Analityka'),
        ('/admin/status', 'Admin: Status systemu'),
        ('/admin/health', 'Admin: Health dashboard'),
        ('/admin/debug', 'Admin: Debug'),
        ('/admin/ai-usage', 'Admin: AI Usage'),
        ('/admin/chat-analytics', 'Admin: Chat analytics'),
        ('/admin/users', 'Admin: Użytkownicy'),
        ('/admin/recommendations', 'Admin: Rekomendacje'),
        ('/admin/fees', 'Admin: Składki'),
        # ========== ADMIN: AUDITS ==========
        ('/admin/seo', 'Admin: SEO Audit'),
        ('/admin/gbp-audit', 'Admin: GBP Audit'),
        ('/admin/social-media', 'Admin: Social Media'),
        ('/admin/social-audit', 'Admin: Social Audit'),
        ('/admin/it-audit', 'Admin: IT Audit'),
        ('/admin/digital-maturity', 'Admin: Digital Maturity'),
        ('/admin/krs-audit', 'Admin: KRS Audit'),
        # ========== ADMIN: COMMUNITY ==========
        ('/admin/forum', 'Admin: Forum'),
        ('/admin/kalendarz', 'Admin: Kalendarz'),
        # ========== ADMIN: ZOPK ==========
        ('/admin/zopk', 'Admin: ZOPK Panel'),
        ('/admin/zopk/news', 'Admin: ZOPK News'),
        ('/admin/zopk/knowledge', 'Admin: ZOPK Knowledge'),
        ('/admin/zopk/knowledge/chunks', 'Admin: ZOPK Chunks'),
        ('/admin/zopk/knowledge/facts', 'Admin: ZOPK Facts'),
        ('/admin/zopk/knowledge/entities', 'Admin: ZOPK Entities'),
        ('/admin/zopk/knowledge/duplicates', 'Admin: ZOPK Duplikaty'),
        ('/admin/zopk/knowledge/fact-duplicates', 'Admin: ZOPK Fact Duplicates'),
        ('/admin/zopk/knowledge/graph', 'Admin: ZOPK Graf'),
        ('/admin/zopk/timeline', 'Admin: ZOPK Timeline'),
        # ========== ZOPK API ==========
        ('/api/zopk/milestones', 'API: ZOPK Milestones'),
        ('/api/zopk/knowledge/dashboard-stats', 'API: ZOPK Dashboard stats'),
    ]
    # Also include one company profile page in the check list
    db = SessionLocal()
    try:
        random_company = db.query(Company).first()
        if random_company:
            endpoints.append((f'/company/{random_company.slug}', f'Profil: {random_company.name[:25]}'))
    finally:
        db.close()
    # Probe every endpoint using the in-process test client
    with app.test_client() as client:
        for path, name in endpoints:
            try:
                response = client.get(path, follow_redirects=False)
                status = response.status_code
                # 200 = OK, 302 = redirect (e.g. to login) = OK
                # 429 = rate limited (endpoint works, just throttled)
                # 500 = server error, 404 = not found
                if status in (200, 302, 304, 429):
                    results.append({
                        'endpoint': path,
                        'name': name,
                        'status': status,
                        'ok': True
                    })
                else:
                    results.append({
                        'endpoint': path,
                        'name': name,
                        'status': status,
                        'ok': False
                    })
                    all_ok = False
            except Exception as e:
                # The request itself blew up - record it as a 500-equivalent.
                results.append({
                    'endpoint': path,
                    'name': name,
                    'status': 500,
                    'ok': False,
                    'error': str(e)[:100]
                })
                all_ok = False
    # Summary
    passed = sum(1 for r in results if r['ok'])
    failed = len(results) - passed
    return {
        'status': 'ok' if all_ok else 'degraded',
        'summary': {
            'total': len(results),
            'passed': passed,
            'failed': failed
        },
        'endpoints': results,
        'timestamp': datetime.now().isoformat()
    }, 200 if all_ok else 503
# ============================================================
# PUBLIC ROUTES
# ============================================================
@app.route('/')
def index():
    """Homepage - landing page for guests, company directory for logged-in users."""
    if not current_user.is_authenticated:
        # Landing page for guests
        db = SessionLocal()
        try:
            total_companies = db.query(Company).filter_by(status='active').count()
            total_categories = db.query(Category).count()
            return render_template(
                'landing.html',
                total_companies=total_companies,
                total_categories=total_categories
            )
        finally:
            db.close()
    # Company directory for logged in users
    db = SessionLocal()
    try:
        from datetime import date
        companies = db.query(Company).filter_by(status='active').order_by(Company.name).all()
        categories = db.query(Category).order_by(Category.sort_order).all()
        total_companies = len(companies)
        # Count only categories that actually contain at least one company.
        # NOTE(review): this issues one COUNT query per category - could be
        # folded into a single grouped query if category counts grow.
        total_categories = len([c for c in categories if db.query(Company).filter_by(category_id=c.id).count() > 0])
        # Next upcoming event (for the "Who's attending?" banner)
        next_event = db.query(NordaEvent).filter(
            NordaEvent.event_date >= date.today()
        ).order_by(NordaEvent.event_date.asc()).first()
        # Check whether the current user is already registered for that event
        user_registered = False
        if next_event:
            user_registered = db.query(EventAttendee).filter(
                EventAttendee.event_id == next_event.id,
                EventAttendee.user_id == current_user.id
            ).first() is not None
        return render_template(
            'index.html',
            companies=companies,
            categories=categories,
            total_companies=total_companies,
            total_categories=total_categories,
            next_event=next_event,
            user_registered=user_registered
        )
    finally:
        db.close()
@app.route('/company/<int:company_id>')
# @login_required  # Public access
def company_detail(company_id):
    """Company detail page (public access; enrich controls shown only to
    admins or the company's owner)."""
    db = SessionLocal()
    try:
        company = db.query(Company).filter_by(id=company_id).first()
        if not company:
            flash('Firma nie znaleziona.', 'error')
            return redirect(url_for('index'))
        # Load digital maturity data if available
        maturity_data = db.query(CompanyDigitalMaturity).filter_by(company_id=company_id).first()
        # Get latest website analysis sorted by audit date (consistent with seo_audit_dashboard)
        website_analysis = db.query(CompanyWebsiteAnalysis).filter_by(
            company_id=company_id
        ).order_by(CompanyWebsiteAnalysis.seo_audited_at.desc()).first()
        # Load quality tracking data
        quality_data = db.query(CompanyQualityTracking).filter_by(company_id=company_id).first()
        # Load company events (latest 10)
        events = db.query(CompanyEvent).filter_by(company_id=company_id).order_by(
            CompanyEvent.event_date.desc(),
            CompanyEvent.created_at.desc()
        ).limit(10).all()
        # Load website scraping data (most recent)
        website_content = db.query(CompanyWebsiteContent).filter_by(company_id=company_id).order_by(
            CompanyWebsiteContent.scraped_at.desc()
        ).first()
        # Load AI insights
        ai_insights = db.query(CompanyAIInsights).filter_by(company_id=company_id).first()
        # Load social media profiles
        social_media = db.query(CompanySocialMedia).filter_by(company_id=company_id).all()
        # Load company contacts (phones, emails with sources), primary first per type
        contacts = db.query(CompanyContact).filter_by(company_id=company_id).order_by(
            CompanyContact.contact_type,
            CompanyContact.is_primary.desc()
        ).all()
        # Load recommendations (approved only, with recommender details)
        recommendations = db.query(CompanyRecommendation).filter_by(
            company_id=company_id,
            status='approved'
        ).join(User, CompanyRecommendation.user_id == User.id).order_by(
            CompanyRecommendation.created_at.desc()
        ).all()
        # Load people connected to the company (board members, partners, proxies)
        people = db.query(CompanyPerson).filter_by(
            company_id=company_id
        ).join(Person, CompanyPerson.person_id == Person.id).order_by(
            CompanyPerson.role_category,
            Person.nazwisko
        ).all()
        # Load GBP audit (most recent)
        gbp_audit = db.query(GBPAudit).filter_by(
            company_id=company_id
        ).order_by(GBPAudit.audit_date.desc()).first()
        # Load IT audit (most recent)
        it_audit = db.query(ITAudit).filter_by(
            company_id=company_id
        ).order_by(ITAudit.audit_date.desc()).first()
        # Load PKD codes (all - primary first)
        pkd_codes = db.query(CompanyPKD).filter_by(
            company_id=company_id
        ).order_by(CompanyPKD.is_primary.desc(), CompanyPKD.pkd_code).all()
        # Check if current user can enrich company data (admin or company owner)
        can_enrich = False
        if current_user.is_authenticated:
            can_enrich = current_user.is_admin or (current_user.company_id == company.id)
        return render_template('company_detail.html',
            company=company,
            maturity_data=maturity_data,
            website_analysis=website_analysis,
            quality_data=quality_data,
            events=events,
            website_content=website_content,
            ai_insights=ai_insights,
            social_media=social_media,
            contacts=contacts,
            recommendations=recommendations,
            people=people,
            gbp_audit=gbp_audit,
            it_audit=it_audit,
            pkd_codes=pkd_codes,
            can_enrich=can_enrich
        )
    finally:
        db.close()
@app.route('/company/<slug>')
# @login_required  # Disabled - public access
def company_detail_by_slug(slug):
    """Resolve a company slug and redirect to the canonical numeric-ID route."""
    db = SessionLocal()
    try:
        company = db.query(Company).filter_by(slug=slug).first()
        if company:
            # Redirect to canonical int ID route
            return redirect(url_for('company_detail', company_id=company.id))
        flash('Firma nie znaleziona.', 'error')
        return redirect(url_for('index'))
    finally:
        db.close()
@app.route('/osoba/<int:person_id>')
def person_detail(person_id):
    """Person detail page - shows registry data and portal data if available."""
    db = SessionLocal()
    try:
        # Get person with their company relationships
        person = db.query(Person).filter_by(id=person_id).first()
        if not person:
            flash('Osoba nie znaleziona.', 'error')
            return redirect(url_for('index'))
        # Get company roles with company details (only active companies)
        company_roles = db.query(CompanyPerson).filter_by(
            person_id=person_id
        ).join(Company, CompanyPerson.company_id == Company.id).filter(
            Company.status == 'active'
        ).order_by(
            CompanyPerson.role_category,
            Company.name
        ).all()
        # Try to find matching user account by name (for portal data).
        # Simple heuristic - scans all named users; in production this may
        # need more sophisticated matching (and can be slow on large tables).
        portal_user = None
        name_parts = person.full_name().upper().split()
        if len(name_parts) >= 2:
            # Try to find user where first/last name matches
            potential_users = db.query(User).filter(
                User.name.isnot(None)
            ).all()
            for u in potential_users:
                if u.name:
                    user_name_parts = u.name.upper().split()
                    # Require an exact last-name hit plus at least one of the
                    # remaining (first/middle) name parts to appear as well.
                    if len(user_name_parts) >= 2:
                        if (user_name_parts[-1] in name_parts and  # Last name match
                            any(part in user_name_parts for part in name_parts[:-1])):  # First name match
                            portal_user = u
                            break
        return render_template('person_detail.html',
            person=person,
            company_roles=company_roles,
            portal_user=portal_user
        )
    finally:
        db.close()
@app.route('/company/<slug>/recommend', methods=['GET', 'POST'])
# @login_required  # Disabled - public access (viewing the form is public)
def company_recommend(slug):
    """Create a recommendation for a company.

    GET renders the form; POST validates and stores a 'pending' recommendation
    awaiting moderation. Bugfix: with @login_required commented out this route
    is reachable anonymously, but the POST path dereferences
    current_user.company_id / current_user.id — flask_login's anonymous user
    has neither attribute, so guests got a 500. We now guard explicitly.
    """
    db = SessionLocal()
    try:
        company = db.query(Company).filter_by(slug=slug).first()
        if not company:
            flash('Firma nie znaleziona.', 'error')
            return redirect(url_for('index'))
        if request.method == 'POST':
            # Submitting requires a logged-in user (anonymous users previously crashed here).
            if not current_user.is_authenticated:
                flash('Zaloguj się, aby polecić firmę.', 'error')
                return redirect(url_for('company_detail', company_id=company.id))
            recommendation_text = request.form.get('recommendation_text', '').strip()
            service_category = sanitize_input(request.form.get('service_category', ''), 200)
            show_contact = request.form.get('show_contact') == '1'
            # Length bounds: 50..2000 characters.
            if not recommendation_text or len(recommendation_text) < 50:
                flash('Rekomendacja musi mieć co najmniej 50 znaków.', 'error')
                return render_template('company/recommend.html', company=company)
            if len(recommendation_text) > 2000:
                flash('Rekomendacja może mieć maksymalnie 2000 znaków.', 'error')
                return render_template('company/recommend.html', company=company)
            # Prevent self-recommendation.
            if current_user.company_id == company.id:
                flash('Nie możesz polecać własnej firmy.', 'error')
                return redirect(url_for('company_detail', company_id=company.id))
            # One recommendation per user per company.
            existing = db.query(CompanyRecommendation).filter_by(
                user_id=current_user.id,
                company_id=company.id
            ).first()
            if existing:
                flash('Już poleciłeś tę firmę. Możesz edytować swoją wcześniejszą rekomendację.', 'error')
                return redirect(url_for('company_detail', company_id=company.id))
            recommendation = CompanyRecommendation(
                company_id=company.id,
                user_id=current_user.id,
                recommendation_text=recommendation_text,
                service_category=service_category if service_category else None,
                show_contact=show_contact,
                status='pending'  # moderated later in /admin/recommendations
            )
            db.add(recommendation)
            db.commit()
            flash('Dziękujemy! Twoja rekomendacja została przesłana i oczekuje na moderację.', 'success')
            return redirect(url_for('company_detail', company_id=company.id))
        # GET: show the form.
        return render_template('company/recommend.html', company=company)
    finally:
        db.close()
@app.route('/search')
@login_required
def search():
    """Search companies and people with advanced matching - requires login"""
    query = request.args.get('q', '')
    category_id = request.args.get('category', type=int)
    db = SessionLocal()
    try:
        # Company search delegates to SearchService (synonym expansion,
        # NIP/REGON lookup, fuzzy matching).
        hits = search_companies(db, query, category_id, limit=50)
        companies = [hit.company for hit in hits]
        if query:
            # Aggregate match-type counts for analytics logging.
            match_types = {}
            for hit in hits:
                match_types[hit.match_type] = match_types.get(hit.match_type, 0) + 1
            logger.info(f"Search '{query}': {len(companies)} results, types: {match_types}")
        # People search: partial match on first name, last name, or full name.
        people_results = []
        if query and len(query) >= 2:
            from sqlalchemy import or_, func
            pattern = f"%{query}%"
            people_results = db.query(Person).filter(
                or_(
                    Person.imiona.ilike(pattern),
                    Person.nazwisko.ilike(pattern),
                    func.concat(Person.imiona, ' ', Person.nazwisko).ilike(pattern)
                )
            ).limit(20).all()
            for person in people_results:
                # Count distinct active companies this person is linked to.
                active_ids = {
                    role.company_id for role in person.company_roles
                    if role.company and role.company.status == 'active'
                }
                person.company_count = len(active_ids)
            logger.info(f"Search '{query}': {len(people_results)} people found")
        return render_template(
            'search_results.html',
            companies=companies,
            people=people_results,
            query=query,
            category_id=category_id,
            result_count=len(companies)
        )
    finally:
        db.close()
@app.route('/aktualnosci')
@login_required
def events():
    """Company events and news - latest updates from member companies"""
    from sqlalchemy import func
    event_type_filter = request.args.get('type', '')
    company_id = request.args.get('company', type=int)
    page = request.args.get('page', 1, type=int)
    per_page = 20
    db = SessionLocal()
    try:
        base = db.query(CompanyEvent).join(Company)
        # Optional filters from the query string.
        if event_type_filter:
            base = base.filter(CompanyEvent.event_type == event_type_filter)
        if company_id:
            base = base.filter(CompanyEvent.company_id == company_id)
        # Newest first: primary key is event date, tie-break on creation time.
        base = base.order_by(
            CompanyEvent.event_date.desc(),
            CompanyEvent.created_at.desc()
        )
        total_events = base.count()
        offset = (page - 1) * per_page
        events = base.limit(per_page).offset(offset).all()
        # Companies that have at least one event (filter dropdown).
        companies_with_events = (
            db.query(Company).join(CompanyEvent).distinct().order_by(Company.name).all()
        )
        # (event_type, count) pairs for the statistics panel.
        event_types = (
            db.query(CompanyEvent.event_type, func.count(CompanyEvent.id))
            .group_by(CompanyEvent.event_type)
            .all()
        )
        return render_template(
            'events.html',
            events=events,
            companies_with_events=companies_with_events,
            event_types=event_types,
            event_type_filter=event_type_filter,
            company_id=company_id,
            page=page,
            per_page=per_page,
            total_events=total_events,
            total_pages=(total_events + per_page - 1) // per_page
        )
    finally:
        db.close()
# ============================================================
# FORUM ROUTES
# ============================================================
@app.route('/forum')
@login_required
def forum_index():
    """Forum - list of topics with category/status filters"""
    page = request.args.get('page', 1, type=int)
    per_page = 20
    category_filter = request.args.get('category', '')
    status_filter = request.args.get('status', '')
    db = SessionLocal()
    try:
        topic_query = db.query(ForumTopic)
        # Apply only filters that name a known category/status value.
        if category_filter and category_filter in ForumTopic.CATEGORIES:
            topic_query = topic_query.filter(ForumTopic.category == category_filter)
        if status_filter and status_filter in ForumTopic.STATUSES:
            topic_query = topic_query.filter(ForumTopic.status == status_filter)
        # Pinned topics float to the top; the rest sort by last activity.
        topic_query = topic_query.order_by(
            ForumTopic.is_pinned.desc(),
            ForumTopic.updated_at.desc()
        )
        total_topics = topic_query.count()
        page_items = topic_query.limit(per_page).offset((page - 1) * per_page).all()
        return render_template(
            'forum/index.html',
            topics=page_items,
            page=page,
            per_page=per_page,
            total_topics=total_topics,
            total_pages=(total_topics + per_page - 1) // per_page,
            category_filter=category_filter,
            status_filter=status_filter,
            categories=ForumTopic.CATEGORIES,
            statuses=ForumTopic.STATUSES,
            category_labels=ForumTopic.CATEGORY_LABELS,
            status_labels=ForumTopic.STATUS_LABELS
        )
    finally:
        db.close()
@app.route('/forum/nowy', methods=['GET', 'POST'])
@login_required
def forum_new_topic():
    """Create new forum topic with category and attachments"""
    form_context = dict(categories=ForumTopic.CATEGORIES,
                        category_labels=ForumTopic.CATEGORY_LABELS)
    if request.method != 'POST':
        # GET: just render the empty form.
        return render_template('forum/new_topic.html', **form_context)
    title = sanitize_input(request.form.get('title', ''), 255)
    content = request.form.get('content', '').strip()
    category = request.form.get('category', 'question')
    if category not in ForumTopic.CATEGORIES:
        category = 'question'  # unknown value -> default category
    if not title or len(title) < 5:
        flash('Tytuł musi mieć co najmniej 5 znaków.', 'error')
        return render_template('forum/new_topic.html', **form_context)
    if not content or len(content) < 10:
        flash('Treść musi mieć co najmniej 10 znaków.', 'error')
        return render_template('forum/new_topic.html', **form_context)
    db = SessionLocal()
    try:
        topic = ForumTopic(
            title=title,
            content=content,
            author_id=current_user.id,
            category=category
        )
        db.add(topic)
        db.commit()
        db.refresh(topic)
        # Optional single attachment on the opening post.
        upload = request.files.get('attachment')
        if upload and upload.filename:
            is_valid, error_msg = FileUploadService.validate_file(upload)
            if is_valid:
                stored_filename, rel_path, file_size, mime_type = FileUploadService.save_file(upload, 'topic')
                db.add(ForumAttachment(
                    attachment_type='topic',
                    topic_id=topic.id,
                    original_filename=upload.filename,
                    stored_filename=stored_filename,
                    file_extension=stored_filename.rsplit('.', 1)[-1],
                    file_size=file_size,
                    mime_type=mime_type,
                    uploaded_by=current_user.id
                ))
                db.commit()
            else:
                # Topic is kept; only the attachment is rejected.
                flash(f'Załącznik: {error_msg}', 'warning')
        flash('Temat został utworzony.', 'success')
        return redirect(url_for('forum_topic', topic_id=topic.id))
    finally:
        db.close()
@app.route('/forum/<int:topic_id>')
@login_required
def forum_topic(topic_id):
    """View forum topic with replies"""
    db = SessionLocal()
    try:
        topic = db.query(ForumTopic).filter(ForumTopic.id == topic_id).first()
        if topic is None:
            flash('Temat nie istnieje.', 'error')
            return redirect(url_for('forum_index'))
        # Bump the view counter; the column may be NULL on legacy rows.
        current_views = topic.views_count or 0
        topic.views_count = current_views + 1
        db.commit()
        return render_template('forum/topic.html',
                               topic=topic,
                               category_labels=ForumTopic.CATEGORY_LABELS,
                               status_labels=ForumTopic.STATUS_LABELS)
    finally:
        db.close()
@app.route('/forum/<int:topic_id>/odpowiedz', methods=['POST'])
@login_required
def forum_reply(topic_id):
    """Add reply to forum topic with optional attachment"""
    content = request.form.get('content', '').strip()
    if not content or len(content) < 3:
        flash('Odpowiedź musi mieć co najmniej 3 znaki.', 'error')
        return redirect(url_for('forum_topic', topic_id=topic_id))
    db = SessionLocal()
    try:
        topic = db.query(ForumTopic).filter(ForumTopic.id == topic_id).first()
        if topic is None:
            flash('Temat nie istnieje.', 'error')
            return redirect(url_for('forum_index'))
        if topic.is_locked:
            flash('Ten temat jest zamknięty.', 'error')
            return redirect(url_for('forum_topic', topic_id=topic_id))
        reply = ForumReply(
            topic_id=topic_id,
            author_id=current_user.id,
            content=content
        )
        db.add(reply)
        db.commit()
        db.refresh(reply)
        # Up to 10 attachments; prefer the multi-file field, falling back to
        # the legacy single-file field for backward compatibility.
        MAX_ATTACHMENTS = 10
        uploads = request.files.getlist('attachments[]') or request.files.getlist('attachment')
        saved = 0
        errors = []
        for upload in uploads[:MAX_ATTACHMENTS]:
            if not (upload and upload.filename):
                continue
            is_valid, error_msg = FileUploadService.validate_file(upload)
            if not is_valid:
                errors.append(f'{upload.filename}: {error_msg}')
                continue
            stored_filename, rel_path, file_size, mime_type = FileUploadService.save_file(upload, 'reply')
            db.add(ForumAttachment(
                attachment_type='reply',
                reply_id=reply.id,
                original_filename=upload.filename,
                stored_filename=stored_filename,
                file_extension=stored_filename.rsplit('.', 1)[-1],
                file_size=file_size,
                mime_type=mime_type,
                uploaded_by=current_user.id
            ))
            saved += 1
        if saved > 0:
            db.commit()
        if errors:
            flash(f'Niektóre załączniki nie zostały dodane: {"; ".join(errors)}', 'warning')
        # Touch the topic so it bubbles up in the activity-sorted listing.
        topic.updated_at = datetime.now()
        db.commit()
        flash('Odpowiedź dodana.', 'success')
        return redirect(url_for('forum_topic', topic_id=topic_id))
    finally:
        db.close()
# ============================================================
# FORUM ADMIN ROUTES
# ============================================================
@app.route('/admin/forum')
@login_required
def admin_forum():
    """Admin panel for forum moderation"""
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('forum_index'))
    db = SessionLocal()
    try:
        topics = db.query(ForumTopic).order_by(ForumTopic.created_at.desc()).all()
        recent_replies = (
            db.query(ForumReply).order_by(ForumReply.created_at.desc()).limit(50).all()
        )
        # Moderation stats computed from the already-loaded topic list.
        total_topics = len(topics)
        total_replies = db.query(ForumReply).count()
        pinned_count = sum(1 for t in topics if t.is_pinned)
        locked_count = sum(1 for t in topics if t.is_locked)
        category_counts = {}
        status_counts = {}
        for t in topics:
            # NULL columns fall back to the defaults used across the forum.
            cat = t.category or 'question'
            status = t.status or 'new'
            category_counts[cat] = category_counts.get(cat, 0) + 1
            status_counts[status] = status_counts.get(status, 0) + 1
        return render_template(
            'admin/forum.html',
            topics=topics,
            recent_replies=recent_replies,
            total_topics=total_topics,
            total_replies=total_replies,
            pinned_count=pinned_count,
            locked_count=locked_count,
            category_counts=category_counts,
            status_counts=status_counts,
            categories=ForumTopic.CATEGORIES,
            statuses=ForumTopic.STATUSES,
            category_labels=ForumTopic.CATEGORY_LABELS,
            status_labels=ForumTopic.STATUS_LABELS
        )
    finally:
        db.close()
@app.route('/admin/forum/topic/<int:topic_id>/pin', methods=['POST'])
@login_required
def admin_forum_pin(topic_id):
    """Toggle topic pin status"""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    db = SessionLocal()
    try:
        target = db.query(ForumTopic).filter(ForumTopic.id == topic_id).first()
        if target is None:
            return jsonify({'success': False, 'error': 'Temat nie istnieje'}), 404
        # Flip the flag and persist.
        target.is_pinned = not target.is_pinned
        db.commit()
        action = 'pinned' if target.is_pinned else 'unpinned'
        logger.info(f"Admin {current_user.email} {action} topic #{topic_id}")
        label = 'przypięty' if target.is_pinned else 'odpięty'
        return jsonify({
            'success': True,
            'is_pinned': target.is_pinned,
            'message': f"Temat {label}"
        })
    finally:
        db.close()
@app.route('/admin/forum/topic/<int:topic_id>/lock', methods=['POST'])
@login_required
def admin_forum_lock(topic_id):
    """Toggle topic lock status"""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    db = SessionLocal()
    try:
        target = db.query(ForumTopic).filter(ForumTopic.id == topic_id).first()
        if target is None:
            return jsonify({'success': False, 'error': 'Temat nie istnieje'}), 404
        # Flip the flag and persist.
        target.is_locked = not target.is_locked
        db.commit()
        action = 'locked' if target.is_locked else 'unlocked'
        logger.info(f"Admin {current_user.email} {action} topic #{topic_id}")
        label = 'zamknięty' if target.is_locked else 'otwarty'
        return jsonify({
            'success': True,
            'is_locked': target.is_locked,
            'message': f"Temat {label}"
        })
    finally:
        db.close()
@app.route('/admin/forum/topic/<int:topic_id>/delete', methods=['POST'])
@login_required
def admin_forum_delete_topic(topic_id):
    """Delete topic and all its replies"""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    db = SessionLocal()
    try:
        target = db.query(ForumTopic).filter(ForumTopic.id == topic_id).first()
        if target is None:
            return jsonify({'success': False, 'error': 'Temat nie istnieje'}), 404
        # Capture the title before the row is gone, for the audit log.
        title_for_log = target.title
        db.delete(target)  # ORM cascade also removes the topic's replies
        db.commit()
        logger.info(f"Admin {current_user.email} deleted topic #{topic_id}: {title_for_log}")
        return jsonify({
            'success': True,
            'message': 'Temat usunięty'
        })
    finally:
        db.close()
@app.route('/admin/forum/reply/<int:reply_id>/delete', methods=['POST'])
@login_required
def admin_forum_delete_reply(reply_id):
    """Delete a reply"""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    db = SessionLocal()
    try:
        target = db.query(ForumReply).filter(ForumReply.id == reply_id).first()
        if target is None:
            return jsonify({'success': False, 'error': 'Odpowiedź nie istnieje'}), 404
        # Remember the parent topic id for the audit log before deleting.
        parent_topic_id = target.topic_id
        db.delete(target)
        db.commit()
        logger.info(f"Admin {current_user.email} deleted reply #{reply_id} from topic #{parent_topic_id}")
        return jsonify({
            'success': True,
            'message': 'Odpowiedź usunięta'
        })
    finally:
        db.close()
@app.route('/admin/forum/topic/<int:topic_id>/status', methods=['POST'])
@login_required
def admin_forum_change_status(topic_id):
    """Change topic status (admin only)"""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    payload = request.get_json() or {}
    new_status = payload.get('status')
    note = payload.get('note', '').strip()
    # Reject missing or unknown status values up front.
    if not new_status or new_status not in ForumTopic.STATUSES:
        return jsonify({'success': False, 'error': 'Nieprawidłowy status'}), 400
    db = SessionLocal()
    try:
        target = db.query(ForumTopic).filter(ForumTopic.id == topic_id).first()
        if target is None:
            return jsonify({'success': False, 'error': 'Temat nie istnieje'}), 404
        previous = target.status
        # Record who changed the status and when, plus an optional note.
        target.status = new_status
        target.status_changed_by = current_user.id
        target.status_changed_at = datetime.now()
        if note:
            target.status_note = note
        db.commit()
        logger.info(f"Admin {current_user.email} changed topic #{topic_id} status: {previous} -> {new_status}")
        status_label = ForumTopic.STATUS_LABELS.get(new_status, new_status)
        return jsonify({
            'success': True,
            'status': new_status,
            'status_label': status_label,
            'message': f"Status zmieniony na: {status_label}"
        })
    finally:
        db.close()
# ============================================================
# RECOMMENDATIONS ADMIN ROUTES
# ============================================================
@app.route('/admin/recommendations')
@login_required
def admin_recommendations():
    """Admin panel for recommendations moderation"""
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('index'))
    db = SessionLocal()
    try:
        # Full history, newest first.
        all_recs = (
            db.query(CompanyRecommendation)
            .order_by(CompanyRecommendation.created_at.desc())
            .all()
        )
        # Queue still awaiting a moderation decision.
        pending_recs = (
            db.query(CompanyRecommendation)
            .filter(CompanyRecommendation.status == 'pending')
            .order_by(CompanyRecommendation.created_at.desc())
            .all()
        )
        pending_count = len(pending_recs)
        approved_count = db.query(CompanyRecommendation).filter(
            CompanyRecommendation.status == 'approved'
        ).count()
        rejected_count = db.query(CompanyRecommendation).filter(
            CompanyRecommendation.status == 'rejected'
        ).count()
        logger.info(f"Admin {current_user.email} accessed recommendations panel - {pending_count} pending")
        return render_template(
            'admin/recommendations.html',
            recommendations=all_recs,
            pending_recommendations=pending_recs,
            total_recommendations=len(all_recs),
            pending_count=pending_count,
            approved_count=approved_count,
            rejected_count=rejected_count
        )
    finally:
        db.close()
@app.route('/admin/recommendations/<int:recommendation_id>/approve', methods=['POST'])
@login_required
def admin_recommendation_approve(recommendation_id):
    """Approve a recommendation"""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    db = SessionLocal()
    try:
        rec = db.query(CompanyRecommendation).filter(
            CompanyRecommendation.id == recommendation_id
        ).first()
        if rec is None:
            return jsonify({'success': False, 'error': 'Rekomendacja nie istnieje'}), 404
        # Mark approved, stamp the moderator, and wipe any stale rejection reason.
        rec.status = 'approved'
        rec.moderated_by = current_user.id
        rec.moderated_at = datetime.utcnow()
        rec.rejection_reason = None
        db.commit()
        logger.info(f"Admin {current_user.email} approved recommendation #{recommendation_id}")
        return jsonify({
            'success': True,
            'message': 'Rekomendacja zatwierdzona'
        })
    finally:
        db.close()
@app.route('/admin/recommendations/<int:recommendation_id>/reject', methods=['POST'])
@login_required
def admin_recommendation_reject(recommendation_id):
    """Reject a recommendation"""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    db = SessionLocal()
    try:
        rec = db.query(CompanyRecommendation).filter(
            CompanyRecommendation.id == recommendation_id
        ).first()
        if rec is None:
            return jsonify({'success': False, 'error': 'Rekomendacja nie istnieje'}), 404
        # Optional rejection reason, accepted as either JSON or form data.
        if request.is_json:
            reason = request.json.get('reason', '')
        else:
            reason = request.form.get('reason', '')
        rec.status = 'rejected'
        rec.moderated_by = current_user.id
        rec.moderated_at = datetime.utcnow()
        rec.rejection_reason = reason.strip() if reason else None
        db.commit()
        logger.info(f"Admin {current_user.email} rejected recommendation #{recommendation_id}")
        return jsonify({
            'success': True,
            'message': 'Rekomendacja odrzucona'
        })
    finally:
        db.close()
# ============================================================
# USER MANAGEMENT ADMIN ROUTES
# ============================================================
@app.route('/admin/users')
@login_required
def admin_users():
    """Admin panel for user management"""
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('index'))
    db = SessionLocal()
    try:
        all_users = db.query(User).order_by(User.created_at.desc()).all()
        # Full company list feeds the assignment dropdown in the template.
        all_companies = db.query(Company).order_by(Company.name).all()
        # Summary counters derived from the loaded user list.
        total_users = len(all_users)
        admin_count = sum(1 for u in all_users if u.is_admin)
        verified_count = sum(1 for u in all_users if u.is_verified)
        logger.info(f"Admin {current_user.email} accessed users panel - {total_users} users")
        return render_template(
            'admin/users.html',
            users=all_users,
            companies=all_companies,
            total_users=total_users,
            admin_count=admin_count,
            verified_count=verified_count,
            unverified_count=total_users - verified_count
        )
    finally:
        db.close()
@app.route('/admin/users/add', methods=['POST'])
@login_required
def admin_user_add():
    """Create a new user (admin only).

    Expects JSON: email (required), name, company_id, is_admin, is_verified.
    Generates a random 16-character password and returns it once in the
    response. Fix: the AI-parse flow rejects emails without '@', but this
    manual endpoint accepted any string — the same minimal format check is
    now enforced here. Also removed the redundant local `import secrets`
    (it is imported at module level).
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    db = SessionLocal()
    try:
        data = request.get_json() or {}
        # Validate required fields.
        email = data.get('email', '').strip().lower()
        if not email:
            return jsonify({'success': False, 'error': 'Email jest wymagany'}), 400
        # Minimal format check, consistent with the AI-parse validation.
        if '@' not in email:
            return jsonify({'success': False, 'error': 'Nieprawidłowy adres email'}), 400
        # Reject duplicates.
        existing_user = db.query(User).filter(User.email == email).first()
        if existing_user:
            return jsonify({'success': False, 'error': 'Użytkownik z tym adresem email już istnieje'}), 400
        # Random initial password; `secrets` comes from the module-level import.
        import string
        password_chars = string.ascii_letters + string.digits + "!@#$%^&*"
        generated_password = ''.join(secrets.choice(password_chars) for _ in range(16))
        # pbkdf2:sha256 for cross-environment compatibility.
        password_hash = generate_password_hash(generated_password, method='pbkdf2:sha256')
        new_user = User(
            email=email,
            password_hash=password_hash,
            name=data.get('name', '').strip() or None,
            company_id=data.get('company_id') or None,
            is_admin=data.get('is_admin', False),
            is_verified=data.get('is_verified', True),  # auto-verify admin-created users
            is_active=True
        )
        db.add(new_user)
        db.commit()
        db.refresh(new_user)
        logger.info(f"Admin {current_user.email} created new user: {email} (ID: {new_user.id})")
        return jsonify({
            'success': True,
            'user_id': new_user.id,
            'generated_password': generated_password,
            'message': f'Użytkownik {email} został utworzony'
        })
    except Exception as e:
        db.rollback()
        logger.error(f"Error creating user: {e}")
        return jsonify({'success': False, 'error': 'Błąd podczas tworzenia użytkownika'}), 500
    finally:
        db.close()
# ============================================================
# AI-ASSISTED USER CREATION
# ============================================================
AI_USER_PARSE_PROMPT = """Jesteś asystentem systemu NordaBiz pomagającym administratorowi tworzyć konta użytkowników.
ZADANIE:
Przeanalizuj podany tekst i wyodrębnij informacje o użytkownikach.
DANE WEJŚCIOWE:
```
{input_text}
```
DOSTĘPNE FIRMY W SYSTEMIE (id: nazwa):
{companies_json}
INSTRUKCJE:
1. Wyodrębnij każdą osobę/użytkownika z tekstu
2. Dla każdego użytkownika zidentyfikuj:
- email (WYMAGANY - jeśli brak prawidłowego emaila, pomiń użytkownika)
- imię i nazwisko (jeśli dostępne)
- firma (dopasuj do listy dostępnych firm po nazwie, nawet częściowej)
- rola: jeśli tekst zawiera słowa "admin", "administrator", "zarząd" przy danej osobie - ustaw is_admin na true
3. Jeśli email jest niepoprawny (brak @), dodaj ostrzeżenie
4. Jeśli firma nie pasuje do żadnej z listy, ustaw company_id na null
ZWRÓĆ TYLKO CZYSTY JSON w dokładnie takim formacie (bez żadnego tekstu przed ani po):
{{
"analysis": "Krótki opis znalezionych danych (1-2 zdania po polsku)",
"users": [
{{
"email": "adres@email.pl",
"name": "Imię Nazwisko lub null",
"company_id": 123,
"company_name": "Nazwa dopasowanej firmy lub null",
"is_admin": false,
"warnings": []
}}
]
}}"""
AI_USER_IMAGE_PROMPT = """Jesteś asystentem systemu NordaBiz pomagającym administratorowi tworzyć konta użytkowników.
ZADANIE:
Przeanalizuj ten obraz (screenshot) i wyodrębnij informacje o użytkownikach.
Szukaj: adresów email, imion i nazwisk, nazw firm, ról (admin/user).
DOSTĘPNE FIRMY W SYSTEMIE (id: nazwa):
{companies_json}
INSTRUKCJE:
1. Przeczytaj cały tekst widoczny na obrazie
2. Wyodrębnij każdą osobę/użytkownika
3. Dla każdego użytkownika zidentyfikuj:
- email (WYMAGANY - jeśli brak, pomiń)
- imię i nazwisko
- firma (dopasuj do listy)
- rola: admin lub zwykły użytkownik
4. Jeśli email jest nieczytelny lub niepoprawny, dodaj ostrzeżenie
ZWRÓĆ TYLKO CZYSTY JSON w dokładnie takim formacie (bez żadnego tekstu przed ani po):
{{
"analysis": "Krótki opis co widzisz na obrazie (1-2 zdania po polsku)",
"users": [
{{
"email": "adres@email.pl",
"name": "Imię Nazwisko lub null",
"company_id": 123,
"company_name": "Nazwa dopasowanej firmy lub null",
"is_admin": false,
"warnings": []
}}
]
}}"""
@app.route('/api/admin/users/ai-parse', methods=['POST'])
@login_required
def admin_users_ai_parse():
    """Parse text or image with AI to extract user data.

    Accepts either JSON {'input_type': 'text', 'content': ...} or a multipart
    form with input_type=image plus an image file. Returns the AI analysis,
    a list of proposed users, and any emails already present in the database.
    Cleanup: the local `import json` / `import re` / `import os` were
    redundant — all three are imported at module level.
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    db = SessionLocal()
    try:
        # Company list is embedded in the prompt so the model can map names to IDs.
        companies = db.query(Company).order_by(Company.name).all()
        companies_json = "\n".join([f"{c.id}: {c.name}" for c in companies])
        # input_type may arrive via form data (image upload) or JSON (text).
        input_type = request.form.get('input_type') or (request.get_json() or {}).get('input_type', 'text')
        if input_type == 'image':
            if 'file' not in request.files:
                return jsonify({'success': False, 'error': 'Brak pliku obrazu'}), 400
            file = request.files['file']
            if file.filename == '':
                return jsonify({'success': False, 'error': 'Nie wybrano pliku'}), 400
            # Validate file type by extension.
            allowed_extensions = {'png', 'jpg', 'jpeg', 'gif', 'webp'}
            ext = file.filename.rsplit('.', 1)[-1].lower() if '.' in file.filename else ''
            if ext not in allowed_extensions:
                return jsonify({'success': False, 'error': 'Dozwolone formaty: PNG, JPG, JPEG, GIF, WEBP'}), 400
            # Persist to a temp file because the Gemini client reads from a path.
            import tempfile
            with tempfile.NamedTemporaryFile(delete=False, suffix=f'.{ext}') as tmp:
                file.save(tmp.name)
                temp_path = tmp.name
            try:
                service = gemini_service.get_gemini_service()
                prompt = AI_USER_IMAGE_PROMPT.format(companies_json=companies_json)
                ai_response = service.analyze_image(temp_path, prompt)
            finally:
                # Always remove the temp file, even when the AI call fails.
                if os.path.exists(temp_path):
                    os.unlink(temp_path)
        else:
            # Handle text input.
            data = request.get_json() or {}
            content = data.get('content', '').strip()
            if not content:
                return jsonify({'success': False, 'error': 'Brak treści do analizy'}), 400
            service = gemini_service.get_gemini_service()
            prompt = AI_USER_PARSE_PROMPT.format(
                input_text=content,
                companies_json=companies_json
            )
            ai_response = service.generate_text(
                prompt=prompt,
                feature='ai_user_parse',
                user_id=current_user.id,
                temperature=0.3  # low temperature keeps the JSON output consistent
            )
        # Models sometimes wrap JSON in markdown fences; grab the outermost braces.
        json_match = re.search(r'\{[\s\S]*\}', ai_response)
        if not json_match:
            logger.error(f"AI response not valid JSON: {ai_response[:500]}")
            return jsonify({
                'success': False,
                'error': 'AI nie zwróciło prawidłowej odpowiedzi. Spróbuj ponownie.'
            }), 500
        try:
            parsed = json.loads(json_match.group())
        except json.JSONDecodeError as e:
            logger.error(f"JSON parse error: {e}, response: {ai_response[:500]}")
            return jsonify({
                'success': False,
                'error': 'Błąd parsowania odpowiedzi AI. Spróbuj ponownie.'
            }), 500
        # Flag proposals whose email already exists so the admin can skip them.
        proposed_users = parsed.get('users', [])
        existing_emails = []
        for user in proposed_users:
            email = user.get('email', '').strip().lower()
            if email:
                existing = db.query(User).filter(User.email == email).first()
                if existing:
                    existing_emails.append(email)
                    user['warnings'] = user.get('warnings', []) + [f'Email już istnieje w systemie']
        logger.info(f"Admin {current_user.email} used AI to parse users: {len(proposed_users)} found")
        return jsonify({
            'success': True,
            'ai_response': parsed.get('analysis', 'Analiza zakończona'),
            'proposed_users': proposed_users,
            'duplicate_emails': existing_emails
        })
    except Exception as e:
        logger.error(f"Error in AI user parse: {e}")
        return jsonify({'success': False, 'error': f'Błąd: {str(e)}'}), 500
    finally:
        db.close()
@app.route('/api/admin/users/bulk-create', methods=['POST'])
@login_required
def admin_users_bulk_create():
    """Create multiple users from confirmed proposals.

    Expects JSON {'users': [{email, name, company_id, is_admin}, ...]}.
    Each created user gets a random 16-character password that is returned
    once in the response. Invalid entries land in 'failed' without aborting
    the batch. Cleanup: removed the redundant local `import secrets`
    (imported at module level).
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    db = SessionLocal()
    try:
        data = request.get_json() or {}
        users_to_create = data.get('users', [])
        if not users_to_create:
            return jsonify({'success': False, 'error': 'Brak użytkowników do utworzenia'}), 400
        created = []
        failed = []
        import string
        password_chars = string.ascii_letters + string.digits + "!@#$%^&*"
        for user_data in users_to_create:
            email = user_data.get('email', '').strip().lower()
            if not email:
                failed.append({'email': email or 'brak', 'error': 'Brak adresu email'})
                continue
            # Skip emails that already exist in the system.
            existing = db.query(User).filter(User.email == email).first()
            if existing:
                failed.append({'email': email, 'error': 'Email już istnieje'})
                continue
            # Drop the company assignment silently when the ID does not exist.
            company_id = user_data.get('company_id')
            if company_id:
                company = db.query(Company).filter(Company.id == company_id).first()
                if not company:
                    company_id = None
            generated_password = ''.join(secrets.choice(password_chars) for _ in range(16))
            password_hash = generate_password_hash(generated_password, method='pbkdf2:sha256')
            try:
                new_user = User(
                    email=email,
                    password_hash=password_hash,
                    name=user_data.get('name', '').strip() or None,
                    company_id=company_id,
                    is_admin=user_data.get('is_admin', False),
                    is_verified=True,  # admin-created users skip email confirmation
                    is_active=True
                )
                db.add(new_user)
                db.flush()  # assigns the ID without committing yet
                created.append({
                    'email': email,
                    'user_id': new_user.id,
                    'name': new_user.name,
                    'generated_password': generated_password
                })
            except Exception as e:
                failed.append({'email': email, 'error': str(e)})
        # Single commit for the whole batch of successful inserts.
        if created:
            db.commit()
        logger.info(f"Admin {current_user.email} bulk created {len(created)} users via AI")
        return jsonify({
            'success': True,
            'created': created,
            'failed': failed,
            'message': f'Utworzono {len(created)} użytkowników' + (f', {len(failed)} błędów' if failed else '')
        })
    except Exception as e:
        db.rollback()
        logger.error(f"Error in bulk user create: {e}")
        return jsonify({'success': False, 'error': f'Błąd: {str(e)}'}), 500
    finally:
        db.close()
@app.route('/admin/users/<int:user_id>/toggle-admin', methods=['POST'])
@login_required
def admin_user_toggle_admin(user_id):
    """Toggle admin status for a user"""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    # Admins may not change their own privileges.
    if user_id == current_user.id:
        return jsonify({'success': False, 'error': 'Nie możesz zmienić własnych uprawnień'}), 400
    db = SessionLocal()
    try:
        target = db.query(User).filter(User.id == user_id).first()
        if target is None:
            return jsonify({'success': False, 'error': 'Użytkownik nie znaleziony'}), 404
        # Flip the flag and persist.
        target.is_admin = not target.is_admin
        db.commit()
        verb = 'granted' if target.is_admin else 'revoked'
        logger.info(f"Admin {current_user.email} {verb} admin for user {target.email}")
        return jsonify({
            'success': True,
            'is_admin': target.is_admin,
            'message': f"{'Nadano' if target.is_admin else 'Odebrano'} uprawnienia admina"
        })
    finally:
        db.close()
@app.route('/admin/users/<int:user_id>/toggle-verified', methods=['POST'])
@login_required
def admin_user_toggle_verified(user_id):
    """Toggle verified status for a user"""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    db = SessionLocal()
    try:
        target = db.query(User).filter(User.id == user_id).first()
        if target is None:
            return jsonify({'success': False, 'error': 'Użytkownik nie znaleziony'}), 404
        target.is_verified = not target.is_verified
        # Keep the verification timestamp in sync with the flag.
        target.verified_at = datetime.utcnow() if target.is_verified else None
        db.commit()
        verb = 'verified' if target.is_verified else 'unverified'
        logger.info(f"Admin {current_user.email} {verb} user {target.email}")
        return jsonify({
            'success': True,
            'is_verified': target.is_verified,
            'message': f"Użytkownik {'zweryfikowany' if target.is_verified else 'niezweryfikowany'}"
        })
    finally:
        db.close()
@app.route('/admin/users/<int:user_id>/update', methods=['POST'])
@login_required
def admin_user_update(user_id):
    """Update a user's editable profile fields (name, email, phone).

    Accepts a JSON body; each field is optional and only applied when present.
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    db = SessionLocal()
    try:
        user = db.query(User).filter(User.id == user_id).first()
        if user is None:
            return jsonify({'success': False, 'error': 'Użytkownik nie znaleziony'}), 404
        data = request.get_json() or {}
        # Name: empty/falsy value collapses to NULL.
        if 'name' in data:
            user.name = data['name'].strip() if data['name'] else None
        # Email: normalize, then reject if another account already owns it.
        if 'email' in data:
            new_email = data['email'].strip().lower()
            if new_email and new_email != user.email:
                duplicate = db.query(User).filter(User.email == new_email, User.id != user_id).first()
                if duplicate:
                    return jsonify({'success': False, 'error': 'Ten email jest już używany'}), 400
                user.email = new_email
        # Phone: empty/falsy value collapses to NULL.
        if 'phone' in data:
            user.phone = data['phone'].strip() if data['phone'] else None
        db.commit()
        logger.info(f"Admin {current_user.email} updated user {user.email}: name={user.name}, phone={user.phone}")
        return jsonify({
            'success': True,
            'user': {
                'id': user.id,
                'name': user.name,
                'email': user.email,
                'phone': user.phone
            },
            'message': 'Dane użytkownika zaktualizowane'
        })
    except Exception as e:
        db.rollback()
        logger.error(f"Error updating user {user_id}: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
@app.route('/admin/users/<int:user_id>/assign-company', methods=['POST'])
@login_required
def admin_user_assign_company(user_id):
    """Attach a user to a company, or detach when no company_id is given."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    db = SessionLocal()
    try:
        user = db.query(User).filter(User.id == user_id).first()
        if user is None:
            return jsonify({'success': False, 'error': 'Użytkownik nie znaleziony'}), 404
        payload = request.get_json() or {}
        company_id = payload.get('company_id')
        company_name = None
        if company_id:
            company = db.query(Company).filter(Company.id == company_id).first()
            if company is None:
                return jsonify({'success': False, 'error': 'Firma nie znaleziona'}), 404
            user.company_id = company_id
            company_name = company.name
        else:
            # A missing/falsy company_id means "detach from company".
            user.company_id = None
        db.commit()
        logger.info(f"Admin {current_user.email} assigned company '{company_name}' to user {user.email}")
        message = f"Przypisano firmę: {company_name}" if company_name else "Odłączono od firmy"
        return jsonify({
            'success': True,
            'company_name': company_name,
            'message': message
        })
    finally:
        db.close()
@app.route('/admin/users/<int:user_id>/delete', methods=['POST'])
@login_required
def admin_user_delete(user_id):
    """Permanently remove a user account (admins only; self-delete blocked)."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    if user_id == current_user.id:
        return jsonify({'success': False, 'error': 'Nie możesz usunąć własnego konta'}), 400
    db = SessionLocal()
    try:
        target = db.query(User).filter(User.id == user_id).first()
        if target is None:
            return jsonify({'success': False, 'error': 'Użytkownik nie znaleziony'}), 404
        # Capture the email before the row is gone.
        removed_email = target.email
        db.delete(target)
        db.commit()
        logger.info(f"Admin {current_user.email} deleted user {removed_email}")
        return jsonify({
            'success': True,
            'message': f"Użytkownik {removed_email} został usunięty"
        })
    finally:
        db.close()
@app.route('/admin/users/<int:user_id>/reset-password', methods=['POST'])
@login_required
def admin_user_reset_password(user_id):
    """Generate a one-hour password-reset link for a user (admins only).

    Stores a fresh token + expiry on the user row and returns the absolute
    reset URL in the JSON response.
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    db = SessionLocal()
    try:
        user = db.query(User).filter(User.id == user_id).first()
        if not user:
            return jsonify({'success': False, 'error': 'Użytkownik nie znaleziony'}), 404
        # Cryptographically strong, URL-safe token (secrets, not random).
        reset_token = secrets.token_urlsafe(32)
        user.reset_token = reset_token
        user.reset_token_expires = datetime.utcnow() + timedelta(hours=1)
        db.commit()
        # Build the absolute reset URL from the configured app base URL.
        base_url = os.getenv('APP_URL', 'https://nordabiznes.pl')
        reset_url = f"{base_url}/reset-password/{reset_token}"
        # Log only a token prefix so the full secret never reaches the logs.
        logger.info(f"Admin {current_user.email} generated reset token for user {user.email}: {reset_token[:8]}...")
        return jsonify({
            'success': True,
            'reset_url': reset_url,
            # Fix: was an f-string with no placeholders (lint F541).
            'message': "Link do resetu hasła wygenerowany (ważny 1 godzinę)"
        })
    finally:
        db.close()
# ============================================================
# MEMBERSHIP FEES ADMIN
# ============================================================
# (month_number, Polish month name) pairs used by the fees admin views.
# NOTE(review): names are written without Polish diacritics ('Styczen',
# not 'Styczeń') — presumably deliberate for ASCII-safe CSV exports and
# form values; confirm before "fixing" the spelling (the calendar view
# below uses a separate, fully accented POLISH_MONTHS mapping).
MONTHS_PL = [
    (1, 'Styczen'), (2, 'Luty'), (3, 'Marzec'), (4, 'Kwiecien'),
    (5, 'Maj'), (6, 'Czerwiec'), (7, 'Lipiec'), (8, 'Sierpien'),
    (9, 'Wrzesien'), (10, 'Pazdziernik'), (11, 'Listopad'), (12, 'Grudzien')
]
@app.route('/admin/fees')
@login_required
def admin_fees():
    """Admin panel for membership fee management.

    Query params:
        year   -- fee year to show (defaults to the current year)
        month  -- optional month (1-12); when absent, a 12-month grid per
                  company is rendered instead of a single-month list
        status -- optional filter ('paid' / 'pending' / 'overdue'),
                  applied only in single-month view

    Fixes vs. previous version: removed unused `func, case` / `Decimal`
    imports and made the empty-case totals plain floats (0.0) so
    `total_due` / `total_paid` always have a consistent type — the
    non-empty branch sums floats.
    """
    if not current_user.is_admin:
        flash('Brak uprawnien do tej strony.', 'error')
        return redirect(url_for('index'))
    db = SessionLocal()
    try:
        # Get filter parameters
        year = request.args.get('year', datetime.now().year, type=int)
        month = request.args.get('month', type=int)
        status_filter = request.args.get('status', '')
        # Get all active companies
        companies = db.query(Company).filter(Company.status == 'active').order_by(Company.name).all()
        # Fetch fee rows for the selected period, keyed for O(1) lookup.
        fee_query = db.query(MembershipFee).filter(MembershipFee.fee_year == year)
        if month:
            fee_query = fee_query.filter(MembershipFee.fee_month == month)
        fees = {(f.company_id, f.fee_month): f for f in fee_query.all()}
        # Build company list with fee status
        companies_fees = []
        for company in companies:
            if month:
                fee = fees.get((company.id, month))
                companies_fees.append({
                    'company': company,
                    'fee': fee,
                    # 'brak' marks companies with no fee record generated yet.
                    'status': fee.status if fee else 'brak'
                })
            else:
                # Yearly view: one entry per company with all 12 months.
                company_data = {'company': company, 'months': {}}
                for m in range(1, 13):
                    company_data['months'][m] = fees.get((company.id, m))
                companies_fees.append(company_data)
        # Apply status filter (single-month view only).
        if status_filter and month:
            if status_filter == 'paid':
                companies_fees = [cf for cf in companies_fees if cf.get('status') == 'paid']
            elif status_filter == 'pending':
                # Missing records ('brak') count as pending.
                companies_fees = [cf for cf in companies_fees if cf.get('status') in ('pending', 'brak')]
            elif status_filter == 'overdue':
                companies_fees = [cf for cf in companies_fees if cf.get('status') == 'overdue']
        # Summary stats. Note: in single-month view these reflect the
        # (possibly status-filtered) companies_fees list.
        total_companies = len(companies)
        if month:
            month_fees = [cf.get('fee') for cf in companies_fees if cf.get('fee')]
            paid_count = sum(1 for f in month_fees if f and f.status == 'paid')
            pending_count = total_companies - paid_count
            total_due = sum(float(f.amount) for f in month_fees if f) if month_fees else 0.0
            total_paid = sum(float(f.amount_paid or 0) for f in month_fees if f) if month_fees else 0.0
        else:
            all_fees = list(fees.values())
            paid_count = sum(1 for f in all_fees if f.status == 'paid')
            pending_count = len(all_fees) - paid_count
            total_due = sum(float(f.amount) for f in all_fees) if all_fees else 0.0
            total_paid = sum(float(f.amount_paid or 0) for f in all_fees) if all_fees else 0.0
        # Current global default fee amount (the open-ended config row).
        fee_config = db.query(MembershipFeeConfig).filter(
            MembershipFeeConfig.scope == 'global',
            MembershipFeeConfig.valid_until == None
        ).first()
        default_fee = float(fee_config.monthly_amount) if fee_config else 100.00
        return render_template(
            'admin/fees.html',
            companies_fees=companies_fees,
            year=year,
            month=month,
            status_filter=status_filter,
            total_companies=total_companies,
            paid_count=paid_count,
            pending_count=pending_count,
            total_due=total_due,
            total_paid=total_paid,
            default_fee=default_fee,
            years=list(range(2024, datetime.now().year + 2)),
            months=MONTHS_PL
        )
    finally:
        db.close()
@app.route('/admin/fees/generate', methods=['POST'])
@login_required
def admin_fees_generate():
    """Create pending fee records for every active company for one month.

    Idempotent: companies that already have a record for the given
    year/month are skipped.
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnien'}), 403
    db = SessionLocal()
    try:
        year = request.form.get('year', type=int)
        month = request.form.get('month', type=int)
        if not year or not month:
            return jsonify({'success': False, 'error': 'Brak roku lub miesiaca'}), 400
        # Current global default amount (the open-ended config row).
        fee_config = db.query(MembershipFeeConfig).filter(
            MembershipFeeConfig.scope == 'global',
            MembershipFeeConfig.valid_until == None
        ).first()
        default_fee = fee_config.monthly_amount if fee_config else 100.00
        created = 0
        for company in db.query(Company).filter(Company.status == 'active').all():
            # Skip companies that already have a record for this period.
            exists = db.query(MembershipFee).filter(
                MembershipFee.company_id == company.id,
                MembershipFee.fee_year == year,
                MembershipFee.fee_month == month
            ).first()
            if exists:
                continue
            db.add(MembershipFee(
                company_id=company.id,
                fee_year=year,
                fee_month=month,
                amount=default_fee,
                status='pending'
            ))
            created += 1
        db.commit()
        return jsonify({
            'success': True,
            'message': f'Utworzono {created} rekordow skladek'
        })
    except Exception as e:
        db.rollback()
        logger.error(f"Error generating fees: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
@app.route('/admin/fees/<int:fee_id>/mark-paid', methods=['POST'])
@login_required
def admin_fees_mark_paid(fee_id):
    """Record a payment against a single membership fee (admins only).

    Form fields: amount_paid (defaults to the full fee amount when absent),
    payment_date (YYYY-MM-DD, defaults to today), payment_method,
    payment_reference, notes. Status becomes 'paid' on full payment,
    'partial' on a positive partial payment, otherwise stays unchanged.
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnien'}), 403
    db = SessionLocal()
    try:
        fee = db.query(MembershipFee).filter(MembershipFee.id == fee_id).first()
        if not fee:
            return jsonify({'success': False, 'error': 'Nie znaleziono skladki'}), 404
        # Get data from request
        amount_paid = request.form.get('amount_paid', type=float)
        payment_date = request.form.get('payment_date')
        payment_method = request.form.get('payment_method', 'transfer')
        payment_reference = request.form.get('payment_reference', '')
        notes = request.form.get('notes', '')
        # Default to the full fee amount only when the field is absent.
        # Fix: the previous `amount_paid or ...` treated an explicit 0 as
        # "pay in full", silently marking an unpaid fee as fully paid.
        if amount_paid is None:
            amount_paid = float(fee.amount)
        fee.amount_paid = amount_paid
        fee.payment_date = datetime.strptime(payment_date, '%Y-%m-%d').date() if payment_date else datetime.now().date()
        fee.payment_method = payment_method
        fee.payment_reference = payment_reference
        fee.notes = notes
        fee.recorded_by = current_user.id
        fee.recorded_at = datetime.now()
        # Derive status from how much of the fee is covered.
        if fee.amount_paid >= float(fee.amount):
            fee.status = 'paid'
        elif fee.amount_paid > 0:
            fee.status = 'partial'
        db.commit()
        return jsonify({
            'success': True,
            'message': 'Skladka zostala zarejestrowana'
        })
    except Exception as e:
        db.rollback()
        logger.error(f"Error marking fee as paid: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
@app.route('/admin/fees/bulk-mark-paid', methods=['POST'])
@login_required
def admin_fees_bulk_mark_paid():
    """Mark a selection of fees as fully paid in one action (admins only)."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnien'}), 403
    db = SessionLocal()
    try:
        fee_ids = request.form.getlist('fee_ids[]', type=int)
        if not fee_ids:
            return jsonify({'success': False, 'error': 'Brak wybranych skladek'}), 400
        updated = 0
        for fee_id in fee_ids:
            fee = db.query(MembershipFee).filter(MembershipFee.id == fee_id).first()
            # Unknown ids and already-paid fees are silently skipped.
            if fee is None or fee.status == 'paid':
                continue
            fee.status = 'paid'
            fee.amount_paid = fee.amount
            fee.payment_date = datetime.now().date()
            fee.recorded_by = current_user.id
            fee.recorded_at = datetime.now()
            updated += 1
        db.commit()
        return jsonify({
            'success': True,
            'message': f'Zaktualizowano {updated} rekordow'
        })
    except Exception as e:
        db.rollback()
        logger.error(f"Error in bulk action: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
@app.route('/admin/fees/export')
@login_required
def admin_fees_export():
    """Stream membership fees as a CSV download, filtered by year/month."""
    if not current_user.is_admin:
        flash('Brak uprawnien.', 'error')
        return redirect(url_for('admin_fees'))
    import csv
    from io import StringIO
    db = SessionLocal()
    try:
        year = request.args.get('year', datetime.now().year, type=int)
        month = request.args.get('month', type=int)
        query = db.query(MembershipFee).join(Company).filter(
            MembershipFee.fee_year == year
        )
        if month:
            query = query.filter(MembershipFee.fee_month == month)
        rows = query.order_by(Company.name, MembershipFee.fee_month).all()
        # Render the CSV into an in-memory buffer.
        buffer = StringIO()
        writer = csv.writer(buffer)
        writer.writerow([
            'Firma', 'NIP', 'Rok', 'Miesiac', 'Kwota', 'Zaplacono',
            'Status', 'Data platnosci', 'Metoda', 'Referencja', 'Notatki'
        ])
        for fee in rows:
            writer.writerow([
                fee.company.name, fee.company.nip, fee.fee_year,
                fee.fee_month, fee.amount, fee.amount_paid, fee.status,
                fee.payment_date, fee.payment_method, fee.payment_reference,
                fee.notes
            ])
        buffer.seek(0)
        return Response(
            buffer.getvalue(),
            mimetype='text/csv',
            headers={
                'Content-Disposition': f'attachment; filename=skladki_{year}_{month or "all"}.csv'
            }
        )
    finally:
        db.close()
# ============================================================
# CALENDAR ROUTES
# ============================================================
@app.route('/kalendarz')
@login_required
def calendar_index():
    """Norda Biznes event calendar - list view or monthly grid view.

    Query params:
        view  -- 'list' (default) or 'grid'
        year  -- grid year (defaults to today's year)
        month -- grid month 1-12 (defaults to today's month)
    """
    from datetime import date
    import calendar as cal_module
    # Polish month names for the grid header (fully accented, unlike
    # the ASCII-only MONTHS_PL used by the fees admin).
    POLISH_MONTHS = {
        1: 'Styczeń', 2: 'Luty', 3: 'Marzec', 4: 'Kwiecień',
        5: 'Maj', 6: 'Czerwiec', 7: 'Lipiec', 8: 'Sierpień',
        9: 'Wrzesień', 10: 'Październik', 11: 'Listopad', 12: 'Grudzień'
    }
    db = SessionLocal()
    try:
        today = date.today()
        # View parameters
        view_mode = request.args.get('view', 'list')  # 'list' or 'grid'
        year = request.args.get('year', today.year, type=int)
        month = request.args.get('month', today.month, type=int)
        # Month/year validation: wrap out-of-range months into the
        # neighbouring year (supports prev/next navigation at the edges).
        if month < 1:
            month = 12
            year -= 1
        elif month > 12:
            month = 1
            year += 1
        # Compute previous/next month for the navigation links.
        if month == 1:
            prev_month, prev_year = 12, year - 1
        else:
            prev_month, prev_year = month - 1, year
        if month == 12:
            next_month, next_year = 1, year + 1
        else:
            next_month, next_year = month + 1, year
        # Data for the grid view
        month_days = []
        events_by_day = {}
        if view_mode == 'grid':
            # Fetch events falling within the selected month.
            first_day = date(year, month, 1)
            last_day = date(year, month, cal_module.monthrange(year, month)[1])
            events = db.query(NordaEvent).filter(
                NordaEvent.event_date >= first_day,
                NordaEvent.event_date <= last_day
            ).order_by(NordaEvent.event_date.asc()).all()
            # Build the calendar matrix (firstweekday=0 -> weeks start on Monday).
            cal = cal_module.Calendar(firstweekday=0)
            month_days = cal.monthdayscalendar(year, month)
            # Bucket events by day of month for the grid cells.
            for event in events:
                day = event.event_date.day
                if day not in events_by_day:
                    events_by_day[day] = []
                events_by_day[day].append(event)
        # Data for the list view (always computed, used as fallback).
        upcoming = db.query(NordaEvent).filter(
            NordaEvent.event_date >= today
        ).order_by(NordaEvent.event_date.asc()).all()
        past = db.query(NordaEvent).filter(
            NordaEvent.event_date < today
        ).order_by(NordaEvent.event_date.desc()).limit(5).all()
        return render_template('calendar/index.html',
            # List view data
            upcoming_events=upcoming,
            past_events=past,
            today=today,
            # Grid view data
            view_mode=view_mode,
            year=year,
            month=month,
            month_name=POLISH_MONTHS.get(month, ''),
            month_days=month_days,
            events_by_day=events_by_day,
            prev_month=prev_month,
            prev_year=prev_year,
            next_month=next_month,
            next_year=next_year,
        )
    finally:
        db.close()
@app.route('/kalendarz/<int:event_id>')
@login_required
def calendar_event(event_id):
    """Event detail page, including the current user's RSVP state."""
    db = SessionLocal()
    try:
        event = db.query(NordaEvent).filter(NordaEvent.id == event_id).first()
        if event is None:
            flash('Wydarzenie nie istnieje.', 'error')
            return redirect(url_for('calendar_index'))
        # Is the current user already signed up for this event?
        attendance = db.query(EventAttendee).filter(
            EventAttendee.event_id == event_id,
            EventAttendee.user_id == current_user.id
        ).first()
        return render_template('calendar/event.html',
            event=event,
            user_attending=attendance
        )
    finally:
        db.close()
@app.route('/kalendarz/<int:event_id>/rsvp', methods=['POST'])
@login_required
def calendar_rsvp(event_id):
    """Toggle the current user's RSVP for an event (sign up / withdraw)."""
    db = SessionLocal()
    try:
        event = db.query(NordaEvent).filter(NordaEvent.id == event_id).first()
        if event is None:
            return jsonify({'success': False, 'error': 'Wydarzenie nie istnieje'}), 404
        # Existing RSVP row for this user, if any.
        attendance = db.query(EventAttendee).filter(
            EventAttendee.event_id == event_id,
            EventAttendee.user_id == current_user.id
        ).first()
        if attendance is not None:
            # Already signed up -> withdraw.
            db.delete(attendance)
            db.commit()
            return jsonify({
                'success': True,
                'action': 'removed',
                'message': 'Wypisano z wydarzenia',
                'attendee_count': event.attendee_count
            })
        # Not signed up -> enforce the capacity limit, then register.
        if event.max_attendees and event.attendee_count >= event.max_attendees:
            return jsonify({'success': False, 'error': 'Brak wolnych miejsc'}), 400
        db.add(EventAttendee(
            event_id=event_id,
            user_id=current_user.id,
            status='confirmed'
        ))
        db.commit()
        return jsonify({
            'success': True,
            'action': 'added',
            'message': 'Zapisano na wydarzenie',
            'attendee_count': event.attendee_count
        })
    finally:
        db.close()
@app.route('/admin/kalendarz')
@login_required
def admin_calendar():
    """Admin panel listing all events, newest first."""
    if not current_user.is_admin:
        flash('Brak uprawnień.', 'error')
        return redirect(url_for('calendar_index'))
    db = SessionLocal()
    try:
        all_events = db.query(NordaEvent).order_by(NordaEvent.event_date.desc()).all()
        return render_template('calendar/admin.html', events=all_events)
    finally:
        db.close()
@app.route('/admin/kalendarz/nowy', methods=['GET', 'POST'])
@login_required
def admin_calendar_new():
    """Add a new event (admins only).

    GET renders the form; POST validates input and creates the event.
    Fix: date/time strings are now parsed inside a try/except so a
    malformed value produces a flash message instead of an unhandled
    ValueError (HTTP 500).
    """
    if not current_user.is_admin:
        flash('Brak uprawnień.', 'error')
        return redirect(url_for('calendar_index'))
    if request.method == 'POST':
        from datetime import datetime as dt
        title = sanitize_input(request.form.get('title', ''), 255)
        description = request.form.get('description', '').strip()
        event_type = request.form.get('event_type', 'meeting')
        event_date_str = request.form.get('event_date', '')
        time_start_str = request.form.get('time_start', '')
        time_end_str = request.form.get('time_end', '')
        location = sanitize_input(request.form.get('location', ''), 500)
        location_url = request.form.get('location_url', '').strip()
        speaker_name = sanitize_input(request.form.get('speaker_name', ''), 255)
        max_attendees = request.form.get('max_attendees', type=int)
        if not title or not event_date_str:
            flash('Tytuł i data są wymagane.', 'error')
            return render_template('calendar/admin_new.html')
        # Parse date/time up front so validation errors are reported
        # to the user rather than crashing the request.
        try:
            event_date = dt.strptime(event_date_str, '%Y-%m-%d').date()
            time_start = dt.strptime(time_start_str, '%H:%M').time() if time_start_str else None
            time_end = dt.strptime(time_end_str, '%H:%M').time() if time_end_str else None
        except ValueError:
            flash('Nieprawidłowy format daty lub godziny.', 'error')
            return render_template('calendar/admin_new.html')
        db = SessionLocal()
        try:
            event = NordaEvent(
                title=title,
                description=description,
                event_type=event_type,
                event_date=event_date,
                time_start=time_start,
                time_end=time_end,
                location=location,
                location_url=location_url,
                speaker_name=speaker_name,
                max_attendees=max_attendees,
                created_by=current_user.id
            )
            db.add(event)
            db.commit()
            flash('Wydarzenie utworzone.', 'success')
            return redirect(url_for('admin_calendar'))
        finally:
            db.close()
    return render_template('calendar/admin_new.html')
@app.route('/admin/kalendarz/<int:event_id>/delete', methods=['POST'])
@login_required
def admin_calendar_delete(event_id):
    """Delete an event (admins only)."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    db = SessionLocal()
    try:
        event = db.query(NordaEvent).filter(NordaEvent.id == event_id).first()
        if event is None:
            return jsonify({'success': False, 'error': 'Wydarzenie nie istnieje'}), 404
        db.delete(event)
        db.commit()
        return jsonify({'success': True, 'message': 'Wydarzenie usunięte'})
    finally:
        db.close()
# ============================================================
# PRIVATE MESSAGES ROUTES
# ============================================================
@app.route('/wiadomosci')
@login_required
def messages_inbox():
    """Inbox: paginated list of messages received by the current user."""
    page = request.args.get('page', 1, type=int)
    per_page = 20
    db = SessionLocal()
    try:
        base = db.query(PrivateMessage).filter(
            PrivateMessage.recipient_id == current_user.id
        ).order_by(PrivateMessage.created_at.desc())
        total = base.count()
        page_items = base.limit(per_page).offset((page - 1) * per_page).all()
        unread_count = db.query(PrivateMessage).filter(
            PrivateMessage.recipient_id == current_user.id,
            PrivateMessage.is_read == False
        ).count()
        return render_template('messages/inbox.html',
            messages=page_items,
            page=page,
            total_pages=(total + per_page - 1) // per_page,
            unread_count=unread_count
        )
    finally:
        db.close()
@app.route('/wiadomosci/wyslane')
@login_required
def messages_sent():
    """Sent folder: paginated list of messages the current user sent."""
    page = request.args.get('page', 1, type=int)
    per_page = 20
    db = SessionLocal()
    try:
        base = db.query(PrivateMessage).filter(
            PrivateMessage.sender_id == current_user.id
        ).order_by(PrivateMessage.created_at.desc())
        total = base.count()
        page_items = base.limit(per_page).offset((page - 1) * per_page).all()
        return render_template('messages/sent.html',
            messages=page_items,
            page=page,
            total_pages=(total + per_page - 1) // per_page
        )
    finally:
        db.close()
@app.route('/wiadomosci/nowa')
@login_required
def messages_new():
    """Compose form; pre-selects a recipient when ?to=<user_id> is given."""
    recipient_id = request.args.get('to', type=int)
    db = SessionLocal()
    try:
        # Only active, verified users (excluding the sender) are selectable.
        selectable = db.query(User).filter(
            User.is_active == True,
            User.is_verified == True,
            User.id != current_user.id
        ).order_by(User.name).all()
        preselected = None
        if recipient_id:
            preselected = db.query(User).filter(User.id == recipient_id).first()
        return render_template('messages/compose.html',
            users=selectable,
            recipient=preselected
        )
    finally:
        db.close()
@app.route('/wiadomosci/wyslij', methods=['POST'])
@login_required
def messages_send():
    """Create and store a new private message from the current user."""
    recipient_id = request.form.get('recipient_id', type=int)
    subject = sanitize_input(request.form.get('subject', ''), 255)
    content = request.form.get('content', '').strip()
    # Both a recipient and a non-empty body are mandatory.
    if not recipient_id or not content:
        flash('Odbiorca i treść są wymagane.', 'error')
        return redirect(url_for('messages_new'))
    db = SessionLocal()
    try:
        recipient = db.query(User).filter(User.id == recipient_id).first()
        if recipient is None:
            flash('Odbiorca nie istnieje.', 'error')
            return redirect(url_for('messages_new'))
        db.add(PrivateMessage(
            sender_id=current_user.id,
            recipient_id=recipient_id,
            subject=subject,
            content=content
        ))
        db.commit()
        flash('Wiadomość wysłana.', 'success')
        return redirect(url_for('messages_sent'))
    finally:
        db.close()
@app.route('/wiadomosci/<int:message_id>')
@login_required
def messages_view(message_id):
    """Show one message; marks it read when the recipient opens it."""
    db = SessionLocal()
    try:
        message = db.query(PrivateMessage).filter(
            PrivateMessage.id == message_id
        ).first()
        if message is None:
            flash('Wiadomość nie istnieje.', 'error')
            return redirect(url_for('messages_inbox'))
        # Only the two participants may read the message.
        if current_user.id not in (message.recipient_id, message.sender_id):
            flash('Brak dostępu do tej wiadomości.', 'error')
            return redirect(url_for('messages_inbox'))
        # First open by the recipient stamps the read marker.
        if message.recipient_id == current_user.id and not message.is_read:
            message.is_read = True
            message.read_at = datetime.now()
            db.commit()
        return render_template('messages/view.html', message=message)
    finally:
        db.close()
@app.route('/wiadomosci/<int:message_id>/odpowiedz', methods=['POST'])
@login_required
def messages_reply(message_id):
    """Reply to a message; the reply goes to the other participant.

    Fix: only a participant (sender or recipient) of the original message
    may reply — previously any logged-in user could post a reply to any
    message id, unlike messages_view which enforces this check.
    """
    content = request.form.get('content', '').strip()
    if not content:
        flash('Treść jest wymagana.', 'error')
        return redirect(url_for('messages_view', message_id=message_id))
    db = SessionLocal()
    try:
        original = db.query(PrivateMessage).filter(
            PrivateMessage.id == message_id
        ).first()
        if not original:
            flash('Wiadomość nie istnieje.', 'error')
            return redirect(url_for('messages_inbox'))
        # Access control consistent with messages_view: participants only.
        if original.recipient_id != current_user.id and original.sender_id != current_user.id:
            flash('Brak dostępu do tej wiadomości.', 'error')
            return redirect(url_for('messages_inbox'))
        # Reply to the original sender, unless we are the sender ourselves.
        recipient_id = original.sender_id if original.sender_id != current_user.id else original.recipient_id
        reply = PrivateMessage(
            sender_id=current_user.id,
            recipient_id=recipient_id,
            subject=f"Re: {original.subject}" if original.subject else None,
            content=content,
            parent_id=message_id
        )
        db.add(reply)
        db.commit()
        flash('Odpowiedź wysłana.', 'success')
        return redirect(url_for('messages_view', message_id=message_id))
    finally:
        db.close()
@app.route('/api/messages/unread-count')
@login_required
def api_unread_count():
    """API: number of unread private messages for the current user."""
    db = SessionLocal()
    try:
        unread = db.query(PrivateMessage).filter(
            PrivateMessage.recipient_id == current_user.id,
            PrivateMessage.is_read == False
        ).count()
        return jsonify({'count': unread})
    finally:
        db.close()
# ============================================================
# NOTIFICATIONS API ROUTES
# ============================================================
@app.route('/api/notifications')
@login_required
def api_notifications():
    """API: paginated list of the current user's notifications.

    Query params: limit (default 20), offset (default 0), and
    unread_only ('true' restricts the list to unread notifications).
    """
    limit = request.args.get('limit', 20, type=int)
    offset = request.args.get('offset', 0, type=int)
    unread_only = request.args.get('unread_only', 'false').lower() == 'true'
    db = SessionLocal()
    try:
        base = db.query(UserNotification).filter(
            UserNotification.user_id == current_user.id
        )
        if unread_only:
            base = base.filter(UserNotification.is_read == False)
        # Newest notifications first.
        base = base.order_by(UserNotification.created_at.desc())
        total = base.count()
        items = base.limit(limit).offset(offset).all()
        def serialize(n):
            # Flat JSON shape consumed by the frontend notification widget.
            return {
                'id': n.id,
                'title': n.title,
                'message': n.message,
                'notification_type': n.notification_type,
                'related_type': n.related_type,
                'related_id': n.related_id,
                'action_url': n.action_url,
                'is_read': n.is_read,
                'created_at': n.created_at.isoformat() if n.created_at else None
            }
        unread_total = db.query(UserNotification).filter(
            UserNotification.user_id == current_user.id,
            UserNotification.is_read == False
        ).count()
        return jsonify({
            'success': True,
            'notifications': [serialize(n) for n in items],
            'total': total,
            'unread_count': unread_total
        })
    finally:
        db.close()
@app.route('/api/notifications/<int:notification_id>/read', methods=['POST'])
@login_required
def api_notification_mark_read(notification_id):
    """API: mark one of the current user's notifications as read."""
    db = SessionLocal()
    try:
        notification = db.query(UserNotification).filter(
            UserNotification.id == notification_id,
            UserNotification.user_id == current_user.id
        ).first()
        if notification is None:
            return jsonify({'success': False, 'error': 'Powiadomienie nie znalezione'}), 404
        # Model helper flags the row as read; we just persist the change.
        notification.mark_as_read()
        db.commit()
        return jsonify({
            'success': True,
            'message': 'Oznaczono jako przeczytane'
        })
    finally:
        db.close()
@app.route('/api/notifications/read-all', methods=['POST'])
@login_required
def api_notifications_mark_all_read():
    """API: bulk-mark every unread notification of the current user as read."""
    db = SessionLocal()
    try:
        # Single UPDATE; .update() returns the number of affected rows.
        changes = {
            UserNotification.is_read: True,
            UserNotification.read_at: datetime.now()
        }
        updated = db.query(UserNotification).filter(
            UserNotification.user_id == current_user.id,
            UserNotification.is_read == False
        ).update(changes)
        db.commit()
        return jsonify({
            'success': True,
            'message': f'Oznaczono {updated} powiadomien jako przeczytane',
            'count': updated
        })
    finally:
        db.close()
@app.route('/api/notifications/unread-count')
@login_required
def api_notifications_unread_count():
    """API: number of unread notifications for the current user."""
    db = SessionLocal()
    try:
        unread = db.query(UserNotification).filter(
            UserNotification.user_id == current_user.id,
            UserNotification.is_read == False
        ).count()
        return jsonify({'count': unread})
    finally:
        db.close()
# ============================================================
# USER ANALYTICS API ROUTES
# ============================================================
@app.route('/api/analytics/track', methods=['POST'])
@csrf.exempt
def api_analytics_track():
    """Track clicks and interactions from frontend.

    Accepts JSON of shape {'type': 'click' | 'page_time', ...} and records
    it against the analytics session id stored in the Flask session cookie.
    Anonymous visitors are tracked too (user_id stays NULL for them).
    CSRF-exempt because the beacon is posted by plain frontend JS.
    """
    data = request.get_json()
    if not data:
        return jsonify({'error': 'No data'}), 400
    # The analytics session id is created elsewhere in the app; without
    # it there is nothing to attach the event to.
    analytics_session_id = session.get('analytics_session_id')
    if not analytics_session_id:
        return jsonify({'error': 'No session'}), 400
    db = SessionLocal()
    try:
        user_session = db.query(UserSession).filter_by(session_id=analytics_session_id).first()
        if not user_session:
            return jsonify({'error': 'Session not found'}), 404
        event_type = data.get('type')
        if event_type == 'click':
            # Truncate free-form fields before insert — limits presumably
            # mirror the column sizes; confirm against the UserClick model.
            click = UserClick(
                session_id=user_session.id,
                page_view_id=data.get('page_view_id'),
                user_id=current_user.id if current_user.is_authenticated else None,
                element_type=data.get('element_type', '')[:50] if data.get('element_type') else None,
                element_id=data.get('element_id', '')[:100] if data.get('element_id') else None,
                element_text=(data.get('element_text', '') or '')[:255],
                element_class=(data.get('element_class', '') or '')[:500],
                target_url=data.get('target_url', '')[:2000] if data.get('target_url') else None,
                x_position=data.get('x'),
                y_position=data.get('y')
            )
            db.add(click)
            # Keep the per-session click counter in step with the new row.
            user_session.clicks_count = (user_session.clicks_count or 0) + 1
            db.commit()
        elif event_type == 'page_time':
            # Update time on page for a previously recorded page view.
            page_view_id = data.get('page_view_id')
            time_seconds = data.get('time_seconds')
            if page_view_id and time_seconds:
                page_view = db.query(PageView).filter_by(id=page_view_id).first()
                if page_view:
                    # Clamp to guard against bogus client-reported values.
                    page_view.time_on_page_seconds = min(time_seconds, 86400)  # Max 24h
                    db.commit()
        # Unknown event types fall through and still return success.
        return jsonify({'success': True}), 200
    except Exception as e:
        logger.error(f"Analytics track error: {e}")
        db.rollback()
        return jsonify({'error': 'Internal error'}), 500
    finally:
        db.close()
@app.route('/api/analytics/heartbeat', methods=['POST'])
@csrf.exempt
def api_analytics_heartbeat():
    """Refresh last-activity time and total duration of the analytics session."""
    analytics_session_id = session.get('analytics_session_id')
    if not analytics_session_id:
        # No tracked session — nothing to update, but not an error.
        return jsonify({'success': False}), 200
    db = SessionLocal()
    try:
        tracked = db.query(UserSession).filter_by(session_id=analytics_session_id).first()
        if tracked:
            now = datetime.now()
            tracked.last_activity_at = now
            tracked.duration_seconds = int((now - tracked.started_at).total_seconds())
            db.commit()
        return jsonify({'success': True}), 200
    except Exception as e:
        logger.error(f"Analytics heartbeat error: {e}")
        db.rollback()
        return jsonify({'success': False}), 200
    finally:
        db.close()
# ============================================================
# RECOMMENDATIONS API ROUTES
# ============================================================
@app.route('/api/recommendations/<int:company_id>', methods=['GET'])
@login_required
def api_get_recommendations(company_id):
    """API: Get all approved recommendations for a company.

    Returns JSON with the company id/name and a list of approved
    recommendations, each carrying recommender details. The recommender's
    email/phone are exposed only when the recommendation has show_contact
    set.
    """
    db = SessionLocal()
    try:
        # Verify company exists
        company = db.query(Company).filter_by(id=company_id).first()
        if not company:
            return jsonify({
                'success': False,
                'error': 'Firma nie znaleziona'
            }), 404
        # Query recommendations with user details
        # NOTE(review): the join(User) only constrains the result set; the
        # loop below still issues 1-2 extra queries per recommendation
        # (classic N+1) — consider eager-loading relationships instead.
        recommendations = db.query(CompanyRecommendation).filter_by(
            company_id=company_id,
            status='approved'
        ).join(User, CompanyRecommendation.user_id == User.id).order_by(CompanyRecommendation.created_at.desc()).all()
        # Build response with recommender details
        result = []
        for rec in recommendations:
            recommender = db.query(User).filter_by(id=rec.user_id).first()
            recommender_company = None
            if recommender and recommender.company_id:
                recommender_company = db.query(Company).filter_by(id=recommender.company_id).first()
            rec_data = {
                'id': rec.id,
                'recommendation_text': rec.recommendation_text,
                'service_category': rec.service_category,
                'created_at': rec.created_at.isoformat() if rec.created_at else None,
                'updated_at': rec.updated_at.isoformat() if rec.updated_at else None,
                'recommender': {
                    # Placeholder shown when the recommending user was deleted.
                    'name': recommender.full_name if recommender else '[Użytkownik usunięty]',
                    # Contact details honour the per-recommendation opt-in.
                    'email': recommender.email if (recommender and rec.show_contact) else None,
                    'phone': recommender.phone if (recommender and rec.show_contact) else None,
                    'company_id': recommender_company.id if recommender_company else None,
                    'company_name': recommender_company.name if recommender_company else None,
                    'company_slug': recommender_company.slug if recommender_company else None
                }
            }
            result.append(rec_data)
        return jsonify({
            'success': True,
            'company_id': company_id,
            'company_name': company.name,
            'recommendations': result,
            'count': len(result)
        })
    except Exception as e:
        logger.error(f"Error fetching recommendations for company {company_id}: {e}")
        return jsonify({
            'success': False,
            'error': 'Wystąpił błąd podczas pobierania rekomendacji'
        }), 500
    finally:
        db.close()
@app.route('/api/recommendations/create', methods=['POST'])
@login_required
def api_create_recommendation():
    """API: Create a new recommendation.

    Expects a JSON body with:
        company_id          -- target company id (required)
        recommendation_text -- 50-2000 characters (required)
        service_category    -- optional free-text category
        show_contact        -- expose the author's contact data (default True)

    Rules enforced here: only verified users may recommend, a user cannot
    recommend their own company, and at most one recommendation per
    user/company pair exists.  New recommendations start as 'pending' and
    must be approved by a moderator before they are listed publicly.

    Responses: 201 created, 400 validation/duplicate error,
    403 unverified user, 404 unknown or inactive company, 500 unexpected.
    """
    db = SessionLocal()
    try:
        # Get JSON data
        data = request.get_json()
        if not data:
            return jsonify({
                'success': False,
                'error': 'Brak danych'
            }), 400
        company_id = data.get('company_id')
        recommendation_text = data.get('recommendation_text', '').strip()
        service_category = data.get('service_category', '').strip() or None
        show_contact = data.get('show_contact', True)
        # Validate required fields
        if not company_id:
            return jsonify({
                'success': False,
                'error': 'Brak ID firmy'
            }), 400
        if not recommendation_text:
            return jsonify({
                'success': False,
                'error': 'Treść rekomendacji jest wymagana'
            }), 400
        # Validate text length (50-2000 characters)
        if len(recommendation_text) < 50:
            return jsonify({
                'success': False,
                'error': 'Rekomendacja musi mieć co najmniej 50 znaków'
            }), 400
        if len(recommendation_text) > 2000:
            return jsonify({
                'success': False,
                'error': 'Rekomendacja nie może przekraczać 2000 znaków'
            }), 400
        # Check if user is verified
        if not current_user.is_verified:
            return jsonify({
                'success': False,
                'error': 'Tylko zweryfikowani użytkownicy mogą dodawać rekomendacje'
            }), 403
        # Verify company exists
        company = db.query(Company).filter_by(id=company_id, status='active').first()
        if not company:
            return jsonify({
                'success': False,
                'error': 'Firma nie znaleziona'
            }), 404
        # Prevent self-recommendation
        if current_user.company_id and current_user.company_id == company_id:
            return jsonify({
                'success': False,
                'error': 'Nie możesz polecać własnej firmy'
            }), 400
        # Check for duplicate recommendation (user can only have one recommendation per company)
        existing_rec = db.query(CompanyRecommendation).filter_by(
            user_id=current_user.id,
            company_id=company_id
        ).first()
        if existing_rec:
            return jsonify({
                'success': False,
                'error': 'Już poleciłeś tę firmę. Możesz edytować swoją istniejącą rekomendację.'
            }), 400
        # Create recommendation
        recommendation = CompanyRecommendation(
            company_id=company_id,
            user_id=current_user.id,
            recommendation_text=recommendation_text,
            service_category=service_category,
            show_contact=show_contact,
            status='pending'  # Start as pending for moderation
        )
        db.add(recommendation)
        db.commit()
        # refresh() populates the DB-assigned id for the response/notifications
        db.refresh(recommendation)
        # Create notification for company owner (if exists)
        # Find users associated with this company
        company_users = db.query(User).filter_by(company_id=company_id, is_active=True).all()
        for company_user in company_users:
            # Skip the author so they don't get notified about their own action.
            if company_user.id != current_user.id:
                notification = UserNotification(
                    user_id=company_user.id,
                    notification_type='new_recommendation',
                    title='Nowa rekomendacja',
                    message=f'{current_user.name or current_user.email} polecił Twoją firmę: {company.name}',
                    action_url=f'/company/{company.slug}#recommendations',
                    related_id=recommendation.id
                )
                db.add(notification)
        # Second commit persists the notifications created above.
        db.commit()
        logger.info(f"Recommendation created: user {current_user.id} -> company {company_id}, ID {recommendation.id}")
        return jsonify({
            'success': True,
            'message': 'Rekomendacja została utworzona i oczekuje na moderację',
            'recommendation_id': recommendation.id,
            'status': recommendation.status
        }), 201
    except Exception as e:
        logger.error(f"Error creating recommendation: {e}")
        db.rollback()
        return jsonify({
            'success': False,
            'error': 'Wystąpił błąd podczas tworzenia rekomendacji'
        }), 500
    finally:
        db.close()
@app.route('/api/recommendations/<int:rec_id>/edit', methods=['POST'])
@login_required
def api_edit_recommendation(rec_id):
    """API: Edit an existing recommendation (owner or admin only).

    Partial-update semantics: an empty or omitted ``recommendation_text``
    leaves the current text untouched; ``service_category`` and
    ``show_contact`` are only updated when their keys are present in the
    JSON body.

    NOTE(review): editing does not reset ``status`` to 'pending', so an
    already-approved recommendation remains published after edits --
    confirm this matches the intended moderation policy.

    Responses: 200 on success, 400 bad payload/length, 403 not owner or
    admin, 404 unknown id, 500 unexpected error.
    """
    db = SessionLocal()
    try:
        # Get the recommendation
        recommendation = db.query(CompanyRecommendation).filter_by(id=rec_id).first()
        if not recommendation:
            return jsonify({
                'success': False,
                'error': 'Rekomendacja nie znaleziona'
            }), 404
        # Check authorization - user must be the owner OR admin
        if recommendation.user_id != current_user.id and not current_user.is_admin:
            return jsonify({
                'success': False,
                'error': 'Brak uprawnień do edycji tej rekomendacji'
            }), 403
        # Get JSON data
        data = request.get_json()
        if not data:
            return jsonify({
                'success': False,
                'error': 'Brak danych'
            }), 400
        recommendation_text = data.get('recommendation_text', '').strip()
        service_category = data.get('service_category', '').strip() or None
        # Default preserves the stored value when the key is absent.
        show_contact = data.get('show_contact', recommendation.show_contact)
        # Validate text if provided
        if recommendation_text:
            # Validate text length (50-2000 characters)
            if len(recommendation_text) < 50:
                return jsonify({
                    'success': False,
                    'error': 'Rekomendacja musi mieć co najmniej 50 znaków'
                }), 400
            if len(recommendation_text) > 2000:
                return jsonify({
                    'success': False,
                    'error': 'Rekomendacja nie może przekraczać 2000 znaków'
                }), 400
            recommendation.recommendation_text = recommendation_text
        # Update other fields if provided
        if 'service_category' in data:
            recommendation.service_category = service_category
        if 'show_contact' in data:
            recommendation.show_contact = show_contact
        # Update timestamp
        recommendation.updated_at = datetime.now()
        db.commit()
        logger.info(f"Recommendation edited: ID {rec_id} by user {current_user.id}")
        return jsonify({
            'success': True,
            'message': 'Rekomendacja została zaktualizowana',
            'recommendation_id': recommendation.id
        })
    except Exception as e:
        logger.error(f"Error editing recommendation {rec_id}: {e}")
        db.rollback()
        return jsonify({
            'success': False,
            'error': 'Wystąpił błąd podczas edycji rekomendacji'
        }), 500
    finally:
        db.close()
@app.route('/api/recommendations/<int:rec_id>/delete', methods=['POST'])
@login_required
def api_delete_recommendation(rec_id):
    """API: Delete a recommendation (owner or admin only).

    Responses: 200 on success, 403 when the caller is neither the author
    nor an admin, 404 for an unknown id, 500 on unexpected errors.
    """
    db = SessionLocal()
    try:
        rec = db.query(CompanyRecommendation).filter_by(id=rec_id).first()

        # Unknown id -> 404.
        if rec is None:
            return jsonify({
                'success': False,
                'error': 'Rekomendacja nie znaleziona'
            }), 404

        # Authorization: only the author or an administrator may delete.
        is_owner = rec.user_id == current_user.id
        if not (is_owner or current_user.is_admin):
            return jsonify({
                'success': False,
                'error': 'Brak uprawnień do usunięcia tej rekomendacji'
            }), 403

        # Capture identifiers before the row disappears (for the audit log).
        company_id, user_id = rec.company_id, rec.user_id

        db.delete(rec)
        db.commit()

        logger.info(f"Recommendation deleted: ID {rec_id} (company {company_id}, user {user_id}) by user {current_user.id}")
        return jsonify({
            'success': True,
            'message': 'Rekomendacja została usunięta'
        })
    except Exception as e:
        logger.error(f"Error deleting recommendation {rec_id}: {e}")
        db.rollback()
        return jsonify({
            'success': False,
            'error': 'Wystąpił błąd podczas usuwania rekomendacji'
        }), 500
    finally:
        db.close()
# ============================================================
# B2B CLASSIFIEDS ROUTES
# ============================================================
@app.route('/tablica')
@login_required
def classifieds_index():
    """B2B classifieds board: active listings with optional type/category filters."""
    listing_type = request.args.get('type', '')
    category = request.args.get('category', '')
    page = request.args.get('page', 1, type=int)
    per_page = 20
    db = SessionLocal()
    try:
        q = db.query(Classified).filter(Classified.is_active == True)
        # Apply optional filters taken from the query string.
        if listing_type:
            q = q.filter(Classified.listing_type == listing_type)
        if category:
            q = q.filter(Classified.category == category)
        # Newest listings first.
        q = q.order_by(Classified.created_at.desc())
        total = q.count()
        offset = (page - 1) * per_page
        classifieds = q.limit(per_page).offset(offset).all()
        # Category choices for the filter UI as (value, label) pairs.
        categories = [
            ('uslugi', 'Usługi'),
            ('produkty', 'Produkty'),
            ('wspolpraca', 'Współpraca'),
            ('praca', 'Praca'),
            ('inne', 'Inne')
        ]
        return render_template('classifieds/index.html',
            classifieds=classifieds,
            categories=categories,
            listing_type=listing_type,
            category_filter=category,
            page=page,
            total_pages=(total + per_page - 1) // per_page
        )
    finally:
        db.close()
@app.route('/tablica/nowe', methods=['GET', 'POST'])
@login_required
def classifieds_new():
    """Create a new classified listing (GET shows the form, POST saves it)."""
    if request.method == 'POST':
        form = request.form
        listing_type = form.get('listing_type', '')
        category = form.get('category', '')
        title = sanitize_input(form.get('title', ''), 255)
        description = form.get('description', '').strip()
        budget_info = sanitize_input(form.get('budget_info', ''), 255)
        location_info = sanitize_input(form.get('location_info', ''), 255)

        # All core fields are mandatory; re-render the form on failure.
        if not (listing_type and category and title and description):
            flash('Wszystkie wymagane pola muszą być wypełnione.', 'error')
            return render_template('classifieds/new.html')

        db = SessionLocal()
        try:
            # Listings automatically expire 30 days after creation.
            expires = datetime.now() + timedelta(days=30)
            db.add(Classified(
                author_id=current_user.id,
                company_id=current_user.company_id,
                listing_type=listing_type,
                category=category,
                title=title,
                description=description,
                budget_info=budget_info,
                location_info=location_info,
                expires_at=expires
            ))
            db.commit()
            flash('Ogłoszenie dodane.', 'success')
            return redirect(url_for('classifieds_index'))
        finally:
            db.close()
    return render_template('classifieds/new.html')
@app.route('/tablica/<int:classified_id>')
@login_required
def classifieds_view(classified_id):
    """Show a single classified listing and bump its view counter."""
    db = SessionLocal()
    try:
        item = db.query(Classified).filter(
            Classified.id == classified_id
        ).first()
        if item is None:
            flash('Ogłoszenie nie istnieje.', 'error')
            return redirect(url_for('classifieds_index'))
        # Increment the view counter; the column may be NULL on legacy rows.
        item.views_count = (item.views_count or 0) + 1
        db.commit()
        return render_template('classifieds/view.html', classified=item)
    finally:
        db.close()
@app.route('/tablica/<int:classified_id>/zakoncz', methods=['POST'])
@login_required
def classifieds_close(classified_id):
    """Deactivate a classified listing; only its author may close it."""
    db = SessionLocal()
    try:
        # Ownership is enforced inside the query itself: a non-author gets
        # the same 404 as a non-existent listing.
        item = db.query(Classified).filter(
            Classified.id == classified_id,
            Classified.author_id == current_user.id
        ).first()
        if item is None:
            return jsonify({'success': False, 'error': 'Ogłoszenie nie istnieje lub brak uprawnień'}), 404
        item.is_active = False
        db.commit()
        return jsonify({'success': True, 'message': 'Ogłoszenie zamknięte'})
    finally:
        db.close()
# ============================================================
# NEW MEMBERS ROUTE
# ============================================================
@app.route('/nowi-czlonkowie')
@login_required
def new_members():
    """List member companies that joined within the last `days` days (default 90)."""
    days = request.args.get('days', 90, type=int)
    db = SessionLocal()
    try:
        since = datetime.now() - timedelta(days=days)
        companies = db.query(Company).filter(
            Company.status == 'active',
            Company.created_at >= since
        ).order_by(Company.created_at.desc()).all()
        return render_template('new_members.html',
            companies=companies,
            days=days,
            total=len(companies)
        )
    finally:
        db.close()
# ============================================================
# AUTHENTICATION ROUTES
# ============================================================
@app.route('/register', methods=['GET', 'POST'])
@limiter.limit("5 per hour")  # Limit registration attempts
def register():
    """User registration.

    GET renders the form; POST validates input, creates an *unverified*
    account, and emails a 24-hour verification link.  Registration requires
    a 10-digit company NIP; when the NIP matches an active NORDA member
    company the new account is linked to it and flagged as a member.

    Email delivery is best-effort: failures are logged (only an 8-character
    token preview, never the full token) and registration still succeeds.
    """
    if current_user.is_authenticated:
        return redirect(url_for('index'))
    if request.method == 'POST':
        email = sanitize_input(request.form.get('email', ''), 255)
        password = request.form.get('password', '')
        name = sanitize_input(request.form.get('name', ''), 255)
        company_nip = sanitize_input(request.form.get('company_nip', ''), 10)
        # Validate email
        if not validate_email(email):
            flash('Nieprawidłowy format adresu email.', 'error')
            return render_template('auth/register.html')
        # Validate password
        password_valid, password_message = validate_password(password)
        if not password_valid:
            flash(password_message, 'error')
            return render_template('auth/register.html')
        # Validate required fields
        if not name or not email or not company_nip:
            flash('Imię, email i NIP firmy są wymagane.', 'error')
            return render_template('auth/register.html')
        # Validate NIP format
        if not re.match(r'^\d{10}$', company_nip):
            flash('NIP musi składać się z 10 cyfr.', 'error')
            return render_template('auth/register.html')
        db = SessionLocal()
        try:
            # Check if user exists
            if db.query(User).filter_by(email=email).first():
                flash('Email już jest zarejestrowany.', 'error')
                return render_template('auth/register.html')
            # Check if company is NORDA member
            is_norda_member = False
            company_id = None
            # NOTE(review): the NIP regex re-check is redundant (already
            # validated above) but harmless -- defensive duplication.
            if company_nip and re.match(r'^\d{10}$', company_nip):
                company = db.query(Company).filter_by(nip=company_nip, status='active').first()
                if company:
                    is_norda_member = True
                    company_id = company.id
            # Generate verification token
            verification_token = secrets.token_urlsafe(32)
            verification_expires = datetime.now() + timedelta(hours=24)
            # Create user
            user = User(
                email=email,
                password_hash=generate_password_hash(password, method='pbkdf2:sha256'),
                name=name,
                company_nip=company_nip,
                company_id=company_id,
                is_norda_member=is_norda_member,
                created_at=datetime.now(),
                is_active=True,
                is_verified=False,  # Requires email verification
                verification_token=verification_token,
                verification_token_expires=verification_expires
            )
            db.add(user)
            db.commit()
            # Build verification URL
            base_url = os.getenv('APP_URL', 'https://nordabiznes.pl')
            verification_url = f"{base_url}/verify-email/{verification_token}"
            # Try to send verification email
            try:
                import email_service
                if email_service.is_configured():
                    success = email_service.send_welcome_email(email, name, verification_url)
                    if success:
                        logger.info(f"Verification email sent to {email}")
                    else:
                        logger.warning(f"Failed to send verification email to {email}")
                        logger.info(f"Verification token (email failed) for {email}: {verification_token[:8]}...")
                else:
                    logger.warning("Email service not configured")
                    logger.info(f"Verification token (no email) for {email}: {verification_token[:8]}...")
            except Exception as e:
                # Email delivery failure must not roll back the registration.
                logger.error(f"Error sending verification email: {e}")
                logger.info(f"Verification token (exception) for {email}: {verification_token[:8]}...")
            logger.info(f"New user registered: {email}")
            flash('Rejestracja udana! Sprawdz email i kliknij link weryfikacyjny.', 'success')
            return redirect(url_for('login'))
        except Exception as e:
            logger.error(f"Registration error: {e}")
            flash('Wystąpił błąd podczas rejestracji. Spróbuj ponownie.', 'error')
            return render_template('auth/register.html')
        finally:
            db.close()
    return render_template('auth/register.html')
@app.route('/login', methods=['GET', 'POST'])
@limiter.limit("1000 per hour" if os.getenv('FLASK_ENV') == 'development' else "30 per minute")
def login():
    """User login.

    POST flow:
      * rejects accounts still inside a lockout window,
      * counts failed attempts and locks the account for 30 minutes after
        the 5th consecutive failure,
      * requires an active account with a verified email,
      * redirects to the 2FA step when TOTP is enabled for the user,
      * otherwise logs in directly, honouring a same-site ``next`` redirect
        (absolute URLs are discarded to prevent open redirects).

    Failed/locked attempts are mirrored to the security audit logger.
    The rate limit is relaxed in development.
    """
    if current_user.is_authenticated:
        return redirect(url_for('index'))
    if request.method == 'POST':
        email = sanitize_input(request.form.get('email', ''), 255)
        password = request.form.get('password', '')
        remember = request.form.get('remember', False) == 'on'
        # Basic validation
        if not email or not password:
            flash('Email i hasło są wymagane.', 'error')
            return render_template('auth/login.html')
        db = SessionLocal()
        try:
            user = db.query(User).filter_by(email=email).first()
            # Get client IP for logging
            # X-Forwarded-For may carry a comma-separated chain; keep the first hop.
            client_ip = request.headers.get('X-Forwarded-For', request.remote_addr)
            if client_ip and ',' in client_ip:
                client_ip = client_ip.split(',')[0].strip()
            # Check if account is locked
            if user and user.locked_until and user.locked_until > datetime.now():
                remaining = (user.locked_until - datetime.now()).seconds // 60 + 1
                security_logger.warning(f"LOCKED_ACCOUNT ip={client_ip} email={email}")
                flash(f'Konto tymczasowo zablokowane. Spróbuj za {remaining} minut.', 'error')
                return render_template('auth/login.html')
            if not user or not check_password_hash(user.password_hash, password):
                logger.warning(f"Failed login attempt for: {email}")
                security_logger.warning(f"FAILED_LOGIN ip={client_ip} email={email}")
                # Increment failed attempts if user exists
                if user:
                    user.failed_login_attempts = (user.failed_login_attempts or 0) + 1
                    # Lock account after 5 failed attempts (30 min lockout)
                    if user.failed_login_attempts >= 5:
                        user.locked_until = datetime.now() + timedelta(minutes=30)
                        security_logger.warning(f"ACCOUNT_LOCKED ip={client_ip} email={email} attempts={user.failed_login_attempts}")
                        db.commit()
                        flash('Zbyt wiele nieudanych prób. Konto zablokowane na 30 minut.', 'error')
                        return render_template('auth/login.html')
                    db.commit()
                flash('Nieprawidłowy email lub hasło.', 'error')
                return render_template('auth/login.html')
            if not user.is_active:
                flash('Konto zostało dezaktywowane.', 'error')
                return render_template('auth/login.html')
            # Require email verification
            if not user.is_verified:
                flash('Musisz potwierdzic adres email przed zalogowaniem. Sprawdz skrzynke.', 'error')
                return render_template('auth/login.html')
            # Reset failed attempts on successful login
            user.failed_login_attempts = 0
            user.locked_until = None
            # Check if user has 2FA enabled
            if user.totp_enabled and SECURITY_SERVICE_AVAILABLE:
                # Store pending login in session for 2FA verification
                session['2fa_pending_user_id'] = user.id
                session['2fa_remember'] = remember
                next_page = request.args.get('next')
                # Only same-site targets are preserved across the 2FA step.
                if next_page and next_page.startswith('/'):
                    session['2fa_next'] = next_page
                db.commit()
                logger.info(f"2FA required for user: {email}")
                return redirect(url_for('verify_2fa'))
            # No 2FA - login directly
            login_user(user, remember=remember)
            user.last_login = datetime.now()
            db.commit()
            logger.info(f"User logged in: {email}")
            next_page = request.args.get('next')
            # Prevent open redirect vulnerability
            if next_page and not next_page.startswith('/'):
                next_page = None
            return redirect(next_page or url_for('dashboard'))
        except Exception as e:
            logger.error(f"Login error: {e}")
            flash('Wystąpił błąd podczas logowania. Spróbuj ponownie.', 'error')
            return render_template('auth/login.html')
        finally:
            db.close()
    return render_template('auth/login.html')
@app.route('/logout')
@login_required
def logout():
    """Log the current user out and clear their 2FA session state."""
    # Drop the 2FA marker so the next login must re-verify from scratch.
    session.pop('2fa_verified', None)
    logout_user()
    flash('Wylogowano pomyślnie.', 'success')
    return redirect(url_for('index'))
# ============================================================
# TWO-FACTOR AUTHENTICATION
# ============================================================
@app.route('/verify-2fa', methods=['GET', 'POST'])
@limiter.limit("10 per minute")
def verify_2fa():
    """Verify 2FA code during login.

    The preceding /login step stored the user id in
    ``session['2fa_pending_user_id']``; this view validates a TOTP code
    (or a one-time backup code when ``use_backup`` is set) and completes
    the login, restoring the remembered ``next`` redirect target.
    Invalid codes are mirrored to the security audit logger.
    """
    # Check if there's a pending 2FA login
    pending_user_id = session.get('2fa_pending_user_id')
    if not pending_user_id:
        flash('Sesja wygasła. Zaloguj się ponownie.', 'error')
        return redirect(url_for('login'))
    if request.method == 'POST':
        code = request.form.get('code', '').strip()
        # Arrives as a form value (e.g. 'on'); truthiness is sufficient here.
        use_backup = request.form.get('use_backup', False)
        if not code:
            flash('Wprowadź kod weryfikacyjny.', 'error')
            return render_template('auth/verify_2fa.html')
        db = SessionLocal()
        try:
            user = db.query(User).get(pending_user_id)
            if not user:
                session.pop('2fa_pending_user_id', None)
                flash('Użytkownik nie istnieje.', 'error')
                return redirect(url_for('login'))
            # Verify code
            # Without the security service no code can ever validate.
            if SECURITY_SERVICE_AVAILABLE:
                if use_backup:
                    valid = verify_backup_code(user, code, db)
                else:
                    valid = verify_totp(user, code)
            else:
                valid = False
            if valid:
                # Clear pending login and log in
                session.pop('2fa_pending_user_id', None)
                remember = session.pop('2fa_remember', False)
                next_page = session.pop('2fa_next', None)
                login_user(user, remember=remember)
                session['2fa_verified'] = True
                user.last_login = datetime.now()
                db.commit()
                logger.info(f"User logged in with 2FA: {user.email}")
                flash('Zalogowano pomyślnie.', 'success')
                return redirect(next_page or url_for('dashboard'))
            else:
                client_ip = request.headers.get('X-Forwarded-For', request.remote_addr)
                if client_ip and ',' in client_ip:
                    client_ip = client_ip.split(',')[0].strip()
                security_logger.warning(f"INVALID_2FA ip={client_ip} user_id={pending_user_id}")
                flash('Nieprawidłowy kod weryfikacyjny.', 'error')
        except Exception as e:
            logger.error(f"2FA verification error: {e}")
            flash('Wystąpił błąd. Spróbuj ponownie.', 'error')
        finally:
            db.close()
    return render_template('auth/verify_2fa.html')
@app.route('/settings/2fa', methods=['GET', 'POST'])
@login_required
def settings_2fa():
    """2FA settings - enable/disable.

    Dispatches on the posted ``action``:
      setup             -- generate a fresh TOTP secret and show the QR code
                           (2FA stays disabled until the code is confirmed)
      verify_setup      -- confirm the code, enable 2FA, issue 8 backup codes
      disable           -- turn 2FA off (requires a currently valid code)
      regenerate_backup -- replace backup codes (requires a valid code)

    All security-service calls are guarded by SECURITY_SERVICE_AVAILABLE
    so the page degrades gracefully when the optional module is missing.
    """
    db = SessionLocal()
    try:
        user = db.query(User).get(current_user.id)
        if request.method == 'POST':
            action = request.form.get('action')
            if action == 'setup':
                # Generate new secret
                if SECURITY_SERVICE_AVAILABLE:
                    secret = generate_totp_secret()
                    if secret:
                        user.totp_secret = secret
                        user.totp_enabled = False  # Not enabled until verified
                        db.commit()
                        qr_uri = get_totp_uri(user)
                        return render_template('auth/2fa_setup.html',
                                             qr_uri=qr_uri,
                                             secret=secret)
                flash('Błąd konfiguracji 2FA.', 'error')
            elif action == 'verify_setup':
                # Verify the setup code and enable 2FA
                code = request.form.get('code', '').strip()
                if SECURITY_SERVICE_AVAILABLE and verify_totp(user, code):
                    user.totp_enabled = True
                    # Generate backup codes
                    backup_codes = generate_backup_codes(8)
                    user.totp_backup_codes = backup_codes
                    db.commit()
                    # Log audit
                    if SECURITY_SERVICE_AVAILABLE:
                        log_audit(db, '2fa.enabled', 'user', user.id, user.email)
                        db.commit()
                    logger.info(f"2FA enabled for user: {user.email}")
                    # Backup codes are shown exactly once, right after enabling.
                    return render_template('auth/2fa_backup_codes.html',
                                         backup_codes=backup_codes)
                else:
                    flash('Nieprawidłowy kod. Spróbuj ponownie.', 'error')
                    qr_uri = get_totp_uri(user)
                    return render_template('auth/2fa_setup.html',
                                         qr_uri=qr_uri,
                                         secret=user.totp_secret)
            elif action == 'disable':
                # Require current code to disable
                code = request.form.get('code', '').strip()
                if SECURITY_SERVICE_AVAILABLE and verify_totp(user, code):
                    user.totp_enabled = False
                    user.totp_secret = None
                    user.totp_backup_codes = None
                    db.commit()
                    # Log audit
                    if SECURITY_SERVICE_AVAILABLE:
                        log_audit(db, '2fa.disabled', 'user', user.id, user.email)
                        db.commit()
                    logger.info(f"2FA disabled for user: {user.email}")
                    flash('Uwierzytelnianie dwuskładnikowe zostało wyłączone.', 'success')
                else:
                    flash('Nieprawidłowy kod. Nie można wyłączyć 2FA.', 'error')
            elif action == 'regenerate_backup':
                # Require current code to regenerate backup codes
                code = request.form.get('code', '').strip()
                if SECURITY_SERVICE_AVAILABLE and verify_totp(user, code):
                    backup_codes = generate_backup_codes(8)
                    user.totp_backup_codes = backup_codes
                    db.commit()
                    logger.info(f"Backup codes regenerated for user: {user.email}")
                    return render_template('auth/2fa_backup_codes.html',
                                         backup_codes=backup_codes)
                else:
                    flash('Nieprawidłowy kod. Nie można wygenerować kodów.', 'error')
        return render_template('auth/2fa_settings.html',
                             totp_enabled=user.totp_enabled,
                             backup_codes_count=len(user.totp_backup_codes) if user.totp_backup_codes else 0)
    except Exception as e:
        logger.error(f"2FA settings error: {e}")
        flash('Wystąpił błąd.', 'error')
        return redirect(url_for('dashboard'))
    finally:
        db.close()
@app.route('/forgot-password', methods=['GET', 'POST'])
@limiter.limit("5 per hour")
def forgot_password():
    """Request password reset.

    Always flashes the same neutral message whether or not the address
    exists, to prevent account enumeration.  An existing active account
    gets a 1-hour reset token emailed to it; only an 8-character token
    preview is ever logged.
    """
    if current_user.is_authenticated:
        return redirect(url_for('index'))
    if request.method == 'POST':
        email = sanitize_input(request.form.get('email', ''), 255)
        if not validate_email(email):
            flash('Nieprawidłowy format adresu email.', 'error')
            return render_template('auth/forgot_password.html')
        db = SessionLocal()
        try:
            user = db.query(User).filter_by(email=email, is_active=True).first()
            if user:
                # Generate reset token
                reset_token = secrets.token_urlsafe(32)
                reset_expires = datetime.now() + timedelta(hours=1)
                # Save token to database
                user.reset_token = reset_token
                user.reset_token_expires = reset_expires
                db.commit()
                # Build reset URL
                base_url = os.getenv('APP_URL', 'https://nordabiznes.pl')
                reset_url = f"{base_url}/reset-password/{reset_token}"
                # Try to send email
                try:
                    import email_service
                    if email_service.is_configured():
                        success = email_service.send_password_reset_email(email, reset_url)
                        if success:
                            logger.info(f"Password reset email sent to {email}")
                        else:
                            logger.warning(f"Failed to send password reset email to {email}")
                            # Log token preview for debugging (full token never logged for security)
                            logger.info(f"Reset token (email failed) for {email}: {reset_token[:8]}...")
                    else:
                        logger.warning("Email service not configured")
                        logger.info(f"Reset token (no email) for {email}: {reset_token[:8]}...")
                except Exception as e:
                    logger.error(f"Error sending reset email: {e}")
                    logger.info(f"Reset token (exception) for {email}: {reset_token[:8]}...")
            # Always show same message to prevent email enumeration
            flash('Jeśli email istnieje w systemie, instrukcje resetowania hasła zostały wysłane.', 'info')
            return redirect(url_for('login'))
        except Exception as e:
            logger.error(f"Password reset error: {e}")
            flash('Wystąpił błąd. Spróbuj ponownie.', 'error')
        finally:
            db.close()
    return render_template('auth/forgot_password.html')
@app.route('/reset-password/<token>', methods=['GET', 'POST'])
@limiter.limit("10 per hour")
def reset_password(token):
    """Reset password with token.

    The token must match a stored reset token, be unexpired (1-hour window
    set by /forgot-password) and belong to an active account.  On success
    the password is re-hashed (PBKDF2-SHA256) and the token is cleared so
    the link is single-use.
    """
    if current_user.is_authenticated:
        return redirect(url_for('index'))
    db = SessionLocal()
    try:
        # Find user with valid token
        user = db.query(User).filter(
            User.reset_token == token,
            User.reset_token_expires > datetime.now(),
            User.is_active == True
        ).first()
        if not user:
            flash('Link resetowania hasła jest nieprawidłowy lub wygasł.', 'error')
            return redirect(url_for('forgot_password'))
        if request.method == 'POST':
            password = request.form.get('password', '')
            password_confirm = request.form.get('password_confirm', '')
            # Validate passwords match
            if password != password_confirm:
                flash('Hasła nie są identyczne.', 'error')
                return render_template('auth/reset_password.html', token=token)
            # Validate password strength
            password_valid, password_message = validate_password(password)
            if not password_valid:
                flash(password_message, 'error')
                return render_template('auth/reset_password.html', token=token)
            # Update password and clear reset token
            user.password_hash = generate_password_hash(password, method='pbkdf2:sha256')
            user.reset_token = None
            user.reset_token_expires = None
            db.commit()
            logger.info(f"Password reset successful for {user.email}")
            flash('Hasło zostało zmienione. Możesz się teraz zalogować.', 'success')
            return redirect(url_for('login'))
        return render_template('auth/reset_password.html', token=token)
    except Exception as e:
        logger.error(f"Reset password error: {e}")
        flash('Wystąpił błąd. Spróbuj ponownie.', 'error')
        return redirect(url_for('forgot_password'))
    finally:
        db.close()
@app.route('/verify-email/<token>')
def verify_email(token):
    """Confirm a user's email address via the token sent at registration."""
    db = SessionLocal()
    try:
        # The token must match, still be unexpired, and belong to an active account.
        user = db.query(User).filter(
            User.verification_token == token,
            User.verification_token_expires > datetime.now(),
            User.is_active == True
        ).first()
        if user is None:
            flash('Link weryfikacyjny jest nieprawidłowy lub wygasł.', 'error')
            return redirect(url_for('login'))
        if user.is_verified:
            # Defensive branch: already-verified accounts just get redirected.
            flash('Email został już zweryfikowany.', 'info')
            return redirect(url_for('login'))
        # Mark the account verified and invalidate the one-time token.
        user.is_verified = True
        user.verified_at = datetime.now()
        user.verification_token = None
        user.verification_token_expires = None
        db.commit()
        logger.info(f"Email verified for {user.email}")
        flash('Email został zweryfikowany! Możesz się teraz zalogować.', 'success')
        return redirect(url_for('login'))
    except Exception as e:
        logger.error(f"Email verification error: {e}")
        flash('Wystąpił błąd podczas weryfikacji.', 'error')
        return redirect(url_for('login'))
    finally:
        db.close()
@app.route('/resend-verification', methods=['GET', 'POST'])
@limiter.limit("5 per hour")
def resend_verification():
    """Resend email verification link.

    Issues a fresh 24-hour token only for existing, still-unverified active
    accounts, but always flashes the same neutral message to prevent email
    enumeration.  Only an 8-character token preview is ever logged.
    """
    if current_user.is_authenticated:
        return redirect(url_for('index'))
    if request.method == 'POST':
        email = sanitize_input(request.form.get('email', ''), 255)
        if not validate_email(email):
            flash('Nieprawidłowy format adresu email.', 'error')
            return render_template('auth/resend_verification.html')
        db = SessionLocal()
        try:
            user = db.query(User).filter_by(email=email, is_active=True).first()
            if user and not user.is_verified:
                # Generate new verification token
                verification_token = secrets.token_urlsafe(32)
                verification_expires = datetime.now() + timedelta(hours=24)
                # Update user token
                user.verification_token = verification_token
                user.verification_token_expires = verification_expires
                db.commit()
                # Build verification URL
                base_url = os.getenv('APP_URL', 'https://nordabiznes.pl')
                verification_url = f"{base_url}/verify-email/{verification_token}"
                # Try to send email
                try:
                    import email_service
                    if email_service.is_configured():
                        success = email_service.send_welcome_email(email, user.name, verification_url)
                        if success:
                            logger.info(f"Verification email resent to {email}")
                        else:
                            logger.warning(f"Failed to resend verification email to {email}")
                            logger.info(f"Resend verification token (email failed) for {email}: {verification_token[:8]}...")
                    else:
                        logger.warning("Email service not configured")
                        logger.info(f"Resend verification token (no email) for {email}: {verification_token[:8]}...")
                except Exception as e:
                    logger.error(f"Error resending verification email: {e}")
                    logger.info(f"Resend verification token (exception) for {email}: {verification_token[:8]}...")
            # Always show same message to prevent email enumeration
            flash('Jesli konto istnieje i nie zostalo zweryfikowane, email weryfikacyjny zostal wyslany.', 'info')
            return redirect(url_for('login'))
        except Exception as e:
            logger.error(f"Resend verification error: {e}")
            flash('Wystapil blad. Sprobuj ponownie.', 'error')
        finally:
            db.close()
    return render_template('auth/resend_verification.html')
# ============================================================
# USER DASHBOARD
# ============================================================
@app.route('/dashboard')
@login_required
def dashboard():
    """User dashboard: recent AI chat conversations plus aggregate statistics."""
    db = SessionLocal()
    try:
        # Ten most recently updated conversations belonging to this user.
        recent = db.query(AIChatConversation).filter_by(
            user_id=current_user.id
        ).order_by(AIChatConversation.updated_at.desc()).limit(10).all()
        # Aggregate counters for the stats widgets.
        conversation_count = db.query(AIChatConversation).filter_by(user_id=current_user.id).count()
        message_count = db.query(AIChatMessage).join(AIChatConversation).filter(
            AIChatConversation.user_id == current_user.id
        ).count()
        return render_template(
            'dashboard.html',
            conversations=recent,
            total_conversations=conversation_count,
            total_messages=message_count
        )
    finally:
        db.close()
# ============================================================
# AI CHAT ROUTES
# ============================================================
@app.route('/chat')
@login_required
def chat():
    """Render the AI chat user interface."""
    return render_template('chat.html')
@app.route('/api/chat/start', methods=['POST'])
@csrf.exempt
@login_required
def chat_start():
    """Start a new AI chat conversation for the current user.

    Accepts an optional JSON body with a ``title``; when the title is
    absent -- or the request carries no JSON body at all -- a timestamped
    default title is used.

    Returns:
        JSON {success, conversation_id, title} on success, or
        {success: False, error} with HTTP 500 on failure.
    """
    try:
        # silent=True tolerates a missing or non-JSON body instead of raising;
        # the previous request.get_json() yielded None here, so data.get(...)
        # crashed with AttributeError and produced a misleading 500.
        data = request.get_json(silent=True) or {}
        title = data.get('title', f"Rozmowa - {datetime.now().strftime('%Y-%m-%d %H:%M')}")
        chat_engine = NordaBizChatEngine()
        conversation = chat_engine.start_conversation(
            user_id=current_user.id,
            title=title
        )
        return jsonify({
            'success': True,
            'conversation_id': conversation.id,
            'title': conversation.title
        })
    except Exception as e:
        logger.error(f"Error starting chat: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
@app.route('/api/chat/<int:conversation_id>/message', methods=['POST'])
@csrf.exempt
@login_required
def chat_send_message(conversation_id):
    """Send a user message to the AI chat and return the assistant reply.

    Validates that the conversation belongs to the current user, forwards
    the message to the chat engine, and augments the response with token
    and cost telemetry plus Gemini free-tier usage counters.

    Returns:
        200 -- {success, message, message_id, created_at, tech_info}
        400 -- empty or missing message
        404 -- conversation not owned by the caller / unknown
        500 -- unexpected engine or database error
    """
    try:
        # silent=True: a missing or non-JSON body becomes an empty dict so the
        # empty-message check below answers with a clean 400 instead of the
        # AttributeError->500 the original code produced.
        data = request.get_json(silent=True) or {}
        message = data.get('message', '').strip()
        if not message:
            return jsonify({'success': False, 'error': 'Wiadomość nie może być pusta'}), 400
        # Verify conversation belongs to user
        db = SessionLocal()
        try:
            conversation = db.query(AIChatConversation).filter_by(
                id=conversation_id,
                user_id=current_user.id
            ).first()
            if not conversation:
                return jsonify({'success': False, 'error': 'Conversation not found'}), 404
        finally:
            db.close()
        chat_engine = NordaBizChatEngine()
        response = chat_engine.send_message(
            conversation_id=conversation_id,
            user_message=message,
            user_id=current_user.id
        )
        # Get free tier usage stats for today
        free_tier_stats = get_free_tier_usage()
        # Calculate theoretical cost (Gemini 2.0 Flash pricing: $0.075/M input,
        # $0.30/M output tokens) -- informational only, free tier is used.
        tokens_in = response.tokens_input or 0
        tokens_out = response.tokens_output or 0
        theoretical_cost = (tokens_in / 1_000_000) * 0.075 + (tokens_out / 1_000_000) * 0.30
        return jsonify({
            'success': True,
            'message': response.content,
            'message_id': response.id,
            'created_at': response.created_at.isoformat(),
            # Technical metadata
            'tech_info': {
                'model': 'gemini-2.0-flash',
                'data_source': 'PostgreSQL (111 firm Norda Biznes)',
                'architecture': 'Full DB Context (wszystkie firmy w kontekście AI)',
                'tokens_input': tokens_in,
                'tokens_output': tokens_out,
                'tokens_total': tokens_in + tokens_out,
                'latency_ms': response.latency_ms or 0,
                'theoretical_cost_usd': round(theoretical_cost, 6),
                'actual_cost_usd': 0.0,  # Free tier
                'free_tier': {
                    'is_free': True,
                    'daily_limit': 1500,  # Gemini free tier: 1500 req/day
                    'requests_today': free_tier_stats['requests_today'],
                    'tokens_today': free_tier_stats['tokens_today'],
                    'remaining': max(0, 1500 - free_tier_stats['requests_today'])
                }
            }
        })
    except Exception as e:
        logger.error(f"Error sending message: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
@app.route('/api/chat/<int:conversation_id>/history', methods=['GET'])
@login_required
def chat_get_history(conversation_id):
    """Return the message history of one of the user's conversations."""
    try:
        # Ownership check: the conversation must belong to the logged-in user.
        db = SessionLocal()
        try:
            owned = db.query(AIChatConversation).filter_by(
                id=conversation_id,
                user_id=current_user.id
            ).first()
            if owned is None:
                return jsonify({'success': False, 'error': 'Conversation not found'}), 404
        finally:
            db.close()
        engine = NordaBizChatEngine()
        history = engine.get_conversation_history(conversation_id)
        return jsonify({
            'success': True,
            'messages': history
        })
    except Exception as e:
        logger.error(f"Error getting history: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
@app.route('/api/chat/conversations', methods=['GET'])
@login_required
def chat_list_conversations():
    """Return up to 50 of the user's conversations, newest first (sidebar)."""
    db = SessionLocal()
    try:
        rows = (
            db.query(AIChatConversation)
            .filter_by(user_id=current_user.id)
            .order_by(AIChatConversation.updated_at.desc())
            .limit(50)
            .all()
        )

        def serialize(conv):
            # Timestamps may be missing on legacy rows; emit None in that case.
            return {
                'id': conv.id,
                'title': conv.title,
                'created_at': conv.started_at.isoformat() if conv.started_at else None,
                'updated_at': conv.updated_at.isoformat() if conv.updated_at else None,
                'message_count': len(conv.messages) if conv.messages else 0
            }

        return jsonify({
            'success': True,
            'conversations': [serialize(c) for c in rows]
        })
    except Exception as e:
        logger.error(f"Error listing conversations: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
@app.route('/api/chat/<int:conversation_id>/delete', methods=['DELETE'])
@login_required
def chat_delete_conversation(conversation_id):
    """Delete one of the user's conversations together with its messages.

    NOTE(review): unlike the other chat API endpoints this one has no
    @csrf.exempt — presumably the frontend sends a CSRF token here; verify.
    """
    db = SessionLocal()
    try:
        target = db.query(AIChatConversation).filter_by(
            id=conversation_id,
            user_id=current_user.id
        ).first()
        if target is None:
            return jsonify({'success': False, 'error': 'Conversation not found'}), 404
        # Remove child messages before the parent conversation row.
        db.query(AIChatMessage).filter_by(conversation_id=conversation_id).delete()
        db.delete(target)
        db.commit()
        return jsonify({'success': True})
    except Exception as e:
        logger.error(f"Error deleting conversation: {e}")
        db.rollback()
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
# ============================================================
# API ROUTES (for frontend)
# ============================================================
@app.route('/api/companies')
def api_companies():
    """API: Return all active companies as a flat JSON list."""
    db = SessionLocal()
    try:
        payload = []
        for company in db.query(Company).filter_by(status='active').all():
            payload.append({
                'id': company.id,
                'name': company.name,
                'category': company.category.name if company.category else None,
                'description': company.description_short,
                'website': company.website,
                'phone': company.phone,
                'email': company.email
            })
        return jsonify({
            'success': True,
            'companies': payload
        })
    finally:
        db.close()
@app.route('/api/connections')
def api_connections():
    """
    API: Get company-person connections for D3.js visualization.
    Returns nodes (companies and people) and links (relationships).
    """
    db = SessionLocal()
    try:
        active_companies = db.query(Company).filter_by(status='active').all()
        # People that hold at least one company role.
        linked_people = db.query(Person).join(CompanyPerson).distinct().all()

        nodes = []
        # Company nodes
        for company in active_companies:
            nodes.append({
                'id': f'company_{company.id}',
                'name': company.name,
                'type': 'company',
                'category': company.category.name if company.category else 'Other',
                'slug': company.slug,
                'has_krs': bool(company.krs),
                'city': company.address_city or ''
            })
        # Person nodes — count UNIQUE active companies per person, since a
        # person may hold several roles in the same company.
        for person in linked_people:
            active_company_ids = {
                role.company_id
                for role in person.company_roles
                if role.company and role.company.status == 'active'
            }
            nodes.append({
                'id': f'person_{person.id}',
                'name': f'{person.imiona} {person.nazwisko}',
                'type': 'person',
                'company_count': len(active_company_ids)
            })

        # One link per role at an active company.
        links = []
        for person in linked_people:
            for role in person.company_roles:
                if role.company and role.company.status == 'active':
                    links.append({
                        'source': f'person_{person.id}',
                        'target': f'company_{role.company_id}',
                        'role': role.role,
                        'category': role.role_category
                    })

        company_node_count = len([n for n in nodes if n['type'] == 'company'])
        person_node_count = len([n for n in nodes if n['type'] == 'person'])
        return jsonify({
            'success': True,
            'nodes': nodes,
            'links': links,
            'stats': {
                'companies': company_node_count,
                'people': person_node_count,
                'connections': len(links)
            }
        })
    finally:
        db.close()
@app.route('/mapa-polaczen')
def connections_map():
    """Render the company-person connections visualization page."""
    # The D3.js frontend fetches its data from /api/connections.
    return render_template('connections_map.html')
def _collect_seo_issues(analysis):
    """Derive a list of SEO issue dicts (severity/message/category) from an
    analysis row.

    Combines rule-based checks on individual columns with any issues already
    stored in the ``seo_issues`` JSONB field. Checks that use ``is False``
    deliberately skip ``None`` (meaning "not checked").
    """
    issues = []
    # Accessibility: images missing alt text
    if analysis.images_without_alt and analysis.images_without_alt > 0:
        issues.append({
            'severity': 'warning',
            'message': f'{analysis.images_without_alt} obrazów nie ma atrybutu alt',
            'category': 'accessibility'
        })
    # On-page: missing meta description
    if not analysis.meta_description:
        issues.append({
            'severity': 'warning',
            'message': 'Brak meta description',
            'category': 'on_page'
        })
    # On-page: exactly one H1 is expected
    if analysis.h1_count is not None:
        if analysis.h1_count == 0:
            issues.append({
                'severity': 'error',
                'message': 'Brak nagłówka H1 na stronie',
                'category': 'on_page'
            })
        elif analysis.h1_count > 1:
            issues.append({
                'severity': 'warning',
                'message': f'Strona zawiera {analysis.h1_count} nagłówków H1 (zalecany: 1)',
                'category': 'on_page'
            })
    # Security: HTTPS
    if analysis.has_ssl is False:
        issues.append({
            'severity': 'error',
            'message': 'Strona nie używa HTTPS (brak certyfikatu SSL)',
            'category': 'security'
        })
    # Technical: robots.txt
    if analysis.has_robots_txt is False:
        issues.append({
            'severity': 'info',
            'message': 'Brak pliku robots.txt',
            'category': 'technical'
        })
    # Technical: sitemap.xml
    if analysis.has_sitemap is False:
        issues.append({
            'severity': 'info',
            'message': 'Brak pliku sitemap.xml',
            'category': 'technical'
        })
    # Technical: indexability
    if analysis.is_indexable is False:
        issues.append({
            'severity': 'error',
            'message': f'Strona nie jest indeksowalna: {analysis.noindex_reason or "nieznana przyczyna"}',
            'category': 'technical'
        })
    # On-page: structured data (Schema.org)
    if analysis.has_structured_data is False:
        issues.append({
            'severity': 'info',
            'message': 'Brak danych strukturalnych (Schema.org)',
            'category': 'on_page'
        })
    # Social: Open Graph tags
    if analysis.has_og_tags is False:
        issues.append({
            'severity': 'info',
            'message': 'Brak tagów Open Graph (ważne dla udostępniania w social media)',
            'category': 'social'
        })
    # Technical: mobile-friendliness
    if analysis.is_mobile_friendly is False:
        issues.append({
            'severity': 'warning',
            'message': 'Strona nie jest przyjazna dla urządzeń mobilnych',
            'category': 'technical'
        })
    # Merge issues persisted by the auditor (seo_issues JSONB: list of dicts)
    if analysis.seo_issues:
        stored_issues = analysis.seo_issues if isinstance(analysis.seo_issues, list) else []
        for issue in stored_issues:
            if isinstance(issue, dict):
                issues.append(issue)
    return issues
def _build_seo_audit_response(company, analysis):
    """
    Helper function to build SEO audit response JSON.
    Used by both /api/seo/audit and /api/seo/audit/<slug> endpoints.

    Args:
        company: Company row; only ``id``, ``name`` and ``website`` are read.
        analysis: CompanyWebsiteAnalysis row with the audit columns.

    Returns:
        dict ready for jsonify() with PageSpeed scores, on-page/technical
        metrics, Core Web Vitals, social tags and a derived issues list.
    """
    issues = _collect_seo_issues(analysis)
    return {
        'success': True,
        'company_id': company.id,
        'company_name': company.name,
        'website': company.website,
        'seo_audit': {
            'audited_at': analysis.seo_audited_at.isoformat() if analysis.seo_audited_at else None,
            'audit_version': analysis.seo_audit_version,
            'overall_score': analysis.seo_overall_score,
            'pagespeed': {
                'seo_score': analysis.pagespeed_seo_score,
                'performance_score': analysis.pagespeed_performance_score,
                'accessibility_score': analysis.pagespeed_accessibility_score,
                'best_practices_score': analysis.pagespeed_best_practices_score
            },
            'on_page': {
                'meta_title': analysis.meta_title,
                'meta_description': analysis.meta_description,
                'h1_count': analysis.h1_count,
                'h1_text': analysis.h1_text,
                'h2_count': analysis.h2_count,
                'h3_count': analysis.h3_count,
                'total_images': analysis.total_images,
                'images_without_alt': analysis.images_without_alt,
                'images_with_alt': analysis.images_with_alt,
                'internal_links_count': analysis.internal_links_count,
                'external_links_count': analysis.external_links_count,
                'has_structured_data': analysis.has_structured_data,
                'structured_data_types': analysis.structured_data_types
            },
            'technical': {
                'has_ssl': analysis.has_ssl,
                'ssl_issuer': analysis.ssl_issuer,
                'ssl_expires_at': analysis.ssl_expires_at.isoformat() if analysis.ssl_expires_at else None,
                'has_sitemap': analysis.has_sitemap,
                'has_robots_txt': analysis.has_robots_txt,
                'has_canonical': analysis.has_canonical,
                'canonical_url': analysis.canonical_url,
                'is_indexable': analysis.is_indexable,
                'noindex_reason': analysis.noindex_reason,
                'is_mobile_friendly': analysis.is_mobile_friendly,
                'viewport_configured': analysis.viewport_configured,
                'load_time_ms': analysis.load_time_ms,
                'http_status_code': analysis.http_status_code
            },
            'core_web_vitals': {
                'largest_contentful_paint_ms': analysis.largest_contentful_paint_ms,
                'first_input_delay_ms': analysis.first_input_delay_ms,
                # Bugfix: a CLS of 0 is a valid (perfect) measurement — only
                # None means "not measured", so compare with `is not None`.
                'cumulative_layout_shift': float(analysis.cumulative_layout_shift) if analysis.cumulative_layout_shift is not None else None
            },
            'social': {
                'has_og_tags': analysis.has_og_tags,
                'og_title': analysis.og_title,
                'og_description': analysis.og_description,
                'og_image': analysis.og_image,
                'has_twitter_cards': analysis.has_twitter_cards
            },
            'language': {
                'html_lang': analysis.html_lang,
                'has_hreflang': analysis.has_hreflang
            },
            'issues': issues
        }
    }
def _get_seo_audit_for_company(db, company):
    """
    Fetch the most recent SEO analysis for *company* and wrap it for the API.

    Returns a (response_dict, status_code) tuple. When no analysis row exists,
    or the SEO audit step never ran (seo_audited_at unset), the response has
    ``seo_audit: None`` plus a human-readable message, still with HTTP 200.
    """
    latest = db.query(CompanyWebsiteAnalysis).filter_by(
        company_id=company.id
    ).order_by(CompanyWebsiteAnalysis.analyzed_at.desc()).first()
    placeholder = {
        'success': True,
        'company_id': company.id,
        'company_name': company.name,
        'website': company.website,
        'seo_audit': None,
    }
    if latest is None:
        placeholder['message'] = 'Brak danych SEO dla tej firmy. Audyt nie został jeszcze przeprowadzony.'
        return placeholder, 200
    if not latest.seo_audited_at:
        placeholder['message'] = 'Audyt SEO nie został jeszcze przeprowadzony dla tej firmy.'
        return placeholder, 200
    # Full audit data available — build the complete response.
    return _build_seo_audit_response(company, latest), 200
@app.route('/api/seo/audit')
def api_seo_audit():
    """
    API: Get SEO audit results for a company.

    Query parameters (at least one required):
    - company_id: Company ID (integer)
    - slug: Company slug (string)

    Returns JSON with PageSpeed scores, on-page metrics, technical checks
    and an issues list with severity levels.
    """
    company_id = request.args.get('company_id', type=int)
    slug = request.args.get('slug', type=str)
    if not (company_id or slug):
        return jsonify({
            'success': False,
            'error': 'Podaj company_id lub slug firmy'
        }), 400
    db = SessionLocal()
    try:
        # company_id takes precedence when both identifiers are supplied.
        lookup = {'id': company_id} if company_id else {'slug': slug}
        company = db.query(Company).filter_by(status='active', **lookup).first()
        if company is None:
            return jsonify({
                'success': False,
                'error': 'Firma nie znaleziona'
            }), 404
        body, status = _get_seo_audit_for_company(db, company)
        return jsonify(body), status
    finally:
        db.close()
@app.route('/api/seo/audit/<slug>')
def api_seo_audit_by_slug(slug):
    """
    API: Get SEO audit results for a company by slug.
    Convenience endpoint that uses slug from URL path.
    Example: GET /api/seo/audit/pixlab-sp-z-o-o
    """
    db = SessionLocal()
    try:
        company = db.query(Company).filter_by(slug=slug, status='active').first()
        if company is None:
            return jsonify({
                'success': False,
                'error': 'Firma nie znaleziona'
            }), 404
        body, status = _get_seo_audit_for_company(db, company)
        return jsonify(body), status
    finally:
        db.close()
@app.route('/api/seo/audit', methods=['POST'])
@login_required
@limiter.limit("200 per hour")
def api_seo_audit_trigger():
    """
    API: Trigger SEO audit for a company (admin-only).

    This endpoint runs a full SEO audit including:
    - Google PageSpeed Insights analysis
    - On-page SEO analysis (meta tags, headings, images, links)
    - Technical SEO checks (robots.txt, sitemap, canonical URLs)

    Request JSON body:
    - company_id: Company ID (integer) OR
    - slug: Company slug (string)

    Returns:
    - Success: Full SEO audit results saved to database
    - Error: Error message with status code

    Rate limited to 200 requests per hour per user (see the limiter
    decorator above) to prevent API abuse.
    """
    # Admin-only check — regular members may not trigger audits
    if not current_user.is_admin:
        return jsonify({
            'success': False,
            'error': 'Brak uprawnień. Tylko administrator może uruchamiać audyty SEO.'
        }), 403
    # Check if SEO audit service is available (optional module import)
    if not SEO_AUDIT_AVAILABLE:
        return jsonify({
            'success': False,
            'error': 'Usługa audytu SEO jest niedostępna. Sprawdź konfigurację serwera.'
        }), 503
    # Parse request data
    data = request.get_json()
    if not data:
        return jsonify({
            'success': False,
            'error': 'Brak danych w żądaniu. Podaj company_id lub slug.'
        }), 400
    company_id = data.get('company_id')
    slug = data.get('slug')
    if not company_id and not slug:
        return jsonify({
            'success': False,
            'error': 'Podaj company_id lub slug firmy do audytu.'
        }), 400
    db = SessionLocal()
    try:
        # Find company by ID or slug (ID takes precedence)
        if company_id:
            company = db.query(Company).filter_by(id=company_id, status='active').first()
        else:
            company = db.query(Company).filter_by(slug=slug, status='active').first()
        if not company:
            return jsonify({
                'success': False,
                'error': 'Firma nie znaleziona lub nieaktywna.'
            }), 404
        # Check if company has a website — nothing to audit otherwise
        if not company.website:
            return jsonify({
                'success': False,
                'error': f'Firma "{company.name}" nie ma zdefiniowanej strony internetowej.',
                'company_id': company.id,
                'company_name': company.name
            }), 400
        logger.info(f"SEO audit triggered by admin {current_user.email} for company: {company.name} (ID: {company.id})")
        # Initialize SEO auditor and run audit
        try:
            auditor = SEOAuditor()
            # Prepare company dict for auditor — a plain dict keeps the
            # auditor decoupled from the ORM model
            company_dict = {
                'id': company.id,
                'name': company.name,
                'slug': company.slug,
                'website': company.website,
                'address_city': company.address_city
            }
            # Run the audit (network-bound: PageSpeed + on-page fetch)
            audit_result = auditor.audit_company(company_dict)
            # Treat errors as fatal only when no partial data (neither
            # on-page nor PageSpeed results) was collected
            if audit_result.get('errors') and not audit_result.get('onpage') and not audit_result.get('pagespeed'):
                return jsonify({
                    'success': False,
                    'error': f'Audyt nie powiódł się: {", ".join(audit_result["errors"])}',
                    'company_id': company.id,
                    'company_name': company.name,
                    'website': company.website
                }), 422
            # Save result to database
            saved = auditor.save_audit_result(audit_result)
            if not saved:
                return jsonify({
                    'success': False,
                    'error': 'Audyt został wykonany, ale nie udało się zapisać wyników do bazy danych.',
                    'company_id': company.id,
                    'company_name': company.name
                }), 500
            # Get the updated analysis record to return
            db.expire_all()  # Refresh the session to get updated data
            analysis = db.query(CompanyWebsiteAnalysis).filter_by(
                company_id=company.id
            ).order_by(CompanyWebsiteAnalysis.analyzed_at.desc()).first()
            # Build response using the existing helper function
            response = _build_seo_audit_response(company, analysis)
            return jsonify({
                'success': True,
                'message': f'Audyt SEO dla firmy "{company.name}" został zakończony pomyślnie.',
                'audit_version': SEO_AUDIT_VERSION,
                'triggered_by': current_user.email,
                'triggered_at': datetime.now().isoformat(),
                **response
            }), 200
        except Exception as e:
            logger.error(f"SEO audit error for company {company.id}: {e}")
            return jsonify({
                'success': False,
                'error': f'Błąd podczas wykonywania audytu: {str(e)}',
                'company_id': company.id,
                'company_name': company.name
            }), 500
    finally:
        db.close()
# ============================================================
# SEO ADMIN DASHBOARD
# ============================================================
@app.route('/admin/seo')
@login_required
def admin_seo():
    """
    Admin dashboard for SEO metrics overview.
    Displays:
    - Summary stats (score distribution, average score)
    - Sortable table of all companies with SEO scores
    - Color-coded score badges (green 90-100, yellow 50-89, red 0-49)
    - Filtering by category, score range, and search text
    - Last audit date with staleness indicator
    - Actions: view profile, trigger single company audit
    Query Parameters:
    - company: Slug of company to highlight/filter (optional)
    """
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))
    # Get optional company filter from URL
    filter_company_slug = request.args.get('company', '')
    db = SessionLocal()
    try:
        from sqlalchemy import func
        # NOTE(review): `func` appears unused in this view (stats are
        # computed in Python below) — confirm before removing the import.
        # Get all active companies with their latest SEO analysis data
        # Using outerjoin to include companies without SEO data
        companies_query = db.query(
            Company.id,
            Company.name,
            Company.slug,
            Company.website,
            Category.name.label('category_name'),
            CompanyWebsiteAnalysis.pagespeed_seo_score,
            CompanyWebsiteAnalysis.pagespeed_performance_score,
            CompanyWebsiteAnalysis.pagespeed_accessibility_score,
            CompanyWebsiteAnalysis.pagespeed_best_practices_score,
            CompanyWebsiteAnalysis.seo_audited_at
        ).outerjoin(
            Category,
            Company.category_id == Category.id
        ).outerjoin(
            CompanyWebsiteAnalysis,
            Company.id == CompanyWebsiteAnalysis.company_id
        ).filter(
            Company.status == 'active'
        ).order_by(
            Company.name
        ).all()
        # Build companies list with named attributes for template
        companies = []
        for row in companies_query:
            companies.append({
                'id': row.id,
                'name': row.name,
                'slug': row.slug,
                'website': row.website,
                'category': row.category_name,
                'seo_score': row.pagespeed_seo_score,
                'performance_score': row.pagespeed_performance_score,
                'accessibility_score': row.pagespeed_accessibility_score,
                'best_practices_score': row.pagespeed_best_practices_score,
                'seo_audited_at': row.seo_audited_at
            })
        # Calculate statistics ("audited" = has a PageSpeed SEO score)
        audited_companies = [c for c in companies if c['seo_score'] is not None]
        not_audited = [c for c in companies if c['seo_score'] is None]
        good_count = len([c for c in audited_companies if c['seo_score'] >= 90])
        medium_count = len([c for c in audited_companies if 50 <= c['seo_score'] < 90])
        poor_count = len([c for c in audited_companies if c['seo_score'] < 50])
        not_audited_count = len(not_audited)
        # Calculate average score (only for audited companies)
        if audited_companies:
            avg_score = round(sum(c['seo_score'] for c in audited_companies) / len(audited_companies))
        else:
            avg_score = None
        stats = {
            'good_count': good_count,
            'medium_count': medium_count,
            'poor_count': poor_count,
            'not_audited_count': not_audited_count,
            'avg_score': avg_score
        }
        # Get unique categories for filter dropdown
        categories = sorted(set(c['category'] for c in companies if c['category']))
        # Convert companies list to objects with attribute access for template
        class CompanyRow:
            # Lightweight attribute bag so Jinja can use dot access.
            def __init__(self, data):
                for key, value in data.items():
                    setattr(self, key, value)
        companies_objects = [CompanyRow(c) for c in companies]
        return render_template('admin_seo_dashboard.html',
            companies=companies_objects,
            stats=stats,
            categories=categories,
            now=datetime.now(),
            filter_company=filter_company_slug
        )
    finally:
        db.close()
# ============================================================
# GBP AUDIT ADMIN DASHBOARD
# ============================================================
@app.route('/admin/gbp-audit')
@login_required
def admin_gbp_audit():
    """
    Admin dashboard for GBP (Google Business Profile) audit overview.
    Displays:
    - Summary stats (completeness score distribution, field coverage)
    - Sortable table of all companies with GBP audit data
    - Review metrics (avg rating, review counts)
    - Photo statistics
    """
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))
    db = SessionLocal()
    try:
        from sqlalchemy import func, distinct
        from database import GBPAudit, Category
        # NOTE(review): `distinct` appears unused in this view — confirm
        # before removing it from the import.
        # Subquery to get latest audit for each company
        latest_audit_subq = db.query(
            GBPAudit.company_id,
            func.max(GBPAudit.audit_date).label('max_date')
        ).group_by(GBPAudit.company_id).subquery()
        # Get all companies with their latest GBP audit data
        # (outer joins keep companies that were never audited)
        companies_query = db.query(
            Company.id,
            Company.name,
            Company.slug,
            Company.website,
            Category.name.label('category_name'),
            GBPAudit.completeness_score,
            GBPAudit.average_rating,
            GBPAudit.review_count,
            GBPAudit.photo_count,
            GBPAudit.has_name,
            GBPAudit.has_address,
            GBPAudit.has_phone,
            GBPAudit.has_website,
            GBPAudit.has_hours,
            GBPAudit.has_categories,
            GBPAudit.has_photos,
            GBPAudit.has_description,
            GBPAudit.has_services,
            GBPAudit.has_reviews,
            GBPAudit.audit_date
        ).outerjoin(
            Category,
            Company.category_id == Category.id
        ).outerjoin(
            latest_audit_subq,
            Company.id == latest_audit_subq.c.company_id
        ).outerjoin(
            GBPAudit,
            (Company.id == GBPAudit.company_id) &
            (GBPAudit.audit_date == latest_audit_subq.c.max_date)
        ).filter(
            Company.status == 'active'
        ).order_by(Company.name).all()
        # Build companies list (plain dicts; wrapped in attribute objects
        # below for dot access in the template)
        companies = []
        for row in companies_query:
            companies.append({
                'id': row.id,
                'name': row.name,
                'slug': row.slug,
                'website': row.website,
                'category': row.category_name,
                'completeness_score': row.completeness_score,
                'average_rating': float(row.average_rating) if row.average_rating else None,
                'review_count': row.review_count or 0,
                'photo_count': row.photo_count or 0,
                'has_name': row.has_name,
                'has_address': row.has_address,
                'has_phone': row.has_phone,
                'has_website': row.has_website,
                'has_hours': row.has_hours,
                'has_categories': row.has_categories,
                'has_photos': row.has_photos,
                'has_description': row.has_description,
                'has_services': row.has_services,
                'has_reviews': row.has_reviews,
                'audit_date': row.audit_date
            })
        # Calculate statistics
        total_companies = len(companies)
        audited = [c for c in companies if c['completeness_score'] is not None]
        not_audited = [c for c in companies if c['completeness_score'] is None]
        # Score distribution (>=90 excellent, 70-89 good, <70 poor)
        excellent_count = len([c for c in audited if c['completeness_score'] >= 90])
        good_count = len([c for c in audited if 70 <= c['completeness_score'] < 90])
        poor_count = len([c for c in audited if c['completeness_score'] < 70])
        not_audited_count = len(not_audited)
        # Average completeness
        avg_completeness = round(sum(c['completeness_score'] for c in audited) / len(audited)) if audited else None
        # Average rating (only for companies with reviews)
        companies_with_rating = [c for c in audited if c['average_rating']]
        avg_rating = round(sum(c['average_rating'] for c in companies_with_rating) / len(companies_with_rating), 1) if companies_with_rating else None
        # Total reviews
        total_reviews = sum(c['review_count'] for c in companies)
        # Field coverage stats (percentage of audited companies with each field)
        if audited:
            field_coverage = {
                'name': round(len([c for c in audited if c['has_name']]) / len(audited) * 100),
                'address': round(len([c for c in audited if c['has_address']]) / len(audited) * 100),
                'phone': round(len([c for c in audited if c['has_phone']]) / len(audited) * 100),
                'website': round(len([c for c in audited if c['has_website']]) / len(audited) * 100),
                'hours': round(len([c for c in audited if c['has_hours']]) / len(audited) * 100),
                'categories': round(len([c for c in audited if c['has_categories']]) / len(audited) * 100),
                'photos': round(len([c for c in audited if c['has_photos']]) / len(audited) * 100),
                'description': round(len([c for c in audited if c['has_description']]) / len(audited) * 100),
                'services': round(len([c for c in audited if c['has_services']]) / len(audited) * 100),
                'reviews': round(len([c for c in audited if c['has_reviews']]) / len(audited) * 100),
            }
        else:
            field_coverage = {k: 0 for k in ['name', 'address', 'phone', 'website', 'hours', 'categories', 'photos', 'description', 'services', 'reviews']}
        stats = {
            'total_companies': total_companies,
            'audited_count': len(audited),
            'excellent_count': excellent_count,
            'good_count': good_count,
            'poor_count': poor_count,
            'not_audited_count': not_audited_count,
            'avg_completeness': avg_completeness,
            'avg_rating': avg_rating,
            'total_reviews': total_reviews,
            'field_coverage': field_coverage
        }
        # Get unique categories
        categories = sorted(set(c['category'] for c in companies if c['category']))
        # Convert to objects for template
        class CompanyRow:
            # Lightweight attribute bag so Jinja can use dot access.
            def __init__(self, data):
                for key, value in data.items():
                    setattr(self, key, value)
        companies_objects = [CompanyRow(c) for c in companies]
        return render_template('admin/gbp_audit_dashboard.html',
            companies=companies_objects,
            stats=stats,
            categories=categories,
            now=datetime.now()
        )
    finally:
        db.close()
# ============================================================
# GBP (GOOGLE BUSINESS PROFILE) AUDIT API
# ============================================================
@app.route('/api/gbp/audit/health')
def api_gbp_audit_health():
    """
    API: Health check for GBP audit service.
    Returns service status and version information.
    Used by monitoring systems to verify service availability.
    """
    # Guard clause first: report unavailability when the module failed to load.
    if not GBP_AUDIT_AVAILABLE:
        return jsonify({
            'status': 'unavailable',
            'service': 'gbp_audit',
            'available': False,
            'error': 'GBP audit service not loaded'
        }), 503
    return jsonify({
        'status': 'ok',
        'service': 'gbp_audit',
        'version': GBP_AUDIT_VERSION,
        'available': True
    }), 200
@app.route('/api/gbp/audit', methods=['GET'])
def api_gbp_audit_get():
    """
    API: Get GBP audit results for a company.

    Query parameters (at least one required):
    - company_id: Company ID (integer) OR
    - slug: Company slug (string)

    Returns the latest audit with completeness score and recommendations;
    404 when the company or its audit cannot be found.
    Example: GET /api/gbp/audit?company_id=26
    Example: GET /api/gbp/audit?slug=pixlab-sp-z-o-o
    """
    if not GBP_AUDIT_AVAILABLE:
        return jsonify({
            'success': False,
            'error': 'Usługa audytu GBP jest niedostępna.'
        }), 503
    company_id = request.args.get('company_id', type=int)
    slug = request.args.get('slug')
    if not (company_id or slug):
        return jsonify({
            'success': False,
            'error': 'Podaj company_id lub slug firmy.'
        }), 400
    db = SessionLocal()
    try:
        # company_id takes precedence when both identifiers are supplied.
        lookup = {'id': company_id} if company_id else {'slug': slug}
        company = db.query(Company).filter_by(status='active', **lookup).first()
        if company is None:
            return jsonify({
                'success': False,
                'error': 'Firma nie znaleziona lub nieaktywna.'
            }), 404
        # Latest audit row for this company.
        audit = gbp_get_company_audit(db, company.id)
        if audit is None:
            return jsonify({
                'success': False,
                'error': f'Brak wyników audytu GBP dla firmy "{company.name}". Uruchom audyt używając POST /api/gbp/audit.',
                'company_id': company.id,
                'company_name': company.name
            }), 404
        audit_payload = {
            'id': audit.id,
            'audit_date': audit.audit_date.isoformat() if audit.audit_date else None,
            'completeness_score': audit.completeness_score,
            'score_category': audit.score_category,
            'fields_status': audit.fields_status,
            'recommendations': audit.recommendations,
            'has_name': audit.has_name,
            'has_address': audit.has_address,
            'has_phone': audit.has_phone,
            'has_website': audit.has_website,
            'has_hours': audit.has_hours,
            'has_categories': audit.has_categories,
            'has_photos': audit.has_photos,
            'has_description': audit.has_description,
            'has_services': audit.has_services,
            'has_reviews': audit.has_reviews,
            'photo_count': audit.photo_count,
            'review_count': audit.review_count,
            'average_rating': float(audit.average_rating) if audit.average_rating else None,
            'google_place_id': audit.google_place_id,
            'audit_source': audit.audit_source,
            'audit_version': audit.audit_version
        }
        return jsonify({
            'success': True,
            'company_id': company.id,
            'company_name': company.name,
            'company_slug': company.slug,
            'audit': audit_payload
        }), 200
    except Exception as e:
        logger.error(f"Error fetching GBP audit: {e}")
        return jsonify({
            'success': False,
            'error': f'Błąd podczas pobierania audytu: {str(e)}'
        }), 500
    finally:
        db.close()
@app.route('/api/gbp/audit/<slug>')
def api_gbp_audit_by_slug(slug):
    """
    API: Get GBP audit results for a company by slug.
    Convenience endpoint that uses slug from URL path.
    Example: GET /api/gbp/audit/pixlab-sp-z-o-o
    """
    if not GBP_AUDIT_AVAILABLE:
        return jsonify({
            'success': False,
            'error': 'Usługa audytu GBP jest niedostępna.'
        }), 503
    db = SessionLocal()
    try:
        company = db.query(Company).filter_by(slug=slug, status='active').first()
        if company is None:
            return jsonify({
                'success': False,
                'error': f'Firma o slug "{slug}" nie znaleziona.'
            }), 404
        latest_audit = gbp_get_company_audit(db, company.id)
        if latest_audit is None:
            return jsonify({
                'success': False,
                'error': f'Brak wyników audytu GBP dla firmy "{company.name}".',
                'company_id': company.id,
                'company_name': company.name
            }), 404
        # Compact payload: only the summary fields (full detail lives on
        # the query-parameter variant of this endpoint).
        return jsonify({
            'success': True,
            'company_id': company.id,
            'company_name': company.name,
            'company_slug': company.slug,
            'audit': {
                'id': latest_audit.id,
                'audit_date': latest_audit.audit_date.isoformat() if latest_audit.audit_date else None,
                'completeness_score': latest_audit.completeness_score,
                'score_category': latest_audit.score_category,
                'fields_status': latest_audit.fields_status,
                'recommendations': latest_audit.recommendations,
                'photo_count': latest_audit.photo_count,
                'review_count': latest_audit.review_count,
                'average_rating': float(latest_audit.average_rating) if latest_audit.average_rating else None
            }
        }), 200
    finally:
        db.close()
@app.route('/api/gbp/audit', methods=['POST'])
@login_required
@limiter.limit("20 per hour")
def api_gbp_audit_trigger():
    """
    API: Run GBP audit for a company.

    This endpoint runs a completeness audit for Google Business Profile data,
    checking fields like name, address, phone, website, hours, categories,
    photos, description, services, and reviews.

    Request JSON body:
    - company_id: Company ID (integer) OR
    - slug: Company slug (string)
    - save: Whether to save results to database (default: true)
    - fetch_google: Refresh Google data before auditing (default: true)
    - force_refresh: Bypass cached Google data on the fetch (default: false)

    Returns:
    - Success: Audit results with completeness score and recommendations
    - Error: Error message with status code

    Access:
    - Members can audit their own company
    - Admins can audit any company

    Rate limited to 20 requests per hour per user.
    """
    if not GBP_AUDIT_AVAILABLE:
        return jsonify({
            'success': False,
            'error': 'Usługa audytu GBP jest niedostępna. Sprawdź konfigurację serwera.'
        }), 503
    # Parse request data
    data = request.get_json()
    if not data:
        return jsonify({
            'success': False,
            'error': 'Brak danych w żądaniu. Podaj company_id lub slug.'
        }), 400
    company_id = data.get('company_id')
    slug = data.get('slug')
    save_result = data.get('save', True)
    if not company_id and not slug:
        return jsonify({
            'success': False,
            'error': 'Podaj company_id lub slug firmy do audytu.'
        }), 400
    db = SessionLocal()
    try:
        # Find company by ID or slug (company_id wins when both are sent)
        if company_id:
            company = db.query(Company).filter_by(id=company_id, status='active').first()
        else:
            company = db.query(Company).filter_by(slug=slug, status='active').first()
        if not company:
            return jsonify({
                'success': False,
                'error': 'Firma nie znaleziona lub nieaktywna.'
            }), 404
        # Check access: admin can audit any company, member only their own
        if not current_user.is_admin:
            # Check if user is associated with this company
            if current_user.company_id != company.id:
                return jsonify({
                    'success': False,
                    'error': 'Brak uprawnień. Możesz audytować tylko własną firmę.'
                }), 403
        logger.info(f"GBP audit triggered by {current_user.email} for company: {company.name} (ID: {company.id})")
        # Option to fetch fresh Google data before audit
        fetch_google = data.get('fetch_google', True)
        force_refresh = data.get('force_refresh', False)
        try:
            # Step 1: Fetch fresh Google Business data (if enabled)
            fetch_result = None
            if fetch_google:
                logger.info(f"Fetching Google Business data for company {company.id}...")
                fetch_result = gbp_fetch_google_data(db, company.id, force_refresh=force_refresh)
                if not fetch_result.get('success') and not fetch_result.get('data', {}).get('cached'):
                    # Log warning but continue with audit — a failed Google
                    # fetch must not block auditing the data we already have.
                    logger.warning(f"Google fetch warning for company {company.id}: {fetch_result.get('error')}")
            # Step 2: Run the audit
            result = gbp_audit_company(db, company.id, save=save_result)
            # Build field status for response (serialize each field's
            # status object into plain JSON-safe values)
            fields_response = {}
            for field_name, field_status in result.fields.items():
                fields_response[field_name] = {
                    'status': field_status.status,
                    'value': str(field_status.value) if field_status.value is not None else None,
                    'score': field_status.score,
                    'max_score': field_status.max_score,
                    'recommendation': field_status.recommendation
                }
            # Determine score category (thresholds: >=90 / >=70 / >=50)
            score = result.completeness_score
            if score >= 90:
                score_category = 'excellent'
            elif score >= 70:
                score_category = 'good'
            elif score >= 50:
                score_category = 'needs_work'
            else:
                score_category = 'poor'
            response_data = {
                'success': True,
                'message': f'Audyt GBP dla firmy "{company.name}" został zakończony pomyślnie.',
                'company_id': company.id,
                'company_name': company.name,
                'company_slug': company.slug,
                'audit_version': GBP_AUDIT_VERSION,
                'triggered_by': current_user.email,
                'triggered_at': datetime.now().isoformat(),
                'saved': save_result,
                'audit': {
                    'completeness_score': result.completeness_score,
                    'score_category': score_category,
                    'fields_status': fields_response,
                    'recommendations': result.recommendations,
                    'photo_count': result.photo_count,
                    'logo_present': result.logo_present,
                    'cover_photo_present': result.cover_photo_present,
                    'review_count': result.review_count,
                    'average_rating': float(result.average_rating) if result.average_rating else None,
                    'google_place_id': result.google_place_id
                }
            }
            # Include Google fetch results if performed
            if fetch_result:
                response_data['google_fetch'] = {
                    'success': fetch_result.get('success', False),
                    'steps': fetch_result.get('steps', []),
                    'data': fetch_result.get('data', {}),
                    'error': fetch_result.get('error')
                }
            return jsonify(response_data), 200
        except ValueError as e:
            # Presumably raised by gbp_audit_company for invalid input —
            # mapped to a 400 client error. TODO confirm against the
            # gbp audit service.
            return jsonify({
                'success': False,
                'error': str(e),
                'company_id': company.id if company else None
            }), 400
        except Exception as e:
            logger.error(f"GBP audit error for company {company.id}: {e}")
            return jsonify({
                'success': False,
                'error': f'Błąd podczas wykonywania audytu: {str(e)}',
                'company_id': company.id,
                'company_name': company.name
            }), 500
    finally:
        db.close()
# ============================================================
# SEO AUDIT USER-FACING DASHBOARD
# ============================================================
@app.route('/audit/seo/<slug>')
@login_required
def seo_audit_dashboard(slug):
    """
    Render the user-facing SEO audit dashboard for one company.

    Shows PageSpeed Insights scores (SEO, performance, accessibility,
    best practices) from the most recent website analysis, when one
    exists, plus a flag telling the template whether the viewer may
    trigger a new audit.

    Access control: admins may open any company; other users only the
    company they belong to.

    Args:
        slug: Company slug identifier

    Returns:
        Rendered seo_audit.html template with company and audit data
    """
    session_db = SessionLocal()
    try:
        target = session_db.query(Company).filter_by(slug=slug, status='active').first()
        if target is None:
            flash('Firma nie została znaleziona.', 'error')
            return redirect(url_for('dashboard'))
        # Non-admins may only open their own company's audit.
        if not current_user.is_admin and current_user.company_id != target.id:
            flash('Brak uprawnień. Możesz przeglądać audyt tylko własnej firmy.', 'error')
            return redirect(url_for('dashboard'))
        # Most recent analysis row for this company.
        latest = (
            session_db.query(CompanyWebsiteAnalysis)
            .filter(CompanyWebsiteAnalysis.company_id == target.id)
            .order_by(CompanyWebsiteAnalysis.seo_audited_at.desc())
            .first()
        )
        # Only expose scores when an SEO audit actually ran.
        seo_data = None
        if latest is not None and latest.seo_audited_at:
            seo_data = {
                'seo_score': latest.pagespeed_seo_score,
                'performance_score': latest.pagespeed_performance_score,
                'accessibility_score': latest.pagespeed_accessibility_score,
                'best_practices_score': latest.pagespeed_best_practices_score,
                'audited_at': latest.seo_audited_at,
                'audit_version': latest.seo_audit_version,
                'url': latest.website_url
            }
        can_audit = current_user.is_admin or current_user.company_id == target.id
        logger.info(f"SEO audit dashboard viewed by {current_user.email} for company: {target.name}")
        return render_template('seo_audit.html',
                               company=target,
                               seo_data=seo_data,
                               can_audit=can_audit)
    finally:
        session_db.close()
# ============================================================
# SOCIAL MEDIA AUDIT USER-FACING DASHBOARD
# ============================================================
@app.route('/audit/social/<slug>')
@login_required
def social_audit_dashboard(slug):
    """
    Render the user-facing social media audit dashboard for one company.

    Builds a per-platform status map from the stored CompanySocialMedia
    rows and computes a presence score: the share of tracked platforms
    (facebook, instagram, linkedin, youtube, twitter, tiktok) for which
    a profile exists, as a 0-100 integer.

    Access control: admins may open any company; other users only the
    company they belong to.

    Args:
        slug: Company URL slug

    Returns:
        Rendered social_audit.html template with company and social data
    """
    session_db = SessionLocal()
    try:
        target = session_db.query(Company).filter_by(slug=slug, status='active').first()
        if target is None:
            flash('Firma nie została znaleziona.', 'error')
            return redirect(url_for('dashboard'))
        # Non-admins may only open their own company's audit.
        if not current_user.is_admin and current_user.company_id != target.id:
            flash('Brak uprawnień do wyświetlenia audytu social media tej firmy.', 'error')
            return redirect(url_for('dashboard'))
        rows = session_db.query(CompanySocialMedia).filter(
            CompanySocialMedia.company_id == target.id
        ).all()
        # The fixed set of platforms this audit tracks.
        tracked_platforms = ['facebook', 'instagram', 'linkedin', 'youtube', 'twitter', 'tiktok']
        # Map platform name -> serialized profile record.
        profiles_by_platform = {
            row.platform: {
                'url': row.url,
                'is_valid': row.is_valid,
                'check_status': row.check_status,
                'page_name': row.page_name,
                'followers_count': row.followers_count,
                'verified_at': row.verified_at,
                'last_checked_at': row.last_checked_at
            }
            for row in rows
        }
        # Presence score: tracked platforms with a profile / all tracked.
        found = sum(1 for name in tracked_platforms if name in profiles_by_platform)
        total = len(tracked_platforms)
        score = int((found / total) * 100) if total > 0 else 0
        social_data = {
            'profiles': profiles_by_platform,
            'all_platforms': tracked_platforms,
            'platforms_count': found,
            'total_platforms': total,
            'score': score
        }
        can_audit = current_user.is_admin or current_user.company_id == target.id
        logger.info(f"Social Media audit dashboard viewed by {current_user.email} for company: {target.name}")
        return render_template('social_audit.html',
                               company=target,
                               social_data=social_data,
                               can_audit=can_audit)
    finally:
        session_db.close()
@app.route('/api/social/audit', methods=['POST'])
@login_required
@limiter.limit("10 per hour")
def api_social_audit_trigger():
    """
    API: Trigger Social Media audit for a company.

    This endpoint performs a comprehensive social media audit:
    - Scans company website for social media links
    - Searches for profiles via Brave Search API (if configured)
    - Fetches Google Business Profile data
    - Updates database with discovered profiles

    Request JSON body:
    - company_id: Company ID (integer) OR
    - slug: Company slug (string)

    Returns:
    - Success: Updated social media audit results
    - Error: Error message with status code

    Rate limited to 10 requests per hour per user.
    """
    # Import the SocialMediaAuditor from scripts.
    # Imported lazily (and scripts/ appended to sys.path) so a missing or
    # broken auditor module degrades to a 503 instead of breaking startup.
    try:
        import sys
        from pathlib import Path
        scripts_dir = Path(__file__).parent / 'scripts'
        if str(scripts_dir) not in sys.path:
            sys.path.insert(0, str(scripts_dir))
        from social_media_audit import SocialMediaAuditor
    except ImportError as e:
        logger.error(f"Failed to import SocialMediaAuditor: {e}")
        return jsonify({
            'success': False,
            'error': 'Usługa audytu Social Media jest niedostępna. Sprawdź konfigurację serwera.'
        }), 503
    # Parse request data
    data = request.get_json()
    if not data:
        return jsonify({
            'success': False,
            'error': 'Brak danych w żądaniu. Podaj company_id lub slug.'
        }), 400
    company_id = data.get('company_id')
    slug = data.get('slug')
    if not company_id and not slug:
        return jsonify({
            'success': False,
            'error': 'Podaj company_id lub slug firmy do audytu.'
        }), 400
    db = SessionLocal()
    try:
        # Find company by ID or slug (ID takes precedence when both given)
        if company_id:
            company = db.query(Company).filter_by(id=company_id, status='active').first()
        else:
            company = db.query(Company).filter_by(slug=slug, status='active').first()
        if not company:
            return jsonify({
                'success': False,
                'error': 'Firma nie znaleziona lub nieaktywna.'
            }), 404
        # Access control - admin can audit all, users only their company
        if not current_user.is_admin:
            if current_user.company_id != company.id:
                return jsonify({
                    'success': False,
                    'error': 'Brak uprawnień do audytu social media tej firmy.'
                }), 403
        logger.info(f"Social Media audit triggered by {current_user.email} for company: {company.name} (ID: {company.id})")
        # Prepare company dict for auditor (plain dict keeps the auditor
        # decoupled from the SQLAlchemy model)
        company_dict = {
            'id': company.id,
            'name': company.name,
            'slug': company.slug,
            'website': company.website,
            'address_city': company.address_city or 'Wejherowo'
        }
        # Initialize auditor and run audit
        try:
            auditor = SocialMediaAuditor()
            audit_result = auditor.audit_company(company_dict)
            # Treat the run as failed only when errors occurred AND no
            # usable data came back (neither profiles nor website data).
            if audit_result.get('errors') and not audit_result.get('social_media') and not audit_result.get('website'):
                return jsonify({
                    'success': False,
                    'error': f'Audyt nie powiódł się: {", ".join(audit_result["errors"][:3])}',
                    'company_id': company.id,
                    'company_name': company.name
                }), 422
            # Save result to database
            saved = auditor.save_audit_result(audit_result)
            if not saved:
                return jsonify({
                    'success': False,
                    'error': 'Audyt został wykonany, ale nie udało się zapisać wyników do bazy danych.',
                    'company_id': company.id,
                    'company_name': company.name
                }), 500
            # Get count of social media profiles found
            social_media_found = audit_result.get('social_media', {})
            platforms_count = len(social_media_found)
            # Calculate score: share of tracked platforms with a profile
            all_platforms = ['facebook', 'instagram', 'linkedin', 'youtube', 'twitter', 'tiktok']
            score = int((platforms_count / len(all_platforms)) * 100)
            return jsonify({
                'success': True,
                'message': f'Audyt Social Media zakończony. Znaleziono {platforms_count} profili.',
                'company_id': company.id,
                'company_name': company.name,
                'profiles_found': platforms_count,
                'platforms': list(social_media_found.keys()),
                'score': score,
                'google_reviews': audit_result.get('google_reviews', {}),
                'errors': audit_result.get('errors') if audit_result.get('errors') else None
            }), 200
        except Exception as e:
            logger.error(f"Social Media audit error for company {company.id}: {e}")
            return jsonify({
                'success': False,
                'error': f'Błąd podczas audytu: {str(e)}'
            }), 500
    except Exception as e:
        # Failures outside the auditor itself (e.g. the DB lookup above).
        logger.error(f"Social Media audit error for company {slug or company_id}: {e}")
        db.rollback()
        return jsonify({
            'success': False,
            'error': f'Błąd podczas audytu: {str(e)}'
        }), 500
    finally:
        db.close()
# ============================================================
# GBP AUDIT USER-FACING DASHBOARD
# ============================================================
@app.route('/audit/gbp/<slug>')
@login_required
def gbp_audit_dashboard(slug):
    """
    Render the user-facing Google Business Profile audit dashboard.

    Shows the latest stored GBP completeness audit (score, field
    breakdown, recommendations) for the company identified by *slug*.
    When no audit exists yet, the page still renders — the template
    handles the empty state and lets the user trigger an audit.

    Access control: admins may open any company; other users only the
    company they belong to.

    Args:
        slug: Company slug identifier

    Returns:
        Rendered gbp_audit.html template with company and audit data
    """
    # Audit subsystem unavailable: send the user back with a notice.
    if not GBP_AUDIT_AVAILABLE:
        flash('Usługa audytu Google Business Profile jest tymczasowo niedostępna.', 'error')
        return redirect(url_for('dashboard'))
    session_db = SessionLocal()
    try:
        target = session_db.query(Company).filter_by(slug=slug, status='active').first()
        if target is None:
            flash('Firma nie została znaleziona.', 'error')
            return redirect(url_for('dashboard'))
        # Non-admins may only open their own company's audit.
        if not current_user.is_admin and current_user.company_id != target.id:
            flash('Brak uprawnień. Możesz przeglądać audyt tylko własnej firmy.', 'error')
            return redirect(url_for('dashboard'))
        latest_audit = gbp_get_company_audit(session_db, target.id)
        can_audit = current_user.is_admin or current_user.company_id == target.id
        logger.info(f"GBP audit dashboard viewed by {current_user.email} for company: {target.name}")
        return render_template('gbp_audit.html',
                               company=target,
                               audit=latest_audit,
                               can_audit=can_audit,
                               gbp_audit_available=GBP_AUDIT_AVAILABLE,
                               gbp_audit_version=GBP_AUDIT_VERSION)
    finally:
        session_db.close()
# ============================================================
# IT AUDIT USER-FACING DASHBOARD
# ============================================================
@app.route('/audit/it/<slug>')
@login_required
def it_audit_dashboard(slug):
    """
    User-facing IT infrastructure audit dashboard for a specific company.

    Displays IT audit results with:
    - Overall score and maturity level
    - Security, collaboration, and completeness sub-scores
    - Technology stack summary (Azure AD, M365, backup, monitoring)
    - AI-generated recommendations

    Access control:
    - Admin users can view audit for any company
    - Regular users can only view audit for their own company

    Args:
        slug: Company slug identifier

    Returns:
        Rendered it_audit.html template with company and audit data
    """
    db = SessionLocal()
    try:
        # Import IT audit models
        # (local import — presumably avoids an import cycle with
        # database.py at module load time; confirm before moving to top)
        from database import ITAudit
        # Find company by slug
        company = db.query(Company).filter_by(slug=slug, status='active').first()
        if not company:
            flash('Firma nie została znaleziona.', 'error')
            return redirect(url_for('dashboard'))
        # Access control: admin can view any company, member only their own
        if not current_user.is_admin:
            if current_user.company_id != company.id:
                flash('Brak uprawnień. Możesz przeglądać audyt tylko własnej firmy.', 'error')
                return redirect(url_for('dashboard'))
        # Get latest IT audit for this company (newest audit_date first)
        audit = db.query(ITAudit).filter(
            ITAudit.company_id == company.id
        ).order_by(ITAudit.audit_date.desc()).first()
        # Build audit data dict if audit exists
        audit_data = None
        if audit:
            # Polish display labels for the stored maturity level codes
            maturity_labels = {
                'basic': 'Podstawowy',
                'developing': 'Rozwijający się',
                'established': 'Ugruntowany',
                'advanced': 'Zaawansowany'
            }
            audit_data = {
                'id': audit.id,
                'overall_score': audit.overall_score,
                'security_score': audit.security_score,
                'collaboration_score': audit.collaboration_score,
                'completeness_score': audit.completeness_score,
                'maturity_level': audit.maturity_level,
                # 'Nieznany' ("unknown") is the fallback for unmapped levels
                'maturity_label': maturity_labels.get(audit.maturity_level, 'Nieznany'),
                'audit_date': audit.audit_date,
                'audit_source': audit.audit_source,
                # Technology flags
                'has_azure_ad': audit.has_azure_ad,
                'has_m365': audit.has_m365,
                'has_google_workspace': audit.has_google_workspace,
                'has_local_ad': audit.has_local_ad,
                'has_edr': audit.has_edr,
                'has_mfa': audit.has_mfa,
                'has_vpn': audit.has_vpn,
                'has_proxmox_pbs': audit.has_proxmox_pbs,
                'has_dr_plan': audit.has_dr_plan,
                'has_mdm': audit.has_mdm,
                # Solutions
                'antivirus_solution': audit.antivirus_solution,
                'backup_solution': audit.backup_solution,
                'monitoring_solution': audit.monitoring_solution,
                'virtualization_platform': audit.virtualization_platform,
                # Collaboration flags
                'open_to_shared_licensing': audit.open_to_shared_licensing,
                'open_to_backup_replication': audit.open_to_backup_replication,
                'open_to_teams_federation': audit.open_to_teams_federation,
                'open_to_shared_monitoring': audit.open_to_shared_monitoring,
                'open_to_collective_purchasing': audit.open_to_collective_purchasing,
                'open_to_knowledge_sharing': audit.open_to_knowledge_sharing,
                # Recommendations
                'recommendations': audit.recommendations
            }
        # Determine if user can edit audit (admin or company owner)
        can_edit = current_user.is_admin or current_user.company_id == company.id
        logger.info(f"IT audit dashboard viewed by {current_user.email} for company: {company.name}")
        return render_template('it_audit.html',
            company=company,
            audit_data=audit_data,
            can_edit=can_edit
        )
    finally:
        db.close()
@app.route('/api/check-email', methods=['POST'])
def api_check_email():
    """
    API: Check if an email address is available for registration.

    Request JSON body:
    - email: address to check (compared case-insensitively)

    Returns:
        JSON {'available': bool, 'email': str}, or a 400 error when the
        address is missing or malformed.
    """
    # silent=True: a missing/non-JSON body yields None instead of raising
    # BadRequest mid-handler (request.get_json() returns None -> the old
    # code crashed on data.get). `or ''` also guards {"email": null}.
    data = request.get_json(silent=True) or {}
    email = (data.get('email') or '').strip().lower()
    # Validate email format
    if not email or not validate_email(email):
        return jsonify({
            'available': False,
            'error': 'Nieprawidłowy format email'
        }), 400
    db = SessionLocal()
    try:
        # Available only when no user row already owns this address.
        existing_user = db.query(User).filter_by(email=email).first()
        return jsonify({
            'available': existing_user is None,
            'email': email
        })
    finally:
        db.close()
@app.route('/api/verify-nip', methods=['POST'])
def api_verify_nip():
    """
    API: Verify a NIP and check if the company is a NORDA member.

    Request JSON body:
    - nip: 10-digit tax identification number (digits only)

    Returns:
        JSON with 'is_member' plus company name/id when the NIP matches
        an active company, or a 400 error on a malformed NIP.
    """
    # silent=True: a missing/non-JSON body yields None instead of raising
    # BadRequest mid-handler (request.get_json() returns None -> the old
    # code crashed on data.get). `or ''` also guards {"nip": null}.
    data = request.get_json(silent=True) or {}
    nip = (data.get('nip') or '').strip()
    # Validate NIP format (exactly 10 digits)
    if not nip or not re.match(r'^\d{10}$', nip):
        return jsonify({
            'success': False,
            'error': 'Nieprawidłowy format NIP'
        }), 400
    db = SessionLocal()
    try:
        # Membership == an active company with this NIP in our directory.
        company = db.query(Company).filter_by(nip=nip, status='active').first()
        if company:
            return jsonify({
                'success': True,
                'is_member': True,
                'company_name': company.name,
                'company_id': company.id
            })
        return jsonify({
            'success': True,
            'is_member': False,
            'company_name': None,
            'company_id': None
        })
    finally:
        db.close()
@app.route('/api/verify-krs', methods=['GET', 'POST'])
def api_verify_krs():
    """
    API: Verify company data from KRS Open API (prs.ms.gov.pl).

    GET /api/verify-krs?krs=0000817317
    POST /api/verify-krs with JSON body: {"krs": "0000817317"}

    Returns official KRS data including:
    - Company name, NIP, REGON
    - Address
    - Capital
    - Registration date
    - Management board (anonymized in Open API)
    - Shareholders (anonymized in Open API)
    """
    # Get KRS from query params (GET) or JSON body (POST)
    if request.method == 'GET':
        krs = request.args.get('krs', '').strip()
    else:
        data = request.get_json(silent=True) or {}
        # `or ''` covers an explicit {"krs": null}, which previously
        # crashed with AttributeError on None.strip().
        krs = (data.get('krs') or '').strip()
    # Validate KRS format (7-10 digits)
    if not krs or not re.match(r'^\d{7,10}$', krs):
        return jsonify({
            'success': False,
            'error': 'Nieprawidłowy format KRS (wymagane 7-10 cyfr)'
        }), 400
    # Normalize to the canonical zero-padded 10-digit form
    krs_normalized = krs.zfill(10)
    try:
        # Fetch data from KRS Open API
        krs_data = krs_api_service.get_company_from_krs(krs_normalized)
        if krs_data is None:
            return jsonify({
                'success': False,
                'error': f'Nie znaleziono podmiotu o KRS {krs_normalized} w rejestrze',
                'krs': krs_normalized
            }), 404
        # Check if the company is already registered in our directory
        db = SessionLocal()
        try:
            our_company = db.query(Company).filter_by(krs=krs_normalized).first()
            is_member = our_company is not None
            company_id = our_company.id if our_company else None
        finally:
            db.close()
        return jsonify({
            'success': True,
            'krs': krs_normalized,
            'is_norda_member': is_member,
            'company_id': company_id,
            'data': krs_data.to_dict(),
            'formatted_address': krs_api_service.format_address(krs_data),
            'source': 'KRS Open API (prs.ms.gov.pl)',
            'note': 'Dane osobowe (imiona, nazwiska) są zanonimizowane w Open API'
        })
    except Exception as e:
        return jsonify({
            'success': False,
            'error': f'Błąd podczas pobierania danych z KRS: {str(e)}'
        }), 500
@app.route('/api/company/<int:company_id>/refresh-krs', methods=['POST'])
@login_required
def api_refresh_company_krs(company_id):
    """
    API: Refresh company data from KRS Open API.

    Fetches the official registry record for the company's KRS number and
    copies selected non-personal fields (NIP, 9-digit REGON, address,
    city) onto the company row when they differ, then stamps
    krs_verified_at and commits.

    Args:
        company_id: Primary key of the company to refresh.

    Returns:
        JSON with the dict of applied updates and the raw KRS payload,
        or an error (404 unknown company / not in registry, 400 company
        has no KRS, 500 on unexpected failure).

    Requires login.
    NOTE(review): any authenticated user can refresh any company — there
    is no admin/ownership check here, unlike other company endpoints in
    this file. Confirm this is intentional.
    """
    db = SessionLocal()
    try:
        company = db.query(Company).filter_by(id=company_id).first()
        if not company:
            return jsonify({
                'success': False,
                'error': 'Firma nie znaleziona'
            }), 404
        if not company.krs:
            return jsonify({
                'success': False,
                'error': 'Firma nie ma numeru KRS'
            }), 400
        # Fetch data from KRS
        krs_data = krs_api_service.get_company_from_krs(company.krs)
        if krs_data is None:
            return jsonify({
                'success': False,
                'error': f'Nie znaleziono podmiotu o KRS {company.krs} w rejestrze'
            }), 404
        # Update company data (only non-personal data).
        # `updates` records what actually changed, for the API response.
        updates = {}
        if krs_data.nip and krs_data.nip != company.nip:
            updates['nip'] = krs_data.nip
            company.nip = krs_data.nip
        if krs_data.regon:
            # Only the first 9 digits of the REGON are stored/compared.
            regon_9 = krs_data.regon[:9]
            if regon_9 != company.regon:
                updates['regon'] = regon_9
                company.regon = regon_9
        # Update address if significantly different
        new_address = krs_api_service.format_address(krs_data)
        if new_address and new_address != company.address:
            updates['address'] = new_address
            company.address = new_address
        if krs_data.miejscowosc and krs_data.miejscowosc != company.city:
            updates['city'] = krs_data.miejscowosc
            company.city = krs_data.miejscowosc
        if krs_data.kapital_zakladowy:
            # Reported in the response only — NOT persisted on the model.
            updates['kapital_zakladowy'] = krs_data.kapital_zakladowy
            # Note: Might need to add this field to Company model
        # Update verification timestamp
        company.krs_verified_at = datetime.utcnow()
        db.commit()
        return jsonify({
            'success': True,
            'company_id': company_id,
            'updates': updates,
            'krs_data': krs_data.to_dict(),
            'message': f'Zaktualizowano {len(updates)} pól' if updates else 'Dane są aktualne'
        })
    except Exception as e:
        db.rollback()
        return jsonify({
            'success': False,
            'error': f'Błąd podczas aktualizacji: {str(e)}'
        }), 500
    finally:
        db.close()
def _search_brave_for_company(company_name: str, city: str = None) -> dict:
    """
    Search the Brave Search API for news and web results about a company.

    Args:
        company_name: Company name, quoted for an exact-phrase match.
        city: Optional city appended to the query to narrow results.

    Returns:
        dict with keys 'news' and 'web', each a list (max 5 items) of
        dicts with 'title', 'description', 'url' (news items also carry
        'source'). Both lists are empty when BRAVE_API_KEY is unset or
        the API calls fail.
    """
    import requests
    brave_api_key = os.getenv('BRAVE_API_KEY')
    if not brave_api_key:
        logger.warning("BRAVE_API_KEY not configured, skipping web search")
        return {'news': [], 'web': []}
    results = {'news': [], 'web': []}
    # Build search query: quoted name for exact match, optional city.
    query = f'"{company_name}"'
    if city:
        query += f' {city}'
    try:
        headers = {
            'Accept': 'application/json',
            'X-Subscription-Token': brave_api_key
        }
        # --- News search (past year, Polish locale) ---
        news_params = {
            'q': query,
            'count': 5,
            'freshness': 'py',  # past year
            'country': 'pl',
            'search_lang': 'pl'
        }
        news_response = requests.get(
            'https://api.search.brave.com/res/v1/news/search',
            headers=headers,
            params=news_params,
            timeout=10
        )
        if news_response.status_code == 200:
            news_data = news_response.json()
            for item in news_data.get('results', [])[:5]:
                results['news'].append({
                    'title': item.get('title', ''),
                    'description': item.get('description', ''),
                    'url': item.get('url', ''),
                    'source': item.get('meta_url', {}).get('hostname', '')
                })
            logger.info(f"Brave News: found {len(results['news'])} items for '{company_name}'")
        else:
            # Previously silent — surface auth/quota problems (401/429 etc.)
            logger.warning(f"Brave News search returned HTTP {news_response.status_code} for '{company_name}'")
        # --- Web search (Polish locale) ---
        web_params = {
            'q': query,
            'count': 5,
            'country': 'pl',
            'search_lang': 'pl'
        }
        web_response = requests.get(
            'https://api.search.brave.com/res/v1/web/search',
            headers=headers,
            params=web_params,
            timeout=10
        )
        if web_response.status_code == 200:
            web_data = web_response.json()
            for item in web_data.get('web', {}).get('results', [])[:5]:
                results['web'].append({
                    'title': item.get('title', ''),
                    'description': item.get('description', ''),
                    'url': item.get('url', '')
                })
            logger.info(f"Brave Web: found {len(results['web'])} items for '{company_name}'")
        else:
            logger.warning(f"Brave Web search returned HTTP {web_response.status_code} for '{company_name}'")
    except Exception as e:
        # Network errors/timeouts: log and return whatever was collected.
        logger.error(f"Brave search error for '{company_name}': {e}")
    return results
def _fetch_website_content(url: str) -> str:
    """
    Fetch a company website and extract its visible text content.

    Strips <script>, <style>, <nav>, <footer> and <header> elements,
    collapses whitespace, and returns at most 3000 characters of text.
    Returns an empty string when the URL is missing, the request fails,
    or the server responds with a non-200 status.
    """
    import requests
    from bs4 import BeautifulSoup
    if not url:
        return ''
    try:
        # Prepend a scheme when none is given. The previous check
        # (startswith('http')) wrongly accepted non-scheme values such
        # as 'httpdocs.example.com'; match complete schemes instead.
        if not url.startswith(('http://', 'https://')):
            url = 'https://' + url
        response = requests.get(url, timeout=10, headers={
            'User-Agent': 'Mozilla/5.0 (compatible; NordaBizBot/1.0)'
        })
        if response.status_code == 200:
            soup = BeautifulSoup(response.text, 'html.parser')
            # Remove boilerplate / non-content elements before extraction
            for tag in soup(['script', 'style', 'nav', 'footer', 'header']):
                tag.decompose()
            # Get text content
            text = soup.get_text(separator=' ', strip=True)
            # Collapse runs of whitespace into single spaces
            text = ' '.join(text.split())
            logger.info(f"Fetched {len(text)} chars from {url}")
            return text[:3000]  # Limit to 3000 chars
        # Previously a non-200 response fell through silently.
        logger.warning(f"Failed to fetch website content from {url}: HTTP {response.status_code}")
    except Exception as e:
        logger.warning(f"Failed to fetch website content from {url}: {e}")
    return ''
@app.route('/api/company/<int:company_id>/enrich-ai', methods=['POST'])
@login_required
@limiter.limit("5 per hour")
def api_enrich_company_ai(company_id):
    """
    API: Enrich company data using AI (Gemini) with web search.

    Process:
    1. Search Brave API for company news and web results
    2. Fetch content from company website
    3. Combine with existing database data
    4. Send to Gemini for AI-powered enrichment

    Generates AI insights including:
    - Business summary
    - Services list
    - Target market
    - Unique selling points
    - Company values
    - Certifications
    - Industry tags

    Results are upserted into CompanyAIInsights for the company.

    Requires: Admin or company owner permissions.
    Rate limited to 5 requests per hour per user.
    """
    import json
    db = SessionLocal()
    try:
        # Get company
        company = db.query(Company).filter_by(id=company_id).first()
        if not company:
            return jsonify({
                'success': False,
                'error': 'Firma nie znaleziona'
            }), 404
        # Check permissions: admin or company owner
        logger.info(f"Permission check: user={current_user.email}, is_admin={current_user.is_admin}, user_company_id={current_user.company_id}, target_company_id={company.id}")
        if not current_user.is_admin and current_user.company_id != company.id:
            return jsonify({
                'success': False,
                'error': 'Brak uprawnien. Tylko administrator lub wlasciciel firmy moze wzbogacac dane.'
            }), 403
        # Get Gemini service
        service = gemini_service.get_gemini_service()
        if not service:
            return jsonify({
                'success': False,
                'error': 'Usluga AI jest niedostepna. Skontaktuj sie z administratorem.'
            }), 503
        logger.info(f"AI enrichment triggered by {current_user.email} for company: {company.name} (ID: {company.id})")
        # ============================================
        # STEP 1: Search the web for company info
        # ============================================
        brave_results = _search_brave_for_company(company.name, company.address_city)
        # Format news for prompt (top 3, descriptions capped at 200 chars)
        news_text = ""
        if brave_results['news']:
            news_text = "\n".join([
                f"- {item['title']}: {item['description'][:200]}"
                for item in brave_results['news'][:3]
            ])
        # Format web results for prompt (same limits as news)
        web_text = ""
        if brave_results['web']:
            web_text = "\n".join([
                f"- {item['title']}: {item['description'][:200]}"
                for item in brave_results['web'][:3]
            ])
        # ============================================
        # STEP 2: Fetch company website content
        # ============================================
        website_content = ""
        if company.website:
            website_content = _fetch_website_content(company.website)
        # ============================================
        # STEP 3: Collect existing company data
        # ============================================
        # Prefer linked service rows; fall back to the free-text field.
        services_list = []
        if company.services:
            services_list = [cs.service.name for cs in company.services if cs.service]
        elif company.services_offered:
            services_list = [company.services_offered]
        competencies_list = []
        if company.competencies:
            competencies_list = [cc.competency.name for cc in company.competencies if cc.competency]
        # Keys are Polish on purpose — they are interpolated into the
        # Polish-language prompt below.
        existing_data = {
            'nazwa': company.name,
            'opis_krotki': company.description_short or '',
            'opis_pelny': company.description_full or '',
            'kategoria': company.category.name if company.category else '',
            'uslugi': ', '.join(services_list) if services_list else '',
            'kompetencje': ', '.join(competencies_list) if competencies_list else '',
            'wartosci': company.core_values or '',
            'strona_www': company.website or '',
            'miasto': company.address_city or '',
            'branza': company.pkd_description or ''
        }
        # ============================================
        # STEP 4: Build comprehensive prompt for AI
        # ============================================
        prompt = f"""Przeanalizuj wszystkie dostepne dane o polskiej firmie i wygeneruj wzbogacone informacje.
=== DANE Z BAZY DANYCH ===
Nazwa: {existing_data['nazwa']}
Kategoria: {existing_data['kategoria']}
Opis krotki: {existing_data['opis_krotki']}
Opis pelny: {existing_data['opis_pelny']}
Uslugi: {existing_data['uslugi']}
Kompetencje: {existing_data['kompetencje']}
Wartosci firmy: {existing_data['wartosci']}
Strona WWW: {existing_data['strona_www']}
Miasto: {existing_data['miasto']}
Branza (PKD): {existing_data['branza']}
=== INFORMACJE Z INTERNETU (Brave Search) ===
Newsy o firmie:
{news_text if news_text else '(brak znalezionych newsow)'}
Wyniki wyszukiwania:
{web_text if web_text else '(brak wynikow)'}
=== TRESC ZE STRONY WWW FIRMY ===
{website_content[:2000] if website_content else '(nie udalo sie pobrac tresci strony)'}
=== ZADANIE ===
Na podstawie WSZYSTKICH powyzszych danych (baza danych, wyszukiwarka, strona WWW) wygeneruj wzbogacone informacje o firmie.
Wykorzystaj informacje z internetu do uzupelnienia brakujacych danych.
Jesli znalazles nowe uslugi, certyfikaty lub informacje - dodaj je do odpowiedzi.
Odpowiedz WYLACZNIE w formacie JSON (bez dodatkowego tekstu):
{{
"business_summary": "Zwiezly opis dzialalnosci firmy (2-3 zdania) na podstawie wszystkich zrodel",
"services_list": ["usluga1", "usluga2", "usluga3", "usluga4", "usluga5"],
"target_market": "Opis grupy docelowej klientow",
"unique_selling_points": ["wyroznik1", "wyroznik2", "wyroznik3"],
"company_values": ["wartosc1", "wartosc2", "wartosc3"],
"certifications": ["certyfikat1", "certyfikat2"],
"industry_tags": ["tag1", "tag2", "tag3", "tag4", "tag5"],
"recent_news": "Krotkie podsumowanie ostatnich newsow o firmie (jesli sa)",
"suggested_category": "Sugerowana kategoria glowna",
"category_confidence": 0.85,
"data_sources_used": ["database", "brave_search", "website"]
}}
WAZNE:
- Odpowiedz TYLKO JSON, bez markdown, bez ```json
- Wszystkie teksty po polsku
- Listy powinny zawierac 3-5 elementow
- category_confidence to liczba od 0 do 1
- Wykorzystaj maksymalnie informacje z internetu
"""
        # Call Gemini API (processing time is measured for the insights row)
        start_time = time.time()
        response_text = service.generate_text(
            prompt=prompt,
            temperature=0.7,
            feature='ai_enrichment',
            user_id=current_user.id,
            company_id=company.id,
            related_entity_type='company',
            related_entity_id=company.id
        )
        processing_time = int((time.time() - start_time) * 1000)
        # Parse JSON response
        try:
            # Clean response - remove markdown code blocks if present
            # (the model sometimes wraps output in ```json fences despite
            # the instruction not to)
            clean_response = response_text.strip()
            if clean_response.startswith('```'):
                clean_response = clean_response.split('```')[1]
                if clean_response.startswith('json'):
                    clean_response = clean_response[4:]
                clean_response = clean_response.strip()
            ai_data = json.loads(clean_response)
        except json.JSONDecodeError as e:
            logger.error(f"Failed to parse AI response: {e}\nResponse: {response_text[:500]}")
            return jsonify({
                'success': False,
                'error': 'Blad parsowania odpowiedzi AI. Sprobuj ponownie.'
            }), 500
        # Save or update AI insights (one row per company)
        existing_insights = db.query(CompanyAIInsights).filter_by(company_id=company.id).first()
        if existing_insights:
            # Update existing
            existing_insights.business_summary = ai_data.get('business_summary')
            existing_insights.services_list = ai_data.get('services_list', [])
            existing_insights.target_market = ai_data.get('target_market')
            existing_insights.unique_selling_points = ai_data.get('unique_selling_points', [])
            existing_insights.company_values = ai_data.get('company_values', [])
            existing_insights.certifications = ai_data.get('certifications', [])
            existing_insights.industry_tags = ai_data.get('industry_tags', [])
            existing_insights.suggested_category = ai_data.get('suggested_category')
            existing_insights.category_confidence = ai_data.get('category_confidence')
            existing_insights.ai_confidence_score = 0.85  # Default confidence
            existing_insights.processing_time_ms = processing_time
            existing_insights.analyzed_at = datetime.utcnow()
        else:
            # Create new
            new_insights = CompanyAIInsights(
                company_id=company.id,
                business_summary=ai_data.get('business_summary'),
                services_list=ai_data.get('services_list', []),
                target_market=ai_data.get('target_market'),
                unique_selling_points=ai_data.get('unique_selling_points', []),
                company_values=ai_data.get('company_values', []),
                certifications=ai_data.get('certifications', []),
                industry_tags=ai_data.get('industry_tags', []),
                suggested_category=ai_data.get('suggested_category'),
                category_confidence=ai_data.get('category_confidence'),
                ai_confidence_score=0.85,
                processing_time_ms=processing_time,
                analyzed_at=datetime.utcnow()
            )
            db.add(new_insights)
        db.commit()
        # Count sources used (reported back to the caller)
        sources_used = ['database']
        if brave_results['news'] or brave_results['web']:
            sources_used.append('brave_search')
        if website_content:
            sources_used.append('website')
        logger.info(f"AI enrichment completed for {company.name}. Processing time: {processing_time}ms. Sources: {sources_used}")
        return jsonify({
            'success': True,
            'message': f'Dane firmy "{company.name}" zostaly wzbogacone przez AI',
            'processing_time_ms': processing_time,
            'sources_used': sources_used,
            'brave_results_count': len(brave_results['news']) + len(brave_results['web']),
            'website_content_length': len(website_content),
            'insights': ai_data
        })
    except Exception as e:
        db.rollback()
        logger.error(f"AI enrichment error for company {company_id}: {str(e)}")
        return jsonify({
            'success': False,
            'error': f'Blad podczas wzbogacania danych: {str(e)}'
        }), 500
    finally:
        db.close()
@app.route('/api/model-info', methods=['GET'])
def api_model_info():
    """API: Report the currently configured AI model.

    Returns a JSON payload with the Gemini model name and provider, or a
    500 error when the AI service has not been initialized.
    """
    ai = gemini_service.get_gemini_service()
    # Guard clause: bail out early when the service is unavailable.
    if not ai:
        return jsonify({
            'success': False,
            'error': 'AI service not initialized'
        }), 500
    return jsonify({
        'success': True,
        'model': ai.model_name,
        'provider': 'Google Gemini'
    })
# ============================================================
# AI CHAT FEEDBACK & ANALYTICS
# ============================================================
@app.route('/api/chat/feedback', methods=['POST'])
@login_required
def chat_feedback():
    """API: Submit feedback for AI response.

    Expects JSON with ``message_id`` and ``rating`` (1 = thumbs down,
    2 = thumbs up). Optional keys: ``comment``, ``is_helpful``,
    ``is_accurate``, ``found_company``, ``original_query``,
    ``expected_companies``.

    Returns 400 for an invalid payload, 404 when the message does not
    exist, 403 when the message belongs to another user's conversation,
    and 500 on unexpected errors.
    """
    try:
        data = request.get_json()
        message_id = data.get('message_id')
        rating = data.get('rating')  # 1 = thumbs down, 2 = thumbs up
        if not message_id or rating not in [1, 2]:
            return jsonify({'success': False, 'error': 'Invalid data'}), 400
        db = SessionLocal()
        try:
            # Verify message exists and belongs to user's conversation
            message = db.query(AIChatMessage).filter_by(id=message_id).first()
            if not message:
                return jsonify({'success': False, 'error': 'Message not found'}), 404
            conversation = db.query(AIChatConversation).filter_by(
                id=message.conversation_id,
                user_id=current_user.id
            ).first()
            if not conversation:
                return jsonify({'success': False, 'error': 'Not authorized'}), 403
            # Quick rating is stored directly on the message row.
            message.feedback_rating = rating
            message.feedback_at = datetime.now()
            message.feedback_comment = data.get('comment', '')
            # Create/refresh a detailed feedback record only when the
            # client supplied detail fields (upsert: one AIChatFeedback
            # row per message).
            if data.get('is_helpful') is not None or data.get('comment'):
                existing_feedback = db.query(AIChatFeedback).filter_by(message_id=message_id).first()
                if existing_feedback:
                    # NOTE(review): original_query / expected_companies are
                    # not refreshed on update — only set on first insert.
                    existing_feedback.rating = rating
                    existing_feedback.is_helpful = data.get('is_helpful')
                    existing_feedback.is_accurate = data.get('is_accurate')
                    existing_feedback.found_company = data.get('found_company')
                    existing_feedback.comment = data.get('comment')
                else:
                    feedback = AIChatFeedback(
                        message_id=message_id,
                        user_id=current_user.id,
                        rating=rating,
                        is_helpful=data.get('is_helpful'),
                        is_accurate=data.get('is_accurate'),
                        found_company=data.get('found_company'),
                        comment=data.get('comment'),
                        original_query=data.get('original_query'),
                        expected_companies=data.get('expected_companies')
                    )
                    db.add(feedback)
            db.commit()
            logger.info(f"Feedback received: message_id={message_id}, rating={rating}")
            return jsonify({'success': True})
        finally:
            db.close()
    except Exception as e:
        logger.error(f"Error saving feedback: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
@app.route('/admin/chat-analytics')
@login_required
def chat_analytics():
    """Admin dashboard for chat analytics"""
    # Admin-only page: everyone else is bounced back to their dashboard.
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))
    db = SessionLocal()
    try:
        from sqlalchemy import func, desc

        # Volume counters
        conversations_total = db.query(AIChatConversation).count()
        messages_total = db.query(AIChatMessage).count()
        user_messages_total = db.query(AIChatMessage).filter_by(role='user').count()

        # Feedback counters (rating: 2 = thumbs up, 1 = thumbs down)
        rated_total = db.query(AIChatMessage).filter(AIChatMessage.feedback_rating.isnot(None)).count()
        thumbs_up = db.query(AIChatMessage).filter_by(feedback_rating=2).count()
        thumbs_down = db.query(AIChatMessage).filter_by(feedback_rating=1).count()
        satisfaction = (thumbs_up / rated_total * 100) if rated_total > 0 else 0

        # Most recent rated messages and most recent user queries
        latest_feedback = (
            db.query(AIChatMessage)
            .filter(AIChatMessage.feedback_rating.isnot(None))
            .order_by(desc(AIChatMessage.feedback_at))
            .limit(20)
            .all()
        )
        latest_queries = (
            db.query(AIChatMessage)
            .filter_by(role='user')
            .order_by(desc(AIChatMessage.created_at))
            .limit(50)
            .all()
        )

        return render_template(
            'admin/chat_analytics.html',
            total_conversations=conversations_total,
            total_messages=messages_total,
            total_user_messages=user_messages_total,
            feedback_count=rated_total,
            positive_feedback=thumbs_up,
            negative_feedback=thumbs_down,
            satisfaction_rate=round(satisfaction, 1),
            recent_feedback=latest_feedback,
            recent_queries=latest_queries
        )
    finally:
        db.close()
@app.route('/admin/analytics')
@login_required
def admin_analytics():
    """Admin dashboard for user analytics - sessions, page views, clicks.

    Query parameters:
        period: 'day', 'week', 'month' (default 'week'); any other value
            disables the date filter (all time).
        user_id: optional user id; when present, a per-user detail
            section (recent sessions and page views) is added.

    Renders admin/analytics_dashboard.html; on any error logs it,
    flashes a message and redirects to the admin users page.
    """
    if not current_user.is_admin:
        flash('Brak uprawnien do tej strony.', 'error')
        return redirect(url_for('dashboard'))
    from sqlalchemy import func, desc
    from sqlalchemy.orm import joinedload
    from datetime import date, timedelta
    period = request.args.get('period', 'week')
    user_id = request.args.get('user_id', type=int)
    # Period calculation: start_date == None means "no date filter".
    today = date.today()
    if period == 'day':
        start_date = today
    elif period == 'week':
        start_date = today - timedelta(days=7)
    elif period == 'month':
        start_date = today - timedelta(days=30)
    else:
        start_date = None
    db = SessionLocal()
    try:
        # Base query for sessions in period
        sessions_query = db.query(UserSession)
        if start_date:
            sessions_query = sessions_query.filter(
                func.date(UserSession.started_at) >= start_date
            )
        # Overall stats
        total_sessions = sessions_query.count()
        # NOTE(review): .distinct(col).count() relies on PostgreSQL
        # DISTINCT ON semantics to count unique logged-in users.
        unique_users = sessions_query.filter(
            UserSession.user_id.isnot(None)
        ).distinct(UserSession.user_id).count()
        # NOTE(review): the `... if start_date else True` ternaries below
        # pass a Python True into filter() when no period is selected,
        # relying on SQLAlchemy coercing it to a no-op clause — confirm
        # this still holds on SQLAlchemy upgrades.
        total_page_views = db.query(func.sum(UserSession.page_views_count)).filter(
            func.date(UserSession.started_at) >= start_date if start_date else True
        ).scalar() or 0
        total_clicks = db.query(func.sum(UserSession.clicks_count)).filter(
            func.date(UserSession.started_at) >= start_date if start_date else True
        ).scalar() or 0
        avg_duration = db.query(func.avg(UserSession.duration_seconds)).filter(
            func.date(UserSession.started_at) >= start_date if start_date else True,
            UserSession.duration_seconds.isnot(None)
        ).scalar() or 0
        stats = {
            'total_sessions': total_sessions,
            'unique_users': unique_users,
            'total_page_views': int(total_page_views),
            'total_clicks': int(total_clicks),
            'avg_duration': float(avg_duration)
        }
        # Device breakdown: device_type -> session count
        device_query = db.query(
            UserSession.device_type,
            func.count(UserSession.id)
        )
        if start_date:
            device_query = device_query.filter(
                func.date(UserSession.started_at) >= start_date
            )
        device_stats = dict(device_query.group_by(UserSession.device_type).all())
        # Top users by engagement (ranked by total page views)
        user_query = db.query(
            User.id,
            User.name,
            User.email,
            func.count(UserSession.id).label('sessions'),
            func.sum(UserSession.page_views_count).label('page_views'),
            func.sum(UserSession.clicks_count).label('clicks'),
            func.sum(UserSession.duration_seconds).label('total_time')
        ).join(UserSession, User.id == UserSession.user_id)
        if start_date:
            user_query = user_query.filter(
                func.date(UserSession.started_at) >= start_date
            )
        # NOTE(review): grouping by User.id while selecting name/email
        # depends on PostgreSQL functional-dependency grouping.
        user_rankings = user_query.group_by(User.id).order_by(
            desc('page_views')
        ).limit(20).all()
        # Popular pages
        page_query = db.query(
            PageView.path,
            func.count(PageView.id).label('views'),
            func.count(func.distinct(PageView.user_id)).label('unique_users'),
            func.avg(PageView.time_on_page_seconds).label('avg_time')
        )
        if start_date:
            page_query = page_query.filter(
                func.date(PageView.viewed_at) >= start_date
            )
        popular_pages = page_query.group_by(PageView.path).order_by(
            desc('views')
        ).limit(20).all()
        # Recent sessions (last 50) — intentionally not period-filtered
        recent_sessions = db.query(UserSession).options(
            joinedload(UserSession.user)
        ).order_by(UserSession.started_at.desc()).limit(50).all()
        # Single user detail (if requested)
        user_detail = None
        if user_id:
            user_obj = db.query(User).filter_by(id=user_id).first()
            user_sessions = db.query(UserSession).filter_by(user_id=user_id).order_by(
                UserSession.started_at.desc()
            ).limit(20).all()
            user_pages = db.query(PageView).filter_by(user_id=user_id).order_by(
                PageView.viewed_at.desc()
            ).limit(50).all()
            user_detail = {
                'user': user_obj,
                'sessions': user_sessions,
                'pages': user_pages
            }
        return render_template(
            'admin/analytics_dashboard.html',
            stats=stats,
            device_stats=device_stats,
            user_rankings=user_rankings,
            popular_pages=popular_pages,
            recent_sessions=recent_sessions,
            user_detail=user_detail,
            current_period=period
        )
    except Exception as e:
        logger.error(f"Admin analytics error: {e}")
        flash('Blad podczas ladowania analityki.', 'error')
        return redirect(url_for('admin_users'))
    finally:
        db.close()
@app.route('/api/admin/ai-learning-status')
@login_required
def api_ai_learning_status():
    """API: Get AI feedback learning status and examples"""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Not authorized'}), 403
    try:
        from feedback_learning_service import get_feedback_learning_service
        learning = get_feedback_learning_service()
        ctx = learning.get_learning_context()

        def _clip(text):
            # Trim long responses for the JSON payload (300 chars + ellipsis).
            return text[:300] + '...' if len(text) > 300 else text

        good = [
            {
                'query': ex.query,
                'response': _clip(ex.response),
                'companies': ex.companies_mentioned or []
            }
            for ex in ctx.get('positive_examples', [])
        ]
        bad = [
            {
                'query': ex.query,
                'response': ex.response,
                'comment': ex.feedback_comment
            }
            for ex in ctx.get('negative_examples', [])
        ]
        stats = ctx.get('stats', {})
        return jsonify({
            'success': True,
            'learning_active': True,
            'stats': stats,
            'using_seed_examples': stats.get('using_seed_examples', False),
            'positive_examples_count': len(good),
            'negative_examples_count': len(bad),
            'positive_examples': good,
            'negative_examples': bad,
            'negative_patterns': ctx.get('negative_patterns', []),
            'generated_at': ctx.get('generated_at')
        })
    except ImportError:
        # Optional module: report "learning off" instead of failing.
        return jsonify({
            'success': True,
            'learning_active': False,
            'message': 'Feedback learning service not available'
        })
    except Exception as e:
        logger.error(f"Error getting AI learning status: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
@app.route('/admin/ai-usage')
@login_required
def admin_ai_usage():
    """Admin dashboard for AI (Gemini) API usage monitoring.

    Query parameters:
        period: 'day', 'week', 'month' (default) or 'all' — scopes the
            per-type, per-user and per-company breakdowns; the headline
            today/week/month/all-time cards are always computed.

    Renders admin/ai_usage_dashboard.html.
    """
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))
    from database import AIUsageLog, AIUsageDaily, User, Company
    from sqlalchemy import func, desc, case
    from datetime import timedelta
    # Get period filter from query params
    period = request.args.get('period', 'month')  # day, week, month, all
    db = SessionLocal()
    try:
        now = datetime.now()
        today = now.date()
        week_ago = today - timedelta(days=7)
        month_ago = today - timedelta(days=30)
        day_ago = now - timedelta(hours=24)
        # Determine date filter based on period:
        # maps period -> (UI label, lower date bound or None for all-time)
        period_labels = {
            'day': ('Dzisiaj', today),
            'week': ('Ten tydzień', week_ago),
            'month': ('Ten miesiąc', month_ago),
            'all': ('Od początku', None)
        }
        period_label, period_start = period_labels.get(period, period_labels['month'])
        # Base query filter for period (no-op when period is 'all')
        def period_filter(query):
            if period_start:
                return query.filter(func.date(AIUsageLog.created_at) >= period_start)
            return query
        # Today's stats (always show)
        today_stats = db.query(
            func.count(AIUsageLog.id).label('requests'),
            func.coalesce(func.sum(AIUsageLog.tokens_input), 0).label('tokens_input'),
            func.coalesce(func.sum(AIUsageLog.tokens_output), 0).label('tokens_output'),
            func.coalesce(func.sum(AIUsageLog.cost_cents), 0).label('cost_cents')
        ).filter(
            func.date(AIUsageLog.created_at) == today
        ).first()
        # Week stats
        week_requests = db.query(func.count(AIUsageLog.id)).filter(
            func.date(AIUsageLog.created_at) >= week_ago
        ).scalar() or 0
        # Month stats
        month_stats = db.query(
            func.count(AIUsageLog.id).label('requests'),
            func.coalesce(func.sum(AIUsageLog.cost_cents), 0).label('cost_cents')
        ).filter(
            func.date(AIUsageLog.created_at) >= month_ago
        ).first()
        # All-time stats
        all_time_stats = db.query(
            func.count(AIUsageLog.id).label('requests'),
            func.coalesce(func.sum(AIUsageLog.cost_cents), 0).label('cost_cents')
        ).first()
        # Error rate (last 24h)
        last_24h_total = db.query(func.count(AIUsageLog.id)).filter(
            AIUsageLog.created_at >= day_ago
        ).scalar() or 0
        last_24h_errors = db.query(func.count(AIUsageLog.id)).filter(
            AIUsageLog.created_at >= day_ago,
            AIUsageLog.success == False
        ).scalar() or 0
        error_rate = (last_24h_errors / last_24h_total * 100) if last_24h_total > 0 else 0
        # Average response time over successful requests (last 24h)
        avg_response_time = db.query(func.avg(AIUsageLog.response_time_ms)).filter(
            AIUsageLog.created_at >= day_ago,
            AIUsageLog.success == True
        ).scalar() or 0
        # Usage by type (filtered by period)
        type_query = db.query(
            AIUsageLog.request_type,
            func.count(AIUsageLog.id).label('count')
        )
        type_query = period_filter(type_query)
        type_stats = type_query.group_by(AIUsageLog.request_type).order_by(desc('count')).all()
        # Calculate percentages for type breakdown
        total_type_count = sum(t.count for t in type_stats) if type_stats else 0
        # request_type -> (display label, CSS class) for the template
        type_labels = {
            'ai_chat': ('Chat AI', 'chat'),
            'zopk_news_evaluation': ('Ocena newsów ZOP Kaszubia', 'news'),
            'ai_user_parse': ('Tworzenie user', 'user'),
            'gbp_audit_ai': ('Audyt GBP', 'image'),
            'general': ('Ogólne', 'other')
        }
        usage_by_type = []
        for t in type_stats:
            label, css_class = type_labels.get(t.request_type, (t.request_type, 'other'))
            percentage = (t.count / total_type_count * 100) if total_type_count > 0 else 0
            usage_by_type.append({
                'type': t.request_type,
                'type_label': label,
                'type_class': css_class,
                'count': t.count,
                'percentage': round(percentage, 1)
            })
        # ========================================
        # USER STATISTICS (filtered by period)
        # ========================================
        user_query = db.query(
            User.id,
            User.name.label('user_name'),
            User.email,
            Company.name.label('company_name'),
            func.count(AIUsageLog.id).label('requests'),
            func.coalesce(func.sum(AIUsageLog.tokens_input), 0).label('tokens_input'),
            func.coalesce(func.sum(AIUsageLog.tokens_output), 0).label('tokens_output'),
            func.coalesce(func.sum(AIUsageLog.cost_cents), 0).label('cost_cents')
        ).join(
            AIUsageLog, AIUsageLog.user_id == User.id
        ).outerjoin(
            Company, User.company_id == Company.id
        )
        user_query = period_filter(user_query)
        user_stats = user_query.group_by(
            User.id, User.name, User.email, Company.name
        ).order_by(desc('cost_cents')).limit(20).all()
        # Format user stats (cost_cents converted to USD for display)
        user_rankings = []
        for u in user_stats:
            user_rankings.append({
                'id': u.id,
                'name': u.user_name or u.email,
                'email': u.email,
                'company': u.company_name or '-',
                'requests': u.requests,
                'tokens': int(u.tokens_input) + int(u.tokens_output),
                'cost_cents': float(u.cost_cents or 0),
                'cost_usd': float(u.cost_cents or 0) / 100
            })
        # ========================================
        # COMPANY STATISTICS (filtered by period)
        # ========================================
        company_query = db.query(
            Company.id,
            Company.name,
            func.count(AIUsageLog.id).label('requests'),
            func.count(func.distinct(AIUsageLog.user_id)).label('unique_users'),
            func.coalesce(func.sum(AIUsageLog.tokens_input), 0).label('tokens_input'),
            func.coalesce(func.sum(AIUsageLog.tokens_output), 0).label('tokens_output'),
            func.coalesce(func.sum(AIUsageLog.cost_cents), 0).label('cost_cents')
        ).join(
            User, User.company_id == Company.id
        ).join(
            AIUsageLog, AIUsageLog.user_id == User.id
        )
        company_query = period_filter(company_query)
        company_stats = company_query.group_by(
            Company.id, Company.name
        ).order_by(desc('cost_cents')).limit(20).all()
        # Format company stats
        company_rankings = []
        for c in company_stats:
            company_rankings.append({
                'id': c.id,
                'name': c.name,
                'requests': c.requests,
                'unique_users': c.unique_users,
                'tokens': int(c.tokens_input) + int(c.tokens_output),
                'cost_cents': float(c.cost_cents or 0),
                'cost_usd': float(c.cost_cents or 0) / 100
            })
        # Recent logs with user info
        recent_logs = db.query(AIUsageLog).order_by(desc(AIUsageLog.created_at)).limit(20).all()
        # Enrich recent logs with user names
        # NOTE(review): one User lookup per log row (max 20 extra queries)
        # — could be batched into a single IN query.
        for log in recent_logs:
            label, _ = type_labels.get(log.request_type, (log.request_type, 'other'))
            log.type_label = label
            if log.user_id:
                user = db.query(User).filter_by(id=log.user_id).first()
                if user:
                    log.user_name = user.name or user.email
                else:
                    log.user_name = None
            else:
                log.user_name = None
        # Daily history (last 14 days)
        daily_history = db.query(AIUsageDaily).filter(
            AIUsageDaily.date >= today - timedelta(days=14)
        ).order_by(desc(AIUsageDaily.date)).all()
        # Headline cards; cost_cents columns are converted to USD here.
        stats = {
            'today_requests': today_stats.requests or 0,
            'today_tokens_input': int(today_stats.tokens_input) or 0,
            'today_tokens_output': int(today_stats.tokens_output) or 0,
            'today_cost': float(today_stats.cost_cents or 0) / 100,
            'week_requests': week_requests,
            'month_requests': month_stats.requests or 0,
            'month_cost': float(month_stats.cost_cents or 0) / 100,
            'all_requests': all_time_stats.requests or 0,
            'all_cost': float(all_time_stats.cost_cents or 0) / 100,
            'error_rate': error_rate,
            'avg_response_time': int(avg_response_time)
        }
        return render_template(
            'admin/ai_usage_dashboard.html',
            stats=stats,
            usage_by_type=usage_by_type,
            recent_logs=recent_logs,
            daily_history=daily_history,
            user_rankings=user_rankings,
            company_rankings=company_rankings,
            current_period=period,
            period_label=period_label
        )
    finally:
        db.close()
@app.route('/admin/ai-usage/user/<int:user_id>')
@login_required
def admin_ai_usage_user(user_id):
    """Detailed AI usage for a specific user.

    Args:
        user_id: id of the user whose AI usage is shown.

    Shows lifetime totals, a per-request-type breakdown and a paginated
    request log (?page=N, 50 rows per page). Redirects back to the main
    AI usage dashboard when the user does not exist.
    """
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))
    from database import AIUsageLog, User, Company
    from sqlalchemy import func, desc
    db = SessionLocal()
    try:
        # Get user info
        user = db.query(User).filter_by(id=user_id).first()
        if not user:
            flash('Użytkownik nie istnieje.', 'error')
            return redirect(url_for('admin_ai_usage'))
        company = None
        if user.company_id:
            company = db.query(Company).filter_by(id=user.company_id).first()
        # Get overall stats for this user.
        # nullif(success, True) yields NULL for successful rows, so the
        # count covers rows whose success flag is not True (False/NULL).
        stats = db.query(
            func.count(AIUsageLog.id).label('total_requests'),
            func.coalesce(func.sum(AIUsageLog.tokens_input), 0).label('tokens_input'),
            func.coalesce(func.sum(AIUsageLog.tokens_output), 0).label('tokens_output'),
            func.coalesce(func.sum(AIUsageLog.cost_cents), 0).label('cost_cents'),
            func.count(func.nullif(AIUsageLog.success, True)).label('errors')
        ).filter(AIUsageLog.user_id == user_id).first()
        # Usage by type: request_type -> display label
        type_labels = {
            'ai_chat': 'Chat AI',
            'zopk_news_evaluation': 'Ocena newsów ZOP Kaszubia',
            'ai_user_parse': 'Tworzenie user',
            'gbp_audit_ai': 'Audyt GBP',
            'general': 'Ogólne'
        }
        type_stats = db.query(
            AIUsageLog.request_type,
            func.count(AIUsageLog.id).label('count'),
            func.coalesce(func.sum(AIUsageLog.tokens_input + AIUsageLog.tokens_output), 0).label('tokens'),
            func.coalesce(func.sum(AIUsageLog.cost_cents), 0).label('cost_cents')
        ).filter(
            AIUsageLog.user_id == user_id
        ).group_by(AIUsageLog.request_type).order_by(desc('count')).all()
        # Calculate total for percentages
        # (defaults to 1 so the later division is safe with no rows)
        total_type_count = sum(t.count for t in type_stats) if type_stats else 1
        # request_type -> CSS class for the template
        type_classes = {
            'ai_chat': 'chat',
            'zopk_news_evaluation': 'news_evaluation',
            'ai_user_parse': 'user_creation',
            'gbp_audit_ai': 'image_analysis',
            'general': 'other'
        }
        usage_by_type = []
        for t in type_stats:
            usage_by_type.append({
                'type': t.request_type,
                'type_label': type_labels.get(t.request_type, t.request_type),
                'type_class': type_classes.get(t.request_type, 'other'),
                'count': t.count,
                'tokens': int(t.tokens),
                'cost_usd': float(t.cost_cents) / 100,
                'percentage': round(t.count / total_type_count * 100, 1) if total_type_count > 0 else 0
            })
        # Get all requests for this user (paginated, newest first)
        page = request.args.get('page', 1, type=int)
        per_page = 50
        requests_query = db.query(AIUsageLog).filter(
            AIUsageLog.user_id == user_id
        ).order_by(desc(AIUsageLog.created_at))
        total_requests = requests_query.count()
        total_pages = (total_requests + per_page - 1) // per_page
        logs = requests_query.offset((page - 1) * per_page).limit(per_page).all()
        # Enrich logs with type labels and cost (USD) for the template
        for log in logs:
            log.type_label = type_labels.get(log.request_type, log.request_type)
            log.cost_usd = float(log.cost_cents or 0) / 100
        user_stats = {
            'total_requests': stats.total_requests or 0,
            'tokens_total': int(stats.tokens_input or 0) + int(stats.tokens_output or 0),
            'tokens_input': int(stats.tokens_input or 0),
            'tokens_output': int(stats.tokens_output or 0),
            'cost_usd': float(stats.cost_cents or 0) / 100,
            'errors': stats.errors or 0
        }
        return render_template(
            'admin/ai_usage_user.html',
            user=user,
            company=company,
            stats=user_stats,
            usage_by_type=usage_by_type,
            logs=logs,
            page=page,
            total_pages=total_pages,
            total_requests=total_requests
        )
    finally:
        db.close()
@app.route('/api/admin/chat-stats')
@login_required
def api_chat_stats():
    """API: Get chat statistics for dashboard"""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Not authorized'}), 403
    db = SessionLocal()
    try:
        from sqlalchemy import func, desc
        from datetime import timedelta

        # Count user queries per calendar day over the trailing week.
        since = datetime.now() - timedelta(days=7)
        day_col = func.date(AIChatMessage.created_at)
        per_day = (
            db.query(
                day_col.label('date'),
                func.count(AIChatMessage.id).label('count')
            )
            .filter(
                AIChatMessage.created_at >= since,
                AIChatMessage.role == 'user'
            )
            .group_by(day_col)
            .order_by('date')
            .all()
        )
        return jsonify({
            'success': True,
            'daily_queries': [{'date': str(row.date), 'count': row.count} for row in per_day]
        })
    finally:
        db.close()
# ============================================================
# SYSTEM STATUS DASHBOARD (Admin only)
# ============================================================
@app.route('/admin/status')
@login_required
def admin_status():
    """System status dashboard with real-time metrics.

    Collects best-effort host metrics (CPU/RAM/disk/uptime via
    subprocess, trying macOS tools first then Linux fallbacks),
    PostgreSQL metrics, application health-check results, gunicorn
    process info and a hard-coded technology-stack inventory, then
    renders admin/status_dashboard.html. Every probe degrades to
    None/'unknown' instead of failing the page.
    """
    if not current_user.is_admin:
        flash('Brak uprawnień.', 'error')
        return redirect(url_for('dashboard'))
    import subprocess
    import platform
    from sqlalchemy import func, text
    db = SessionLocal()
    try:
        # Current timestamp
        now = datetime.now()
        # ===== SYSTEM METRICS =====
        system_metrics = {
            'hostname': platform.node(),
            'os': f"{platform.system()} {platform.release()}",
            'python': platform.python_version(),
        }
        # CPU usage (via top command; macOS-style output expected)
        try:
            result = subprocess.run(['top', '-l', '1', '-n', '0'], capture_output=True, text=True, timeout=5)
            for line in result.stdout.split('\n'):
                if 'CPU usage' in line:
                    # Parse: "CPU usage: 5.88% user, 8.82% sys, 85.29% idle"
                    parts = line.split(':')[1].strip().split(',')
                    user = float(parts[0].replace('% user', '').strip())
                    sys_cpu = float(parts[1].replace('% sys', '').strip())
                    idle = float(parts[2].replace('% idle', '').strip())
                    system_metrics['cpu_percent'] = round(user + sys_cpu, 1)
                    system_metrics['cpu_idle'] = round(idle, 1)
                    break
        except Exception:
            # Linux fallback
            # NOTE(review): /proc/stat counters are cumulative since boot,
            # so this is an average-since-boot figure, not instantaneous.
            try:
                result = subprocess.run(['grep', 'cpu ', '/proc/stat'], capture_output=True, text=True, timeout=5)
                if result.returncode == 0:
                    parts = result.stdout.split()
                    idle = int(parts[4])
                    total = sum(int(x) for x in parts[1:])
                    system_metrics['cpu_percent'] = round(100 * (1 - idle / total), 1)
                    system_metrics['cpu_idle'] = round(100 * idle / total, 1)
            except Exception:
                system_metrics['cpu_percent'] = None
                system_metrics['cpu_idle'] = None
        # RAM usage
        try:
            # macOS
            result = subprocess.run(['vm_stat'], capture_output=True, text=True, timeout=5)
            if result.returncode == 0 and 'Pages' in result.stdout:
                lines = result.stdout.strip().split('\n')
                # NOTE(review): assumes 16 KiB pages (Apple Silicon);
                # vm_stat's header reports the actual page size — confirm
                # on Intel Macs (4 KiB pages).
                page_size = 16384  # bytes
                stats = {}
                for line in lines[1:]:
                    if ':' in line:
                        key, val = line.split(':')
                        stats[key.strip()] = int(val.strip().rstrip('.'))
                free = stats.get('Pages free', 0) * page_size
                active = stats.get('Pages active', 0) * page_size
                inactive = stats.get('Pages inactive', 0) * page_size
                wired = stats.get('Pages wired down', 0) * page_size
                total_used = active + inactive + wired
                total_mem = total_used + free
                system_metrics['ram_total_gb'] = round(total_mem / (1024**3), 1)
                system_metrics['ram_used_gb'] = round(total_used / (1024**3), 1)
                system_metrics['ram_percent'] = round(100 * total_used / total_mem, 1)
            else:
                raise Exception("Not macOS")
        except Exception:
            # Linux fallback
            try:
                result = subprocess.run(['free', '-b'], capture_output=True, text=True, timeout=5)
                if result.returncode == 0:
                    lines = result.stdout.strip().split('\n')
                    mem_line = lines[1].split()
                    total = int(mem_line[1])
                    used = int(mem_line[2])
                    system_metrics['ram_total_gb'] = round(total / (1024**3), 1)
                    system_metrics['ram_used_gb'] = round(used / (1024**3), 1)
                    system_metrics['ram_percent'] = round(100 * used / total, 1)
            except Exception:
                system_metrics['ram_total_gb'] = None
                system_metrics['ram_used_gb'] = None
                system_metrics['ram_percent'] = None
        # Disk usage of the root filesystem
        try:
            result = subprocess.run(['df', '-h', '/'], capture_output=True, text=True, timeout=5)
            if result.returncode == 0:
                lines = result.stdout.strip().split('\n')
                parts = lines[1].split()
                system_metrics['disk_total'] = parts[1]
                system_metrics['disk_used'] = parts[2]
                system_metrics['disk_percent'] = int(parts[4].replace('%', ''))
        except Exception:
            system_metrics['disk_total'] = None
            system_metrics['disk_used'] = None
            system_metrics['disk_percent'] = None
        # System uptime (parsed from the human-readable `uptime` output)
        try:
            result = subprocess.run(['uptime'], capture_output=True, text=True, timeout=5)
            if result.returncode == 0:
                system_metrics['uptime'] = result.stdout.strip().split('up')[1].split(',')[0].strip()
        except Exception:
            system_metrics['uptime'] = None
        # ===== DATABASE METRICS =====
        db_metrics = {}
        try:
            # PostgreSQL version
            version_result = db.execute(text("SELECT version()")).scalar()
            # Extract just version number: "PostgreSQL 16.11 ..." -> "16.11"
            if version_result:
                import re
                match = re.search(r'PostgreSQL (\d+\.\d+)', version_result)
                db_metrics['version'] = match.group(1) if match else version_result.split()[1]
            # Database size
            result = db.execute(text("SELECT pg_database_size(current_database())")).scalar()
            db_metrics['size_mb'] = round(result / (1024 * 1024), 2)
            # Active connections
            result = db.execute(text("SELECT count(*) FROM pg_stat_activity WHERE state = 'active'")).scalar()
            db_metrics['active_connections'] = result
            # Total connections
            result = db.execute(text("SELECT count(*) FROM pg_stat_activity")).scalar()
            db_metrics['total_connections'] = result
            # Table counts
            db_metrics['companies'] = db.query(Company).count()
            db_metrics['users'] = db.query(User).count()
            # Get additional counts if tables exist (optional models)
            try:
                from database import ChatMessage, ChatSession, CompanySocialMedia, SEOMetrics
                db_metrics['chat_messages'] = db.query(ChatMessage).count()
                db_metrics['chat_sessions'] = db.query(ChatSession).count()
                db_metrics['social_media'] = db.query(CompanySocialMedia).count()
                db_metrics['seo_audits'] = db.query(SEOMetrics).count()
            except Exception:
                pass
            db_metrics['status'] = 'ok'
        except Exception as e:
            db_metrics['status'] = 'error'
            db_metrics['error'] = str(e)[:100]
        # ===== APPLICATION METRICS =====
        app_metrics = {}
        # Health check - exercise key endpoints via the test client
        try:
            with app.test_client() as client:
                endpoints_ok = 0
                endpoints_total = 5
                test_endpoints = ['/', '/login', '/api/companies', '/health', '/search?q=test']
                for ep in test_endpoints:
                    try:
                        response = client.get(ep, follow_redirects=False)
                        if response.status_code in (200, 302, 304):
                            endpoints_ok += 1
                    except Exception:
                        pass
                app_metrics['endpoints_ok'] = endpoints_ok
                app_metrics['endpoints_total'] = endpoints_total
                app_metrics['endpoints_percent'] = round(100 * endpoints_ok / endpoints_total, 0)
        except Exception:
            app_metrics['endpoints_ok'] = None
        # Users statistics
        app_metrics['admins'] = db.query(User).filter(User.is_admin == True).count()
        app_metrics['users_with_2fa'] = db.query(User).filter(User.totp_enabled == True).count()
        # Recent activity (last 24h)
        yesterday = now - timedelta(days=1)
        try:
            app_metrics['logins_24h'] = db.query(AuditLog).filter(
                AuditLog.action == 'login',
                AuditLog.created_at >= yesterday
            ).count()
        except Exception:
            app_metrics['logins_24h'] = 0
        # Security alerts (last 24h)
        try:
            app_metrics['alerts_24h'] = db.query(SecurityAlert).filter(
                SecurityAlert.created_at >= yesterday
            ).count()
        except Exception:
            app_metrics['alerts_24h'] = 0
        # ===== GUNICORN/PROCESS METRICS =====
        process_metrics = {}
        try:
            result = subprocess.run(['pgrep', '-f', 'gunicorn'], capture_output=True, text=True, timeout=5)
            if result.returncode == 0:
                pids = result.stdout.strip().split('\n')
                process_metrics['gunicorn_workers'] = len(pids) - 1  # -1 for master
                process_metrics['gunicorn_status'] = 'running'
            else:
                process_metrics['gunicorn_status'] = 'not found'
        except Exception:
            process_metrics['gunicorn_status'] = 'unknown'
        # ===== TECHNOLOGY STACK =====
        import flask
        import sqlalchemy
        # Technology stack - ONLY VERIFIED VERSIONS (checked via SSH 2026-01-14)
        # Dynamic versions are fetched at runtime, static ones were verified manually
        technology_stack = {
            'programming': [
                {'name': 'Python', 'version': platform.python_version(), 'icon': '🐍', 'category': 'Backend'},
                {'name': 'Flask', 'version': flask.__version__, 'icon': '🌶️', 'category': 'Web Framework'},
                {'name': 'SQLAlchemy', 'version': sqlalchemy.__version__, 'icon': '🗃️', 'category': 'ORM'},
                {'name': 'Jinja2', 'version': '3.1.6', 'icon': '📄', 'category': 'Templating'},
                {'name': 'Werkzeug', 'version': '3.1.3', 'icon': '🔧', 'category': 'WSGI Toolkit'},
            ],
            'databases': [
                {'name': 'PostgreSQL', 'version': db_metrics.get('version', 'N/A'), 'icon': '🐘', 'category': 'Primary DB'},
            ],
            'ai': [
                {'name': 'Google Gemini', 'version': '2.0 Flash', 'icon': '🤖', 'category': 'AI Chat'},
                {'name': 'Brave Search API', 'version': 'v1', 'icon': '🔍', 'category': 'News Search'},
                {'name': 'Google PageSpeed', 'version': 'v5', 'icon': '', 'category': 'SEO Audit'},
            ],
            'infrastructure': [
                {'name': 'Proxmox VE', 'version': '9.1.1', 'icon': '🖥️', 'category': 'Wirtualizacja'},
                {'name': 'Ubuntu Server', 'version': '24.04.3 LTS', 'icon': '🐧', 'category': 'System OS'},
                {'name': 'Nginx', 'version': '1.24.0', 'icon': '🔧', 'category': 'Web Server'},
            ],
            'network': [
                {'name': 'Fortigate 500D', 'version': None, 'icon': '🛡️', 'category': 'Firewall/VPN'},
                {'name': 'Nginx Proxy Manager', 'version': '2.12.6', 'icon': '🔀', 'category': 'Reverse Proxy'},
                {'name': 'Docker', 'version': '28.2.2', 'icon': '🐳', 'category': 'Containers'},
                {'name': "Let's Encrypt", 'version': 'ACME v2', 'icon': '🔒', 'category': 'SSL/TLS'},
            ],
            'security': [
                {'name': 'Flask-Login', 'version': '0.6.3', 'icon': '🔐', 'category': 'Autentykacja'},
                {'name': 'Flask-WTF', 'version': '1.2.2', 'icon': '🛡️', 'category': 'CSRF Protection'},
                {'name': 'Flask-Limiter', 'version': '4.0.0', 'icon': '⏱️', 'category': 'Rate Limiting'},
                {'name': 'geoip2', 'version': '5.2.0', 'icon': '🌍', 'category': 'GeoIP Blocking'},
                {'name': 'PyOTP', 'version': '2.9.0', 'icon': '📱', 'category': '2FA/TOTP'},
            ],
            'devops': [
                {'name': 'Git', 'version': '2.43.0', 'icon': '📦', 'category': 'Version Control'},
                {'name': 'Gitea', 'version': '1.22.6', 'icon': '🍵', 'category': 'Git Server'},
                {'name': 'systemd', 'version': '255', 'icon': '⚙️', 'category': 'Service Manager'},
            ],
            'servers': [
                {'name': 'NORDABIZ-01', 'ip': '10.22.68.249', 'icon': '🖥️', 'role': 'App Server (VM 249)'},
                {'name': 'R11-REVPROXY-01', 'ip': '10.22.68.250', 'icon': '🔀', 'role': 'Reverse Proxy (VM 119)'},
                {'name': 'R11-DNS-01', 'ip': '10.22.68.171', 'icon': '📡', 'role': 'DNS Server (VM 122)'},
                {'name': 'R11-GIT-INPI', 'ip': '10.22.68.180', 'icon': '📦', 'role': 'Git Server (VM 180)'},
            ],
        }
        return render_template(
            'admin/status_dashboard.html',
            system_metrics=system_metrics,
            db_metrics=db_metrics,
            app_metrics=app_metrics,
            process_metrics=process_metrics,
            technology_stack=technology_stack,
            generated_at=now
        )
    finally:
        db.close()
@app.route('/api/admin/status')
@login_required
def api_admin_status():
    """API endpoint for status dashboard auto-refresh"""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Not authorized'}), 403
    import subprocess
    import platform
    from sqlalchemy import text
    db = SessionLocal()
    try:
        now = datetime.now()
        payload = {'timestamp': now.isoformat()}

        # --- System metrics (Linux probes; None when unavailable) ---
        sys_info = {}
        try:
            proc = subprocess.run(['grep', 'cpu ', '/proc/stat'], capture_output=True, text=True, timeout=2)
            if proc.returncode == 0:
                fields = proc.stdout.split()
                idle_ticks = int(fields[4])
                all_ticks = sum(int(v) for v in fields[1:])
                sys_info['cpu_percent'] = round(100 * (1 - idle_ticks / all_ticks), 1)
        except Exception:
            sys_info['cpu_percent'] = None
        try:
            proc = subprocess.run(['free', '-b'], capture_output=True, text=True, timeout=2)
            if proc.returncode == 0:
                mem_fields = proc.stdout.strip().split('\n')[1].split()
                total_bytes = int(mem_fields[1])
                used_bytes = int(mem_fields[2])
                sys_info['ram_percent'] = round(100 * used_bytes / total_bytes, 1)
        except Exception:
            sys_info['ram_percent'] = None
        try:
            proc = subprocess.run(['df', '-h', '/'], capture_output=True, text=True, timeout=2)
            if proc.returncode == 0:
                root_fields = proc.stdout.strip().split('\n')[1].split()
                sys_info['disk_percent'] = int(root_fields[4].replace('%', ''))
        except Exception:
            sys_info['disk_percent'] = None
        payload['system'] = sys_info

        # --- Database metrics ---
        db_info = {}
        try:
            db_info['active_connections'] = db.execute(text("SELECT count(*) FROM pg_stat_activity WHERE state = 'active'")).scalar()
            db_info['status'] = 'ok'
        except Exception as e:
            db_info['status'] = 'error'
            db_info['error'] = str(e)[:50]
        payload['database'] = db_info

        # --- App metrics: security alerts over the trailing 24h ---
        since = now - timedelta(days=1)
        payload['app'] = {
            'alerts_24h': db.query(SecurityAlert).filter(SecurityAlert.created_at >= since).count()
        }
        return jsonify(payload)
    finally:
        db.close()
# ============================================================
# DEBUG PANEL (Admin only)
# ============================================================
@app.route('/admin/health')
@login_required
def admin_health():
    """
    Graphical health check dashboard (admin only).

    Issues internal test-client GETs against every registered endpoint,
    classifies each response (ok / warning / not_found / error) and renders
    admin/health_dashboard.html with per-category results plus summary stats.

    Classification:
        - 200/302/304/429 -> 'ok' (429 means rate limiting fired, so the
          endpoint itself is alive)
        - 404 -> 'not_found'; >=500 -> 'error'; anything else -> 'warning'

    Company profile checks always include INPI and Waterm, plus 3 random
    other companies.
    """
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))
    from datetime import datetime
    results = []
    # Buckets the template groups results into.
    categories = {
        'public': {'name': 'Strony publiczne', 'icon': '🌐', 'endpoints': []},
        'auth': {'name': 'Autentykacja', 'icon': '🔐', 'endpoints': []},
        'api': {'name': 'API', 'icon': '', 'endpoints': []},
        'admin': {'name': 'Panel admina', 'icon': '👨‍💼', 'endpoints': []},
        'company': {'name': 'Profile firm', 'icon': '🏢', 'endpoints': []},
    }
    # Endpoints to check (path, name, category)
    endpoints = [
        ('/', 'Strona główna', 'public'),
        ('/release-notes', 'Historia zmian', 'public'),
        ('/search?q=test', 'Wyszukiwarka', 'public'),
        ('/chat', 'NordaGPT Chat', 'public'),
        ('/raporty', 'Raporty', 'public'),
        ('/login', 'Logowanie', 'auth'),
        ('/register', 'Rejestracja', 'auth'),
        ('/api/companies', 'Lista firm', 'api'),
        ('/health', 'Health check', 'api'),
        ('/admin/security', 'Bezpieczeństwo', 'admin'),
        ('/admin/seo', 'SEO Audit', 'admin'),
        ('/admin/social-media', 'Social Media', 'admin'),
        ('/admin/analytics', 'Analityka', 'admin'),
        ('/admin/forum', 'Forum', 'admin'),
        ('/admin/kalendarz', 'Kalendarz', 'admin'),
        ('/admin/status', 'Status systemu', 'admin'),
        ('/admin/fees', 'Składki (FIS)', 'admin'),
        ('/admin/zopk/news', 'ZOPK News', 'admin'),
        ('/admin/recommendations', 'Rekomendacje', 'admin'),
    ]
    # Add company profiles: INPI, Waterm (fixed) + 3 random
    db = SessionLocal()
    try:
        import random as rnd
        # Fixed companies to always check
        fixed_companies = db.query(Company).filter(
            Company.name.ilike('%INPI%') | Company.name.ilike('%Waterm%')
        ).all()
        for company in fixed_companies:
            endpoints.append((f'/company/{company.slug}', company.name[:30], 'company'))
        # 3 random companies (excluding fixed ones)
        fixed_ids = [c.id for c in fixed_companies]
        all_other = db.query(Company).filter(~Company.id.in_(fixed_ids)).all()
        random_companies = rnd.sample(all_other, min(3, len(all_other)))
        for company in random_companies:
            endpoints.append((f'/company/{company.slug}', f'{company.name[:25]}...', 'company'))
    finally:
        db.close()
    # Test each endpoint
    with app.test_client() as client:
        for path, name, category in endpoints:
            start_time = datetime.now()
            try:
                response = client.get(path, follow_redirects=False)
                status_code = response.status_code
                response_time = (datetime.now() - start_time).total_seconds() * 1000  # ms
                # Determine status
                # 429 = rate limited (endpoint works, just protected)
                # 403 = forbidden (endpoint works, requires auth)
                if status_code in (200, 302, 304, 429):
                    status = 'ok'
                elif status_code == 404:
                    status = 'not_found'
                elif status_code >= 500:
                    status = 'error'
                else:
                    status = 'warning'
                result = {
                    'path': path,
                    'name': name,
                    'status_code': status_code,
                    'status': status,
                    'response_time': round(response_time, 1),
                    'error': None
                }
            except Exception as e:
                result = {
                    'path': path,
                    'name': name,
                    'status_code': 500,
                    'status': 'error',
                    'response_time': None,
                    'error': str(e)[:100]
                }
            categories[category]['endpoints'].append(result)
            results.append(result)
    # Summary stats
    total = len(results)
    ok_count = sum(1 for r in results if r['status'] == 'ok')
    warning_count = sum(1 for r in results if r['status'] == 'warning')
    error_count = sum(1 for r in results if r['status'] in ('error', 'not_found'))
    # FIX: average only over endpoints that actually completed. The previous
    # formula divided by the TOTAL count, dragging the average down whenever
    # a request raised (response_time=None), and a truthiness test would also
    # have discarded a legitimate 0.0 ms timing.
    timed = [r['response_time'] for r in results if r['response_time'] is not None]
    avg_response_time = sum(timed) / len(timed) if timed else 0
    summary = {
        'total': total,
        'ok': ok_count,
        'warning': warning_count,
        'error': error_count,
        'health_percent': round(100 * ok_count / total, 1) if total else 0,
        'avg_response_time': round(avg_response_time, 1),
        'overall_status': 'ok' if error_count == 0 else ('degraded' if ok_count > error_count else 'critical')
    }
    return render_template(
        'admin/health_dashboard.html',
        categories=categories,
        summary=summary,
        generated_at=datetime.now()
    )
@app.route('/api/admin/health')
@login_required
def api_admin_health():
    """
    JSON variant of the health dashboard, used for auto-refresh.

    Probes a fixed subset of endpoints via the internal test client and
    returns per-endpoint status codes plus an aggregate summary.
    Access: admin only (403 otherwise).
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Not authorized'}), 403
    # Run the same checks as admin_health but return JSON
    checks = [
        ('/', 'Strona główna'),
        ('/release-notes', 'Historia zmian'),
        ('/search?q=test', 'Wyszukiwarka'),
        ('/chat', 'NordaGPT Chat'),
        ('/login', 'Logowanie'),
        ('/api/companies', 'Lista firm'),
        ('/health', 'Health check'),
        ('/admin/security', 'Bezpieczeństwo'),
        ('/admin/status', 'Status systemu'),
        ('/admin/fees', 'Składki (FIS)'),
        ('/admin/zopk/news', 'ZOPK News'),
    ]
    results = []
    with app.test_client() as client:
        for path, label in checks:
            try:
                code = client.get(path, follow_redirects=False).status_code
                # 429 = rate limited, endpoint works
                results.append({
                    'path': path,
                    'name': label,
                    'status': code,
                    'ok': code in (200, 302, 304, 429),
                })
            except Exception as exc:
                results.append({'path': path, 'name': label, 'status': 500,
                                'ok': False, 'error': str(exc)[:50]})
    healthy = sum(1 for r in results if r['ok'])
    return jsonify({
        'success': True,
        'timestamp': datetime.now().isoformat(),
        'results': results,
        'summary': {
            'total': len(results),
            'ok': healthy,
            'failed': len(results) - healthy,
            'health_percent': round(100 * healthy / len(results), 1)
        }
    })
@app.route('/admin/debug')
@login_required
def debug_panel():
    """Render the real-time debug panel (admin only); others are redirected."""
    if current_user.is_admin:
        return render_template('admin/debug.html')
    flash('Brak uprawnień do tej strony.', 'error')
    return redirect(url_for('dashboard'))
@app.route('/api/admin/logs')
@login_required
def api_get_logs():
    """
    API: Get recent log entries from the in-memory debug buffer.

    Query parameters:
        level (str, optional): only entries with this level (DEBUG/INFO/WARNING/ERROR)
        since (str, optional): only entries with timestamp strictly after this ISO string
        limit (int, optional): max entries to return, default 100, clamped to 1..500

    Returns JSON: {'success', 'logs', 'total'} where total is the full
    buffer size before filtering. Admin only (403 otherwise).
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Not authorized'}), 403
    # Get optional filters
    level = request.args.get('level', '')  # DEBUG, INFO, WARNING, ERROR
    since = request.args.get('since', '')  # ISO timestamp
    # FIX: the old int(request.args.get(...)) raised ValueError -> HTTP 500 on
    # non-numeric input; Flask's type=int falls back to the default instead.
    limit = request.args.get('limit', 100, type=int)
    limit = max(1, min(limit, 500))  # clamp; negative/zero would mis-slice below
    entries = list(debug_handler.logs)
    # Filter by level
    if level:
        entries = [entry for entry in entries if entry['level'] == level.upper()]
    # Filter by timestamp (ISO strings compare lexicographically)
    if since:
        entries = [entry for entry in entries if entry['timestamp'] > since]
    # Return most recent
    entries = entries[-limit:]
    return jsonify({
        'success': True,
        'logs': entries,
        'total': len(debug_handler.logs)
    })
@app.route('/api/admin/logs/stream')
@login_required
def api_logs_stream():
    """
    SSE endpoint for real-time log streaming.

    Polls the in-memory debug buffer every 0.5 s and emits each new entry
    as a `data:` event. Admin only (403 otherwise).
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Not authorized'}), 403
    def generate():
        # Number of buffer entries already delivered to this client.
        sent = 0
        while True:
            snapshot = list(debug_handler.logs)
            # FIX: if the buffer was cleared (or shrank below our cursor) the
            # old code stalled forever waiting for the count to catch up;
            # resync the cursor so streaming resumes from the new buffer.
            if len(snapshot) < sent:
                sent = 0
            if len(snapshot) > sent:
                for entry in snapshot[sent:]:
                    yield f"data: {json.dumps(entry)}\n\n"
                sent = len(snapshot)
            # FIX: `time` is imported at module level; the old code re-ran
            # `import time` on every loop iteration.
            time.sleep(0.5)
    return Response(generate(), mimetype='text/event-stream')
@app.route('/api/admin/logs/clear', methods=['POST'])
@login_required
def api_clear_logs():
    """API: empty the in-memory log buffer (admin only, POST)."""
    if current_user.is_admin:
        debug_handler.logs.clear()
        logger.info("Log buffer cleared by admin")
        return jsonify({'success': True})
    return jsonify({'success': False, 'error': 'Not authorized'}), 403
@app.route('/api/admin/test-log', methods=['POST'])
@login_required
def api_test_log():
    """API: emit one test log entry at each level (admin only, POST)."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Not authorized'}), 403
    # One sample entry per severity so the debug panel shows all colours.
    for emit, text in (
        (logger.debug, "Test DEBUG message"),
        (logger.info, "Test INFO message"),
        (logger.warning, "Test WARNING message"),
        (logger.error, "Test ERROR message"),
    ):
        emit(text)
    return jsonify({'success': True, 'message': 'Test logs generated'})
@app.route('/admin/digital-maturity')
@login_required
def digital_maturity_dashboard():
    """
    Admin dashboard for digital maturity assessment results.

    Joins Company with CompanyDigitalMaturity and CompanyWebsiteAnalysis
    (inner joins, so only companies having BOTH records and a positive
    overall_score appear), computes aggregate stats plus top/bottom
    rankings, and renders admin/digital_maturity.html.

    Access: admin only; non-admins are redirected to the dashboard.
    """
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))
    db = SessionLocal()
    try:
        from sqlalchemy import func, desc
        # Get all companies with maturity data, sorted best-score-first.
        # Each row is a plain column tuple (no ORM objects loaded).
        companies_query = db.query(
            Company.id,
            Company.name,
            Company.slug,
            Company.website,
            CompanyDigitalMaturity.overall_score,
            CompanyDigitalMaturity.online_presence_score,
            CompanyDigitalMaturity.sales_readiness,
            CompanyDigitalMaturity.total_opportunity_value,
            CompanyWebsiteAnalysis.opportunity_score,
            CompanyWebsiteAnalysis.has_blog,
            CompanyWebsiteAnalysis.has_portfolio,
            CompanyWebsiteAnalysis.has_contact_form,
            CompanyWebsiteAnalysis.content_richness_score,
            CompanyDigitalMaturity.critical_gaps,
            CompanyWebsiteAnalysis.missing_features
        ).join(
            CompanyDigitalMaturity, Company.id == CompanyDigitalMaturity.company_id
        ).join(
            CompanyWebsiteAnalysis, Company.id == CompanyWebsiteAnalysis.company_id
        ).filter(
            CompanyDigitalMaturity.overall_score > 0
        ).order_by(
            desc(CompanyDigitalMaturity.overall_score)
        ).all()
        # Calculate stats across all analyzed companies.
        total_analyzed = len(companies_query)
        avg_score = round(sum(c.overall_score for c in companies_query) / total_analyzed, 1) if total_analyzed else 0
        # total_opportunity_value may be NULL/Decimal — coerce to float, 0 if missing.
        total_opportunity = sum(float(c.total_opportunity_value or 0) for c in companies_query)
        warm_leads = [c for c in companies_query if c.sales_readiness == 'warm']
        cold_leads = [c for c in companies_query if c.sales_readiness == 'cold']
        # Top 10 and bottom 10 (query is already sorted descending by score).
        top_performers = companies_query[:10]
        bottom_performers = sorted(companies_query, key=lambda c: c.overall_score)[:10]
        # Top opportunities: highest estimated opportunity value first.
        top_opportunities = sorted(
            companies_query,
            key=lambda c: float(c.total_opportunity_value or 0),
            reverse=True
        )[:10]
        return render_template('admin/digital_maturity.html',
            total_analyzed=total_analyzed,
            avg_score=avg_score,
            total_opportunity=total_opportunity,
            warm_leads_count=len(warm_leads),
            cold_leads_count=len(cold_leads),
            top_performers=top_performers,
            bottom_performers=bottom_performers,
            top_opportunities=top_opportunities,
            all_companies=companies_query
        )
    finally:
        db.close()
@app.route('/admin/social-media')
@login_required
def admin_social_media():
    """
    Admin dashboard for social media analytics.

    Aggregates CompanySocialMedia rows (only is_valid == True for the
    coverage stats) into per-platform counts, per-company platform
    combinations, exclusivity lists (only-FB / only-LI / only-IG), and a
    data-freshness breakdown, then renders admin/social_media.html.

    Access: admin only; non-admins are redirected to the dashboard.
    """
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))
    db = SessionLocal()
    try:
        from sqlalchemy import func, case, distinct
        from database import CompanySocialMedia
        # Total counts per platform: link rows and distinct companies.
        platform_stats = db.query(
            CompanySocialMedia.platform,
            func.count(CompanySocialMedia.id).label('count'),
            func.count(distinct(CompanySocialMedia.company_id)).label('companies')
        ).filter(
            CompanySocialMedia.is_valid == True
        ).group_by(CompanySocialMedia.platform).all()
        # Companies with each platform combination.
        # NOTE: array_agg over an OUTER join yields [None] for companies with
        # no valid social media rows — the [0] is not None checks below rely
        # on that.
        company_platforms = db.query(
            Company.id,
            Company.name,
            Company.slug,
            func.array_agg(distinct(CompanySocialMedia.platform)).label('platforms')
        ).outerjoin(
            CompanySocialMedia,
            (Company.id == CompanySocialMedia.company_id) & (CompanySocialMedia.is_valid == True)
        ).group_by(Company.id, Company.name, Company.slug).all()
        # Analysis: split companies into with / without social media.
        total_companies = len(company_platforms)
        companies_with_sm = [c for c in company_platforms if c.platforms and c.platforms[0] is not None]
        companies_without_sm = [c for c in company_platforms if not c.platforms or c.platforms[0] is None]
        # Platform combinations: key is a sorted, comma-joined platform list.
        platform_combos_raw = {}
        for c in companies_with_sm:
            platforms = sorted([p for p in c.platforms if p]) if c.platforms else []
            key = ', '.join(platforms) if platforms else 'Brak'
            if key not in platform_combos_raw:
                platform_combos_raw[key] = []
            platform_combos_raw[key].append({'id': c.id, 'name': c.name, 'slug': c.slug})
        # Sort by number of companies (descending)
        platform_combos = dict(sorted(platform_combos_raw.items(), key=lambda x: len(x[1]), reverse=True))
        # Exclusivity lists: companies present on exactly one platform.
        # Only Facebook
        only_facebook = [c for c in companies_with_sm if set(c.platforms) == {'facebook'}]
        # Only LinkedIn
        only_linkedin = [c for c in companies_with_sm if set(c.platforms) == {'linkedin'}]
        # Only Instagram
        only_instagram = [c for c in companies_with_sm if set(c.platforms) == {'instagram'}]
        # Has all major (FB + LI + IG)
        has_all_major = [c for c in companies_with_sm if {'facebook', 'linkedin', 'instagram'}.issubset(set(c.platforms or []))]
        # Get all social media entries with company info for detailed view
        # (includes invalid entries — no is_valid filter here).
        all_entries = db.query(
            CompanySocialMedia,
            Company.name.label('company_name'),
            Company.slug.label('company_slug')
        ).join(Company).order_by(
            Company.name, CompanySocialMedia.platform
        ).all()
        # Freshness analysis: verified within 30 days vs stale beyond 90 days.
        from datetime import datetime, timedelta
        now = datetime.now()
        fresh_30d = db.query(func.count(CompanySocialMedia.id)).filter(
            CompanySocialMedia.verified_at >= now - timedelta(days=30)
        ).scalar()
        stale_90d = db.query(func.count(CompanySocialMedia.id)).filter(
            CompanySocialMedia.verified_at < now - timedelta(days=90)
        ).scalar()
        return render_template('admin/social_media.html',
            platform_stats=platform_stats,
            total_companies=total_companies,
            companies_with_sm=len(companies_with_sm),
            companies_without_sm=companies_without_sm,
            platform_combos=platform_combos,
            only_facebook=only_facebook,
            only_linkedin=only_linkedin,
            only_instagram=only_instagram,
            has_all_major=has_all_major,
            all_entries=all_entries,
            fresh_30d=fresh_30d,
            stale_90d=stale_90d,
            now=now
        )
    finally:
        db.close()
# ============================================================
# SOCIAL MEDIA AUDIT ADMIN DASHBOARD
# ============================================================
@app.route('/admin/social-audit')
@login_required
def admin_social_audit():
    """
    Admin dashboard for Social Media audit overview.

    Displays:
    - Summary stats (coverage per platform, total profiles)
    - Platform coverage with progress bars
    - Sortable table with platform icons per company
    - Followers aggregate statistics

    Only active companies and is_valid social media rows are counted.
    Access: admin only; non-admins are redirected to the dashboard.
    """
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))
    db = SessionLocal()
    try:
        from sqlalchemy import func, distinct
        from database import CompanySocialMedia, Category
        # Platform definitions (order drives the template columns).
        platforms = ['facebook', 'instagram', 'linkedin', 'youtube', 'twitter', 'tiktok']
        # Total companies count
        total_companies = db.query(func.count(Company.id)).filter(Company.status == 'active').scalar()
        # Get all companies with their social media profiles
        # (outer join so companies without a category are still listed).
        companies_query = db.query(
            Company.id,
            Company.name,
            Company.slug,
            Company.website,
            Category.name.label('category_name')
        ).outerjoin(
            Category,
            Company.category_id == Category.id
        ).filter(
            Company.status == 'active'
        ).order_by(Company.name).all()
        # Get social media data per company (valid profiles only).
        social_data = db.query(
            CompanySocialMedia.company_id,
            CompanySocialMedia.platform,
            CompanySocialMedia.url,
            CompanySocialMedia.followers_count,
            CompanySocialMedia.verified_at,
            CompanySocialMedia.is_valid
        ).filter(
            CompanySocialMedia.is_valid == True
        ).all()
        # Group social media by company: {company_id: {platform: {...}}}.
        company_social = {}
        for sm in social_data:
            if sm.company_id not in company_social:
                company_social[sm.company_id] = {}
            company_social[sm.company_id][sm.platform] = {
                'url': sm.url,
                'followers': sm.followers_count or 0,
                'verified_at': sm.verified_at
            }
        # Build companies list with social media info
        companies = []
        for row in companies_query:
            sm_data = company_social.get(row.id, {})
            total_followers = sum(p.get('followers', 0) for p in sm_data.values())
            platform_count = len(sm_data)
            # Get last verified date across all platforms
            verified_dates = [p.get('verified_at') for p in sm_data.values() if p.get('verified_at')]
            last_verified = max(verified_dates) if verified_dates else None
            companies.append({
                'id': row.id,
                'name': row.name,
                'slug': row.slug,
                'website': row.website,
                'category': row.category_name,
                'platforms': sm_data,
                'platform_count': platform_count,
                'total_followers': total_followers,
                'last_verified': last_verified,
                # Per-platform boolean flags for the template's icon columns.
                'has_facebook': 'facebook' in sm_data,
                'has_instagram': 'instagram' in sm_data,
                'has_linkedin': 'linkedin' in sm_data,
                'has_youtube': 'youtube' in sm_data,
                'has_twitter': 'twitter' in sm_data,
                'has_tiktok': 'tiktok' in sm_data
            })
        # Platform statistics: distinct active-company coverage per platform.
        platform_stats = {}
        for platform in platforms:
            count = db.query(func.count(distinct(CompanySocialMedia.company_id))).filter(
                CompanySocialMedia.platform == platform,
                CompanySocialMedia.is_valid == True
            ).scalar() or 0
            platform_stats[platform] = {
                'count': count,
                'percent': round(count / total_companies * 100) if total_companies > 0 else 0
            }
        # Summary stats
        companies_with_sm = len([c for c in companies if c['platform_count'] > 0])
        companies_without_sm = total_companies - companies_with_sm
        total_profiles = sum(c['platform_count'] for c in companies)
        total_followers = sum(c['total_followers'] for c in companies)
        # Top followers (top 10 companies by total followers)
        top_followers = sorted([c for c in companies if c['total_followers'] > 0],
                               key=lambda x: x['total_followers'], reverse=True)[:10]
        stats = {
            'total_companies': total_companies,
            'companies_with_sm': companies_with_sm,
            'companies_without_sm': companies_without_sm,
            'total_profiles': total_profiles,
            'total_followers': total_followers,
            'platform_stats': platform_stats
        }
        # Get unique categories (for the template's filter dropdown,
        # presumably — confirm against the template).
        categories = sorted(set(c['category'] for c in companies if c['category']))
        # Convert dicts to objects so the template can use attribute access.
        class CompanyRow:
            # Thin attribute bag: copies each dict key onto the instance.
            def __init__(self, data):
                for key, value in data.items():
                    setattr(self, key, value)
        companies_objects = [CompanyRow(c) for c in companies]
        top_followers_objects = [CompanyRow(c) for c in top_followers]
        return render_template('admin/social_audit_dashboard.html',
            companies=companies_objects,
            stats=stats,
            categories=categories,
            platforms=platforms,
            top_followers=top_followers_objects,
            now=datetime.now()
        )
    finally:
        db.close()
# ============================================================
# IT AUDIT ADMIN DASHBOARD
# ============================================================
@app.route('/admin/it-audit')
@login_required
def admin_it_audit():
    """
    Admin dashboard for IT audit overview.

    Displays:
    - Summary stats (audit count, average scores, maturity distribution)
    - Technology adoption stats (Azure AD, M365, PBS, Zabbix, EDR, DR)
    - Collaboration flags distribution
    - Company table with IT audit data
    - Collaboration matches matrix

    Only the LATEST audit per company is considered (via a max(audit_date)
    subquery joined back to ITAudit). Companies without any audit still
    appear, with all audit columns NULL.

    Access: Admin only
    """
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))
    db = SessionLocal()
    try:
        from sqlalchemy import func, distinct
        # Import IT audit models and service
        from database import ITAudit, ITCollaborationMatch
        from it_audit_service import get_maturity_level_label
        # Get all active companies with their latest IT audit
        # Using subquery to get only the latest audit per company
        latest_audit_subq = db.query(
            ITAudit.company_id,
            func.max(ITAudit.audit_date).label('max_date')
        ).group_by(ITAudit.company_id).subquery()
        companies_query = db.query(
            Company.id,
            Company.name,
            Company.slug,
            ITAudit.id.label('audit_id'),
            ITAudit.overall_score,
            ITAudit.security_score,
            ITAudit.collaboration_score,
            ITAudit.completeness_score,
            ITAudit.maturity_level,
            ITAudit.audit_date,
            ITAudit.has_azure_ad,
            ITAudit.has_m365,
            ITAudit.has_proxmox_pbs,
            ITAudit.monitoring_solution,
            ITAudit.has_edr,
            ITAudit.has_dr_plan
        ).outerjoin(
            latest_audit_subq,
            Company.id == latest_audit_subq.c.company_id
        ).outerjoin(
            # Second outer join pins ITAudit to the per-company max date.
            ITAudit,
            (Company.id == ITAudit.company_id) &
            (ITAudit.audit_date == latest_audit_subq.c.max_date)
        ).filter(
            Company.status == 'active'
        ).order_by(
            Company.name
        ).all()
        # Build companies list with named attributes for template
        companies = []
        for row in companies_query:
            # Detect Zabbix from monitoring_solution field
            has_zabbix = row.monitoring_solution and 'zabbix' in str(row.monitoring_solution).lower()
            companies.append({
                'id': row.id,
                'name': row.name,
                'slug': row.slug,
                'audit_id': row.audit_id,
                'overall_score': row.overall_score,
                'security_score': row.security_score,
                'collaboration_score': row.collaboration_score,
                'completeness_score': row.completeness_score,
                'maturity_level': row.maturity_level,
                'maturity_label': get_maturity_level_label(row.maturity_level) if row.maturity_level else None,
                'audit_date': row.audit_date,
                'has_azure_ad': row.has_azure_ad,
                'has_m365': row.has_m365,
                'has_proxmox_pbs': row.has_proxmox_pbs,
                'has_zabbix': has_zabbix,
                'has_edr': row.has_edr,
                'has_dr_plan': row.has_dr_plan
            })
        # Calculate statistics; an overall_score of None means "never audited".
        audited_companies = [c for c in companies if c['overall_score'] is not None]
        not_audited = [c for c in companies if c['overall_score'] is None]
        # Maturity distribution
        maturity_counts = {
            'basic': 0,
            'developing': 0,
            'established': 0,
            'advanced': 0
        }
        for c in audited_companies:
            level = c['maturity_level']
            if level in maturity_counts:
                maturity_counts[level] += 1
        # Calculate average scores (None when nothing has been audited yet).
        if audited_companies:
            avg_overall = round(sum(c['overall_score'] for c in audited_companies) / len(audited_companies))
            avg_security = round(sum(c['security_score'] or 0 for c in audited_companies) / len(audited_companies))
            avg_collaboration = round(sum(c['collaboration_score'] or 0 for c in audited_companies) / len(audited_companies))
        else:
            avg_overall = None
            avg_security = None
            avg_collaboration = None
        # Technology adoption stats
        tech_stats = {
            'azure_ad': len([c for c in audited_companies if c['has_azure_ad']]),
            'm365': len([c for c in audited_companies if c['has_m365']]),
            'proxmox_pbs': len([c for c in audited_companies if c['has_proxmox_pbs']]),
            'zabbix': len([c for c in audited_companies if c['has_zabbix']]),
            'edr': len([c for c in audited_companies if c['has_edr']]),
            'dr_plan': len([c for c in audited_companies if c['has_dr_plan']])
        }
        # Collaboration flags stats from latest audits
        # (one COUNT query per flag, restricted to the latest-audit IDs).
        collab_stats = {}
        if audited_companies:
            collab_flags = [
                'open_to_shared_licensing',
                'open_to_backup_replication',
                'open_to_teams_federation',
                'open_to_shared_monitoring',
                'open_to_collective_purchasing',
                'open_to_knowledge_sharing'
            ]
            for flag in collab_flags:
                count = db.query(func.count(ITAudit.id)).filter(
                    ITAudit.id.in_([c['audit_id'] for c in audited_companies if c['audit_id']]),
                    getattr(ITAudit, flag) == True
                ).scalar()
                collab_stats[flag] = count
        # Get collaboration matches with both companies' info
        matches = db.query(ITCollaborationMatch).order_by(
            ITCollaborationMatch.match_score.desc()
        ).all()
        # Build flat list of collaboration matches with all necessary attributes
        class CollabMatchRow:
            """Helper class for template attribute access"""
            def __init__(self, **kwargs):
                for key, value in kwargs.items():
                    setattr(self, key, value)
        collaboration_matches = []
        for match in matches:
            # Get company A and B info
            # NOTE(review): this issues two queries per match (N+1); fine for
            # small match counts, consider a joined query if matches grow.
            company_a = db.query(Company).filter(Company.id == match.company_a_id).first()
            company_b = db.query(Company).filter(Company.id == match.company_b_id).first()
            collaboration_matches.append(CollabMatchRow(
                id=match.id,
                match_type=match.match_type,
                company_a_id=match.company_a_id,
                company_a_name=company_a.name if company_a else 'Nieznana',
                company_a_slug=company_a.slug if company_a else '',
                company_b_id=match.company_b_id,
                company_b_name=company_b.name if company_b else 'Nieznana',
                company_b_slug=company_b.slug if company_b else '',
                match_reason=match.match_reason,
                match_score=match.match_score,
                status=match.status,
                created_at=match.created_at
            ))
        stats = {
            # Main stats
            'total_audits': len(audited_companies),
            'total_companies': len(companies),
            'companies_without_audit': len(not_audited),
            # Score averages
            'avg_overall_score': avg_overall,
            'avg_security_score': avg_security,
            'avg_collaboration_score': avg_collaboration,
            # Maturity distribution (flattened for template)
            'maturity_basic': maturity_counts['basic'],
            'maturity_developing': maturity_counts['developing'],
            'maturity_established': maturity_counts['established'],
            'maturity_advanced': maturity_counts['advanced'],
            # Technology adoption stats (matching template naming with has_* prefix)
            'has_azure_ad': tech_stats['azure_ad'],
            'has_m365': tech_stats['m365'],
            'has_proxmox_pbs': tech_stats['proxmox_pbs'],
            'has_zabbix': tech_stats['zabbix'],
            'has_edr': tech_stats['edr'],
            'has_dr_plan': tech_stats['dr_plan'],
            # Collaboration flags
            'open_to_shared_licensing': collab_stats.get('open_to_shared_licensing', 0),
            'open_to_backup_replication': collab_stats.get('open_to_backup_replication', 0),
            'open_to_teams_federation': collab_stats.get('open_to_teams_federation', 0),
            'open_to_shared_monitoring': collab_stats.get('open_to_shared_monitoring', 0),
            'open_to_collective_purchasing': collab_stats.get('open_to_collective_purchasing', 0),
            'open_to_knowledge_sharing': collab_stats.get('open_to_knowledge_sharing', 0),
            # Legacy nested structures (for any templates that still use them)
            'maturity_counts': maturity_counts,
            'tech_stats': tech_stats,
            'collab_stats': collab_stats,
            'total_matches': len(collaboration_matches)
        }
        # Convert companies list to objects with attribute access for template
        class CompanyRow:
            # Thin attribute bag: copies each dict key onto the instance.
            def __init__(self, data):
                for key, value in data.items():
                    setattr(self, key, value)
        companies_objects = [CompanyRow(c) for c in companies]
        return render_template('admin/it_audit_dashboard.html',
            companies=companies_objects,
            stats=stats,
            collaboration_matches=collaboration_matches,
            now=datetime.now()
        )
    finally:
        db.close()
# ============================================================
# IT AUDIT FORM
# ============================================================
@app.route('/it-audit/form')
@login_required
def it_audit_form():
    """
    Render the IT audit data-collection form (9 sections: IT contact,
    cloud & identity, servers, endpoints, security, backup & DR,
    monitoring, business apps, collaboration).

    The target company comes from the ``company_id`` query parameter,
    falling back to the current user's own company. An admin with no
    company of their own is sent to the admin dashboard to pick one.

    Access control:
    - Admin users can open the form for any company
    - Regular users only for their own company

    The latest existing audit (if any) is passed to the template so the
    form can be pre-filled.
    """
    db = SessionLocal()
    try:
        from database import ITAudit, Company
        target_id = request.args.get('company_id', type=int)
        if not target_id:
            # No explicit company requested — fall back to the user's own.
            if current_user.company_id:
                target_id = current_user.company_id
            elif current_user.is_admin:
                # Admin without a company of their own must pick one first.
                flash('Wybierz firmę do przeprowadzenia audytu IT.', 'info')
                return redirect(url_for('admin_it_audit'))
            else:
                flash('Nie jesteś przypisany do żadnej firmy.', 'error')
                return redirect(url_for('dashboard'))
        company = db.query(Company).filter(
            Company.id == target_id,
            Company.status == 'active'
        ).first()
        if company is None:
            flash('Firma nie została znaleziona.', 'error')
            return redirect(url_for('dashboard'))
        # Non-admins may only open the form for their own company.
        if not (current_user.is_admin or current_user.company_id == company.id):
            flash('Nie masz uprawnień do edycji audytu IT tej firmy.', 'error')
            return redirect(url_for('dashboard'))
        # Most recent audit pre-fills the form fields.
        latest_audit = (
            db.query(ITAudit)
            .filter(ITAudit.company_id == company.id)
            .order_by(ITAudit.audit_date.desc())
            .first()
        )
        logger.info(f"IT audit form viewed by {current_user.email} for company: {company.name}")
        return render_template('it_audit_form.html',
                               company=company,
                               audit=latest_audit)
    finally:
        db.close()
@app.route('/it-audit/save', methods=['POST'])
@login_required
@limiter.limit("30 per hour")
def it_audit_save():
    """
    Save IT audit form data with automatic scoring.

    This endpoint saves IT infrastructure audit data from the form,
    calculates security, collaboration, and completeness scores,
    and stores the audit in the database.

    Request JSON body (form-encoded also accepted):
    - company_id: Company ID (integer); defaults to the current user's
      company when omitted
    - All audit fields from the 9-section form

    Returns:
    - Success (200): audit scores, history count, and a redirect URL to
      the company profile
    - Error: JSON error message with status 400/403/404/500

    Access:
    - Members can save audits for their own company
    - Admins can save audits for any company

    Rate limited to 30 requests per hour per user.
    """
    from database import ITAudit, Company
    from it_audit_service import ITAuditService
    # Parse request data (supports both JSON and form data)
    if request.is_json:
        data = request.get_json()
    else:
        data = request.form.to_dict(flat=True)
    if not data:
        return jsonify({
            'success': False,
            'error': 'Brak danych w żądaniu.'
        }), 400
    # Get company_id; coerce to int defensively (may arrive as a string
    # from form data).
    company_id = data.get('company_id')
    if company_id:
        try:
            company_id = int(company_id)
        except (ValueError, TypeError):
            return jsonify({
                'success': False,
                'error': 'Nieprawidłowy identyfikator firmy.'
            }), 400
    else:
        # Use current user's company if not specified
        if current_user.company_id:
            company_id = current_user.company_id
        else:
            return jsonify({
                'success': False,
                'error': 'Podaj company_id firmy do audytu.'
            }), 400
    db = SessionLocal()
    try:
        # Find company (must exist and be active).
        company = db.query(Company).filter(
            Company.id == company_id,
            Company.status == 'active'
        ).first()
        if not company:
            return jsonify({
                'success': False,
                'error': 'Firma nie znaleziona lub nieaktywna.'
            }), 404
        # Access control: admin can save for any company, users only their own
        if not current_user.is_admin and current_user.company_id != company.id:
            return jsonify({
                'success': False,
                'error': 'Nie masz uprawnień do edycji audytu IT tej firmy.'
            }), 403
        # Parse form data into audit_data dictionary (typed booleans/arrays).
        audit_data = _parse_it_audit_form_data(data)
        audit_data['audited_by'] = current_user.id
        audit_data['audit_source'] = 'form'
        # Save audit using service; the service computes all scores.
        service = ITAuditService(db)
        audit = service.save_audit(company_id, audit_data)
        # Check if this is a partial submission (completeness < 100).
        # A missing completeness_score is treated as partial.
        is_partial = audit.completeness_score < 100 if audit.completeness_score else True
        # Count previous audits for this company (to indicate if history exists)
        audit_history_count = db.query(ITAudit).filter(
            ITAudit.company_id == company_id
        ).count()
        logger.info(
            f"IT audit saved by {current_user.email} for company {company.name}: "
            f"overall={audit.overall_score}, security={audit.security_score}, "
            f"collaboration={audit.collaboration_score}, completeness={audit.completeness_score}"
            f"{' (partial)' if is_partial else ''}"
        )
        # Build appropriate success message, graded by completeness tier.
        if is_partial:
            if audit.completeness_score < 30:
                message = f'Audyt IT został zapisany. Formularz wypełniony w {audit.completeness_score}%. Uzupełnij więcej sekcji, aby uzyskać pełniejszy obraz infrastruktury IT.'
            elif audit.completeness_score < 70:
                message = f'Audyt IT został zapisany. Wypełniono {audit.completeness_score}% formularza. Rozważ uzupełnienie pozostałych sekcji.'
            else:
                message = f'Audyt IT został zapisany. Formularz prawie kompletny ({audit.completeness_score}%).'
        else:
            message = 'Audyt IT został zapisany pomyślnie. Formularz jest kompletny.'
        # Return success response with detailed information
        return jsonify({
            'success': True,
            'message': message,
            'company_id': company.id,
            'company_name': company.name,
            'company_slug': company.slug,
            'audit': {
                'id': audit.id,
                'audit_date': audit.audit_date.isoformat() if audit.audit_date else None,
                'overall_score': audit.overall_score,
                'security_score': audit.security_score,
                'collaboration_score': audit.collaboration_score,
                'completeness_score': audit.completeness_score,
                'maturity_level': audit.maturity_level,
                'is_partial': is_partial,
            },
            'history_count': audit_history_count,  # Number of audits for this company (including current)
            'redirect_url': url_for('company_detail_by_slug', slug=company.slug)
        }), 200
    except Exception as e:
        db.rollback()
        logger.error(f"Error saving IT audit for company {company_id}: {e}")
        return jsonify({
            'success': False,
            'error': f'Błąd podczas zapisywania audytu: {str(e)}'
        }), 500
    finally:
        db.close()
def _parse_it_audit_form_data(data: dict) -> dict:
"""
Parse form data into audit_data dictionary.
Handles:
- Boolean fields (checkboxes)
- Array fields (multi-select)
- String and numeric fields
Args:
data: Raw form data dictionary
Returns:
Parsed audit_data dictionary with proper types
"""
# Boolean fields (checkboxes - present means True)
boolean_fields = [
'has_it_manager', 'it_outsourced',
'has_azure_ad', 'has_m365', 'has_google_workspace',
'has_mdm', 'has_edr', 'has_vpn', 'has_mfa',
'has_proxmox_pbs', 'has_dr_plan',
'has_local_ad', 'has_ad_azure_sync',
'open_to_shared_licensing', 'open_to_backup_replication',
'open_to_teams_federation', 'open_to_shared_monitoring',
'open_to_collective_purchasing', 'open_to_knowledge_sharing',
]
# Array fields (multi-select - may come as comma-separated or multiple values)
array_fields = [
'm365_plans', 'teams_usage', 'server_types', 'server_os',
'desktop_os', 'mfa_scope', 'backup_targets',
]
# String fields
string_fields = [
'it_provider_name', 'it_contact_name', 'it_contact_email',
'azure_tenant_name', 'azure_user_count',
'server_count', 'virtualization_platform', 'network_firewall_brand',
'employee_count', 'computer_count', 'mdm_solution',
'antivirus_solution', 'edr_solution', 'vpn_solution',
'backup_solution', 'backup_frequency',
'monitoring_solution', 'ad_domain_name',
'ticketing_system', 'erp_system', 'crm_system', 'document_management',
]
audit_data = {}
# Parse boolean fields
for field in boolean_fields:
value = data.get(field)
if value is None:
audit_data[field] = False
elif isinstance(value, bool):
audit_data[field] = value
elif isinstance(value, str):
audit_data[field] = value.lower() in ('true', '1', 'on', 'yes')
else:
audit_data[field] = bool(value)
# Parse array fields
for field in array_fields:
value = data.get(field)
if value is None:
audit_data[field] = []
elif isinstance(value, list):
audit_data[field] = value
elif isinstance(value, str):
# Handle comma-separated values
audit_data[field] = [v.strip() for v in value.split(',') if v.strip()]
else:
audit_data[field] = [value]
# Parse string fields
for field in string_fields:
value = data.get(field)
if value is not None and isinstance(value, str):
audit_data[field] = value.strip() if value.strip() else None
else:
audit_data[field] = None
# Parse zabbix_integration as JSON if present
zabbix_integration = data.get('zabbix_integration')
if zabbix_integration:
if isinstance(zabbix_integration, dict):
audit_data['zabbix_integration'] = zabbix_integration
elif isinstance(zabbix_integration, str):
try:
audit_data['zabbix_integration'] = json.loads(zabbix_integration)
except json.JSONDecodeError:
audit_data['zabbix_integration'] = {'hostname': zabbix_integration}
else:
audit_data['zabbix_integration'] = None
else:
# Check for zabbix_hostname field as alternative
zabbix_hostname = data.get('zabbix_hostname')
if zabbix_hostname and isinstance(zabbix_hostname, str) and zabbix_hostname.strip():
audit_data['zabbix_integration'] = {'hostname': zabbix_hostname.strip()}
else:
audit_data['zabbix_integration'] = None
return audit_data
@app.route('/api/it-audit/matches/<int:company_id>')
@login_required
def api_it_audit_matches(company_id):
    """
    API: Get IT audit collaboration matches for a company.

    Returns every collaboration match in which the given company appears
    as either side of the pair (company_a or company_b). Admin-only:
    collaboration matches are not visible to regular users.

    Args:
        company_id: Company ID to get matches for

    Returns:
        JSON with a list of matches, each carrying:
        - id, match_type, match_score, status (plus display labels)
        - partner company info (id, name, slug)
        - match_reason and shared_attributes
    """
    # Collaboration matches are restricted to administrators.
    if not current_user.is_admin:
        return jsonify({
            'success': False,
            'error': 'Brak uprawnień. Tylko administrator może przeglądać dopasowania.'
        }), 403
    db_session = SessionLocal()
    try:
        from it_audit_service import ITAuditService
        from database import ITCollaborationMatch
        # The company must exist before we look up its matches.
        company = db_session.query(Company).filter_by(id=company_id).first()
        if company is None:
            return jsonify({
                'success': False,
                'error': 'Firma nie znaleziona'
            }), 404
        matches = ITAuditService(db_session).get_matches_for_company(company_id)
        # Serialize matches; "partner" is whichever side is not the queried company.
        payload = []
        for m in matches:
            partner = m.company_b if m.company_a_id == company_id else m.company_a
            payload.append({
                'id': m.id,
                'match_type': m.match_type,
                'match_type_label': m.match_type_label,
                'match_score': m.match_score,
                'match_reason': m.match_reason,
                'status': m.status,
                'status_label': m.status_label,
                'shared_attributes': m.shared_attributes,
                'created_at': m.created_at.isoformat() if m.created_at else None,
                'partner': {
                    'id': partner.id if partner else None,
                    'name': partner.name if partner else None,
                    'slug': partner.slug if partner else None,
                }
            })
        return jsonify({
            'success': True,
            'company_id': company_id,
            'company_name': company.name,
            'matches_count': len(payload),
            'matches': payload
        }), 200
    except Exception as e:
        logger.error(f"Error fetching IT audit matches for company {company_id}: {e}")
        return jsonify({
            'success': False,
            'error': f'Błąd podczas pobierania dopasowań: {str(e)}'
        }), 500
    finally:
        db_session.close()
@app.route('/api/it-audit/history/<int:company_id>')
@login_required
def api_it_audit_history(company_id):
    """
    API: Get IT audit history for a company.

    Lists a company's IT audits ordered newest-first; the first entry in
    the list is always the latest (current) audit.

    Access:
        - Admin: any company
        - Regular user: only their own company

    Args:
        company_id: Company ID to get audit history for

    Query params:
        limit: Maximum number of audits to return (default 10, clamped 1-50)

    Returns:
        JSON with the audit list: id, audit_date, scores, maturity level,
        plus is_current / is_partial flags per entry.
    """
    from it_audit_service import get_company_audit_history
    # Non-admins may only inspect the history of their own company.
    if not (current_user.is_admin or current_user.company_id == company_id):
        return jsonify({
            'success': False,
            'error': 'Brak uprawnień do przeglądania historii audytów tej firmy.'
        }), 403
    # Clamp the requested page size into a sane 1-50 range.
    requested_limit = request.args.get('limit', 10, type=int)
    limit = min(max(requested_limit, 1), 50)
    db_session = SessionLocal()
    try:
        company = db_session.query(Company).filter_by(id=company_id).first()
        if company is None:
            return jsonify({
                'success': False,
                'error': 'Firma nie znaleziona'
            }), 404
        audits = get_company_audit_history(db_session, company_id, limit)
        history = [
            {
                'id': audit.id,
                'audit_date': audit.audit_date.isoformat() if audit.audit_date else None,
                'audit_source': audit.audit_source,
                'overall_score': audit.overall_score,
                'security_score': audit.security_score,
                'collaboration_score': audit.collaboration_score,
                'completeness_score': audit.completeness_score,
                'maturity_level': audit.maturity_level,
                'is_current': position == 0,  # newest audit comes first
                'is_partial': (audit.completeness_score or 0) < 100,
            }
            for position, audit in enumerate(audits)
        ]
        return jsonify({
            'success': True,
            'company_id': company_id,
            'company_name': company.name,
            'company_slug': company.slug,
            'total_audits': len(history),
            'history': history
        }), 200
    except Exception as e:
        logger.error(f"Error fetching IT audit history for company {company_id}: {e}")
        return jsonify({
            'success': False,
            'error': f'Błąd podczas pobierania historii audytów: {str(e)}'
        }), 500
    finally:
        db_session.close()
@app.route('/api/it-audit/export')
@login_required
def api_it_audit_export():
    """
    API: Export IT audit data as CSV (admin-only).

    Exports the most recent audit of every audited company together with
    basic company information, scores and a few key capability flags.

    Returns:
        CSV file download with one row per company.
    """
    if not current_user.is_admin:
        return jsonify({
            'success': False,
            'error': 'Tylko administrator może eksportować dane audytów.'
        }), 403
    db_session = SessionLocal()
    try:
        from database import ITAudit
        import csv
        from io import StringIO
        # All audits joined with their companies, grouped by company and
        # ordered newest-first within each group.
        joined_rows = db_session.query(ITAudit, Company).join(
            Company, ITAudit.company_id == Company.id
        ).order_by(
            ITAudit.company_id,
            ITAudit.audit_date.desc()
        ).all()
        # Keep only the first (i.e. latest) audit encountered per company.
        exported_company_ids = set()
        latest_audits = []
        for audit, company in joined_rows:
            if company.id in exported_company_ids:
                continue
            exported_company_ids.add(company.id)
            latest_audits.append((audit, company))
        # Build the CSV in memory.
        csv_buffer = StringIO()
        writer = csv.writer(csv_buffer)
        writer.writerow([
            'Firma', 'NIP', 'Kategoria', 'Data audytu',
            'Wynik ogólny', 'Bezpieczeństwo', 'Współpraca', 'Kompletność',
            'Poziom dojrzałości', 'Azure AD', 'M365', 'EDR', 'MFA',
            'Proxmox PBS', 'Monitoring'
        ])
        for audit, company in latest_audits:
            writer.writerow([
                company.name,
                company.nip or '',
                company.category.name if company.category else '',
                audit.audit_date.strftime('%Y-%m-%d') if audit.audit_date else '',
                audit.overall_score or '',
                audit.security_score or '',
                audit.collaboration_score or '',
                audit.completeness_score or '',
                audit.maturity_level or '',
                'Tak' if audit.has_azure_ad else 'Nie',
                'Tak' if audit.has_m365 else 'Nie',
                'Tak' if audit.has_edr else 'Nie',
                'Tak' if audit.has_mfa else 'Nie',
                'Tak' if audit.has_proxmox_pbs else 'Nie',
                audit.monitoring_solution or 'Brak'
            ])
        csv_buffer.seek(0)
        from flask import Response
        return Response(
            csv_buffer.getvalue(),
            mimetype='text/csv',
            headers={
                'Content-Disposition': 'attachment; filename=it_audit_export.csv',
                'Content-Type': 'text/csv; charset=utf-8'
            }
        )
    except Exception as e:
        logger.error(f"Error exporting IT audits: {e}")
        return jsonify({
            'success': False,
            'error': f'Błąd podczas eksportu: {str(e)}'
        }), 500
    finally:
        db_session.close()
# ============================================================
# RAPORTY
# ============================================================
@app.route('/raporty')
@login_required
def reports_index():
    """List the available reports (landing page for the /raporty section)."""
    # (id, icon, title, description, endpoint) — one tuple per report card.
    catalog = [
        ('staz-czlonkostwa', '🏆', 'Staż członkostwa w Izbie NORDA',
         'Zestawienie firm według daty przystąpienia do Izby. Pokazuje historię i lojalność członków.',
         'report_membership'),
        ('social-media', '📱', 'Pokrycie Social Media',
         'Analiza obecności firm w mediach społecznościowych: Facebook, Instagram, LinkedIn, YouTube, TikTok, X.',
         'report_social_media'),
        ('struktura-branzowa', '🏢', 'Struktura branżowa',
         'Rozkład firm według kategorii działalności: IT, Budownictwo, Usługi, Produkcja, Handel.',
         'report_categories'),
    ]
    reports = [
        {
            'id': report_id,
            'title': title,
            'description': description,
            'icon': icon,
            'url': url_for(endpoint),
        }
        for report_id, icon, title, description, endpoint in catalog
    ]
    return render_template('reports/index.html', reports=reports)
@app.route('/raporty/staz-czlonkostwa')
@login_required
def report_membership():
    """Report: membership tenure in the NORDA Chamber (oldest members first)."""
    from datetime import date
    db_session = SessionLocal()
    try:
        # Members with a known join date, sorted oldest-first.
        members = db_session.query(Company).filter(
            Company.member_since.isnot(None)
        ).order_by(Company.member_since.asc()).all()
        today = date.today()
        missing_date_count = db_session.query(Company).filter(
            Company.member_since.is_(None)
        ).count()
        # Average tenure in (fractional) years across dated members.
        if members:
            avg_years = sum(
                (today - member.member_since).days / 365.25
                for member in members
            ) / len(members)
        else:
            avg_years = 0
        stats = {
            'total_with_date': len(members),
            'total_without_date': missing_date_count,
            'oldest': members[0] if members else None,
            'newest': members[-1] if members else None,
            'avg_years': avg_years,
        }
        # Annotate every company with its whole-year membership length.
        for member in members:
            member.membership_years = int((today - member.member_since).days / 365.25)
        # Make sure the highlighted oldest member carries the annotation too.
        if stats['oldest']:
            stats['oldest'].membership_years = int((today - stats['oldest'].member_since).days / 365.25)
        return render_template(
            'reports/membership.html',
            companies=members,
            stats=stats,
            generated_at=datetime.now()
        )
    finally:
        db_session.close()
@app.route('/raporty/social-media')
@login_required
def report_social_media():
    """Report: social-media coverage across all member companies."""
    from sqlalchemy.orm import joinedload
    db_session = SessionLocal()
    try:
        # Every company with its social profiles eagerly loaded.
        companies = db_session.query(Company).options(
            joinedload(Company.social_media_profiles)
        ).order_by(Company.name).all()
        total = len(companies)
        platforms = ['facebook', 'instagram', 'linkedin', 'youtube', 'tiktok', 'twitter']
        # Per-platform coverage: absolute count and percentage of companies.
        platform_stats = {}
        for platform in platforms:
            profile_count = db_session.query(CompanySocialMedia).filter_by(
                platform=platform
            ).count()
            platform_stats[platform] = {
                'count': profile_count,
                'percent': round(profile_count / total * 100, 1) if companies else 0
            }
        # Companies with at least one social-media profile.
        with_social = [company for company in companies if company.social_media_profiles]
        stats = {
            'total_companies': total,
            'with_social': len(with_social),
            'without_social': total - len(with_social),
            'coverage_percent': round(
                len(with_social) / total * 100, 1
            ) if companies else 0
        }
        return render_template(
            'reports/social_media.html',
            companies=companies,
            platforms=platforms,
            platform_stats=platform_stats,
            stats=stats,
            generated_at=datetime.now()
        )
    finally:
        db_session.close()
@app.route('/raporty/struktura-branzowa')
@login_required
def report_categories():
    """Report: distribution of companies across business categories."""
    from sqlalchemy import func
    db_session = SessionLocal()
    try:
        # Group on the FK column (category_id), not on the relationship.
        category_counts = db_session.query(
            Company.category_id,
            func.count(Company.id).label('count')
        ).group_by(Company.category_id).all()
        total = sum(row.count for row in category_counts)
        # id -> name lookup, fetched once to avoid per-row queries.
        names_by_id = {category.id: category.name for category in db_session.query(Category).all()}
        categories = []
        for row in category_counts:
            if row.category_id:
                label = names_by_id.get(row.category_id, 'Brak kategorii')
            else:
                label = 'Brak kategorii'
            # Up to three example companies per category.
            sample = db_session.query(Company.name).filter(
                Company.category_id == row.category_id
            ).limit(3).all()
            categories.append({
                'name': label,
                'count': row.count,
                'percent': round(row.count / total * 100, 1) if total else 0,
                'examples': [entry.name for entry in sample]
            })
        # Largest categories first.
        categories.sort(key=lambda item: item['count'], reverse=True)
        return render_template(
            'reports/categories.html',
            categories=categories,
            total=total,
            generated_at=datetime.now()
        )
    finally:
        db_session.close()
# ============================================================
# RELEASE NOTES
# ============================================================
@app.route('/release-notes')
def release_notes():
    """
    Platform changelog page (public, no login required).

    Renders a hand-maintained, reverse-chronological list of releases.
    Each release dict may contain:
        version:  version tag shown in the header (e.g. 'v1.17.0')
        date:     human-readable release date (Polish)
        badges:   subset of ['new', 'improve', 'fix', 'security'] used
                  for the badge row in the template
        new / improve / fix / security:
                  lists of change descriptions (Polish), one bullet each

    Live company/category counts are read from the database and shown
    alongside the changelog.
    """
    # NOTE: release entries are maintained manually in code, newest first.
    releases = [
        {
            'version': 'v1.17.0',
            'date': '26 stycznia 2026',
            'badges': ['new'],
            'new': [
                'Aktualności: Nowa sekcja dla członków (Społeczność → Aktualności)',
                'Aktualności: Panel administracyjny do zarządzania ogłoszeniami',
                'Aktualności: Kategorie - ogólne, wydarzenia, okazje biznesowe, od członków, partnerstwa',
                'Aktualności: Statusy publikacji (szkic, opublikowane, zarchiwizowane)',
                'Aktualności: Przypinanie ważnych ogłoszeń na górze listy',
                'Aktualności: Linki zewnętrzne i załączniki PDF',
                'Aktualności: Licznik wyświetleń i informacja o autorze',
                'Pierwsze ogłoszenia: Baza noclegowa ARP, Konkurs Tytani Przedsiębiorczości',
            ],
        },
        {
            'version': 'v1.16.0',
            'date': '14 stycznia 2026',
            'badges': ['new', 'improve', 'fix'],
            'new': [
                'Raporty: Nowa sekcja dostępna z menu głównego dla zalogowanych',
                'Raporty: Staż członkostwa w Izbie NORDA (sortowanie od najstarszego)',
                'Raporty: Pokrycie Social Media (analiza 6 platform)',
                'Raporty: Struktura branżowa (rozkład firm wg kategorii)',
                'Email: Konfiguracja DKIM, SPF i DMARC dla nordabiznes.pl',
                'Email: Wysyłka z noreply@nordabiznes.pl (Microsoft 365)',
                'Profil firmy: Data przystąpienia do Izby NORDA',
                'Profil firmy: Karta "Członek Izby NORDA od" z datą i stażem',
                'Integracja: API CEIDG do pobierania danych JDG',
                'Bezpieczeństwo: GeoIP blocking - blokowanie krajów wysokiego ryzyka (RU, CN, KP, IR, BY, SY, VE, CU)',
                'Bezpieczeństwo: Panel z listą wszystkich mechanizmów ochrony (oceny gwiazdkowe)',
                'Bezpieczeństwo: Statystyki GeoIP (dzienne, miesięczne, roczne, breakdown per kraj)',
            ],
            'improve': [
                'Dane firm: Rok założenia uzupełniony dla 71 z 111 firm (64%)',
                'Dane firm: Automatyczne uzupełnianie z KRS i CEIDG',
                'Import dat przystąpienia: 57 firm z historią od 1997 roku',
            ],
            'fix': [
                'Analityka: Polskie znaki w interfejsie',
                'Analityka: Pełne nazwy użytkowników',
                'Analityka: Obsługa wartości NULL przy obliczaniu czasu',
            ],
        },
        {
            'version': 'v1.15.0',
            'date': '13 stycznia 2026',
            'badges': ['new', 'improve', 'fix'],
            'new': [
                'NordaGPT: Rozszerzony kontekst AI o rekomendacje, aktualności, kalendarz, B2B i forum',
                'NordaGPT: Kontekst AI zawiera teraz dane KRS (osoby), social media, audyty GBP i SEO',
                'NordaGPT: Klikalne linki URL i adresy email w odpowiedziach AI',
                'NordaGPT: Banner na stronie głównej z szybkim dostępem do chatu',
                'Kalendarz: Widok siatki miesięcznej z wydarzeniami',
                'Kalendarz: Banner wydarzenia na stronie głównej z listą uczestników',
                'Kalendarz: Quick RSVP - szybkie potwierdzenie udziału ze strony głównej',
                'Kalendarz: Klikalne badge uczestników (linki do profili firm)',
                'KRS Audit: Nowy panel /admin/krs-audit z parsowaniem dokumentów PDF',
                'KRS Audit: Progress bar dla audytu pojedynczego i wsadowego',
                'KRS Audit: Sortowalne kolumny i wyświetlanie wszystkich kodów PKD',
                'AI Enrichment: Przycisk wzbogacania danych AI na profilu firmy',
                'AI Enrichment: Web search do automatycznego zbierania informacji o firmie',
                'Analityka: Panel /admin/analytics ze śledzeniem sesji użytkowników',
                'Analityka: Ranking użytkowników wg aktywności (sesje, strony, kliknięcia)',
                'Analityka: Edycja danych użytkowników w panelu admina',
                'Profil firmy: Wszystkie kody PKD wyświetlane na stronie firmy',
                'Profil firmy: Dane właściciela CEIDG dla jednoosobowych działalności',
                'Profil firmy: Powiązanie kont użytkowników z osobami z KRS',
                'Profil firmy: Zielone badge dla osób zweryfikowanych w KRS',
                'Forum/B2B: Kategoria "test" do oznaczania testowych treści',
                'Forum/B2B: Przycisk toggle do ukrywania/pokazywania testów',
            ],
            'improve': [
                'Uczestnicy wydarzeń wyświetlani pionowo, alfabetycznie, z nazwami firm',
                'Lepsze formatowanie odpowiedzi AI (listy, linki, Markdown)',
                'Reorganizacja sekcji na profilu firmy',
                'Banner NordaGPT minimalizowalny',
            ],
            'fix': [
                'Rate limit logowania zwiększony z 5/h do 30/min',
                'Rate limit audytu SEO zwiększony z 10/h do 200/h',
                'AI Chat: Poprawna obsługa składni linków Markdown',
            ],
        },
        {
            'version': 'v1.14.0',
            'date': '12 stycznia 2026',
            'badges': ['new', 'improve', 'fix'],
            'new': [
                'Audyt GBP: Sekcja edukacyjna "Jak działa wizytówka Google?" z trzema kartami',
                'Audyt GBP: Przycisk "Zobacz wizytówkę Google" prowadzący do profilu w Maps',
                'Audyt GBP: Pełny audyt z Google Places API dla wszystkich 111 firm',
                'Audyty: Klikalne banery wyników prowadzą do szczegółowych stron audytu',
                'Audyty: Sekcje audytów inline na profilu firmy (SEO, GBP, Social Media, IT)',
            ],
            'improve': [
                'Ujednolicona 5-poziomowa skala kolorów dla wszystkich audytów',
                'Social Media: Wynik jako procent (np. 16%) zamiast liczby platform',
                'Normalizacja URL stron WWW firm (usunięcie duplikatów)',
                'Spójność kolorów między profilem firmy a szczegółami audytu',
            ],
            'fix': [
                'Audyt GBP: Kategorie Google wyświetlane po polsku (np. "Warsztat samochodowy" zamiast "Car Repair")',
                'ZOPK: Usunięcie duplikatów interesariuszy i naprawa polskich znaków',
                'ZOPK: Unique constraint na nazwę interesariusza (zapobiega duplikatom)',
                'SEO: Spójność wyników między profilem firmy a szczegółami audytu',
            ],
        },
        {
            'version': 'v1.13.0',
            'date': '11 stycznia 2026',
            'badges': ['new', 'improve'],
            'new': [
                'Mapa Powiązań - interaktywna wizualizacja powiązań firm i osób (D3.js)',
                'Filtry mapy: węzły (firmy/osoby), powiązania (zarząd/wspólnicy/prokurenci/JDG)',
                'Liczniki przy filtrach aktualizowane na bieżąco',
                'Profile osób (/osoba) - dane z KRS/CEIDG i portalu',
                'Wyszukiwarka osób - częściowe dopasowanie imienia/nazwiska',
                'Klikalne osoby w sekcji Zarząd i Wspólnicy na profilu firmy',
                'Logo firm w wynikach wyszukiwania (klikalne)',
                'AI Learning: System uczenia chatbota z feedbacku użytkowników',
                'AI Learning: Few-shot learning z pozytywnych odpowiedzi',
                'AI Learning: Przykłady startowe (seed) dla zimnego startu',
                'Panel AI Usage: Szczegółowy widok użycia AI per użytkownik',
                'Panel AI Usage: Klikalne nazwy użytkowników w rankingu',
                'Panel Analytics: Sekcja statusu uczenia AI',
            ],
            'improve': [
                'Mapa: fullscreen modal zamiast osobnej strony',
                'Mapa: etykiety ukryte domyślnie, widoczne przy hover',
                'Poprawka liczenia firm vs ról (6 firm z 9 rolami)',
                'Tooltip pokazuje wszystkie powiązania bez limitu',
                'Ukrycie sekcji Aktualności na profilach firm (tymczasowo)',
                'Zmiana nazwy ZOPK na ZOP Kaszubia (Zielony Okręg Przemysłowy Kaszubia)',
                'Stylizowane modale zamiast natywnych dialogów przeglądarki',
                'System toastów do komunikatów sukcesu/błędu',
                'Bezpieczeństwo: Usunięcie starych haseł z dokumentacji',
            ],
        },
        {
            'version': 'v1.11.0',
            'date': '10 stycznia 2026',
            'badges': ['new', 'improve', 'security'],
            'new': [
                'Forum: Kategorie tematów (Propozycja funkcji, Błąd, Pytanie, Ogłoszenie)',
                'Forum: Statusy zgłoszeń (Nowy, W realizacji, Rozwiązany, Odrzucony)',
                'Forum: Załączniki obrazów do tematów i odpowiedzi (JPG, PNG, GIF)',
                'Forum: Upload wielu plików jednocześnie (do 10 na odpowiedź)',
                'Forum: Drag & drop i wklejanie ze schowka (Ctrl+V)',
                'Panel admina: Statystyki i zmiana statusów tematów',
                'Dokumentacja architektury systemu z diagramami Mermaid (19 plików)',
                'Diagramy C4: System Context, Containers, Components, Deployment',
                'Dokumentacja przepływów danych: Auth, Search, AI Chat, SEO Audit',
                'Nowy README.md z kompletną dokumentacją projektu',
                'Dokumentacja bezpieczeństwa (docs/SECURITY.md)',
            ],
            'improve': [
                'Bezpieczny upload z walidacją magic bytes i usuwaniem EXIF',
                'Responsywna siatka podglądu załączników',
                'Filtry kategorii i statusów na liście tematów',
            ],
            'security': [
                'Usunięcie hardcoded credentials z kodu źródłowego',
                'Zmiana hasła PostgreSQL na produkcji',
                'Konfiguracja zmiennych środowiskowych w .env',
            ],
        },
        {
            'version': 'v1.9.0',
            'date': '9 stycznia 2026',
            'badges': ['new', 'improve'],
            'new': [
                'Panel Audyt GBP - przegląd kompletności profili Google Business',
                'Panel Audyt Social - przegląd pokrycia Social Media wszystkich firm',
                'Dodawanie użytkowników bezpośrednio w panelu admina',
                'Tworzenie użytkowników z AI - wklejanie tekstu lub screenshotów z dowolnego źródła',
            ],
            'improve': [
                'Wyświetlanie przypisanych firm przy użytkownikach',
                'Poprawki wyświetlania nazw użytkowników w panelu',
                'Nowy pasek Admin z pogrupowanymi funkcjami (Zarządzanie, Audyty, Analityka)',
            ],
        },
        {
            'version': 'v1.8.0',
            'date': '8 stycznia 2026',
            'badges': ['new', 'improve'],
            'new': [
                'Panel Audyt IT - kompleksowy audyt infrastruktury IT firm',
                'Eksport audytów IT do CSV',
            ],
            'improve': [
                'Poprawki w formularzach edycji audytu IT',
            ],
        },
        {
            'version': 'v1.7.0',
            'date': '6 stycznia 2026',
            'badges': ['new'],
            'new': [
                'Panel Audyt SEO - analiza wydajności stron www firm',
                'Integracja z Google PageSpeed Insights API',
            ],
        },
        {
            'version': 'v1.6.0',
            'date': '29 grudnia 2025',
            'badges': ['new'],
            'new': [
                'System newsów i wzmianek medialnych o firmach',
                'Panel moderacji newsów dla adminów',
                'Integracja z Brave Search API',
            ],
        },
        {
            'version': 'v1.5.0',
            'date': '15 grudnia 2025',
            'badges': ['new', 'improve'],
            'new': [
                'Panel Social Media - zarządzanie profilami społecznościowymi',
                'Weryfikacja aktywności profili Social Media',
            ],
            'improve': [
                'Ulepszony profil firmy z sekcją Social Media',
            ],
        },
        {
            'version': 'v1.4.0',
            'date': '1 grudnia 2025',
            'badges': ['new'],
            'new': [
                'System rekomendacji między firmami',
                'Panel składek członkowskich',
                'Kalendarz wydarzeń Norda Biznes',
            ],
        },
        {
            'version': 'v1.3.0',
            'date': '28 listopada 2025',
            'badges': ['new', 'improve'],
            'new': [
                'Chatbot AI z wiedzą o wszystkich firmach',
                'Wyszukiwarka firm z synonimami i fuzzy matching',
            ],
            'improve': [
                'Ulepszony SearchService z PostgreSQL FTS',
            ],
        },
        {
            'version': 'v1.2.0',
            'date': '25 listopada 2025',
            'badges': ['new'],
            'new': [
                'System wiadomości prywatnych między użytkownikami',
                'Powiadomienia o nowych wiadomościach',
            ],
        },
        {
            'version': 'v1.1.0',
            'date': '24 listopada 2025',
            'badges': ['new', 'improve'],
            'new': [
                'Rejestracja i logowanie użytkowników',
                'Profile użytkowników powiązane z firmami',
            ],
            'improve': [
                'Responsywny design na urządzenia mobilne',
            ],
        },
        {
            'version': 'v1.0.0',
            'date': '23 listopada 2025',
            'badges': ['new'],
            'new': [
                'Oficjalny start platformy Norda Biznes Hub',
                'Katalog 111 firm członkowskich',
                'Wyszukiwarka firm po nazwie, kategorii, usługach',
                'Profile firm z pełnymi danymi kontaktowymi',
            ],
        },
    ]
    # Dynamic statistics pulled from the database at request time.
    db = SessionLocal()
    try:
        stats = {
            'companies': db.query(Company).count(),
            'categories': db.query(Category).count(),
        }
    finally:
        db.close()
    return render_template('release_notes.html', releases=releases, stats=stats)
# ============================================================
# ZIELONY OKRĘG PRZEMYSŁOWY KASZUBIA (ZOPK)
# ============================================================
@app.route('/zopk')
@limiter.limit("60 per minute")  # SECURITY: Rate limit public ZOPK page
def zopk_index():
    """
    Public knowledge base page for ZOPK.

    Shows projects, stakeholders, approved news, resources, and timeline.
    """
    from database import ZOPKProject, ZOPKStakeholder, ZOPKNews, ZOPKResource, ZOPKMilestone
    db_session = SessionLocal()
    try:
        # Active projects in their configured display order.
        projects = db_session.query(ZOPKProject).filter(
            ZOPKProject.is_active == True
        ).order_by(ZOPKProject.sort_order, ZOPKProject.name).all()
        # Timeline: all milestones, earliest target date first.
        # (is_verified filtering arrives with a later migration.)
        milestones = db_session.query(ZOPKMilestone).order_by(
            ZOPKMilestone.target_date.asc()
        ).all()
        # Top-10 active stakeholders, most important first.
        stakeholders = db_session.query(ZOPKStakeholder).filter(
            ZOPKStakeholder.is_active == True
        ).order_by(ZOPKStakeholder.importance.desc(), ZOPKStakeholder.name).limit(10).all()
        # Approved news (manually approved + AI auto-approved),
        # expanded main-page view of 25 items.
        news_items = db_session.query(ZOPKNews).filter(
            ZOPKNews.status.in_(['approved', 'auto_approved'])
        ).order_by(ZOPKNews.published_at.desc()).limit(25).all()
        # Featured approved resources.
        resources = db_session.query(ZOPKResource).filter(
            ZOPKResource.status == 'approved'
        ).order_by(ZOPKResource.sort_order, ZOPKResource.created_at.desc()).limit(12).all()
        # Time-bucketed news counters (last 24h / 7d / 30d).
        from datetime import datetime, timedelta
        now = datetime.now()
        approved_filter = ZOPKNews.status.in_(['approved', 'auto_approved'])

        def _approved_news_since(cutoff):
            # Approved news published at or after the given moment.
            return db_session.query(ZOPKNews).filter(
                approved_filter,
                ZOPKNews.published_at >= cutoff
            ).count()

        total_news = db_session.query(ZOPKNews).filter(approved_filter).count()
        news_stats = {
            'total': total_news,
            'last_day': _approved_news_since(now - timedelta(days=1)),
            'last_week': _approved_news_since(now - timedelta(days=7)),
            'last_month': _approved_news_since(now - timedelta(days=30)),
        }
        # General counters for the header cards.
        stats = {
            'total_projects': len(projects),
            'total_news': total_news,
            'total_resources': db_session.query(ZOPKResource).filter(ZOPKResource.status == 'approved').count(),
            'total_stakeholders': db_session.query(ZOPKStakeholder).filter(ZOPKStakeholder.is_active == True).count()
        }
        return render_template('zopk/index.html',
                               projects=projects,
                               stakeholders=stakeholders,
                               news_items=news_items,
                               resources=resources,
                               stats=stats,
                               news_stats=news_stats,
                               milestones=milestones
                               )
    finally:
        db_session.close()
@app.route('/zopk/projekty/<slug>')
@limiter.limit("60 per minute")  # SECURITY: Rate limit public ZOPK project pages
def zopk_project_detail(slug):
    """Project detail page: news, resources and linked Norda companies."""
    from database import ZOPKProject, ZOPKNews, ZOPKResource, ZOPKCompanyLink
    db_session = SessionLocal()
    try:
        project = db_session.query(ZOPKProject).filter(ZOPKProject.slug == slug).first()
        if project is None:
            abort(404)
        # Latest 10 approved news items (manual + AI auto-approved).
        news_items = db_session.query(ZOPKNews).filter(
            ZOPKNews.project_id == project.id,
            ZOPKNews.status.in_(['approved', 'auto_approved'])
        ).order_by(ZOPKNews.published_at.desc()).limit(10).all()
        # Approved resources in their configured display order.
        resources = db_session.query(ZOPKResource).filter(
            ZOPKResource.project_id == project.id,
            ZOPKResource.status == 'approved'
        ).order_by(ZOPKResource.sort_order).all()
        # Norda member companies related to this project, most relevant first.
        company_links = db_session.query(ZOPKCompanyLink).filter(
            ZOPKCompanyLink.project_id == project.id
        ).order_by(ZOPKCompanyLink.relevance_score.desc()).all()
        return render_template('zopk/project_detail.html',
                               project=project,
                               news_items=news_items,
                               resources=resources,
                               company_links=company_links
                               )
    finally:
        db_session.close()
@app.route('/zopk/aktualnosci')
@limiter.limit("60 per minute")  # SECURITY: Rate limit public ZOPK news list
def zopk_news_list():
    """All ZOPK news - paginated, optionally filtered by project slug."""
    from database import ZOPKProject, ZOPKNews
    db_session = SessionLocal()
    try:
        page = request.args.get('page', 1, type=int)
        per_page = 20
        project_slug = request.args.get('projekt')
        news_query = db_session.query(ZOPKNews).filter(
            ZOPKNews.status.in_(['approved', 'auto_approved'])
        )
        # Optional ?projekt=<slug> filter; unknown slugs are silently ignored.
        if project_slug:
            project = db_session.query(ZOPKProject).filter(ZOPKProject.slug == project_slug).first()
            if project:
                news_query = news_query.filter(ZOPKNews.project_id == project.id)
        total = news_query.count()
        news_items = news_query.order_by(ZOPKNews.published_at.desc()).offset(
            (page - 1) * per_page
        ).limit(per_page).all()
        # Ceiling division for the page count.
        total_pages = (total + per_page - 1) // per_page
        # Active projects for the filter dropdown.
        projects = db_session.query(ZOPKProject).filter(
            ZOPKProject.is_active == True
        ).order_by(ZOPKProject.sort_order).all()
        return render_template('zopk/news_list.html',
                               news_items=news_items,
                               projects=projects,
                               current_project=project_slug,
                               page=page,
                               total_pages=total_pages,
                               total=total
                               )
    finally:
        db_session.close()
@app.route('/admin/zopk')
@login_required
def admin_zopk():
    """Admin dashboard for ZOPK management.

    Renders aggregate statistics, a filtered/paginated news moderation list,
    all projects and the five most recent fetch jobs.

    Query params:
        page (int): 1-based page number for the news list.
        status (str): one of pending/approved/rejected/ai_relevant/
            ai_not_relevant/ai_not_evaluated/all (default 'pending').
        min_year (int): cutoff year for hiding old news (default 2024).
        show_old (str): 'true' disables the date cutoff.

    Non-admin users are redirected to the dashboard with a flash message.
    """
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))
    from database import ZOPKProject, ZOPKStakeholder, ZOPKNews, ZOPKResource, ZOPKNewsFetchJob
    from datetime import datetime
    db = SessionLocal()
    try:
        # Pagination and filtering parameters
        page = request.args.get('page', 1, type=int)
        per_page = 20
        status_filter = request.args.get('status', 'pending')  # pending, approved, rejected, all (plus ai_* variants below)
        min_year = request.args.get('min_year', 2024, type=int)  # ZOPK started in 2024
        show_old = request.args.get('show_old', 'false') == 'true'
        # ZOPK project started in 2024 - news from before this year are likely irrelevant
        min_date = datetime(min_year, 1, 1) if not show_old else None
        # Dashboard headline stats: each entry is an independent COUNT query.
        stats = {
            'total_projects': db.query(ZOPKProject).count(),
            'total_stakeholders': db.query(ZOPKStakeholder).count(),
            'total_news': db.query(ZOPKNews).count(),
            'pending_news': db.query(ZOPKNews).filter(ZOPKNews.status == 'pending').count(),
            'approved_news': db.query(ZOPKNews).filter(ZOPKNews.status.in_(['approved', 'auto_approved'])).count(),
            'rejected_news': db.query(ZOPKNews).filter(ZOPKNews.status == 'rejected').count(),
            'total_resources': db.query(ZOPKResource).count(),
            # Count old news (before min_year) - likely irrelevant; 0 when old news are shown
            'old_news': db.query(ZOPKNews).filter(
                ZOPKNews.status == 'pending',
                ZOPKNews.published_at < datetime(min_year, 1, 1)
            ).count() if not show_old else 0,
            # AI evaluation stats
            'ai_relevant': db.query(ZOPKNews).filter(ZOPKNews.ai_relevant == True).count(),
            'ai_not_relevant': db.query(ZOPKNews).filter(ZOPKNews.ai_relevant == False).count(),
            'ai_not_evaluated': db.query(ZOPKNews).filter(
                ZOPKNews.status == 'pending',
                ZOPKNews.ai_relevant.is_(None)
            ).count(),
            # Items with ai_relevant but missing score (need upgrade to 1-5 stars)
            'ai_missing_score': db.query(ZOPKNews).filter(
                ZOPKNews.ai_relevant.isnot(None),
                ZOPKNews.ai_relevance_score.is_(None)
            ).count()
        }
        # Build news query with filters
        news_query = db.query(ZOPKNews)
        # Status filter (including AI-based filters)
        if status_filter == 'pending':
            news_query = news_query.filter(ZOPKNews.status == 'pending')
        elif status_filter == 'approved':
            news_query = news_query.filter(ZOPKNews.status.in_(['approved', 'auto_approved']))
        elif status_filter == 'rejected':
            news_query = news_query.filter(ZOPKNews.status == 'rejected')
        elif status_filter == 'ai_relevant':
            # AI evaluated as relevant (regardless of status)
            news_query = news_query.filter(ZOPKNews.ai_relevant == True)
        elif status_filter == 'ai_not_relevant':
            # AI evaluated as NOT relevant
            news_query = news_query.filter(ZOPKNews.ai_relevant == False)
        elif status_filter == 'ai_not_evaluated':
            # Not yet evaluated by AI
            news_query = news_query.filter(ZOPKNews.ai_relevant.is_(None))
        # 'all' - no status filter
        # Date filter - exclude old news by default; items with no publish
        # date are kept so undated entries are never silently hidden.
        if min_date and not show_old:
            news_query = news_query.filter(
                (ZOPKNews.published_at >= min_date) | (ZOPKNews.published_at.is_(None))
            )
        # Order and count
        total_news_filtered = news_query.count()
        total_pages = (total_news_filtered + per_page - 1) // per_page
        # Paginate; undated items sort last, created_at breaks ties.
        news_items = news_query.order_by(
            ZOPKNews.published_at.desc().nullslast(),
            ZOPKNews.created_at.desc()
        ).offset((page - 1) * per_page).limit(per_page).all()
        # All projects
        projects = db.query(ZOPKProject).order_by(ZOPKProject.sort_order).all()
        # Recent fetch jobs
        fetch_jobs = db.query(ZOPKNewsFetchJob).order_by(
            ZOPKNewsFetchJob.created_at.desc()
        ).limit(5).all()
        return render_template('admin/zopk_dashboard.html',
                               stats=stats,
                               news_items=news_items,
                               projects=projects,
                               fetch_jobs=fetch_jobs,
                               # Pagination
                               current_page=page,
                               total_pages=total_pages,
                               total_news_filtered=total_news_filtered,
                               per_page=per_page,
                               # Filters
                               status_filter=status_filter,
                               min_year=min_year,
                               show_old=show_old
                               )
    finally:
        db.close()
@app.route('/admin/zopk/news')
@login_required
def admin_zopk_news():
    """Admin listing of ZOPK news with status/star filters and sortable columns."""
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))
    from database import ZOPKProject, ZOPKNews
    from sqlalchemy import desc, asc, nullslast

    PER_PAGE = 50
    db = SessionLocal()
    try:
        page = request.args.get('page', 1, type=int)
        status = request.args.get('status', 'all')
        stars = request.args.get('stars', 'all')    # 'all', '1'-'5' or 'none'
        sort_by = request.args.get('sort', 'date')  # 'date' | 'score' | 'title'
        sort_dir = request.args.get('dir', 'desc')  # 'asc' | 'desc'

        news_query = db.query(ZOPKNews)

        # Status filter ('all' leaves the query untouched).
        if status != 'all':
            news_query = news_query.filter(ZOPKNews.status == status)

        # Star-rating filter: 'none' selects rows never scored by AI.
        if stars == 'none':
            news_query = news_query.filter(ZOPKNews.ai_relevance_score.is_(None))
        elif stars in ('1', '2', '3', '4', '5'):
            news_query = news_query.filter(ZOPKNews.ai_relevance_score == int(stars))

        # Resolve direction first, then the column to order by.
        direction = desc if sort_dir == 'desc' else asc
        if sort_by == 'score':
            # NULL scores go last so AI-evaluated items lead the list.
            news_query = news_query.order_by(nullslast(direction(ZOPKNews.ai_relevance_score)))
        elif sort_by == 'title':
            news_query = news_query.order_by(direction(ZOPKNews.title))
        else:
            # Default sort key: publication date.
            news_query = news_query.order_by(direction(ZOPKNews.published_at))

        total = news_query.count()
        news_items = news_query.offset((page - 1) * PER_PAGE).limit(PER_PAGE).all()
        total_pages = (total + PER_PAGE - 1) // PER_PAGE
        projects = db.query(ZOPKProject).order_by(ZOPKProject.sort_order).all()

        return render_template('admin/zopk_news.html',
                               news_items=news_items,
                               projects=projects,
                               page=page,
                               total_pages=total_pages,
                               total=total,
                               current_status=status,
                               current_stars=stars,
                               current_sort=sort_by,
                               current_dir=sort_dir
                               )
    finally:
        db.close()
@app.route('/admin/zopk/news/<int:news_id>/approve', methods=['POST'])
@login_required
def admin_zopk_news_approve(news_id):
    """Mark a single ZOPK news item as approved (admin only, JSON response)."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    from database import ZOPKNews
    db = SessionLocal()
    try:
        item = db.query(ZOPKNews).filter(ZOPKNews.id == news_id).first()
        if item is None:
            return jsonify({'success': False, 'error': 'Nie znaleziono newsa'}), 404
        # Record the decision together with who made it and when.
        item.status = 'approved'
        item.moderated_by = current_user.id
        item.moderated_at = datetime.now()
        db.commit()
        return jsonify({'success': True, 'message': 'News został zatwierdzony'})
    except Exception as e:
        db.rollback()
        logger.error(f"Error approving ZOPK news {news_id}: {e}")
        return jsonify({'success': False, 'error': 'Wystąpił błąd podczas zatwierdzania'}), 500
    finally:
        db.close()
@app.route('/admin/zopk/news/<int:news_id>/reject', methods=['POST'])
@login_required
def admin_zopk_news_reject(news_id):
    """Mark a single ZOPK news item as rejected, storing an optional reason (admin only)."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    from database import ZOPKNews
    db = SessionLocal()
    try:
        payload = request.get_json() or {}
        rejection_reason = payload.get('reason', '')
        item = db.query(ZOPKNews).filter(ZOPKNews.id == news_id).first()
        if item is None:
            return jsonify({'success': False, 'error': 'Nie znaleziono newsa'}), 404
        # Record the decision, the moderator and the (possibly empty) reason.
        item.status = 'rejected'
        item.moderated_by = current_user.id
        item.moderated_at = datetime.now()
        item.rejection_reason = rejection_reason
        db.commit()
        return jsonify({'success': True, 'message': 'News został odrzucony'})
    except Exception as e:
        db.rollback()
        logger.error(f"Error rejecting ZOPK news {news_id}: {e}")
        return jsonify({'success': False, 'error': 'Wystąpił błąd podczas odrzucania'}), 500
    finally:
        db.close()
@app.route('/admin/zopk/news/add', methods=['POST'])
@login_required
def admin_zopk_news_add():
    """Manually add a ZOPK news item (admin only).

    Expects JSON: title and url (required), description, source_name,
    project_id (optional, must reference an existing project). Manual
    entries are stored pre-approved with the current admin as moderator.

    Returns JSON {'success': True, 'news_id': ...} or an error payload
    with status 400 (validation), 403 (not admin) or 500 (DB failure).
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    from database import ZOPKNews, ZOPKProject
    import hashlib
    db = SessionLocal()
    try:
        data = request.get_json() or {}
        title = data.get('title', '').strip()
        url = data.get('url', '').strip()
        description = data.get('description', '').strip()
        source_name = data.get('source_name', '').strip()
        project_id = data.get('project_id')
        if not title or not url:
            return jsonify({'success': False, 'error': 'Tytuł i URL są wymagane'}), 400
        # SECURITY: only plain web protocols; blocks javascript:, data:, file: etc.
        from urllib.parse import urlparse
        parsed = urlparse(url)
        allowed_protocols = ('http', 'https')
        if parsed.scheme.lower() not in allowed_protocols:
            return jsonify({'success': False, 'error': 'Nieprawidłowy protokół URL. Dozwolone: http, https'}), 400
        # SECURITY: project_id must be an integer referencing an existing project.
        if project_id:
            try:
                project_id = int(project_id)
                project = db.query(ZOPKProject).filter(ZOPKProject.id == project_id).first()
                if not project:
                    return jsonify({'success': False, 'error': 'Nieprawidłowy ID projektu'}), 400
            except (ValueError, TypeError):
                return jsonify({'success': False, 'error': 'ID projektu musi być liczbą'}), 400
        else:
            project_id = None
        # SHA-256 of the URL serves as the deduplication key.
        url_hash = hashlib.sha256(url.encode()).hexdigest()
        # Check if URL already exists
        existing = db.query(ZOPKNews).filter(ZOPKNews.url_hash == url_hash).first()
        if existing:
            return jsonify({'success': False, 'error': 'Ten artykuł już istnieje w bazie'}), 400
        # BUGFIX: strip only a *leading* "www." — str.replace('www.', '')
        # would also mangle hosts containing "www." elsewhere in the name.
        netloc = parsed.netloc
        source_domain = netloc[4:] if netloc.startswith('www.') else netloc
        news = ZOPKNews(
            title=title,
            url=url,
            url_hash=url_hash,
            description=description,
            source_name=source_name or source_domain,
            source_domain=source_domain,
            source_type='manual',
            status='approved',  # Manual entries are auto-approved
            moderated_by=current_user.id,
            moderated_at=datetime.now(),
            published_at=datetime.now(),
            project_id=project_id
        )
        db.add(news)
        db.commit()
        return jsonify({
            'success': True,
            'message': 'News został dodany',
            'news_id': news.id
        })
    except Exception as e:
        db.rollback()
        logger.error(f"Error adding ZOPK news: {e}")
        return jsonify({'success': False, 'error': 'Wystąpił błąd podczas dodawania newsa'}), 500
    finally:
        db.close()
@app.route('/admin/zopk/news/reject-old', methods=['POST'])
@login_required
def admin_zopk_reject_old_news():
    """Bulk-reject pending news published before a cutoff year.

    ZOPK did not exist before 2024, so older articles are almost
    certainly irrelevant. JSON body: min_year (default 2024).
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    from database import ZOPKNews
    db = SessionLocal()
    try:
        payload = request.get_json() or {}
        min_year = payload.get('min_year', 2024)
        cutoff = datetime(min_year, 1, 1)
        # Only pending items with a publish date strictly before the cutoff.
        stale_items = db.query(ZOPKNews).filter(
            ZOPKNews.status == 'pending',
            ZOPKNews.published_at < cutoff
        ).all()
        rejected_count = len(stale_items)
        for item in stale_items:
            item.status = 'rejected'
            item.moderated_by = current_user.id
            item.moderated_at = datetime.now()
            item.rejection_reason = f'Automatycznie odrzucony - artykuł sprzed {min_year} roku (ZOP Kaszubia powstał w 2024)'
        db.commit()
        return jsonify({
            'success': True,
            'message': f'Odrzucono {rejected_count} newsów sprzed {min_year} roku',
            'count': rejected_count
        })
    except Exception as e:
        db.rollback()
        logger.error(f"Error rejecting old ZOPK news: {e}")
        return jsonify({'success': False, 'error': 'Wystąpił błąd podczas odrzucania starych newsów'}), 500
    finally:
        db.close()
@app.route('/admin/zopk/news/star-counts')
@login_required
def admin_zopk_news_star_counts():
    """Get counts of pending news items grouped by AI star rating.

    Returns JSON {'success': True, 'counts': {...}} where keys 1-5 are
    star ratings and key 0 counts pending items with no AI evaluation
    (NULL ai_relevance_score). Admin-only (403 otherwise).
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    from database import ZOPKNews
    from sqlalchemy import func
    db = SessionLocal()
    try:
        # PERF: one GROUP BY query instead of six separate COUNT queries.
        rows = db.query(
            ZOPKNews.ai_relevance_score,
            func.count(ZOPKNews.id)
        ).filter(
            ZOPKNews.status == 'pending'
        ).group_by(ZOPKNews.ai_relevance_score).all()
        # Pre-seed all expected buckets with 0 so missing groups still appear.
        counts = {star: 0 for star in range(1, 6)}
        counts[0] = 0
        for score, count in rows:
            key = 0 if score is None else score
            # Scores outside 1-5 were never counted before; keep ignoring them.
            if key in counts:
                counts[key] = count
        return jsonify({
            'success': True,
            'counts': counts
        })
    except Exception as e:
        logger.error(f"Error getting ZOPK news star counts: {e}")
        return jsonify({'success': False, 'error': 'Wystąpił błąd'}), 500
    finally:
        db.close()
@app.route('/admin/zopk/news/reject-by-stars', methods=['POST'])
@login_required
def admin_zopk_reject_by_stars():
    """Reject all pending news items with specified star ratings.

    JSON body:
        stars (list[int]): ratings to reject; 0 means "no AI rating" (NULL).
        reason (str): optional rejection reason; a default is generated
            when empty.

    Returns JSON with the number of rejected items. Admin-only (403).
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    from database import ZOPKNews
    db = SessionLocal()
    try:
        data = request.get_json() or {}
        stars = data.get('stars', [])  # List of star ratings to reject (0 = no rating)
        reason = data.get('reason', '')
        if not stars:
            return jsonify({'success': False, 'error': 'Nie wybrano ocen do odrzucenia'}), 400
        # Keep only recognized values; anything else is silently dropped.
        valid_stars = [s for s in stars if s in [0, 1, 2, 3, 4, 5]]
        if not valid_stars:
            return jsonify({'success': False, 'error': 'Nieprawidłowe oceny gwiazdkowe'}), 400
        # Build an OR of one condition per selected rating; 0 maps to NULL score.
        from sqlalchemy import or_
        conditions = []
        for star in valid_stars:
            if star == 0:
                conditions.append(ZOPKNews.ai_relevance_score.is_(None))
            else:
                conditions.append(ZOPKNews.ai_relevance_score == star)
        news_to_reject = db.query(ZOPKNews).filter(
            ZOPKNews.status == 'pending',
            or_(*conditions)
        ).all()
        count = len(news_to_reject)
        # NOTE(review): the "str(s) + ''" below concatenates an empty string —
        # looks like a lost suffix glyph (e.g. a star character); confirm the
        # intended label before changing it.
        default_reason = f"Masowo odrzucone - oceny: {', '.join(str(s) + '' if s > 0 else 'brak oceny' for s in valid_stars)}"
        final_reason = reason if reason else default_reason
        for news in news_to_reject:
            news.status = 'rejected'
            news.moderated_by = current_user.id
            news.moderated_at = datetime.now()
            news.rejection_reason = final_reason
        db.commit()
        logger.info(f"Admin {current_user.email} rejected {count} ZOPK news with stars {valid_stars}")
        return jsonify({
            'success': True,
            'message': f'Odrzucono {count} artykułów',
            'count': count
        })
    except Exception as e:
        db.rollback()
        logger.error(f"Error rejecting ZOPK news by stars: {e}")
        return jsonify({'success': False, 'error': 'Wystąpił błąd podczas odrzucania'}), 500
    finally:
        db.close()
@app.route('/admin/zopk/news/evaluate-ai', methods=['POST'])
@login_required
def admin_zopk_evaluate_ai():
    """Run Gemini relevance evaluation over pending ZOPK news (admin only).

    JSON body: limit (default 50, kept small to respect API quotas).
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    from zopk_news_service import evaluate_pending_news
    db = SessionLocal()
    try:
        payload = request.get_json() or {}
        batch_limit = payload.get('limit', 50)  # Max 50 to avoid API limits
        outcome = evaluate_pending_news(db, limit=batch_limit, user_id=current_user.id)
        # Forward the counters (defaulting to 0) plus the service message.
        response = {'success': True}
        for key in ('total_evaluated', 'relevant_count', 'not_relevant_count', 'errors'):
            response[key] = outcome.get(key, 0)
        response['message'] = outcome.get('message', '')
        return jsonify(response)
    except Exception as e:
        db.rollback()
        logger.error(f"Error evaluating ZOPK news with AI: {e}")
        return jsonify({'success': False, 'error': 'Wystąpił błąd podczas oceny AI'}), 500
    finally:
        db.close()
@app.route('/admin/zopk/news/reevaluate-scores', methods=['POST'])
@login_required
def admin_zopk_reevaluate_scores():
    """Re-score news that have an ai_relevant flag but no 1-5 star ai_relevance_score.

    JSON body: limit (default 50, kept small to respect API quotas).
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    from zopk_news_service import reevaluate_news_without_score
    db = SessionLocal()
    try:
        payload = request.get_json() or {}
        batch_limit = payload.get('limit', 50)  # Max 50 to avoid API limits
        outcome = reevaluate_news_without_score(db, limit=batch_limit, user_id=current_user.id)
        # Forward the counters (defaulting to 0) plus the service message.
        response = {'success': True}
        for key in ('total_evaluated', 'relevant_count', 'not_relevant_count', 'errors'):
            response[key] = outcome.get(key, 0)
        response['message'] = outcome.get('message', '')
        return jsonify(response)
    except Exception as e:
        db.rollback()
        logger.error(f"Error reevaluating ZOPK news scores: {e}")
        return jsonify({'success': False, 'error': 'Wystąpił błąd podczas ponownej oceny'}), 500
    finally:
        db.close()
@app.route('/admin/zopk/news/reevaluate-low-scores', methods=['POST'])
@login_required
def admin_zopk_reevaluate_low_scores():
    """Re-evaluate low-scored (1-2 star) news that mention key ZOPK topics.

    Useful after extending the AI prompt with new topics (Via Pomerania,
    S6, NORDA): articles scored low before those topics were recognized
    may be upgraded on re-evaluation. JSON body: limit (default 50).
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    from zopk_news_service import reevaluate_low_score_news
    db = SessionLocal()
    try:
        payload = request.get_json() or {}
        batch_limit = payload.get('limit', 50)  # Max 50 to avoid API limits
        outcome = reevaluate_low_score_news(db, limit=batch_limit, user_id=current_user.id)
        # Forward numeric counters (default 0), message and per-item details.
        response = {'success': True}
        for key in ('total_evaluated', 'upgraded', 'downgraded', 'unchanged', 'errors'):
            response[key] = outcome.get(key, 0)
        response['message'] = outcome.get('message', '')
        response['details'] = outcome.get('details', [])
        return jsonify(response)
    except Exception as e:
        db.rollback()
        logger.error(f"Error reevaluating low-score ZOPK news: {e}")
        return jsonify({'success': False, 'error': 'Wystąpił błąd podczas ponownej oceny'}), 500
    finally:
        db.close()
@app.route('/api/zopk/search-news', methods=['POST'])
@login_required
def api_zopk_search_news():
    """
    Search for ZOPK news using multiple sources with cross-verification.

    Sources:
    - Brave Search API
    - Google News RSS
    - Local media RSS (trojmiasto.pl, dziennikbaltycki.pl)

    Cross-verification:
    - 1 source → pending (manual review)
    - 3+ sources → auto_approved

    JSON body: query (optional search phrase). Records a ZOPKNewsFetchJob
    for auditing and returns detailed result counters. Admin-only (403).
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    from database import ZOPKNewsFetchJob
    from zopk_news_service import ZOPKNewsService
    db = SessionLocal()
    # BUGFIX: initialize before the try so the error handler can safely test
    # whether the job record was created before the failure occurred.
    fetch_job = None
    try:
        data = request.get_json() or {}
        query = data.get('query', 'Zielony Okręg Przemysłowy Kaszubia')
        # Create fetch job record (uuid is imported at module level)
        job_id = str(uuid.uuid4())[:8]
        fetch_job = ZOPKNewsFetchJob(
            job_id=job_id,
            search_query=query,
            search_api='multi_source',  # Brave + RSS
            triggered_by='admin',
            triggered_by_user=current_user.id,
            status='running',
            started_at=datetime.now()
        )
        db.add(fetch_job)
        db.commit()
        # Use multi-source service
        service = ZOPKNewsService(db)
        results = service.search_all_sources(query)
        # Update fetch job
        fetch_job.results_found = results['total_found']
        fetch_job.results_new = results['saved_new']
        fetch_job.results_approved = results['auto_approved']
        fetch_job.status = 'completed'
        fetch_job.completed_at = datetime.now()
        db.commit()
        # Build detailed message
        source_info = ', '.join(f"{k}: {v}" for k, v in results['source_stats'].items() if v > 0)
        return jsonify({
            'success': True,
            'message': f"Znaleziono {results['total_found']} wyników z {len(results['source_stats'])} źródeł. "
                       f"Dodano {results['saved_new']} nowych, zaktualizowano {results['updated_existing']}. "
                       f"Auto-zatwierdzono: {results['auto_approved']}",
            'job_id': job_id,
            'total_found': results['total_found'],
            'unique_items': results['unique_items'],
            'saved_new': results['saved_new'],
            'updated_existing': results['updated_existing'],
            'auto_approved': results['auto_approved'],
            'ai_approved': results.get('ai_approved', 0),
            'ai_rejected': results.get('ai_rejected', 0),
            'blacklisted': results.get('blacklisted', 0),
            'keyword_filtered': results.get('keyword_filtered', 0),
            'sent_to_ai': results.get('sent_to_ai', 0),
            'duplicates': results.get('duplicates', 0),
            'processing_time': results.get('processing_time', 0),
            'knowledge_entities_created': results.get('knowledge_entities_created', 0),
            'source_stats': results['source_stats'],
            'process_log': results.get('process_log', []),
            'auto_approved_articles': results.get('auto_approved_articles', []),
            'ai_rejected_articles': results.get('ai_rejected_articles', [])
        })
    except Exception as e:
        db.rollback()
        logger.error(f"ZOPK news search error: {e}")
        # Mark the job failed only if it was actually created; previously a
        # bare except silently swallowed the resulting UnboundLocalError.
        if fetch_job is not None:
            try:
                fetch_job.status = 'failed'
                fetch_job.error_message = str(e)  # Keep internal log
                fetch_job.completed_at = datetime.now()
                db.commit()
            except Exception:
                # Best-effort: failing to record the failure must not mask
                # the original error response.
                pass
        return jsonify({'success': False, 'error': 'Wystąpił błąd podczas wyszukiwania newsów'}), 500
    finally:
        db.close()
# ============================================================
# ZOPK CONTENT SCRAPING (Knowledge Base Pipeline)
# ============================================================
@app.route('/admin/zopk/news/scrape-stats')
@login_required
def admin_zopk_scrape_stats():
    """Return JSON statistics for the article-content scraping pipeline.

    Fields (from get_scrape_stats): total_approved, scraped, pending,
    failed, skipped, ready_for_extraction. Admin-only (403 otherwise).
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    from zopk_content_scraper import get_scrape_stats
    db = SessionLocal()
    try:
        payload = {'success': True}
        payload.update(get_scrape_stats(db))
        return jsonify(payload)
    except Exception as e:
        logger.error(f"Error getting scrape stats: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
@app.route('/admin/zopk/news/scrape-content', methods=['POST'])
@login_required
def admin_zopk_scrape_content():
    """Batch-scrape article content from source URLs (admin only).

    JSON body: limit (default 50, hard cap 100), force (re-scrape
    already-scraped articles when true). Returns scraped/failed/skipped
    counts plus error details and scraped article info.
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    from zopk_content_scraper import ZOPKContentScraper
    db = SessionLocal()
    try:
        payload = request.get_json() or {}
        batch_limit = min(payload.get('limit', 50), 100)  # never more than 100 per request
        force = payload.get('force', False)
        outcome = ZOPKContentScraper(db, user_id=current_user.id).batch_scrape(
            limit=batch_limit, force=force
        )
        summary = (f"Scraping zakończony: {outcome['scraped']} pobrano, "
                   f"{outcome['failed']} błędów, {outcome['skipped']} pominięto")
        return jsonify({'success': True, 'message': summary, **outcome})
    except Exception as e:
        db.rollback()
        logger.error(f"Error in batch scrape: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
@app.route('/admin/zopk/news/<int:news_id>/scrape', methods=['POST'])
@login_required
def admin_zopk_scrape_single(news_id):
    """Scrape content for a single news article (admin only, JSON response)."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    from zopk_content_scraper import ZOPKContentScraper
    db = SessionLocal()
    try:
        outcome = ZOPKContentScraper(db, user_id=current_user.id).scrape_article(news_id)
        # Guard clause: report scraper failures with a 400 and its status.
        if not outcome.success:
            return jsonify({
                'success': False,
                'error': outcome.error,
                'status': outcome.status
            }), 400
        return jsonify({
            'success': True,
            'message': f"Pobrano treść: {outcome.word_count} słów",
            'word_count': outcome.word_count,
            'status': outcome.status
        })
    except Exception as e:
        db.rollback()
        logger.error(f"Error scraping article {news_id}: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
# ============================================================
# ZOPK KNOWLEDGE EXTRACTION (AI-powered)
# ============================================================
@app.route('/admin/zopk/knowledge/stats')
@login_required
def admin_zopk_knowledge_stats():
    """Return JSON statistics for the knowledge-extraction pipeline.

    Fields (from get_knowledge_stats): articles (approved/scraped/extracted),
    knowledge_base (chunks, facts, entities, relations) and top_entities.
    Admin-only (403 otherwise).
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    from zopk_knowledge_service import get_knowledge_stats
    db = SessionLocal()
    try:
        payload = {'success': True}
        payload.update(get_knowledge_stats(db))
        return jsonify(payload)
    except Exception as e:
        logger.error(f"Error getting knowledge stats: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
@app.route('/admin/zopk/knowledge/extract', methods=['POST'])
@login_required
def admin_zopk_knowledge_extract():
    """Batch-run AI knowledge extraction over scraped articles (admin only).

    JSON body: limit (default 50, hard cap 100). Returns counts of
    processed/failed articles and of chunks/facts/entities/relations
    created.
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    from zopk_knowledge_service import ZOPKKnowledgeService
    db = SessionLocal()
    try:
        payload = request.get_json() or {}
        batch_limit = min(payload.get('limit', 50), 100)
        outcome = ZOPKKnowledgeService(db, user_id=current_user.id).batch_extract(limit=batch_limit)
        summary = (
            f"Ekstrakcja zakończona: {outcome['success']}/{outcome['total']} artykułów. "
            f"Utworzono: {outcome['chunks_created']} chunks, {outcome['facts_created']} faktów, "
            f"{outcome['entities_created']} encji, {outcome['relations_created']} relacji."
        )
        return jsonify({
            'success': True,
            'message': summary,
            # Deliberately exposed as 'processed' so it cannot clobber 'success'.
            'processed': outcome['success'],
            'total': outcome['total'],
            'failed': outcome['failed'],
            'chunks_created': outcome['chunks_created'],
            'facts_created': outcome['facts_created'],
            'entities_created': outcome['entities_created'],
            'relations_created': outcome['relations_created']
        })
    except Exception as e:
        db.rollback()
        logger.error(f"Error in knowledge extraction: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
@app.route('/admin/zopk/knowledge/extract/<int:news_id>', methods=['POST'])
@login_required
def admin_zopk_knowledge_extract_single(news_id):
    """Extract knowledge (chunks/facts/entities/relations) from one article (admin only)."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    from zopk_knowledge_service import ZOPKKnowledgeService
    db = SessionLocal()
    try:
        outcome = ZOPKKnowledgeService(db, user_id=current_user.id).extract_from_news(news_id)
        # Guard clause: surface extraction failures as a 400 with the error text.
        if not outcome.success:
            return jsonify({
                'success': False,
                'error': outcome.error
            }), 400
        return jsonify({
            'success': True,
            'message': f"Wyekstrahowano: {outcome.chunks_created} chunks, "
                       f"{outcome.facts_created} faktów, {outcome.entities_created} encji",
            'chunks_created': outcome.chunks_created,
            'facts_created': outcome.facts_created,
            'entities_created': outcome.entities_created,
            'relations_created': outcome.relations_created,
            'processing_time': outcome.processing_time
        })
    except Exception as e:
        db.rollback()
        logger.error(f"Error extracting from news {news_id}: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
@app.route('/admin/zopk/knowledge/embeddings', methods=['POST'])
@login_required
def admin_zopk_generate_embeddings():
    """Generate embeddings for knowledge chunks that lack them (admin only).

    JSON body: limit (default 100, hard cap 500).
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    from zopk_knowledge_service import generate_chunk_embeddings
    db = SessionLocal()
    try:
        payload = request.get_json() or {}
        batch_limit = min(payload.get('limit', 100), 500)  # hard cap per request
        outcome = generate_chunk_embeddings(db, limit=batch_limit, user_id=current_user.id)
        return jsonify({
            'success': True,
            'message': f"Wygenerowano embeddings: {outcome['success']}/{outcome['total']}",
            # Exposed as 'generated' so it cannot clobber 'success'.
            'generated': outcome['success'],
            'total': outcome['total'],
            'failed': outcome['failed']
        })
    except Exception as e:
        db.rollback()
        logger.error(f"Error generating embeddings: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
# ============================================================
# ZOPK SSE ENDPOINTS (Server-Sent Events for Progress Tracking)
# ============================================================
def sse_progress_generator(operation_func, db, **kwargs):
    """
    Generic SSE generator for progress tracking.

    Runs `operation_func` in a background thread and streams each progress
    update it reports (a dataclass instance) as an SSE `data:` event, then
    emits a final `{'type': 'result', ...}` event with the operation's
    return value (a dict). If the operation raises, the final event
    carries {'error': str(e)} instead.

    Args:
        operation_func: Callable accepting a `progress_callback` kwarg
            (called with dataclass progress updates) plus **kwargs;
            returns a dict merged into the final event.
        db: Database session (unused here; kept for interface compatibility).
        **kwargs: Forwarded to operation_func.

    Yields:
        SSE-formatted strings ("data: {...}\\n\\n").
    """
    import json
    import threading
    import time
    from collections import deque
    from dataclasses import asdict

    # deque.append/popleft are atomic in CPython, so the worker thread can
    # publish updates while this generator drains them without a lock.
    # (The original list + pop(0) was O(n) and not an advertised-safe pair.)
    updates = deque()
    result_container = [None]

    def progress_callback(update):
        updates.append(update)

    def worker():
        # Capture the result (or the error dict) directly in the worker so
        # the final SSE event always reflects what actually happened.
        try:
            result_container[0] = operation_func(
                progress_callback=progress_callback, **kwargs
            )
        except Exception as e:
            logger.error(f"SSE operation error: {e}")
            result_container[0] = {'error': str(e)}

    thread = threading.Thread(target=worker)
    thread.start()

    # Stream updates while the worker runs; drain any stragglers afterwards.
    while thread.is_alive() or updates:
        while updates:
            data = asdict(updates.popleft())
            yield f"data: {json.dumps(data, ensure_ascii=False)}\n\n"
        if thread.is_alive():
            time.sleep(0.1)
    thread.join()

    # Send final result (empty dict when the operation returned nothing).
    final_result = result_container[0] or {}
    yield f"data: {json.dumps({'type': 'result', **final_result}, ensure_ascii=False)}\n\n"
@app.route('/admin/zopk/news/scrape-content/stream', methods=['GET'])
@login_required
def admin_zopk_scrape_content_stream():
    """
    SSE endpoint for streaming scrape progress.

    Streams one SSE `data:` event per article as it is scraped, then a
    final 'complete' event with aggregate stats. Admin-only (403).

    Query params:
    - limit: int (default 30, hard cap 100) - max articles to scrape
    - force: bool (default false) - re-scrape already scraped
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    # NOTE(review): this import is shadowed by the one inside generate()
    # below and is otherwise unused here.
    from zopk_content_scraper import ZOPKContentScraper
    limit = min(int(request.args.get('limit', 30)), 100)
    force = request.args.get('force', 'false').lower() == 'true'
    # Captured now: current_user is request-bound and unavailable inside the
    # streamed generator once the request context is gone.
    user_id = current_user.id
    def generate():
        import json
        import time
        from zopk_content_scraper import ZOPKContentScraper, MAX_RETRY_ATTEMPTS
        from database import ZOPKNews
        # The generator outlives the request, so it owns its own session.
        db = SessionLocal()
        try:
            scraper = ZOPKContentScraper(db, user_id=user_id)
            # Get articles to scrape: only publicly approved items.
            query = db.query(ZOPKNews).filter(
                ZOPKNews.status.in_(['approved', 'auto_approved'])
            )
            if not force:
                # Without force: only unscraped items, and failed ones that
                # still have retry attempts left.
                query = query.filter(ZOPKNews.scrape_status.in_(['pending', 'failed']))
                query = query.filter(
                    (ZOPKNews.scrape_status == 'pending') |
                    ((ZOPKNews.scrape_status == 'failed') & (ZOPKNews.scrape_attempts < MAX_RETRY_ATTEMPTS))
                )
            query = query.order_by(ZOPKNews.created_at.desc())
            articles = query.limit(limit).all()
            total = len(articles)
            if total == 0:
                yield f"data: {json.dumps({'status': 'complete', 'message': 'Brak artykułów do scrapowania', 'total': 0}, ensure_ascii=False)}\n\n"
                return
            # Send initial event so the client can render the progress bar.
            yield f"data: {json.dumps({'current': 0, 'total': total, 'percent': 0, 'stage': 'scraping', 'status': 'processing', 'message': f'Rozpoczynam scraping {total} artykułów...'}, ensure_ascii=False)}\n\n"
            stats = {'scraped': 0, 'failed': 0, 'skipped': 0}
            start_time = time.time()
            for idx, article in enumerate(articles, 1):
                # Send processing update before the (potentially slow) fetch.
                yield f"data: {json.dumps({'current': idx, 'total': total, 'percent': round((idx-1)/total*100, 1), 'stage': 'scraping', 'status': 'processing', 'message': f'Pobieram: {article.title[:50]}...', 'article_id': article.id, 'article_title': article.title[:80], 'details': {'source': article.source_name or 'nieznane', **stats}}, ensure_ascii=False)}\n\n"
                result = scraper.scrape_article(article.id)
                # One outcome event per article: success / skipped / failed.
                if result.status == 'scraped':
                    stats['scraped'] += 1
                    yield f"data: {json.dumps({'current': idx, 'total': total, 'percent': round(idx/total*100, 1), 'stage': 'scraping', 'status': 'success', 'message': f'{result.word_count} słów: {article.title[:40]}...', 'article_id': article.id, 'details': {'word_count': result.word_count, **stats}}, ensure_ascii=False)}\n\n"
                elif result.status == 'skipped':
                    stats['skipped'] += 1
                    yield f"data: {json.dumps({'current': idx, 'total': total, 'percent': round(idx/total*100, 1), 'stage': 'scraping', 'status': 'skipped', 'message': f'⊘ Pominięto: {article.title[:40]}...', 'article_id': article.id, 'details': stats}, ensure_ascii=False)}\n\n"
                else:
                    stats['failed'] += 1
                    error_msg = result.error[:50] if result.error else 'Nieznany błąd'
                    yield f"data: {json.dumps({'current': idx, 'total': total, 'percent': round(idx/total*100, 1), 'stage': 'scraping', 'status': 'failed', 'message': f'{error_msg}', 'article_id': article.id, 'details': {'error': result.error, **stats}}, ensure_ascii=False)}\n\n"
            processing_time = round(time.time() - start_time, 2)
            # Send completion event with the aggregate summary.
            scraped_count = stats['scraped']
            failed_count = stats['failed']
            skipped_count = stats['skipped']
            complete_msg = f'Zakończono: {scraped_count} pobrano, {failed_count} błędów, {skipped_count} pominięto'
            complete_data = {'current': total, 'total': total, 'percent': 100, 'stage': 'scraping', 'status': 'complete', 'message': complete_msg, 'details': {'processing_time': processing_time, **stats}}
            yield f"data: {json.dumps(complete_data, ensure_ascii=False)}\n\n"
        except Exception as e:
            logger.error(f"SSE scraping error: {e}")
            yield f"data: {json.dumps({'status': 'error', 'message': str(e)}, ensure_ascii=False)}\n\n"
        finally:
            db.close()
    # X-Accel-Buffering disables nginx proxy buffering so events flush live.
    return Response(generate(), mimetype='text/event-stream', headers={
        'Cache-Control': 'no-cache',
        'X-Accel-Buffering': 'no'
    })
@app.route('/admin/zopk/knowledge/extract/stream', methods=['GET'])
@login_required
def admin_zopk_knowledge_extract_stream():
    """
    SSE endpoint streaming AI knowledge-extraction progress to the admin UI.

    Query params:
        limit: int (default 10, capped at 50) - max articles to process.

    Emits one `data: {...}` JSON event per state change: an initial event,
    then a 'processing' followed by 'success'/'failed' event per article,
    and a final 'complete' (or 'error') event. Responds 403 for non-admins.
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    limit = min(int(request.args.get('limit', 10)), 50)
    # Capture the id now: current_user is request-bound and not safely
    # accessible from inside the streaming generator.
    user_id = current_user.id
    def generate():
        import json
        import time
        db = SessionLocal()
        try:
            from zopk_knowledge_service import ZOPKKnowledgeService
            from database import ZOPKNews
            service = ZOPKKnowledgeService(db, user_id=user_id)
            # Find articles ready for extraction: approved, already scraped,
            # and not yet processed by the knowledge extractor.
            articles = db.query(ZOPKNews).filter(
                ZOPKNews.status.in_(['approved', 'auto_approved']),
                ZOPKNews.scrape_status == 'scraped',
                ZOPKNews.knowledge_extracted == False
            ).order_by(
                ZOPKNews.created_at.desc()
            ).limit(limit).all()
            total = len(articles)
            if total == 0:
                yield f"data: {json.dumps({'status': 'complete', 'message': 'Brak artykułów do ekstrakcji', 'total': 0}, ensure_ascii=False)}\n\n"
                return
            # Send initial event so the UI can render the progress bar.
            yield f"data: {json.dumps({'current': 0, 'total': total, 'percent': 0, 'stage': 'extracting', 'status': 'processing', 'message': f'Rozpoczynam ekstrakcję z {total} artykułów...'}, ensure_ascii=False)}\n\n"
            stats = {'success': 0, 'failed': 0, 'chunks': 0, 'facts': 0, 'entities': 0}
            start_time = time.time()
            for idx, article in enumerate(articles, 1):
                # Send processing update (percent reflects *completed* work, hence idx-1).
                yield f"data: {json.dumps({'current': idx, 'total': total, 'percent': round((idx-1)/total*100, 1), 'stage': 'extracting', 'status': 'processing', 'message': f'Analizuję AI: {article.title[:50]}...', 'article_id': article.id, 'article_title': article.title[:80], 'details': stats}, ensure_ascii=False)}\n\n"
                result = service.extract_from_news(article.id)
                if result.success:
                    stats['success'] += 1
                    stats['chunks'] += result.chunks_created
                    stats['facts'] += result.facts_created
                    stats['entities'] += result.entities_created
                    yield f"data: {json.dumps({'current': idx, 'total': total, 'percent': round(idx/total*100, 1), 'stage': 'extracting', 'status': 'success', 'message': f'{result.chunks_created}ch, {result.facts_created}f, {result.entities_created}e', 'article_id': article.id, 'details': {'new_chunks': result.chunks_created, 'new_facts': result.facts_created, 'new_entities': result.entities_created, **stats}}, ensure_ascii=False)}\n\n"
                else:
                    # Per-article failures are reported but do not abort the run.
                    stats['failed'] += 1
                    error_msg = result.error[:50] if result.error else 'Nieznany błąd'
                    yield f"data: {json.dumps({'current': idx, 'total': total, 'percent': round(idx/total*100, 1), 'stage': 'extracting', 'status': 'failed', 'message': f'{error_msg}', 'article_id': article.id, 'details': {'error': result.error, **stats}}, ensure_ascii=False)}\n\n"
            processing_time = round(time.time() - start_time, 2)
            # Send completion summary with aggregated counters.
            success_count = stats['success']
            chunks_count = stats['chunks']
            facts_count = stats['facts']
            entities_count = stats['entities']
            complete_msg = f'Zakończono: {success_count}/{total}. Utworzono: {chunks_count}ch, {facts_count}f, {entities_count}e'
            complete_data = {'current': total, 'total': total, 'percent': 100, 'stage': 'extracting', 'status': 'complete', 'message': complete_msg, 'details': {'processing_time': processing_time, **stats}}
            yield f"data: {json.dumps(complete_data, ensure_ascii=False)}\n\n"
        except Exception as e:
            logger.error(f"SSE extraction error: {e}")
            yield f"data: {json.dumps({'status': 'error', 'message': str(e)}, ensure_ascii=False)}\n\n"
        finally:
            db.close()
    # X-Accel-Buffering: no disables reverse-proxy buffering so events flush immediately.
    return Response(generate(), mimetype='text/event-stream', headers={
        'Cache-Control': 'no-cache',
        'X-Accel-Buffering': 'no'
    })
@app.route('/admin/zopk/knowledge/embeddings/stream', methods=['GET'])
@login_required
def admin_zopk_embeddings_stream():
    """
    SSE endpoint streaming embedding-generation progress to the admin UI.

    Query params:
        limit: int (default 50, capped at 200) - max chunks to process.

    Streams one JSON event per state change; all generated embeddings are
    persisted with a single commit after the loop. 403 for non-admins.
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    limit = min(int(request.args.get('limit', 50)), 200)
    # Capture now: current_user is request-bound and unavailable inside
    # the generator once the response is streaming.
    user_id = current_user.id
    def generate():
        import json
        import time
        from gemini_service import GeminiService
        db = SessionLocal()
        try:
            from database import ZOPKKnowledgeChunk
            gemini = GeminiService()
            # Only chunks that do not yet have an embedding.
            chunks = db.query(ZOPKKnowledgeChunk).filter(
                ZOPKKnowledgeChunk.embedding.is_(None)
            ).limit(limit).all()
            total = len(chunks)
            if total == 0:
                yield f"data: {json.dumps({'status': 'complete', 'message': 'Brak chunks bez embeddingów', 'total': 0}, ensure_ascii=False)}\n\n"
                return
            # Send initial event so the UI can render the progress bar.
            yield f"data: {json.dumps({'current': 0, 'total': total, 'percent': 0, 'stage': 'embedding', 'status': 'processing', 'message': f'Generuję embeddingi dla {total} chunks...'}, ensure_ascii=False)}\n\n"
            stats = {'success': 0, 'failed': 0}
            start_time = time.time()
            for idx, chunk in enumerate(chunks, 1):
                summary_short = chunk.summary[:40] if chunk.summary else f'chunk_{chunk.id}'
                # Send processing update (percent reflects *completed* work, hence idx-1).
                yield f"data: {json.dumps({'current': idx, 'total': total, 'percent': round((idx-1)/total*100, 1), 'stage': 'embedding', 'status': 'processing', 'message': f'Embedding {idx}/{total}: {summary_short}...', 'details': stats}, ensure_ascii=False)}\n\n"
                try:
                    embedding = gemini.generate_embedding(
                        text=chunk.content,
                        task_type='retrieval_document',
                        title=chunk.summary,
                        user_id=user_id,
                        feature='zopk_chunk_embedding'
                    )
                    if embedding:
                        # Stored as a JSON string; consistent with the
                        # .is_(None) "missing embedding" filter above.
                        chunk.embedding = json.dumps(embedding)
                        stats['success'] += 1
                        yield f"data: {json.dumps({'current': idx, 'total': total, 'percent': round(idx/total*100, 1), 'stage': 'embedding', 'status': 'success', 'message': f'✓ 768 dim: {summary_short}', 'details': stats}, ensure_ascii=False)}\n\n"
                    else:
                        stats['failed'] += 1
                        yield f"data: {json.dumps({'current': idx, 'total': total, 'percent': round(idx/total*100, 1), 'stage': 'embedding', 'status': 'failed', 'message': f'✗ Brak odpowiedzi API', 'details': stats}, ensure_ascii=False)}\n\n"
                except Exception as e:
                    # Per-chunk failures are reported but do not abort the run.
                    stats['failed'] += 1
                    yield f"data: {json.dumps({'current': idx, 'total': total, 'percent': round(idx/total*100, 1), 'stage': 'embedding', 'status': 'failed', 'message': f'{str(e)[:40]}', 'details': {'error': str(e), **stats}}, ensure_ascii=False)}\n\n"
            # Single commit persists every embedding generated in this run.
            db.commit()
            processing_time = round(time.time() - start_time, 2)
            # Send completion
            success_count = stats['success']
            complete_msg = f'Zakończono: {success_count}/{total} embeddingów'
            complete_data = {'current': total, 'total': total, 'percent': 100, 'stage': 'embedding', 'status': 'complete', 'message': complete_msg, 'details': {'processing_time': processing_time, **stats}}
            yield f"data: {json.dumps(complete_data, ensure_ascii=False)}\n\n"
        except Exception as e:
            logger.error(f"SSE embedding error: {e}")
            yield f"data: {json.dumps({'status': 'error', 'message': str(e)}, ensure_ascii=False)}\n\n"
        finally:
            db.close()
    # X-Accel-Buffering: no disables reverse-proxy buffering so events flush immediately.
    return Response(generate(), mimetype='text/event-stream', headers={
        'Cache-Control': 'no-cache',
        'X-Accel-Buffering': 'no'
    })
@app.route('/api/zopk/knowledge/search', methods=['POST'])
@login_required
def api_zopk_knowledge_search():
    """
    Semantic search in the ZOPK knowledge base.

    Request JSON:
        query: str (required) - search query
        limit: int (default 5, clamped to 1..20) - max results

    Response JSON:
        chunks: matching knowledge chunks with similarity scores
        facts: relevant facts for the same query
    """
    from zopk_knowledge_service import search_knowledge, get_relevant_facts
    db = SessionLocal()
    try:
        data = request.get_json() or {}
        # Normalize the query; whitespace-only input counts as missing.
        query = str(data.get('query') or '').strip()
        if not query:
            return jsonify({'success': False, 'error': 'Query wymagane'}), 400
        # Coerce the limit defensively: a non-numeric value previously hit
        # min(str, 20), raising TypeError and surfacing as a 500.
        try:
            limit = int(data.get('limit', 5))
        except (TypeError, ValueError):
            limit = 5
        limit = max(1, min(limit, 20))
        # Search chunks
        chunks = search_knowledge(
            db,
            query=query,
            limit=limit,
            user_id=current_user.id
        )
        # Get relevant facts
        facts = get_relevant_facts(db, query=query, limit=limit)
        return jsonify({
            'success': True,
            'query': query,
            'chunks': chunks,
            'facts': facts
        })
    except Exception as e:
        logger.error(f"Error in knowledge search: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
# ============================================================
# ZOPK KNOWLEDGE BASE - ADMIN PANEL
# ============================================================
@app.route('/admin/zopk/knowledge')
@login_required
def admin_zopk_knowledge_dashboard():
    """
    Render the ZOPK Knowledge Base management dashboard.

    Shows stats and links to the chunks, facts and entities lists.
    Non-admin users are redirected to the index with a warning flash.
    """
    if current_user.is_admin:
        return render_template('admin/zopk_knowledge_dashboard.html')
    flash('Brak uprawnień do tej sekcji.', 'warning')
    return redirect(url_for('index'))
@app.route('/admin/zopk/knowledge/chunks')
@login_required
def admin_zopk_knowledge_chunks():
    """
    Paginated, filterable list of knowledge chunks (admin only).

    Query params: page, per_page, source_news_id, has_embedding, is_verified.
    """
    if not current_user.is_admin:
        flash('Brak uprawnień do tej sekcji.', 'warning')
        return redirect(url_for('index'))
    from zopk_knowledge_service import list_chunks
    # Pagination and filter parameters from the query string.
    page = request.args.get('page', 1, type=int)
    per_page = request.args.get('per_page', 20, type=int)
    source_news_id = request.args.get('source_news_id', type=int)
    # Tri-state flags: absent = no filter, otherwise parse 'true'/'false'.
    flags = {}
    for flag_name in ('has_embedding', 'is_verified'):
        raw = request.args.get(flag_name)
        flags[flag_name] = None if raw is None else raw.lower() == 'true'
    has_embedding = flags['has_embedding']
    is_verified = flags['is_verified']
    db = SessionLocal()
    try:
        listing = list_chunks(
            db,
            page=page,
            per_page=per_page,
            source_news_id=source_news_id,
            has_embedding=has_embedding,
            is_verified=is_verified
        )
        return render_template(
            'admin/zopk_knowledge_chunks.html',
            chunks=listing['chunks'],
            total=listing['total'],
            page=listing['page'],
            per_page=listing['per_page'],
            pages=listing['pages'],
            source_news_id=source_news_id,
            has_embedding=has_embedding,
            is_verified=is_verified
        )
    finally:
        db.close()
@app.route('/admin/zopk/knowledge/facts')
@login_required
def admin_zopk_knowledge_facts():
    """
    Paginated, filterable list of knowledge facts (admin only).

    Query params: page, per_page, fact_type, source_news_id, is_verified.
    """
    if not current_user.is_admin:
        flash('Brak uprawnień do tej sekcji.', 'warning')
        return redirect(url_for('index'))
    from zopk_knowledge_service import list_facts
    args = request.args
    page = args.get('page', 1, type=int)
    per_page = args.get('per_page', 20, type=int)
    fact_type = args.get('fact_type')
    source_news_id = args.get('source_news_id', type=int)
    # Tri-state flag: absent = no filter, otherwise parse 'true'/'false'.
    raw_verified = args.get('is_verified')
    is_verified = None if raw_verified is None else raw_verified.lower() == 'true'
    db = SessionLocal()
    try:
        listing = list_facts(
            db,
            page=page,
            per_page=per_page,
            fact_type=fact_type,
            is_verified=is_verified,
            source_news_id=source_news_id
        )
        return render_template(
            'admin/zopk_knowledge_facts.html',
            facts=listing['facts'],
            total=listing['total'],
            page=listing['page'],
            per_page=listing['per_page'],
            pages=listing['pages'],
            fact_types=listing['fact_types'],
            current_fact_type=fact_type,
            source_news_id=source_news_id,
            is_verified=is_verified
        )
    finally:
        db.close()
@app.route('/admin/zopk/knowledge/entities')
@login_required
def admin_zopk_knowledge_entities():
    """
    Paginated, filterable list of knowledge entities (admin only).

    Query params: page, per_page, entity_type, min_mentions, is_verified.
    """
    if not current_user.is_admin:
        flash('Brak uprawnień do tej sekcji.', 'warning')
        return redirect(url_for('index'))
    from zopk_knowledge_service import list_entities
    args = request.args
    page = args.get('page', 1, type=int)
    per_page = args.get('per_page', 20, type=int)
    entity_type = args.get('entity_type')
    min_mentions = args.get('min_mentions', type=int)
    # Tri-state flag: absent = no filter, otherwise parse 'true'/'false'.
    raw_verified = args.get('is_verified')
    is_verified = None if raw_verified is None else raw_verified.lower() == 'true'
    db = SessionLocal()
    try:
        listing = list_entities(
            db,
            page=page,
            per_page=per_page,
            entity_type=entity_type,
            is_verified=is_verified,
            min_mentions=min_mentions
        )
        return render_template(
            'admin/zopk_knowledge_entities.html',
            entities=listing['entities'],
            total=listing['total'],
            page=listing['page'],
            per_page=listing['per_page'],
            pages=listing['pages'],
            entity_types=listing['entity_types'],
            current_entity_type=entity_type,
            min_mentions=min_mentions,
            is_verified=is_verified
        )
    finally:
        db.close()
@app.route('/api/zopk/knowledge/chunks/<int:chunk_id>')
@login_required
def api_zopk_chunk_detail(chunk_id):
    """Return detailed JSON for a single knowledge chunk (admin only)."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    from zopk_knowledge_service import get_chunk_detail
    db = SessionLocal()
    try:
        detail = get_chunk_detail(db, chunk_id)
        if not detail:
            return jsonify({'success': False, 'error': 'Chunk nie znaleziony'}), 404
        return jsonify({'success': True, 'chunk': detail})
    finally:
        db.close()
@app.route('/api/zopk/knowledge/chunks/<int:chunk_id>/verify', methods=['POST'])
@login_required
def api_zopk_chunk_verify(chunk_id):
    """Set or clear the verification flag on a chunk (admin only)."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    from zopk_knowledge_service import update_chunk_verification
    db = SessionLocal()
    try:
        payload = request.get_json() or {}
        verified = payload.get('is_verified', True)
        if not update_chunk_verification(db, chunk_id, verified, current_user.id):
            return jsonify({'success': False, 'error': 'Chunk nie znaleziony'}), 404
        return jsonify({'success': True, 'is_verified': verified})
    except Exception as e:
        db.rollback()
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
@app.route('/api/zopk/knowledge/facts/<int:fact_id>/verify', methods=['POST'])
@login_required
def api_zopk_fact_verify(fact_id):
    """Set or clear the verification flag on a fact (admin only)."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    from zopk_knowledge_service import update_fact_verification
    db = SessionLocal()
    try:
        payload = request.get_json() or {}
        verified = payload.get('is_verified', True)
        if not update_fact_verification(db, fact_id, verified):
            return jsonify({'success': False, 'error': 'Fakt nie znaleziony'}), 404
        return jsonify({'success': True, 'is_verified': verified})
    except Exception as e:
        db.rollback()
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
@app.route('/api/zopk/knowledge/entities/<int:entity_id>/verify', methods=['POST'])
@login_required
def api_zopk_entity_verify(entity_id):
    """Set or clear the verification flag on an entity (admin only)."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    from zopk_knowledge_service import update_entity_verification
    db = SessionLocal()
    try:
        payload = request.get_json() or {}
        verified = payload.get('is_verified', True)
        if not update_entity_verification(db, entity_id, verified):
            return jsonify({'success': False, 'error': 'Encja nie znaleziona'}), 404
        return jsonify({'success': True, 'is_verified': verified})
    except Exception as e:
        db.rollback()
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
@app.route('/api/zopk/knowledge/chunks/<int:chunk_id>', methods=['DELETE'])
@login_required
def api_zopk_chunk_delete(chunk_id):
    """Delete a knowledge chunk and its associated data (admin only)."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    from zopk_knowledge_service import delete_chunk
    db = SessionLocal()
    try:
        if delete_chunk(db, chunk_id):
            return jsonify({'success': True, 'message': 'Chunk usunięty'})
        return jsonify({'success': False, 'error': 'Chunk nie znaleziony'}), 404
    except Exception as e:
        db.rollback()
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
# ============================================================
# ZOPK ENTITY DUPLICATE MANAGEMENT
# ============================================================
@app.route('/admin/zopk/knowledge/duplicates')
@login_required
def admin_zopk_knowledge_duplicates():
    """
    Admin page for reviewing and merging duplicate knowledge entities.

    Query params:
        entity_type: optional filter (empty = all types)
        min_similarity: float threshold, default 0.4
    """
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))
    from zopk_knowledge_service import find_duplicate_entities
    # Get filter parameters. Parse min_similarity defensively: a malformed
    # value previously raised ValueError and surfaced as an HTML 500 page.
    entity_type = request.args.get('entity_type', '')
    try:
        min_similarity = float(request.args.get('min_similarity', 0.4))
    except (TypeError, ValueError):
        min_similarity = 0.4
    db = SessionLocal()
    try:
        # Find candidate duplicate pairs above the similarity threshold.
        duplicates = find_duplicate_entities(
            db,
            entity_type=entity_type if entity_type else None,
            min_similarity=min_similarity,
            limit=100
        )
        # Distinct entity types populate the filter dropdown.
        from database import ZOPKKnowledgeEntity
        from sqlalchemy import distinct
        entity_types = [r[0] for r in db.query(distinct(ZOPKKnowledgeEntity.entity_type)).all()]
        return render_template(
            'admin/zopk_knowledge_duplicates.html',
            duplicates=duplicates,
            entity_types=sorted(entity_types),
            selected_type=entity_type,
            min_similarity=min_similarity
        )
    finally:
        db.close()
@app.route('/api/zopk/knowledge/duplicates/preview', methods=['POST'])
@login_required
def api_zopk_duplicates_preview():
    """
    Preview the effect of merging two entities (admin only).

    Request JSON: primary_id, duplicate_id (both required).
    Returns 400 when ids are missing, 404 when an entity does not exist.
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    from zopk_knowledge_service import get_entity_merge_preview
    db = SessionLocal()
    try:
        data = request.get_json() or {}
        primary_id = data.get('primary_id')
        duplicate_id = data.get('duplicate_id')
        if not primary_id or not duplicate_id:
            return jsonify({'success': False, 'error': 'Brak ID encji'}), 400
        preview = get_entity_merge_preview(db, primary_id, duplicate_id)
        if 'error' in preview:
            return jsonify({'success': False, 'error': preview['error']}), 404
        return jsonify({'success': True, 'preview': preview})
    except Exception as e:
        # Consistent with sibling API endpoints: return JSON instead of
        # letting the framework emit an HTML 500 page.
        logger.error(f"Error in merge preview: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
@app.route('/api/zopk/knowledge/duplicates/merge', methods=['POST'])
@login_required
def api_zopk_duplicates_merge():
    """
    Merge two entities - keep primary, delete duplicate (admin only).

    Request JSON: primary_id, duplicate_id (required), new_name (optional).
    """
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    from zopk_knowledge_service import merge_entities
    db = SessionLocal()
    try:
        data = request.get_json() or {}
        primary_id = data.get('primary_id')
        duplicate_id = data.get('duplicate_id')
        new_name = data.get('new_name')
        if not primary_id or not duplicate_id:
            return jsonify({'success': False, 'error': 'Brak ID encji'}), 400
        result = merge_entities(db, primary_id, duplicate_id, new_name)
        return jsonify(result)
    except Exception as e:
        # Merge writes to the DB; roll back and report a JSON error instead
        # of leaking an HTML 500 from the framework.
        db.rollback()
        logger.error(f"Error merging entities: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
# ============================================================
# ZOPK ENTITY RELATIONS GRAPH
# ============================================================
@app.route('/admin/zopk/knowledge/graph')
@login_required
def admin_zopk_knowledge_graph():
    """Render the entity relations graph visualization page (admin only)."""
    if current_user.is_admin:
        return render_template('admin/zopk_knowledge_graph.html')
    flash('Brak uprawnień do tej strony.', 'error')
    return redirect(url_for('dashboard'))
# In-memory cache for the (expensive) co-occurrence graph query.
# Maps cache_key -> (response_dict, unix_timestamp); entries older than
# _GRAPH_CACHE_TTL seconds are recomputed on the next request.
_graph_cache = {}
_GRAPH_CACHE_TTL = 300  # 5 minutes
@app.route('/api/zopk/knowledge/graph/data')
@login_required
def api_zopk_knowledge_graph_data():
    """Get graph data for entity co-occurrence visualization.

    Query params:
        entity_type: optional node type filter
        min_cooccurrence: minimum shared-chunk count per link (default 3)
        limit: max links returned (default 100, capped at 500)

    Uses in-memory cache with 5 minute TTL to avoid recalculating
    co-occurrences on every request. Returns JSON with 'nodes', 'links',
    'stats' and a 'cached' flag.
    """
    import time
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    from sqlalchemy import text, func
    from database import ZOPKKnowledgeEntity, ZOPKKnowledgeEntityMention
    # Build cache key from parameters
    entity_type = request.args.get('entity_type', '')
    min_cooccurrence = int(request.args.get('min_cooccurrence', 3))
    limit = min(int(request.args.get('limit', 100)), 500)
    cache_key = f"graph:{entity_type}:{min_cooccurrence}:{limit}"
    # Check cache
    if cache_key in _graph_cache:
        cached_data, cached_time = _graph_cache[cache_key]
        if time.time() - cached_time < _GRAPH_CACHE_TTL:
            # Return cached data with cache indicator.
            # NOTE(review): this mutates the dict stored in the cache, so the
            # stored entry keeps cached=True afterwards - harmless here.
            cached_data['cached'] = True
            return jsonify(cached_data)
    db = SessionLocal()
    try:
        # Get top entities by mentions (floor of 5 keeps the graph readable)
        entities_query = db.query(ZOPKKnowledgeEntity).filter(
            ZOPKKnowledgeEntity.mentions_count >= 5
        )
        if entity_type:
            entities_query = entities_query.filter(
                ZOPKKnowledgeEntity.entity_type == entity_type
            )
        entities_query = entities_query.order_by(
            ZOPKKnowledgeEntity.mentions_count.desc()
        ).limit(100)
        entities = entities_query.all()
        entity_ids = [e.id for e in entities]
        if not entity_ids:
            return jsonify({'success': True, 'nodes': [], 'links': []})
        # Get co-occurrences (entities appearing in same chunk).
        # The self-join with m1.entity_id < m2.entity_id deduplicates the
        # symmetric pairs. NOTE: ANY(:param) is PostgreSQL-specific syntax.
        cooccur_query = text("""
            SELECT
                m1.entity_id as source,
                m2.entity_id as target,
                COUNT(*) as value
            FROM zopk_knowledge_entity_mentions m1
            JOIN zopk_knowledge_entity_mentions m2
                ON m1.chunk_id = m2.chunk_id
                AND m1.entity_id < m2.entity_id
            WHERE m1.entity_id = ANY(:entity_ids)
                AND m2.entity_id = ANY(:entity_ids)
            GROUP BY m1.entity_id, m2.entity_id
            HAVING COUNT(*) >= :min_cooccurrence
            ORDER BY COUNT(*) DESC
            LIMIT :limit
        """)
        result = db.execute(cooccur_query, {
            'entity_ids': entity_ids,
            'min_cooccurrence': min_cooccurrence,
            'limit': limit
        })
        # Build nodes and links
        used_entity_ids = set()
        links = []
        for row in result:
            links.append({
                'source': row.source,
                'target': row.target,
                'value': row.value
            })
            used_entity_ids.add(row.source)
            used_entity_ids.add(row.target)
        # Build nodes only for entities that have links
        entity_map = {e.id: e for e in entities}
        nodes = []
        for eid in used_entity_ids:
            if eid in entity_map:
                e = entity_map[eid]
                nodes.append({
                    'id': e.id,
                    'name': e.name,
                    'type': e.entity_type,
                    'mentions': e.mentions_count,
                    'verified': e.is_verified
                })
        # Build response
        response_data = {
            'success': True,
            'nodes': nodes,
            'links': links,
            'stats': {
                'total_nodes': len(nodes),
                'total_links': len(links)
            },
            'cached': False
        }
        # Save to cache (shallow copy: the nodes/links lists are shared with
        # the response but are never mutated after this point)
        _graph_cache[cache_key] = (response_data.copy(), time.time())
        return jsonify(response_data)
    except Exception as e:
        logger.error(f"Error getting graph data: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
# ============================================================
# ZOPK KNOWLEDGE - FACT DUPLICATES
# ============================================================
@app.route('/admin/zopk/knowledge/fact-duplicates')
@login_required
def admin_zopk_fact_duplicates():
    """Render the fact-deduplication admin panel."""
    if current_user.is_admin:
        return render_template('admin/zopk_fact_duplicates.html')
    flash('Brak uprawnień.', 'error')
    return redirect(url_for('dashboard'))
@app.route('/api/zopk/knowledge/fact-duplicates')
@login_required
def api_zopk_fact_duplicates():
    """Return a JSON list of probable duplicate facts (admin only)."""
    if not current_user.is_admin:
        return jsonify({'error': 'Forbidden'}), 403
    from zopk_knowledge_service import find_duplicate_facts
    db = SessionLocal()
    try:
        # Filters from the query string; limit is capped at 500.
        threshold = float(request.args.get('min_similarity', 0.7))
        fact_type = request.args.get('fact_type', '')
        max_results = min(int(request.args.get('limit', 100)), 500)
        pairs = find_duplicate_facts(db, threshold, max_results, fact_type if fact_type else None)
        return jsonify({'success': True, 'duplicates': pairs, 'count': len(pairs)})
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
@app.route('/api/zopk/knowledge/fact-duplicates/merge', methods=['POST'])
@login_required
def api_zopk_fact_merge():
    """
    Merge two duplicate facts (admin only).

    Request JSON: primary_id, duplicate_id (required), new_text (optional).
    """
    if not current_user.is_admin:
        return jsonify({'error': 'Forbidden'}), 403
    from zopk_knowledge_service import merge_facts
    db = SessionLocal()
    try:
        # `or {}` plus explicit validation: an empty body previously raised
        # AttributeError (None.get) and surfaced as a generic 500. This also
        # matches the validation done by the entity-merge endpoint.
        data = request.get_json() or {}
        primary_id = data.get('primary_id')
        duplicate_id = data.get('duplicate_id')
        new_text = data.get('new_text')
        if not primary_id or not duplicate_id:
            return jsonify({'success': False, 'error': 'Brak ID faktów'}), 400
        result = merge_facts(db, primary_id, duplicate_id, new_text)
        return jsonify(result)
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
# ============================================================
# ZOPK KNOWLEDGE - AUTO VERIFY
# ============================================================
@app.route('/api/zopk/knowledge/auto-verify/entities', methods=['POST'])
@login_required
def api_zopk_auto_verify_entities():
    """Auto-verify entities with a high mention count (admin only)."""
    if not current_user.is_admin:
        return jsonify({'error': 'Forbidden'}), 403
    from zopk_knowledge_service import auto_verify_top_entities
    db = SessionLocal()
    try:
        payload = request.get_json() or {}
        threshold = int(payload.get('min_mentions', 5))
        max_count = int(payload.get('limit', 100))
        return jsonify(auto_verify_top_entities(db, threshold, max_count))
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
@app.route('/api/zopk/knowledge/auto-verify/facts', methods=['POST'])
@login_required
def api_zopk_auto_verify_facts():
    """Auto-verify facts with a high importance score (admin only)."""
    if not current_user.is_admin:
        return jsonify({'error': 'Forbidden'}), 403
    from zopk_knowledge_service import auto_verify_top_facts
    db = SessionLocal()
    try:
        payload = request.get_json() or {}
        threshold = float(payload.get('min_importance', 0.7))
        max_count = int(payload.get('limit', 200))
        return jsonify(auto_verify_top_facts(db, threshold, max_count))
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
@app.route('/api/zopk/knowledge/auto-verify/similar', methods=['POST'])
@login_required
def api_zopk_auto_verify_similar():
    """Auto-verify facts similar to already-verified ones (admin only)."""
    if not current_user.is_admin:
        return jsonify({'error': 'Forbidden'}), 403
    from zopk_knowledge_service import auto_verify_similar_to_verified
    db = SessionLocal()
    try:
        payload = request.get_json() or {}
        threshold = float(payload.get('min_similarity', 0.8))
        max_count = int(payload.get('limit', 100))
        return jsonify(auto_verify_similar_to_verified(db, threshold, max_count))
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
@app.route('/api/zopk/knowledge/suggest-similar-facts')
@login_required
def api_zopk_suggest_similar_facts():
    """Suggest facts similar to verified ones, without auto-verifying (admin only)."""
    if not current_user.is_admin:
        return jsonify({'error': 'Forbidden'}), 403
    from zopk_knowledge_service import find_similar_to_verified_facts
    db = SessionLocal()
    try:
        threshold = float(request.args.get('min_similarity', 0.8))
        max_results = int(request.args.get('limit', 50))
        matches = find_similar_to_verified_facts(db, threshold, max_results)
        return jsonify({
            'success': True,
            'suggestions': matches,
            'count': len(matches)
        })
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
# ============================================================
# ZOPK KNOWLEDGE - DASHBOARD
# ============================================================
@app.route('/api/zopk/knowledge/dashboard-stats')
@login_required
def api_zopk_dashboard_stats():
    """Return JSON statistics for the knowledge dashboard (admin only)."""
    if not current_user.is_admin:
        return jsonify({'error': 'Forbidden'}), 403
    from zopk_knowledge_service import get_knowledge_dashboard_stats
    db = SessionLocal()
    try:
        # Flatten the service stats into the response next to 'success'.
        return jsonify({'success': True, **get_knowledge_dashboard_stats(db)})
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
# ============================================================
# ZOPK MILESTONES / TIMELINE
# ============================================================
@app.route('/admin/zopk/timeline')
@login_required
def admin_zopk_timeline():
    """Render the ZOPK timeline admin panel."""
    if current_user.is_admin:
        return render_template('admin/zopk_timeline.html')
    flash('Brak uprawnień.', 'error')
    return redirect(url_for('dashboard'))
@app.route('/api/zopk/milestones')
@login_required
def api_zopk_milestones():
    """Return a JSON list of ZOPK milestones ordered by target date."""
    from database import ZOPKMilestone
    db = SessionLocal()
    try:
        rows = db.query(ZOPKMilestone).order_by(ZOPKMilestone.target_date).all()
        def serialize(m):
            # Dates are emitted as ISO strings, missing dates as null.
            return {
                'id': m.id,
                'title': m.title,
                'description': m.description,
                'category': m.category,
                'target_date': m.target_date.isoformat() if m.target_date else None,
                'actual_date': m.actual_date.isoformat() if m.actual_date else None,
                'status': m.status,
                'source_url': m.source_url
            }
        return jsonify({
            'success': True,
            'milestones': [serialize(m) for m in rows]
        })
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
@app.route('/api/zopk/milestones', methods=['POST'])
@login_required
def api_zopk_milestone_create():
    """
    Create a ZOPK milestone (admin only).

    Request JSON: 'title' (required); optional: description, category,
    target_date / actual_date ('YYYY-MM-DD'), status, source_url,
    source_news_id. Returns the new milestone id on success.
    """
    if not current_user.is_admin:
        return jsonify({'error': 'Forbidden'}), 403
    from database import ZOPKMilestone
    from datetime import datetime
    db = SessionLocal()
    try:
        data = request.get_json() or {}
        # Validate the only required field up front: a missing title used to
        # surface as a KeyError-driven 500 instead of a clear 400.
        title = data.get('title')
        if not title:
            return jsonify({'success': False, 'error': 'Pole "title" jest wymagane'}), 400
        def parse_date(key):
            # 'YYYY-MM-DD' -> date, or None when the field is absent/empty.
            return datetime.strptime(data[key], '%Y-%m-%d').date() if data.get(key) else None
        milestone = ZOPKMilestone(
            title=title,
            description=data.get('description'),
            category=data.get('category', 'other'),
            target_date=parse_date('target_date'),
            actual_date=parse_date('actual_date'),
            status=data.get('status', 'planned'),
            source_url=data.get('source_url'),
            source_news_id=data.get('source_news_id')
        )
        db.add(milestone)
        db.commit()
        return jsonify({'success': True, 'id': milestone.id})
    except Exception as e:
        db.rollback()
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
@app.route('/api/zopk/milestones/<int:milestone_id>', methods=['PUT'])
@login_required
def api_zopk_milestone_update(milestone_id):
    """Update fields of an existing milestone (admin only)."""
    if not current_user.is_admin:
        return jsonify({'error': 'Forbidden'}), 403
    from database import ZOPKMilestone
    from datetime import datetime
    db = SessionLocal()
    try:
        milestone = db.query(ZOPKMilestone).get(milestone_id)
        if not milestone:
            return jsonify({'error': 'Not found'}), 404
        data = request.get_json()
        # Plain fields: copy verbatim when present in the payload.
        for attr in ('title', 'description', 'category', 'status', 'source_url'):
            if attr in data:
                setattr(milestone, attr, data[attr])
        # Date fields: 'YYYY-MM-DD' strings; empty/None clears the value.
        for attr in ('target_date', 'actual_date'):
            if attr in data:
                raw = data[attr]
                setattr(milestone, attr, datetime.strptime(raw, '%Y-%m-%d').date() if raw else None)
        db.commit()
        return jsonify({'success': True})
    except Exception as e:
        db.rollback()
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
@app.route('/api/zopk/milestones/<int:milestone_id>', methods=['DELETE'])
@login_required
def api_zopk_milestone_delete(milestone_id):
    """Delete a milestone by id (admin only)."""
    if not current_user.is_admin:
        return jsonify({'error': 'Forbidden'}), 403
    from database import ZOPKMilestone
    db = SessionLocal()
    try:
        target = db.query(ZOPKMilestone).get(milestone_id)
        if not target:
            return jsonify({'error': 'Not found'}), 404
        db.delete(target)
        db.commit()
        return jsonify({'success': True})
    except Exception as e:
        db.rollback()
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
@app.route('/api/zopk/timeline/suggestions')
@login_required
def api_zopk_timeline_suggestions():
    """Milestone suggestions mined from the knowledge base (admin only)."""
    if not current_user.is_admin:
        return jsonify({'error': 'Forbidden'}), 403
    from zopk_knowledge_service import get_timeline_suggestions
    # Query-string options; 'use_ai' triggers AI categorization below.
    limit = request.args.get('limit', 30, type=int)
    only_verified = request.args.get('only_verified', 'false').lower() == 'true'
    use_ai = request.args.get('use_ai', 'false').lower() == 'true'
    db = SessionLocal()
    try:
        result = get_timeline_suggestions(db, limit=limit, only_verified=only_verified)
        if result['success'] and use_ai and result.get('suggestions'):
            from zopk_knowledge_service import categorize_milestones_with_ai
            result['suggestions'] = categorize_milestones_with_ai(db, result['suggestions'])
        return jsonify(result)
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
@app.route('/api/zopk/timeline/suggestions/approve', methods=['POST'])
@login_required
def api_zopk_timeline_suggestion_approve():
    """API - approve a knowledge-base suggestion and create a milestone from it."""
    if not current_user.is_admin:
        return jsonify({'error': 'Forbidden'}), 403
    from zopk_knowledge_service import create_milestone_from_suggestion
    payload = request.get_json()
    if not payload:
        return jsonify({'error': 'No data provided'}), 400
    fact_id = payload.get('fact_id')
    if not fact_id:
        return jsonify({'error': 'fact_id is required'}), 400
    session = SessionLocal()
    try:
        outcome = create_milestone_from_suggestion(
            db_session=session,
            fact_id=fact_id,
            title=payload.get('title', 'Kamień milowy'),
            description=payload.get('description'),
            category=payload.get('category', 'other'),
            target_date=payload.get('target_date'),
            status=payload.get('status', 'planned'),
            source_url=payload.get('source_url')
        )
        return jsonify(outcome)
    except Exception as exc:
        session.rollback()
        return jsonify({'success': False, 'error': str(exc)}), 500
    finally:
        session.close()
# ============================================================
# KRS AUDIT (Krajowy Rejestr Sądowy)
# ============================================================
@app.route('/admin/krs-audit')
@login_required
def admin_krs_audit():
    """
    Admin dashboard for the KRS (Krajowy Rejestr Sądowy) audit.

    Renders summary statistics plus a per-company breakdown (latest audit,
    PKD codes, related people) for every active company that has a KRS
    number on record. Admin only.
    """
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))
    db = SessionLocal()
    try:
        from sqlalchemy import func
        # Active companies that actually have a KRS number, alphabetically.
        with_krs = db.query(Company).filter(
            Company.status == 'active',
            Company.krs.isnot(None),
            Company.krs != ''
        ).order_by(Company.name).all()

        companies = []
        for firm in with_krs:
            # Most recent audit record for this company, if any.
            latest_audit = db.query(KRSAudit).filter(
                KRSAudit.company_id == firm.id
            ).order_by(KRSAudit.audit_date.desc()).first()
            # All PKD codes, primary code first.
            codes = db.query(CompanyPKD).filter(
                CompanyPKD.company_id == firm.id
            ).order_by(CompanyPKD.is_primary.desc(), CompanyPKD.pkd_code).all()
            # Number of people (board, shareholders, ...) linked to the company.
            people_total = db.query(CompanyPerson).filter(
                CompanyPerson.company_id == firm.id
            ).count()
            companies.append({
                'id': firm.id,
                'name': firm.name,
                'slug': firm.slug,
                'krs': firm.krs,
                'nip': firm.nip,
                'capital_amount': firm.capital_amount,
                'krs_last_audit_at': firm.krs_last_audit_at,
                'krs_pdf_path': firm.krs_pdf_path,
                'audit': latest_audit,
                'pkd_count': len(codes),
                'pkd_codes': [
                    {'code': c.pkd_code,
                     'description': c.pkd_description,
                     'is_primary': c.is_primary}
                    for c in codes
                ],
                'people_count': people_total,
                'capital_shares_count': firm.capital_shares_count
            })

        # Aggregate counters for the summary cards.
        total_with_krs = len(companies)
        audited_count = sum(1 for c in companies if c['krs_last_audit_at'])
        no_krs_count = db.query(Company).filter(
            Company.status == 'active',
            (Company.krs.is_(None)) | (Company.krs == '')
        ).count()
        stats = {
            'total_with_krs': total_with_krs,
            'audited_count': audited_count,
            'not_audited_count': total_with_krs - audited_count,
            'no_krs_count': no_krs_count,
            'with_capital': sum(1 for c in companies if c['capital_amount']),
            'with_people': sum(1 for c in companies if c['people_count'] > 0),
            'with_pkd': sum(1 for c in companies if c['pkd_count'] > 0)
        }
        return render_template('admin/krs_audit_dashboard.html',
                               companies=companies,
                               stats=stats,
                               krs_audit_available=KRS_AUDIT_AVAILABLE,
                               now=datetime.now())
    finally:
        db.close()
@app.route('/api/krs/audit', methods=['POST'])
@login_required
@limiter.limit("200 per hour")
def api_krs_audit_trigger():
    """
    API: Trigger KRS audit for a company (admin-only).

    Parses a locally stored KRS PDF file and extracts all available data:
    - Basic info (KRS, NIP, REGON, name, legal form)
    - Capital and shares
    - Management board, shareholders, procurators
    - PKD codes
    - Financial reports

    Request JSON body:
    - company_id: Company ID (integer)

    Returns:
    - Success: Audit results saved to database (company, PKD, people,
      financial-report rows updated/created)
    - Error: Error message with status code (403/404/400/503/500)
    """
    if not current_user.is_admin:
        return jsonify({
            'success': False,
            'error': 'Brak uprawnień. Tylko administrator może uruchamiać audyty KRS.'
        }), 403
    # KRS_AUDIT_AVAILABLE is a module-level availability flag set outside this view.
    if not KRS_AUDIT_AVAILABLE:
        return jsonify({
            'success': False,
            'error': 'Usługa audytu KRS jest niedostępna.'
        }), 503
    data = request.get_json()
    if not data or not data.get('company_id'):
        return jsonify({
            'success': False,
            'error': 'Podaj company_id firmy do audytu.'
        }), 400
    company_id = data['company_id']
    db = SessionLocal()
    try:
        company = db.query(Company).filter_by(id=company_id, status='active').first()
        if not company:
            return jsonify({
                'success': False,
                'error': 'Firma nie znaleziona.'
            }), 404
        if not company.krs:
            return jsonify({
                'success': False,
                'error': f'Firma "{company.name}" nie ma numeru KRS.'
            }), 400
        # Find PDF file: any file in data/krs_pdfs whose name contains the KRS number.
        pdf_dir = Path('data/krs_pdfs')
        pdf_files = list(pdf_dir.glob(f'*{company.krs}*.pdf'))
        if not pdf_files:
            return jsonify({
                'success': False,
                'error': f'Nie znaleziono pliku PDF dla KRS {company.krs}. '
                         f'Pobierz odpis z ekrs.ms.gov.pl i umieść w data/krs_pdfs/'
            }), 404
        pdf_path = pdf_files[0]
        # Create audit record up front (committed immediately) so progress is
        # visible to the dashboard while parsing runs.
        audit = KRSAudit(
            company_id=company.id,
            status='parsing',
            progress_percent=10,
            progress_message='Parsowanie pliku PDF...',
            pdf_filename=pdf_path.name,
            pdf_path=str(pdf_path)
        )
        db.add(audit)
        db.commit()
        # Parse PDF; any failure below flips the audit row to 'error'.
        try:
            parsed_data = parse_krs_pdf(str(pdf_path), verbose=True)
            # Update audit with parsed data
            audit.status = 'completed'
            audit.progress_percent = 100
            audit.progress_message = 'Audyt zakończony pomyślnie'
            audit.extracted_krs = parsed_data.get('krs')
            audit.extracted_nazwa = parsed_data.get('nazwa')
            audit.extracted_nip = parsed_data.get('nip')
            audit.extracted_regon = parsed_data.get('regon')
            audit.extracted_forma_prawna = parsed_data.get('forma_prawna')
            audit.extracted_data_rejestracji = parse_date_str(parsed_data.get('data_rejestracji'))
            audit.extracted_kapital_zakladowy = parsed_data.get('kapital_zakladowy')
            audit.extracted_liczba_udzialow = parsed_data.get('liczba_udzialow')
            audit.extracted_sposob_reprezentacji = parsed_data.get('sposob_reprezentacji')
            audit.zarzad_count = len(parsed_data.get('zarzad', []))
            audit.wspolnicy_count = len(parsed_data.get('wspolnicy', []))
            audit.prokurenci_count = len(parsed_data.get('prokurenci', []))
            # pkd_count = primary code (0/1) + all secondary codes.
            audit.pkd_count = 1 if parsed_data.get('pkd_przewazajacy') else 0
            audit.pkd_count += len(parsed_data.get('pkd_pozostale', []))
            # Convert non-JSON-serializable values for JSONB storage
            def make_json_serializable(obj):
                """Recursively convert Decimal -> float and date/datetime -> ISO string."""
                from decimal import Decimal
                if isinstance(obj, Decimal):
                    return float(obj)
                elif isinstance(obj, (datetime, date)):
                    return obj.isoformat()
                elif isinstance(obj, dict):
                    return {k: make_json_serializable(v) for k, v in obj.items()}
                elif isinstance(obj, list):
                    return [make_json_serializable(i) for i in obj]
                return obj
            audit.parsed_data = make_json_serializable(parsed_data)
            audit.pdf_downloaded_at = datetime.now()
            # Update company with parsed data (only fields the parser found).
            if parsed_data.get('kapital_zakladowy'):
                company.capital_amount = parsed_data['kapital_zakladowy']
            if parsed_data.get('liczba_udzialow'):
                company.capital_shares_count = parsed_data['liczba_udzialow']
            if parsed_data.get('wartosc_nominalna_udzialu'):
                company.capital_share_value = parsed_data['wartosc_nominalna_udzialu']
            if parsed_data.get('data_rejestracji'):
                company.krs_registration_date = parse_date_str(parsed_data['data_rejestracji'])
            if parsed_data.get('sposob_reprezentacji'):
                company.krs_representation_rules = parsed_data['sposob_reprezentacji']
            if parsed_data.get('czas_trwania'):
                company.krs_duration = parsed_data['czas_trwania']
            company.krs_last_audit_at = datetime.now()
            company.krs_pdf_path = str(pdf_path)
            # Import PKD codes (idempotent: existing (company, code) pairs are kept).
            pkd_main = parsed_data.get('pkd_przewazajacy')
            if pkd_main:
                existing = db.query(CompanyPKD).filter_by(
                    company_id=company.id,
                    pkd_code=pkd_main['kod']
                ).first()
                if not existing:
                    db.add(CompanyPKD(
                        company_id=company.id,
                        pkd_code=pkd_main['kod'],
                        pkd_description=pkd_main['opis'],
                        is_primary=True,
                        source='ekrs'
                    ))
                # Also update Company.pkd_code
                company.pkd_code = pkd_main['kod']
                company.pkd_description = pkd_main['opis']
            for pkd in parsed_data.get('pkd_pozostale', []):
                existing = db.query(CompanyPKD).filter_by(
                    company_id=company.id,
                    pkd_code=pkd['kod']
                ).first()
                if not existing:
                    db.add(CompanyPKD(
                        company_id=company.id,
                        pkd_code=pkd['kod'],
                        pkd_description=pkd['opis'],
                        is_primary=False,
                        source='ekrs'
                    ))
            # Import people (zarząd, wspólnicy) via the shared upsert helper.
            for person_data in parsed_data.get('zarzad', []):
                _import_krs_person(db, company.id, person_data, 'zarzad', pdf_path.name)
            for person_data in parsed_data.get('wspolnicy', []):
                _import_krs_person(db, company.id, person_data, 'wspolnik', pdf_path.name)
            for person_data in parsed_data.get('prokurenci', []):
                _import_krs_person(db, company.id, person_data, 'prokurent', pdf_path.name)
            # Import financial reports (deduplicated by reporting period).
            for report in parsed_data.get('sprawozdania_finansowe', []):
                existing = db.query(CompanyFinancialReport).filter_by(
                    company_id=company.id,
                    period_start=parse_date_str(report.get('okres_od')),
                    period_end=parse_date_str(report.get('okres_do'))
                ).first()
                if not existing:
                    db.add(CompanyFinancialReport(
                        company_id=company.id,
                        period_start=parse_date_str(report.get('okres_od')),
                        period_end=parse_date_str(report.get('okres_do')),
                        filed_at=parse_date_str(report.get('data_zlozenia')),
                        source='ekrs'
                    ))
            db.commit()
            logger.info(f"KRS audit completed for {company.name} (KRS: {company.krs})")
            return jsonify({
                'success': True,
                'message': f'Audyt KRS zakończony dla {company.name}',
                'company_id': company.id,
                'data': {
                    'krs': parsed_data.get('krs'),
                    'nazwa': parsed_data.get('nazwa'),
                    'nip': parsed_data.get('nip'),
                    'regon': parsed_data.get('regon'),
                    'kapital': float(parsed_data.get('kapital_zakladowy', 0) or 0),
                    'liczba_udzialow': parsed_data.get('liczba_udzialow'),
                    'zarzad_count': len(parsed_data.get('zarzad', [])),
                    'wspolnicy_count': len(parsed_data.get('wspolnicy', [])),
                    'prokurenci_count': len(parsed_data.get('prokurenci', [])),
                    'pkd_count': audit.pkd_count
                }
            })
        except Exception as e:
            # Mark the already-committed audit row as failed; partial company
            # changes from this attempt are discarded with them never committed.
            audit.status = 'error'
            audit.progress_percent = 0
            audit.error_message = str(e)
            db.commit()
            logger.error(f"KRS audit failed for {company.name}: {e}")
            return jsonify({
                'success': False,
                'error': f'Błąd parsowania PDF: {str(e)}'
            }), 500
    finally:
        db.close()
def parse_date_str(date_val):
    """Coerce a date-like value to a ``datetime.date``.

    Accepts ``None`` (returned as ``None``), an existing ``date`` object
    (returned as-is), or an ISO-style ``'YYYY-MM-DD'`` string. Any other
    type, or a string that does not parse, yields ``None`` rather than
    raising, since KRS PDF data is best-effort.
    """
    if date_val is None:
        return None
    if isinstance(date_val, date):
        return date_val
    if isinstance(date_val, str):
        try:
            return datetime.strptime(date_val, '%Y-%m-%d').date()
        except ValueError:
            # Malformed date string from the parser - treat as missing.
            # (Was a bare `except:`, which also swallowed SystemExit etc.)
            return None
    return None
def _import_krs_person(db, company_id, person_data, role_category, source_document):
    """Upsert a Person from parsed KRS data and link them to a company.

    Resolves an existing Person by PESEL (preferred) or by exact name,
    creating one when neither matches, then adds a CompanyPerson relation
    unless an identical (company, person, role_category) link already
    exists. Shareholder ('wspolnik') relations also carry share details.
    """
    pesel = person_data.get('pesel')
    surname = person_data.get('nazwisko', '')
    given_names = person_data.get('imiona', '')
    role_name = person_data.get('rola', '')

    # Resolve the person: PESEL match first, then exact name match.
    person = db.query(Person).filter_by(pesel=pesel).first() if pesel else None
    if person is None:
        person = db.query(Person).filter_by(
            nazwisko=surname,
            imiona=given_names
        ).first()
    if person is None:
        person = Person(pesel=pesel, nazwisko=surname, imiona=given_names)
        db.add(person)
        db.flush()  # obtain person.id before building the relation

    # Skip when the person already holds this role category at the company.
    duplicate = db.query(CompanyPerson).filter_by(
        company_id=company_id,
        person_id=person.id,
        role_category=role_category
    ).first()
    if duplicate:
        return

    relation = CompanyPerson(
        company_id=company_id,
        person_id=person.id,
        role=role_name,
        role_category=role_category,
        source='ekrs.ms.gov.pl',
        source_document=source_document,
        fetched_at=datetime.now()
    )
    if role_category == 'wspolnik':
        # Shareholders carry ownership details when the parser found them.
        relation.shares_count = person_data.get('udzialy_liczba')
        if person_data.get('udzialy_wartosc'):
            relation.shares_value = person_data['udzialy_wartosc']
        if person_data.get('udzialy_procent'):
            relation.shares_percent = person_data['udzialy_procent']
    db.add(relation)
@app.route('/api/krs/audit/batch', methods=['POST'])
@login_required
@limiter.limit("5 per hour")
def api_krs_audit_batch():
    """
    API: Run a KRS audit over every active company with a KRS number.

    Audits run sequentially to avoid overloading the system. Companies
    without a matching PDF in data/krs_pdfs are skipped; per-company
    outcomes and aggregate counters are returned in one response.
    Admin only.
    """
    if not current_user.is_admin:
        return jsonify({
            'success': False,
            'error': 'Brak uprawnień.'
        }), 403
    if not KRS_AUDIT_AVAILABLE:
        return jsonify({
            'success': False,
            'error': 'Usługa audytu KRS jest niedostępna.'
        }), 503
    db = SessionLocal()
    try:
        targets = db.query(Company).filter(
            Company.status == 'active',
            Company.krs.isnot(None),
            Company.krs != ''
        ).order_by(Company.name).all()
        results = {
            'total': len(targets),
            'success': 0,
            'failed': 0,
            'skipped': 0,
            'details': []
        }
        pdf_dir = Path('data/krs_pdfs')
        for firm in targets:
            matches = list(pdf_dir.glob(f'*{firm.krs}*.pdf'))
            if not matches:
                # No downloaded registry excerpt for this KRS number.
                results['skipped'] += 1
                results['details'].append({
                    'company': firm.name,
                    'krs': firm.krs,
                    'status': 'skipped',
                    'reason': 'Brak pliku PDF'
                })
                continue
            pdf_path = matches[0]
            try:
                parsed = parse_krs_pdf(str(pdf_path))
                # Only overwrite fields the parser actually extracted.
                if parsed.get('kapital_zakladowy'):
                    firm.capital_amount = parsed['kapital_zakladowy']
                if parsed.get('liczba_udzialow'):
                    firm.capital_shares_count = parsed['liczba_udzialow']
                firm.krs_last_audit_at = datetime.now()
                firm.krs_pdf_path = str(pdf_path)
                results['success'] += 1
                results['details'].append({
                    'company': firm.name,
                    'krs': firm.krs,
                    'status': 'success'
                })
            except Exception as exc:
                results['failed'] += 1
                results['details'].append({
                    'company': firm.name,
                    'krs': firm.krs,
                    'status': 'error',
                    'reason': str(exc)
                })
        # Single commit covers all successfully-updated companies.
        db.commit()
        return jsonify({
            'success': True,
            'message': f'Audyt zakończony: {results["success"]} sukces, '
                       f'{results["failed"]} błędów, {results["skipped"]} pominiętych',
            'results': results
        })
    finally:
        db.close()
@app.route('/api/krs/pdf/<int:company_id>')
@login_required
def api_krs_pdf_download(company_id):
    """API: stream the stored KRS PDF excerpt for a company (rendered inline)."""
    db = SessionLocal()
    try:
        company = db.query(Company).filter_by(id=company_id).first()
        if company is None:
            return jsonify({'error': 'Firma nie znaleziona'}), 404
        if not company.krs_pdf_path:
            return jsonify({'error': 'Brak pliku PDF'}), 404
        pdf_file = Path(company.krs_pdf_path)
        if not pdf_file.exists():
            # Path recorded in the DB but the file is gone from disk.
            return jsonify({'error': 'Plik PDF nie istnieje'}), 404
        return send_file(
            str(pdf_file),
            mimetype='application/pdf',
            as_attachment=False,
            download_name=pdf_file.name
        )
    finally:
        db.close()
# ============================================================
# ERROR HANDLERS
# ============================================================
@app.errorhandler(404)
def not_found(error):
    """Render the custom 404 page for unknown URLs."""
    return render_template('errors/404.html'), 404
@app.errorhandler(500)
def internal_error(error):
    """Render the custom 500 page for unhandled server errors."""
    return render_template('errors/500.html'), 500
# ============================================================
# ADMIN - SECURITY DASHBOARD
# ============================================================
@app.route('/admin/security')
@login_required
def admin_security():
    """Security dashboard: audit logs, security alerts and GeoIP blocking stats."""
    if not current_user.is_admin:
        flash('Brak uprawnień.', 'error')
        return redirect(url_for('dashboard'))
    db = SessionLocal()
    try:
        from sqlalchemy import func, desc
        # Latest 50 audit-log entries and security alerts.
        audit_logs = (db.query(AuditLog)
                      .order_by(desc(AuditLog.created_at))
                      .limit(50).all())
        alerts = (db.query(SecurityAlert)
                  .order_by(desc(SecurityAlert.created_at))
                  .limit(50).all())
        # Counters for the summary cards.
        new_alerts_count = (db.query(SecurityAlert)
                            .filter(SecurityAlert.status == 'new').count())
        locked_accounts = (db.query(User)
                           .filter(User.locked_until > datetime.now()).all())
        users_with_2fa = db.query(User).filter(User.totp_enabled == True).count()
        total_admins = db.query(User).filter(User.is_admin == True).count()
        alert_breakdown = (db.query(SecurityAlert.alert_type,
                                    func.count(SecurityAlert.id).label('count'))
                           .group_by(SecurityAlert.alert_type).all())
        stats = {
            'new_alerts': new_alerts_count,
            'locked_accounts': len(locked_accounts),
            'users_with_2fa': users_with_2fa,
            'total_admins': total_admins,
            'alert_breakdown': {row.alert_type: row.count for row in alert_breakdown}
        }
        # GeoIP section - populated only when blocking is enabled.
        from security_service import _get_geoip_enabled
        geoip_enabled = _get_geoip_enabled()
        geoip_stats = {'today': 0, 'this_month': 0, 'this_year': 0, 'total': 0, 'by_country': []}
        if geoip_enabled:
            today = datetime.now().date()
            first_of_month = today.replace(day=1)
            first_of_year = today.replace(month=1, day=1)

            def count_blocked(date_filter=None):
                # Count 'geo_blocked' alerts, optionally narrowed by a date filter.
                q = db.query(SecurityAlert).filter(
                    SecurityAlert.alert_type == 'geo_blocked')
                if date_filter is not None:
                    q = q.filter(date_filter)
                return q.count()

            geoip_stats['today'] = count_blocked(
                func.date(SecurityAlert.created_at) == today)
            geoip_stats['this_month'] = count_blocked(
                func.date(SecurityAlert.created_at) >= first_of_month)
            geoip_stats['this_year'] = count_blocked(
                func.date(SecurityAlert.created_at) >= first_of_year)
            geoip_stats['total'] = count_blocked()
            # Per-country breakdown, read from the alerts' details JSON.
            country_flags = {
                'RU': ('🇷🇺', 'Rosja'), 'CN': ('🇨🇳', 'Chiny'), 'KP': ('🇰🇵', 'Korea Płn.'),
                'IR': ('🇮🇷', 'Iran'), 'BY': ('🇧🇾', 'Białoruś'), 'SY': ('🇸🇾', 'Syria'),
                'VE': ('🇻🇪', 'Wenezuela'), 'CU': ('🇨🇺', 'Kuba')
            }
            blocked_alerts = db.query(SecurityAlert).filter(
                SecurityAlert.alert_type == 'geo_blocked'
            ).all()
            counts_by_code = {}
            for entry in blocked_alerts:
                if entry.details and 'country' in entry.details:
                    code = entry.details['country']
                    if code:
                        counts_by_code[code] = counts_by_code.get(code, 0) + 1
            # Most frequently blocked countries first.
            for code, cnt in sorted(counts_by_code.items(),
                                    key=lambda item: item[1], reverse=True):
                flag, label = country_flags.get(code, ('🏴', code))
                geoip_stats['by_country'].append(
                    {'code': code, 'flag': flag, 'name': label, 'count': cnt})
        return render_template(
            'admin/security_dashboard.html',
            audit_logs=audit_logs,
            alerts=alerts,
            locked_accounts=locked_accounts,
            stats=stats,
            geoip_enabled=geoip_enabled,
            geoip_stats=geoip_stats,
            generated_at=datetime.now()
        )
    finally:
        db.close()
@app.route('/admin/security/alert/<int:alert_id>/acknowledge', methods=['POST'])
@login_required
def acknowledge_security_alert(alert_id):
    """Mark a security alert as acknowledged by the current admin."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Not authorized'}), 403
    db = SessionLocal()
    try:
        alert = db.query(SecurityAlert).get(alert_id)
        if alert is None:
            return jsonify({'success': False, 'error': 'Alert not found'}), 404
        alert.status = 'acknowledged'
        alert.acknowledged_by = current_user.id
        alert.acknowledged_at = datetime.now()
        if SECURITY_SERVICE_AVAILABLE:
            # Record the action in the audit trail.
            log_audit(db, 'alert.acknowledge', 'security_alert', alert_id,
                      details={'alert_type': alert.alert_type})
        db.commit()
        return jsonify({'success': True})
    finally:
        db.close()
@app.route('/admin/security/alert/<int:alert_id>/resolve', methods=['POST'])
@login_required
def resolve_security_alert(alert_id):
    """Resolve a security alert, storing an optional resolution note."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Not authorized'}), 403
    note = request.form.get('note', '')
    db = SessionLocal()
    try:
        alert = db.query(SecurityAlert).get(alert_id)
        if alert is None:
            return jsonify({'success': False, 'error': 'Alert not found'}), 404
        alert.status = 'resolved'
        alert.resolution_note = note
        if not alert.acknowledged_by:
            # Resolving implies acknowledgement when nobody did so yet.
            alert.acknowledged_by = current_user.id
            alert.acknowledged_at = datetime.now()
        if SECURITY_SERVICE_AVAILABLE:
            # Record the action in the audit trail.
            log_audit(db, 'alert.resolve', 'security_alert', alert_id,
                      details={'alert_type': alert.alert_type, 'note': note})
        db.commit()
        flash('Alert został rozwiązany.', 'success')
        return redirect(url_for('admin_security'))
    finally:
        db.close()
@app.route('/admin/security/unlock-account/<int:user_id>', methods=['POST'])
@login_required
def unlock_account(user_id):
    """Clear the lockout and failed-login counter on a user account."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Not authorized'}), 403
    db = SessionLocal()
    try:
        account = db.query(User).get(user_id)
        if account is None:
            return jsonify({'success': False, 'error': 'User not found'}), 404
        account.locked_until = None
        account.failed_login_attempts = 0
        if SECURITY_SERVICE_AVAILABLE:
            # Record the unlock in the audit trail.
            log_audit(db, 'user.unlock', 'user', user_id, account.email)
        db.commit()
        flash(f'Konto {account.email} zostało odblokowane.', 'success')
        return redirect(url_for('admin_security'))
    finally:
        db.close()
@app.route('/api/admin/security/geoip-stats')
@login_required
def api_geoip_stats():
    """JSON endpoint backing the GeoIP stats auto-refresh widget (admin only)."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Not authorized'}), 403
    from sqlalchemy import func
    from security_service import _get_geoip_enabled
    db = SessionLocal()
    try:
        now = datetime.now()
        geoip_enabled = _get_geoip_enabled()
        if not geoip_enabled:
            return jsonify({
                'enabled': False,
                'timestamp': now.isoformat()
            })
        today = now.date()
        first_of_month = today.replace(day=1)
        first_of_year = today.replace(month=1, day=1)

        def blocked_count(date_filter=None):
            # Count 'geo_blocked' alerts, optionally narrowed by a date filter.
            q = db.query(SecurityAlert).filter(
                SecurityAlert.alert_type == 'geo_blocked')
            if date_filter is not None:
                q = q.filter(date_filter)
            return q.count()

        stats = {
            'enabled': True,
            'timestamp': now.isoformat(),
            'today': blocked_count(func.date(SecurityAlert.created_at) == today),
            'this_month': blocked_count(func.date(SecurityAlert.created_at) >= first_of_month),
            'this_year': blocked_count(func.date(SecurityAlert.created_at) >= first_of_year),
            'total': blocked_count()
        }
        # Per-country breakdown read from the alerts' details JSON.
        country_flags = {
            'RU': ('🇷🇺', 'Rosja'), 'CN': ('🇨🇳', 'Chiny'), 'KP': ('🇰🇵', 'Korea Płn.'),
            'IR': ('🇮🇷', 'Iran'), 'BY': ('🇧🇾', 'Białoruś'), 'SY': ('🇸🇾', 'Syria'),
            'VE': ('🇻🇪', 'Wenezuela'), 'CU': ('🇨🇺', 'Kuba')
        }
        counts_by_code = {}
        for entry in db.query(SecurityAlert).filter(
                SecurityAlert.alert_type == 'geo_blocked').all():
            if entry.details and 'country' in entry.details:
                code = entry.details['country']
                if code:
                    counts_by_code[code] = counts_by_code.get(code, 0) + 1
        by_country = []
        for code, cnt in sorted(counts_by_code.items(),
                                key=lambda item: item[1], reverse=True):
            flag, label = country_flags.get(code, ('🏴', code))
            by_country.append({'code': code, 'flag': flag, 'name': label, 'count': cnt})
        stats['by_country'] = by_country
        return jsonify(stats)
    finally:
        db.close()
# ============================================================
# ANNOUNCEMENTS (Ogłoszenia dla członków)
# ============================================================
def generate_slug(title):
    """
    Generate a URL-friendly slug from a title.

    Prefers `unidecode` for full transliteration; falls back to a manual
    mapping of Polish diacritics when the package is unavailable. The
    result is lowercase, hyphen-separated, at most 200 characters, and
    never starts or ends with a hyphen (the previous implementation could
    leave a trailing hyphen after truncation).
    """
    try:
        from unidecode import unidecode
        text = unidecode(title.lower())
    except ImportError:
        # Fallback without unidecode: transliterate Polish diacritics only.
        text = title.lower()
        replacements = {
            'ą': 'a', 'ć': 'c', 'ę': 'e', 'ł': 'l', 'ń': 'n',
            'ó': 'o', 'ś': 's', 'ź': 'z', 'ż': 'z'
        }
        for pl, en in replacements.items():
            text = text.replace(pl, en)
    # Remove special characters, collapse whitespace/hyphen runs into '-'.
    # (Module-level `import re` is used; the redundant local import was dropped.)
    text = re.sub(r'[^\w\s-]', '', text)
    text = re.sub(r'[-\s]+', '-', text).strip('-')
    # Limit slug length, then strip any hyphen left dangling by the cut.
    return text[:200].rstrip('-')
@app.route('/admin/announcements')
@login_required
def admin_announcements():
    """Admin panel - announcement list with status/category filters."""
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))
    from database import Announcement
    db = SessionLocal()
    try:
        status_filter = request.args.get('status', 'all')
        category_filter = request.args.get('category', 'all')
        query = db.query(Announcement)
        # 'all' disables the corresponding filter.
        if status_filter != 'all':
            query = query.filter(Announcement.status == status_filter)
        if category_filter != 'all':
            query = query.filter(Announcement.category == category_filter)
        # Pinned announcements first, newest first within each group.
        announcements = query.order_by(
            Announcement.is_pinned.desc(),
            Announcement.created_at.desc()
        ).all()
        return render_template('admin/announcements.html',
                               announcements=announcements,
                               now=datetime.now(),
                               status_filter=status_filter,
                               category_filter=category_filter,
                               categories=Announcement.CATEGORIES,
                               category_labels=Announcement.CATEGORY_LABELS,
                               statuses=Announcement.STATUSES,
                               status_labels=Announcement.STATUS_LABELS)
    finally:
        db.close()
@app.route('/admin/announcements/new', methods=['GET', 'POST'])
@login_required
def admin_announcements_new():
    """Admin panel - create a new announcement (saved as draft or published)."""
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))
    from database import Announcement
    if request.method == 'POST':
        db = SessionLocal()
        try:
            form = request.form
            title = form.get('title', '').strip()
            excerpt = form.get('excerpt', '').strip()
            content = form.get('content', '').strip()
            category = form.get('category', 'general')
            image_url = form.get('image_url', '').strip() or None
            external_link = form.get('external_link', '').strip() or None
            is_featured = 'is_featured' in form
            is_pinned = 'is_pinned' in form
            # Optional expiry timestamp from a datetime-local input;
            # an unparseable value is silently ignored.
            expires_at = None
            expires_at_str = form.get('expires_at', '').strip()
            if expires_at_str:
                try:
                    expires_at = datetime.strptime(expires_at_str, '%Y-%m-%dT%H:%M')
                except ValueError:
                    pass
            # Make the slug unique by appending a numeric suffix if needed.
            base_slug = generate_slug(title)
            slug = base_slug
            suffix = 1
            while db.query(Announcement).filter(Announcement.slug == slug).first():
                slug = f"{base_slug}-{suffix}"
                suffix += 1
            # The submit button decides between draft and immediate publish.
            action = form.get('action', 'draft')
            status = 'published' if action == 'publish' else 'draft'
            published_at = datetime.now() if status == 'published' else None
            db.add(Announcement(
                title=title,
                slug=slug,
                excerpt=excerpt or None,
                content=content,
                category=category,
                image_url=image_url,
                external_link=external_link,
                status=status,
                published_at=published_at,
                expires_at=expires_at,
                is_featured=is_featured,
                is_pinned=is_pinned,
                created_by=current_user.id
            ))
            db.commit()
            flash(f'Ogłoszenie zostało {"opublikowane" if status == "published" else "zapisane jako szkic"}.', 'success')
            return redirect(url_for('admin_announcements'))
        except Exception as e:
            db.rollback()
            logger.error(f"Error creating announcement: {e}")
            flash(f'Błąd podczas tworzenia ogłoszenia: {e}', 'error')
        finally:
            db.close()
    # GET request (or failed POST) - show the empty form.
    return render_template('admin/announcements_form.html',
                           announcement=None,
                           categories=Announcement.CATEGORIES,
                           category_labels=Announcement.CATEGORY_LABELS)
@app.route('/admin/announcements/<int:id>/edit', methods=['GET', 'POST'])
@login_required
def admin_announcements_edit(id):
    """Admin panel - edit an existing announcement (GET shows form, POST saves).

    The POST branch also handles status transitions via the 'action' form
    field ('publish'/'archive'/'draft'/'save') and may regenerate the slug.
    """
    if not current_user.is_admin:
        flash('Brak uprawnień do tej strony.', 'error')
        return redirect(url_for('dashboard'))
    from database import Announcement
    db = SessionLocal()
    try:
        announcement = db.query(Announcement).filter(Announcement.id == id).first()
        if not announcement:
            flash('Nie znaleziono ogłoszenia.', 'error')
            return redirect(url_for('admin_announcements'))
        if request.method == 'POST':
            announcement.title = request.form.get('title', '').strip()
            announcement.excerpt = request.form.get('excerpt', '').strip() or None
            announcement.content = request.form.get('content', '').strip()
            announcement.category = request.form.get('category', 'general')
            announcement.image_url = request.form.get('image_url', '').strip() or None
            announcement.external_link = request.form.get('external_link', '').strip() or None
            announcement.is_featured = 'is_featured' in request.form
            announcement.is_pinned = 'is_pinned' in request.form
            # Handle expires_at; an unparseable value keeps the previous one.
            expires_at_str = request.form.get('expires_at', '').strip()
            if expires_at_str:
                try:
                    announcement.expires_at = datetime.strptime(expires_at_str, '%Y-%m-%dT%H:%M')
                except ValueError:
                    pass
            else:
                announcement.expires_at = None
            # Regenerate slug if title changed significantly
            new_slug = generate_slug(announcement.title)
            # NOTE(review): this compares the FULL new slug against only the
            # first hyphen-separated segment of the stored slug, so it will
            # regenerate for almost any multi-word title — confirm intended.
            if new_slug != announcement.slug.split('-')[0]: # Check if base changed
                base_slug = new_slug
                slug = base_slug
                counter = 1
                # Ensure uniqueness against every other announcement.
                while db.query(Announcement).filter(
                    Announcement.slug == slug,
                    Announcement.id != id
                ).first():
                    slug = f"{base_slug}-{counter}"
                    counter += 1
                announcement.slug = slug
            # Handle status change requested via the submit button.
            action = request.form.get('action', 'save')
            if action == 'publish' and announcement.status != 'published':
                announcement.status = 'published'
                announcement.published_at = datetime.now()
            elif action == 'archive':
                announcement.status = 'archived'
            elif action == 'draft':
                announcement.status = 'draft'
            announcement.updated_at = datetime.now()
            db.commit()
            flash('Zmiany zostały zapisane.', 'success')
            return redirect(url_for('admin_announcements'))
        # GET request - show form
        return render_template('admin/announcements_form.html',
                               announcement=announcement,
                               categories=Announcement.CATEGORIES,
                               category_labels=Announcement.CATEGORY_LABELS)
    except Exception as e:
        db.rollback()
        logger.error(f"Error editing announcement {id}: {e}")
        flash(f'Błąd: {e}', 'error')
        return redirect(url_for('admin_announcements'))
    finally:
        db.close()
@app.route('/admin/announcements/<int:id>/publish', methods=['POST'])
@login_required
def admin_announcements_publish(id):
    """Publish an announcement (published_at is set only on first publish)."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    from database import Announcement
    db = SessionLocal()
    try:
        item = db.query(Announcement).filter(Announcement.id == id).first()
        if item is None:
            return jsonify({'success': False, 'error': 'Nie znaleziono ogłoszenia'}), 404
        item.status = 'published'
        if not item.published_at:
            item.published_at = datetime.now()
        item.updated_at = datetime.now()
        db.commit()
        return jsonify({'success': True, 'message': 'Ogłoszenie zostało opublikowane'})
    except Exception as e:
        db.rollback()
        logger.error(f"Error publishing announcement {id}: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
@app.route('/admin/announcements/<int:id>/archive', methods=['POST'])
@login_required
def admin_announcements_archive(id):
    """Archive an announcement so it no longer appears in the public list."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    from database import Announcement
    db = SessionLocal()
    try:
        item = db.query(Announcement).filter(Announcement.id == id).first()
        if item is None:
            return jsonify({'success': False, 'error': 'Nie znaleziono ogłoszenia'}), 404
        item.status = 'archived'
        item.updated_at = datetime.now()
        db.commit()
        return jsonify({'success': True, 'message': 'Ogłoszenie zostało zarchiwizowane'})
    except Exception as e:
        db.rollback()
        logger.error(f"Error archiving announcement {id}: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
@app.route('/admin/announcements/<int:id>/delete', methods=['POST'])
@login_required
def admin_announcements_delete(id):
    """Permanently delete an announcement."""
    if not current_user.is_admin:
        return jsonify({'success': False, 'error': 'Brak uprawnień'}), 403
    from database import Announcement
    db = SessionLocal()
    try:
        item = db.query(Announcement).filter(Announcement.id == id).first()
        if item is None:
            return jsonify({'success': False, 'error': 'Nie znaleziono ogłoszenia'}), 404
        db.delete(item)
        db.commit()
        return jsonify({'success': True, 'message': 'Ogłoszenie zostało usunięte'})
    except Exception as e:
        db.rollback()
        logger.error(f"Error deleting announcement {id}: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
    finally:
        db.close()
# ============================================================
# PUBLIC ANNOUNCEMENTS PAGE
# ============================================================
@app.route('/ogloszenia')
@login_required
@limiter.limit("60 per minute")
def announcements_list():
    """Paginated list of published announcements for logged-in members.

    Only announcements with status 'published' whose expiry is unset
    or in the future are shown. An optional ?category= filter is
    validated against Announcement.CATEGORIES. Pinned items sort
    first, then newest by publication date. Page size is fixed at 12.
    """
    from database import Announcement
    from sqlalchemy import or_, desc

    page = request.args.get('page', 1, type=int)
    category = request.args.get('category', '')
    per_page = 12

    db = SessionLocal()
    try:
        # Visible = published and not yet expired (NULL expiry = never expires).
        visible = db.query(Announcement).filter(
            Announcement.status == 'published',
            or_(Announcement.expires_at.is_(None),
                Announcement.expires_at > datetime.now()),
        )

        # Apply the category filter only for known category codes.
        if category and category in Announcement.CATEGORIES:
            visible = visible.filter(Announcement.category == category)

        # Pinned first, then newest.
        visible = visible.order_by(desc(Announcement.is_pinned),
                                   desc(Announcement.published_at))

        total = visible.count()
        total_pages = (total + per_page - 1) // per_page
        items = visible.offset((page - 1) * per_page).limit(per_page).all()

        return render_template('announcements/list.html',
                               announcements=items,
                               current_category=category,
                               categories=Announcement.CATEGORIES,
                               category_labels=Announcement.CATEGORY_LABELS,
                               page=page,
                               total_pages=total_pages,
                               total=total)
    finally:
        db.close()
@app.route('/ogloszenia/<slug>')
@login_required
@limiter.limit("60 per minute")
def announcement_detail(slug):
    """Announcement detail page for logged-in members.

    Looks up a published, non-expired announcement by slug, bumps its
    view counter, records a per-user read receipt (at most once per
    user), and renders the detail template with the reader list, read
    percentage and a sidebar of five other recent announcements.
    """
    from database import Announcement, AnnouncementRead, User
    from sqlalchemy import or_, desc, func

    db = SessionLocal()
    try:
        announcement = db.query(Announcement).filter(
            Announcement.slug == slug,
            Announcement.status == 'published',
            or_(
                Announcement.expires_at.is_(None),
                Announcement.expires_at > datetime.now()
            )
        ).first()
        if announcement is None:
            flash('Nie znaleziono ogłoszenia lub zostało usunięte.', 'error')
            return redirect(url_for('announcements_list'))

        # Bump the view counter (NULL-safe for legacy rows).
        announcement.views_count = (announcement.views_count or 0) + 1

        # Record a read receipt for this user, but only the first time.
        already_read = db.query(AnnouncementRead).filter(
            AnnouncementRead.announcement_id == announcement.id,
            AnnouncementRead.user_id == current_user.id
        ).first()
        if already_read is None:
            db.add(AnnouncementRead(
                announcement_id=announcement.id,
                user_id=current_user.id
            ))
        db.commit()

        # Reader list, most recent first.
        readers = db.query(AnnouncementRead).filter(
            AnnouncementRead.announcement_id == announcement.id
        ).order_by(desc(AnnouncementRead.read_at)).all()

        # Active + verified member count drives the read percentage;
        # 'or 1' guards against an empty user table.
        total_users = db.query(func.count(User.id)).filter(
            User.is_active == True,
            User.is_verified == True
        ).scalar() or 1
        readers_count = len(readers)
        read_percentage = round((readers_count / total_users) * 100, 1) if total_users > 0 else 0

        # Sidebar: five other recent, still-visible announcements.
        other_announcements = db.query(Announcement).filter(
            Announcement.status == 'published',
            Announcement.id != announcement.id,
            or_(
                Announcement.expires_at.is_(None),
                Announcement.expires_at > datetime.now()
            )
        ).order_by(desc(Announcement.published_at)).limit(5).all()

        return render_template('announcements/detail.html',
                               announcement=announcement,
                               other_announcements=other_announcements,
                               category_labels=Announcement.CATEGORY_LABELS,
                               readers=readers,
                               readers_count=readers_count,
                               total_users=total_users,
                               read_percentage=read_percentage)
    finally:
        db.close()
# ============================================================
# EXTERNAL CONTACTS (Kontakty zewnętrzne)
# ============================================================
@app.route('/kontakty')
@login_required
def contacts_list():
    """Paginated, filterable directory of external contacts.

    External contacts are people from outside organizations (offices,
    agencies, institutions, partners). Available to every logged-in
    member.

    Query params:
        page    -- 1-based page number (20 rows per page)
        q       -- free-text search over name, organization, position,
                   project and tags (case-insensitive substring match)
        type    -- organization type code, validated against
                   ExternalContact.ORGANIZATION_TYPES
        project -- substring filter on project_name
    """
    from database import ExternalContact
    # Fix: or_ was used below without being in scope. Sibling views
    # (announcements_list, announcement_detail) import it locally from
    # sqlalchemy, so do the same here to avoid a NameError when ?q= is set.
    from sqlalchemy import or_

    db = SessionLocal()
    try:
        page = request.args.get('page', 1, type=int)
        per_page = 20
        search = request.args.get('q', '').strip()
        org_type = request.args.get('type', '')
        project = request.args.get('project', '')

        query = db.query(ExternalContact).filter(ExternalContact.is_active == True)

        # Free-text search across the most useful text columns.
        if search:
            search_pattern = f'%{search}%'
            query = query.filter(
                or_(
                    ExternalContact.first_name.ilike(search_pattern),
                    ExternalContact.last_name.ilike(search_pattern),
                    ExternalContact.organization_name.ilike(search_pattern),
                    ExternalContact.position.ilike(search_pattern),
                    ExternalContact.project_name.ilike(search_pattern),
                    ExternalContact.tags.ilike(search_pattern)
                )
            )

        # Organization type filter (unknown codes are ignored).
        if org_type and org_type in ExternalContact.ORGANIZATION_TYPES:
            query = query.filter(ExternalContact.organization_type == org_type)

        # Project filter (substring match).
        if project:
            query = query.filter(ExternalContact.project_name.ilike(f'%{project}%'))

        # Stable ordering: organization first, then person.
        query = query.order_by(
            ExternalContact.organization_name,
            ExternalContact.last_name
        )

        # Pagination.
        total = query.count()
        contacts = query.offset((page - 1) * per_page).limit(per_page).all()
        total_pages = (total + per_page - 1) // per_page

        # Distinct non-empty project names feed the filter dropdown.
        projects = db.query(ExternalContact.project_name).filter(
            ExternalContact.is_active == True,
            ExternalContact.project_name.isnot(None),
            ExternalContact.project_name != ''
        ).distinct().order_by(ExternalContact.project_name).all()
        project_names = [p[0] for p in projects if p[0]]

        return render_template('contacts/list.html',
                               contacts=contacts,
                               page=page,
                               total_pages=total_pages,
                               total=total,
                               search=search,
                               org_type=org_type,
                               project=project,
                               org_types=ExternalContact.ORGANIZATION_TYPES,
                               org_type_labels=ExternalContact.ORGANIZATION_TYPE_LABELS,
                               project_names=project_names)
    finally:
        db.close()
@app.route('/kontakty/<int:contact_id>')
@login_required
def contact_detail(contact_id):
    """Detail card for a single external contact.

    Unknown or deactivated contacts redirect back to the list with a
    flash message. Also loads up to five other contacts from the same
    organization and computes whether the current user may edit
    (creator or admin).
    """
    from database import ExternalContact
    db = SessionLocal()
    try:
        person = db.query(ExternalContact).filter(
            ExternalContact.id == contact_id,
            ExternalContact.is_active == True
        ).first()
        if person is None:
            flash('Kontakt nie został znaleziony.', 'error')
            return redirect(url_for('contacts_list'))

        # Colleagues: other active contacts at the same organization.
        colleagues = db.query(ExternalContact).filter(
            ExternalContact.organization_name == person.organization_name,
            ExternalContact.id != person.id,
            ExternalContact.is_active == True
        ).order_by(ExternalContact.last_name).limit(5).all()

        # Editing is limited to the creator and admins.
        can_edit = (current_user.is_admin or
                    (person.created_by and person.created_by == current_user.id))

        return render_template('contacts/detail.html',
                               contact=person,
                               related_contacts=colleagues,
                               can_edit=can_edit,
                               org_type_labels=ExternalContact.ORGANIZATION_TYPE_LABELS)
    finally:
        db.close()
@app.route('/kontakty/dodaj', methods=['GET', 'POST'])
@login_required
def contact_add():
    """Create a new external contact.

    Any logged-in user may add a contact. GET renders an empty form;
    POST persists the contact and redirects to its detail page, or
    re-renders the form with a flash message on failure.
    """
    from database import ExternalContact

    if request.method == 'POST':
        def optional(field):
            # Blank/whitespace-only form fields are stored as NULL.
            return request.form.get(field, '').strip() or None

        db = SessionLocal()
        try:
            # related_links arrives as a JSON-encoded array; fall back
            # to an empty list when absent or malformed.
            raw_links = request.form.get('related_links', '[]')
            try:
                related_links = json.loads(raw_links) if raw_links else []
            except json.JSONDecodeError:
                related_links = []

            new_contact = ExternalContact(
                first_name=request.form.get('first_name', '').strip(),
                last_name=request.form.get('last_name', '').strip(),
                position=optional('position'),
                photo_url=optional('photo_url'),
                phone=optional('phone'),
                phone_secondary=optional('phone_secondary'),
                email=optional('email'),
                website=optional('website'),
                linkedin_url=optional('linkedin_url'),
                facebook_url=optional('facebook_url'),
                twitter_url=optional('twitter_url'),
                organization_name=request.form.get('organization_name', '').strip(),
                organization_type=request.form.get('organization_type', 'other'),
                organization_address=optional('organization_address'),
                organization_website=optional('organization_website'),
                organization_logo_url=optional('organization_logo_url'),
                project_name=optional('project_name'),
                project_description=optional('project_description'),
                source_type='manual',
                source_url=optional('source_url'),
                related_links=related_links,
                tags=optional('tags'),
                notes=optional('notes'),
                created_by=current_user.id
            )
            db.add(new_contact)
            db.commit()
            flash(f'Kontakt {new_contact.full_name} został dodany.', 'success')
            return redirect(url_for('contact_detail', contact_id=new_contact.id))
        except Exception as e:
            db.rollback()
            app.logger.error(f"Error adding contact: {e}")
            flash('Wystąpił błąd podczas dodawania kontaktu.', 'error')
        finally:
            db.close()

    # GET (or failed POST) - render the empty form.
    return render_template('contacts/form.html',
                           contact=None,
                           org_types=ExternalContact.ORGANIZATION_TYPES,
                           org_type_labels=ExternalContact.ORGANIZATION_TYPE_LABELS)
@app.route('/kontakty/<int:contact_id>/edytuj', methods=['GET', 'POST'])
@login_required
def contact_edit(contact_id):
    """Edit an external contact.

    Only the contact's creator or an admin may edit. GET renders the
    form pre-filled with current data; POST applies the changes and
    redirects to the detail view.
    """
    from database import ExternalContact
    db = SessionLocal()
    try:
        contact = db.query(ExternalContact).filter(
            ExternalContact.id == contact_id
        ).first()
        if contact is None:
            flash('Kontakt nie został znaleziony.', 'error')
            return redirect(url_for('contacts_list'))

        # Permission gate: creator or admin only.
        if not (current_user.is_admin or contact.created_by == current_user.id):
            flash('Nie masz uprawnień do edycji tego kontaktu.', 'error')
            return redirect(url_for('contact_detail', contact_id=contact_id))

        if request.method == 'POST':
            def optional(field):
                # Blank form fields are stored as NULL.
                return request.form.get(field, '').strip() or None

            # related_links is a JSON-encoded array; keep the previous
            # value if the payload is malformed.
            raw_links = request.form.get('related_links', '[]')
            try:
                related_links = json.loads(raw_links) if raw_links else []
            except json.JSONDecodeError:
                related_links = contact.related_links or []

            contact.first_name = request.form.get('first_name', '').strip()
            contact.last_name = request.form.get('last_name', '').strip()
            contact.position = optional('position')
            contact.photo_url = optional('photo_url')
            contact.phone = optional('phone')
            contact.phone_secondary = optional('phone_secondary')
            contact.email = optional('email')
            contact.website = optional('website')
            contact.linkedin_url = optional('linkedin_url')
            contact.facebook_url = optional('facebook_url')
            contact.twitter_url = optional('twitter_url')
            contact.organization_name = request.form.get('organization_name', '').strip()
            contact.organization_type = request.form.get('organization_type', 'other')
            contact.organization_address = optional('organization_address')
            contact.organization_website = optional('organization_website')
            contact.organization_logo_url = optional('organization_logo_url')
            contact.project_name = optional('project_name')
            contact.project_description = optional('project_description')
            contact.source_url = optional('source_url')
            contact.related_links = related_links
            contact.tags = optional('tags')
            contact.notes = optional('notes')
            contact.updated_at = datetime.now()
            db.commit()
            flash(f'Kontakt {contact.full_name} został zaktualizowany.', 'success')
            return redirect(url_for('contact_detail', contact_id=contact.id))

        # GET - render form with existing data.
        return render_template('contacts/form.html',
                               contact=contact,
                               org_types=ExternalContact.ORGANIZATION_TYPES,
                               org_type_labels=ExternalContact.ORGANIZATION_TYPE_LABELS)
    finally:
        db.close()
@app.route('/kontakty/<int:contact_id>/usun', methods=['POST'])
@login_required
def contact_delete(contact_id):
    """Soft-delete an external contact.

    Only the contact's creator or an admin may delete. The row is kept
    but flagged inactive so it disappears from all listings; redirects
    back to the contact list afterwards.
    """
    from database import ExternalContact
    db = SessionLocal()
    try:
        contact = db.query(ExternalContact).filter(
            ExternalContact.id == contact_id
        ).first()
        if contact is None:
            flash('Kontakt nie został znaleziony.', 'error')
            return redirect(url_for('contacts_list'))

        # Permission gate: creator or admin only.
        if not (current_user.is_admin or contact.created_by == current_user.id):
            flash('Nie masz uprawnień do usunięcia tego kontaktu.', 'error')
            return redirect(url_for('contact_detail', contact_id=contact_id))

        # Soft delete: flag inactive instead of removing the row.
        contact.is_active = False
        contact.updated_at = datetime.now()
        db.commit()
        flash(f'Kontakt {contact.full_name} został usunięty.', 'success')
        return redirect(url_for('contacts_list'))
    finally:
        db.close()
# ============================================================
# AI-ASSISTED EXTERNAL CONTACT CREATION
# ============================================================
AI_CONTACT_PARSE_PROMPT = """Jesteś asystentem systemu NordaBiz pomagającym dodawać kontakty zewnętrzne.
ZADANIE:
Przeanalizuj podany tekst i wyodrębnij informacje o osobach kontaktowych z zewnętrznych organizacji
(urzędy, agencje, instytucje, firmy partnerskie - osoby spoza Norda Biznes).
DANE WEJŚCIOWE:
```
{input_text}
```
TYPY ORGANIZACJI:
- government = Urząd (np. ministerstwo, urząd gminy/powiatu)
- agency = Agencja (np. ARP, PARP, agencje rozwoju)
- company = Firma (przedsiębiorstwa, spółki)
- ngo = Organizacja pozarządowa (fundacje, stowarzyszenia)
- university = Uczelnia (uniwersytety, politechniki)
- other = Inne
INSTRUKCJE:
1. Wyodrębnij każdą osobę kontaktową z tekstu
2. Dla każdej osoby zidentyfikuj:
- imię i nazwisko (WYMAGANE)
- stanowisko/funkcja (jeśli dostępne)
- telefon (jeśli dostępny)
- email (jeśli dostępny)
- organizacja (WYMAGANE - nazwa instytucji)
- typ organizacji (government/agency/company/ngo/university/other)
- projekt/kontekst (jeśli tekst wspomina o konkretnym projekcie)
- tagi (słowa kluczowe związane z osobą/projektem)
3. Jeśli brak imienia i nazwiska - pomiń osobę
4. Jeśli brak nazwy organizacji - pomiń osobę
ZWRÓĆ TYLKO CZYSTY JSON w dokładnie takim formacie (bez żadnego tekstu przed ani po):
{{
"analysis": "Krótki opis znalezionych kontaktów (1-2 zdania po polsku)",
"contacts": [
{{
"first_name": "Imię",
"last_name": "Nazwisko",
"position": "Stanowisko lub null",
"phone": "Numer telefonu lub null",
"email": "Email lub null",
"organization_name": "Nazwa organizacji",
"organization_type": "government|agency|company|ngo|university|other",
"project_name": "Nazwa projektu lub null",
"tags": "tagi, oddzielone, przecinkami",
"warnings": []
}}
]
}}"""
AI_CONTACT_IMAGE_PROMPT = """Jesteś asystentem systemu NordaBiz pomagającym dodawać kontakty zewnętrzne.
ZADANIE:
Przeanalizuj ten obraz (screenshot) i wyodrębnij informacje o osobach kontaktowych.
Szukaj: imion i nazwisk, stanowisk, telefonów, emaili, nazw organizacji, projektów.
TYPY ORGANIZACJI:
- government = Urząd (np. ministerstwo, urząd gminy/powiatu)
- agency = Agencja (np. ARP, PARP, agencje rozwoju)
- company = Firma (przedsiębiorstwa, spółki)
- ngo = Organizacja pozarządowa (fundacje, stowarzyszenia)
- university = Uczelnia (uniwersytety, politechniki)
- other = Inne
INSTRUKCJE:
1. Przeczytaj cały tekst widoczny na obrazie
2. Wyodrębnij każdą osobę kontaktową
3. Dla każdej osoby zidentyfikuj:
- imię i nazwisko (WYMAGANE)
- stanowisko/funkcja
- telefon
- email
- organizacja (WYMAGANE)
- typ organizacji
- projekt/kontekst
- tagi
4. Jeśli brak imienia/nazwiska lub organizacji - pomiń osobę
ZWRÓĆ TYLKO CZYSTY JSON w dokładnie takim formacie:
{{
"analysis": "Krótki opis znalezionych kontaktów (1-2 zdania po polsku)",
"contacts": [
{{
"first_name": "Imię",
"last_name": "Nazwisko",
"position": "Stanowisko lub null",
"phone": "Numer telefonu lub null",
"email": "Email lub null",
"organization_name": "Nazwa organizacji",
"organization_type": "government|agency|company|ngo|university|other",
"project_name": "Nazwa projektu lub null",
"tags": "tagi, oddzielone, przecinkami",
"warnings": []
}}
]
}}"""
@app.route('/api/contacts/ai-parse', methods=['POST'])
@login_required
def contacts_ai_parse():
    """Parse free text or an uploaded screenshot with AI and propose
    external-contact records for the "Dodaj z AI" modal.

    Accepts either:
      * a multipart form with input_type=image and a 'file' upload
        (PNG/JPG/JPEG/GIF/WEBP), analysed via the vision model, or
      * a JSON body {"input_type": "text", "content": "..."} analysed
        with the text prompt.

    The AI reply must contain a JSON object; proposed contacts are
    cross-checked against existing active contacts and annotated with
    duplicate warnings. Returns JSON with 'proposed_contacts'.
    """
    db = SessionLocal()
    try:
        # input_type may arrive via form data (image upload) or the JSON
        # body. Fix: silent=True, because a multipart request has no JSON
        # body and get_json() would otherwise raise an HTTP error here.
        input_type = request.form.get('input_type') or (request.get_json(silent=True) or {}).get('input_type', 'text')

        if input_type == 'image':
            # --- image branch: validate upload, analyse with vision model ---
            if 'file' not in request.files:
                return jsonify({'success': False, 'error': 'Brak pliku obrazu'}), 400
            file = request.files['file']
            if file.filename == '':
                return jsonify({'success': False, 'error': 'Nie wybrano pliku'}), 400

            # Extension whitelist (extension check only, not content sniffing).
            allowed_extensions = {'png', 'jpg', 'jpeg', 'gif', 'webp'}
            ext = file.filename.rsplit('.', 1)[-1].lower() if '.' in file.filename else ''
            if ext not in allowed_extensions:
                return jsonify({'success': False, 'error': 'Dozwolone formaty: PNG, JPG, JPEG, GIF, WEBP'}), 400

            # Persist to a temp file for the vision API; always cleaned up.
            import tempfile
            with tempfile.NamedTemporaryFile(delete=False, suffix=f'.{ext}') as tmp:
                file.save(tmp.name)
                temp_path = tmp.name
            try:
                service = gemini_service.get_gemini_service()
                ai_response = service.analyze_image(temp_path, AI_CONTACT_IMAGE_PROMPT)
            finally:
                # os is already imported at module level.
                if os.path.exists(temp_path):
                    os.unlink(temp_path)
        else:
            # --- text branch ---
            data = request.get_json(silent=True) or {}
            content = data.get('content', '').strip()
            if not content:
                return jsonify({'success': False, 'error': 'Brak treści do analizy'}), 400
            service = gemini_service.get_gemini_service()
            prompt = AI_CONTACT_PARSE_PROMPT.format(input_text=content)
            ai_response = service.generate_text(
                prompt=prompt,
                feature='ai_contact_parse',
                user_id=current_user.id,
                temperature=0.3  # low temperature: extraction, not creativity
            )

        # The model is instructed to return bare JSON; tolerate stray prose
        # around it by grabbing the outermost {...} span (re is module-level).
        json_match = re.search(r'\{[\s\S]*\}', ai_response)
        if not json_match:
            logger.error(f"AI contact response not valid JSON: {ai_response[:500]}")
            return jsonify({
                'success': False,
                'error': 'AI nie zwróciło prawidłowej odpowiedzi. Spróbuj ponownie.'
            }), 500
        try:
            parsed = json.loads(json_match.group())
        except json.JSONDecodeError as e:
            logger.error(f"JSON parse error: {e}, response: {ai_response[:500]}")
            return jsonify({
                'success': False,
                'error': 'Błąd parsowania odpowiedzi AI. Spróbuj ponownie.'
            }), 500

        # Annotate proposals that look like existing active contacts.
        from database import ExternalContact
        proposed_contacts = parsed.get('contacts', [])
        for contact in proposed_contacts:
            # Fix: the prompt allows JSON null for fields, so guard every
            # .get() against None before calling .strip().
            first_name = (contact.get('first_name') or '').strip()
            last_name = (contact.get('last_name') or '').strip()
            org_name = (contact.get('organization_name') or '').strip()
            if first_name and last_name and org_name:
                existing = db.query(ExternalContact).filter(
                    ExternalContact.first_name.ilike(first_name),
                    ExternalContact.last_name.ilike(last_name),
                    ExternalContact.organization_name.ilike(f'%{org_name}%'),
                    ExternalContact.is_active == True
                ).first()
                if existing:
                    # (contact.get('warnings') or []) also tolerates null.
                    contact['warnings'] = (contact.get('warnings') or []) + [
                        f'Podobny kontakt może już istnieć: {existing.full_name} @ {existing.organization_name}'
                    ]
                    contact['potential_duplicate_id'] = existing.id

        logger.info(f"User {current_user.email} used AI to parse contacts: {len(proposed_contacts)} found")
        return jsonify({
            'success': True,
            'ai_response': parsed.get('analysis', 'Analiza zakończona'),
            'proposed_contacts': proposed_contacts
        })
    except Exception as e:
        logger.error(f"Error in AI contact parse: {e}")
        return jsonify({'success': False, 'error': f'Błąd: {str(e)}'}), 500
    finally:
        db.close()
@app.route('/api/contacts/bulk-create', methods=['POST'])
@login_required
def contacts_bulk_create():
    """Create multiple external contacts from AI-confirmed proposals.

    Expects JSON {"contacts": [...]} as produced by /api/contacts/ai-parse.
    Each entry needs first_name, last_name and organization_name; entries
    that fail validation or insertion are reported under 'failed' without
    aborting the rest of the batch.
    """
    from database import ExternalContact
    db = SessionLocal()
    try:
        data = request.get_json(silent=True) or {}
        contacts_to_create = data.get('contacts', [])
        if not contacts_to_create:
            return jsonify({'success': False, 'error': 'Brak kontaktów do utworzenia'}), 400

        def clean(value):
            # Fix: the AI prompt allows JSON null for optional fields, and
            # None.strip() raised AttributeError per contact. Coerce null to
            # '' first; blank values are stored as NULL.
            return (value or '').strip() or None

        created = []
        failed = []
        for contact_data in contacts_to_create:
            # Required fields (null-safe).
            first_name = clean(contact_data.get('first_name')) or ''
            last_name = clean(contact_data.get('last_name')) or ''
            organization_name = clean(contact_data.get('organization_name')) or ''
            if not first_name or not last_name or not organization_name:
                failed.append({
                    'name': f"{first_name} {last_name}",
                    'error': 'Brak wymaganych danych (imię, nazwisko lub organizacja)'
                })
                continue
            try:
                # Fix: SAVEPOINT per contact so a bad row (e.g. constraint
                # violation on flush) does not poison the session and sink
                # the rest of the batch.
                with db.begin_nested():
                    contact = ExternalContact(
                        first_name=first_name,
                        last_name=last_name,
                        position=clean(contact_data.get('position')),
                        phone=clean(contact_data.get('phone')),
                        email=clean(contact_data.get('email')),
                        organization_name=organization_name,
                        organization_type=contact_data.get('organization_type') or 'other',
                        project_name=clean(contact_data.get('project_name')),
                        tags=clean(contact_data.get('tags')),
                        source_type='ai_import',
                        created_by=current_user.id
                    )
                    db.add(contact)
                    db.flush()  # assigns contact.id inside the savepoint
                created.append({
                    'id': contact.id,
                    'name': contact.full_name,
                    'organization': contact.organization_name
                })
            except Exception as e:
                failed.append({
                    'name': f"{first_name} {last_name}",
                    'error': str(e)
                })

        db.commit()
        logger.info(f"User {current_user.email} bulk created {len(created)} contacts via AI")
        return jsonify({
            'success': True,
            'created': created,
            'failed': failed,
            'message': f'Utworzono {len(created)} kontaktów' + (f', {len(failed)} błędów' if failed else '')
        })
    except Exception as e:
        db.rollback()
        logger.error(f"Error in contacts bulk create: {e}")
        return jsonify({'success': False, 'error': f'Błąd: {str(e)}'}), 500
    finally:
        db.close()
# ============================================================
# HONEYPOT ENDPOINTS (trap for malicious bots)
# ============================================================
@app.route('/wp-admin')
@app.route('/wp-admin/<path:path>')
@app.route('/wp-login.php')
@app.route('/administrator')
@app.route('/phpmyadmin')
@app.route('/phpmyadmin/<path:path>')
@app.route('/.env')
@app.route('/.git/config')
@app.route('/xmlrpc.php')
@app.route('/config.php')
@app.route('/admin.php')
def honeypot_trap(path=None):
    """Honeypot for common bot probes (WordPress, phpMyAdmin, dotfiles).

    Logs the probing client to the security log, then answers with the
    standard 404 page so the trap is indistinguishable from a missing
    route.
    """
    # Prefer the first hop of X-Forwarded-For when behind a proxy.
    client_ip = request.headers.get('X-Forwarded-For', request.remote_addr)
    if client_ip and ',' in client_ip:
        client_ip = client_ip.split(',', 1)[0].strip()
    security_logger.warning(f"HONEYPOT ip={client_ip} path={request.path} ua={request.user_agent.string[:100]}")
    # 404 instead of 403 so we don't reveal this is a trap.
    return render_template('errors/404.html'), 404
# ============================================================
# MAIN
# ============================================================
if __name__ == '__main__':
    # Development entry point; PORT and FLASK_ENV come from the environment
    # loaded at module import time.
    listen_port = int(os.getenv('PORT', 5000))
    dev_mode = os.getenv('FLASK_ENV') == 'development'
    logger.info(f"Starting Norda Biznes Hub on port {listen_port}")
    app.run(host='0.0.0.0', port=listen_port, debug=dev_mode)