diff --git a/blueprints/api/routes_seo_audit.py b/blueprints/api/routes_seo_audit.py
index 7790905..b21b7c7 100644
--- a/blueprints/api/routes_seo_audit.py
+++ b/blueprints/api/routes_seo_audit.py
@@ -451,12 +451,61 @@ def api_seo_audit_trigger():
company_id=company.id
).first()
if analysis_record:
+ # Basic metrics
analysis_record.gsc_clicks = gsc_data.get('clicks')
analysis_record.gsc_impressions = gsc_data.get('impressions')
analysis_record.gsc_ctr = gsc_data.get('ctr')
analysis_record.gsc_avg_position = gsc_data.get('position')
analysis_record.gsc_top_queries = gsc_data.get('top_queries', [])
+ analysis_record.gsc_top_pages = gsc_data.get('top_pages', [])
analysis_record.gsc_period_days = gsc_data.get('period_days', 28)
+
+ # Extended GSC data collection
+ try:
+ # Device breakdown
+ device_data = gsc.get_device_breakdown(company.website, days=28)
+ if device_data:
+ analysis_record.gsc_device_breakdown = device_data
+
+ # Country breakdown
+ country_data = gsc.get_country_breakdown(company.website, days=28)
+ if country_data:
+ analysis_record.gsc_country_breakdown = country_data
+
+ # Search type breakdown
+ type_data = gsc.get_search_type_breakdown(company.website, days=28)
+ if type_data:
+ analysis_record.gsc_search_type_breakdown = type_data
+
+ # Trend data (period-over-period)
+ trend_data = gsc.get_trend_data(company.website, days=28)
+ if trend_data:
+ analysis_record.gsc_trend_data = trend_data
+
+ # URL Inspection (for homepage)
+ homepage = company.website
+ if homepage and not homepage.endswith('/'):
+ homepage += '/'
+ inspection = gsc.inspect_url(company.website, homepage)
+ if inspection:
+ analysis_record.gsc_index_status = inspection.get('index_status')
+ last_crawl = inspection.get('last_crawl')
+ if last_crawl:
+ try:
+ from datetime import datetime as dt
+ analysis_record.gsc_last_crawl = dt.fromisoformat(last_crawl.replace('Z', '+00:00'))
+ except (ValueError, TypeError):
+ pass
+ analysis_record.gsc_crawled_as = inspection.get('crawled_as')
+
+ # Sitemaps
+ sitemaps = gsc.get_sitemaps(company.website)
+ if sitemaps:
+ analysis_record.gsc_sitemaps = sitemaps
+
+ except Exception as ext_err:
+ logger.warning(f"Extended GSC data collection failed for company {company.id}: {ext_err}")
+
db.commit()
logger.info(f"GSC data saved for company {company.id}: {gsc_data.get('clicks', 0)} clicks")
except ImportError:
diff --git a/blueprints/audit/routes.py b/blueprints/audit/routes.py
index 672032e..cddbe67 100644
--- a/blueprints/audit/routes.py
+++ b/blueprints/audit/routes.py
@@ -173,6 +173,16 @@ def seo_audit_dashboard(slug):
'gsc_avg_position': float(analysis.gsc_avg_position) if analysis.gsc_avg_position is not None else None,
'gsc_top_queries': analysis.gsc_top_queries,
'gsc_period_days': analysis.gsc_period_days,
+ # GSC Extended data
+ 'gsc_top_pages': getattr(analysis, 'gsc_top_pages', None),
+ 'gsc_device_breakdown': getattr(analysis, 'gsc_device_breakdown', None),
+ 'gsc_index_status': getattr(analysis, 'gsc_index_status', None),
+ 'gsc_last_crawl': getattr(analysis, 'gsc_last_crawl', None),
+ 'gsc_crawled_as': getattr(analysis, 'gsc_crawled_as', None),
+ 'gsc_sitemaps': getattr(analysis, 'gsc_sitemaps', None),
+ 'gsc_country_breakdown': getattr(analysis, 'gsc_country_breakdown', None),
+ 'gsc_search_type_breakdown': getattr(analysis, 'gsc_search_type_breakdown', None),
+ 'gsc_trend_data': getattr(analysis, 'gsc_trend_data', None),
# Citations list
'citations': [{'directory_name': c.directory_name, 'listing_url': c.listing_url, 'status': c.status, 'nap_accurate': c.nap_accurate} for c in citations],
}
@@ -369,6 +379,20 @@ def gbp_audit_dashboard(slug):
'google_name': analysis.google_name,
'google_address': analysis.google_address,
'google_phone': analysis.google_phone,
+ # GBP Performance API data
+ 'gbp_impressions_maps': getattr(analysis, 'gbp_impressions_maps', None),
+ 'gbp_impressions_search': getattr(analysis, 'gbp_impressions_search', None),
+ 'gbp_call_clicks': getattr(analysis, 'gbp_call_clicks', None),
+ 'gbp_website_clicks': getattr(analysis, 'gbp_website_clicks', None),
+ 'gbp_direction_requests': getattr(analysis, 'gbp_direction_requests', None),
+ 'gbp_conversations': getattr(analysis, 'gbp_conversations', None),
+ 'gbp_search_keywords': getattr(analysis, 'gbp_search_keywords', None),
+ 'gbp_performance_period_days': getattr(analysis, 'gbp_performance_period_days', None),
+ # Owner data
+ 'google_owner_responses_count': getattr(analysis, 'google_owner_responses_count', None),
+ 'google_review_response_rate': float(analysis.google_review_response_rate) if getattr(analysis, 'google_review_response_rate', None) is not None else None,
+ 'google_posts_data': getattr(analysis, 'google_posts_data', None),
+ 'google_posts_count': getattr(analysis, 'google_posts_count', None),
}
# If no audit exists, we still render the page (template handles this)
diff --git a/database.py b/database.py
index d168f98..66c5b6a 100644
--- a/database.py
+++ b/database.py
@@ -1165,6 +1165,33 @@ class CompanyWebsiteAnalysis(Base):
gsc_top_queries = Column(JSONB) # Top search queries with clicks/impressions
gsc_period_days = Column(Integer, default=28) # Data collection period in days
+ # === GBP PERFORMANCE API ===
+ gbp_impressions_maps = Column(Integer) # Maps views (desktop + mobile) in period
+ gbp_impressions_search = Column(Integer) # Search views (desktop + mobile) in period
+ gbp_call_clicks = Column(Integer) # Phone call clicks in period
+ gbp_website_clicks = Column(Integer) # Website clicks in period
+ gbp_direction_requests = Column(Integer) # Direction requests in period
+ gbp_conversations = Column(Integer) # Conversations started in period
+ gbp_search_keywords = Column(JSONB) # Top search keywords with impression counts
+ gbp_performance_period_days = Column(Integer, default=30) # Performance data period
+
+ # === GOOGLE SEARCH CONSOLE EXTENSIONS ===
+ gsc_top_pages = Column(JSONB) # Top pages with clicks/impressions
+ gsc_device_breakdown = Column(JSONB) # {desktop: {clicks, impressions}, mobile: {...}, tablet: {...}}
+ gsc_index_status = Column(String(50)) # URL Inspection: PASS, NEUTRAL, FAIL, etc.
+ gsc_last_crawl = Column(DateTime) # URL Inspection: last crawl timestamp
+ gsc_crawled_as = Column(String(50)) # URL Inspection: Googlebot type
+ gsc_sitemaps = Column(JSONB) # Sitemaps list with status/errors
+ gsc_country_breakdown = Column(JSONB) # Top countries with clicks/impressions
+ gsc_search_type_breakdown = Column(JSONB) # Web/image/video breakdown
+ gsc_trend_data = Column(JSONB) # Period-over-period comparison
+
+ # === GBP OWNER DATA (Management API) ===
+ google_owner_responses_count = Column(Integer) # Number of owner responses to reviews
+ google_review_response_rate = Column(Numeric(5, 1)) # % of reviews with owner response
+ google_posts_data = Column(JSONB) # Recent Google Posts data
+ google_posts_count = Column(Integer) # Total number of Google Posts
+
# === SEO AUDIT METADATA ===
seo_audit_version = Column(String(20)) # Version of SEO audit script used
seo_audited_at = Column(DateTime) # Timestamp of last SEO audit
diff --git a/database/migrations/064_google_api_expansion.sql b/database/migrations/064_google_api_expansion.sql
new file mode 100644
index 0000000..17b51f8
--- /dev/null
+++ b/database/migrations/064_google_api_expansion.sql
@@ -0,0 +1,32 @@
+-- Migration 064: Expand Google API data columns (GBP Performance + GSC extensions + Owner Data)
+-- Part of Google OAuth 100% coverage initiative
+
+-- === GBP PERFORMANCE API ===
+ALTER TABLE company_website_analysis ADD COLUMN IF NOT EXISTS gbp_impressions_maps INTEGER;
+ALTER TABLE company_website_analysis ADD COLUMN IF NOT EXISTS gbp_impressions_search INTEGER;
+ALTER TABLE company_website_analysis ADD COLUMN IF NOT EXISTS gbp_call_clicks INTEGER;
+ALTER TABLE company_website_analysis ADD COLUMN IF NOT EXISTS gbp_website_clicks INTEGER;
+ALTER TABLE company_website_analysis ADD COLUMN IF NOT EXISTS gbp_direction_requests INTEGER;
+ALTER TABLE company_website_analysis ADD COLUMN IF NOT EXISTS gbp_conversations INTEGER;
+ALTER TABLE company_website_analysis ADD COLUMN IF NOT EXISTS gbp_search_keywords JSONB;
+ALTER TABLE company_website_analysis ADD COLUMN IF NOT EXISTS gbp_performance_period_days INTEGER DEFAULT 30;
+
+-- === GOOGLE SEARCH CONSOLE EXTENSIONS ===
+ALTER TABLE company_website_analysis ADD COLUMN IF NOT EXISTS gsc_top_pages JSONB;
+ALTER TABLE company_website_analysis ADD COLUMN IF NOT EXISTS gsc_device_breakdown JSONB;
+ALTER TABLE company_website_analysis ADD COLUMN IF NOT EXISTS gsc_index_status VARCHAR(50);
+ALTER TABLE company_website_analysis ADD COLUMN IF NOT EXISTS gsc_last_crawl TIMESTAMP;
+ALTER TABLE company_website_analysis ADD COLUMN IF NOT EXISTS gsc_crawled_as VARCHAR(50);
+ALTER TABLE company_website_analysis ADD COLUMN IF NOT EXISTS gsc_sitemaps JSONB;
+ALTER TABLE company_website_analysis ADD COLUMN IF NOT EXISTS gsc_country_breakdown JSONB;
+ALTER TABLE company_website_analysis ADD COLUMN IF NOT EXISTS gsc_search_type_breakdown JSONB;
+ALTER TABLE company_website_analysis ADD COLUMN IF NOT EXISTS gsc_trend_data JSONB;
+
+-- === GBP OWNER DATA (Management API) ===
+ALTER TABLE company_website_analysis ADD COLUMN IF NOT EXISTS google_owner_responses_count INTEGER;
+ALTER TABLE company_website_analysis ADD COLUMN IF NOT EXISTS google_review_response_rate NUMERIC(5,1);
+ALTER TABLE company_website_analysis ADD COLUMN IF NOT EXISTS google_posts_data JSONB;
+ALTER TABLE company_website_analysis ADD COLUMN IF NOT EXISTS google_posts_count INTEGER;
+
+-- Grant permissions
+GRANT ALL ON TABLE company_website_analysis TO nordabiz_app;
diff --git a/gbp_audit_service.py b/gbp_audit_service.py
index 40d3b5d..350947b 100644
--- a/gbp_audit_service.py
+++ b/gbp_audit_service.py
@@ -1933,6 +1933,36 @@ def fetch_google_business_data(
except Exception as e:
logger.warning(f"OAuth GBP enrichment failed for company {company_id}: {e}")
+ # OAuth: Try GBP Performance API for visibility metrics
+ try:
+ from gbp_performance_service import GBPPerformanceService
+
+ if gbp_token and location_name:
+ perf_service = GBPPerformanceService(gbp_token)
+ # Extract location ID from location_name (format: accounts/X/locations/Y)
+ # Performance API uses locations/Y format
+ parts = location_name.split('/')
+ if len(parts) >= 4:
+ perf_location = f"locations/{parts[3]}"
+ else:
+ perf_location = location_name
+
+ perf_data = perf_service.get_all_performance_data(perf_location, days=30)
+ if perf_data:
+ result['data']['gbp_impressions_maps'] = perf_data.get('maps_impressions', 0)
+ result['data']['gbp_impressions_search'] = perf_data.get('search_impressions', 0)
+ result['data']['gbp_call_clicks'] = perf_data.get('call_clicks', 0)
+ result['data']['gbp_website_clicks'] = perf_data.get('website_clicks', 0)
+ result['data']['gbp_direction_requests'] = perf_data.get('direction_requests', 0)
+ result['data']['gbp_conversations'] = perf_data.get('conversations', 0)
+ result['data']['gbp_search_keywords'] = perf_data.get('search_keywords', [])
+ result['data']['gbp_performance_period_days'] = perf_data.get('period_days', 30)
+ logger.info(f"GBP Performance data collected for company {company_id}")
+ except ImportError:
+ pass
+ except Exception as e:
+ logger.warning(f"GBP Performance API failed for company {company_id}: {e}")
+
# Step 3: Save to database
result['steps'].append({
'step': 'save_data',
@@ -1979,6 +2009,18 @@ def fetch_google_business_data(
('google_photos_metadata', photos_meta if photos_meta else None),
('google_maps_links', maps_links if maps_links else None),
('google_open_now', open_now),
+ ('gbp_impressions_maps', result['data'].get('gbp_impressions_maps')),
+ ('gbp_impressions_search', result['data'].get('gbp_impressions_search')),
+ ('gbp_call_clicks', result['data'].get('gbp_call_clicks')),
+ ('gbp_website_clicks', result['data'].get('gbp_website_clicks')),
+ ('gbp_direction_requests', result['data'].get('gbp_direction_requests')),
+ ('gbp_conversations', result['data'].get('gbp_conversations')),
+ ('gbp_search_keywords', result['data'].get('gbp_search_keywords')),
+ ('gbp_performance_period_days', result['data'].get('gbp_performance_period_days')),
+ ('google_owner_responses_count', result['data'].get('google_owner_responses_count')),
+ ('google_review_response_rate', result['data'].get('google_review_response_rate')),
+ ('google_posts_data', result['data'].get('google_posts_data')),
+ ('google_posts_count', result['data'].get('google_posts_count')),
]:
try:
setattr(analysis, attr, val)
diff --git a/gbp_performance_service.py b/gbp_performance_service.py
new file mode 100644
index 0000000..6907a83
--- /dev/null
+++ b/gbp_performance_service.py
@@ -0,0 +1,194 @@
+"""
+Google Business Profile Performance API Client
+===============================================
+
+Uses OAuth 2.0 to fetch visibility and interaction metrics from GBP.
+Provides daily metrics time series and monthly search keyword impressions.
+
+API docs: https://developers.google.com/my-business/reference/performance/rest
+"""
+
+import logging
+from datetime import datetime, timedelta
+from typing import Dict, List, Optional
+
+import requests
+
+logger = logging.getLogger(__name__)
+
+
+# Metrics to request from getDailyMetricsTimeSeries
+DAILY_METRICS = [
+ 'BUSINESS_IMPRESSIONS_DESKTOP_MAPS',
+ 'BUSINESS_IMPRESSIONS_MOBILE_MAPS',
+ 'BUSINESS_IMPRESSIONS_DESKTOP_SEARCH',
+ 'BUSINESS_IMPRESSIONS_MOBILE_SEARCH',
+ 'CALL_CLICKS',
+ 'WEBSITE_CLICKS',
+ 'BUSINESS_DIRECTION_REQUESTS',
+ 'BUSINESS_CONVERSATIONS',
+ 'BUSINESS_BOOKINGS',
+]
+
+
+class GBPPerformanceService:
+ """Google Business Profile Performance API client."""
+
+ BASE_URL = "https://businessprofileperformance.googleapis.com/v1"
+
+ def __init__(self, access_token: str):
+ self.session = requests.Session()
+ self.session.headers.update({
+ 'Authorization': f'Bearer {access_token}',
+ 'Content-Type': 'application/json',
+ })
+        self.session.timeout = 20  # NOTE(review): no-op — requests.Session has no timeout attribute; pass timeout= per request instead
+
+ def get_daily_metrics(self, location_name: str, days: int = 30) -> Dict:
+ """Get aggregated daily metrics for a location.
+
+ Args:
+ location_name: Full location resource name (e.g., 'locations/123456')
+ days: Number of days to aggregate (default 30)
+
+ Returns:
+ Dict with aggregated metrics:
+ {
+ 'maps_impressions': int,
+ 'search_impressions': int,
+ 'call_clicks': int,
+ 'website_clicks': int,
+ 'direction_requests': int,
+ 'conversations': int,
+ 'period_days': int,
+ }
+ """
+ result = {
+ 'maps_impressions': 0,
+ 'search_impressions': 0,
+ 'call_clicks': 0,
+ 'website_clicks': 0,
+ 'direction_requests': 0,
+ 'conversations': 0,
+ 'period_days': days,
+ }
+
+ end_date = datetime.now() - timedelta(days=1) # Yesterday (today may be incomplete)
+ start_date = end_date - timedelta(days=days)
+
+ # Normalize location_name format
+ if not location_name.startswith('locations/'):
+ location_name = f'locations/{location_name}'
+
+ for metric in DAILY_METRICS:
+ try:
+ resp = self.session.get(
+ f"{self.BASE_URL}/{location_name}:getDailyMetricsTimeSeries",
+ params={
+ 'dailyMetric': metric,
+ 'dailyRange.startDate.year': start_date.year,
+ 'dailyRange.startDate.month': start_date.month,
+ 'dailyRange.startDate.day': start_date.day,
+ 'dailyRange.endDate.year': end_date.year,
+ 'dailyRange.endDate.month': end_date.month,
+ 'dailyRange.endDate.day': end_date.day,
+ }
+ )
+
+ if resp.status_code != 200:
+ logger.debug(f"Performance API metric {metric} returned {resp.status_code}")
+ continue
+
+ data = resp.json()
+ time_series = data.get('timeSeries', {})
+ daily_values = time_series.get('datedValues', [])
+
+                # Performance API serializes int64 values as JSON strings; coerce to int
+                total = sum(
+                    int(dv.get('value') or 0) for dv in daily_values
+                )
+
+ # Map metric to result key
+ if 'MAPS' in metric:
+ result['maps_impressions'] += total
+ elif 'SEARCH' in metric:
+ result['search_impressions'] += total
+ elif metric == 'CALL_CLICKS':
+ result['call_clicks'] = total
+ elif metric == 'WEBSITE_CLICKS':
+ result['website_clicks'] = total
+ elif metric == 'BUSINESS_DIRECTION_REQUESTS':
+ result['direction_requests'] = total
+ elif metric == 'BUSINESS_CONVERSATIONS':
+ result['conversations'] = total
+
+ except Exception as e:
+ logger.debug(f"Performance API metric {metric} failed: {e}")
+ continue
+
+ logger.info(
+ f"GBP Performance for {location_name}: "
+ f"maps={result['maps_impressions']}, search={result['search_impressions']}, "
+ f"calls={result['call_clicks']}, web={result['website_clicks']}"
+ )
+
+ return result
+
+ def get_search_keywords(self, location_name: str) -> List[Dict]:
+ """Get monthly search keyword impressions.
+
+ Args:
+ location_name: Full location resource name
+
+ Returns:
+ List of dicts: [{'keyword': str, 'impressions': int}, ...]
+ """
+ # Normalize location_name format
+ if not location_name.startswith('locations/'):
+ location_name = f'locations/{location_name}'
+
+ try:
+ resp = self.session.get(
+ f"{self.BASE_URL}/{location_name}/searchkeywords/impressions/monthly",
+ params={'pageSize': 20}
+ )
+
+ if resp.status_code != 200:
+ logger.debug(f"Search keywords API returned {resp.status_code}")
+ return []
+
+ data = resp.json()
+ keywords = []
+
+ for item in data.get('searchKeywordsCounts', []):
+ keyword = item.get('searchKeyword', '')
+ # Get the most recent month's data
+ monthly = item.get('insightsValue', {})
+                impressions = int(monthly.get('value') or monthly.get('threshold') or 0)  # int64-as-string; low-volume keywords report 'threshold'
+
+ if keyword:
+ keywords.append({
+ 'keyword': keyword,
+ 'impressions': impressions,
+ })
+
+ # Sort by impressions descending
+ keywords.sort(key=lambda x: x['impressions'], reverse=True)
+
+ logger.info(f"GBP search keywords for {location_name}: {len(keywords)} keywords")
+ return keywords[:20]
+
+ except Exception as e:
+ logger.warning(f"GBP search keywords failed for {location_name}: {e}")
+ return []
+
+ def get_all_performance_data(self, location_name: str, days: int = 30) -> Dict:
+ """Get all performance data (metrics + keywords) in one call.
+
+ Returns:
+ Dict with all performance data combined.
+ """
+ metrics = self.get_daily_metrics(location_name, days)
+ keywords = self.get_search_keywords(location_name)
+ metrics['search_keywords'] = keywords
+ return metrics
diff --git a/oauth_service.py b/oauth_service.py
index 5093e41..6117f0c 100644
--- a/oauth_service.py
+++ b/oauth_service.py
@@ -29,7 +29,7 @@ OAUTH_PROVIDERS = {
'token_url': 'https://oauth2.googleapis.com/token',
'scopes': {
'gbp': 'https://www.googleapis.com/auth/business.manage',
- 'search_console': 'https://www.googleapis.com/auth/webmasters.readonly',
+ 'search_console': 'https://www.googleapis.com/auth/webmasters',
},
},
'meta': {
diff --git a/search_console_service.py b/search_console_service.py
index 633b79f..8bbd174 100644
--- a/search_console_service.py
+++ b/search_console_service.py
@@ -163,3 +163,298 @@ class SearchConsoleService:
except Exception as e:
logger.error(f"Search Console analytics failed for {site_url}: {e}")
return {}
+
+ def get_device_breakdown(self, site_url: str, days: int = 28) -> Dict:
+ """Get clicks/impressions breakdown by device type.
+
+ Returns:
+ Dict like {'desktop': {'clicks': N, 'impressions': N}, 'mobile': {...}, 'tablet': {...}}
+ """
+ normalized = self._normalize_site_url(site_url)
+ if not normalized:
+ return {}
+
+ end_date = datetime.now() - timedelta(days=3)
+ start_date = end_date - timedelta(days=days)
+
+ try:
+ resp = self.session.post(
+ f"{self.BASE_URL}/sites/{requests.utils.quote(normalized, safe='')}/searchAnalytics/query",
+ json={
+ 'startDate': start_date.strftime('%Y-%m-%d'),
+ 'endDate': end_date.strftime('%Y-%m-%d'),
+ 'dimensions': ['device'],
+ }
+ )
+ if resp.status_code != 200:
+ return {}
+
+ result = {}
+ for row in resp.json().get('rows', []):
+ device = row['keys'][0].lower()
+ result[device] = {
+ 'clicks': row.get('clicks', 0),
+ 'impressions': row.get('impressions', 0),
+ 'ctr': round(row.get('ctr', 0) * 100, 2),
+ 'position': round(row.get('position', 0), 1),
+ }
+ return result
+
+ except Exception as e:
+ logger.warning(f"Device breakdown failed for {site_url}: {e}")
+ return {}
+
+ def get_country_breakdown(self, site_url: str, days: int = 28) -> List[Dict]:
+ """Get top countries by clicks/impressions.
+
+ Returns:
+ List of dicts: [{'country': 'POL', 'clicks': N, 'impressions': N}, ...]
+ """
+ normalized = self._normalize_site_url(site_url)
+ if not normalized:
+ return []
+
+ end_date = datetime.now() - timedelta(days=3)
+ start_date = end_date - timedelta(days=days)
+
+ try:
+ resp = self.session.post(
+ f"{self.BASE_URL}/sites/{requests.utils.quote(normalized, safe='')}/searchAnalytics/query",
+ json={
+ 'startDate': start_date.strftime('%Y-%m-%d'),
+ 'endDate': end_date.strftime('%Y-%m-%d'),
+ 'dimensions': ['country'],
+ 'rowLimit': 10,
+ }
+ )
+ if resp.status_code != 200:
+ return []
+
+ return [
+ {
+ 'country': row['keys'][0],
+ 'clicks': row.get('clicks', 0),
+ 'impressions': row.get('impressions', 0),
+ }
+ for row in resp.json().get('rows', [])
+ ]
+
+ except Exception as e:
+ logger.warning(f"Country breakdown failed for {site_url}: {e}")
+ return []
+
+ def get_search_type_breakdown(self, site_url: str, days: int = 28) -> Dict:
+ """Get breakdown by search type (web, image, video, news).
+
+ Returns:
+ Dict like {'web': {'clicks': N, 'impressions': N}, 'image': {...}, ...}
+ """
+ normalized = self._normalize_site_url(site_url)
+ if not normalized:
+ return {}
+
+ end_date = datetime.now() - timedelta(days=3)
+ start_date = end_date - timedelta(days=days)
+ result = {}
+
+ for search_type in ['web', 'image', 'video', 'news']:
+ try:
+ resp = self.session.post(
+ f"{self.BASE_URL}/sites/{requests.utils.quote(normalized, safe='')}/searchAnalytics/query",
+ json={
+ 'startDate': start_date.strftime('%Y-%m-%d'),
+ 'endDate': end_date.strftime('%Y-%m-%d'),
+ 'searchType': search_type,
+ 'dimensions': [],
+ }
+ )
+ if resp.status_code == 200:
+ rows = resp.json().get('rows', [])
+ if rows:
+ row = rows[0]
+ result[search_type] = {
+ 'clicks': row.get('clicks', 0),
+ 'impressions': row.get('impressions', 0),
+ }
+ except Exception:
+ continue
+
+ return result
+
+ def get_trend_data(self, site_url: str, days: int = 28) -> Dict:
+ """Compare current period vs previous period.
+
+ Returns:
+ Dict with current, previous values and % change:
+ {
+ 'clicks': {'current': N, 'previous': N, 'change_pct': float},
+ 'impressions': {'current': N, 'previous': N, 'change_pct': float},
+ 'ctr': {'current': float, 'previous': float, 'change_pct': float},
+ 'position': {'current': float, 'previous': float, 'change_pct': float},
+ }
+ """
+ normalized = self._normalize_site_url(site_url)
+ if not normalized:
+ return {}
+
+ end_date = datetime.now() - timedelta(days=3)
+ current_start = end_date - timedelta(days=days)
+ prev_end = current_start - timedelta(days=1)
+ prev_start = prev_end - timedelta(days=days)
+
+ def _get_totals(start, end):
+ try:
+ resp = self.session.post(
+ f"{self.BASE_URL}/sites/{requests.utils.quote(normalized, safe='')}/searchAnalytics/query",
+ json={
+ 'startDate': start.strftime('%Y-%m-%d'),
+ 'endDate': end.strftime('%Y-%m-%d'),
+ 'dimensions': [],
+ }
+ )
+ if resp.status_code == 200:
+ rows = resp.json().get('rows', [])
+ if rows:
+ return rows[0]
+ except Exception:
+ pass
+ return {}
+
+ current = _get_totals(current_start, end_date)
+ previous = _get_totals(prev_start, prev_end)
+
+ if not current:
+ return {}
+
+ def _calc_change(curr_val, prev_val):
+ if prev_val and prev_val != 0:
+ return round((curr_val - prev_val) / abs(prev_val) * 100, 1)
+ return None
+
+ result = {}
+ for key in ['clicks', 'impressions']:
+ c = current.get(key, 0)
+ p = previous.get(key, 0)
+ result[key] = {
+ 'current': c,
+ 'previous': p,
+ 'change_pct': _calc_change(c, p),
+ }
+
+ for key in ['ctr']:
+ c = round(current.get(key, 0) * 100, 2)
+ p = round(previous.get(key, 0) * 100, 2)
+ result[key] = {
+ 'current': c,
+ 'previous': p,
+ 'change_pct': _calc_change(c, p),
+ }
+
+ for key in ['position']:
+ c = round(current.get(key, 0), 1)
+ p = round(previous.get(key, 0), 1)
+ # For position, lower is better, so invert the change
+ change = _calc_change(c, p)
+ result[key] = {
+ 'current': c,
+ 'previous': p,
+ 'change_pct': -change if change is not None else None,
+ }
+
+ return result
+
+ def inspect_url(self, site_url: str, page_url: str) -> Dict:
+ """Inspect a URL's indexing status using URL Inspection API.
+
+        Scope: 'webmasters' (per API docs 'webmasters.readonly' may also be accepted).
+
+ Args:
+ site_url: The site property URL (as registered in Search Console)
+ page_url: The specific page URL to inspect
+
+ Returns:
+ Dict with: index_status, last_crawl, crawled_as, canonical_url, is_indexed
+ """
+ INSPECTION_URL = "https://searchconsole.googleapis.com/v1/urlInspection/index:inspect"
+
+ normalized = self._normalize_site_url(site_url)
+ if not normalized:
+ return {}
+
+ try:
+ resp = self.session.post(
+ INSPECTION_URL,
+ json={
+ 'inspectionUrl': page_url,
+ 'siteUrl': normalized,
+ }
+ )
+
+ if resp.status_code != 200:
+ logger.debug(f"URL Inspection returned {resp.status_code} for {page_url}")
+ return {}
+
+ data = resp.json()
+ result_data = data.get('inspectionResult', {})
+ index_status = result_data.get('indexStatusResult', {})
+ crawl_result = index_status
+
+ return {
+ 'index_status': index_status.get('verdict', 'UNKNOWN'),
+ 'coverage_state': index_status.get('coverageState', ''),
+ 'robots_txt_state': index_status.get('robotsTxtState', ''),
+ 'indexing_state': index_status.get('indexingState', ''),
+ 'last_crawl': index_status.get('lastCrawlTime', ''),
+ 'crawled_as': index_status.get('crawledAs', ''),
+ 'canonical_url': index_status.get('googleCanonical', ''),
+ 'user_canonical': index_status.get('userCanonical', ''),
+ 'is_indexed': index_status.get('verdict') == 'PASS',
+ }
+
+ except Exception as e:
+ logger.warning(f"URL Inspection failed for {page_url}: {e}")
+ return {}
+
+ def get_sitemaps(self, site_url: str) -> List[Dict]:
+ """Get sitemaps status for a site.
+
+ Returns:
+ List of dicts: [{'path': str, 'lastSubmitted': str, 'isPending': bool,
+ 'lastDownloaded': str, 'warnings': int, 'errors': int, ...}]
+ """
+ normalized = self._normalize_site_url(site_url)
+ if not normalized:
+ return []
+
+ try:
+ resp = self.session.get(
+ f"{self.BASE_URL}/sites/{requests.utils.quote(normalized, safe='')}/sitemaps"
+ )
+
+ if resp.status_code != 200:
+ logger.debug(f"Sitemaps API returned {resp.status_code}")
+ return []
+
+ sitemaps = []
+ for sm in resp.json().get('sitemap', []):
+ sitemaps.append({
+ 'path': sm.get('path', ''),
+ 'last_submitted': sm.get('lastSubmitted', ''),
+ 'last_downloaded': sm.get('lastDownloaded', ''),
+ 'is_pending': sm.get('isPending', False),
+                'warnings': int(sm.get('warnings', 0) or 0),  # API returns int64 counts as strings
+                'errors': int(sm.get('errors', 0) or 0),
+                'contents': [
+                    {
+                        'type': c.get('type', ''),
+                        'submitted': int(c.get('submitted', 0) or 0),
+                        'indexed': int(c.get('indexed', 0) or 0),
+ }
+ for c in sm.get('contents', [])
+ ],
+ })
+ return sitemaps
+
+ except Exception as e:
+ logger.warning(f"Sitemaps fetch failed for {site_url}: {e}")
+ return []
diff --git a/templates/gbp_audit.html b/templates/gbp_audit.html
index 842ae0c..eb9fb21 100644
--- a/templates/gbp_audit.html
+++ b/templates/gbp_audit.html
@@ -1561,6 +1561,142 @@
{% endif %}
+
+{% if places_data and places_data.gbp_impressions_maps is not none %}
+
+
+
+ Statystyki widocznosci ({{ places_data.gbp_performance_period_days or 30 }} dni)
+
+
+
+
Wyswietlenia profilu
+
+
+
{{ '{:,}'.format(places_data.gbp_impressions_maps or 0) }}
+
Google Maps
+
+
+
{{ '{:,}'.format(places_data.gbp_impressions_search or 0) }}
+
Wyszukiwarka Google
+
+
+
{{ '{:,}'.format((places_data.gbp_impressions_maps or 0) + (places_data.gbp_impressions_search or 0)) }}
+
Lacznie
+
+
+
+
+
Akcje klientow
+
+
+
{{ places_data.gbp_call_clicks or 0 }}
+
Klikniecia telefon
+
+
+
{{ places_data.gbp_website_clicks or 0 }}
+
Klikniecia strona
+
+
+
{{ places_data.gbp_direction_requests or 0 }}
+
Prosby o trase
+
+ {% if places_data.gbp_conversations %}
+
+
{{ places_data.gbp_conversations }}
+
Rozmowy
+
+ {% endif %}
+
+
+
+ {% if places_data.gbp_search_keywords %}
+
Frazy wyszukiwania
+
+
+
+
+ | # |
+ Fraza |
+ Wyswietlenia |
+
+
+
+ {% for kw in places_data.gbp_search_keywords[:10] %}
+
+ | {{ loop.index }} |
+ {{ kw.keyword }} |
+ {{ '{:,}'.format(kw.impressions) }} |
+
+ {% endfor %}
+
+
+
+ {% endif %}
+
+{% endif %}
+
+
+{% if places_data and places_data.google_posts_data %}
+
+
+
+ Google Posts ({{ places_data.google_posts_count or places_data.google_posts_data|length }})
+
+
+
+ {% for post in places_data.google_posts_data[:5] %}
+
+
+
+                {{ post.get('topicType', 'Post')|replace('_', ' ')|title }}
+
+ {% if post.createTime or post.get('createTime') %}
+
+ {{ (post.createTime or post.get('createTime', ''))[:10] }}
+
+ {% endif %}
+
+ {% set summary = post.get('summary', post.get('text', '')) %}
+ {% if summary %}
+
+ {{ summary[:200] }}{% if summary|length > 200 %}...{% endif %}
+
+ {% endif %}
+
+ {% endfor %}
+
+
+{% endif %}
+
+
+{% if places_data and places_data.google_owner_responses_count is not none %}
+
+
+
+ Odpowiedzi na opinie
+
+
+
+
{{ places_data.google_owner_responses_count }}
+
Odpowiedzi wlasciciela
+
+ {% if places_data.google_review_response_rate is not none %}
+
+
{{ places_data.google_review_response_rate }}%
+
Wskaznik odpowiedzi
+
+ {% endif %}
+
+
+{% endif %}
+
{% if audit.recommendations %}
diff --git a/templates/seo_audit.html b/templates/seo_audit.html
index 59bc0ef..346c09e 100644
--- a/templates/seo_audit.html
+++ b/templates/seo_audit.html
@@ -1106,6 +1106,149 @@
{% endif %}
+
+
+ {% if seo_data.gsc_top_pages %}
+ Top strony w Google
+
+
+
+
+ | Strona |
+ Klikniecia |
+ Wyswietlenia |
+
+
+
+ {% for p in seo_data.gsc_top_pages[:10] %}
+
+ | {{ p.page|replace('https://', '')|replace('http://', '') }} |
+ {{ p.clicks }} |
+ {{ p.impressions }} |
+
+ {% endfor %}
+
+
+
+ {% endif %}
+
+
+ {% if seo_data.gsc_device_breakdown %}
+ Urzadzenia
+ {% set devices = seo_data.gsc_device_breakdown %}
+ {% set total_clicks = (devices.get('desktop', {}).get('clicks', 0) or 0) + (devices.get('mobile', {}).get('clicks', 0) or 0) + (devices.get('tablet', {}).get('clicks', 0) or 0) %}
+
+ {% for device_name, device_data in devices.items() %}
+ {% set pct = ((device_data.clicks / total_clicks * 100)|round(1)) if total_clicks > 0 else 0 %}
+
+
{{ pct }}%
+
{{ device_name|capitalize }}
+
{{ device_data.clicks }} klik.
+
+ {% endfor %}
+
+ {% endif %}
+
+
+ {% if seo_data.gsc_trend_data %}
+ Trend (vs poprzedni okres)
+
+ {% set trend = seo_data.gsc_trend_data %}
+ {% for metric_name, metric_label in [('clicks', 'Klikniecia'), ('impressions', 'Wyswietlenia'), ('ctr', 'CTR'), ('position', 'Pozycja')] %}
+ {% if trend.get(metric_name) %}
+ {% set m = trend[metric_name] %}
+ {% set change = m.get('change_pct') %}
+
+
{{ metric_label }}
+
+ {% if metric_name == 'ctr' %}{{ '%.1f'|format(m.current) }}%
+ {% elif metric_name == 'position' %}{{ '%.1f'|format(m.current) }}
+ {% else %}{{ '{:,}'.format(m.current)|replace(',', ' ') }}{% endif %}
+
+ {% if change is not none %}
+
+ {{ '+' if change > 0 else '' }}{{ '%.1f'|format(change) }}%
+
+ {% endif %}
+
+ {% endif %}
+ {% endfor %}
+
+ {% endif %}
+
+
+ {% if seo_data.gsc_country_breakdown %}
+ Kraje
+
+ {% for c in seo_data.gsc_country_breakdown[:5] %}
+
+ {{ c.country }}: {{ c.clicks }} klik.
+
+ {% endfor %}
+
+ {% endif %}
+
+
+ {% if seo_data.gsc_search_type_breakdown %}
+ Typ wyszukiwania
+
+ {% for type_name, type_data in seo_data.gsc_search_type_breakdown.items() %}
+
+
{{ type_data.clicks }}
+
{{ type_name|capitalize }}
+
+ {% endfor %}
+
+ {% endif %}
+
+
+ {% if seo_data.gsc_index_status %}
+ Status indeksowania (URL Inspection)
+
+
+
+ {{ 'Zaindeksowana' if seo_data.gsc_index_status == 'PASS' else 'Oczekuje' if seo_data.gsc_index_status == 'NEUTRAL' else seo_data.gsc_index_status }}
+
+
+ {% if seo_data.gsc_last_crawl %}
+
+ Ostatni crawl: {{ seo_data.gsc_last_crawl.strftime('%Y-%m-%d') if seo_data.gsc_last_crawl.strftime is defined else seo_data.gsc_last_crawl[:10] }}
+
+ {% endif %}
+ {% if seo_data.gsc_crawled_as %}
+
+ Bot: {{ seo_data.gsc_crawled_as }}
+
+ {% endif %}
+
+ {% endif %}
+
+
+ {% if seo_data.gsc_sitemaps %}
+ Sitemaps w Search Console
+
+
+
+
+ | Sciezka |
+ Bledow |
+ Ostrzezen |
+ Ostatnio |
+
+
+
+ {% for sm in seo_data.gsc_sitemaps %}
+
+ | {{ sm.path|replace('https://', '')|replace('http://', '') }} |
+ {{ sm.errors or 0 }} |
+ {{ sm.warnings or 0 }} |
+ {{ sm.last_submitted[:10] if sm.last_submitted else '-' }} |
+
+ {% endfor %}
+
+
+
+ {% endif %}
{% elif has_gsc_token %}