from fastapi import Depends, APIRouter, status, Query, Path, HTTPException, Request
from internal.models import *
from internal.database import fetch_one, fetch_all, execute_query, response_success, raise_if_exists, raise_if_not_found
from dependencies import get_current_active_user
from collections import defaultdict
from limiter_config import limiter
router = APIRouter(
    prefix="/statistics",
    tags=['Statistics']
)
# Daily writing statistics (blogs + diaries)
@router.get("/list")
@limiter.limit("10/minute")
async def statistic_list(request: Request):
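    """Return per-day writing counts across diaries and blogs.

    Illustrative response data shape (example values only):
        [{"date": "2024-05-01", "writCount": 3}, ...]
    """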
    select_query = """
        SELECT DATE(create_at) AS date, COUNT(*) AS writCount
        FROM (
            SELECT create_at FROM diarys
            UNION ALL
            SELECT create_at FROM blogs
        ) AS combined
        GROUP BY date
        ORDER BY date DESC;"""
    rows = fetch_all(select_query)
    return response_success(rows, "statistics list fetched successfully")

# Record all blog and diary data


# Fetch homepage data
@router.get("/homepage")
@limiter.limit("10/minute")
async def get_homepage_data(request: Request):
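    """Return all blogs and diaries merged and sorted by create_at, newest first."""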
    # Fetch blog rows
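    # Note: with a LEFT JOIN, MySQL's JSON_ARRAYAGG aggregates NULLs, so a blog
    # with no labels comes back with labelnames = [null]; filter downstream if needed.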
    select_blog = """
        SELECT blogs.id, blogs.blogtitle, blogs.blogcontent, blogs.create_at, blogs.imglink,
        blogs.wordcount, blogtypes.typename, JSON_ARRAYAGG(labels.labelname) AS labelnames
        FROM blogs
        LEFT JOIN `blogtypes` ON blogs.typeid = blogtypes.id
        LEFT JOIN blog_label ON blog_label.blogid = blogs.id
        LEFT JOIN labels ON blog_label.labelid = labels.id
        GROUP BY blogs.id, blogs.blogtitle, blogs.blogcontent, blogs.create_at, blogs.imglink,
        blogs.wordcount, blogtypes.typename
        ORDER BY blogs.create_at DESC
    """
    blog_list = fetch_all(select_blog)

    # Fetch diary rows
    select_diary = """
        SELECT diarys.id, diarys.diarytitle, diarys.diarycontent, diarys.imglink,
        diarytypes.typename, diarys.create_at, diarys.update_at 
        FROM diarys 
        LEFT JOIN diarytypes ON diarys.typeid = diarytypes.id
        ORDER BY diarys.create_at DESC
    """
    diary_list = fetch_all(select_diary)

    # Merge blog and diary rows, newest first; skip rows missing create_at
    # so the sort cannot fail on a None key.
    combined = [row for row in blog_list + diary_list if row.get('create_at')]
    combined_data = sorted(combined, key=lambda row: row['create_at'], reverse=True)

    return {"data": combined_data}

@router.get("/searchtitle")
@limiter.limit("10/minute")
async def search_homepage_data(request: Request, title: str = Query("", description="Title to search for")):
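    """Search blogs and diaries whose title contains `title`, newest first."""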
    # Query blog rows matching the title
    select_blog = """
        SELECT 
            blogs.id, 
            blogs.blogtitle, 
            blogs.blogcontent, 
            blogs.create_at, 
            blogs.imglink,
            blogs.readnum,
            blogs.wordcount, 
            blogtypes.typename, 
            JSON_ARRAYAGG(labels.labelname) AS labelnames
        FROM blogs
        LEFT JOIN blogtypes ON blogs.typeid = blogtypes.id
        LEFT JOIN blog_label ON blog_label.blogid = blogs.id
        LEFT JOIN labels ON blog_label.labelid = labels.id
        WHERE blogs.blogtitle LIKE %s
        GROUP BY 
            blogs.id, 
            blogs.blogtitle, 
            blogs.blogcontent, 
            blogs.create_at, 
            blogs.imglink,
            blogs.wordcount, 
            blogtypes.typename
        ORDER BY blogs.create_at DESC
    """
    blog_list = fetch_all(select_blog, (f"%{title}%",))

    # Query diary rows matching the title
    select_diary = """
        SELECT 
            diarys.id, 
            diarys.diarytitle, 
            diarys.diarycontent,
            diarys.readnum, 
            diarys.imglink,
            diarytypes.typename, 
            diarys.create_at, 
            diarys.update_at 
        FROM diarys 
        LEFT JOIN diarytypes ON diarys.typeid = diarytypes.id
        WHERE diarys.diarytitle LIKE %s
        ORDER BY diarys.create_at DESC
    """
    diary_list = fetch_all(select_diary, (f"%{title}%",))

    # Merge blog and diary rows, newest first; skip rows missing create_at
    # so the sort cannot fail on a None key.
    combined = [row for row in blog_list + diary_list if row.get('create_at')]
    combined_data = sorted(combined, key=lambda row: row['create_at'], reverse=True)

    return {"data": combined_data}