# FastAPI Dependencies
from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm
from fastapi import FastAPI, Query, HTTPException, Depends, status
from fastapi.responses import JSONResponse
from datetime import datetime, timedelta, date
from fastapi.middleware.cors import CORSMiddleware

# Other Dependencies
from typing import Optional, DefaultDict
from passlib.context import CryptContext
from collections import defaultdict
from jose import JWTError, jwt
import numpy as np
import yfinance as yf
import pandas as pd
import asyncio
import json
import uuid
import re

# Database Dependencies
from pymongo.errors import PyMongoError
from pymongo import ASCENDING, DESCENDING
from motor.motor_asyncio import AsyncIOMotorClient

from models import *
from helper_functions.login import *
from helper_functions.orders import *
from helper_functions.portfolio import *
from helper_functions.ohlc_report import *
from helper_functions.report_details import *
from helper_functions.market_analysis import *
from helper_functions.portfolio_report import *
from helper_functions.heatmap_analysis import *

# Load Credentials
with open(r"/var/www/html/trade_iq/env.json", "r", encoding="utf-8") as f:
# with open(r"C:\Users\user\Documents\FastAPIs\papertrade_apis\env.json", "r", encoding="utf-8") as f:
    data = json.loads(f.read())
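# The code below expects env.json to contain at least a "live" section with
# "mongo_url" and "database" keys, e.g. (placeholder values):
# {"live": {"mongo_url": "mongodb://<host>:27017", "database": "<db_name>"}}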

# MongoDB Connection
client = AsyncIOMotorClient(data['live']['mongo_url'], maxPoolSize=100, minPoolSize=20, retryWrites=True, maxIdleTimeMS=90000)
db = client[data['live']['database']]
orders_collection = db["orders"]
nseData_collection = db["nse_data1"]
bseData_collection = db["bse_data1"]
holdings_collection = db["holdings"]
levels_collection = db["levels"]
Auth_collection = db["user"]
reportUpload_collection = db["ohlc_reports"]
portfolioUpload_collection = db["portfolio_reports"]
websocket_collection = db["zerodha_credentials"]
queryFormulas_collection = db["query_formulas"]
sections_collection = db["sections"]
today = date.today()

# Secret key, algorithm, and token expiration (move these to environment variables or env.json rather than hardcoding)
SECRET_KEY = uuid.uuid4().hex  # NOTE: regenerated on every restart, which invalidates all previously issued tokens; replace with a persistent secure key
ALGORITHM = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES = 30
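# A more robust sketch (assumption: a "secret_key" entry exists in env.json or the
# environment; adjust the key name to whatever the deployment actually provides,
# and add `import os` if enabling this):
# SECRET_KEY = data.get("live", {}).get("secret_key") or os.environ.get("SECRET_KEY", uuid.uuid4().hex)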

# Password context for hashing
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")

# OAuth2 password bearer token
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")

# Configurations FastAPI
app = FastAPI()
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Allow all origins, or specify a list of domains
    allow_credentials=True,
    allow_methods=["*"],  # Allow all methods (GET, POST, etc.)
    allow_headers=["*"],  # Allow all headers
)


"""---------- Other Application APIs ----------"""
@app.post('/bse_index')
async def get_bse_index(request: IndexAPI):
    try:
        symbol = request.symbol.upper().strip()

        mycol = db["indices_bse"]
        result = await mycol.find({"symbols": symbol}).to_list(None)

        if not result:
            raise HTTPException(status_code=404, detail=f"No index found with {symbol}, enter a valid symbol")

        indexes_bse = [doc["Index"] for doc in result if "Index" in doc]
        return {"symbol": symbol, "indexes": indexes_bse}

    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error: {e}")

@app.post('/nse_index')
async def get_nse_index(request: IndexAPI):
    try:
        symbol = request.symbol.upper().strip()

        mycol = db["indices_nse"]
        result = await mycol.find({"symbols": symbol}).to_list(None)

        if not result:
            raise HTTPException(status_code=404, detail=f"No index found with {symbol}, enter a valid symbol")

        indexes_nse = [doc["Index"] for doc in result if "Index" in doc]
        return {"symbol": symbol, "indexes": indexes_nse}

    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error: {e}")
"""---------- Other Application APIs ----------"""

"""Market Analysis APIs"""
@app.post("/analyze-multiple")
async def analyze_multiple_symbols(
    request: AnalyzeMultipleRequest
):
    """
    Endpoint to analyze price data for all symbols of a given instrument type concurrently.
    Supports pagination using length and offset.
    """

    try:
        # Fetch data for all symbols associated with the instrument type
        symbols_data = []
        if request.exchange == "NSE":
            cursor = nseData_collection.find({"Instrument": request.instrument}, {"_id": 0})
        elif request.exchange == "BSE":
            cursor = bseData_collection.find({"Instrument": request.instrument}, {"_id": 0})
        else:
            raise HTTPException(status_code=400, detail=f"Unsupported exchange: {request.exchange}")

        # Convert cursor to list asynchronously
        symbols_data = await cursor.to_list(length=None)

        if not symbols_data:
            raise HTTPException(status_code=404, detail=f"No symbols found for instrument: {request.instrument}")

        total_items = len(symbols_data)

        # Apply offset and length
        start_index = request.offset
        end_index = request.offset + request.length

        if start_index >= total_items:
            raise HTTPException(status_code=404, detail="Offset exceeds available data")

        paginated_data = symbols_data[start_index:end_index]

        # Process the paginated symbols concurrently
        tasks = [process_single_symbol_ma(symbol_data, request.duration) for symbol_data in paginated_data]
        results = await asyncio.gather(*tasks)

        # Ensure JSON-safe response
        results = make_json_safe_ma(results)

        return {
            "instrument": request.instrument,
            "length": request.length,
            "offset": request.offset,
            "total_items": total_items,
            "results": results,
        }
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

@app.get("/analyze/{symbol}")
async def analyze_symbol(symbol: str):
    """
    Endpoint to analyze price data for a single symbol.
    """
    duration = '1Y'
    try:
        # Fetch data from MongoDB asynchronously
        symbol_data = await nseData_collection.find_one({"symbol": symbol}, {"_id": 0})
        if not symbol_data:
            raise HTTPException(status_code=404, detail=f"Symbol {symbol} not found.")

        # Process and analyze data
        output = await process_single_symbol_ma(symbol_data, duration)
        return output
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

@app.post("/model-data")
async def analyze_data(request: ModelInput):
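    # Expected ModelInput fields (as used below): symbol, exchange ("NSE"/"BSE"),
    # time_range, start_date, end_date. The symbol is matched as a prefix regex.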
    # Fetch data from MongoDB asynchronously
    query = {"symbol": {"$regex": f"^{request.symbol}"}}

    if request.exchange.upper() == "NSE":
        sample_data = await nseData_collection.find_one(query, {"_id": 0})
    else:
        sample_data = await bseData_collection.find_one(query, {"_id": 0})

    if not sample_data:
        raise HTTPException(status_code=404, detail="Symbol not found")

    # Add time range parameters to the data
    sample_data['time_range'] = request.time_range
    sample_data['start_date'] = request.start_date
    sample_data['end_date'] = request.end_date

    result = process_price_data_ma(sample_data)

    if result:
        return JSONResponse(content=result, status_code=200)
    else:
        raise HTTPException(status_code=500, detail="Error processing data")

"""
@app.post("/graph-data")
async def graph_data(request: GraphDataInput):
    if request.exchange.upper() == "NSE":
        get_data = await nseData_collection.find_one({"symbol": {"$regex": f"^{request.symbol}"}}, {"_id": 0})
    elif request.exchange.upper() == "BSE":
        get_data = await bseData_collection.find_one({"symbol": {"$regex": f"^{request.symbol}"}}, {"_id": 0})
    else:
        raise HTTPException(status_code=400, detail="Invalid exchange")

    if not get_data:
        raise HTTPException(status_code=404, detail="Symbol not found")

    # Get offset value
    offset = int(float(request.offset))
    if offset == 0:
        raise HTTPException(status_code=404, detail="Offset value cannot be 0.")

    data = get_data
    reference_api = process_price_data_ma(data=data)

    # Extract prices
    prices = data["prices"]
    if prices:
        newest_date = max(prices.keys(), key=lambda d: datetime.strptime(d, "%Y-%m-%d"))
        newest_close_price = prices[newest_date]["close"]

    # Extract end_dates and their trends
    trend_dates = {trend["end_date"]: trend["trend"] for trend in reference_api["trends"]}

    # Update the prices dictionary with trend status
    for date_str, price_data in data["prices"].items():
        if date_str in trend_dates:
            price_data.update({"status": 1, "trend": trend_dates[date_str]})
        else:
            price_data.update({"status": 0, "trend": "no"})

    # Find top and bottom points
    top_points = []
    bottom_points = []

    for dates, values in data["prices"].items():
        if values.get("trend") == "uptrend":
            top_price = values.get("high")
            if top_price > newest_close_price:
                top_points.append({
                    "date": dates,
                    "open": values.get("open"),
                    "high": top_price,
                    "low": values.get("low"),
                    "close": values.get("close"),
                    "status": values.get("status"),
                    "trend": values.get("trend")
                })

        if values.get("trend") == "downtrend":
            bottom_price = values.get("low")
            if bottom_price < newest_close_price:
                bottom_points.append({
                    "date": dates,
                    "open": values.get("open"),
                    "high": values.get("high"),
                    "low": bottom_price,
                    "close": values.get("close"),
                    "status": values.get("status"),
                    "trend": values.get("trend")
                })

    # Sort points
    top_points_sorted = sorted(top_points, key=lambda x: x["high"], reverse=False)
    bottom_points_sorted = sorted(bottom_points, key=lambda x: x["low"], reverse=True)

    # Determine the maximum count of points
    top_sum = len(top_points_sorted)
    bottom_sum = len(bottom_points_sorted)
    sum_c = max(top_sum, bottom_sum)

    # Limit points based on offset
    top_points_sorted = top_points_sorted[:offset]
    bottom_points_sorted = bottom_points_sorted[:offset]

    # Get the dates of top and bottom points
    top_dates = {point["date"] for point in top_points_sorted}
    bottom_dates = {point["date"] for point in bottom_points_sorted}

    # Update prices based on top and bottom points
    for date, values in data["prices"].items():
        if date not in top_dates:
            if values["trend"] == "uptrend" and values["high"] < newest_close_price:
                values['status'] = 0
                values['trend'] = 'no'

        if date not in bottom_dates:
            if values["trend"] == "downtrend" and values["low"] > newest_close_price:
                values['status'] = 0
                values['trend'] = 'no'

    # Find the smallest date in the combined top and bottom dates
    total_dates = top_dates | bottom_dates
    smallest_date = min(total_dates)

    # Filter prices based on the smallest date
    filtered_prices = {
        date: details
        for date, details in data["prices"].items()
        if date >= str(smallest_date)
    }
    filtered_prices["max_points"] = sum_c

    # Count uptrend and downtrend close values within the specified range
    uptrend_count = 0
    downtrend_count = 0

    for date, values in data["prices"].items():
        close = values.get("close")
        if values.get("trend") == "uptrend" and close is not None:
            if close - 0.004 <= close <= close + 0.005:
                uptrend_count += 1

        if values.get("trend") == "downtrend" and close is not None:
            if close - 0.004 <= close <= close + 0.005:
                downtrend_count += 1

    # Add counts to the filtered prices
    filtered_prices["uptrend_close_count"] = uptrend_count
    filtered_prices["downtrend_close_count"] = downtrend_count

    if filtered_prices:
        return JSONResponse(content=filtered_prices, status_code=200)
    else:
        raise HTTPException(status_code=500, detail="Error processing data")
"""

@app.post("/graph-data")
async def graph_data(request: GraphDataInput):
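    # Builds chart data for a single symbol: price rows are annotated with trend
    # status from process_price_data_ma, then limited to the `offset` nearest top
    # (uptrend highs above the latest close) and bottom (downtrend lows below it)
    # points. Expected GraphDataInput fields (as used below): symbol, exchange, offset.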
    if request.exchange.upper() == "NSE":
        get_data = await nseData_collection.find_one({"symbol": {"$regex": f"^{request.symbol}"}}, {"_id": 0})
    elif request.exchange.upper() == "BSE":
        get_data = await bseData_collection.find_one({"symbol": {"$regex": f"^{request.symbol}"}}, {"_id": 0})
    else:
        raise HTTPException(status_code=400, detail="Invalid exchange")

    if not get_data:
        raise HTTPException(status_code=404, detail="Symbol not found")

    # Get offset value
    try:
        offset = int(float(request.offset))
        if offset <= 0:
            raise ValueError("Offset must be greater than 0")
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))

    data = get_data
    reference_api = process_price_data_ma(data=data)

    # Extract prices and get newest date
    prices = data["prices"]
    if prices:
        newest_date = max(prices.keys(), key=lambda d: datetime.strptime(d, "%Y-%m-%d"))
        newest_close_price = prices[newest_date]["close"]

    # Extract end_dates and their trends
    trend_dates = {trend["end_date"]: trend["trend"] for trend in reference_api["trends"]}

    # Update the prices dictionary with trend status
    for date_str, price_data in data["prices"].items():
        if date_str in trend_dates:
            price_data.update({"status": 1, "trend": trend_dates[date_str]})
        else:
            price_data.update({"status": 0, "trend": "no"})

    # Find top and bottom points
    top_points = []
    bottom_points = []

    for dates, values in data["prices"].items():
        if values.get("trend") == "uptrend":
            top_price = values.get("high")
            if top_price > newest_close_price:
                top_points.append({
                    "date": dates,
                    "open": values.get("open"),
                    "high": top_price,
                    "low": values.get("low"),
                    "close": values.get("close"),
                    "status": values.get("status"),
                    "trend": values.get("trend")
                })

        if values.get("trend") == "downtrend":
            bottom_price = values.get("low")
            if bottom_price < newest_close_price:
                bottom_points.append({
                    "date": dates,
                    "open": values.get("open"),
                    "high": values.get("high"),
                    "low": bottom_price,
                    "close": values.get("close"),
                    "status": values.get("status"),
                    "trend": values.get("trend")
                })

    # Sort points
    top_points_sorted = sorted(top_points, key=lambda x: x["high"], reverse=False)
    bottom_points_sorted = sorted(bottom_points, key=lambda x: x["low"], reverse=True)

    # Determine the maximum count of points
    top_sum = len(top_points_sorted)
    bottom_sum = len(bottom_points_sorted)
    sum_c = max(top_sum, bottom_sum)

    # Limit points based on offset
    top_points_sorted = top_points_sorted[:offset]
    bottom_points_sorted = bottom_points_sorted[:offset]

    # Get all dates from selected points
    selected_dates = set()
    for point in top_points_sorted + bottom_points_sorted:
        selected_dates.add(point["date"])

    # Find the smallest and largest dates from selected points
    if selected_dates:
        smallest_date = min(selected_dates, key=lambda d: datetime.strptime(d, "%Y-%m-%d"))
        largest_date = max(selected_dates, key=lambda d: datetime.strptime(d, "%Y-%m-%d"))
    else:
        raise HTTPException(status_code=404, detail="No trend points found")

    # Create filtered prices dictionary with date range filtering
    filtered_prices = {}

    for date, values in data["prices"].items():
        # Only include dates between smallest_date and newest_date
        if smallest_date <= date <= newest_date:
            if date in selected_dates:
                # Include selected trend points
                filtered_prices[date] = values
            elif values["trend"] == "no":
                # Include no-trend points
                filtered_prices[date] = values
            else:
                # Convert other points to no-trend
                filtered_prices[date] = {
                    **values,
                    "status": 0,
                    "trend": "no"
                }

    filtered_prices["max_points"] = sum_c

    # Count uptrend and downtrend close values within the specified range
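    # NOTE: the tolerance check below compares `close` against itself, so it is
    # always true as written; if a reference price (e.g. newest_close_price) was
    # intended, the bounds should use that value instead.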
    uptrend_count = 0
    downtrend_count = 0

    for date, values in filtered_prices.items():
        if isinstance(values, dict) and "close" in values:  # Skip the max_points key
            close = values.get("close")
            if values.get("trend") == "uptrend" and close is not None:
                if close - 0.004 <= close <= close + 0.005:
                    uptrend_count += 1

            if values.get("trend") == "downtrend" and close is not None:
                if close - 0.004 <= close <= close + 0.005:
                    downtrend_count += 1

    # Add counts to the filtered prices
    filtered_prices["uptrend_close_count"] = uptrend_count
    filtered_prices["downtrend_close_count"] = downtrend_count

    if filtered_prices:
        return JSONResponse(content=filtered_prices, status_code=200)
    else:
        raise HTTPException(status_code=500, detail="Error processing data")

"""Portfolio Report APIs"""
@app.post("/portfolio-report-upload")
async def upload_report(get_lists: List[PortfolioReportList]):
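    # Expected payload (shape inferred from the loop below): a list of
    # [{"investor": "<name>", "holdings": [{"symbol": "<sym>", "qty": <int>, "avg_price": <float>}]}]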
    try:
        # Process each investor's report
        for report in get_lists:
            investor_name = report.investor
            new_holdings = report.holdings

            # Loop through each holding for the investor
            for holding in new_holdings:
                # Update the existing holding or add a new one
                update_result = await portfolioUpload_collection.update_one(
                    {"investor": investor_name, "holdings.symbol": holding.symbol},
                    {
                        "$set": {
                            "holdings.$.qty": holding.qty,
                            "holdings.$.avg_price": holding.avg_price,
                        }
                    }
                )

                # If the holding is not found, add it to the holdings list
                if update_result.matched_count == 0:
                    await portfolioUpload_collection.update_one(
                        {"investor": investor_name},
                        {
                            "$setOnInsert": {"investor": investor_name},
                            "$addToSet": {"holdings": holding.dict()}
                        },
                        upsert=True
                    )

        return JSONResponse(
            content={"message": "Portfolio data inserted/updated successfully."}
        )

    except Exception as e:
        return JSONResponse(
            content={
                "message": "An error occurred during report upload.",
                "error": str(e),
            },
            status_code=500,
        )

@app.post("/portfolio-get-report")
async def get_report(dat: PortfolioReportGet):
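    # Expected PortfolioReportGet fields (as used below): investor,
    # start_date / end_date ("YYYY-MM-DD"), and data_type (0=open, 1=high, 2=low, 3=close).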
    try:
        # Validate and parse dates
        start_dt = datetime.strptime(dat.start_date, "%Y-%m-%d")
        end_dt = datetime.strptime(dat.end_date, "%Y-%m-%d")

        if start_dt > end_dt:
            raise HTTPException(status_code=400, detail="start_date must be before or equal to end_date")

        report_type_map = {1: "open", 2: "high", 3: "low", 4: "close"}
        results_by_type = {"summary_report": {}}

        reqType = 0  # hardcoded to 0 so that all four report types (open/high/low/close) are generated

        # Determine request types
        if reqType == 0:
            req_types = list(report_type_map.keys())
        else:
            req_types = [reqType]

        # Fetch data for the specific investor
        investor_data = await portfolioUpload_collection.find_one(
            {"investor": dat.investor}, {"_id": 0}
        )
        # print(investor, investor_data)

        if not investor_data:
            raise HTTPException(status_code=404, detail="Investor data not found")

        holdings = investor_data.get("holdings", [])
        if not holdings:
            raise HTTPException(status_code=404, detail="No holdings found for the investor")


        for req_type in req_types:
            report_type = report_type_map.get(req_type)
            if not report_type:
                raise HTTPException(status_code=400, detail="Invalid reqType. Must be between 1 and 4")

            all_symbols = []
            total_prices = {}

            # Process symbols
            for symbol_info in holdings:
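                # Aggregation sketch: convert the `prices` sub-document to an array,
                # keep only entries inside [start_dt, end_dt], and multiply the
                # selected OHLC field by the holding quantity for this symbol.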
                pipeline = [
                   {"$match": {"symbol": symbol_info["symbol"]}},
                   {"$project": {
                        "prices": {
                            "$filter": {
                                "input": {
                                    "$map": {
                                        "input": {"$objectToArray": "$prices"},
                                        "as": "priceData",
                                        "in": {
                                            "date": {"$toDate": "$$priceData.k"},
                                            "price_multiplied": {
                                                "$multiply": [
                                                    f"$$priceData.v.{report_type}",
                                                    symbol_info["qty"]
                                                ]
                                            }
                                        }
                                    }
                                },
                                "as": "filteredPrice",
                                "cond": {
                                    "$and": [
                                        {"$gte": ["$$filteredPrice.date", start_dt]},
                                        {"$lte": ["$$filteredPrice.date", end_dt]}
                                    ]
                                }
                            }
                        }
                    }}
                ]

                # Use await with .to_list() for async aggregation
                results = await nseData_collection.aggregate(pipeline).to_list(length=None)

                symbol_data = {
                    "type": report_type,
                    "symbol": symbol_info["symbol"],
                    "qty": symbol_info["qty"],
                    "avg_price": symbol_info["avg_price"]
                }

                # Aggregate prices by date
                for result in results:
                    for price_data in result.get("prices", []):
                        date_str = price_data["date"].strftime("%Y-%m-%d")
                        price = price_data["price_multiplied"]

                        symbol_data[date_str] = price
                        total_prices[date_str] = total_prices.get(date_str, 0) + price

                all_symbols.append(symbol_data)

            # Calculate summary entries
            sorted_dates = sorted(total_prices.keys())
            total_entry = {
                "type": report_type,
                "symbol": "TOTAL",
                "qty": "",
                "avg_price": "",
                **{date: total_prices[date] for date in sorted_dates}
            }
            all_symbols.append(total_entry)

            # Correctly calculate % Total Change
            if sorted_dates:
                first_total = total_prices[sorted_dates[0]]
                change_entry = {
                    "type": report_type,
                    "symbol": f"% TOTAL CHANGE in {report_type}",
                    "qty": "",
                    "avg_price": "",
                    **{
                        date: "-" if first_total == 0 else
                        round(((total_prices[date] - first_total) / first_total * 100), 2)
                        for date in sorted_dates
                    }
                }
                all_symbols.append(change_entry)

            # Correctly calculate Day Change
            day_changes = {}
            day_change_entry = {
                "type": report_type,
                "symbol": f"DAY CHANGE in {report_type}",
                "qty": "",
                "avg_price": "",
            }

            for i, date in enumerate(sorted_dates):
                if i == 0:
                    day_changes[date] = "-"
                else:
                    prev_date = sorted_dates[i-1]
                    day_changes[date] = round(total_prices[date] - total_prices[prev_date], 2)

                day_change_entry[date] = day_changes[date]

            all_symbols.append(day_change_entry)

            # Correctly calculate Day Change Percentage
            day_change_percent_entry = {
                "type": report_type,
                "symbol": f"DAY CHANGE % in {report_type}",
                "qty": "",
                "avg_price": "",
                **{
                    date: "-" if i == 0 or total_prices[sorted_dates[i-1]] == 0 else
                    round((day_changes[date] / total_prices[sorted_dates[i-1]] * 100), 2)
                    for i, date in enumerate(sorted_dates)
                }
            }
            all_symbols.append(day_change_percent_entry)

            results_by_type[report_type] = convert_objectid_pr(all_symbols)

            # Only add to summary report if reqType is 0
            if reqType == 0:
                results_by_type["summary_report"][report_type] = total_entry

        # Only add metrics to summary report if reqType is 0
        if reqType == 0:
            # Metric Calculations
            metrics = {
                # Absolute value metrics
                "high_minus_open": {"symbol": "High - Open"},
                "open_minus_low": {"symbol": "Open - Low"},
                "high_minus_low": {"symbol": "High - Low"},
                "open_minus_close": {"symbol": "Open - Close"},

                # Percentage metrics with updated names
                "high_minus_open_pr": {"symbol": "High - Open %"},
                "open_minus_low_pr": {"symbol": "Open - Low %"},
                "high_minus_low_pr": {"symbol": "High - Low %"},
                "open_minus_close_pr": {"symbol": "Open - Close %"}
            }

            for metric, result in metrics.items():
                for date in sorted_dates:
                    try:
                        # Handling absolute value metrics
                        if metric == "high_minus_open":
                            values = [results_by_type["summary_report"]["high"][date],
                                      results_by_type["summary_report"]["open"][date]]
                            result[date] = round(values[0] - values[1], 2)

                        elif metric == "open_minus_low":
                            values = [results_by_type["summary_report"]["open"][date],
                                      results_by_type["summary_report"]["low"][date]]
                            result[date] = round(values[0] - values[1], 2)

                        elif metric == "high_minus_low":
                            values = [results_by_type["summary_report"]["high"][date],
                                      results_by_type["summary_report"]["low"][date]]
                            result[date] = round(values[0] - values[1], 2)

                        elif metric == "open_minus_close":
                            values = [results_by_type["summary_report"]["open"][date],
                                      results_by_type["summary_report"]["close"][date]]
                            result[date] = round(values[0] - values[1], 2)

                        # Handling percentage metrics
                        elif metric == "high_minus_open_pr":
                            values = [results_by_type["summary_report"]["high"][date],
                                      results_by_type["summary_report"]["open"][date]]
                            result[date] = "-" if values[1] == 0 else round((values[0] - values[1]) / values[1] * 100, 2)

                        elif metric == "open_minus_low_pr":
                            values = [results_by_type["summary_report"]["open"][date],
                                      results_by_type["summary_report"]["low"][date]]
                            result[date] = "-" if values[1] == 0 else round((values[0] - values[1]) / values[1] * 100, 2)

                        elif metric == "high_minus_low_pr":
                            values = [results_by_type["summary_report"]["high"][date],
                                      results_by_type["summary_report"]["low"][date]]
                            result[date] = "-" if values[1] == 0 else round((values[0] - values[1]) / values[1] * 100, 2)

                        elif metric == "open_minus_close_pr":
                            values = [results_by_type["summary_report"]["open"][date],
                                      results_by_type["summary_report"]["close"][date]]
                            result[date] = "-" if values[1] == 0 else round((values[0] - values[1]) / values[1] * 100, 2)

                    except KeyError:
                        continue

            results_by_type["summary_report"].update(metrics)

        # print(results_by_type)

        result_dict = {}
        result_dict["investor"] = dat.investor

        # Define a mapping for the data type
        data_type_map = {
            0: 'open',
            1: 'high',
            2: 'low',
            3: 'close'
        }

        # Check if the provided data_type is valid
        if dat.data_type not in data_type_map:
            raise HTTPException(status_code=400, detail="Invalid data_type. Use 0 for open, 1 for high, 2 for low, 3 for close.")

        # Determine which data key to use based on the data_type
        key = data_type_map[dat.data_type]

        # Skip processing if the key is not in the data
        if key not in results_by_type:
            raise HTTPException(status_code=404, detail=f"Data for {key} not found.")

        # Process the selected data type
        df = pd.DataFrame(results_by_type[key])

        # Ensure that numeric columns are properly formatted
        date_pattern = re.compile(r'^\d{4}')  # Columns starting with a four-digit year (e.g., '2024', '2023')
        for col in df.columns:
            if date_pattern.match(col):
                # Convert to numeric and coerce errors to NaN
                df[col] = pd.to_numeric(df[col], errors='coerce')

        # Fill any NaN values with 0 (or any other default value)
        df.fillna(0, inplace=True)

        # Perform the calculations
        results = []
        # date_columns = [col for col in df.columns if col.startswith('2025')]
        date_columns = [col for col in df.columns if any(str(year) in col for year in range(2014, 2026))]

        summary_loser = {date: 0 for date in date_columns}
        summary_win = {date: 0 for date in date_columns}
        summary_unchanged = {date: 0 for date in date_columns}

        for index, row in df.iterrows():
            symbol = row['symbol']
            avg_price = row['avg_price']
            quantity = row['qty']

            # Get dates and perform calculations
            calculations = []
            loser, win, unchanged = 0, 0, 0

            for i in range(1, len(date_columns)):
                current_date = date_columns[i]
                previous_date = date_columns[i-1]

                # Calculate change, handling potential numeric issues
                try:
                    change = float(row[current_date]) - float(row[previous_date])
                except (ValueError, TypeError):
                    change = 0  # Default to 0 if calculation fails

                calculations.append(change)

                # Track counts of changes
                if change < 0:
                    loser += 1
                    summary_loser[current_date] += 1
                elif change > 0:
                    win += 1
                    summary_win[current_date] += 1
                else:
                    unchanged += 1
                    summary_unchanged[current_date] += 1

            total = loser + win + unchanged
            is_positive = win > loser

            # Add result to the list
            results.append({
                "Symbol": symbol,
                "Avg_price": avg_price,
                "Quantity": quantity,
                **{date_columns[i]: calculations[i-1] for i in range(1, len(date_columns))},
                "Loser": loser,
                "Win": win, 
                "Unchanged": unchanged,
                "Positive": "TRUE" if is_positive else "FALSE"
            })

        # Convert results to DataFrame
        result_df = pd.DataFrame(results)

        # Create summary rows for Loser, Win, and Unchanged
        summary_rows = {
            "Symbol": ["Total_Loser", "Total_Win"],
            "Avg_price": [None, None],
            "Quantity": [None, None],
            "Positive": [None, None]
        }
        for date in date_columns[1:]:
            summary_rows[date] = [
                summary_loser[date],  # Total Loser count for the date
                summary_win[date],    # Total Win count for the date
            ]

        # Convert summary rows to DataFrame
        summary_df = pd.DataFrame(summary_rows)

        # Concatenate result and summary DataFrames
        final_df = pd.concat([result_df, summary_df], ignore_index=True)

        # Replace problematic JSON values
        final_df.replace([np.inf, -np.inf, np.nan], None, inplace=True)
        result_dict[key] = final_df.to_dict(orient="records")

        # Return the insertion results as a JSON response
        return JSONResponse(content=result_dict, status_code=200)

    except HTTPException:
        raise
    except ValueError as e:
        raise HTTPException(status_code=400, detail=f"Invalid date format: {str(e)}")
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")

@app.get("/portfolio-investors")
async def get_investor_names():
    try:
        # Use MongoDB's distinct operation to fetch unique investor names
        investor_names = await portfolioUpload_collection.distinct("investor")

        return JSONResponse(
            content={"investors": investor_names}
        )

    except Exception as e:
        return JSONResponse(
            content={
                "message": "An error occurred while fetching investor names.",
                "error": str(e),
            },
            status_code=500,
        )

@app.get("/portfolio-investors-data/")
async def get_investors_data(investor: str = Query(None)):
    try:
        # Build a query filter based on optional `investor` parameter
        query = {}
        if investor:
            query["investor"] = investor

        # Fetch data from the MongoDB collection
        data = await portfolioUpload_collection.find(query, {"_id":0}).to_list(length=None)

        # Return the data
        return JSONResponse(content={"data": data})
    except Exception as e:
        return JSONResponse(
            content={
                "message": "An error occurred while fetching data.",
                "error": str(e),
            },
            status_code=500,
        )

@app.post("/portfolio-get-report")
async def get_report(dat: PortfolioReportGet):
    try:
        # Validate and parse dates
        start_dt = datetime.strptime(dat.start_date, "%Y-%m-%d")
        end_dt = datetime.strptime(dat.end_date, "%Y-%m-%d")

        if start_dt > end_dt:
            raise HTTPException(status_code=400, detail="start_date must be before or equal to end_date")

        report_type_map = {1: "open", 2: "high", 3: "low", 4: "close"}
        results_by_type = {"summary_report": {}}

        reqType = 0

        # Determine request types
        if reqType == 0:
            req_types = list(report_type_map.keys())
        else:
            req_types = [reqType]

        # Fetch data for the specific investor
        investor_data = await portfolioUpload_collection.find_one(
            {"investor": dat.investor}, {"_id": 0}
        )
        # print(investor, investor_data)

        if not investor_data:
            raise HTTPException(status_code=404, detail="Investor data not found")

        holdings = investor_data.get("holdings", [])
        if not holdings:
            raise HTTPException(status_code=404, detail="No holdings found for the investor")


        for req_type in req_types:
            report_type = report_type_map.get(req_type)
            if not report_type:
                raise HTTPException(status_code=400, detail="Invalid reqType. Must be between 1 and 4")

            all_symbols = []
            total_prices = {}

            # Process symbols
            for symbol_info in holdings:
                pipeline = [
                   {"$match": {"symbol": symbol_info["symbol"]}},
                   {"$project": {
                        "prices": {
                            "$filter": {
                                "input": {
                                    "$map": {
                                        "input": {"$objectToArray": "$prices"},
                                        "as": "priceData",
                                        "in": {
                                            "date": {"$toDate": "$$priceData.k"},
                                            "price_multiplied": {
                                                "$multiply": [
                                                    f"$$priceData.v.{report_type}",
                                                    symbol_info["qty"]
                                                ]
                                            }
                                        }
                                    }
                                },
                                "as": "filteredPrice",
                                "cond": {
                                    "$and": [
                                        {"$gte": ["$$filteredPrice.date", start_dt]},
                                        {"$lte": ["$$filteredPrice.date", end_dt]}
                                    ]
                                }
                            }
                        }
                    }}
                ]

                # Use await with .to_list() for async aggregation
                results = await nseData_collection.aggregate(pipeline).to_list(length=None)

                symbol_data = {
                    "type": report_type,
                    "symbol": symbol_info["symbol"],
                    "qty": symbol_info["qty"],
                    "avg_price": symbol_info["avg_price"]
                }

                # Aggregate prices by date
                for result in results:
                    for price_data in result.get("prices", []):
                        date_str = price_data["date"].strftime("%Y-%m-%d")
                        price = price_data["price_multiplied"]

                        symbol_data[date_str] = price
                        total_prices[date_str] = total_prices.get(date_str, 0) + price

                all_symbols.append(symbol_data)

            # Calculate summary entries
            sorted_dates = sorted(total_prices.keys())
            total_entry = {
                "type": report_type,
                "symbol": "TOTAL",
                "qty": "",
                "avg_price": "",
                **{date: total_prices[date] for date in sorted_dates}
            }
            all_symbols.append(total_entry)

            # Correctly calculate % Total Change
            if sorted_dates:
                first_total = total_prices[sorted_dates[0]]
                change_entry = {
                    "type": report_type,
                    "symbol": f"% TOTAL CHANGE in {report_type}",
                    "qty": "",
                    "avg_price": "",
                    **{
                        date: "-" if first_total == 0 else
                        round(((total_prices[date] - first_total) / first_total * 100), 2)
                        for date in sorted_dates
                    }
                }
                all_symbols.append(change_entry)

            # Correctly calculate Day Change
            day_changes = {}
            day_change_entry = {
                "type": report_type,
                "symbol": f"DAY CHANGE in {report_type}",
                "qty": "",
                "avg_price": "",
            }

            for i, date in enumerate(sorted_dates):
                if i == 0:
                    day_changes[date] = "-"
                else:
                    prev_date = sorted_dates[i-1]
                    day_changes[date] = round(total_prices[date] - total_prices[prev_date], 2)

                day_change_entry[date] = day_changes[date]

            all_symbols.append(day_change_entry)

            # Correctly calculate Day Change Percentage
            day_change_percent_entry = {
                "type": report_type,
                "symbol": f"DAY CHANGE % in {report_type}",
                "qty": "",
                "avg_price": "",
                **{
                    date: "-" if i == 0 or total_prices[sorted_dates[i-1]] == 0 else
                    round((day_changes[date] / total_prices[sorted_dates[i-1]] * 100), 2)
                    for i, date in enumerate(sorted_dates)
                }
            }
            all_symbols.append(day_change_percent_entry)

            results_by_type[report_type] = convert_objectid_pr(all_symbols)

            # Only add to summary report if reqType is 0
            if reqType == 0:
                results_by_type["summary_report"][report_type] = total_entry

        # Only add metrics to summary report if reqType is 0
        if reqType == 0:
            # Metric Calculations
            metrics = {
                # Absolute value metrics
                "high_minus_open": {"symbol": "High - Open"},
                "open_minus_low": {"symbol": "Open - Low"},
                "high_minus_low": {"symbol": "High - Low"},
                "open_minus_close": {"symbol": "Open - Close"},

                # Percentage metrics with updated names
                "high_minus_open_pr": {"symbol": "High - Open %"},
                "open_minus_low_pr": {"symbol": "Open - Low %"},
                "high_minus_low_pr": {"symbol": "High - Low %"},
                "open_minus_close_pr": {"symbol": "Open - Close %"}
            }

            for metric, result in metrics.items():
                for date in sorted_dates:
                    try:
                        # Handling absolute value metrics
                        if metric == "high_minus_open":
                            values = [results_by_type["summary_report"]["high"][date],
                                      results_by_type["summary_report"]["open"][date]]
                            result[date] = round(values[0] - values[1], 2)

                        elif metric == "open_minus_low":
                            values = [results_by_type["summary_report"]["open"][date],
                                      results_by_type["summary_report"]["low"][date]]
                            result[date] = round(values[0] - values[1], 2)

                        elif metric == "high_minus_low":
                            values = [results_by_type["summary_report"]["high"][date],
                                      results_by_type["summary_report"]["low"][date]]
                            result[date] = round(values[0] - values[1], 2)

                        elif metric == "open_minus_close":
                            values = [results_by_type["summary_report"]["open"][date],
                                      results_by_type["summary_report"]["close"][date]]
                            result[date] = round(values[0] - values[1], 2)

                        # Handling percentage metrics
                        elif metric == "high_minus_open_pr":
                            values = [results_by_type["summary_report"]["high"][date],
                                      results_by_type["summary_report"]["open"][date]]
                            result[date] = "-" if values[1] == 0 else round((values[0] - values[1]) / values[1] * 100, 2)

                        elif metric == "open_minus_low_pr":
                            values = [results_by_type["summary_report"]["open"][date],
                                      results_by_type["summary_report"]["low"][date]]
                            result[date] = "-" if values[1] == 0 else round((values[0] - values[1]) / values[1] * 100, 2)

                        elif metric == "high_minus_low_pr":
                            values = [results_by_type["summary_report"]["high"][date],
                                      results_by_type["summary_report"]["low"][date]]
                            result[date] = "-" if values[1] == 0 else round((values[0] - values[1]) / values[1] * 100, 2)

                        elif metric == "open_minus_close_pr":
                            values = [results_by_type["summary_report"]["open"][date],
                                      results_by_type["summary_report"]["close"][date]]
                            result[date] = "-" if values[1] == 0 else round((values[0] - values[1]) / values[1] * 100, 2)

                    except KeyError:
                        continue

            results_by_type["summary_report"].update(metrics)

        # print(results_by_type)

        result_dict = {}
        result_dict["investor"] = dat.investor

        # Define a mapping for the data type
        data_type_map = {
            0: 'open',
            1: 'high',
            2: 'low',
            3: 'close'
        }

        # Check if the provided data_type is valid
        if dat.data_type not in data_type_map:
            raise HTTPException(status_code=400, detail="Invalid data_type. Use 0 for open, 1 for high, 2 for low, 3 for close.")

        # Determine which data key to use based on the data_type
        key = data_type_map[dat.data_type]

        # Skip processing if the key is not in the data
        if key not in results_by_type:
            raise HTTPException(status_code=404, detail=f"Data for {key} not found.")

        # Process the selected data type
        df = pd.DataFrame(results_by_type[key])

        # Ensure that numeric columns are properly formatted
        date_pattern = re.compile(r'^\d{4}')  # Columns starting with a four-digit year (e.g., '2024', '2023')
        for col in df.columns:
            if date_pattern.match(col):
                # Convert to numeric and coerce errors to NaN
                df[col] = pd.to_numeric(df[col], errors='coerce')

        # Fill any NaN values with 0 (or any other default value)
        df.fillna(0, inplace=True)

        # Perform the calculations
        results = []
        date_columns = [col for col in df.columns if col.startswith('2024')]
        summary_loser = {date: 0 for date in date_columns}
        summary_win = {date: 0 for date in date_columns}
        summary_unchanged = {date: 0 for date in date_columns}

        for index, row in df.iterrows():
            symbol = row['symbol']
            avg_price = row['avg_price']
            quantity = row['qty']

            # Get dates and perform calculations
            calculations = []
            loser, win, unchanged = 0, 0, 0

            for i in range(1, len(date_columns)):
                current_date = date_columns[i]
                previous_date = date_columns[i-1]

                # Calculate change, handling potential numeric issues
                try:
                    change = float(row[current_date]) - float(row[previous_date])
                except (ValueError, TypeError):
                    change = 0  # Default to 0 if calculation fails

                calculations.append(change)

                # Track counts of changes
                if change < 0:
                    loser += 1
                    summary_loser[current_date] += 1
                elif change > 0:
                    win += 1
                    summary_win[current_date] += 1
                else:
                    unchanged += 1
                    summary_unchanged[current_date] += 1

            total = loser + win + unchanged
            is_positive = win > loser

            # Add result to the list
            results.append({
                "Symbol": symbol,
                "Avg_price": avg_price,
                "Quantity": quantity,
                **{date_columns[i]: calculations[i-1] for i in range(1, len(date_columns))},
                "Loser": loser,
                "Win": win, 
                "Unchanged": unchanged,
                "Positive": "TRUE" if is_positive else "FALSE"
            })

        # Convert results to DataFrame
        result_df = pd.DataFrame(results)

        # Create summary rows for Loser, Win, and Unchanged
        summary_rows = {
            "Symbol": ["Total_Loser", "Total_Win"],
            "Avg_price": [None, None],
            "Quantity": [None, None],
            "Positive": [None, None]
        }
        for date in date_columns[1:]:
            summary_rows[date] = [
                summary_loser[date],  # Total Loser count for the date
                summary_win[date],    # Total Win count for the date
            ]

        # Convert summary rows to DataFrame
        summary_df = pd.DataFrame(summary_rows)

        # Concatenate result and summary DataFrames
        final_df = pd.concat([result_df, summary_df], ignore_index=True)

        # Replace problematic JSON values
        final_df.replace([np.inf, -np.inf, np.nan], None, inplace=True)
        result_dict[key] = final_df.to_dict(orient="records")

        # Return the insertion results as a JSON response
        return JSONResponse(content=result_dict, status_code=200)

    except ValueError as e:
        raise HTTPException(status_code=400, detail=f"Invalid date format: {str(e)}")

    except Exception as e:
        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")

@app.post("/portfolio-delete")
async def delete_portfolio(request: PortfolioDelete):
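    # Deletion matches the investor name as a case-insensitive prefix regex, so a
    # partial name can match (and delete) a different investor than intended.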
    try:
        # Use the correct field from the request model
        if request.investorName:
            result = await portfolioUpload_collection.delete_one(
                {"investor": {"$regex": f"^{request.investorName}", "$options": "i"}}
            )
            if result.deleted_count > 0:
                return {"message": f"Portfolio User {request.investorName} deleted successfully.", "status": 200}
            else:
                return {"message": f"No portfolio found for {request.investorName}."}
        else:
            return {"message": "Payload cannot be NULL."}
    except Exception as e:
        raise HTTPException(status_code=400, detail=str(e))

"""Report Details API"""
@app.post("/report_details_NSE/")  # Used in **Analysis > Report Details: https://stocks.rayvat.com/report**
async def aggregate_nse(request: ReportDetails_NSE):
    """
    Optimized function to fetch NSE data for Report Details.
    """

    try:
        # Get date range either from duration or custom dates
        if request.duration and not (request.start_date and request.end_date):
            request.start_date, request.end_date = calculate_date_range_rd(request.duration)
        elif request.start_date and request.end_date:
            try:
                # Validate date format
                datetime.strptime(request.start_date, '%d-%m-%Y')
                datetime.strptime(request.end_date, '%d-%m-%Y')
            except ValueError:
                return JSONResponse({"detail": "Invalid date format. Use 'dd-mm-yyyy'."}, status_code=400)
        else:
            return JSONResponse({"detail": "Either duration or custom start/end dates must be provided."}, status_code=400)

        if request.input_symbol:
            pipeline = [
                {
                    '$match': {
                        'symbol': {"$regex": f"^{request.input_symbol.upper()}"}
                    }
                },
                {
                    '$project': {
                        '_id': 0,
                        'symbol': 1,
                        'prices': {
                            '$filter': {
                                'input': {'$objectToArray': "$prices"},
                                'as': 'date_item',
                                'cond': {
                                    '$and': [
                                        {
                                            '$gte': [
                                                {'$dateFromString': {'dateString': "$$date_item.k"}},
                                                {'$dateFromString': {'dateString': request.start_date}}
                                            ]
                                        },
                                        {
                                            '$lte': [
                                                {'$dateFromString': {'dateString': "$$date_item.k"}},
                                                {'$dateFromString': {'dateString': request.end_date}}
                                            ]
                                        }
                                    ]
                                }
                            }
                        }
                    }
                },
                {
                    '$unwind': '$prices'
                },
                {
                    '$project': {
                        'symbol': 1,
                        'date': '$prices.k',
                        'prev_close': {'$toDouble': {'$ifNull': ['$prices.v.prev_close', 0]}},
                        'open': {'$toDouble': '$prices.v.open'},
                        'high': {'$toDouble': '$prices.v.high'},
                        'low': {'$toDouble': '$prices.v.low'},
                        'close': {'$toDouble': '$prices.v.close'}
                    }
                }
            ]

            cursor = nseData_collection.aggregate(pipeline, allowDiskUse=True)
            documents = []
            async for doc in cursor:
                processed_doc = calculate_report_values_rd(doc=doc)
                documents.append(processed_doc)

            return JSONResponse(content=documents, status_code=200)
        else:
            pipeline = [
                {
                    '$match': {
                        **({'Instrument': request.Instrument} if request.Instrument != "ALL" else {})
                    }
                },
                {
                    '$project': {
                        '_id': 0,
                        'symbol': 1,
                        'prices': {
                            '$filter': {
                                'input': {'$objectToArray': "$prices"},
                                'as': 'date_item',
                                'cond': {
                                    '$and': [
                                        {
                                            '$gte': [
                                                {'$dateFromString': {'dateString': "$$date_item.k"}},
                                                {'$dateFromString': {'dateString': request.start_date}}
                                            ]
                                        },
                                        {
                                            '$lte': [
                                                {'$dateFromString': {'dateString': "$$date_item.k"}},
                                                {'$dateFromString': {'dateString': request.end_date}}
                                            ]
                                        }
                                    ]
                                }
                            }
                        }
                    }
                },
                {
                    '$unwind': '$prices'
                },
                {
                    '$project': {
                        'symbol': 1,
                        'date': '$prices.k',
                        'prev_close': {'$toDouble': {'$ifNull': ['$prices.v.prev_close', 0]}},
                        'open': {'$toDouble': '$prices.v.open'},
                        'high': {'$toDouble': '$prices.v.high'},
                        'low': {'$toDouble': '$prices.v.low'},
                        'close': {'$toDouble': '$prices.v.close'}
                    }
                }
            ]

            cursor = nseData_collection.aggregate(pipeline, allowDiskUse=True)
            grouped_data = defaultdict(lambda: defaultdict(list))

            async for doc in cursor:
                symbol = doc['symbol']
                high = doc['high']
                low = doc['low']
                close = doc['close']
                prev_close = doc['prev_close']

                # Calculate metrics
                close_minus_prev_close = close - prev_close
                high_minus_low = high - low
                high_minus_prev_close = high - prev_close
                high_minus_prev_close_perc = (high - prev_close) / prev_close * 100 if prev_close != 0 else 0
                high_minus_low_perc = (high - low) / high * 100 if high != 0 else 0
                close_minus_prev_close_perc = (close - prev_close) / prev_close * 100 if prev_close != 0 else 0

                # Group data by symbol
                grouped_data[symbol]['high_minus_prev_close'].append(high_minus_prev_close)
                grouped_data[symbol]['high_minus_low'].append(high_minus_low)
                grouped_data[symbol]['close_minus_prev_close'].append(close_minus_prev_close)
                grouped_data[symbol]['high_minus_prev_close_perc'].append(high_minus_prev_close_perc)
                grouped_data[symbol]['high_minus_low_perc'].append(high_minus_low_perc)
                grouped_data[symbol]['close_minus_prev_close_perc'].append(close_minus_prev_close_perc)

            # Calculate averages
            result = []
            for symbol, fields in grouped_data.items():
                avg_data = {
                    'symbol': symbol,
                    'high_minus_prev_close': sum(fields['high_minus_prev_close']) / len(fields['high_minus_prev_close']) if fields['high_minus_prev_close'] else 0,
                    'high_minus_low': sum(fields['high_minus_low']) / len(fields['high_minus_low']) if fields['high_minus_low'] else 0,
                    'close_minus_prev_close': sum(fields['close_minus_prev_close']) / len(fields['close_minus_prev_close']) if fields['close_minus_prev_close'] else 0,
                    'high_minus_prev_close_perc': sum(fields['high_minus_prev_close_perc']) / len(fields['high_minus_prev_close_perc']) if fields['high_minus_prev_close_perc'] else 0,
                    'high_minus_low_perc': sum(fields['high_minus_low_perc']) / len(fields['high_minus_low_perc']) if fields['high_minus_low_perc'] else 0,
                    'close_minus_prev_close_perc': sum(fields['close_minus_prev_close_perc']) / len(fields['close_minus_prev_close_perc']) if fields['close_minus_prev_close_perc'] else 0
                }
                result.append(avg_data)

            # if request.type == 1:
            #     result = filter_response_rd(result, request.filters)

            return JSONResponse(content=result, status_code=200)

    except Exception as e:
        return JSONResponse({"detail": f"An error occurred: {str(e)}"}, status_code=500)

@app.post("/report_details_NSE_test")
async def aggregate_nse_test(request: ReportDetails_test):
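    """Report Details (test, NSE): type 0 -> overall_data_rd, type 1 -> calculate_multidura."""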
    try:
        if request.type == 0:
            return await overall_data_rd(request, collection=nseData_collection)
        if request.type == 1:
            return await calculate_multidura(request, collection=nseData_collection)
    except Exception as e:
        return JSONResponse({"detail": f"An error occurred: {str(e)}"}, status_code=500)

@app.post("/report_details_BSE_test")
async def aggregate_bse_test(request: ReportDetails_test):
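    """Report Details (test, BSE): type 0 -> overall_data_rd, type 1 -> calculate_multidura."""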
    try:
        if request.type == 0:
            return await overall_data_rd(request, collection=bseData_collection)
        if request.type == 1:
            return await calculate_multidura(request, collection=bseData_collection)
    except Exception as e:
        return JSONResponse({"detail": f"An error occurred: {str(e)}"}, status_code=500)

@app.post("/report_details_BSE")  # Used in **Analysis > Report Details: https://stocks.rayvat.com/report**
async def aggregate_bse(request: ReportDetails_BSE):
    """
    Optimized function to fetch BSE data for Report Details.
    """
    collection = db["bse_data1"]

    try:
        # Get date range either from duration or custom dates
        if request.duration and not (request.start_date and request.end_date):
            request.start_date, request.end_date = calculate_date_range_rd(request.duration)
        elif request.start_date and request.end_date:
            try:
                # Validate date format
                datetime.strptime(request.start_date, '%d-%m-%Y')
                datetime.strptime(request.end_date, '%d-%m-%Y')
            except ValueError:
                return JSONResponse({"detail": "Invalid date format. Use 'dd-mm-yyyy'."}, status_code=400)
        else:
            return JSONResponse({"detail": "Either duration or custom start/end dates must be provided."}, status_code=400)

        if request.input_symbol:
            pipeline = [
                {
                    '$match': {
                        'symbol': {"$regex": f"^{request.input_symbol}"}
                    }
                },
                {
                    '$project': {
                        '_id': 0,
                        'symbol': 1,
                        'prices': {
                            '$filter': {
                                'input': {'$objectToArray': "$prices"},
                                'as': 'date_item',
                                'cond': {
                                    '$and': [
                                        {
                                            '$gte': [
                                                {'$dateFromString': {'dateString': "$$date_item.k"}},
                                                {'$dateFromString': {'dateString': request.start_date}}
                                            ]
                                        },
                                        {
                                            '$lte': [
                                                {'$dateFromString': {'dateString': "$$date_item.k"}},
                                                {'$dateFromString': {'dateString': request.end_date}}
                                            ]
                                        }
                                    ]
                                }
                            }
                        }
                    }
                },
                {
                    '$unwind': '$prices'
                },
                {
                    '$project': {
                        'symbol': 1,
                        'date': '$prices.k',
                        'prev_close': {'$toDouble': {'$ifNull': ['$prices.v.prev_close', 0]}},
                        'open': {'$toDouble': '$prices.v.open'},
                        'high': {'$toDouble': '$prices.v.high'},
                        'low': {'$toDouble': '$prices.v.low'},
                        'close': {'$toDouble': '$prices.v.close'}
                    }
                }
            ]

            cursor = collection.aggregate(pipeline, allowDiskUse=True)
            documents = []
            async for doc in cursor:
                high = doc['high']
                low = doc['low']
                close = doc['close']
                prev_close = doc['prev_close']

                processed_doc = {
                    'symbol': doc['symbol'],
                    'date': doc['date'],
                    'prev_close': doc['prev_close'],
                    'open': doc['open'],
                    'high': doc['high'],
                    'low': doc['low'],
                    'close': doc['close'],
                    'close_minus_prev_close': close - prev_close,
                    'high_minus_low': high - low,
                    'high_minus_prev_close': high - prev_close,
                    'high_minus_prev_close_perc': (high - prev_close) / prev_close * 100 if prev_close != 0 else 0,
                    'high_minus_low_perc': (high - low) / high * 100 if high != 0 else 0,
                    'close_minus_prev_close_perc': (close - prev_close) / prev_close * 100 if prev_close != 0 else 0
                }
                documents.append(processed_doc)

            return JSONResponse(content=documents, status_code=200)
        else:
            pipeline = [
                {
                    '$match': {
                        **({'Instrument': request.Instrument} if request.Instrument and request.Instrument != "All" else {})
                    }
                },
                {
                    '$project': {
                        '_id': 0,
                        'symbol': 1,
                        'prices': {
                            '$filter': {
                                'input': {'$objectToArray': "$prices"},
                                'as': 'date_item',
                                'cond': {
                                    '$and': [
                                        {
                                            '$gte': [
                                                {'$dateFromString': {'dateString': "$$date_item.k"}},
                                                {'$dateFromString': {'dateString': request.start_date}}
                                            ]
                                        },
                                        {
                                            '$lte': [
                                                {'$dateFromString': {'dateString': "$$date_item.k"}},
                                                {'$dateFromString': {'dateString': request.end_date}}
                                            ]
                                        }
                                    ]
                                }
                            }
                        }
                    }
                },
                {
                    '$unwind': '$prices'
                },
                {
                    '$project': {
                        'symbol': 1,
                        'date': '$prices.k',
                        'prev_close': {'$toDouble': {'$ifNull': ['$prices.v.prev_close', 0]}},
                        'open': {'$toDouble': '$prices.v.open'},
                        'high': {'$toDouble': '$prices.v.high'},
                        'low': {'$toDouble': '$prices.v.low'},
                        'close': {'$toDouble': '$prices.v.close'}
                    }
                }
            ]

            cursor = collection.aggregate(pipeline, allowDiskUse=True)
            grouped_data = defaultdict(lambda: defaultdict(list))

            async for doc in cursor:
                symbol = doc['symbol']
                high = doc['high']
                low = doc['low']
                close = doc['close']
                prev_close = doc['prev_close']

                # Calculate metrics
                close_minus_prev_close = close - prev_close
                high_minus_low = high - low
                high_minus_prev_close = high - prev_close
                high_minus_prev_close_perc = (high - prev_close) / prev_close * 100 if prev_close != 0 else 0
                high_minus_low_perc = (high - low) / high * 100 if high != 0 else 0
                close_minus_prev_close_perc = (close - prev_close) / prev_close * 100 if prev_close != 0 else 0

                # Group data by symbol
                grouped_data[symbol]['high_minus_prev_close'].append(high_minus_prev_close)
                grouped_data[symbol]['high_minus_low'].append(high_minus_low)
                grouped_data[symbol]['close_minus_prev_close'].append(close_minus_prev_close)
                grouped_data[symbol]['high_minus_prev_close_perc'].append(high_minus_prev_close_perc)
                grouped_data[symbol]['high_minus_low_perc'].append(high_minus_low_perc)
                grouped_data[symbol]['close_minus_prev_close_perc'].append(close_minus_prev_close_perc)

            # Calculate averages
            result = []
            for symbol, fields in grouped_data.items():
                avg_data = {
                    'symbol': symbol,
                    'high_minus_prev_close': sum(fields['high_minus_prev_close']) / len(fields['high_minus_prev_close']) if fields['high_minus_prev_close'] else 0,
                    'high_minus_low': sum(fields['high_minus_low']) / len(fields['high_minus_low']) if fields['high_minus_low'] else 0,
                    'close_minus_prev_close': sum(fields['close_minus_prev_close']) / len(fields['close_minus_prev_close']) if fields['close_minus_prev_close'] else 0,
                    'high_minus_prev_close_perc': sum(fields['high_minus_prev_close_perc']) / len(fields['high_minus_prev_close_perc']) if fields['high_minus_prev_close_perc'] else 0,
                    'high_minus_low_perc': sum(fields['high_minus_low_perc']) / len(fields['high_minus_low_perc']) if fields['high_minus_low_perc'] else 0,
                    'close_minus_prev_close_perc': sum(fields['close_minus_prev_close_perc']) / len(fields['close_minus_prev_close_perc']) if fields['close_minus_prev_close_perc'] else 0
                }
                result.append(avg_data)

            return JSONResponse(content=result, status_code=200)

    except Exception as e:
        return JSONResponse({"detail": f"An error occurred: {str(e)}"}, status_code=500)


"""Trend Analysis APIs"""
@app.get("/fetch-date-range")  # Used in **Analysis > Trend Analysis: https://stocks.rayvat.com/trendanalysis**
async def fetch_data_test(startdate: str = Query(...), enddate: str = Query(...), type: str = Query(...)):
    """Trend Analysis Api to get range data and calculate to privious year"""
    collection = db['nse_data1'] if type == 'NSE' else db['bse_data1']

    try:
        # Convert startdate and enddate from 'dd-mm-yyyy' to datetime objects and extract day and month
        start_date = datetime.strptime(startdate, "%d-%m-%Y")
        end_date = datetime.strptime(enddate, "%d-%m-%Y")
        start_day_month = start_date.strftime("%m-%d")  # Extract month and day
        end_day_month = end_date.strftime("%m-%d")  # Extract month and day

        # Aggregation pipeline
        pipeline = [
            {
                "$project": {
                    "_id": {"$toString": "$_id"},  # Convert ObjectId to string
                    "symbol": 1,
                    "Security Name": 1,
                    "exchange_token": 1,
                    "instrument_token": 1,
                    "filtered_prices": {
                        "$filter": {
                            "input": {
                                "$objectToArray": "$prices"
                            },
                            "as": "price",
                            "cond": {
                                "$and": [
                                    {"$gte": [{"$substr": [{"$toString": "$$price.k"}, 5, 5]}, start_day_month]},  # Compare day-month of startdate
                                    {"$lte": [{"$substr": [{"$toString": "$$price.k"}, 5, 5]}, end_day_month]}  # Compare day-month of enddate
                                ]
                            }
                        }
                    }
                }
            },
            {
                "$addFields": {
                    "prices": {
                        "$arrayToObject": "$filtered_prices"
                    }
                }
            },
            {
                "$project": {
                    "filtered_prices": 0  # Remove intermediate field
                }
            }
        ]

        result = await collection.aggregate(pipeline).to_list(length=None)

        for item in result:
            prices = item.get('prices', {})
            yearly_changes = {}
            rs_chg_dict = {}
            pr_chg_dict = {}

            # Iterate over the prices to calculate rs_chg for each year
            for date_str, price_data in prices.items():
                # Ensure price_data is a dictionary before proceeding
                if isinstance(price_data, dict) and 'open' in price_data and 'close' in price_data:
                    # Extract the year from the date (in YYYY-MM-DD format)
                    year = date_str.split('-')[0]
                    if year not in yearly_changes:
                        # Initialize the year's data with the first open and close values
                        yearly_changes[year] = {
                            'old_date_close': price_data['close'],  # First close value seen for the year (baseline)
                            'latest_date_close': price_data['close']
                        }
                    else:
                        # Update the latest close for the year
                        yearly_changes[year]['latest_date_close'] = price_data['close']
                else:
                    # If price_data is not a dictionary with 'open' and 'close', skip it
                    continue

            # Calculate rs_chg and pr_chg for each year and store in the dictionaries
            for year, year_data in yearly_changes.items():
                rs_chg = year_data['latest_date_close'] - year_data['old_date_close']  # Rupee change over the year
                if year_data['old_date_close'] != 0:
                    pr_chg = (rs_chg * 100 / year_data['old_date_close'])  # Percentage change over the year
                else:
                    pr_chg = 0  # Avoid division by zero if the baseline close price is zero

                rs_chg_dict[year] = rs_chg
                pr_chg_dict[year] = pr_chg

            # Add rs_chg and pr_chg dictionaries to the item
            item['rs_chg'] = rs_chg_dict
            item['pr_chg'] = pr_chg_dict

            # Calculate the average of rs_chg values
            avg_rs_chg = sum(rs_chg_dict.values()) / len(rs_chg_dict) if rs_chg_dict else 0
            item['rs_chg_avg'] = avg_rs_chg

            # Calculate the average of pr_chg values
            avg_pr_chg = sum(pr_chg_dict.values()) / len(pr_chg_dict) if pr_chg_dict else 0
            item['pr_chg_avg'] = avg_pr_chg

        # return result
        return JSONResponse(content=result, status_code=200)

    except Exception as e:
        return JSONResponse(status_code=500, content={"message": str(e)})


"""Symbol Search APIs"""
@app.get("/search-symbol/{prefix}")  # Used for **Symbol Search** across website
async def Symbol_Get(prefix: str):
    """This Function Gets All NSE, BSE Symbols"""
    collection_bse = db["bse_data1"]
    collection_nse = db["nse_data1"]

    # Define pipelines for symbol search
    def create_pipeline(prefix: str, exchange: str):
        return [
            {"$match": {"symbol": {"$regex": f"^{prefix}", "$options": "i"}}},  # Match symbols starting with the prefix
            {"$group": {"_id": "$symbol"}},
            {
                "$project": {
                    "symbol": "$_id",
                    "exchange": {"$literal": exchange}  # Add the exchange field with a fixed value
                }
            },
        ]

    # Create pipelines for BSE and NSE
    nse_pipeline = create_pipeline(prefix, "NSE")
    bse_pipeline = create_pipeline(prefix, "BSE")

    # Run both aggregations concurrently using asyncio.gather
    nse_task = collection_nse.aggregate(nse_pipeline).to_list(length=None)
    bse_task = collection_bse.aggregate(bse_pipeline).to_list(length=None)

    nse_symbols, bse_symbols = await asyncio.gather(nse_task, bse_task)

    # Combine NSE and BSE symbols into a single list
    result = nse_symbols + bse_symbols

    # Return the combined list of symbols
    return JSONResponse(content=result, status_code=200)


"""Heatmap Analysis APIs"""
@app.get("/daily-table/")  # Used in **Analysis > Heatmap Analysis: https://stocks.rayvat.com/heatmapanalysis**
async def daily_table(
    search: Optional[str] = Query(None),
    type: Optional[str] = Query(None)
    ):
    """
        Contains Data for Two Sections:
        1) Daily Section
        2) Day Month Section

        Parameters: [search] [type]
    """

    # Determine the collection based on the type
    if type == "NSE":
        collection = db["nse_data1"]
    else:
        collection = db["bse_data1"]

    pipeline = [
        {
            "$match": {
                "symbol": search
            }
        },
        {
            "$project": {
                "symbol": 1,
                "date_prices": {
                    "$objectToArray": "$prices"
                }
            }
        },
        {
            "$unwind": "$date_prices"
        },
        {
            "$addFields": {
                "date": "$date_prices.k",
                "price_details": {
                    "open": "$date_prices.v.open",
                    "high": "$date_prices.v.high",
                    "low": "$date_prices.v.low",
                    "close": "$date_prices.v.close",
                    "prev_close": "$date_prices.v.prev_close"
                }
            }
        },
        {
            "$addFields": {
                # Calculate rp_change and pr_change
                "price_details.rp_change": {
                    "$subtract": ["$price_details.close", "$price_details.prev_close"]
                },
                "price_details.pr_change": {
                    "$cond": [
                        { "$ne": ["$price_details.prev_close", 0] },  # Check if prev_close is not zero
                        {
                            "$multiply": [
                                {
                                    "$divide": [
                                        { "$subtract": ["$price_details.close", "$price_details.prev_close"] },
                                        "$price_details.prev_close"
                                    ]
                                },
                                100
                            ]
                        },
                        None  # If prev_close is zero, set pr_change to None (or 0 based on preference)
                    ]
                }
            }
        },
        {
            "$group": {
                "_id": "$symbol",
                "dates": {
                    "$push": {
                        "date": "$date",
                        "price_details": "$price_details"
                    }
                }
            }
        },
        {
            "$project": {
                "_id": 0,
                "symbol": "$_id",
                "dates": {
                    "$arrayToObject": {
                        "$map": {
                            "input": "$dates",
                            "as": "date_entry",
                            "in": {
                                "k": "$$date_entry.date",
                                "v": "$$date_entry.price_details"
                            }
                        }
                    }
                }
            }
        }
    ]

    result = await collection.aggregate(pipeline, allowDiskUse=True).to_list(length=None)
    formatted_result = []
    for item in result:
        symbol = item["symbol"]
        dates = item["dates"]

        # Group dates by year -> month -> day
        grouped_dates = defaultdict(lambda: defaultdict(dict))

        for date, price_details in dates.items():
            year, month, day = date.split("-")
            grouped_dates[year][month][day] = price_details

        formatted_result.append({
            "symbol": symbol,
            "dates": dict(grouped_dates)
        })


    # Initialize the DayMonthTable with defaultdict for easy counting
    day_month_table = defaultdict(lambda: defaultdict(lambda: {"increase": 0, "decrease": 0}))
    final_result = []

    # Iterate through each symbol's date data
    for item in formatted_result:
        symbol = item.get("symbol")
        dates = item.get("dates", {})

        # Iterate through each year in dates
        for year, months in dates.items():
            # Iterate through each month in the year
            for month, days in months.items():
                # Iterate through each day in the month
                for day, data in days.items():
                    # Check if pr_change is positive or negative
                    if data.get("pr_change") is not None:
                        if data["pr_change"] > 0:
                            day_month_table[month][day]["increase"] += 1
                        elif data["pr_change"] < 0:
                            day_month_table[month][day]["decrease"] += 1

    # Add status field and inc_prob based on increase/decrease counts
    for month, days in day_month_table.items():
        for day, counts in days.items():
            increase = counts["increase"]
            decrease = counts["decrease"]
            total = increase + decrease

            # Calculate inc_prob if total is greater than 0
            if total > 0:
                counts["inc_prob"] = increase / total
            else:
                counts["inc_prob"] = 0.0  # Set to 0 if there is no data

            # Determine status
            if increase > decrease:
                counts["status"] = "Increase"
            elif increase < decrease:
                counts["status"] = "Decrease"
            else:
                counts["status"] = "Neutral"

    # Convert defaultdict to a regular dict for final formatting
    day_month_table = {month: dict(days) for month, days in day_month_table.items()}

    # Append the final structured data to formatted_result
    # Assuming you want to keep each item's grouped data separate and add the DayMonthTable per symbol
    for item in formatted_result:
        symbol = item.get("symbol")
        dates = item.get("dates", {})

        # Prepare the final structure and append it to formatted_result
        final_result.append({
            "symbol": symbol,
            "dates": dates,
            "DayMonthTable": day_month_table
        })
    return JSONResponse(content=final_result, status_code=200)

"""
@app.get("/year-month-table/")  # Used in **Analysis > Heatmap Analysis: https://stocks.rayvat.com/heatmapanalysis**
async def year_month_table_new(
    search: Optional[str] = Query(None),
    type: Optional[str] = Query(None)
):
    '''
        Contains Data for Two Sections:
        1) Overall Probability Section
        2) Year Month Section

        Parameters: [search] [type]
    '''

    # Determine the collection based on the type
    if type == "NSE":
        collection = db["nse_data1"]
    else:
        collection = db["bse_data1"]

    # Pipeline to match and project only the necessary fields
    pipeline = [
        {
            '$match': {
                'symbol': search
            }
        },
        {
            '$project': {
                '_id': 1,
                "symbol": 1,
                'prices': 1
            }
        }
    ]

    # Run the aggregation pipeline
    result = await collection.aggregate(pipeline, allowDiskUse=True).to_list(length=None)
    if not result:
        return JSONResponse(content={'error': 'No data found'}, status_code=404)

    # Assuming we are only working with a single document for a given symbol
    data = result[0]
    prices = data.get('prices', {})

    # Aggregating data by year and month
    year_month_data = defaultdict(lambda: defaultdict(dict))
    probability = {month: {"Increased": 0, "Decreased": 0} for month in range(1, 13)}

    for date_str, day_data in prices.items():
        date = datetime.strptime(date_str, "%Y-%m-%d")
        year = date.year
        month = date.month

        # Initialize month data if not already initialized
        if not year_month_data[year][month]:
            year_month_data[year][month] = {
                "open": day_data["open"],  # First open price of the month
                "high": day_data["high"],
                "low": day_data["low"],
                "first_date_close": day_data["close"],  # Store for rs_change calculation
                "last_date_close": day_data["close"],  # Initialize to first close
            }
        else:
            # Update high and low for the month
            year_month_data[year][month]["high"] = max(year_month_data[year][month]["high"], day_data["high"])
            year_month_data[year][month]["low"] = min(year_month_data[year][month]["low"], day_data["low"])
            # Update last_date_close to the current day's close (to get the last date's close)
            year_month_data[year][month]["last_date_close"] = day_data["close"]

    # Calculate rs_change, pr_change, and probability counts
    for year, months in year_month_data.items():
        for month, values in months.items():
            first_close = values["first_date_close"]
            last_close = values["last_date_close"]
            rs_change = last_close - first_close
            pr_change = (rs_change * 100 / first_close) if first_close != 0 else 0

            # Update the dictionary with calculated values
            year_month_data[year][month].update({
                "rs_change": rs_change,
                "pr_change": pr_change,
                "open": first_close
            })

            # Update the probability count
            if pr_change > 0:
                probability[month]["Increased"] += 1
            elif pr_change < 0:
                probability[month]["Decreased"] += 1

            # Remove intermediate calculation values
            del year_month_data[year][month]["first_date_close"]
            del year_month_data[year][month]["last_date_close"]

    # Convert ObjectId to string before returning the response
    response_data = {
        "_id": str(data["_id"]),  # Convert ObjectId to string
        "symbol": data["symbol"],
        "prices": year_month_data,
        "Probability": {
            month: {
                "Increased": count["Increased"],
                "Decreased": count["Decreased"],
                "Inc_Prob": count["Increased"] / (count["Increased"] + count["Decreased"]) if (count["Increased"] + count["Decreased"]) > 0 else 0
            }
            for month, count in probability.items()
        }
    }
    return JSONResponse(content=response_data, status_code=200)
"""

@app.get("/year-month-table/")  # Used in **Analysis > Heatmap Analysis: https://stocks.rayvat.com/heatmapanalysis**
async def year_month_table_new(
    search: Optional[str] = Query(None),
    type: Optional[str] = Query(None)
):
    """
        Contains Data for Two Sections:
        1) Overall Probability Section
        2) Year Month Section

        Parameters: [search] [type]
    """

    # Determine the collection based on the type
    if type == "NSE":
        collection = db["nse_data1"]
    else:
        collection = db["bse_data1"]

    # Pipeline to match and project only the necessary fields
    pipeline = [
        {
            '$match': {
                'symbol': search
            }
        },
        {
            '$project': {
                '_id': 1,
                "symbol": 1,
                'prices': 1
            }
        }
    ]

    # Run the aggregation pipeline
    result = await collection.aggregate(pipeline, allowDiskUse=True).to_list(length=None)
    if not result:
        return JSONResponse(content={'error': 'No data found'}, status_code=404)

    # Assuming we are only working with a single document for a given symbol
    data = result[0]
    prices = data.get('prices', {})

    # Aggregating data by year and month
    year_month_data = defaultdict(lambda: defaultdict(dict))
    probability = {month: {"Increased": 0, "Decreased": 0} for month in range(1, 13)}

    last_close = 0.0  # Initialize last close value

    for date_str, day_data in prices.items():
        date = datetime.strptime(date_str, "%Y-%m-%d")
        year = date.year
        month = date.month

        # Initialize month data if not already initialized
        if year not in year_month_data:
            year_month_data[year] = {}

        if month not in year_month_data[year]:
            year_month_data[year][month] = {
                "open": day_data["open"],  # First open price of the month
                "high": day_data["high"],
                "low": day_data["low"],
                "first_date_close": last_close,  # Use last_close for the first close of the month
                "last_date_close": day_data["close"],  # Initialize to first close
            }
        else:
            # Update high and low for the month
            year_month_data[year][month]["high"] = max(year_month_data[year][month]["high"], day_data["high"])
            year_month_data[year][month]["low"] = min(year_month_data[year][month]["low"], day_data["low"])
            # Update last_date_close to the current day's close (to get the last date's close)
            year_month_data[year][month]["last_date_close"] = day_data["close"]

        # Update last_close for the next day's calculation
        last_close = day_data["close"]

    # Calculate rs_change, pr_change, and probability counts
    for year, months in year_month_data.items():
        for month, values in months.items():
            first_close = values["first_date_close"]
            last_close = values["last_date_close"]
            rs_change = last_close - first_close
            pr_change = (rs_change * 100 / first_close) if first_close != 0 else 0

            # Update the dictionary with calculated values
            year_month_data[year][month].update({
                "rs_change": rs_change,
                "pr_change": pr_change,
                "open": first_close
            })

            # Update the probability count
            if pr_change > 0:
                probability[month]["Increased"] += 1
            elif pr_change < 0:
                probability[month]["Decreased"] += 1

            # Remove intermediate calculation values
            del year_month_data[year][month]["first_date_close"]
            del year_month_data[year][month]["last_date_close"]

    # Convert ObjectId to string before returning the response
    response_data = {
        "_id": str(data["_id"]),  # Convert ObjectId to string
        "symbol": data["symbol"],
        "prices": year_month_data,
        "Probability": {
            month: {
                "Increased": count["Increased"],
                "Decreased": count["Decreased"],
                "Inc_Prob": count["Increased"] / (count["Increased"] + count["Decreased"]) if (count["Increased"] + count["Decreased"]) > 0 else 0
            }
            for month, count in probability.items()
        }
    }
    return JSONResponse(content=response_data, status_code=200)

@app.get("/stock-information/")  # Used in **Analysis > Heatmap Analysis Page: https://stocks.rayvat.com/heatmapanalysis**
async def StockInformation(
    search: Optional[str] = Query(None),
    type: Optional[str] = Query(None)
    ):
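    """
        Stock Information for the Heatmap Analysis page: yfinance profile fields, period-wise
        high/low prices, and historical price/percentage changes versus past trading days.

        Parameters: [search] [type]

        Example request (illustrative values): GET /stock-information/?search=TCS&type=NSE
    """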

    if not search or not type:
        return JSONResponse(content={"error": "Search query and type (NSE or BSE) are required"}, status_code=400)

    query = {'symbol': search}
    try:
        # Common code for NSE and BSE
        if type == "NSE":
            ticker = f"{search}.NS"
            st_collection = db['nse_data1']
        else:
            ticker = f"{search}.BO"
            st_collection = db['bse_data1']

        result = await st_collection.find_one(query)

        if not result:
            return {"error": "Symbol not found"}

        # Fetch stock info
        stock = yf.Ticker(ticker)
        info = stock.info

        # Extract data from stock info (guard against a missing/zero previous close)
        current_price = float(info.get('currentPrice', 0.0))
        previous_close = float(info.get('previousClose', 0.0))
        stock_info = {
            'longName' : info.get('shortName', '-'),
            'dchange': ((current_price - previous_close) / previous_close * 100) if previous_close != 0 else 0,
            'rchange': (current_price - previous_close),
            'previous_close': info.get('previousClose', '-'),
            'description': info.get("longBusinessSummary", "Description not available."),
            'open': info.get('open', '-'),
            'sector': info.get('sector', '-'),
            'week_52_change' : info.get('52WeekChange', '-'),
            'industry': info.get('industry', '-'),
            'market_cap': round(float(info.get('marketCap', 0)) / 10000000, 2),  # In crores; 0 if marketCap is unavailable
            'beta': info.get('beta', '-'),
            'full_time_employees': info.get('fullTimeEmployees', '-'),
            'book_value': info.get('bookValue', '-'),
            'price_to_book': info.get('priceToBook', '-'),
            'last_dividend_value': info.get('lastDividendValue', ''),
            'website': info.get('website', '-'),
            'current_price': info.get('currentPrice', '-'),
            'day_high': info.get('dayHigh', '-'),
            'day_low': info.get('dayLow', '-'),
            'week_52_high': info.get('fiftyTwoWeekHigh', '-'),
            'week_52_low': info.get('fiftyTwoWeekLow', '-')
        }

        day_data = stock.history(period='1d')
        week_data = stock.history(period='5d')
        month_data = stock.history(period="1mo")
        quarter_data = stock.history(period="3mo")
        six_month_data = stock.history(period="6mo")
        year_data = stock.history(period="1y")
        two_year_data = stock.history(period="2y")
        five_year_data = stock.history(period="5y")
        ten_year_data = stock.history(period="10y")

        price_info = {
            # Get data for 1 day
            'day_high' : day_data['High'].max() if not day_data.empty else None,
            'day_low': day_data['Low'].min() if not day_data.empty else None,

            # Get data for 1 week
            'week_high_price' : week_data['High'].max() if not week_data.empty else None,
            'week_low_price': week_data['Low'].min() if not week_data.empty else None,

            # Get data for 1 month
            'month_high_price' : month_data['High'].max() if not month_data.empty else None,
            'month_low_price' : month_data['Low'].min() if not month_data.empty else None,

            # Get data for 1 quarter (3 months)
            'quarter_high_price' : quarter_data['High'].max() if not quarter_data.empty else None,
            'quarter_low_price' : quarter_data['Low'].min() if not quarter_data.empty else None,

            # Get data for 2 quarters (6 months)
            'six_month_high_price' : six_month_data['High'].max() if not six_month_data.empty else None,
            'six_month_low_price' : six_month_data['Low'].min() if not six_month_data.empty else None,

            # Get data for 1 year
            'year_high_price' : year_data['High'].max() if not year_data.empty else None,
            'year_low_price' : year_data['Low'].min() if not year_data.empty else None,

            # Get data for 2 years
            'two_year_high_price' : two_year_data['High'].max() if not two_year_data.empty else None,
            'two_year_low_price' : two_year_data['Low'].min() if not two_year_data.empty else None,

            # Get data for 5 years
            'five_year_high_price' : five_year_data['High'].max() if not five_year_data.empty else None,
            'five_year_low_price' : five_year_data['Low'].min() if not five_year_data.empty else None,

            # Get data for 10 years
            'ten_year_high_price' : ten_year_data['High'].max() if not ten_year_data.empty else None,
            'ten_year_low_price' : ten_year_data['Low'].min() if not ten_year_data.empty else None,

        }

        today = datetime.today()
        yesterday = today - timedelta(days=1)
        prices = result.get('prices', {})

        # Ensure yesterday is a trading day
        while yesterday.weekday() >= 5:
            yesterday -= timedelta(days=1)

        # Fetch 5 trading days, 63 (3 months), etc., skipping weekends
        date_1w = get_trading_days(yesterday, 5)[-1]  # Get the date 5 trading days back
        date_3m = get_trading_days(yesterday, 63)[-1]  # Get the date 63 trading days back
        date_6m = get_trading_days(yesterday, 126)[-1]  # Get the date 126 trading days back
        date_1y = get_trading_days(yesterday, 252)[-1]  # Get the date 252 trading days back
        date_2y = get_trading_days(yesterday, 2*252)[-1]  # 2 years
        date_3y = get_trading_days(yesterday, 3*252)[-1]  # 3 years
        date_4y = get_trading_days(yesterday, 4*252)[-1]  # 4 years
        date_5y = get_trading_days(yesterday, 5*252)[-1]  # 5 years
        date_10y = get_trading_days(yesterday, 10*252)[-1]  # 10 years

        # Fetching prices for each target date
        price_yesterday = await get_price_for_date(prices, yesterday)
        price_1w = await get_price_for_date(prices, date_1w)
        price_3m = await get_price_for_date(prices, date_3m)
        price_6m = await get_price_for_date(prices, date_6m)
        price_1y = await get_price_for_date(prices, date_1y)
        price_2y = await get_price_for_date(prices, date_2y)
        price_3y = await get_price_for_date(prices, date_3y)
        price_4y = await get_price_for_date(prices, date_4y)
        price_5y = await get_price_for_date(prices, date_5y)
        price_10y = await get_price_for_date(prices, date_10y)

        # Calculating price changes
        price_chg_1w = price_yesterday - price_1w if price_yesterday and price_1w else None
        price_chg_3m = price_yesterday - price_3m if price_yesterday and price_3m else None
        price_chg_6m = price_yesterday - price_6m if price_yesterday and price_6m else None
        price_chg_1y = price_yesterday - price_1y if price_yesterday and price_1y else None
        price_chg_2y = price_yesterday - price_2y if price_yesterday and price_2y else None
        price_chg_3y = price_yesterday - price_3y if price_yesterday and price_3y else None
        price_chg_4y = price_yesterday - price_4y if price_yesterday and price_4y else None
        price_chg_5y = price_yesterday - price_5y if price_yesterday and price_5y else None
        price_chg_10y = price_yesterday - price_10y if price_yesterday and price_10y else None

        # Calculating percentage changes
        per_chg_1w = (price_chg_1w / price_1w * 100) if price_1w and price_chg_1w else None
        per_chg_3m = (price_chg_3m / price_3m * 100) if price_3m and price_chg_3m else None
        per_chg_6m = (price_chg_6m / price_6m * 100) if price_6m and price_chg_6m else None
        per_chg_1y = (price_chg_1y / price_1y * 100) if price_1y and price_chg_1y else None
        per_chg_2y = (price_chg_2y / price_2y * 100) if price_2y and price_chg_2y else None
        per_chg_3y = (price_chg_3y / price_3y * 100) if price_3y and price_chg_3y else None
        per_chg_4y = (price_chg_4y / price_4y * 100) if price_4y and price_chg_4y else None
        per_chg_5y = (price_chg_5y / price_5y * 100) if price_5y and price_chg_5y else None
        per_chg_10y = (price_chg_10y / price_10y * 100) if price_10y and price_chg_10y else None

        hist_info = {
            "yesterday": {"date": yesterday.strftime('%Y-%m-%d'), "price": price_yesterday},
            "one_Week": {"date": date_1w.strftime('%Y-%m-%d'), "price": price_1w, "rs_chg": price_chg_1w, "pr_chg": per_chg_1w},
            "three_Months": {"date": date_3m.strftime('%Y-%m-%d'), "price": price_3m, "rs_chg": price_chg_3m, "pr_chg": per_chg_3m},
            "six_Months": {"date": date_6m.strftime('%Y-%m-%d'), "price": price_6m, "rs_chg": price_chg_6m, "pr_chg": per_chg_6m},
            "one_Year": {"date": date_1y.strftime('%Y-%m-%d'), "price": price_1y, "rs_chg": price_chg_1y, "pr_chg": per_chg_1y},
            "two_Year": {"date": date_2y.strftime('%Y-%m-%d'), "price": price_2y, "rs_chg": price_chg_2y, "pr_chg": per_chg_2y},
            "three_Year": {"date": date_3y.strftime('%Y-%m-%d'), "price": price_3y, "rs_chg": price_chg_3y, "pr_chg": per_chg_3y},
            "four_Year": {"date": date_4y.strftime('%Y-%m-%d'), "price": price_4y, "rs_chg": price_chg_4y, "pr_chg": per_chg_4y},
            "five_Year": {"date": date_5y.strftime('%Y-%m-%d'), "price": price_5y, "rs_chg": price_chg_5y, "pr_chg": per_chg_5y},
            "ten_Year": {"date": date_10y.strftime('%Y-%m-%d'), "price": price_10y, "rs_chg": price_chg_10y, "pr_chg": per_chg_10y}
        }

        # Combine all results in a structured dictionary
        result = {
            'ticker': ticker,
            'stock_info': stock_info,
            'hist_info': hist_info,
            'price_info' : price_info
        }

        return JSONResponse(content=result, status_code=200)

    except Exception as e:
        return JSONResponse(content={"error": str(e)}, status_code=500)

@app.get("/search-symbol/{prefix}")  # Used for **Symbol Search** across website
async def Symbol_Get(prefix: str):
    """This Function Gets All NSE, BSE Symbols"""
    collection_bse = db["bse_data1"]
    collection_nse = db["nse_data1"]

    # Define pipelines for symbol search
    def create_pipeline(prefix: str, exchange: str):
        return [
            {"$match": {"symbol": {"$regex": f"^{prefix}", "$options": "i"}}},  # Match symbols starting with the prefix
            {"$group": {"_id": "$symbol"}},
            {
                "$project": {
                    "symbol": "$_id",
                    "exchange": {"$literal": exchange}  # Add the exchange field with a fixed value
                }
            },
        ]

    # Create pipelines for BSE and NSE
    nse_pipeline = create_pipeline(prefix, "NSE")
    bse_pipeline = create_pipeline(prefix, "BSE")

    # Run both aggregations concurrently using asyncio.gather
    nse_task = collection_nse.aggregate(nse_pipeline).to_list(length=None)
    bse_task = collection_bse.aggregate(bse_pipeline).to_list(length=None)

    nse_symbols, bse_symbols = await asyncio.gather(nse_task, bse_task)

    # Combine NSE and BSE symbols into a single list
    result = nse_symbols + bse_symbols

    # Return the combined list of symbols
    return JSONResponse(content=result, status_code=200)

"""Portfolio APIs"""
async def get_user_from_token_p(token: str = Depends(oauth2_scheme)):  # Helper function of /holdings
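    """Decode the JWT access token and return the matching user document; raise 401 if the token or user is invalid."""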
    try:
        payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
        username: str = payload.get("sub")
        if username is None:
            raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token")
        user = await Auth_collection.find_one({"username": username})
        if user is None:
            raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="User not found")
        return user
    except JWTError:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token")

@app.get("/holdings")  # Used in **Paper Trade > Portfolio: https://stocks.rayvat.com/portfolio**
async def get_holdings(current_user: dict = Depends(get_user_from_token_p)):
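    """Fetch all holdings and convert datetime fields to ISO format before returning."""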
    try:
        # Use to_list() to fetch all documents from the cursor
        holdings_list = await holdings_collection.find({}, {"_id": 0}).to_list(length=None)

        # Convert datetime fields to ISO format for all holdings
        holdings_list = [convert_datetime_p(holding) for holding in holdings_list]

        return JSONResponse(content={"message": "Holdings fetched successfully", "orders": holdings_list})
    except Exception as e:
        return JSONResponse(content={"message": "An error occurred while fetching holdings", "error": str(e)}, status_code=500)


"""Orders APIs"""
@app.get("/get-orders")  # Used in **Paper Trade > Orders: https://stocks.rayvat.com/basketorders**
async def get_orders(
    start_date: str = Query(None, description="Start date in YYYY-MM-DD format"),
    end_date: str = Query(None, description="End date in YYYY-MM-DD format")
):
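    """
        Fetch orders whose datetime falls within [start_date, end_date] (defaults to today).
        If no orders exist on the exact start or end date, the nearest available dates are used.
    """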
    try:
        # Default to today if dates are not provided
        if not start_date or not end_date:
            yesterday = datetime.now() # - timedelta(days=1)
            start_date = end_date = yesterday.strftime("%Y-%m-%d")

        # Convert start_date and end_date strings to datetime objects
        start_date_obj = datetime.strptime(start_date, "%Y-%m-%d")
        end_date_obj = datetime.strptime(end_date, "%Y-%m-%d")

        # Check if there are documents for the given start_date
        if orders_collection.count_documents({"datetime": {"$gte": start_date_obj, "$lt": start_date_obj + timedelta(days=1)}}) == 0:
            # Find next available start date if no data for given start_date
            next_start = await orders_collection.find({"datetime": {"$gt": start_date_obj}}).sort("datetime", ASCENDING).limit(1).to_list(None)
            if next_start:
                start_date_obj = next_start[0]["datetime"]

        # Check if there are documents for the given end_date
        if orders_collection.count_documents({"datetime": {"$gte": end_date_obj, "$lt": end_date_obj + timedelta(days=1)}}) == 0:
            # Find previous available end date if no data for given end_date
            prev_end = await orders_collection.find({"datetime": {"$lt": end_date_obj + timedelta(days=1)}}).sort("datetime", DESCENDING).limit(1).to_list(None)
            if prev_end:
                end_date_obj = prev_end[0]["datetime"]

        result = await orders_collection.find({
                "datetime": {
                    "$gte": start_date_obj,
                    "$lt": end_date_obj + timedelta(days=1)
                }
            }).to_list(None)


        # Serialize the orders
        serialized_orders = [serialize_order_o(order) for order in result]
        return JSONResponse(content={"message": "Orders fetched successfully", "orders": serialized_orders})

    except PyMongoError as e:
        raise HTTPException(status_code=500, detail=f"Database error: {e}")
    except ValueError as ve:
        raise HTTPException(status_code=400, detail=f"Invalid date format: {ve}")


"""OHLC Report APIs"""
@app.post("/get-report")
async def get_report(req: ReportType):
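    """
        Build the OHLC report for the uploaded symbols between start_date and end_date.
        reqType 0 runs all four report types (open, high, low, close) and builds a summary report;
        reqType 1-4 runs a single type. Each type includes per-symbol rows plus TOTAL,
        % TOTAL CHANGE, DAY CHANGE and DAY CHANGE % summary rows.
    """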
    try:
        # Validate and parse dates
        start_dt = datetime.strptime(req.start_date, "%Y-%m-%d")
        end_dt = datetime.strptime(req.end_date, "%Y-%m-%d")
        if start_dt > end_dt:
            raise HTTPException(status_code=400, detail="start_date must be before or equal to end_date")

        report_type_map = {1: "open", 2: "high", 3: "low", 4: "close"}
        results_by_type = {"summary_report": {}}

        # Determine request types
        if req.reqType == 0:
            req_types = list(report_type_map.keys())
        else:
            req_types = [req.reqType]

        for req_type in req_types:
            report_type = report_type_map.get(req_type)
            if not report_type:
                raise HTTPException(status_code=400, detail="Invalid reqType. Must be between 1 and 4")

            # Use await and .to_list() for async motor cursor
            symbols_data = await reportUpload_collection.find().to_list(length=None)

            if not symbols_data:
                raise HTTPException(status_code=404, detail="No data found in reportUpload collection")

            all_symbols = []
            total_prices = {}

            data_sym = {}
            data_sym["symbol_data"] = []

            # Process symbols
            for symbol_info in symbols_data:
                pipeline = [
                   {"$match": {"symbol": {"$regex": f"{symbol_info['symbol']}"} }},
                   {"$project": {
                        "prices": {
                            "$filter": {
                                "input": {
                                    "$map": {
                                        "input": {"$objectToArray": "$prices"},
                                        "as": "priceData",
                                        "in": {
                                            "date": {"$toDate": "$$priceData.k"},
                                            "price_multiplied": {
                                                "$multiply": [
                                                    f"$$priceData.v.{report_type}",
                                                    symbol_info["qty"]
                                                ]
                                            }
                                        }
                                    }
                                },
                                "as": "filteredPrice",
                                "cond": {
                                    "$and": [
                                        {"$gte": ["$$filteredPrice.date", start_dt]},
                                        {"$lte": ["$$filteredPrice.date", end_dt]}
                                    ]
                                }
                            }
                        }
                    }}
                ]

                # Use await with .to_list() for async aggregation
                results = await nseData_collection.aggregate(pipeline).to_list(length=None)

                symbol_data = {
                    "type": report_type,
                    "symbol": symbol_info["symbol"],
                    "qty": symbol_info["qty"],
                    "avg_price": symbol_info["avg_price"]
                }

                # Aggregate prices by date
                for result in results:
                    for price_data in result.get("prices", []):
                        date_str = price_data["date"].strftime("%Y-%m-%d")
                        price = price_data["price_multiplied"]

                        symbol_data[date_str] = price
                        total_prices[date_str] = total_prices.get(date_str, 0) + price

                data_sym["symbol_data"].append(symbol_data)

            all_symbols.append(data_sym)

            summary_total = {}
            summary_total["summary_total"] = []

            # Calculate summary entries
            sorted_dates = sorted(total_prices.keys())
            total_entry = {
                "type": report_type,
                "symbol": "TOTAL",
                "qty": "",
                "avg_price": "",
                **{date: total_prices[date] for date in sorted_dates}
            }
            # all_symbols.append(total_entry)
            summary_total["summary_total"].append(total_entry)

            # Correctly calculate % Total Change
            if sorted_dates:
                first_total = total_prices[sorted_dates[0]]
                change_entry = {
                    "type": report_type,
                    "symbol": f"% TOTAL CHANGE in {report_type}",
                    "qty": "",
                    "avg_price": "",
                    **{
                        date: "-" if first_total == 0 else
                        round(((total_prices[date] - first_total) / first_total * 100), 2)
                        for date in sorted_dates
                    }
                }
                # all_symbols.append(change_entry)
                summary_total["summary_total"].append(change_entry)


            # Correctly calculate Day Change
            day_changes = {}
            day_change_entry = {
                "type": report_type,
                "symbol": f"DAY CHANGE in {report_type}",
                "qty": "",
                "avg_price": "",
            }

            for i, date in enumerate(sorted_dates):
                if i == 0:
                    day_changes[date] = "-"
                else:
                    prev_date = sorted_dates[i-1]
                    day_changes[date] = round(total_prices[date] - total_prices[prev_date], 2)

                day_change_entry[date] = day_changes[date]

            # all_symbols.append(day_change_entry)
            summary_total["summary_total"].append(day_change_entry)

            # Correctly calculate Day Change Percentage
            day_change_percent_entry = {
                "type": report_type,
                "symbol": f"DAY CHANGE % in {report_type}",
                "qty": "",
                "avg_price": "",
                **{
                    date: "-" if i == 0 or total_prices[sorted_dates[i-1]] == 0 else
                    round((day_changes[date] / total_prices[sorted_dates[i-1]] * 100), 2)
                    for i, date in enumerate(sorted_dates)
                }
            }
            # all_symbols.append(day_change_percent_entry)
            summary_total["summary_total"].append(day_change_percent_entry)
            all_symbols.append(summary_total)

            results_by_type[report_type] = convert_objectid_ohlc(all_symbols)

            # Only add to summary report if reqType is 0
            if req.reqType == 0:
                results_by_type["summary_report"][report_type] = total_entry

        # Only add metrics to summary report if reqType is 0
        if req.reqType == 0:
            # Metric Calculations
            # Each metric maps to (display label, minuend field, subtrahend field, is_percentage)
            metric_specs = {
                "high_minus_open":     ("High - Open",    "high", "open",  False),
                "open_minus_low":      ("Open - Low",     "open", "low",   False),
                "high_minus_low":      ("High - Low",     "high", "low",   False),
                "open_minus_close":    ("Open - Close",   "open", "close", False),
                "high_minus_open_pr":  ("High - Open %",  "high", "open",  True),
                "open_minus_low_pr":   ("Open - Low %",   "open", "low",   True),
                "high_minus_low_pr":   ("High - Low %",   "high", "low",   True),
                "open_minus_close_pr": ("Open - Close %", "open", "close", True),
            }

            metrics = {}
            for metric, (label, minuend, subtrahend, is_percentage) in metric_specs.items():
                result = metrics[metric] = {"symbol": label}
                for date in sorted_dates:
                    try:
                        values = [results_by_type["summary_report"][minuend][date],
                                  results_by_type["summary_report"][subtrahend][date]]
                    except KeyError:
                        # Skip dates (or report types) missing from the summary report
                        continue

                    if is_percentage:
                        result[date] = "-" if values[1] == 0 else round((values[0] - values[1]) / values[1] * 100, 2)
                    else:
                        result[date] = round(values[0] - values[1], 2)

            results_by_type["summary_report"].update(metrics)

        # If only one type is requested, return just that type's data
        if req.reqType != 0 and len(req_types) == 1:
            return JSONResponse(content=results_by_type[report_type])

        return JSONResponse(content=results_by_type)

    except ValueError as e:
        raise HTTPException(status_code=400, detail=f"Invalid date format: {str(e)}")
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")


"""Login APIs"""
async def get_user_from_token(token: str = Depends(oauth2_scheme)):
    try:
        payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
        username: str = payload.get("sub")
        if username is None:
            raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token")
        user = await Auth_collection.find_one({"username": username})
        if user is None:
            raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="User not found")
        return user
    except JWTError:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token")

@app.post("/register", response_model=dict)
async def register(user: User):
    if await Auth_collection.find_one({"username": user.username}):
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Username already exists")
    hashed_password = hash_password(user.password)
    user_data = {"username": user.username, "email": user.email, "hashed_password": hashed_password}
    await Auth_collection.insert_one(user_data)
    return {"message": "User registered successfully"}

@app.post("/token", response_model=dict)
async def login(form_data: OAuth2PasswordRequestForm = Depends()):
    user = await Auth_collection.find_one({"username": form_data.username})
    if user is None or not verify_password(form_data.password, user['hashed_password']):
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid username or password")

    access_token = create_access_token(data={"sub": user['username']})
    return JSONResponse(content={"access_token": access_token, "token_type": "bearer"},status_code=200)

@app.get("/protected")
async def read_protected_data(current_user: dict = Depends(get_user_from_token)):

    user = {
        "id": str(current_user["_id"]),
        "username": current_user["username"],
        "email": current_user["email"],
        "roles": current_user["roles"]
    }
    return JSONResponse(content=user,status_code=200)
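
# Example end-to-end auth flow (a sketch: assumes a local server on port 8000 and that the User
# model needs only these fields; values are illustrative):
#   curl -X POST http://localhost:8000/register \
#        -H "Content-Type: application/json" \
#        -d '{"username": "alice", "email": "alice@example.com", "password": "s3cret"}'
#   curl -X POST http://localhost:8000/token \
#        -d "username=alice&password=s3cret"   # form-encoded, per OAuth2PasswordRequestForm
#   curl http://localhost:8000/protected \
#        -H "Authorization: Bearer <access_token returned by /token>"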

# API to get queries filtered by report type
@app.get("/get_queries/{report_type}", response_model=List[QueryStore])
async def get_queries(report_type: str):
    # report_type is a required path parameter; filter stored queries by it
    query_filter = {'report_type': report_type}

    # Fetch documents from the queryFormulas collection
    queries = await queryFormulas_collection.find(query_filter).to_list(length=100)  # Adjust the length if needed
    if not queries:
        raise HTTPException(status_code=404, detail="No queries found.")

    # Map fields from the MongoDB format to the Pydantic model format
    for query in queries:
        query['queryName'] = query.pop('query_name', None)
        query['description'] = query.pop('description', "")  # Default to an empty string if the field is missing

    return queries
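
# Example (assuming a local server on port 8000): GET http://localhost:8000/get_queries/open
# returns every stored query whose report_type is "open", with query_name surfaced as queryName.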

# API to get a specific query by query_name
@app.get("/get_query/{query_name}", response_model=QueryStore)
async def get_query(query_name: str):
    # Fetch a query based on the query_name
    query = await queryFormulas_collection.find_one({"query_name": query_name})

    if not query:
        raise HTTPException(status_code=404, detail=f"Query '{query_name}' not found.")

    # Map fields from MongoDB to the Pydantic model format
    query['queryName'] = query.pop('query_name', None)
    query['description'] = query.pop('description', None)

    return query

# Save Query API
"""@app.post("/save_query")
async def save_query(request: QueryStore):
    query_name = request.queryName
    description = request.description  # Fetch description from request
    payload = [item.dict() for item in request.payload]  # Convert each TimeRangeQuery to a dictionary

    query_doc = {
        "query_name": query_name,
        "description": description,  # Include the description at the root level
        "payload": payload
    }

    # Try to update the query if it exists, otherwise create a new one
    result = await queryFormulas_collection.update_one(
        {"query_name": query_name},  # Search by query_name
        {"$set": query_doc},  # Update the document
        upsert=True  # Create a new document if not exists
    )

    # Check the result of the operation
    if result.upserted_id:
        return {"message": f"Query '{query_name}' created successfully."}
    elif result.modified_count > 0:
        return {"message": f"Query '{query_name}' updated successfully."}
    else:
        raise HTTPException(status_code=404, detail="Query not found or no changes made.")
"""

@app.post("/save_query")
async def save_query(request: QueryStore):
    query_name = request.queryName
    report_type = request.report_type
    description = request.description
    payload = [item.dict() for item in request.payload]

    query_doc = {
        "query_name": query_name,
        "report_type": report_type,
        "description": description,
        "payload": payload
    }

    # Update or insert the document
    result = await queryFormulas_collection.update_one(
        {"query_name": query_name},  # Match document by query_name
        {"$set": query_doc},         # Update with new query document
        upsert=True                  # Insert if no document matches
    )

    # Response based on the operation result
    if result.upserted_id:
        return {"message": f"Query '{query_name}' created successfully."}
    elif result.modified_count > 0:
        return {"message": f"Query '{query_name}' updated successfully."}
    else:
        return {"message": "No changes were made to the existing query."}

# Delete Query API
@app.delete("/delete_query")
async def delete_query(query_name: str):
    result = await queryFormulas_collection.delete_one({"query_name": query_name})

    if result.deleted_count > 0:
        return {"message": f"Query '{query_name}' deleted successfully."}
    else:
        raise HTTPException(status_code=404, detail=f"Query '{query_name}' not found.")
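
# Note that query_name is passed as a query-string parameter, not a path parameter, e.g.
# (assuming a local server on port 8000):
#   curl -X DELETE "http://localhost:8000/delete_query?query_name=gap_up_scan"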


"""Websocket Fetch"""
@app.get("/get_socket")
async def get_socket():
    try:
        # Assuming `websocket_collection` is a MongoDB collection object
        today_date = date.today()
        getSocket = await websocket_collection.find_one({"date": str(today_date)}, { "_id": 0, "enctoken": 0 })

        if getSocket is None:
            raise HTTPException(status_code=404, detail="No websocket data found for today.")

        data = {"websocket": getSocket, "message": "Websocket received successfully.", "status": 200}
        return JSONResponse(status_code=200, content=data)
    except HTTPException as e:
        raise e
    except Exception as e:
        return JSONResponse(status_code=500, content={"error": str(e)})


"""Sections API"""
# Update section API
@app.post("/sections")
async def add_or_update_section(section: Section):
    try:
        # Parse the Queries JSON string into a Python object
        queries = json.loads(section.Queries)
    except json.JSONDecodeError:
        raise HTTPException(status_code=400, detail="Invalid JSON in 'Queries' field")

    # Check if the section exists
    existing_section = await sections_collection.find_one({"sectionName": section.sectionName})

    if existing_section:
        # Update existing section
        for query in queries:
            existing_query = next(
                (q for q in existing_section["Queries"] if q["queryName"] == query["queryName"]),
                None
            )
            if existing_query:
                # Update existing query
                existing_query.update(query)
            else:
                # Add new query
                existing_section["Queries"].append(query)

        await sections_collection.update_one(
            {"_id": existing_section["_id"]},
            {"$set": {"description": section.description, "Queries": existing_section["Queries"]}}
        )
        return {"message": "Section updated successfully", "status": 200}
    else:
        # Create new section
        new_section = {
            "sectionName": section.sectionName,
            "description": section.description,
            "Queries": queries,
            "queryType": section.queryType,
            "report_type": section.report_type,
        }
        await sections_collection.insert_one(new_section)
        return {"message": "Section created successfully", "status": 200}

# Get section API
@app.get("/get_sections")
async def get_section():
    sections = await sections_collection.find({}, {"_id": 0}).to_list(length=None)
    if not sections:
        raise HTTPException(status_code=404, detail="No sections found")

    return {"message": "Sections Fetched Successfully", "status": 200, "data": sections}

@app.delete("/delete_section")
async def delete_section(section_name: str):
    # Check if the section exists
    existing_section = await sections_collection.find_one({"sectionName": section_name})
    if not existing_section:
        raise HTTPException(status_code=404, detail=f"Section '{section_name}' not found.")

    # Delete the section
    result = await sections_collection.delete_one({"sectionName": section_name})
    if result.deleted_count == 1:
        return {"message": f"Section '{section_name}' deleted successfully."}
    else:
        raise HTTPException(status_code=500, detail="Failed to delete the section.")

