"""
This script contains all the APIs that power the React frontend app.
All of them are listed below with their endpoints:

*base_url = Domain Name

1) aggregate_bse (ex: {base_url}/report_details_BSE/?input_symbol=ABB&duration=1M&Instrument=Equity)
2) aggregate_nse (ex: {base_url}/report_details_NSE/?input_symbol=ABB&duration=1M&Instrument=Equity)
"""


# FastAPI Dependencies
from fastapi import FastAPI, Query, HTTPException
from fastapi.responses import JSONResponse
from datetime import datetime, timedelta, date
from fastapi.middleware.cors import CORSMiddleware
from motor.motor_asyncio import AsyncIOMotorCollection

# Other Dependencies
from zerodha.zerodha_ticker import KiteTicker
from pydantic import BaseModel, root_validator, Field
from typing import Dict, List, Literal, Optional, TypedDict, Union, DefaultDict
from collections import defaultdict
from functools import lru_cache
from orders import place_order
import yfinance as yf
import logging
import asyncio
import time
import json
import uuid

# Database Dependencies
from bson import ObjectId
from pymongo import MongoClient
from pymongo.errors import PyMongoError
from pymongo import ASCENDING, DESCENDING
from motor.motor_asyncio import AsyncIOMotorClient

# Load Credentials
with open(r"/var/www/html/trade_iq/env.json", "r", encoding="utf-8") as f:
    data = json.loads(f.read())

# MongoDB Connection
client = AsyncIOMotorClient(data['live']['mongo_url'], maxPoolSize=100, minPoolSize=20, retryWrites=True, maxIdleTimeMS=90000)
db = client[data['live']['database']]
orders_collection = db["orders"]
nseData_collection = db["nse_data1"]
bseData_collection = db["bse_data1"]
holdings_collection = db["holdings"]
reportUpload_collection = db["ohlc_reports"]
today = date.today()

# Configurations FastAPI
app = FastAPI()
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Allow all origins, or specify a list of domains
    allow_credentials=True,
    allow_methods=["*"],  # Allow all methods (GET, POST, etc.)
    allow_headers=["*"],  # Allow all headers
)

"""Input Classes for APIs"""
# Pydantic model to define the structure of each report
class ReportList(BaseModel):
    symbol: str
    avg_price: float
    qty: int

class ReportType(BaseModel):
    reqType: int
    start_date: Optional[str] = None
    end_date: Optional[str] = None

    @root_validator(pre=True)
    def set_default_dates(cls, values):
        if values.get("start_date") is None and values.get("end_date") is None:
            end_date = datetime.today()
            start_date = end_date - timedelta(days=30)
            values["start_date"] = start_date.strftime("%Y-%m-%d")
            values["end_date"] = end_date.strftime("%Y-%m-%d")
        return values
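
# Illustrative usage (an assumption about how the model is exercised, not taken from the
# source): with no dates supplied, the validator fills a trailing 30-day window.
#   ReportType(reqType=0)                                    -> last 30 days
#   ReportType(reqType=1, start_date="2024-01-01",
#              end_date="2024-01-31")                        -> dates kept as given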

class PriceData(TypedDict):
    date: datetime
    price_multiplied: float

class SymbolReport(TypedDict):
    type: str
    symbol: str
    qty: Union[float, str]
    avg_price: Union[float, str]

# Pydantic model to define the structure of each order
class Order(BaseModel):
    symbol: str
    amount: float
    quantity: int
    exchange: str
    action: str
    instrument_token: Optional[int] = None
    exchange_token: Optional[int] = None

class DeleteOrd(BaseModel):
    order_id: list[Dict[str, str]]

"""Helper Functions"""
# clean data for fetch-date-range
def clean_data(data):
    """Recursively convert non-JSON compliant float values to None."""
    if isinstance(data, dict):
        return {k: clean_data(v) for k, v in data.items()}
    elif isinstance(data, list):
        return [clean_data(item) for item in data]
    elif isinstance(data, float):
        # Check for out of range float values
        if data == float('inf') or data == float('-inf') or data != data:  # Check for NaN
            return None  # Convert to None for JSON compliance
    return data
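
# Illustrative example of clean_data on a nested payload:
#   clean_data({"close": float("nan"), "range": [1.5, float("inf")]})
#   -> {"close": None, "range": [1.5, None]}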

# Function to calculate the number of trading days, skipping weekends
def get_trading_days(start_date, num_days):
    trading_days = []
    current_date = start_date

    while len(trading_days) < num_days:
        if current_date.weekday() < 5:  # Monday to Friday are valid trading days
            trading_days.append(current_date)
        current_date -= timedelta(days=1)

    return trading_days
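
# Illustrative example: counting 3 trading days back from Monday 2024-01-08 skips the
# intervening weekend.
#   get_trading_days(datetime(2024, 1, 8), 3)
#   -> [2024-01-08, 2024-01-05, 2024-01-04] (as datetime objects, newest first)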

# Helper function to get the price for a specific date; if missing, walk forward to the next available trading day (gives up after ~1 year)
async def get_price_for_date(prices, target_date):
    date_str = target_date.strftime('%Y-%m-%d')  # Format date as string 'YYYY-MM-DD'

    # If the target date is found, return the price
    if date_str in prices:
        return prices[date_str].get('close')

    # If not found, look for the next available date (skip weekends)
    next_date = target_date + timedelta(days=1)

    while True:
        if next_date.weekday() >= 5:  # Skip weekends
            next_date += timedelta(days=1)
            continue

        next_date_str = next_date.strftime('%Y-%m-%d')
        if next_date_str in prices:
            return prices[next_date_str].get('close')

        next_date += timedelta(days=1)

        # Break if you have exhausted a reasonable range (optional, to prevent infinite loops)
        if (next_date - target_date).days > 365:  # Example limit: stop after 1 year
            return None
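
# Illustrative example (the 'prices' shape mirrors the per-date documents stored in Mongo):
#   prices = {"2024-01-05": {"close": 101.0}}
#   await get_price_for_date(prices, datetime(2024, 1, 5))  -> 101.0
#   await get_price_for_date(prices, datetime(2024, 1, 6))  -> walks forward day by day;
#   returns None here because no later date exists within a year.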

def serialize_order(order):
    """Convert MongoDB document to a JSON-compatible format."""
    # Convert ObjectId to string
    order['_id'] = str(order['_id'])

    # Convert datetime objects to ISO format strings
    for key, value in order.items():
        if isinstance(value, ObjectId):
            order[key] = str(value)
        elif isinstance(value, datetime):
            order[key] = value.isoformat()  # Convert datetime to ISO format string

    return order

# Helper function to convert datetime objects to ISO format
def convert_datetime(doc):
    if isinstance(doc, dict):
        for key, value in doc.items():
            if isinstance(value, datetime):
                doc[key] = value.isoformat()
            elif isinstance(value, dict) or isinstance(value, list):
                doc[key] = convert_datetime(value)
    elif isinstance(doc, list):
        for i in range(len(doc)):
            doc[i] = convert_datetime(doc[i])
    return doc

# Helper function to convert MongoDB results to JSON serializable format
def convert_objectid(data):
    if isinstance(data, list):
        return [convert_objectid(item) for item in data]
    elif isinstance(data, dict):
        return {key: convert_objectid(value) for key, value in data.items()}
    elif isinstance(data, ObjectId):
        return str(data)
    else:
        return data
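
# Illustrative example:
#   convert_objectid({"_id": ObjectId("65f0c2a1e4b0a1a1a1a1a1a1"), "symbol": "ABB"})
#   -> {"_id": "65f0c2a1e4b0a1a1a1a1a1a1", "symbol": "ABB"}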


"""API Endpoints Code (Starts Here)"""
@app.get("/stock-information/")  # Used in **Analysis > Heatmap Analysis Page: https://stocks.rayvat.com/heatmapanalysis**
async def StockInformation(
    search: Optional[str] = Query(None),
    type: Optional[str] = Query(None)
    ):

    if not search or not type:
        return JSONResponse(content={"error": "Search query and type (NSE or BSE) are required"}, status_code=400)

    query = {'symbol': search}
    try:
        # Common code for NSE and BSE
        if type == "NSE":
            ticker = f"{search}.NS"
            st_collection = db['nse_data1']
        else:
            ticker = f"{search}.BO"
            st_collection = db['bse_data1']

        result = await st_collection.find_one(query)

        if not result:
            return {"error": "Symbol not found"}

        # Fetch stock info
        stock = yf.Ticker(ticker)
        info = stock.info

        # Extract data from stock info (guard against a missing/zero previous close)
        current_price = float(info.get('currentPrice', 0.0))
        previous_close = float(info.get('previousClose', 0.0))
        stock_info = {
            'longName': info.get('shortName', 'N/A'),
            'dchange': ((current_price - previous_close) / previous_close * 100) if previous_close else 0.0,
            'rchange': (current_price - previous_close),
            'previous_close': info.get('previousClose', 'N/A'),
            'open': info.get('open', 'N/A'),
            'sector': info.get('sector', 'N/A'),
            'week_52_change' : info.get('52WeekChange', 'N/A'),
            'industry': info.get('industry', 'N/A'),
            'market_cap': info.get('marketCap', 'N/A'),
            'beta': info.get('beta', 'N/A'),
            'full_time_employees': info.get('fullTimeEmployees', 'N/A'),
            'book_value': info.get('bookValue', 'N/A'),
            'price_to_book': info.get('priceToBook', 'N/A'),
            'last_dividend_value': info.get('lastDividendValue', 'N/A'),
            'website': info.get('website', 'N/A'),
            'current_price': info.get('currentPrice', 'N/A'),
            'day_high': info.get('dayHigh', 'N/A'),
            'day_low': info.get('dayLow', 'N/A'),
            'week_52_high': info.get('fiftyTwoWeekHigh', 'N/A'),
            'week_52_low': info.get('fiftyTwoWeekLow', 'N/A')
        }

        day_data = stock.history(period='1d')
        week_data = stock.history(period='5d')
        month_data = stock.history(period="1mo")
        quarter_data = stock.history(period="3mo")
        six_month_data = stock.history(period="6mo")
        year_data = stock.history(period="1y")
        two_year_data = stock.history(period="2y")
        five_year_data = stock.history(period="5y")
        ten_year_data = stock.history(period="10y")

        price_info = {
            # Get data for 1 day (cast to plain float so the values are JSON serializable)
            'day_high': float(day_data['High'].max()) if not day_data.empty else None,
            'day_low': float(day_data['Low'].min()) if not day_data.empty else None,

            # Get data for 1 week
            'week_high_price': float(week_data['High'].max()) if not week_data.empty else None,
            'week_low_price': float(week_data['Low'].min()) if not week_data.empty else None,

            # Get data for 1 month
            'month_high_price': float(month_data['High'].max()) if not month_data.empty else None,
            'month_low_price': float(month_data['Low'].min()) if not month_data.empty else None,

            # Get data for 1 quarter (3 months)
            'quarter_high_price': float(quarter_data['High'].max()) if not quarter_data.empty else None,
            'quarter_low_price': float(quarter_data['Low'].min()) if not quarter_data.empty else None,

            # Get data for 2 quarters (6 months)
            'six_month_high_price': float(six_month_data['High'].max()) if not six_month_data.empty else None,
            'six_month_low_price': float(six_month_data['Low'].min()) if not six_month_data.empty else None,

            # Get data for 1 year
            'year_high_price': float(year_data['High'].max()) if not year_data.empty else None,
            'year_low_price': float(year_data['Low'].min()) if not year_data.empty else None,

            # Get data for 2 years
            'two_year_high_price': float(two_year_data['High'].max()) if not two_year_data.empty else None,
            'two_year_low_price': float(two_year_data['Low'].min()) if not two_year_data.empty else None,

            # Get data for 5 years
            'five_year_high_price': float(five_year_data['High'].max()) if not five_year_data.empty else None,
            'five_year_low_price': float(five_year_data['Low'].min()) if not five_year_data.empty else None,

            # Get data for 10 years
            'ten_year_high_price': float(ten_year_data['High'].max()) if not ten_year_data.empty else None,
            'ten_year_low_price': float(ten_year_data['Low'].min()) if not ten_year_data.empty else None,
        }

        today = datetime.today()
        yesterday = today - timedelta(days=1)
        prices = result.get('prices', {})

        # Ensure yesterday is a trading day
        while yesterday.weekday() >= 5:
            yesterday -= timedelta(days=1)

        # Fetch 5 trading days, 63 (3 months), etc., skipping weekends
        date_1w = get_trading_days(yesterday, 5)[-1]  # Get the date 5 trading days back
        date_3m = get_trading_days(yesterday, 63)[-1]  # Get the date 63 trading days back
        date_6m = get_trading_days(yesterday, 126)[-1]  # Get the date 126 trading days back
        date_1y = get_trading_days(yesterday, 252)[-1]  # Get the date 252 trading days back
        date_2y = get_trading_days(yesterday, 2*252)[-1]  # 2 years
        date_3y = get_trading_days(yesterday, 3*252)[-1]  # 3 years
        date_4y = get_trading_days(yesterday, 4*252)[-1]  # 4 years
        date_5y = get_trading_days(yesterday, 5*252)[-1]  # 5 years
        date_10y = get_trading_days(yesterday, 10*252)[-1]  # 10 years

        # Fetching prices for each target date
        price_yesterday = await get_price_for_date(prices, yesterday)
        price_1w = await get_price_for_date(prices, date_1w)
        price_3m = await get_price_for_date(prices, date_3m)
        price_6m = await get_price_for_date(prices, date_6m)
        price_1y = await get_price_for_date(prices, date_1y)
        price_2y = await get_price_for_date(prices, date_2y)
        price_3y = await get_price_for_date(prices, date_3y)
        price_4y = await get_price_for_date(prices, date_4y)
        price_5y = await get_price_for_date(prices, date_5y)
        price_10y = await get_price_for_date(prices, date_10y)

        # Calculating price changes
        price_chg_1w = price_yesterday - price_1w if price_yesterday and price_1w else None
        price_chg_3m = price_yesterday - price_3m if price_yesterday and price_3m else None
        price_chg_6m = price_yesterday - price_6m if price_yesterday and price_6m else None
        price_chg_1y = price_yesterday - price_1y if price_yesterday and price_1y else None
        price_chg_2y = price_yesterday - price_2y if price_yesterday and price_2y else None
        price_chg_3y = price_yesterday - price_3y if price_yesterday and price_3y else None
        price_chg_4y = price_yesterday - price_4y if price_yesterday and price_4y else None
        price_chg_5y = price_yesterday - price_5y if price_yesterday and price_5y else None
        price_chg_10y = price_yesterday - price_10y if price_yesterday and price_10y else None

        # Calculating percentage changes
        per_chg_1w = (price_chg_1w / price_1w * 100) if price_1w and price_chg_1w else None
        per_chg_3m = (price_chg_3m / price_3m * 100) if price_3m and price_chg_3m else None
        per_chg_6m = (price_chg_6m / price_6m * 100) if price_6m and price_chg_6m else None
        per_chg_1y = (price_chg_1y / price_1y * 100) if price_1y and price_chg_1y else None
        per_chg_2y = (price_chg_2y / price_2y * 100) if price_2y and price_chg_2y else None
        per_chg_3y = (price_chg_3y / price_3y * 100) if price_3y and price_chg_3y else None
        per_chg_4y = (price_chg_4y / price_4y * 100) if price_4y and price_chg_4y else None
        per_chg_5y = (price_chg_5y / price_5y * 100) if price_5y and price_chg_5y else None
        per_chg_10y = (price_chg_10y / price_10y * 100) if price_10y and price_chg_10y else None

        hist_info = {
            "yesterday": {"date": yesterday.strftime('%Y-%m-%d'), "price": price_yesterday},
            "one_Week": {"date": date_1w.strftime('%Y-%m-%d'), "price": price_1w, "rs_chg": price_chg_1w, "pr_chg": per_chg_1w},
            "three_Months": {"date": date_3m.strftime('%Y-%m-%d'), "price": price_3m, "rs_chg": price_chg_3m, "pr_chg": per_chg_3m},
            "six_Months": {"date": date_6m.strftime('%Y-%m-%d'), "price": price_6m, "rs_chg": price_chg_6m, "pr_chg": per_chg_6m},
            "one_Year": {"date": date_1y.strftime('%Y-%m-%d'), "price": price_1y, "rs_chg": price_chg_1y, "pr_chg": per_chg_1y},
            "two_Year": {"date": date_2y.strftime('%Y-%m-%d'), "price": price_2y, "rs_chg": price_chg_2y, "pr_chg": per_chg_2y},
            "three_Year": {"date": date_3y.strftime('%Y-%m-%d'), "price": price_3y, "rs_chg": price_chg_3y, "pr_chg": per_chg_3y},
            "four_Year": {"date": date_4y.strftime('%Y-%m-%d'), "price": price_4y, "rs_chg": price_chg_4y, "pr_chg": per_chg_4y},
            "five_Year": {"date": date_5y.strftime('%Y-%m-%d'), "price": price_5y, "rs_chg": price_chg_5y, "pr_chg": per_chg_5y},
            "ten_Year": {"date": date_10y.strftime('%Y-%m-%d'), "price": price_10y, "rs_chg": price_chg_10y, "pr_chg": per_chg_10y}
        }

        # Combine all results in a structured dictionary
        result = {
            'ticker': ticker,
            'stock_info': stock_info,
            'hist_info': hist_info,
            'price_info' : price_info
        }

        return JSONResponse(content=result, status_code=200)

    except Exception as e:
        return JSONResponse(content={"error": str(e)}, status_code=500)

@app.get("/search-symbol/{prefix}")  # Used for **Symbol Search** across website
async def Symbol_Get(prefix: str):
    """This Function Gets All NSE, BSE Symbols"""
    collection_bse = db["bse_data1"]
    collection_nse = db["nse_data1"]

    # Define pipelines for symbol search
    def create_pipeline(prefix: str, exchange: str):
        return [
            {"$match": {"symbol": {"$regex": f"^{prefix}", "$options": "i"}}},  # Match symbols starting with the prefix
            {"$group": {"_id": "$symbol"}},
            {
                "$project": {
                    "symbol": "$_id",
                    "exchange": {"$literal": exchange}  # Add the exchange field with a fixed value
                }
            },
        ]

    # Create pipelines for BSE and NSE
    nse_pipeline = create_pipeline(prefix, "NSE")
    bse_pipeline = create_pipeline(prefix, "BSE")

    # Run both aggregations concurrently using asyncio.gather
    nse_task = collection_nse.aggregate(nse_pipeline).to_list(length=None)
    bse_task = collection_bse.aggregate(bse_pipeline).to_list(length=None)

    nse_symbols, bse_symbols = await asyncio.gather(nse_task, bse_task)

    # Combine NSE and BSE symbols into a single list
    result = nse_symbols + bse_symbols

    # Return the combined list of symbols
    return JSONResponse(content=result, status_code=200)
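
# Illustrative request (the response shape is inferred from the aggregation above, with
# _id retained by the $project stage):
#   GET {base_url}/search-symbol/REL
#   -> [{"_id": "RELIANCE", "symbol": "RELIANCE", "exchange": "NSE"}, ...]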

@app.get("/holdings")  # Used in **Paper Trade > Portfolio: https://stocks.rayvat.com/portfolio**
async def get_holdings():
    try:
        # Use to_list() to fetch all documents from the cursor
        holdings_list = await holdings_collection.find({}, {"_id": 0}).to_list(length=None)

        # Convert datetime fields to ISO format for all holdings
        holdings_list = [convert_datetime(holding) for holding in holdings_list]

        return JSONResponse(content={"message": "Holdings fetched successfully", "orders": holdings_list})
    except Exception as e:
        return JSONResponse(content={"message": "An error occurred while fetching holdings", "error": str(e)}, status_code=500)

@app.post("/report-upload")  # Used in **Reports > OHLC Report: https://stocks.rayvat.com/open**
async def upload_report(get_lists: List[ReportList]):
    try:
        # Convert Pydantic model instances to dictionaries and add a timestamp
        upload_data = [
            {**report.dict(), "datetime": datetime.now()}
            for report in get_lists
        ]

        # Insert into MongoDB
        await reportUpload_collection.insert_many(upload_data)

        # Aggregation pipeline to find duplicates
        pipeline = [
            {
                "$group": {
                    "_id": {"symbol": "$symbol", "qty": "$qty", "avg_price": "$avg_price"},
                    "duplicateIds": {"$push": "$_id"},
                    "count": {"$sum": 1}
                }
            },
            {
                "$match": {
                    "count": {"$gt": 1}
                }
            }
        ]

        # Run the aggregation pipeline
        duplicates = await reportUpload_collection.aggregate(pipeline).to_list(length=None)

        # Loop through duplicates and delete extra documents
        for doc in duplicates:
            duplicate_ids = doc["duplicateIds"]
            duplicate_ids.pop(0)  # Keep one document
            await reportUpload_collection.delete_many({"_id": {"$in": duplicate_ids}})

        return JSONResponse(content={"message": "Data inserted successfully and duplicates handled."})

    except Exception as e:
        return JSONResponse(content={"message": "An error occurred during report upload.", "error": str(e)}, status_code=500)

"""Helper Function for /get-report/"""
def convert_objectid(data: Union[Dict, List]) -> Union[Dict, List]:
    """Convert MongoDB ObjectId to string in nested structures."""
    if isinstance(data, dict):
        return {k: str(v) if isinstance(v, ObjectId) else convert_objectid(v)
                for k, v in data.items()}
    elif isinstance(data, list):
        return [convert_objectid(item) for item in data]
    return data

@app.get("/fetch-date-range")  # Used in **Analysis > Trend Analysis: https://stocks.rayvat.com/trendanalysis**
async def fetch_data_test(startdate: str = Query(...), enddate: str = Query(...), type: str = Query(...)):
    """Trend Analysis Api to get range data and calculate to privious year"""
    collection = db['nse_data1'] if type == 'NSE' else db['bse_data1']

    try:
        # Convert startdate and enddate from 'dd-mm-yyyy' to datetime objects and extract day and month
        start_date = datetime.strptime(startdate, "%d-%m-%Y")
        end_date = datetime.strptime(enddate, "%d-%m-%Y")
        start_day_month = start_date.strftime("%m-%d")  # Extract month and day
        end_day_month = end_date.strftime("%m-%d")  # Extract month and day

        # Aggregation pipeline
        pipeline = [
            {
                "$project": {
                    "_id": {"$toString": "$_id"},  # Convert ObjectId to string
                    "symbol": 1,
                    "Security Name": 1,
                    "exchange_token": 1,
                    "instrument_token": 1,
                    "filtered_prices": {
                        "$filter": {
                            "input": {
                                "$objectToArray": "$prices"
                            },
                            "as": "price",
                            "cond": {
                                "$and": [
                                    {"$gte": [{"$substr": [{"$toString": "$$price.k"}, 5, 5]}, start_day_month]},  # Compare day-month of startdate
                                    {"$lte": [{"$substr": [{"$toString": "$$price.k"}, 5, 5]}, end_day_month]}  # Compare day-month of enddate
                                ]
                            }
                        }
                    }
                }
            },
            {
                "$addFields": {
                    "prices": {
                        "$arrayToObject": "$filtered_prices"
                    }
                }
            },
            {
                "$project": {
                    "filtered_prices": 0  # Remove intermediate field
                }
            }
        ]

        result = await collection.aggregate(pipeline).to_list(length=None)

        for item in result:
            prices = item.get('prices', {})
            yearly_changes = {}
            rs_chg_dict = {}
            pr_chg_dict = {}

            # Iterate over the prices to calculate rs_chg for each year
            for date_str, price_data in prices.items():
                # Ensure price_data is a dictionary before proceeding
                if isinstance(price_data, dict) and 'open' in price_data and 'close' in price_data:
                    # Extract the year from the date (in YYYY-MM-DD format)
                    year = date_str.split('-')[0]
                    if year not in yearly_changes:
                        # Initialize the year's data with the first open and close values
                        yearly_changes[year] = {
                            'old_date_close': price_data['close'],  # first close seen for the year (baseline)
                            'latest_date_close': price_data['close']
                        }
                    else:
                        # Update the latest close for the year
                        yearly_changes[year]['latest_date_close'] = price_data['close']
                else:
                    # If price_data is not a dictionary with 'open' and 'close', skip it
                    continue

            # Calculate rs_chg and pr_chg for each year and store in the dictionaries
            for year, year_data in yearly_changes.items():
                rs_chg = year_data['latest_date_close'] - year_data['old_date_close']  # absolute change over the period
                if year_data['old_date_close'] != 0:
                    pr_chg = (rs_chg * 100 / year_data['old_date_close'])  # percentage change over the period
                else:
                    pr_chg = 0  # Avoid division by zero if the baseline close is zero

                rs_chg_dict[year] = rs_chg
                pr_chg_dict[year] = pr_chg

            # Add rs_chg and pr_chg dictionaries to the item
            item['rs_chg'] = rs_chg_dict
            item['pr_chg'] = pr_chg_dict

            # Calculate the average of rs_chg values
            avg_rs_chg = sum(rs_chg_dict.values()) / len(rs_chg_dict) if rs_chg_dict else 0
            item['rs_chg_avg'] = avg_rs_chg

            # Calculate the average of pr_chg values
            avg_pr_chg = sum(pr_chg_dict.values()) / len(pr_chg_dict) if pr_chg_dict else 0
            item['pr_chg_avg'] = avg_pr_chg

        # return result
        return JSONResponse(content=result, status_code=200)

    except Exception as e:
        return JSONResponse(status_code=500, content={"message": str(e)})

@lru_cache(maxsize=128)
def calculate_date_range(duration: str) -> tuple:
    today = datetime.today()
    
    duration_mapping = {
        '1D': timedelta(days=1),
        '1W': timedelta(weeks=1),
        '1M': timedelta(days=30),
        '3M': timedelta(days=90),
        '6M': timedelta(days=180),
        '1Y': timedelta(days=365)
    }
    
    if duration not in duration_mapping:
        raise ValueError("Invalid duration format.")
        
    start_dt = today - duration_mapping[duration]
    return start_dt.strftime('%d-%m-%Y'), today.strftime('%d-%m-%Y')
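
# Illustrative example: calculate_date_range('1M') returns ('dd-mm-yyyy' 30 days ago,
# 'dd-mm-yyyy' today). Note that the result is memoised by lru_cache while "today" is
# computed inside the function, so cached entries can go stale once the date rolls over
# in a long-running process.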

@app.get("/report_details_NSE/")  # Used in **Analysis > Report Details: https://stocks.rayvat.com/report**
async def aggregate_nse(
    input_symbol: Optional[str] = Query(None),
    duration: Optional[str] = Query('1Y'),
    Instrument: Optional[str] = Query('Equity'),
    start_date: Optional[str] = Query(None),
    end_date: Optional[str] = Query(None)
):
    """
    Optimized function to fetch NSE data for Report Details.
    """
    collection = db["nse_data1"]
    
    try:
        # Get date range either from duration or custom dates
        if duration and not (start_date and end_date):
            start_date, end_date = calculate_date_range(duration)
        elif start_date and end_date:
            try:
                # Validate date format
                datetime.strptime(start_date, '%d-%m-%Y')
                datetime.strptime(end_date, '%d-%m-%Y')
            except ValueError:
                return JSONResponse({"detail": "Invalid date format. Use 'dd-mm-yyyy'."}, status_code=400)
        else:
            return JSONResponse({"detail": "Either duration or custom start/end dates must be provided."}, status_code=400)

        if input_symbol:
            pipeline = [
                {
                    '$match': {
                        'symbol': input_symbol
                    }
                },
                {
                    '$project': {
                        '_id': 0,
                        'symbol': 1,
                        'prices': {
                            '$filter': {
                                'input': {'$objectToArray': "$prices"},
                                'as': 'date_item',
                                'cond': {
                                    '$and': [
                                        {
                                            '$gte': [
                                                {'$dateFromString': {'dateString': "$$date_item.k"}},
                                                {'$dateFromString': {'dateString': start_date, 'format': '%d-%m-%Y'}}
                                            ]
                                        },
                                        {
                                            '$lte': [
                                                {'$dateFromString': {'dateString': "$$date_item.k"}},
                                                {'$dateFromString': {'dateString': end_date, 'format': '%d-%m-%Y'}}
                                            ]
                                        }
                                    ]
                                }
                            }
                        }
                    }
                },
                {
                    '$unwind': '$prices'
                },
                {
                    '$project': {
                        'symbol': 1,
                        'date': '$prices.k',
                        'prev_close': {'$toDouble': {'$ifNull': ['$prices.v.prev_close', 0]}},
                        'open': {'$toDouble': '$prices.v.open'},
                        'high': {'$toDouble': '$prices.v.high'},
                        'low': {'$toDouble': '$prices.v.low'},
                        'close': {'$toDouble': '$prices.v.close'}
                    }
                }
            ]

            cursor = collection.aggregate(pipeline, allowDiskUse=True)
            documents = []
            async for doc in cursor:
                high = doc['high']
                low = doc['low']
                close = doc['close']
                prev_close = doc['prev_close']
                
                processed_doc = {
                    'symbol': doc['symbol'],
                    'date': doc['date'],
                    'prev_close': doc['prev_close'],
                    'open': doc['open'],
                    'high': doc['high'],
                    'low': doc['low'],
                    'close': doc['close'],
                    'close_minus_prev_close': close - prev_close,
                    'high_minus_low': high - low,
                    'high_minus_prev_close': high - prev_close,
                    'high_minus_prev_close_perc': (high - prev_close) / prev_close * 100 if prev_close != 0 else 0,
                    'high_minus_low_perc': (high - low) / high * 100 if high != 0 else 0,
                    'close_minus_prev_close_perc': (close - prev_close) / prev_close * 100 if prev_close != 0 else 0
                }
                documents.append(processed_doc)

            return JSONResponse(content=documents, status_code=200)
        else:
            pipeline = [
                {
                    '$match': {
                        **({'Instrument': Instrument} if Instrument != "ALL" else {})
                    }
                },
                {
                    '$project': {
                        '_id': 0,
                        'symbol': 1,
                        'prices': {
                            '$filter': {
                                'input': {'$objectToArray': "$prices"},
                                'as': 'date_item',
                                'cond': {
                                    '$and': [
                                        {
                                            '$gte': [
                                                {'$dateFromString': {'dateString': "$$date_item.k"}},
                                                {'$dateFromString': {'dateString': start_date, 'format': '%d-%m-%Y'}}
                                            ]
                                        },
                                        {
                                            '$lte': [
                                                {'$dateFromString': {'dateString': "$$date_item.k"}},
                                                {'$dateFromString': {'dateString': end_date, 'format': '%d-%m-%Y'}}
                                            ]
                                        }
                                    ]
                                }
                            }
                        }
                    }
                },
                {
                    '$unwind': '$prices'
                },
                {
                    '$project': {
                        'symbol': 1,
                        'date': '$prices.k',
                        'prev_close': {'$toDouble': {'$ifNull': ['$prices.v.prev_close', 0]}},
                        'open': {'$toDouble': '$prices.v.open'},
                        'high': {'$toDouble': '$prices.v.high'},
                        'low': {'$toDouble': '$prices.v.low'},
                        'close': {'$toDouble': '$prices.v.close'}
                    }
                }
            ]

            cursor = collection.aggregate(pipeline, allowDiskUse=True)
            grouped_data = defaultdict(lambda: defaultdict(list))  # collections.defaultdict, not the typing alias

            async for doc in cursor:
                symbol = doc['symbol']
                high = doc['high']
                low = doc['low']
                close = doc['close']
                prev_close = doc['prev_close']

                # Calculate metrics
                close_minus_prev_close = close - prev_close
                high_minus_low = high - low
                high_minus_prev_close = high - prev_close
                high_minus_prev_close_perc = (high - prev_close) / prev_close * 100 if prev_close != 0 else 0
                high_minus_low_perc = (high - low) / high * 100 if high != 0 else 0
                close_minus_prev_close_perc = (close - prev_close) / prev_close * 100 if prev_close != 0 else 0

                # Group data by symbol
                grouped_data[symbol]['high_minus_prev_close'].append(high_minus_prev_close)
                grouped_data[symbol]['high_minus_low'].append(high_minus_low)
                grouped_data[symbol]['close_minus_prev_close'].append(close_minus_prev_close)
                grouped_data[symbol]['high_minus_prev_close_perc'].append(high_minus_prev_close_perc)
                grouped_data[symbol]['high_minus_low_perc'].append(high_minus_low_perc)
                grouped_data[symbol]['close_minus_prev_close_perc'].append(close_minus_prev_close_perc)

            # Calculate averages
            result = []
            for symbol, fields in grouped_data.items():
                avg_data = {
                    'symbol': symbol,
                    'high_minus_prev_close': sum(fields['high_minus_prev_close']) / len(fields['high_minus_prev_close']) if fields['high_minus_prev_close'] else 0,
                    'high_minus_low': sum(fields['high_minus_low']) / len(fields['high_minus_low']) if fields['high_minus_low'] else 0,
                    'close_minus_prev_close': sum(fields['close_minus_prev_close']) / len(fields['close_minus_prev_close']) if fields['close_minus_prev_close'] else 0,
                    'high_minus_prev_close_perc': sum(fields['high_minus_prev_close_perc']) / len(fields['high_minus_prev_close_perc']) if fields['high_minus_prev_close_perc'] else 0,
                    'high_minus_low_perc': sum(fields['high_minus_low_perc']) / len(fields['high_minus_low_perc']) if fields['high_minus_low_perc'] else 0,
                    'close_minus_prev_close_perc': sum(fields['close_minus_prev_close_perc']) / len(fields['close_minus_prev_close_perc']) if fields['close_minus_prev_close_perc'] else 0
                }
                result.append(avg_data)

            return JSONResponse(content=result, status_code=200)

    except Exception as e:
        return JSONResponse({"detail": f"An error occurred: {str(e)}"}, status_code=500)

@app.get("/report_details_BSE/")  # Used in **Analysis > Report Details: https://stocks.rayvat.com/report**
async def aggregate_bse(
    input_symbol: Optional[str] = Query(None),
    duration: Optional[str] = Query('1Y'),
    Instrument: Optional[str] = Query('Equity'),
    start_date: Optional[str] = Query(None),
    end_date: Optional[str] = Query(None)
):
    """
    Optimized function to fetch BSE data for Report Details.
    """
    collection = db["bse_data1"]
    
    try:
        # Get date range either from duration or custom dates
        if duration and not (start_date and end_date):
            start_date, end_date = calculate_date_range(duration)
        elif start_date and end_date:
            try:
                # Validate date format
                datetime.strptime(start_date, '%d-%m-%Y')
                datetime.strptime(end_date, '%d-%m-%Y')
            except ValueError:
                return JSONResponse({"detail": "Invalid date format. Use 'dd-mm-yyyy'."}, status_code=400)
        else:
            return JSONResponse({"detail": "Either duration or custom start/end dates must be provided."}, status_code=400)

        if input_symbol:
            pipeline = [
                {
                    '$match': {
                        'symbol': input_symbol
                    }
                },
                {
                    '$project': {
                        '_id': 0,
                        'symbol': 1,
                        'prices': {
                            '$filter': {
                                'input': {'$objectToArray': "$prices"},
                                'as': 'date_item',
                                'cond': {
                                    '$and': [
                                        {
                                            '$gte': [
                                                {'$dateFromString': {'dateString': "$$date_item.k"}},
                                                {'$dateFromString': {'dateString': start_date, 'format': '%d-%m-%Y'}}
                                            ]
                                        },
                                        {
                                            '$lte': [
                                                {'$dateFromString': {'dateString': "$$date_item.k"}},
                                                {'$dateFromString': {'dateString': end_date, 'format': '%d-%m-%Y'}}
                                            ]
                                        }
                                    ]
                                }
                            }
                        }
                    }
                },
                {
                    '$unwind': '$prices'
                },
                {
                    '$project': {
                        'symbol': 1,
                        'date': '$prices.k',
                        'prev_close': {'$toDouble': {'$ifNull': ['$prices.v.prev_close', 0]}},
                        'open': {'$toDouble': '$prices.v.open'},
                        'high': {'$toDouble': '$prices.v.high'},
                        'low': {'$toDouble': '$prices.v.low'},
                        'close': {'$toDouble': '$prices.v.close'}
                    }
                }
            ]

            cursor = collection.aggregate(pipeline, allowDiskUse=True)
            documents = []
            async for doc in cursor:
                high = doc['high']
                low = doc['low']
                close = doc['close']
                prev_close = doc['prev_close']
                
                processed_doc = {
                    'symbol': doc['symbol'],
                    'date': doc['date'],
                    'prev_close': doc['prev_close'],
                    'open': doc['open'],
                    'high': doc['high'],
                    'low': doc['low'],
                    'close': doc['close'],
                    'close_minus_prev_close': close - prev_close,
                    'high_minus_low': high - low,
                    'high_minus_prev_close': high - prev_close,
                    'high_minus_prev_close_perc': (high - prev_close) / prev_close * 100 if prev_close != 0 else 0,
                    'high_minus_low_perc': (high - low) / high * 100 if high != 0 else 0,
                    'close_minus_prev_close_perc': (close - prev_close) / prev_close * 100 if prev_close != 0 else 0
                }
                documents.append(processed_doc)

            return JSONResponse(content=documents, status_code=200)
        else:
            pipeline = [
                {
                    '$match': {
                        **({'Instrument': Instrument} if Instrument != "ALL" else {})
                    }
                },
                {
                    '$project': {
                        '_id': 0,
                        'symbol': 1,
                        'prices': {
                            '$filter': {
                                'input': {'$objectToArray': "$prices"},
                                'as': 'date_item',
                                'cond': {
                                    '$and': [
                                        {
                                            '$gte': [
                                                {'$dateFromString': {'dateString': "$$date_item.k"}},
                                                {'$dateFromString': {'dateString': start_date, 'format': '%d-%m-%Y'}}
                                            ]
                                        },
                                        {
                                            '$lte': [
                                                {'$dateFromString': {'dateString': "$$date_item.k"}},
                                                {'$dateFromString': {'dateString': end_date, 'format': '%d-%m-%Y'}}
                                            ]
                                        }
                                    ]
                                }
                            }
                        }
                    }
                },
                {
                    '$unwind': '$prices'
                },
                {
                    '$project': {
                        'symbol': 1,
                        'date': '$prices.k',
                        'prev_close': {'$toDouble': {'$ifNull': ['$prices.v.prev_close', 0]}},
                        'open': {'$toDouble': '$prices.v.open'},
                        'high': {'$toDouble': '$prices.v.high'},
                        'low': {'$toDouble': '$prices.v.low'},
                        'close': {'$toDouble': '$prices.v.close'}
                    }
                }
            ]

            cursor = collection.aggregate(pipeline, allowDiskUse=True)
            grouped_data = defaultdict(lambda: defaultdict(list))  # collections.defaultdict, not the typing alias

            async for doc in cursor:
                symbol = doc['symbol']
                high = doc['high']
                low = doc['low']
                close = doc['close']
                prev_close = doc['prev_close']

                # Calculate metrics
                close_minus_prev_close = close - prev_close
                high_minus_low = high - low
                high_minus_prev_close = high - prev_close
                high_minus_prev_close_perc = (high - prev_close) / prev_close * 100 if prev_close != 0 else 0
                high_minus_low_perc = (high - low) / high * 100 if high != 0 else 0
                close_minus_prev_close_perc = (close - prev_close) / prev_close * 100 if prev_close != 0 else 0

                # Group data by symbol
                grouped_data[symbol]['high_minus_prev_close'].append(high_minus_prev_close)
                grouped_data[symbol]['high_minus_low'].append(high_minus_low)
                grouped_data[symbol]['close_minus_prev_close'].append(close_minus_prev_close)
                grouped_data[symbol]['high_minus_prev_close_perc'].append(high_minus_prev_close_perc)
                grouped_data[symbol]['high_minus_low_perc'].append(high_minus_low_perc)
                grouped_data[symbol]['close_minus_prev_close_perc'].append(close_minus_prev_close_perc)

            # Calculate averages
            result = []
            for symbol, fields in grouped_data.items():
                avg_data = {
                    'symbol': symbol,
                    'high_minus_prev_close': sum(fields['high_minus_prev_close']) / len(fields['high_minus_prev_close']) if fields['high_minus_prev_close'] else 0,
                    'high_minus_low': sum(fields['high_minus_low']) / len(fields['high_minus_low']) if fields['high_minus_low'] else 0,
                    'close_minus_prev_close': sum(fields['close_minus_prev_close']) / len(fields['close_minus_prev_close']) if fields['close_minus_prev_close'] else 0,
                    'high_minus_prev_close_perc': sum(fields['high_minus_prev_close_perc']) / len(fields['high_minus_prev_close_perc']) if fields['high_minus_prev_close_perc'] else 0,
                    'high_minus_low_perc': sum(fields['high_minus_low_perc']) / len(fields['high_minus_low_perc']) if fields['high_minus_low_perc'] else 0,
                    'close_minus_prev_close_perc': sum(fields['close_minus_prev_close_perc']) / len(fields['close_minus_prev_close_perc']) if fields['close_minus_prev_close_perc'] else 0
                }
                result.append(avg_data)

            return JSONResponse(content=result, status_code=200)

    except Exception as e:
        return JSONResponse({"detail": f"An error occurred: {str(e)}"}, status_code=500)

@app.get("/year-month-table/")  # Used in **Analysis > Heatmap Analysis: https://stocks.rayvat.com/heatmapanalysis**
async def year_month_table(
    search: Optional[str] = Query(None),
    type: Optional[str] = Query(None)
):
    """
    This function fetches NSE, BSE data for Year-Month Table in HeatMap Analysis
    using search and type parameters.
    """
    # Determine the collection based on the type
    if type == "NSE":
        collection = db["nse_data1"]
    else:
        collection = db["bse_data1"]

    # Pipeline to match and project only the necessary fields
    pipeline = [
        {
            '$match': {
                'symbol': search
            }
        },
        {
            '$project': {
                '_id': 1,
                "symbol": 1,
                'prices': 1
            }
        }
    ]

    # Run the aggregation pipeline
    result = await collection.aggregate(pipeline, allowDiskUse=True).to_list(length=None)
    if not result:
        return JSONResponse(content={'error': 'No data found'}, status_code=404)

    # Assuming we are only working with a single document for a given symbol
    data = result[0]
    prices = data.get('prices', {})

    # Aggregating data by year and month
    year_month_data = defaultdict(lambda: defaultdict(dict))  # collections.defaultdict, not the typing alias

    for date_str, day_data in prices.items():
        date = datetime.strptime(date_str, "%Y-%m-%d")
        year = date.year
        month = date.month

        # Initialize month data if not already initialized
        if not year_month_data[year][month]:
            year_month_data[year][month] = {
                "open": day_data["open"],  # First open price of the month
                "high": day_data["high"],
                "low": day_data["low"],
                "close": day_data["close"],
                "first_date_open": day_data["open"],  # Store for rs_change calculation
                "last_date_close": day_data["close"],  # Initialize to first close
            }
        else:
            # Update high and low for the month
            year_month_data[year][month]["high"] = max(year_month_data[year][month]["high"], day_data["high"])
            year_month_data[year][month]["low"] = min(year_month_data[year][month]["low"], day_data["low"])
            # Update last_date_close to the current day's close (to get the last date's close)
            year_month_data[year][month]["last_date_close"] = day_data["close"]

    # Calculate rs_change and pr_change
    for year, months in year_month_data.items():
        for month, values in months.items():
            first_open = values["first_date_open"]
            last_close = values["last_date_close"]
            rs_change = last_close - first_open
            pr_change = (rs_change * 100 / first_open) if first_open != 0 else 0

            # Update the dictionary with calculated values
            year_month_data[year][month].update({
                "rs_change": rs_change,
                "pr_change": pr_change,
                "open": first_open
            })

            # Remove intermediate calculation values
            del year_month_data[year][month]["first_date_open"]
            del year_month_data[year][month]["last_date_close"]

    # Convert ObjectId to string before returning the response
    response_data = {
        "_id": str(data["_id"]),  # Convert ObjectId to string
        "symbol": data["symbol"],
        "prices": year_month_data
    }

    return JSONResponse(content=response_data, status_code=200)
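
# Illustrative request (shape inferred from the aggregation above; month values carry
# OHLC plus the derived rs_change/pr_change):
#   GET {base_url}/year-month-table/?search=ABB&type=NSE
#   -> {"_id": "...", "symbol": "ABB",
#       "prices": {"2023": {"1": {"open": ..., "high": ..., "low": ..., "close": ...,
#                                 "rs_change": ..., "pr_change": ...}, ...}, ...}}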


# Add cache for date parsing
@lru_cache(maxsize=128)
def parse_date(date_str: str) -> datetime:
    return datetime.strptime(date_str, "%Y-%m-%d")

# Add cache for report type mapping
@lru_cache(maxsize=4)
def get_report_type(req_type: int) -> Optional[str]:
    report_type_map = {
        1: "open",
        2: "high",
        3: "low",
        4: "close"
    }
    return report_type_map.get(req_type)
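
# Illustrative mapping: get_report_type(1) -> "open", get_report_type(4) -> "close",
# and any other value (e.g. get_report_type(5)) -> None.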

async def fetch_symbols_data(collection: AsyncIOMotorCollection) -> List[Dict]:
    """Fetch symbols data with caching capability"""
    return await collection.find().to_list(None)

async def process_symbol_data(
    symbol_info: Dict,
    report_type: str,
    start_dt: datetime,
    end_dt: datetime,
    nseData_collection: AsyncIOMotorCollection
) -> tuple[Dict, Dict]:
    """Process individual symbol data"""
    pipeline = [
        {"$match": {"symbol": symbol_info["symbol"]}},
        {"$project": {
            "prices": {
                "$filter": {
                    "input": {
                        "$map": {
                            "input": {"$objectToArray": "$prices"},
                            "as": "priceData",
                            "in": {
                                "date": {"$toDate": "$$priceData.k"},
                                "price_multiplied": {
                                    "$multiply": [
                                        f"$$priceData.v.{report_type}",
                                        symbol_info["qty"]
                                    ]
                                }
                            }
                        }
                    },
                    "as": "filteredPrice",
                    "cond": {
                        "$and": [
                            {"$gte": ["$$filteredPrice.date", start_dt]},
                            {"$lte": ["$$filteredPrice.date", end_dt]}
                        ]
                    }
                }
            }
        }}
    ]

    results = await nseData_collection.aggregate(pipeline).to_list(None)

    symbol_data = {
        "type": report_type,
        "symbol": symbol_info["symbol"],
        "qty": symbol_info["qty"],
        "avg_price": symbol_info["avg_price"]
    }

    date_prices = {}

    for result in results:
        for price_data in result.get("prices", []):
            date_str = price_data["date"].strftime("%Y-%m-%d")
            price = price_data["price_multiplied"]
            symbol_data[date_str] = price
            date_prices[date_str] = price

    return symbol_data, date_prices

def calculate_summary_entries(
    total_prices: Dict[str, float],
    report_type: str
) -> List[Dict]:
    """Calculate summary entries for the report"""
    sorted_dates = sorted(total_prices.keys())
    if not sorted_dates:
        return []

    # Total entry
    total_entry = {
        "type": report_type,
        "symbol": "TOTAL",
        "qty": "",
        "avg_price": "",
        **{date: total_prices[date] for date in sorted_dates}
    }

    # Calculate first total for percentage calculations
    first_total = total_prices[sorted_dates[0]]

    # Prepare all entries at once
    entries = [
        # Cumulative change entry
        {
            "type": report_type,
            "symbol": f"% TOTAL CHANGE in {report_type}",
            "qty": "",
            "avg_price": "",
            **{
                date: "-" if i == 0 else
                round(((total_prices[date] - first_total) / first_total * 100), 2)
                for i, date in enumerate(sorted_dates)
            }
        },
        # Day change entry
        {
            "type": report_type,
            "symbol": f"DAY CHANGE in {report_type}",
            "qty": "",
            "avg_price": "",
            **{
                date: "-" if i == 0 else total_prices[date] - total_prices[sorted_dates[i-1]]
                for i, date in enumerate(sorted_dates)
            }
        }
    ]

    # Add day-over-day percentage change entry
    day_changes = {}
    for i, date in enumerate(sorted_dates):
        if i == 0:
            day_changes[date] = "-"
        else:
            prev_total = total_prices[sorted_dates[i-1]]
            if prev_total != 0:  # Avoid division by zero
                change = ((total_prices[date] - prev_total) / prev_total * 100)
                day_changes[date] = round(change, 2)
            else:
                day_changes[date] = 0

    entries.append({
        "type": report_type,
        "symbol": f"DAY CHANGE % in {report_type}",
        "qty": "",
        "avg_price": "",
        **day_changes
    })

    return [total_entry] + entries
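
# Illustrative example (assumed inputs):
#   calculate_summary_entries({"2024-01-01": 100.0, "2024-01-02": 110.0}, "close")
#   returns four rows: TOTAL (100.0, 110.0), "% TOTAL CHANGE in close" ("-", 10.0),
#   "DAY CHANGE in close" ("-", 10.0) and "DAY CHANGE % in close" ("-", 10.0).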

async def calculate_value_metrics(
    summary_report: Dict,
    date: str
) -> Dict[str, Dict]:
    """Calculate value metrics for the summary report"""
    metrics = {
        "high_minus_open": {"symbol": "High - Open"},
        "open_minus_low": {"symbol": "Open - Low"},
        "high_minus_low": {"symbol": "High - Low"},
        "open_minus_close": {"symbol": "Open - Close"}
    }

    metrics_calc = {
        "high_minus_open_pr": {"symbol": "High - Open %"},
        "open_minus_low_pr": {"symbol": "Open - Low %"},
        "high_minus_low_pr": {"symbol": "High - Low %"},
        "open_minus_close_pr": {"symbol": "Open - Close %"}
    }

    # Calculate all metrics in a single pass
    for date in summary_report["high"].keys():
        if isinstance(summary_report["high"].get(date), (int, float)):
            high_val = summary_report["high"][date]
            open_val = summary_report["open"][date]
            low_val = summary_report["low"][date]
            close_val = summary_report["close"][date]

            # Calculate absolute differences
            metrics["high_minus_open"][date] = high_val - open_val
            metrics["open_minus_low"][date] = open_val - low_val
            metrics["high_minus_low"][date] = high_val - low_val
            metrics["open_minus_close"][date] = open_val - close_val

            # Calculate percentages
            metrics_calc["high_minus_open_pr"][date] = ((high_val - open_val) / open_val * 100) if open_val else 0
            metrics_calc["open_minus_low_pr"][date] = ((open_val - low_val) / open_val * 100) if open_val else 0
            metrics_calc["high_minus_low_pr"][date] = ((high_val - low_val) / high_val * 100) if high_val else 0
            metrics_calc["open_minus_close_pr"][date] = ((open_val - close_val) / open_val * 100) if open_val else 0

    return {**metrics, **metrics_calc}


@app.post("/get-report")
async def get_report(req: ReportType):
    try:
        # Validate and parse dates
        start_dt = datetime.strptime(req.start_date, "%Y-%m-%d")
        end_dt = datetime.strptime(req.end_date, "%Y-%m-%d")
        if start_dt > end_dt:
            raise HTTPException(status_code=400, detail="start_date must be before or equal to end_date")
        
        report_type_map = {1: "open", 2: "high", 3: "low", 4: "close"}
        results_by_type = {"summary_report": {}}

        # Determine request types
        if req.reqType == 0:
            req_types = list(report_type_map.keys())
        else:
            req_types = [req.reqType]
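        # e.g. reqType=0 expands to all four series (open, high, low, close),
        # while reqType=2 would produce only the "high" report.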

        for req_type in req_types:
            report_type = report_type_map.get(req_type)
            if not report_type:
                raise HTTPException(status_code=400, detail="Invalid reqType. Must be between 1 and 4")

            # Use await and .to_list() for async motor cursor
            symbols_data = await reportUpload_collection.find().to_list(length=None)
            
            if not symbols_data:
                raise HTTPException(status_code=404, detail="No data found in reportUpload collection")

            all_symbols = []
            total_prices = {}

            # Process symbols
            for symbol_info in symbols_data:
                pipeline = [
                    {"$match": {"symbol": symbol_info["symbol"]}},
                    {"$project": {
                        "prices": {
                            "$filter": {
                                "input": {
                                    "$map": {
                                        "input": {"$objectToArray": "$prices"},
                                        "as": "priceData",
                                        "in": {
                                            "date": {"$toDate": "$$priceData.k"},
                                            "price_multiplied": {
                                                "$multiply": [
                                                    f"$$priceData.v.{report_type}",
                                                    symbol_info["qty"]
                                                ]
                                            }
                                        }
                                    }
                                },
                                "as": "filteredPrice",
                                "cond": {
                                    "$and": [
                                        {"$gte": ["$$filteredPrice.date", start_dt]},
                                        {"$lte": ["$$filteredPrice.date", end_dt]}
                                    ]
                                }
                            }
                        }
                    }}
                ]
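
                # Illustrative nse_data1 document shape this pipeline assumes (symbol and values are hypothetical):
                #   {"symbol": "ABB",
                #    "prices": {"2024-01-01": {"open": 100.0, "high": 105.0, "low": 98.0, "close": 103.0}, ...}}
                # $objectToArray turns the prices map into [{"k": "<date>", "v": {...}}, ...] so each day's
                # value for the requested series can be multiplied by the holding qty and filtered to the range.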

                # Use await with .to_list() for async aggregation
                results = await nseData_collection.aggregate(pipeline).to_list(length=None)

                symbol_data = {
                    "type": report_type,
                    "symbol": symbol_info["symbol"],
                    "qty": symbol_info["qty"],
                    "avg_price": symbol_info["avg_price"]
                }

                # Aggregate prices by date
                for result in results:
                    for price_data in result.get("prices", []):
                        date_str = price_data["date"].strftime("%Y-%m-%d")
                        price = price_data["price_multiplied"]

                        symbol_data[date_str] = price
                        total_prices[date_str] = total_prices.get(date_str, 0) + price

                all_symbols.append(symbol_data)

            # Calculate summary entries
            sorted_dates = sorted(total_prices.keys())
            total_entry = {
                "type": report_type,
                "symbol": "TOTAL",
                "qty": "",
                "avg_price": "",
                **{date: total_prices[date] for date in sorted_dates}
            }
            all_symbols.append(total_entry)

            # Correctly calculate % Total Change
            if sorted_dates:
                first_total = total_prices[sorted_dates[0]]
                change_entry = {
                    "type": report_type,
                    "symbol": f"% TOTAL CHANGE in {report_type}",
                    "qty": "",
                    "avg_price": "",
                    **{
                        date: "-" if first_total == 0 else 
                        round(((total_prices[date] - first_total) / first_total * 100), 2)
                        for date in sorted_dates
                    }
                }
                all_symbols.append(change_entry)

            # Correctly calculate Day Change
            day_changes = {}
            day_change_entry = {
                "type": report_type,
                "symbol": f"DAY CHANGE in {report_type}",
                "qty": "",
                "avg_price": "",
            }
            
            for i, date in enumerate(sorted_dates):
                if i == 0:
                    day_changes[date] = "-"
                else:
                    prev_date = sorted_dates[i-1]
                    day_changes[date] = round(total_prices[date] - total_prices[prev_date], 2)
                
                day_change_entry[date] = day_changes[date]
            
            all_symbols.append(day_change_entry)

            # Correctly calculate Day Change Percentage
            day_change_percent_entry = {
                "type": report_type,
                "symbol": f"DAY CHANGE % in {report_type}",
                "qty": "",
                "avg_price": "",
                **{
                    date: "-" if i == 0 or total_prices[sorted_dates[i-1]] == 0 else
                    round((day_changes[date] / total_prices[sorted_dates[i-1]] * 100), 2)
                    for i, date in enumerate(sorted_dates)
                }
            }
            all_symbols.append(day_change_percent_entry)
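            # Worked example (hypothetical totals 1000 -> 1010 -> 990 across three dates):
            #   % TOTAL CHANGE : "-", 1.0, -1.0     (relative to the first date's total)
            #   DAY CHANGE     : "-", 10.0, -20.0   (difference from the previous date's total)
            #   DAY CHANGE %   : "-", 1.0, -1.98    (day change relative to the previous date's total)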

            results_by_type[report_type] = convert_objectid(all_symbols)
            
            # Only add to summary report if reqType is 0
            if req.reqType == 0:
                results_by_type["summary_report"][report_type] = total_entry

        # Only add metrics to summary report if reqType is 0
        if req.reqType == 0:
            # Metric Calculations
            metrics = {
                # Absolute value metrics
                "high_minus_open": {"symbol": "High - Open"},
                "open_minus_low": {"symbol": "Open - Low"},
                "high_minus_low": {"symbol": "High - Low"},
                "open_minus_close": {"symbol": "Open - Close"},
                
                # Percentage metrics with updated names
                "high_minus_open_pr": {"symbol": "High - Open %"},
                "open_minus_low_pr": {"symbol": "Open - Low %"},
                "high_minus_low_pr": {"symbol": "High - Low %"},
                "open_minus_close_pr": {"symbol": "Open - Close %"}
            }
            
            for metric, result in metrics.items():
                for date in sorted_dates:
                    try:
                        # Handling absolute value metrics
                        if metric == "high_minus_open":
                            values = [results_by_type["summary_report"]["high"][date],
                                      results_by_type["summary_report"]["open"][date]]
                            result[date] = round(values[0] - values[1], 2)
                        
                        elif metric == "open_minus_low":
                            values = [results_by_type["summary_report"]["open"][date],
                                      results_by_type["summary_report"]["low"][date]]
                            result[date] = round(values[0] - values[1], 2)
                        
                        elif metric == "high_minus_low":
                            values = [results_by_type["summary_report"]["high"][date],
                                      results_by_type["summary_report"]["low"][date]]
                            result[date] = round(values[0] - values[1], 2)
                        
                        elif metric == "open_minus_close":
                            values = [results_by_type["summary_report"]["open"][date],
                                      results_by_type["summary_report"]["close"][date]]
                            result[date] = round(values[0] - values[1], 2)
                        
                        # Handling percentage metrics
                        elif metric == "high_minus_open_pr":
                            values = [results_by_type["summary_report"]["high"][date],
                                      results_by_type["summary_report"]["open"][date]]
                            result[date] = "-" if values[1] == 0 else round((values[0] - values[1]) / values[1] * 100, 2)
                        
                        elif metric == "open_minus_low_pr":
                            values = [results_by_type["summary_report"]["open"][date],
                                      results_by_type["summary_report"]["low"][date]]
                            result[date] = "-" if values[1] == 0 else round((values[0] - values[1]) / values[1] * 100, 2)
                        
                        elif metric == "high_minus_low_pr":
                            values = [results_by_type["summary_report"]["high"][date],
                                      results_by_type["summary_report"]["low"][date]]
                            result[date] = "-" if values[1] == 0 else round((values[0] - values[1]) / values[1] * 100, 2)
                        
                        elif metric == "open_minus_close_pr":
                            values = [results_by_type["summary_report"]["open"][date],
                                      results_by_type["summary_report"]["close"][date]]
                            result[date] = "-" if values[1] == 0 else round((values[0] - values[1]) / values[1] * 100, 2)
                    
                    except KeyError:
                        continue

            results_by_type["summary_report"].update(metrics)
        
        # If only one type is requested, return just that type's data
        if req.reqType != 0 and len(req_types) == 1:
            return JSONResponse(content=results_by_type[report_type])

        return JSONResponse(content=results_by_type)

    except HTTPException:
        # Re-raise explicit HTTP errors (400/404) raised above instead of masking them as 500s
        raise
    except ValueError as e:
        raise HTTPException(status_code=400, detail=f"Invalid date format: {str(e)}")
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
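
# Illustrative request for /get-report (reqType and dates below are examples only):
#   POST {base_url}/get-report  with body {"reqType": 0, "start_date": "2024-01-01", "end_date": "2024-01-31"}
# reqType 0 returns per-symbol open/high/low/close tables plus a "summary_report" containing the TOTAL
# rows and the High-Open / Open-Low / High-Low / Open-Close value and percentage metrics;
# reqType 1-4 returns only the matching series.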


@app.get("/get-orders")  # Used in **Paper Trade > Orders: https://stocks.rayvat.com/basketorders**
async def get_orders(
    start_date: str = Query(None, description="Start date in YYYY-MM-DD format"),
    end_date: str = Query(None, description="End date in YYYY-MM-DD format")
):
    try:
        # Default to today if dates are not provided
        if not start_date or not end_date:
            default_date = datetime.now()  # use datetime.now() - timedelta(days=1) here to default to yesterday instead
            start_date = end_date = default_date.strftime("%Y-%m-%d")

        # Convert start_date and end_date strings to datetime objects
        start_date_obj = datetime.strptime(start_date, "%Y-%m-%d")
        end_date_obj = datetime.strptime(end_date, "%Y-%m-%d")

        # Check if there are documents for the given start_date (Motor's count_documents is a coroutine, so it must be awaited)
        if await orders_collection.count_documents({"datetime": {"$gte": start_date_obj, "$lt": start_date_obj + timedelta(days=1)}}) == 0:
            # Find next available start date if no data for given start_date
            next_start = await orders_collection.find({"datetime": {"$gt": start_date_obj}}).sort("datetime", ASCENDING).limit(1).to_list(None)
            if next_start:
                start_date_obj = next_start[0]["datetime"]

        # Check if there are documents for the given end_date
        if await orders_collection.count_documents({"datetime": {"$gte": end_date_obj, "$lt": end_date_obj + timedelta(days=1)}}) == 0:
            # Find previous available end date if no data for given end_date
            prev_end = await orders_collection.find({"datetime": {"$lt": end_date_obj + timedelta(days=1)}}).sort("datetime", DESCENDING).limit(1).to_list(None)
            if prev_end:
                end_date_obj = prev_end[0]["datetime"]

        result = await orders_collection.find({
                "datetime": {
                    "$gte": start_date_obj,
                    "$lt": end_date_obj + timedelta(days=1)
                }
            }).to_list(None)


        # Serialize the orders
        serialized_orders = [serialize_order(order) for order in result]
        return JSONResponse(content={"message": "Orders fetched successfully", "orders": serialized_orders})

    except PyMongoError as e:
        raise HTTPException(status_code=500, detail=f"Database error: {e}")
    except ValueError as ve:
        raise HTTPException(status_code=400, detail=f"Invalid date format: {ve}")
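
# Illustrative request for /get-orders (dates below are examples only):
#   GET {base_url}/get-orders?start_date=2024-01-01&end_date=2024-01-31
# When no orders exist on the requested boundary dates, the window is snapped to the nearest dates
# that do have orders (forward for start_date, backward for end_date) before fetching.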
