"""
This script have all the apis that are base for React frontend app:
all are listed below with endpoints:

*base_url = Domain Nmae

1) aggregate_bse (ex: {base_url}/report_details_BSE/?input_symbol=ABB&duration=1M&Instrument=Equity)
1) aggregate_nse (ex: {base_url}/report_details_BSE/?input_symbol=ABB&duration=1M&Instrument=Equity)
"""


# FastAPI Dependencies
from fastapi import FastAPI, Query, HTTPException
from fastapi.responses import JSONResponse
from datetime import datetime, timedelta, date
from fastapi.middleware.cors import CORSMiddleware


# Other Dependencies
from zerodha.zerodha_ticker import KiteTicker
from pydantic import BaseModel, root_validator, Field
from typing import Dict, List, Literal, Optional, TypedDict, Union
from orders import place_order
import yfinance as yf
import logging
import time
import uuid

# Database Dependencies
from bson import ObjectId
from pymongo import MongoClient
from pymongo.errors import PyMongoError
from pymongo import ASCENDING, DESCENDING
from motor.motor_asyncio import AsyncIOMotorClient

import json

local_path = ""
live_path = ""

with open(r"/var/www/html/trade_iq/env.json", "r", encoding="utf-8") as f:
    data = json.load(f)

# MongoDB connection
client = MongoClient(data['live']['mongo_url'])
db = client[data['live']['database']]
orders_collection = db["orders"]
nseData_collection = db["nse_data1"]
bseData_collection = db["bse_data1"]
holdings_collection = db["holdings"]
reportUpload_collection = db["ohlc_reports"]
today = date.today()

# Configurations FastAPI
app = FastAPI()
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Allow all origins, or specify a list of domains
    allow_credentials=True,
    allow_methods=["*"],  # Allow all methods (GET, POST, etc.)
    allow_headers=["*"],  # Allow all headers
)

# Pydantic model to define the structure of each report
class ReportList(BaseModel):
    symbol: str
    avg_price: float
    qty: int

class ReportType(BaseModel):
    reqType: int
    start_date: Optional[str] = None
    end_date: Optional[str] = None

    @root_validator(pre=True)
    def set_default_dates(cls, values):
        if values.get("start_date") is None and values.get("end_date") is None:
            end_date = datetime.today()
            start_date = end_date - timedelta(days=30)
            values["start_date"] = start_date.strftime("%Y-%m-%d")
            values["end_date"] = end_date.strftime("%Y-%m-%d")
        return values
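    # Example (illustrative dates): ReportType(reqType=1) with no dates supplied
    # defaults to the trailing 30-day window, e.g. start_date="2024-05-01",
    # end_date="2024-05-31".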

# Pydantic model to define the structure of each order
class Order(BaseModel):
    symbol: str
    amount: float
    quantity: int
    exchange: str
    action: str
    instrument_token: Optional[int] = None
    exchange_token: Optional[int] = None

class DeleteOrd(BaseModel):
    order_id: list[Dict[str, str]]

# clean data for fetch-date-range
def clean_data(data):
    """Recursively convert non-JSON compliant float values to None."""
    if isinstance(data, dict):
        return {k: clean_data(v) for k, v in data.items()}
    elif isinstance(data, list):
        return [clean_data(item) for item in data]
    elif isinstance(data, float):
        # Check for out of range float values
        if data == float('inf') or data == float('-inf') or data != data:  # Check for NaN
            return None  # Convert to None for JSON compliance
    return data
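
# Example of the conversion performed by clean_data (illustrative values):
#   clean_data({"close": float("nan"), "series": [1.0, float("inf")]})
#   -> {"close": None, "series": [1.0, None]}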

# Function to calculate the number of trading days, skipping weekends
def get_trading_days(start_date, num_days):
    trading_days = []
    current_date = start_date

    while len(trading_days) < num_days:
        if current_date.weekday() < 5:  # Monday to Friday are valid trading days
            trading_days.append(current_date)
        current_date -= timedelta(days=1)

    return trading_days
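
# Example (illustrative): if start_date falls on a Monday, get_trading_days(start_date, 3)
# returns [Monday, Friday, Thursday] -- weekends are skipped, so index [-1] is the
# n-th most recent weekday counting the start date itself.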

@app.get("/report_details_BSE/")
async def aggregate_bse(
    input_symbol: Optional[str] = Query(None),
    duration: Optional[str] = Query('1Y'),
    Instrument: Optional[str] = Query('Equity'),
    start_date: Optional[str] = Query(None),
    end_date: Optional[str] = Query(None)
    ):

    """
    This function fetch BSE data for Report Details using symbol, duration, instrument, startdate, enddate parameters.
    """
    client = AsyncIOMotorClient("mongodb://192.168.31.176:27017/?directConnection=true", maxPoolSize=100, retryWrites=True) # Harsh bhai local
    database = client["zerodha_test"]

    collection = database["bse_data1"]

    today = datetime.today()

    # Calculate date range if duration is provided
    if duration:
        if duration == '1D':
            start_dt = today - timedelta(days=1)
        elif duration == '1W':
            start_dt = today - timedelta(weeks=1)
        elif duration == '1M':
            start_dt = today - timedelta(days=30)
        elif duration == '3M':
            start_dt = today - timedelta(days=90)
        elif duration == '6M':
            start_dt = today - timedelta(days=180)
        elif duration == '1Y':
            start_dt = today - timedelta(days=365)
        else:
            return JSONResponse({"detail": "Invalid duration format."}, status=400)

        start_date = start_dt.strftime('%Y-%m-%d')
        end_date = today.strftime('%Y-%m-%d')

        # print(start_date, end_date, input_symbol, duration, Instrument)


    # MongoDB aggregation pipeline
    if input_symbol:
        pipeline = [
            {
                '$match': {
                    'symbol': input_symbol,  # Input symbol filter
                }
            },
            {
                '$project': {
                    '_id': 0,
                    'symbol': 1,
                    'prices': {
                        '$filter': {
                            'input': {
                                '$map': {
                                    'input': {'$objectToArray': "$prices"},
                                    'as': 'date_item',
                                    'in': {
                                        'date': "$$date_item.k",
                                        'prev_close': {'$toDouble': {"$ifNull": ["$$date_item.v.prev_close", 0]}},  # Changed to access new structure
                                        'open': {'$toDouble': {"$ifNull": ["$$date_item.v.open", 0]}},
                                        'high': {'$toDouble': {"$ifNull": ["$$date_item.v.high", 0]}},
                                        'low': {'$toDouble': {"$ifNull": ["$$date_item.v.low", 0]}},
                                        'close': {'$toDouble': {"$ifNull": ["$$date_item.v.close", 0]}},
                                        # Removed avg_price, total_traded_qty, delivery_qty, and perc_delivery_qty as they are not in the new structure
                                        'no_of_trades': {
                                            '$cond': {
                                                'if': {'$isNumber': '$$date_item.v.no_of_trades'},
                                                'then': {'$toDouble': '$$date_item.v.no_of_trades'},
                                                'else': '-'
                                            }
                                        },
                                        # Calculations
                                        'close_minus_prev_close': {
                                            '$subtract': [
                                                {'$toDouble': {'$ifNull': ["$$date_item.v.close", 0]}},
                                                {'$toDouble': {'$ifNull': ["$$date_item.v.prev_close", 0]}}
                                            ]
                                        },
                                        'high_minus_low': {
                                            '$subtract': [
                                                {'$toDouble': {'$ifNull': ["$$date_item.v.high", 0]}},
                                                {'$toDouble': {'$ifNull': ["$$date_item.v.low", 0]}}
                                            ]
                                        },
                                        'high_minus_prev_close': {
                                            '$subtract': [
                                                {'$toDouble': {'$ifNull': ["$$date_item.v.high", 0]}},
                                                {'$toDouble': {'$ifNull': ["$$date_item.v.prev_close", 0]}}
                                            ]
                                        },
                                        'high_minus_prev_close_perc': {
                                            '$cond': {
                                                'if': {'$eq': [{'$toDouble': {'$ifNull': ["$$date_item.v.prev_close", 0]}}, 0]},
                                                'then': 0,
                                                'else': {
                                                    '$multiply': [
                                                        {
                                                            '$divide': [
                                                                {
                                                                    '$subtract': [
                                                                        {'$toDouble': {'$ifNull': ["$$date_item.v.high", 0]}},
                                                                        {'$toDouble': {'$ifNull': ["$$date_item.v.prev_close", 0]}}
                                                                    ]
                                                                },
                                                                {'$toDouble': {'$ifNull': ["$$date_item.v.prev_close", 0]}}
                                                            ]
                                                        },
                                                        100
                                                    ]
                                                }
                                            }
                                        },
                                        'high_minus_low_perc': {
                                            '$cond': {
                                                'if': {'$eq': [{'$toDouble': {'$ifNull': ["$$date_item.v.high", 0]}}, 0]},
                                                'then': 0,
                                                'else': {
                                                    '$multiply': [
                                                        {
                                                            '$divide': [
                                                                {
                                                                    '$subtract': [
                                                                        {'$toDouble': {'$ifNull': ["$$date_item.v.high", 0]}},
                                                                        {'$toDouble': {'$ifNull': ["$$date_item.v.low", 0]}}
                                                                    ]
                                                                },
                                                                {'$toDouble': {'$ifNull': ["$$date_item.v.high", 0]}}
                                                            ]
                                                        },
                                                        100
                                                    ]
                                                }
                                            }
                                        },
                                        'close_minus_prev_close_perc': {
                                            '$cond': {
                                                'if': {'$eq': [{'$toDouble': {'$ifNull': ["$$date_item.v.prev_close", 0]}}, 0]},
                                                'then': 0,
                                                'else': {
                                                    '$multiply': [
                                                        {
                                                            '$divide': [
                                                                {
                                                                    '$subtract': [
                                                                        {'$toDouble': {'$ifNull': ["$$date_item.v.close", 0]}},
                                                                        {'$toDouble': {'$ifNull': ["$$date_item.v.prev_close", 0]}}
                                                                    ]
                                                                },
                                                                {'$toDouble': {'$ifNull': ["$$date_item.v.prev_close", 0]}}
                                                            ]
                                                        },
                                                        100
                                                    ]
                                                }
                                            }
                                        }
                                    }
                                }
                            },
                            'as': 'date_item',
                            'cond': {
                                '$and': [
                                    {
                                        '$gte': [
                                            {'$dateFromString': {'dateString': "$$date_item.date"}},
                                            {'$dateFromString': {'dateString': start_date}}
                                        ]
                                    },
                                    {
                                        '$lte': [
                                            {'$dateFromString': {'dateString': "$$date_item.date"}},
                                            {'$dateFromString': {'dateString': end_date}}
                                        ]
                                    }
                                ]
                            }
                        }
                    }
                }
            },
            {
            '$unwind': '$prices'  # Unwind to flatten the prices into separate documents
            },
            {
                '$replaceRoot': { 'newRoot': { '$mergeObjects': [ { 'symbol': '$symbol' }, '$prices' ] } }  # Merge symbol with each date entry
            }
        ]
    else:
        pipeline = [
            {
                '$match': {
                    **({'Instrument': Instrument} if Instrument not in ["ALL", "fund"] else {}),
                    **(
                        {'Issuer Name': {'$not': {'$regex': '.*Fund.*', '$options': 'i'}}}
                        if Instrument not in ["ALL", "fund"]
                        else {}
                    ),
                    **(
                        {'Issuer Name': {'$regex': '.*Fund.*', '$options': 'i'}}
                        if Instrument == "fund"
                        else {}
                    )
                }
            },
            {
                '$project': {
                    '_id': 0,
                    'symbol': 1,
                    'prices': {
                        '$objectToArray': '$prices'  # Convert prices object to an array
                    }
                }
            },
            {
                '$unwind': '$prices'  # Unwind the prices array to individual items
            },
            {
                '$addFields': {
                    'date': '$prices.k',
                    'prev_close': {'$toDouble': {'$ifNull': ['$prices.v.prev_close', 0]}},
                    'open': {'$toDouble': '$prices.v.open'},
                    'high': {'$toDouble': '$prices.v.high'},
                    'low': {'$toDouble': '$prices.v.low'},
                    'close': {'$toDouble': '$prices.v.close'}
                }
            },
            {
                '$match': {
                    '$and': [
                        {'date': {'$gte': start_date, '$lte': end_date}}
                    ]
                }
            },
            {
                '$addFields': {
                    'close_minus_prev_close': {
                        '$subtract': ['$close', '$prev_close']
                    },
                    'high_minus_low': {
                        '$subtract': ['$high', '$low']
                    },
                    'high_minus_prev_close': {
                        '$subtract': ['$high', '$prev_close']
                    },
                    'high_minus_prev_close_perc': {
                        '$cond': {
                            'if': {'$eq': ['$prev_close', 0]},
                            'then': 0,
                            'else': {
                                '$multiply': [
                                    {'$divide': [
                                        {'$subtract': ['$high', '$prev_close']},
                                        '$prev_close'
                                    ]},
                                    100
                                ]
                            }
                        }
                    },
                    'high_minus_low_perc': {
                        '$cond': {
                            'if': {'$eq': ['$high', 0]},
                            'then': 0,
                            'else': {
                                '$multiply': [
                                    {'$divide': [
                                        {'$subtract': ['$high', '$low']},
                                        '$high'
                                    ]},
                                    100
                                ]
                            }
                        }
                    },
                    'close_minus_prev_close_perc': {
                        '$cond': {
                            'if': {'$eq': ['$prev_close', 0]},
                            'then': 0,
                            'else': {
                                '$multiply': [
                                    {'$divide': [
                                        {'$subtract': ['$close', '$prev_close']},
                                        '$prev_close'
                                    ]},
                                    100
                                ]
                            }
                        }
                    }
                }
            },
            {
                '$project': {
                    'symbol': 1,
                    'date': 1,
                    'prev_close': 1,
                    'open': 1,
                    'high': 1,
                    'low': 1,
                    'close': 1,
                    'close_minus_prev_close': 1,
                    'high_minus_low': 1,
                    'high_minus_prev_close': 1,
                    'high_minus_prev_close_perc': 1,
                    'high_minus_low_perc': 1,
                    'close_minus_prev_close_perc': 1
                }
            }
        ]

    result = await collection.aggregate(pipeline, allowDiskUse=True).to_list(length=None)

    return result
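
# Illustrative shape of one item returned by the symbol-filtered pipeline above
# (values are made up; the field names follow the $project/$map stages):
#   {"symbol": "ABB", "date": "2024-01-15", "prev_close": 4100.0, "open": 4120.0,
#    "high": 4180.0, "low": 4095.0, "close": 4155.0, "no_of_trades": 1234.0,
#    "close_minus_prev_close": 55.0, "high_minus_low": 85.0,
#    "high_minus_prev_close": 80.0, "high_minus_prev_close_perc": 1.95,
#    "high_minus_low_perc": 2.03, "close_minus_prev_close_perc": 1.34}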


@app.get("/report_details_NSE/")
async def aggregate_nse(
    input_symbol: Optional[str] = Query(None),
    duration: Optional[str] = Query('1Y'),
    Instrument: Optional[str] = Query('Equity'),
    start_date: Optional[str] = Query(None),
    end_date: Optional[str] = Query(None)
    ):

    """
    This function fetch NSE data for Report Details using symbol, duration, instrument, startdate, enddate parameters.
    """

    client = AsyncIOMotorClient("mongodb://192.168.31.176:27017/?directConnection=true") # Harsh bhai local
    database = client["zerodha_test"]
    collection = database["nse_data1"]

    try:
        # Get today's date
        today = datetime.today()

        # If duration is passed, calculate the date range based on it
        if duration:
            if duration == '1D':
                start_dt = today - timedelta(days=1)
            elif duration == '1W':
                start_dt = today - timedelta(weeks=1)
            elif duration == '1M':
                start_dt = today - timedelta(days=30)  # Approximate month
            elif duration == '3M':
                start_dt = today - timedelta(days=90)  # Approximate 3 months
            elif duration == '6M':
                start_dt = today - timedelta(days=180)  # Approximate 6 months
            elif duration == '1Y':
                start_dt = today - timedelta(days=365)  # 1 year
            else:
                return JSONResponse({"detail": "Invalid duration format."}, status=400)

            start_date = start_dt.strftime('%d-%m-%Y')
            end_date = today.strftime('%d-%m-%Y')

        # If custom date range is provided, use it
        if start_date and end_date:
            try:
                start_dt = datetime.strptime(start_date, '%d-%m-%Y')
                end_dt = datetime.strptime(end_date, '%d-%m-%Y')
            except ValueError:
                return JSONResponse({"detail": "Invalid date format. Use 'dd-mm-yyyy'."}, status=400)
        else:
            return JSONResponse({"detail": "Either duration or custom start/end dates must be provided."}, status=400)

        # MongoDB aggregation pipeline
        if input_symbol:
            pipeline = [
                {
                    '$match': {
                        'symbol': input_symbol
                    }
                },
                {
                    '$project': {
                        '_id': 0,
                        'symbol': 1,
                        'prices': {
                            '$filter': {
                                'input': {
                                    '$map': {
                                        'input': {'$objectToArray': "$prices"},
                                        'as': 'date_item',
                                        'in': {
                                            'date': "$$date_item.k",
                                            'prev_close': {'$toDouble': {'$ifNull': ['$$date_item.v.prev_close', 0]}},
                                            'open': {'$toDouble': "$$date_item.v.open"},
                                            'high': {'$toDouble': "$$date_item.v.high"},
                                            'low': {'$toDouble': "$$date_item.v.low"},
                                            'close': {'$toDouble': "$$date_item.v.close"},
                                            'close_minus_prev_close': {
                                                '$subtract': [
                                                    {'$toDouble': {'$ifNull': ["$$date_item.v.close", 0]}},
                                                    {'$toDouble': {'$ifNull': ["$$date_item.v.prev_close", 0]}}
                                                ]
                                            },
                                            'high_minus_low': {
                                                '$subtract': [
                                                    {'$toDouble': {'$ifNull': ["$$date_item.v.high", 0]}},
                                                    {'$toDouble': {'$ifNull': ["$$date_item.v.low", 0]}}
                                                ]
                                            },
                                            'high_minus_prev_close': {
                                                '$subtract': [
                                                    {'$toDouble': {'$ifNull': ["$$date_item.v.high", 0]}},
                                                    {'$toDouble': {'$ifNull': ["$$date_item.v.prev_close", 0]}}
                                                ]
                                            },
                                            'high_minus_prev_close_perc': {
                                                '$cond': {
                                                    'if': {'$eq': [{'$toDouble': {'$ifNull': ["$$date_item.v.prev_close", 0]}}, 0]},
                                                    'then': 0,
                                                    'else': {
                                                        '$multiply': [
                                                            {
                                                                '$divide': [
                                                                    {
                                                                        '$subtract': [
                                                                            {'$toDouble': {'$ifNull': ["$$date_item.v.high", 0]}},
                                                                            {'$toDouble': {'$ifNull': ["$$date_item.v.prev_close", 0]}}
                                                                        ]
                                                                    },
                                                                    {'$toDouble': {'$ifNull': ["$$date_item.v.prev_close", 0]}}
                                                                ]
                                                            },
                                                            100
                                                        ]
                                                    }
                                                }
                                            },
                                            'high_minus_low_perc': {
                                                '$cond': {
                                                    'if': {'$eq': [{'$toDouble': {'$ifNull': ["$$date_item.v.high", 0]}}, 0]},
                                                    'then': 0,
                                                    'else': {
                                                        '$multiply': [
                                                            {
                                                                '$divide': [
                                                                    {
                                                                        '$subtract': [
                                                                            {'$toDouble': {'$ifNull': ["$$date_item.v.high", 0]}},
                                                                            {'$toDouble': {'$ifNull': ["$$date_item.v.low", 0]}}
                                                                        ]
                                                                    },
                                                                    {'$toDouble': {'$ifNull': ["$$date_item.v.high", 0]}}
                                                                ]
                                                            },
                                                            100
                                                        ]
                                                    }
                                                }
                                            },
                                            'close_minus_prev_close_perc': {
                                                '$cond': {
                                                    'if': {'$eq': [{'$toDouble': {'$ifNull': ["$$date_item.v.prev_close", 0]}}, 0]},
                                                    'then': 0,
                                                    'else': {
                                                        '$multiply': [
                                                            {
                                                                '$divide': [
                                                                    {
                                                                        '$subtract': [
                                                                            {'$toDouble': {'$ifNull': ["$$date_item.v.close", 0]}},
                                                                            {'$toDouble': {'$ifNull': ["$$date_item.v.prev_close", 0]}}
                                                                        ]
                                                                    },
                                                                    {'$toDouble': {'$ifNull': ["$$date_item.v.prev_close", 0]}}
                                                                ]
                                                            },
                                                            100
                                                        ]
                                                    }
                                                }
                                            }
                                        }
                                    }
                                },
                                'as': 'date_item',
                                'cond': {
                                    '$and': [
                                        {
                                            '$gte': [
                                                {'$dateFromString': {'dateString': "$$date_item.date"}},
                                                {'$dateFromString': {'dateString': start_date}}
                                            ]
                                        },
                                        {
                                            '$lte': [
                                                {'$dateFromString': {'dateString': "$$date_item.date"}},
                                                {'$dateFromString': {'dateString': end_date}}
                                            ]
                                        }
                                    ]
                                }
                            }
                        }
                    }
                },
                {
                '$unwind': '$prices'  # Unwind to flatten the prices into separate documents
                },
                {
                    '$replaceRoot': { 'newRoot': { '$mergeObjects': [ { 'symbol': '$symbol' }, '$prices' ] } }  # Merge symbol with each date entry
                }
            ]
        else:
            pipeline = [
                {
                    '$match': {
                        **({'Instrument': Instrument} if Instrument != "ALL" else {})
                    }
                },
                {
                    '$project': {
                        '_id': 0,
                        'symbol': 1,
                        'prices': {
                            '$filter': {
                                'input': {
                                    '$map': {
                                        'input': {'$objectToArray': "$prices"},
                                        'as': 'date_item',
                                        'in': {
                                            'date': "$$date_item.k",
                                            'prev_close': {'$toDouble': "$$date_item.v.prev_close"},
                                            'open': {'$toDouble': "$$date_item.v.open"},
                                            'high': {'$toDouble': "$$date_item.v.high"},
                                            'low': {'$toDouble': "$$date_item.v.low"},
                                            'close': {'$toDouble': "$$date_item.v.close"},
                                            'close_minus_prev_close': {
                                                '$subtract': [
                                                    {'$toDouble': {'$ifNull': ["$$date_item.v.close", 0]}},
                                                    {'$toDouble': {'$ifNull': ["$$date_item.v.prev_close", 0]}}
                                                ]
                                            },
                                            'high_minus_low': {
                                                '$subtract': [
                                                    {'$toDouble': {'$ifNull': ["$$date_item.v.high", 0]}},
                                                    {'$toDouble': {'$ifNull': ["$$date_item.v.low", 0]}}
                                                ]
                                            },
                                            'high_minus_prev_close': {
                                                '$subtract': [
                                                    {'$toDouble': {'$ifNull': ["$$date_item.v.high", 0]}},
                                                    {'$toDouble': {'$ifNull': ["$$date_item.v.prev_close", 0]}}
                                                ]
                                            },
                                            'high_minus_prev_close_perc': {
                                                '$cond': {
                                                    'if': {'$eq': [{'$toDouble': {'$ifNull': ["$$date_item.v.prev_close", 0]}}, 0]},
                                                    'then': 0,
                                                    'else': {
                                                        '$multiply': [
                                                            {
                                                                '$divide': [
                                                                    {
                                                                        '$subtract': [
                                                                            {'$toDouble': {'$ifNull': ["$$date_item.v.high", 0]}},
                                                                            {'$toDouble': {'$ifNull': ["$$date_item.v.prev_close", 0]}}
                                                                        ]
                                                                    },
                                                                    {'$toDouble': {'$ifNull': ["$$date_item.v.prev_close", 0]}}
                                                                ]
                                                            },
                                                            100
                                                        ]
                                                    }
                                                }
                                            },
                                            'high_minus_low_perc': {
                                                '$cond': {
                                                    'if': {'$eq': [{'$toDouble': {'$ifNull': ["$$date_item.v.high", 0]}}, 0]},
                                                    'then': 0,
                                                    'else': {
                                                        '$multiply': [
                                                            {
                                                                '$divide': [
                                                                    {
                                                                        '$subtract': [
                                                                            {'$toDouble': {'$ifNull': ["$$date_item.v.high", 0]}},
                                                                            {'$toDouble': {'$ifNull': ["$$date_item.v.low", 0]}}
                                                                        ]
                                                                    },
                                                                    {'$toDouble': {'$ifNull': ["$$date_item.v.high", 0]}}
                                                                ]
                                                            },
                                                            100
                                                        ]
                                                    }
                                                }
                                            },
                                            'close_minus_prev_close_perc': {
                                                '$cond': {
                                                    'if': {'$eq': [{'$toDouble': {'$ifNull': ["$$date_item.v.prev_close", 0]}}, 0]},
                                                    'then': 0,
                                                    'else': {
                                                        '$multiply': [
                                                            {
                                                                '$divide': [
                                                                    {
                                                                        '$subtract': [
                                                                            {'$toDouble': {'$ifNull': ["$$date_item.v.close", 0]}},
                                                                            {'$toDouble': {'$ifNull': ["$$date_item.v.prev_close", 0]}}
                                                                        ]
                                                                    },
                                                                    {'$toDouble': {'$ifNull': ["$$date_item.v.prev_close", 0]}}
                                                                ]
                                                            },
                                                            100
                                                        ]
                                                    }
                                                }
                                            }
                                        }
                                    }
                                },
                                'as': 'date_item',
                                'cond': {
                                    '$and': [
                                        {
                                            '$gte': [
                                                {'$dateFromString': {'dateString': "$$date_item.date"}},
                                                {'$dateFromString': {'dateString': start_date}}
                                            ]
                                        },
                                        {
                                            '$lte': [
                                                {'$dateFromString': {'dateString': "$$date_item.date"}},
                                                {'$dateFromString': {'dateString': end_date}}
                                            ]
                                        }
                                    ]
                                }
                            }
                        }
                    }
                },
                {
                '$unwind': '$prices'  # Unwind to flatten the prices into separate documents
                },
                {
                    '$replaceRoot': { 'newRoot': { '$mergeObjects': [ { 'symbol': '$symbol' }, '$prices' ] } }  # Merge symbol with each date entry
                }
            ]

        result = await collection.aggregate(pipeline, allowDiskUse=True).to_list(length=None)

        return JSONResponse(content=result, status_code=200)
    except Exception as e:
        return JSONResponse({"detail": str(e)}, status=500)


@app.get("/fetch-date-range/")
async def fetch_data_test(startdate: str = Query(...), enddate: str = Query(...), type: str = Query(...)):
    collection_ta = nseData_collection if type == 'NSE' else bseData_collection

    try:
        # Convert startdate and enddate from 'dd-mm-yyyy' to datetime objects and extract day and month
        start_date = datetime.strptime(startdate, "%d-%m-%Y")
        end_date = datetime.strptime(enddate, "%d-%m-%Y")
        start_day_month = start_date.strftime("%m-%d")  # Extract month and day
        end_day_month = end_date.strftime("%m-%d")  # Extract month and day

        # Aggregation pipeline
        pipeline = [
            {
                "$project": {
                    "_id": {"$toString": "$_id"},  # Convert ObjectId to string
                    "symbol": 1,
                    "Security Name": 1,
                    "exchange_token": 1,
                    "instrument_token": 1,
                    "filtered_prices": {
                        "$filter": {
                            "input": {
                                "$objectToArray": "$prices"
                            },
                            "as": "price",
                            "cond": {
                                "$and": [
                                    {"$gte": [{"$substr": [{"$toString": "$$price.k"}, 5, 5]}, start_day_month]},  # Compare day-month of startdate
                                    {"$lte": [{"$substr": [{"$toString": "$$price.k"}, 5, 5]}, end_day_month]}  # Compare day-month of enddate
                                ]
                            }
                        }
                    }
                }
            },
            {
                "$addFields": {
                    "prices": {
                        "$arrayToObject": "$filtered_prices"
                    }
                }
            },
            {
                "$project": {
                    "filtered_prices": 0  # Remove intermediate field
                }
            }
        ]
        # nseData_collection/bseData_collection come from the synchronous MongoClient
        # above, so materialize the cursor directly instead of calling to_list()
        result = list(collection_ta.aggregate(pipeline))

        # Add the rs_chg (price change) calculation to the result
        for item in result:
            prices = item.get('prices', {})
            yearly_changes = {}
            rs_chg_dict = {}
            pr_chg_dict = {}

            # Iterate over the prices to calculate rs_chg for each year
            for date_str, price_data in prices.items():
                # Ensure price_data is a dictionary before proceeding
                if isinstance(price_data, dict) and 'open' in price_data and 'close' in price_data:
                    # Extract the year from the date (in YYYY-MM-DD format)
                    year = date_str.split('-')[0]
                    if year not in yearly_changes:
                        # Initialize the year's data with the first open and close values
                        yearly_changes[year] = {
                            'old_date_open': price_data['open'],
                            'latest_date_close': price_data['close']
                        }
                    else:
                        # Update the latest close for the year
                        yearly_changes[year]['latest_date_close'] = price_data['close']
                else:
                    # If price_data is not a dictionary with 'open' and 'close', skip it
                    continue

            # Calculate rs_chg for each year and store in the rs_chg_dict
            for year, year_data in yearly_changes.items():
                rs_chg = year_data['latest_date_close'] - year_data['old_date_open']
                pr_chg = (rs_chg * 100 / year_data['old_date_open'])

                rs_chg_dict[year] = rs_chg
                pr_chg_dict[year] = pr_chg
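                # e.g. old_date_open=100.0, latest_date_close=120.0
                #      -> rs_chg = 20.0 (rupee change), pr_chg = 20.0 (% change)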

            # Add rs_chg dictionary to the item
            item['rs_chg'] = rs_chg_dict
            item['pr_chg'] = pr_chg_dict

            # Calculate the average of rs_chg values
            avg_rs_chg = sum(rs_chg_dict.values()) / len(rs_chg_dict) if rs_chg_dict else 0
            item['rs_chg_avg'] = avg_rs_chg

            avg_pr_chg = sum(pr_chg_dict.values()) / len(pr_chg_dict) if pr_chg_dict else 0
            item['pr_chg_avg'] = avg_pr_chg

        # Strip NaN/inf values so the payload is JSON compliant (see clean_data above)
        return JSONResponse(content=clean_data(result), status_code=200)

    except Exception as e:
        return JSONResponse(status_code=500, content={"message": str(e)})



# Overall Probability (HeatMap Analysis API)
@app.get("/overall-probability/")
async def overall_probability(
    search: Optional[str] = Query(None),
    type: Optional[str] = Query(None)
    ):

    """ This function fetch NSE, BSE data for Overall Probability table in HeatMap Analysis using search, type parameters. """

    client = AsyncIOMotorClient("mongodb://jenya:DJenya$Mongo%40St0ckDB@172.105.59.175:27017/", maxPoolSize=100, retryWrites=True) # Harsh bhai local
    database = client["raiwat_screener"]

    if type == "NSE":

        collection = database["mon_year_analysis_nse"]

        pipeline = [
            {
                '$match': {
                    'symbol': search
                }
            },
            {
                '$project': {
                    '_id': 0,
                    "symbol": 1,
                    'exchange': 1,
                    "jan":1,
                    "feb":1,
                    "mar":1,
                    "apr":1,
                    "may":1,
                    "jun":1,
                    "jul":1,
                    "aug":1,
                    "sep":1,
                    "oct":1,
                    "nov":1,
                    "dec":1
                }
            }
        ]
    else:
        collection = database["mon_year_analysis_bse"]

        pipeline = [
            {
                '$match': {
                    'Security Id': search
                }
            },
            {
                '$project': {
                    '_id': 0,
                    "Security Id": 1,
                    'exchange': 1,
                    "jan":1,
                    "feb":1,
                    "mar":1,
                    "apr":1,
                    "may":1,
                    "jun":1,
                    "jul":1,
                    "aug":1,
                    "sep":1,
                    "oct":1,
                    "nov":1,
                    "dec":1
                }
            }
        ]

    result = await collection.aggregate(pipeline, allowDiskUse=True).to_list(length=None)
    return JSONResponse(content=result, status_code=200)


#Year Month Table
@app.get("/year-month-table/")
async def year_month_table(
    search: Optional[str] = Query(None),
    type: Optional[str] = Query(None)
    ):
    """ This function fetch NSE, BSE data for Year Month Table in HeatMap Analysis using search, type parameters. """

    client = AsyncIOMotorClient("mongodb://jenya:DJenya$Mongo%40St0ckDB@172.105.59.175:27017/", maxPoolSize=100, retryWrites=True) # Harsh bhai local
    database = client["raiwat_screener"]

    if type == "NSE":

        collection = database["mon_year_val_analysis_nse"]

        pipeline = [
            {
                '$match': {
                    'Security_Id': search
                }
            },
            {
                '$project': {
                    '_id': 0,
                    "Security_Id": 1,
                    'dates': 1
                }
            }
        ]
    else:
        collection = database["mon_year_val_analysis_bse"]

        pipeline = [
            {
                '$match': {
                    'Security_Id': search
                }
            },
            {
                '$project': {
                    '_id': 0,
                    "Security_Id": 1,
                    'dates': 1
                }
            }
        ]

    result = await collection.aggregate(pipeline, allowDiskUse=True).to_list(length=None)
    return JSONResponse(content=result, status_code=200)


# Daily Table
@app.get("/daily-table/")
async def daily_table(
    search: Optional[str] = Query(None),
    type: Optional[str] = Query(None)
    ):
    """ This function fetch NSE, BSE data for Daily Table in HeatMap Analysis using search, type parameters. """

    client = AsyncIOMotorClient("mongodb://jenya:DJenya$Mongo%40St0ckDB@172.105.59.175:27017/", maxPoolSize=100, retryWrites=True) # Harsh bhai local
    database = client["raiwat_screener"]

    if type == "NSE":
        collection = database["day_year_analysis_nse"]
    else:
        collection = database["day_year_analysis_bse"]

    pipeline = [
        {
            '$match': {
                'Security Id': search
            }
        },
        {
            '$project': {
                '_id': 0,
                "Security Id": 1,
                'dates': 1
            }
        }
    ]

    result = await collection.aggregate(pipeline, allowDiskUse=True).to_list(length=None)
    return JSONResponse(content=result, status_code=200)


#Day Month Table
@app.get("/day-month-table/")
async def day_month_table(
    search: Optional[str] = Query(None),
    type: Optional[str] = Query(None)
    ):
    """ This function fetch NSE, BSE data for Day Month Table in HeatMap Analysis using search, type parameters. """

    client = AsyncIOMotorClient("mongodb://jenya:DJenya$Mongo%40St0ckDB@172.105.59.175:27017/", maxPoolSize=100, retryWrites=True) # Harsh bhai local
    database = client["raiwat_screener"]

    if type == "NSE":
        collection = database["day_mon_analysis_nse"]
    else:
        collection = database["day_mon_analysis_bse"]

    pipeline = [
        {
            '$match': {
                'symbol': search
            }
        },
        {
            '$project': {
                '_id': 0,
                "symbol": 1,
                'dates': 1

            }
        }
    ]

    result = await collection.aggregate(pipeline, allowDiskUse=True).to_list(length=None)
    return JSONResponse(content=result, status_code=200)


# Helper function to get price for a specific date, skipping weekends
async def get_price_for_date(prices, target_date):
    date_str = target_date.strftime('%Y-%m-%d')  # Format date as string 'YYYY-MM-DD'

    # If the target date is found, return the price
    if date_str in prices:
        return prices[date_str].get('close')

    # If not found, look for the next available date (skip weekends)
    next_date = target_date + timedelta(days=1)

    while True:
        if next_date.weekday() >= 5:  # Skip weekends
            next_date += timedelta(days=1)
            continue

        next_date_str = next_date.strftime('%Y-%m-%d')
        if next_date_str in prices:
            return prices[next_date_str].get('close')

        next_date += timedelta(days=1)

        # Give up after a reasonable range to prevent an infinite loop
        if (next_date - target_date).days > 365:  # Example limit: stop after 1 year
            return None
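
# Example (illustrative): if prices has no entry for Saturday 2024-06-01,
# await get_price_for_date(prices, datetime(2024, 6, 1)) skips the weekend and
# returns the 'close' stored under "2024-06-03" (the following Monday), or None
# if nothing is found within a year.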


# Stock Information
@app.get("/stock-information/")
async def StockInformation(
    search: Optional[str] = Query(None),
    type: Optional[str] = Query(None)
    ):
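    """ Fetch combined stock information for the Stock Information view: historical
    prices from the local MongoDB collections plus quote/summary data from Yahoo
    Finance, using the search and type parameters. """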

    if not search or not type:
        return JSONResponse(content={"error": "Search query and type (NSE or BSE) are required"}, status_code=400)

    query = {'symbol': search}
    client = AsyncIOMotorClient('mongodb://192.168.31.176:27017/?directConnection=true', maxPoolSize=100, retryWrites=True)
    db = client['zerodha_test']
    try:
        # Common code for NSE and BSE
        if type == "NSE":
            ticker = f"{search}.NS"
            collection = db['nse_data1']
        else:
            ticker = f"{search}.BO"
            collection = db['bse_data1']

        result = await collection.find_one(query)

        if not result:
            return {"error": "Symbol not found"}

        # Fetch stock info
        stock = yf.Ticker(ticker)
        info = stock.info

        # Extract data from stock info (guard against a missing/zero previous close
        # so the percentage change does not raise ZeroDivisionError)
        current_price = float(info.get('currentPrice', 0.0) or 0.0)
        prev_close = float(info.get('previousClose', 0.0) or 0.0)
        stock_info = {
            'longName': info.get('shortName', 'N/A'),
            'dchange': ((current_price - prev_close) / prev_close * 100) if prev_close else None,
            'rchange': current_price - prev_close,
            'previous_close': info.get('previousClose', 'N/A'),
            'open': info.get('open', 'N/A'),
            'sector': info.get('sector', 'N/A'),
            'week_52_change' : info.get('52WeekChange', 'N/A'),
            'industry': info.get('industry', 'N/A'),
            'market_cap': info.get('marketCap', 'N/A'),
            'beta': info.get('beta', 'N/A'),
            'full_time_employees': info.get('fullTimeEmployees', 'N/A'),
            'book_value': info.get('bookValue', 'N/A'),
            'price_to_book': info.get('priceToBook', 'N/A'),
            'last_dividend_value': info.get('lastDividendValue', 'N/A'),
            'website': info.get('website', 'N/A'),
            'current_price': info.get('currentPrice', 'N/A'),
            'day_high': info.get('dayHigh', 'N/A'),
            'day_low': info.get('dayLow', 'N/A'),
            'week_52_high': info.get('fiftyTwoWeekHigh', 'N/A'),
            'week_52_low': info.get('fiftyTwoWeekLow', 'N/A')
        }

        day_data = stock.history(period='1d')
        week_data = stock.history(period='5d')
        month_data = stock.history(period="1mo")
        quarter_data = stock.history(period="3mo")
        six_month_data = stock.history(period="6mo")
        year_data = stock.history(period="1y")
        two_year_data = stock.history(period="2y")
        five_year_data = stock.history(period="5y")
        ten_year_data = stock.history(period="10y")

        price_info = {

            # Get data for 1 day
            'day_high' : day_data['High'].max() if not day_data.empty else None,
            'day_low': day_data['Low'].min() if not day_data.empty else None,

            # Get data for 1 week
            'week_high_price' : week_data['High'].max() if not week_data.empty else None,
            'week_low_price': week_data['Low'].min() if not week_data.empty else None,

            # Get data for 1 month
            'month_high_price' : month_data['High'].max() if not month_data.empty else None,
            'month_low_price' : month_data['Low'].min() if not month_data.empty else None,

            # Get data for 1 quarter (3 months)
            'quarter_high_price' : quarter_data['High'].max() if not quarter_data.empty else None,
            'quarter_low_price' : quarter_data['Low'].min() if not quarter_data.empty else None,

            # Get data for 6 months
            'six_month_high_price' : six_month_data['High'].max() if not six_month_data.empty else None,
            'six_month_low_price' : six_month_data['Low'].min() if not six_month_data.empty else None,

            # Get data for 1 year
            'year_high_price' : year_data['High'].max() if not year_data.empty else None,
            'year_low_price' : year_data['Low'].min() if not year_data.empty else None,

            # Get data for 2 years
            'two_year_high_price' : two_year_data['High'].max() if not two_year_data.empty else None,
            'two_year_low_price' : two_year_data['Low'].min() if not two_year_data.empty else None,

            # Get data for 5 years
            'five_year_high_price' : five_year_data['High'].max() if not five_year_data.empty else None,
            'five_year_low_price' : five_year_data['Low'].min() if not five_year_data.empty else None,

            # Get data for 10 year
            'ten_year_high_price' : ten_year_data['High'].max() if not year_data.empty else None,
            'ten_year_low_price' : ten_year_data['Low'].min() if not year_data.empty else None,

        }

        today = datetime.today()
        yesterday = today - timedelta(days=1)
        prices = result.get('prices', {})

        # Ensure yesterday is a trading day
        while yesterday.weekday() >= 5:
            yesterday -= timedelta(days=1)

        # Fetch 5 trading days, 63 (3 months), etc., skipping weekends
        date_1w = get_trading_days(yesterday, 5)[-1]  # Get the date 5 trading days back
        date_3m = get_trading_days(yesterday, 63)[-1]  # Get the date 63 trading days back
        date_6m = get_trading_days(yesterday, 126)[-1]  # Get the date 126 trading days back
        date_1y = get_trading_days(yesterday, 252)[-1]  # Get the date 252 trading days back
        date_2y = get_trading_days(yesterday, 2*252)[-1]  # 2 years
        date_3y = get_trading_days(yesterday, 3*252)[-1]  # 3 years
        date_4y = get_trading_days(yesterday, 4*252)[-1]  # 4 years
        date_5y = get_trading_days(yesterday, 5*252)[-1]  # 5 years
        date_10y = get_trading_days(yesterday, 10*252)[-1]  # 10 years

        # Fetching prices for each target date
        price_yesterday = await get_price_for_date(prices, yesterday)
        price_1w = await get_price_for_date(prices, date_1w)
        price_3m = await get_price_for_date(prices, date_3m)
        price_6m = await get_price_for_date(prices, date_6m)
        price_1y = await get_price_for_date(prices, date_1y)
        price_2y = await get_price_for_date(prices, date_2y)
        price_3y = await get_price_for_date(prices, date_3y)
        price_4y = await get_price_for_date(prices, date_4y)
        price_5y = await get_price_for_date(prices, date_5y)
        price_10y = await get_price_for_date(prices, date_10y)

        # Calculating price changes
        price_chg_1w = price_yesterday - price_1w if price_yesterday and price_1w else None
        price_chg_3m = price_yesterday - price_3m if price_yesterday and price_3m else None
        price_chg_6m = price_yesterday - price_6m if price_yesterday and price_6m else None
        price_chg_1y = price_yesterday - price_1y if price_yesterday and price_1y else None
        price_chg_2y = price_yesterday - price_2y if price_yesterday and price_2y else None
        price_chg_3y = price_yesterday - price_3y if price_yesterday and price_3y else None
        price_chg_4y = price_yesterday - price_4y if price_yesterday and price_4y else None
        price_chg_5y = price_yesterday - price_5y if price_yesterday and price_5y else None
        price_chg_10y = price_yesterday - price_10y if price_yesterday and price_10y else None

        # Calculating percentage changes
        per_chg_1w = (price_chg_1w / price_1w * 100) if price_1w and price_chg_1w else None
        per_chg_3m = (price_chg_3m / price_3m * 100) if price_3m and price_chg_3m else None
        per_chg_6m = (price_chg_6m / price_6m * 100) if price_6m and price_chg_6m else None
        per_chg_1y = (price_chg_1y / price_1y * 100) if price_1y and price_chg_1y else None
        per_chg_2y = (price_chg_2y / price_2y * 100) if price_2y and price_chg_2y else None
        per_chg_3y = (price_chg_3y / price_3y * 100) if price_3y and price_chg_3y else None
        per_chg_4y = (price_chg_4y / price_4y * 100) if price_4y and price_chg_4y else None
        per_chg_5y = (price_chg_5y / price_5y * 100) if price_5y and price_chg_5y else None
        per_chg_10y = (price_chg_10y / price_10y * 100) if price_10y and price_chg_10y else None

        hist_info = {
            "yesterday": {"date": yesterday.strftime('%Y-%m-%d'), "price": price_yesterday},
            "one_Week": {"date": date_1w.strftime('%Y-%m-%d'), "price": price_1w, "rs_chg": price_chg_1w, "pr_chg": per_chg_1w},
            "three_Months": {"date": date_3m.strftime('%Y-%m-%d'), "price": price_3m, "rs_chg": price_chg_3m, "pr_chg": per_chg_3m},
            "six_Months": {"date": date_6m.strftime('%Y-%m-%d'), "price": price_6m, "rs_chg": price_chg_6m, "pr_chg": per_chg_6m},
            "one_Year": {"date": date_1y.strftime('%Y-%m-%d'), "price": price_1y, "rs_chg": price_chg_1y, "pr_chg": per_chg_1y},
            "two_Year": {"date": date_2y.strftime('%Y-%m-%d'), "price": price_2y, "rs_chg": price_chg_2y, "pr_chg": per_chg_2y},
            "three_Year": {"date": date_3y.strftime('%Y-%m-%d'), "price": price_3y, "rs_chg": price_chg_3y, "pr_chg": per_chg_3y},
            "four_Year": {"date": date_4y.strftime('%Y-%m-%d'), "price": price_4y, "rs_chg": price_chg_4y, "pr_chg": per_chg_4y},
            "five_Year": {"date": date_5y.strftime('%Y-%m-%d'), "price": price_5y, "rs_chg": price_chg_5y, "pr_chg": per_chg_5y},
            "ten_Year": {"date": date_10y.strftime('%Y-%m-%d'), "price": price_10y, "rs_chg": price_chg_10y, "pr_chg": per_chg_10y}
        }

        # Combine all results in a structured dictionary
        result = {
            'ticker': ticker,
            'stock_info': stock_info,
            'hist_info': hist_info,
            'price_info' : price_info
        }

        return JSONResponse(content=result, status_code=200)

    except Exception as e:
        return JSONResponse(content={"error": str(e)}, status_code=500)


# Search NSE and BSE symbols by prefix
@app.get("/search-symbol/{prefix}")
async def Symbol_Get(prefix: str):
    """ This Function Get All NSE, BSE Symbols """
    client = AsyncIOMotorClient("mongodb://192.168.31.176:27017/?directConnection=true", maxPoolSize=100, retryWrites=True)  # Harsh's local MongoDB instance
    database = client["zerodha_test"]
    collection_bse = database["bse_data1"]
    collection_nse = database["nse_data1"]

    # Pipeline for NSE symbols with label, filtered by prefix
    nse_pipeline = [
        {"$match": {"symbol": {"$regex": f"^{prefix}", "$options": "i"}}},  # Match symbols starting with the prefix
        {"$group": {"_id": "$symbol"}},
        {
            "$project": {
                "symbol": "$_id",
                "exchange": {"$literal": "NSE"}  # Add the exchange field with a fixed value "NSE"
            }
        }
    ]

    # Pipeline for BSE symbols with label, filtered by prefix
    bse_pipeline = [
        {"$match": {"symbol": {"$regex": f"^{prefix}", "$options": "i"}}},  # Match symbols starting with the prefix
        {"$group": {"_id": "$symbol"}},
        {
            "$project": {
                "symbol": "$_id",
                "exchange": {"$literal": "BSE"}  # Add the exchange field with a fixed value "BSE"
            }
        }
    ]

    # Execute aggregation for each collection
    nse_symbols = await collection_nse.aggregate(nse_pipeline).to_list(length=None)
    bse_symbols = await collection_bse.aggregate(bse_pipeline).to_list(length=None)

    # Combine NSE and BSE symbols into a single list
    result = nse_symbols + bse_symbols

    # Close the client
    client.close()

    # Return the combined list of symbols
    return JSONResponse(content=result, status_code=200)
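# Illustrative request/response for the symbol search (hypothetical values); note that
# "_id" is still present in each item because the $project stage does not exclude it:
#   GET {base_url}/search-symbol/AB
#   -> [{"_id": "ABB", "symbol": "ABB", "exchange": "NSE"},
#       {"_id": "ABB", "symbol": "ABB", "exchange": "BSE"}, ...]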

'''
@app.post("/add-orders/")
async def add_orders(orders: List[Order]):
    # Convert the Pydantic models to dictionaries and add "datetime"
    orders_data = [
        {**order.dict(), "datetime": datetime.utcnow()} for order in orders
    ]

    try:
        # Insert the list into MongoDB
        result = orders_collection.insert_many(orders_data)
        place_order(orders=orders_data)
        return {"message": "Orders added successfully", "inserted_ids": str(result.inserted_ids)}
    except PyMongoError as e:
        raise HTTPException(status_code=500, detail=f"Database error: {e}")


def serialize_order(order):
    """Convert MongoDB document to a JSON-compatible format."""
    # Convert ObjectId to string
    order['_id'] = str(order['_id'])

    # Convert datetime objects to ISO format strings
    for key, value in order.items():
        if isinstance(value, ObjectId):
            order[key] = str(value)
        elif isinstance(value, datetime):
            order[key] = value.isoformat()  # Convert datetime to ISO format string

    return order

@app.get("/get-orders/")
async def get_orders(
    start_date: str = Query(None, description="Start date in YYYY-MM-DD format"),
    end_date: str = Query(None, description="End date in YYYY-MM-DD format")
):
    try:
        # Default to yesterday if dates are not provided
        if not start_date or not end_date:
            yesterday = datetime.now() # - timedelta(days=1)
            start_date = end_date = yesterday.strftime("%Y-%m-%d")

        # Convert start_date and end_date strings to datetime objects
        start_date_obj = datetime.strptime(start_date, "%Y-%m-%d")
        end_date_obj = datetime.strptime(end_date, "%Y-%m-%d")

        # Check if there are documents for the given start_date
        if orders_collection.count_documents({"datetime": {"$gte": start_date_obj, "$lt": start_date_obj + timedelta(days=1)}}) == 0:
            # Find next available start date if no data for given start_date
            next_start = list(orders_collection.find({"datetime": {"$gt": start_date_obj}}).sort("datetime", ASCENDING).limit(1))
            if next_start:
                start_date_obj = next_start[0]["datetime"]

        # Check if there are documents for the given end_date
        if orders_collection.count_documents({"datetime": {"$gte": end_date_obj, "$lt": end_date_obj + timedelta(days=1)}}) == 0:
            # Find previous available end date if no data for given end_date
            prev_end = list(orders_collection.find({"datetime": {"$lt": end_date_obj + timedelta(days=1)}}).sort("datetime", DESCENDING).limit(1))
            if prev_end:
                end_date_obj = prev_end[0]["datetime"]

        # Fetch orders within the adjusted date range
        result = list(
            orders_collection.find({
                "datetime": {
                    "$gte": start_date_obj,
                    "$lt": end_date_obj + timedelta(days=1)
                }
            })
        )

        # Serialize the orders
        serialized_orders = [serialize_order(order) for order in result]
        return JSONResponse(content={"message": "Orders fetched successfully", "orders": serialized_orders})

    except PyMongoError as e:
        raise HTTPException(status_code=500, detail=f"Database error: {e}")
    except ValueError as ve:
        raise HTTPException(status_code=400, detail=f"Invalid date format: {ve}")
'''

@app.delete("/delete-orders/")
async def delete_orders(delete_request: DeleteOrd):
    try:
        # Extract order IDs from the request
        order_ids = [ObjectId(order["id"]) for order in delete_request.order_id if "id" in order]

        # Delete the orders with the specified IDs
        delete_result = orders_collection.delete_many({"_id": {"$in": order_ids}})

        return {
            "message": "Orders deleted successfully",
            "deleted_count": delete_result.deleted_count
        }
    except PyMongoError as e:
        raise HTTPException(status_code=500, detail=f"Database error: {e}")
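# Illustrative DELETE /delete-orders/ request body (hypothetical ObjectId):
#   {"order_id": [{"id": "6553f1c2a7e4b2d9c8f1a234"}]}
# Entries without an "id" key are ignored when building the deletion filter.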


# Helper function to convert datetime objects to ISO format
def convert_datetime(doc):
    if isinstance(doc, dict):
        for key, value in doc.items():
            if isinstance(value, datetime):
                doc[key] = value.isoformat()
            elif isinstance(value, dict) or isinstance(value, list):
                doc[key] = convert_datetime(value)
    elif isinstance(doc, list):
        for i in range(len(doc)):
            doc[i] = convert_datetime(doc[i])
    return doc
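# Illustrative use (hypothetical document):
#   convert_datetime({"symbol": "ABB", "datetime": datetime(2024, 1, 1)})
#   -> {"symbol": "ABB", "datetime": "2024-01-01T00:00:00"}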

@app.get("/holdings/")
async def get_holdings():
    try:
        # Fetch and process holdings data
        holdings_list = list(holdings_collection.find({}, {"_id": 0}))

        # Convert datetime fields to ISO format for all holdings
        holdings_list = [convert_datetime(holding) for holding in holdings_list]

        return JSONResponse(content={"message": "Holdings fetched successfully", "orders": holdings_list})
    except Exception as e:
        return JSONResponse(content={"message": "An error occurred while fetching holdings", "error": str(e)}, status_code=500)


# Helper function to convert MongoDB results to JSON serializable format
def convert_objectid(data):
    if isinstance(data, list):
        return [convert_objectid(item) for item in data]
    elif isinstance(data, dict):
        return {key: convert_objectid(value) for key, value in data.items()}
    elif isinstance(data, ObjectId):
        return str(data)
    else:
        return data

@app.post("/report-upload/")
async def upload_report(get_lists: List[ReportList]):
    # Convert Pydantic model instances to dictionaries and add a timestamp
    upload_data = [
        {**report.dict(), "datetime": datetime.now()}
        for report in get_lists
    ]
    # Insert into MongoDB
    reportUpload_collection.insert_many(upload_data)

    # Aggregation pipeline
    pipeline = [
        {
            "$group": {
                "_id": {"symbol": "$symbol", "qty": "$qty", "avg_price": "$avg_price"},
                "duplicateIds": {"$push": "$_id"},
                "count": {"$sum": 1}
            }
        },
        {
            "$match": {
                "count": {"$gt": 1}
            }
        }
    ]

    # Run the aggregation pipeline
    duplicates = list(reportUpload_collection.aggregate(pipeline))

    # Loop through duplicates and delete extra documents
    for doc in duplicates:
        duplicate_ids = doc["duplicateIds"]
        duplicate_ids.pop(0)  # Keep one document
        reportUpload_collection.delete_many({"_id": {"$in": duplicate_ids}})

    return JSONResponse(content={"message": "Data Inserted successfully..."})
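# Illustrative POST /report-upload/ request body (hypothetical values):
#   [{"symbol": "ABB", "avg_price": 1234.5, "qty": 10},
#    {"symbol": "INFY", "avg_price": 1500.0, "qty": 5}]
# Duplicate (symbol, qty, avg_price) rows are collapsed to a single document
# by the aggregation above.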

# Stricter request model for /get-report (rebinds the ReportType name defined earlier in this file)
class ReportType(BaseModel):
    start_date: str = Field(..., pattern=r"^\d{4}-\d{2}-\d{2}$")
    end_date: str = Field(..., pattern=r"^\d{4}-\d{2}-\d{2}$")
    reqType: int = Field(..., ge=0, le=4)
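# Illustrative POST /get-report/ request body (hypothetical dates):
#   {"start_date": "2024-01-01", "end_date": "2024-01-31", "reqType": 0}
# reqType 0 returns all four OHLC reports plus a summary_report; 1-4 map to
# open, high, low and close respectively.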

class PriceData(TypedDict):
    date: datetime
    price_multiplied: float

class SymbolReport(TypedDict):
    type: str
    symbol: str
    qty: Union[float, str]
    avg_price: Union[float, str]

def convert_objectid(data: Union[Dict, List]) -> Union[Dict, List]:
    """Convert MongoDB ObjectId to string in nested structures."""
    if isinstance(data, dict):
        return {k: str(v) if isinstance(v, ObjectId) else convert_objectid(v)
                for k, v in data.items()}
    elif isinstance(data, list):
        return [convert_objectid(item) for item in data]
    return data


@app.post("/get-report/")
async def get_report(req: ReportType) -> JSONResponse:
    try:
        # Validate and convert dates
        start_dt = datetime.strptime(req.start_date, "%Y-%m-%d")
        end_dt = datetime.strptime(req.end_date, "%Y-%m-%d")

        if start_dt > end_dt:
            raise HTTPException(status_code=400, detail="start_date must be before or equal to end_date")

        # Define the types and their mappings
        report_type_map = {
            1: "open",
            2: "high",
            3: "low",
            4: "close"
        }

        # Initialize results dictionary
        results_by_type = {"summary_report": {}}

        # Check if reqType is 0, meaning we should return all types
        if req.reqType == 0:
            # Loop over each type and calculate the report for each
            for req_type, report_type in report_type_map.items():
                symbols_data = list(reportUpload_collection.find())
                if not symbols_data:
                    raise HTTPException(status_code=404, detail="No data found in reportUpload collection")

                all_symbols = []
                total_prices = {}

                # Process each symbol for the given report type
                for symbol_info in symbols_data:
                    pipeline = [
                        {"$match": {"symbol": symbol_info["symbol"]}},
                        {"$project": {
                            "prices": {
                                "$filter": {
                                    "input": {
                                        "$map": {
                                            "input": {"$objectToArray": "$prices"},
                                            "as": "priceData",
                                            "in": {
                                                "date": {"$toDate": "$$priceData.k"},
                                                "price_multiplied": {
                                                    "$multiply": [
                                                        f"$$priceData.v.{report_type}",
                                                        symbol_info["qty"]
                                                    ]
                                                }
                                            }
                                        }
                                    },
                                    "as": "filteredPrice",
                                    "cond": {
                                        "$and": [
                                            {"$gte": ["$$filteredPrice.date", start_dt]},
                                            {"$lte": ["$$filteredPrice.date", end_dt]}
                                        ]
                                    }
                                }
                            }
                        }}
                    ]

                    results = list(nseData_collection.aggregate(pipeline))

                    symbol_data = {
                        "type": report_type,
                        "symbol": symbol_info["symbol"],
                        "qty": symbol_info["qty"],
                        "avg_price": symbol_info["avg_price"]
                    }

                    # Process price data for each date
                    for result in results:
                        for price_data in result.get("prices", []):
                            date_str = price_data["date"].strftime("%Y-%m-%d")
                            price = price_data["price_multiplied"]

                            symbol_data[date_str] = price
                            total_prices[date_str] = total_prices.get(date_str, 0) + price

                    all_symbols.append(symbol_data)

                # Add summary entries
                sorted_dates = sorted(total_prices.keys())

                # Total entry
                total_entry = {
                    "type": report_type,
                    "symbol": "TOTAL",
                    "qty": "",
                    "avg_price": "",
                    **{date: total_prices[date] for date in sorted_dates}
                }
                all_symbols.append(total_entry)

                # Cumulative change entry
                first_total = total_prices[sorted_dates[0]] if sorted_dates else 0
                change_entry = {
                    "type": report_type,
                    "symbol": f"% TOTAL CHANGE in {report_type}",
                    "qty": "",
                    "avg_price": "",
                    **{
                        date: "-" if i == 0 else
                        round(((total_prices[date] - first_total) / first_total * 100), 2)
                        for i, date in enumerate(sorted_dates)
                    }
                }
                all_symbols.append(change_entry)

                # Day-over-day change entry
                day_changes = {
                    date: total_prices[date] - total_prices[sorted_dates[i-1]]
                    if i > 0 else "-"
                    for i, date in enumerate(sorted_dates)
                }
                day_change_entry = {
                    "type": report_type,
                    "symbol": f"DAY CHANGE in {report_type}",
                    "qty": "",
                    "avg_price": "",
                    **day_changes
                }
                all_symbols.append(day_change_entry)

                # Day-over-day percentage change entry
                day_change_percent_entry = {
                    "type": report_type,
                    "symbol": f"DAY CHANGE % in {report_type}",
                    "qty": "",
                    "avg_price": "",
                    **{
                        date: "-" if i == 0 else
                        round((day_changes[date] / total_prices[sorted_dates[i-1]] * 100), 2)
                        for i, date in enumerate(sorted_dates)
                    }
                }
                all_symbols.append(day_change_percent_entry)

                # Store the data for this type in the final results dictionary
                results_by_type[report_type] = convert_objectid(all_symbols)
                results_by_type["summary_report"][report_type] = total_entry

            # -----------------------------VALUE CALCULATIONS-------------------------------- #
            high_minus_open = { "symbol": "High - Open" }
            for date in results_by_type["summary_report"]["high"]:
                if date in results_by_type["summary_report"]["open"]:
                    high_value = results_by_type["summary_report"]["high"][date]
                    open_value = results_by_type["summary_report"]["open"][date]
                    # print(high_value, open_value, type(high_value), type(open_value))
                    if not isinstance(high_value, str):
                        high_minus_open[date] = high_value - open_value
            results_by_type["summary_report"]["high_minus_open"] = high_minus_open

            open_minus_low = { "symbol": "Open - Low" }
            for date in results_by_type["summary_report"]["open"]:
                if date in results_by_type["summary_report"]["low"]:
                    open_value = results_by_type["summary_report"]["open"][date]
                    low_value = results_by_type["summary_report"]["low"][date]
                    if not isinstance(open_value, str):
                        open_minus_low[date] = open_value - low_value
            results_by_type["summary_report"]["open_minus_low"] = open_minus_low

            high_minus_low = { "symbol": "High - Low" }
            for date in results_by_type["summary_report"]["high"]:
                if date in results_by_type["summary_report"]["low"]:
                    high_value = results_by_type["summary_report"]["high"][date]
                    low_value = results_by_type["summary_report"]["low"][date]
                    if not isinstance(high_value, str):
                        high_minus_low[date] = high_value - low_value
            results_by_type["summary_report"]["high_minus_low"] = high_minus_low

            open_minus_close = { "symbol": "Open - Close" }
            for date in results_by_type["summary_report"]["open"]:
                if date in results_by_type["summary_report"]["close"]:
                    open_value = results_by_type["summary_report"]["open"][date]
                    close_value = results_by_type["summary_report"]["close"][date]
                    if not isinstance(open_value, str):
                        open_minus_close[date] = open_value - close_value
            results_by_type["summary_report"]["open_minus_close"] = open_minus_close

            # -----------------------------PERCENTAGE CALCULATIONS-------------------------------- #

            high_minus_open_pr = { "symbol": "High - Open" }
            for date in results_by_type["summary_report"]["high"]:
                if date in results_by_type["summary_report"]["open"]:
                    high_value = results_by_type["summary_report"]["high"][date]
                    open_value = results_by_type["summary_report"]["open"][date]
                    # print(high_value, open_value, type(high_value), type(open_value))
                    if not isinstance(high_value, str):
                        high_minus_open_pr[date] = ((high_value - open_value) / open_value) * 100
            results_by_type["summary_report"]["high_minus_open_pr"] = high_minus_open_pr

            open_minus_low_pr = { "symbol": "Open - Low" }
            for date in results_by_type["summary_report"]["open"]:
                if date in results_by_type["summary_report"]["low"]:
                    open_value = results_by_type["summary_report"]["open"][date]
                    low_value = results_by_type["summary_report"]["low"][date]
                    if not isinstance(open_value, str):
                        open_minus_low_pr[date] = ((open_value - low_value) / open_value) * 100
            results_by_type["summary_report"]["open_minus_low_pr"] = open_minus_low_pr

            high_minus_low_pr = { "symbol": "High - Low" }
            for date in results_by_type["summary_report"]["high"]:
                if date in results_by_type["summary_report"]["low"]:
                    high_value = results_by_type["summary_report"]["high"][date]
                    low_value = results_by_type["summary_report"]["low"][date]
                    if not isinstance(high_value, str):
                        high_minus_low_pr[date] = ((high_value - low_value) / high_value) * 100
            results_by_type["summary_report"]["high_minus_low_pr"] = high_minus_low_pr

            open_minus_close_pr = { "symbol": "Open - Close" }
            for date in results_by_type["summary_report"]["open"]:
                if date in results_by_type["summary_report"]["close"]:
                    open_value = results_by_type["summary_report"]["open"][date]
                    close_value = results_by_type["summary_report"]["close"][date]
                    if not isinstance(open_value, str):
                        open_minus_close_pr[date] = ((open_value - close_value) / open_value) * 100
            results_by_type["summary_report"]["open_minus_close_pr"] = open_minus_close_pr

            # Return data for all types in a single response
            return JSONResponse(content=results_by_type)

        else:
            # Handle individual report types as before
            report_type = report_type_map.get(req.reqType)
            if not report_type:
                raise HTTPException(status_code=400,
                                    detail="Invalid reqType. Must be between 1 and 4")

            symbols_data = list(reportUpload_collection.find())
            if not symbols_data:
                raise HTTPException(status_code=404,
                                    detail="No data found in reportUpload collection")

            all_symbols = []
            total_prices = {}

            # Process each symbol for the specified report type
            for symbol_info in symbols_data:
                pipeline = [
                    {
                        "$match": {"symbol": symbol_info["symbol"]}
                    },
                    {
                        "$project": {
                            "prices": {
                                "$filter": {
                                    "input": {
                                        "$map": {
                                            "input": {"$objectToArray": "$prices"},
                                            "as": "priceData",
                                            "in": {
                                                "date": {"$toDate": "$$priceData.k"},
                                                "price_multiplied": {
                                                    "$multiply": [
                                                        f"$$priceData.v.{report_type}",
                                                        symbol_info["qty"]
                                                    ]
                                                }
                                            }
                                        }
                                    },
                                    "as": "filteredPrice",
                                    "cond": {
                                        "$and": [
                                            {"$gte": ["$$filteredPrice.date", start_dt]},
                                            {"$lte": ["$$filteredPrice.date", end_dt]}
                                        ]
                                    }
                                }
                            }
                        }
                    }
                ]

                results = list(nseData_collection.aggregate(pipeline))

                symbol_data = {
                    "type": report_type,
                    "symbol": symbol_info["symbol"],
                    "qty": symbol_info["qty"],
                    "avg_price": symbol_info["avg_price"]
                }

                # Process price data for each date
                for result in results:
                    for price_data in result.get("prices", []):
                        date_str = price_data["date"].strftime("%Y-%m-%d")
                        price = price_data["price_multiplied"]

                        symbol_data[date_str] = price
                        total_prices[date_str] = total_prices.get(date_str, 0) + price

                all_symbols.append(symbol_data)

            # Add summary entries
            sorted_dates = sorted(total_prices.keys())

            # Add total entry
            total_entry = {
                "type": report_type,
                "symbol": "TOTAL",
                "qty": "",
                "avg_price": "",
                **{date: total_prices[date] for date in sorted_dates}
            }
            all_symbols.append(total_entry)

            # Add cumulative change entry
            first_total = total_prices[sorted_dates[0]] if sorted_dates else 0
            change_entry = {
                "type": report_type,
                "symbol": f"% TOTAL CHANGE in {report_type}",
                "qty": "",
                "avg_price": "",
                **{
                    date: "-" if i == 0 else
                    round(((total_prices[date] - first_total) / first_total * 100), 2)
                    for i, date in enumerate(sorted_dates)
                }
            }
            all_symbols.append(change_entry)

            # Add day-over-day change entry
            day_changes = {
                date: total_prices[date] - total_prices[sorted_dates[i-1]]
                if i > 0 else "-"
                for i, date in enumerate(sorted_dates)
            }

            day_change_entry = {
                "type": report_type,
                "symbol": f"DAY CHANGE in {report_type}",
                "qty": "",
                "avg_price": "",
                **day_changes
            }
            all_symbols.append(day_change_entry)

            # Add day-over-day percentage change entry
            day_change_percent_entry = {
                "type": report_type,
                "symbol": f"DAY CHANGE % in {report_type}",
                "qty": "",
                "avg_price": "",
                **{
                    date: "-" if i == 0 else
                    round((day_changes[date] / total_prices[sorted_dates[i-1]] * 100), 2)
                    for i, date in enumerate(sorted_dates)
                }
            }

            all_symbols.append(day_change_percent_entry)

            # Return data in the regular format for the specified report type
            return JSONResponse(content=convert_objectid(all_symbols))

    except HTTPException:
        # Re-raise explicit HTTP errors (400/404) instead of masking them as 500s
        raise
    except ValueError as e:
        raise HTTPException(status_code=400,
                            detail=f"Invalid date format: {str(e)}")
    except Exception as e:
        raise HTTPException(status_code=500,
                            detail=f"An error occurred: {str(e)}")
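# Response shape of /get-report (illustrative): each row carries "type", "symbol",
# "qty" and "avg_price" plus one column per trading date holding price * qty, and
# summary rows (TOTAL, % TOTAL CHANGE, DAY CHANGE, DAY CHANGE %) are appended per
# type. With reqType 0 the rows are grouped under "open"/"high"/"low"/"close" and a
# "summary_report" object that also carries the High-Open / Open-Low / High-Low /
# Open-Close spreads and their percentage variants.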
