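"""Nifty 500 RSI screener.

Downloads daily candles from Groww's public charting API, computes a 14-period
RSI for each Nifty 500 symbol, keeps the stocks whose RSI has just crossed
above 60, and writes the results to a date-named MongoDB collection.
"""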
import pandas as pd
from datetime import datetime, timedelta
from pymongo import MongoClient
import pytz
import os
import requests

mongo_url = os.environ['MongoURL']  # MongoDB connection string supplied via environment

# Symbol metadata for the Nifty 500 universe: industry, logo URL and F&O eligibility
df_logo = pd.read_csv('https://raw.githubusercontent.com/jarvisx17/nifty500/main/Stocks.csv')
df_logo = df_logo[['Symbol', 'Industry', 'logo', 'FNO']]

tz = pytz.timezone('Asia/Kolkata')
indian_timezone = pytz.timezone('Asia/Kolkata')
utc_timezone = pytz.timezone('UTC')

# Groww public charting endpoint for NSE cash-segment candles; browser-like User-Agent
base_url = "https://groww.in/v1/api/charting_service/v3/chart/exchange/NSE/segment/CASH/"
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36 Edg/119.0.0.0'
}

def get_time_range(days=7):
    """Return (start, end) of the trailing `days`-day window as epoch milliseconds."""
    current_time = datetime.now(indian_timezone)
    start_time = current_time - timedelta(days=days)
    start_time_utc = start_time.astimezone(pytz.utc)
    current_time_utc = current_time.astimezone(pytz.utc)
    start_time_millis = int(start_time_utc.timestamp() * 1000)
    end_time_millis = int(current_time_utc.timestamp() * 1000)
    return start_time_millis, end_time_millis

def fetch_stock_data(symbol, interval=15, days=7):
    """Fetch OHLCV candles for an NSE symbol from Groww; return a DataFrame with IST timestamps, or None on error."""
    start_time, end_time = get_time_range(days)
    params = {
        'endTimeInMillis': end_time,
        'intervalInMinutes': interval,
        'startTimeInMillis': start_time,
    }
    try:
        print("Downloading data of", symbol.upper())
        response = requests.get(base_url + symbol.upper(), params=params, headers=headers, timeout=30)
        response.raise_for_status()
        data = response.json()
        columns = ['Date', 'Open', 'High', 'Low', 'Close', 'Volume']
        # Candle timestamps arrive as epoch seconds; convert to UTC datetimes, then to IST
        for row in data['candles']:
            row[0] = datetime.utcfromtimestamp(row[0])
        df = pd.DataFrame(data['candles'], columns=columns)
        df['Date'] = pd.to_datetime(df['Date'])
        df['Date'] = df['Date'].dt.tz_localize(utc_timezone).dt.tz_convert(indian_timezone)
        return df
    except requests.exceptions.RequestException as e:
        print(f"Error during API request: {e}")
        return None

def UpdatedCollectionName():
    """Name the collection by trade date; after the 15:30 IST close, store under the next day's date."""
    current_time = datetime.now(tz)
    collection_name = current_time.strftime('%Y-%m-%d')
    if current_time.time() >= datetime.strptime('15:30', '%H:%M').time():
        collection_name = (current_time + timedelta(days=1)).strftime('%Y-%m-%d')
    return collection_name

def get_rsi(close, lookback=14):
    """Wilder-style RSI computed with exponentially weighted means (com = lookback - 1)."""
    ret = close.diff()
    up = []
    down = []
    # Split each period's change into a gain series and a loss series
    for i in range(len(ret)):
        if ret.iloc[i] < 0:
            up.append(0)
            down.append(ret.iloc[i])
        else:
            up.append(ret.iloc[i])
            down.append(0)
    up_series = pd.Series(up, index=close.index)
    down_series = pd.Series(down, index=close.index).abs()
    up_ewm = up_series.ewm(com=lookback - 1, adjust=False).mean()
    down_ewm = down_series.ewm(com=lookback - 1, adjust=False).mean()
    rs = up_ewm / down_ewm
    rsi = 100 - (100 / (1 + rs))
    return rsi

def Stocks():
    """Screen the Nifty 500 for a daily RSI cross above 60 and persist the matches to MongoDB."""
    # The 365-day lookback is handled inside fetch_stock_data(days=365); no separate date math is needed here.
    nifty500 = pd.read_csv('https://archives.nseindia.com/content/indices/ind_nifty500list.csv')
    nifty500_symbols = [f'{symbol}.NS' for symbol in nifty500.Symbol]
        
    nifty500_data = pd.DataFrame()
    print("Downloading data...")
    for symbol in nifty500_symbols:
        try:
            stock_data = fetch_stock_data(symbol.replace('.NS', ''), interval=1440, days=365)
            if stock_data is None or stock_data.empty:
                continue  # skip symbols the API returned nothing for
            stock_data['Symbol'] = symbol
            nifty500_data = pd.concat([nifty500_data, stock_data], axis=0)
        except Exception as e:
            print(f"Error fetching data for {symbol}: {e}")

    nifty500_data['RSI'] = (
        nifty500_data.groupby('Symbol')['Close']
        .transform(lambda x: get_rsi(x, lookback=14))
    )
    nifty500_data['SMA20'] = nifty500_data.groupby('Symbol')['Close'].transform(lambda x: x.rolling(window=20).mean())
    nifty500_data['PercentageChange'] = nifty500_data.groupby('Symbol')['Close'].pct_change() * 100
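    # Keep only the last two daily candles per symbol so today's RSI can be compared with yesterday's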
    nifty500_data_last_2_rows = nifty500_data.groupby('Symbol').tail(2)
    nifty500_data_last_2_rows.reset_index(drop=True, inplace=True)
    nifty500_data_last_2_rows['Prev_RSI'] = nifty500_data_last_2_rows.groupby('Symbol')['RSI'].shift(1)

    filtered_data_by_stock = []
    for symbol, group in nifty500_data_last_2_rows.groupby('Symbol'):
        # RSI crossover: at or above 60 today while the previous session was below 60
        filtered_stock_data = group[(group['RSI'] >= 60) & (group['Prev_RSI'] < 60)]
        if not filtered_stock_data.empty:
            filtered_data_by_stock.append(filtered_stock_data)

    if not filtered_data_by_stock:
        return []  # no crossover signals today; pd.concat would fail on an empty list

    filtered_data = pd.concat(filtered_data_by_stock)
    filtered_data.reset_index(drop=True, inplace=True)
    filtered_data[['Open', 'High','Low', 'Close', 'RSI', 'Prev_RSI','SMA20', 'PercentageChange']] = filtered_data[['Open', 'High','Low', 'Close', 'RSI', 'Prev_RSI', 'SMA20', 'PercentageChange']].round(2)
    filtered_data = filtered_data.sort_values(by='PercentageChange', ascending=False)
    filtered_data.reset_index(drop=True, inplace=True)
    filtered_data = pd.merge(filtered_data, df_logo, on='Symbol', how='inner') 
    filtered_data = filtered_data[['Symbol', 'Date', 'Open', 'High', 'Low', 'Close', 'RSI', 'Prev_RSI','PercentageChange','Industry','FNO', "logo"]]
    filtered_data['Industry'] = filtered_data['FNO'].apply(lambda x: 'F&O + Equity' if x == 'Yes' else 'Equity Only')
    print(filtered_data['Industry'])
    client = MongoClient(mongo_url)
    db = client['mydatabase']
    collection = db[UpdatedCollectionName()]
    data_dict = filtered_data.to_dict(orient='records')
    if data_dict:  # insert_many raises on an empty list
        collection.insert_many(data_dict)
    return data_dict
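
# A minimal sketch of an entry point, assuming the script is meant to be run directly
# (the original file does not include one); the MongoURL environment variable must be
# set before Stocks() is called.
if __name__ == '__main__':
    records = Stocks()
    print(f"Stored {len(records)} RSI-crossover records")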