Skip to content

Instantly share code, notes, and snippets.

@shekarsiri
Forked from spkprav/minute_notifier.py
Created November 6, 2022 17:37
Show Gist options
  • Save shekarsiri/46a2c0ad4dacbf3537a14743bd490c27 to your computer and use it in GitHub Desktop.
Save shekarsiri/46a2c0ad4dacbf3537a14743bd490c27 to your computer and use it in GitHub Desktop.
This is one of the strategies I am using to catch a trade as early as possible; I have yet to automate the trade itself.
# This script will fetch the notifier data from unstructured CSV data
# and convert that into structured and store the same into MongoDB
# This converted structured data is used to visualize in charts
import pdb
import os
import csv
import pandas as pd
import numpy as np
from datetime import datetime
import pymongo
from pymongo import MongoClient
from dateutil import parser
import time
print("Updating...")
# Mongo connection used below to upsert per-minute OHLCV candles.
# NOTE(review): the connection string is a placeholder — real creds are
# injected elsewhere; confirm before running.
client = pymongo.MongoClient('mongo_creds_goes_here')
# Check volume notification
def volNotfyCheck(group, threshold=4000):
    """Sum all sample-to-sample volume jumps of at least ``threshold``.

    ``group`` is a pandas group/frame whose column ``1`` holds cumulative
    volume samples for one minute.  Consecutive differences that reach
    ``threshold`` are summed; returns 0 when no jump qualifies.

    ``threshold`` defaults to the original hard-coded 4000 so existing
    callers are unchanged.
    """
    volumes = group[1].values
    prev = volumes[0]  # first sample compares against itself (diff 0, never counted)
    spikes = []
    for curr in volumes:
        if curr - prev >= threshold:
            spikes.append(curr - prev)
        prev = curr
    return sum(spikes)
# Column indices of the raw CSV: price, cumulative volume, tick timestamp.
PRICE, VOLUME, DATE = 0, 2, 3
# Today date in the dd-mm-YYYY form used in the dump file name.
currentDate = datetime.today().strftime('%d-%m-%Y')
filePath = r'data/unstructured/live/bank_nifty/live_full-' + \
    currentDate + '.csv'  # File path of today's live dump
# Row count seen on the previous poll — used to detect new data.
dataLength = 0
# Minute stamp of the newest row processed so far.
lastMinute = 0
# Poll the live CSV every 10 seconds, aggregate ticks into one-minute OHLCV
# candles and upsert them into MongoDB (brain.bnf_minute).
# NOTE(review): the original wrapped the body in `while True` inside the
# existence check, so the outer condition was never re-evaluated; the loop
# now re-checks the file on every pass, which appears to be the intent.
db = client.brain  # hoisted: no need to re-resolve the database per candle
while os.path.exists(filePath):  # stop when the live dump disappears
    # Re-read the whole file; it is append-only while the market is live.
    csvRead = pd.read_csv(filePath, sep=',', header=None,
                          skip_blank_lines=True,
                          usecols=[PRICE, VOLUME, DATE])
    csv_len = len(csvRead)
    if dataLength < csv_len:  # only recompute when new rows arrived
        dataLength = csv_len
        lastMinute = datetime.strptime(
            str(csvRead.values[-1][2]), '%Y-%m-%d %H:%M:%S').strftime('%d-%m-%Y %H:%M')
        csvData = []
        for data in csvRead.values:
            csvDate = datetime.strptime(
                str(data[2]), '%Y-%m-%d %H:%M:%S').strftime('%d-%m-%Y')
            # Keep only today's rows, truncating timestamps to the minute.
            if csvDate == currentDate:
                data[2] = datetime.strptime(
                    str(data[2]), '%Y-%m-%d %H:%M:%S').strftime('%d-%m-%Y %H:%M')
                csvData.append(data)
        df = pd.DataFrame(csvData)
        # One group per minute (column 2 is the truncated timestamp).
        groupedData = df.groupby(2)
        for gtime, group in groupedData:
            # open/high/low/close come from the price column; volume is the
            # delta of the cumulative-volume column across the minute.
            O = group[0].values[0]
            H = max(group[0])
            L = min(group[0])
            C = group[0].values[-1]
            V = int(group[1].values[-1] - group[1].values[0])
            volNotfyRes = volNotfyCheck(group)  # volume-spike total for this minute
            minute_ts = datetime.strptime(gtime, '%d-%m-%Y %H:%M')
            query = {'created_at': minute_ts}
            update = {
                "item": "BNF_minute_data",
                "open": O,
                "high": H,
                "low": L,
                "close": C,
                "volume": V,
                "created_at": minute_ts,
                "volume_notified": int(volNotfyRes),
            }
            # Collection.update() is deprecated and removed in modern
            # PyMongo; update_one with $set keeps the upsert semantics.
            db.bnf_minute.update_one(query, {'$set': update}, upsert=True)
    time.sleep(10)
# This script is a WebSocket client connects to a stock broker's server
# using their library KiteConnect.
# I have added few rules to send notifications to the Slack channel
# when it satisfies. The rest of the part is managed manually at the moment
# and working on the automation part yet. The data is stored in MongoDB to
# visualize the data in charts.
import logging
import json
import datetime
from time import gmtime, strftime
import pymongo
from pymongo import MongoClient
import pdb;
import pathlib
import csv
import redis_tokens
from dateutil.relativedelta import relativedelta, FR, TH
import os
from slackclient import SlackClient
from kiteconnect import KiteConnect
from kiteconnect import KiteTicker
######################
# Record this process's PID so a supervisor/cron can find (and kill or
# restart) the running notifier.
pid = str(os.getpid())
pidfile = "/tmp/notifier.pid"
# Context manager guarantees the handle is closed even if the write fails;
# the original manual open/write/close leaked the handle on error.
with open(pidfile, 'w') as currentFile:
    currentFile.write(pid)
######################
# DEBUG-level logging: the ticker and notify() both log every tick delta.
logging.basicConfig(level=logging.DEBUG)
# Initialise
# Ensure the CSV dump directory exists before on_ticks appends to it.
pathlib.Path('data/unstructured/live/bank_nifty').mkdir(parents=True, exist_ok=True)
def myconverter(o):
    """Render a datetime as its string form for CSV dumping.

    Non-datetime values fall through to None, matching the original's
    implicit return.
    """
    return o.__str__() if isinstance(o, datetime.datetime) else None
# Kite websocket ticker; the day's access token is refreshed by a morning
# cron and read from Redis via the redis_tokens helper module.
kws = KiteTicker("username_goes_here", redis_tokens.user_token)
# Slack client used to post volume-spike alerts.
sc = SlackClient("token_goes_here")
# NOTE(review): placeholder connection string — real creds injected elsewhere.
client = pymongo.MongoClient("mongo_creds_goes_here")
# Rolling tick state shared with notify(); reset at market open (09:15).
old_vol = 0
prev_price = 0
prev_buy_qty = 0
prev_sell_qty = 0
def notify(ltt, last_price, buy_qty, sell_qty, new_vol=1000):
    """Track cumulative volume between ticks; record and alert on big jumps.

    ltt        -- last trade time (datetime) of the current tick
    last_price -- last traded price
    buy_qty / sell_qty -- pending buy/sell quantities
    new_vol    -- cumulative traded volume so far today

    Side effects: updates module-level old_vol/prev_* state, inserts a
    document into brain.bnf_vol and posts to Slack when the volume traded
    since the previous tick is >= 4000.
    """
    global old_vol
    global prev_price
    global prev_buy_qty
    global prev_sell_qty
    # Reset the rolling state on the session's first tick (09:15). The
    # original compared against datetime(2013, 1, 31, 9, 15).time() — only
    # the time-of-day ever mattered, so a plain time object is equivalent.
    if ltt.time() == datetime.time(9, 15):
        old_vol = 0
        prev_price = 0
        prev_buy_qty = 0
        prev_sell_qty = 0
    vall = new_vol - old_vol  # volume traded since the previous tick
    logging.debug(vall)
    market_buy_or_sell = "None"
    # market_buy_or_sell logic goes here
    if vall >= 4000:  # spike threshold, same value the minute aggregator uses
        db = client.brain
        # Collection.insert() is deprecated and removed in modern PyMongo;
        # insert_one is the supported equivalent for a single document.
        db.bnf_vol.insert_one(
            {
                "item": "vol_change",
                "stock": "BNF OCT FUT",
                "vol_change": vall,
                "prev_price": prev_price,
                "last_price": last_price,
                "prev_buy_qty": prev_buy_qty,
                "buy_qty": buy_qty,
                "prev_sell_qty": prev_sell_qty,
                "sell_qty": sell_qty,
                "created_at": datetime.datetime.now()
            }
        )
        print('notifying...')
        sc.api_call(
            "chat.postMessage",
            channel="channel_id",
            username="Future",
            text="Volume change(BNF OCT FUT) - {} | Price: ({} to {}) | Buys: ({} to {}) | Sells: ({} to {})".format(vall, prev_price, last_price, prev_buy_qty, buy_qty, prev_sell_qty, sell_qty)
        )  # raise alarm, there's a new huge volume in market, let's check if we can follow their path to profit
    # Remember this tick for the next comparison.
    old_vol = new_vol
    prev_price = last_price
    prev_buy_qty = buy_qty
    prev_sell_qty = sell_qty
def on_ticks(ws, ticks):
    """Websocket tick callback.

    Runs the volume-spike check and appends the raw tick — including both
    sides of the 5-level market depth — to today's CSV dump file.
    """
    try:
        notify(ticks[0]['last_trade_time'], ticks[0]['last_price'],
               ticks[0]['buy_quantity'], ticks[0]['sell_quantity'],
               ticks[0]['volume'])
        # Flatten each depth level to [quantity, price, orders].
        md_buy_arr = []
        for tick in ticks[0]['depth']['buy']:
            md_buy_arr.append([tick['quantity'], tick['price'], tick['orders']])
        md_sell_arr = []
        for tick in ticks[0]['depth']['sell']:
            md_sell_arr.append([tick['quantity'], tick['price'], tick['orders']])
        row = [ticks[0]['last_price'], ticks[0]['last_quantity'],
               ticks[0]['volume'], myconverter(ticks[0]['last_trade_time']),
               ticks[0]['average_price'], ticks[0]['buy_quantity'],
               ticks[0]['sell_quantity'], md_buy_arr, md_sell_arr]
        # `with` closes the file; the original's explicit close() inside
        # the with-block was redundant and has been dropped.
        with open('data/unstructured/live/bank_nifty/live_full-' + "{}".format(datetime.datetime.today().strftime('%d-%m-%Y')) + '.csv', 'a') as csvFile:
            writer = csv.writer(csvFile)
            writer.writerow(row)
    except Exception:
        # The original bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt and hid the real error behind a fixed message;
        # log the traceback so failures are diagnosable.
        logging.exception("on_ticks failed")
def on_connect(ws, response):
    """Successful-connect callback.

    Subscribes to the single Bank Nifty futures instrument token and puts
    it into full-quote mode so ticks carry depth and quantities.
    """
    instrument_tokens = [14627842]
    ws.subscribe(instrument_tokens)
    ws.set_mode(ws.MODE_FULL, instrument_tokens)
def on_close(ws, code, reason):
    """Connection-closed callback.

    Stops the ticker's main loop; after ws.stop() no reconnection is
    attempted and kws.connect() returns.
    """
    ws.stop()
# Assign the callbacks.
kws.on_ticks = on_ticks
kws.on_connect = on_connect
kws.on_close = on_close
# Infinite loop on the main thread. Nothing after this will run.
# You have to use the pre-defined callbacks to manage subscriptions.
kws.connect()
# Every morning at 8:30 AM a cron runs on my private server.
# The cron calls a script that logs in to the stock broker,
# gets the latest token and stores it in Redis.
# This script reads that token back; it is later used in notifier.py.
import redis
import json
# NOTE(review): default localhost connection — redis creds/host go here.
redis_db = redis.StrictRedis()  # redis creds
# The login cron stores a JSON blob under the 'user' key; extract the
# access token so notifier.py can use it as redis_tokens.user_token.
user_key = redis_db.get('user')
datastore_user = json.loads(user_key)
user_token = datastore_user['token']
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment