# Gist by @IFOWatch, created March 9, 2021 09:02
import datetime
import requests
import time
from collections import defaultdict, OrderedDict
import pandas as pd
from bokeh.plotting import figure, show
from bokeh.layouts import row
from bokeh.transform import cumsum
from math import pi
theme = (
    '#23de7b', '#00b8ff', '#4900ff', '#9600ff',  # 4 colors for normies
    '#ff00c1', '#990026',  # 2 colors for medium-small whales
    '#999', '#888', '#777', '#666', '#555', '#444', '#333', '#222', '#111', '#000'  # 10 colors for top 10 addys
)

class HashableTxDict(dict):
    """Transaction dict hashable by its tx hash, so a list of txs can be de-duplicated via set()."""

    def __hash__(self):
        return hash(self['hash'])

    def human_token_value(self):
        # convert the raw integer amount to whole tokens using the token's decimals
        return int(float(self['value']) / (10 ** int(self['tokenDecimal'])))

def get_transactions_page(contract, address, start_block=0, request_size=10000):
    """Fetch one page of BEP-20 transfer events for `address` from the BscScan API, oldest first."""
    your_api_key = "YourApiKeyToken"
    base_url = "https://api.bscscan.com/api?module=account&action=tokentx"
    page_url = f"&contractaddress={contract}&address={address}&page=1&offset={request_size}&sort=asc&apikey={your_api_key}&startblock={start_block}"
    result = requests.get(base_url + page_url).json()
    return result['result']

def get_all_transactions(contract, address, request_size=10000):
    """Page through BscScan results by block number until a short page signals the end."""
    transactions = get_transactions_page(contract, address, 0, request_size)
    last_block = int(transactions[-1]['blockNumber'])
    finished = False
    while not finished:
        next_txs = get_transactions_page(contract, address, last_block, request_size)
        last_block = int(next_txs[-1]['blockNumber'])
        finished = len(next_txs) != request_size
        transactions.extend(next_txs)
        time.sleep(.4)  # brief pause between API requests
    # at this point, some transactions are duplicated since we are checking some blocks twice
    # convert to a set (and back to a list) to remove duplicates
    wrapped_transactions = [HashableTxDict(tx) for tx in transactions]
    return sorted(set(wrapped_transactions), key=lambda x: int(x['timeStamp']))

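# Usage sketch (hypothetical addresses): pull every transfer of a BEP-20 token
# received by one address, oldest first. Requires a real BscScan API key above.
#   txs = get_all_transactions("0x<token contract>", "0x<receiving address>")
#   print(len(txs), txs[0]['blockNumber'], txs[-1]['blockNumber'])
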
def transactions_to_fixed_dict(transactions, min_num_watch_to_ungroup=100000, windows=None, token_or_transaction='token'):
    """Bucket transactions into value windows; any address that collected more than
    `min_num_watch_to_ungroup` tokens gets its own entry keyed by address."""
    if windows is None:
        windows = [min_num_watch_to_ungroup / 1000, min_num_watch_to_ungroup / 100, min_num_watch_to_ungroup / 20,
                   min_num_watch_to_ungroup / 10, min_num_watch_to_ungroup / 5, min_num_watch_to_ungroup]
    windows = [int(i) for i in windows]
    window_value_dict = defaultdict(int)
    for idx, val in enumerate(windows):
        max_v = val
        min_v = 0 if idx == 0 else windows[idx - 1]
        key = f"{min_v} to {max_v} watch collected"
        window_value_dict[key] = (min_v, max_v)
    result = OrderedDict({key: 0 for key in window_value_dict.keys()})
    for tx in transactions:
        tx_val = tx.human_token_value()
        # count either tokens collected or number of transactions, depending on mode
        added_val = tx_val if token_or_transaction == 'token' else 1
        if tx_val > min_num_watch_to_ungroup:
            # whale: track this address individually
            if tx['to'] not in result:
                result[tx['to']] = 0
            result[tx['to']] += added_val
        else:
            for key, tup in window_value_dict.items():
                # inclusive bounds so values landing exactly on a window edge are not dropped
                if tup[0] <= tx_val <= tup[1]:
                    result[key] += added_val
                    break
    return result

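# Illustrative shape of the result for min_num_watch_to_ungroup=100000
# (bucket keys come from the default windows; the counts here are made up):
#   OrderedDict([('0 to 100 watch collected', 1500),
#                ('100 to 1000 watch collected', 900),
#                ...
#                ('20000 to 100000 watch collected', 40),
#                ('0x<whale address>', 250000)])  # one entry per address over the threshold
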
def make_pie(x, now, title, unit):
    """Render a Bokeh pie chart from a {category: value} mapping."""
    data = pd.Series(x).reset_index(name='value').rename(columns={'index': unit})
    data['angle'] = data['value'] / data['value'].sum() * 2 * pi
    data['as_pct'] = data['value'] / data['value'].sum()
    # assumes the theme has at least as many colors as there are wedges (6 windows + whale addresses)
    data['color'] = list(theme)[:len(data)]
    p = figure(plot_height=700, plot_width=900, title=title, toolbar_location=None,
               tools="hover", tooltips="@{}: @value {} (@as_pct{{0.00%}})".format(unit, unit),
               x_range=(-0.5, 1.0))
    p.wedge(x=0, y=1, radius=0.4,
            start_angle=cumsum('angle', include_zero=True), end_angle=cumsum('angle'),
            line_color="white", fill_color='color', legend_field=unit, source=data)
    p.axis.axis_label = None
    p.axis.visible = False
    p.grid.grid_line_color = None
    return p

def print_share(watch_token_dists, max_n_watch_collected):
    """Report what share of transactions and of distributed tokens came from collections below a threshold."""
    total_txs = len(watch_token_dists)
    less_than_n_tx_count = sum(1 for amt in watch_token_dists if amt < max_n_watch_collected)
    less_than_n_tx_count_as_percent = 100.0 * less_than_n_tx_count / float(total_txs)
    less_than_n_watch_total = sum(amt for amt in watch_token_dists if amt < max_n_watch_collected)
    less_than_n_watch_total_as_percent = 100.0 * less_than_n_watch_total / 8000000.0  # 8M $WATCH offered in the IFO
    print(f"Transactions of less than {max_n_watch_collected} watch accounted for {less_than_n_tx_count_as_percent:.2f}% of transactions ({less_than_n_tx_count} / {total_txs})")
    print(f"Transactions of less than {max_n_watch_collected} watch accounted for {less_than_n_watch_total_as_percent:.2f}% of watch tokens distributed ({less_than_n_watch_total} / 8000000)")
    print("--------------------------")

def identify_funding_goal_participant_index(watch_tokens_distributed, funding_goal_dollars):
    """Walk contributors in order and report the one at which cumulative pledges hit the funding goal."""
    ifo_funds_raised = 0
    contributor = len(watch_tokens_distributed)
    for idx, val in enumerate(watch_tokens_distributed):
        # convert WATCH collected to an estimated dollar pledge (hardcoded conversion factors from the original script)
        user_pledged = val * 0.10 * 711.0
        ifo_funds_raised += user_pledged
        if ifo_funds_raised >= funding_goal_dollars:
            contributor_s = f"Found funding limit on contributor {idx},"
            pledge_s = f"who pledged ${user_pledged:,.02f}, and collected {val} $WATCH."
            raised_s = f"We have raised ${ifo_funds_raised:,.02f} so far."
            print(contributor_s, pledge_s, raised_s)
            contributor = idx
            break
    return contributor

def simulate_with_individual_contributor_limit(watch_token_dists, individual_limit=10000):
    """Re-run the raise with a per-contributor dollar cap and report how many contributors it would affect."""
    ifo_funds_raised = 0
    users_limited = 0
    for val in watch_token_dists:
        user_pledged_dollars = val * 0.10 * 711.0  # same hardcoded conversion as above
        if user_pledged_dollars > individual_limit:
            user_pledged_dollars = individual_limit
            users_limited += 1
        ifo_funds_raised += user_pledged_dollars
    funding_goal_factor = ifo_funds_raised / 800000  # $800k funding goal
    limit_s = f"With an individual limit of ${individual_limit:,}, {users_limited} contributors were limited,"
    goal_s = f"and total funds raised: ${ifo_funds_raised:,.02f} ({funding_goal_factor:,.02f}x goal)"
    print(limit_s, goal_s)

if __name__ == "__main__":
    yieldwatch_contract = "0x7a9f28eb62c791422aa23ceae1da9c847cbec9b0"
    ifo_address = "0x55344b55c71ad8834c397e6e08df5195cf84fe6d"
    now = datetime.datetime.now()
    print(f"Data retrieved: {now}")
    print("-------------------------------------------")
    all_ifo_collection_transactions = get_all_transactions(yieldwatch_contract, ifo_address)
    # remove the first transaction, which is populating the contract with the initial 8M tokens
    print(f"remove initial 8m token population from data set: {all_ifo_collection_transactions.pop(0).human_token_value()}")
    print("--------------------------")
    all_ifo_collection_transactions = sorted(all_ifo_collection_transactions, key=lambda x: x.human_token_value())
    watch_token_dists = [tx.human_token_value() for tx in all_ifo_collection_transactions]
    print_share(watch_token_dists, 1000)
    print_share(watch_token_dists, 10000)
    print_share(watch_token_dists, 100000)
    print_share(watch_token_dists, 200000)
    print_share(watch_token_dists, 300000)
    tx_pie_dict = transactions_to_fixed_dict(all_ifo_collection_transactions, 100000, token_or_transaction='tx')
    tx_pie = make_pie(tx_pie_dict, now, f"$WATCH Token Distribution as share of all transactions ({now})", "transactions")
    token_pie_dict = transactions_to_fixed_dict(all_ifo_collection_transactions, 100000)
    token_pie = make_pie(token_pie_dict, now, f"$WATCH Token Distribution as share of all tokens ({now})", "watch")
    identify_funding_goal_participant_index(watch_token_dists, 800000)
    simulate_with_individual_contributor_limit(watch_token_dists, 10000)
    simulate_with_individual_contributor_limit(watch_token_dists, 20000)
    simulate_with_individual_contributor_limit(watch_token_dists, 50000)
    simulate_with_individual_contributor_limit(watch_token_dists, 100000)
    show(row(token_pie, tx_pie))
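
# Optional (sketch): to write the charts to a standalone HTML file instead of
# opening a browser window, Bokeh's output_file()/save() can replace show().
# The output filename below is only an example.
#   from bokeh.io import output_file, save
#   output_file("watch_ifo_distribution.html")
#   save(row(token_pie, tx_pie))

# ---------------------------------------------------------------------------
# requirements.txt: pinned dependencies for the script above
# ---------------------------------------------------------------------------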
bokeh==2.3.0
certifi==2020.12.5
chardet==4.0.0
idna==2.10
Jinja2==2.11.3
MarkupSafe==1.1.1
numpy==1.20.1
packaging==20.9
pandas==1.2.3
Pillow==8.1.2
pyparsing==2.4.7
python-dateutil==2.8.1
pytz==2021.1
PyYAML==5.4.1
requests==2.25.1
six==1.15.0
tornado==6.1
typing-extensions==3.7.4.3
urllib3==1.26.3