Skip to content

Instantly share code, notes, and snippets.

@dgnsrekt
Last active October 17, 2020 13:40
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save dgnsrekt/c6ac5a5a295cc734a44b0ca9ce87b0f3 to your computer and use it in GitHub Desktop.
# Collect cashtags (e.g. "$AAPL") from @eWhispers' recent tweets via a local
# Nitter instance, skipping pinned tweets and retweets.
# NOTE(review): the paste lost all indentation; block structure reconstructed
# from the control-flow keywords — confirm against the original gist.
cashtaglist = []
with NitterScraper(port=8008) as nitter:
    for tweet in nitter.get_tweets("eWhispers", pages=1):
        if tweet.is_pinned:
            continue
        if tweet.is_retweet:
            continue
        if tweet.entries.cashtags:
            cashtaglist += tweet.entries.cashtags
        print(".", end="", flush=True)  # Simple progress bar.
print()  # End progress bar with newline.

# Strip the leading "$", trim whitespace, and dedupe into a sorted list.
cashtaglist = sorted(set(map(lambda cashtag: cashtag.replace("$", "").strip(), cashtaglist)))
# Build the final watchlist: for each call chain keep the first out-of-the-money
# contract, then concatenate, sort by implied volatility, and drop bookkeeping
# columns before printing.
# NOTE(review): the paste lost all indentation (reconstructed here), and
# `call_chains` is populated by a later section of this paste — the pasted
# ordering is scrambled relative to the runnable script.
options_watchlist = []
for chain in call_chains:
    chain_dataframe = chain.dataframe
    # Boolean mask of rows that are NOT in the money.
    otm = chain_dataframe["in_the_money"] == False
    # First OTM contract only (chains are assumed sorted by strike — TODO confirm).
    single_contract = chain_dataframe[otm].head(1)
    options_watchlist.append(single_contract)

final = pandas.concat(options_watchlist, ignore_index=True)
final["expiration"] = final["expiration_date"].dt.date
final.sort_values(by="implied_volatility", inplace=True)
final.reset_index(inplace=True)
final.drop(
    columns=["index", "timestamp", "contract_name", "expiration_date", "in_the_money"],
    inplace=True,
)
print(final)
# NOTE(review): the original paste repeated this import block twice verbatim;
# the duplicates are collapsed here and the imports grouped
# stdlib / third-party per PEP 8.
from concurrent.futures import ThreadPoolExecutor, as_completed

import pandas
from nitter_scraper import NitterScraper
from requests_whaor import RequestsWhaor
from yfs import fuzzy_search, get_options_page
# NOTE(review): duplicate of the cashtag-scrape section earlier in this paste.
# Scrape cashtags from @eWhispers via a local Nitter instance, skipping pinned
# tweets and retweets; indentation reconstructed (the paste flattened it).
cashtaglist = []
with NitterScraper(port=8008) as nitter:
    for tweet in nitter.get_tweets("eWhispers", pages=1):
        if tweet.is_pinned:
            continue
        if tweet.is_retweet:
            continue
        if tweet.entries.cashtags:
            cashtaglist += tweet.entries.cashtags
        print(".", end="", flush=True)  # Simple progress bar.
print()  # End progress bar with newline.

# Strip the leading "$", trim whitespace, and dedupe into a sorted list.
cashtaglist = sorted(set(map(lambda cashtag: cashtag.replace("$", "").strip(), cashtaglist)))
# Two-phase scrape through a pool of rotating Tor proxies (RequestsWhaor):
#   1) validate each cashtag as a real ticker symbol via fuzzy_search;
#   2) fetch the first call-option chain expiring >= 60 days out for each
#      validated symbol.
# NOTE(review): indentation reconstructed — the paste flattened every line to
# column 0, which is not valid Python.
valid_symbols = []
call_chains = []

MAX_THREADS = 6  # upper bound on concurrent worker threads
MAX_PROXIES = 6  # number of Tor circuits RequestsWhaor keeps open

with RequestsWhaor(onion_count=MAX_PROXIES, max_threads=MAX_THREADS) as request_whaor:
    with ThreadPoolExecutor(max_workers=MAX_THREADS) as executor:
        # Phase 1: symbol validation.
        futures = [
            executor.submit(fuzzy_search, ticker, session=request_whaor)
            for ticker in cashtaglist
        ]
        for future in as_completed(futures):
            try:
                # timeout if the response takes too long.
                result = future.result(timeout=60)
                if result:
                    valid_symbols.append(result.symbol)
                print(".", end="", flush=True)  # Simple progress bar.
            except Exception as exc:
                # We want to pass on exceptions.
                print("\n", exc)
        print()  # End progress bar with newline.

        print("twitter cashtag count:", len(cashtaglist))
        print("validated symbol count:", len(valid_symbols))

        request_whaor.restart_onions()  # Fresh proxy pool.

        # Phase 2: option-chain download for the validated symbols.
        futures = [
            executor.submit(
                get_options_page,
                ticker,
                after_days=60,
                first_chain=True,
                use_fuzzy_search=False,
                session=request_whaor,
                page_not_found_ok=True,
            )
            for ticker in valid_symbols
        ]
        for future in as_completed(futures):
            try:
                # timeout if the response takes too long.
                result = future.result(timeout=60)
                if result:
                    call_chains.append(result.calls)
                print(".", end="", flush=True)  # Simple progress bar.
            except Exception as exc:
                # We want to pass on exceptions.
                print("\n", exc)
        print()  # End progress bar with newline.
# NOTE(review): duplicate of the watchlist-builder section earlier in this
# paste; indentation reconstructed (the paste flattened it).
# Keep the first out-of-the-money contract from each call chain, concatenate,
# sort by implied volatility, and drop bookkeeping columns before printing.
options_watchlist = []
for chain in call_chains:
    chain_dataframe = chain.dataframe
    # Boolean mask of rows that are NOT in the money.
    otm = chain_dataframe["in_the_money"] == False
    single_contract = chain_dataframe[otm].head(1)
    options_watchlist.append(single_contract)

final = pandas.concat(options_watchlist, ignore_index=True)
final["expiration"] = final["expiration_date"].dt.date
final.sort_values(by="implied_volatility", inplace=True)
final.reset_index(inplace=True)
final.drop(
    columns=["index", "timestamp", "contract_name", "expiration_date", "in_the_money"],
    inplace=True,
)
print(final)
# NOTE(review): duplicate of the RequestsWhaor scraping section earlier in this
# paste; indentation reconstructed. Validates cashtags as tickers, then fetches
# the first call chain expiring >= 60 days out for each validated symbol.
with RequestsWhaor(onion_count=MAX_PROXIES, max_threads=MAX_THREADS) as request_whaor:
    with ThreadPoolExecutor(max_workers=MAX_THREADS) as executor:
        # Phase 1: symbol validation.
        futures = [
            executor.submit(fuzzy_search, ticker, session=request_whaor)
            for ticker in cashtaglist
        ]
        for future in as_completed(futures):
            try:
                # timeout if the response takes too long.
                result = future.result(timeout=60)
                if result:
                    valid_symbols.append(result.symbol)
                print(".", end="", flush=True)  # Simple progress bar.
            except Exception as exc:
                # We want to pass on exceptions.
                print("\n", exc)
        print()  # End progress bar with newline.

        print("twitter cashtag count:", len(cashtaglist))
        print("validated symbol count:", len(valid_symbols))

        request_whaor.restart_onions()  # Fresh proxy pool.

        # Phase 2: option-chain download for the validated symbols.
        futures = [
            executor.submit(
                get_options_page,
                ticker,
                after_days=60,
                first_chain=True,
                use_fuzzy_search=False,
                session=request_whaor,
                page_not_found_ok=True,
            )
            for ticker in valid_symbols
        ]
        for future in as_completed(futures):
            try:
                # timeout if the response takes too long.
                result = future.result(timeout=60)
                if result:
                    call_chains.append(result.calls)
                print(".", end="", flush=True)  # Simple progress bar.
            except Exception as exc:
                # We want to pass on exceptions.
                print("\n", exc)
        print()  # End progress bar with newline.
# NOTE(review): duplicate of the accumulator/config assignments that appear
# earlier in this paste. As placed here (after the scraping sections) they
# would re-bind the names and discard any scraped results if the paste were
# run top-to-bottom — the pasted ordering is scrambled relative to the
# original runnable script.
valid_symbols = []
call_chains = []
MAX_THREADS = 6
MAX_PROXIES = 6
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment