net prem ticks dashboard v2
import os
import httpx
import polars as pl
from rich import print as rprint
from lets_plot import *
LetsPlot.setup_html()
We want to use the net prem ticks and OHLC endpoints to create a thumbnail dashboard for a group of tickers — a "sector" dashboard like semiconductors. So the first thing we will do, after grabbing our API token and setting up headers, will be a call to the net prem ticks endpoint:
https://api.unusualwhales.com/docs#/operations/PublicApi.TickerController.net_prem_ticks
We can see in the documentation that each record represents the data for a single minute's ticks, and the documentation suggests that in order to build a daily chart we will need to add the previous data to the current record, so we will tackle that after getting the data into a DataFrame for easy manipulation.
# Pull the Unusual Whales API token from the environment so the key never
# lives in the notebook itself.
uw_token = os.environ['UW_TOKEN'] # Set this to your own token 'abc123etc'
# NOTE(review): UW docs show 'Authorization: Bearer <token>'; the raw token
# works here (status 200 below) -- confirm which form your account expects.
headers = {'Accept': 'application/json, text/plain', 'Authorization': uw_token}
ticker = 'SMH'
target_date = '2025-01-23'  # trading day to analyze, YYYY-MM-DD
# Per-minute net premium ticks for one ticker on one date.
net_prem_ticks_url = f'https://api.unusualwhales.com/api/stock/{ticker}/net-prem-ticks'
net_prem_ticks_params = {'date': target_date}
net_prem_ticks_rsp = httpx.get(net_prem_ticks_url, headers=headers, params=net_prem_ticks_params)
net_prem_ticks_rsp.status_code
200
Success! If we were to execute net_prem_ticks_rsp.json(), we would get data like this:
{'data': [{'date': '2025-01-23',
'net_call_premium': '-167229.0000',
'net_call_volume': -270,
'net_put_premium': '9946.0000',
'net_put_volume': -129,
'tape_time': '2025-01-23T14:30:00Z'},
{'date': '2025-01-23',
'net_call_premium': '-18254.0000',
'net_call_volume': 35,
'net_put_premium': '21329.0000',
'net_put_volume': 101,
'tape_time': '2025-01-23T14:31:00Z'},
...
{'date': '2025-01-23',
'net_call_premium': '-844559.0000',
'net_call_volume': -2526,
'net_put_premium': '-4246.0000',
'net_put_volume': -68,
'tape_time': '2025-01-23T20:59:00Z'}]}
Which is perfect for a polars DataFrame. I like using a DataFrame for this kind of information because we can transform and add to the existing data quickly almost as if it were in a spreadsheet. Let's use our DataFrame to:
- Convert strings to numerics or dates
- Add an eastern time zone column and a plot-friendly HH:MM:SS column too
- Calculate the cumulative net premium figure for calls and puts (since each observation only represents that single minute in time)
OK let's go:
raw_npt_df = pl.DataFrame(net_prem_ticks_rsp.json()['data'])

# Clean-up pass: numeric casts, an Eastern-time column plus a plot-friendly
# HH:MM:SS label, and running (cumulative) net premium totals in $M.
clean_npt_df = (
    raw_npt_df
    .with_columns(
        pl.col('net_call_premium').cast(pl.Float64),
        pl.col('net_put_premium').cast(pl.Float64),
        pl.col('tape_time').cast(pl.Datetime),
    )
    .with_columns(
        pl.col('tape_time').dt.convert_time_zone('America/New_York').alias('tape_time_tz')
    )
    .with_columns(
        pl.col('tape_time_tz').dt.strftime('%H:%M:%S').alias('hms_str'),
        # Each record covers only its own minute, so cumulate to get the
        # running daily figure (scaled to millions for readable axes).
        (pl.col('net_call_premium').cum_sum() / 1_000_000).alias('cumsum_net_call_prem_in_mil'),
        (pl.col('net_put_premium').cum_sum() / 1_000_000).alias('cumsum_net_put_prem_in_mil'),
    )
)
clean_npt_df
date | tape_time | net_call_volume | net_call_premium | net_put_volume | net_put_premium | put_volume | call_volume | call_volume_bid_side | put_volume_bid_side | call_volume_ask_side | put_volume_ask_side | net_delta | tape_time_tz | hms_str | cumsum_net_call_prem_in_mil | cumsum_net_put_prem_in_mil |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
str | datetime[μs] | i64 | f64 | i64 | f64 | i64 | i64 | i64 | i64 | i64 | i64 | str | datetime[μs, America/New_York] | str | f64 | f64 |
"2025-01-23" | 2025-01-23 14:30:00 | -270 | -167229.0 | -129 | 9946.0 | 676 | 670 | 455 | 379 | 185 | 250 | "-7817.91224136… | 2025-01-23 09:30:00 EST | "09:30:00" | -0.167229 | 0.009946 |
"2025-01-23" | 2025-01-23 14:31:00 | 35 | -18254.0 | 101 | 21329.0 | 436 | 167 | 65 | 166 | 100 | 267 | "-2193.66699971… | 2025-01-23 09:31:00 EST | "09:31:00" | -0.185483 | 0.031275 |
"2025-01-23" | 2025-01-23 14:32:00 | -2 | -6737.0 | 83 | 41147.0 | 119 | 90 | 40 | 11 | 38 | 94 | "-3698.79658202… | 2025-01-23 09:32:00 EST | "09:32:00" | -0.19222 | 0.072422 |
"2025-01-23" | 2025-01-23 14:33:00 | -16 | -2032.0 | -24 | -19513.0 | 159 | 67 | 40 | 89 | 24 | 65 | "1582.909327226… | 2025-01-23 09:33:00 EST | "09:33:00" | -0.194252 | 0.052909 |
"2025-01-23" | 2025-01-23 14:34:00 | -25 | -10960.0 | -6 | -14996.0 | 104 | 96 | 60 | 40 | 35 | 34 | "1576.409074520… | 2025-01-23 09:34:00 EST | "09:34:00" | -0.205212 | 0.037913 |
… | … | … | … | … | … | … | … | … | … | … | … | … | … | … | … | … |
"2025-01-23" | 2025-01-23 20:55:00 | 181 | 41562.0 | -26 | -6665.0 | 26 | 328 | 73 | 26 | 254 | 0 | "4563.567324782… | 2025-01-23 15:55:00 EST | "15:55:00" | -2.231713 | 0.327001 |
"2025-01-23" | 2025-01-23 20:56:00 | -6 | -324.0 | 42 | 588.0 | 180 | 17 | 11 | 65 | 5 | 107 | "-270.807460657… | 2025-01-23 15:56:00 EST | "15:56:00" | -2.232037 | 0.327589 |
"2025-01-23" | 2025-01-23 20:57:00 | -7 | 8863.0 | -2374 | -164831.0 | 2562 | 48 | 22 | 2440 | 15 | 66 | "9756.877312182… | 2025-01-23 15:57:00 EST | "15:57:00" | -2.223174 | 0.162758 |
"2025-01-23" | 2025-01-23 20:58:00 | -8 | -861.0 | -51 | -23117.0 | 124 | 53 | 22 | 86 | 14 | 35 | "1615.949673450… | 2025-01-23 15:58:00 EST | "15:58:00" | -2.224035 | 0.139641 |
"2025-01-23" | 2025-01-23 20:59:00 | -2526 | -844559.0 | -68 | -4246.0 | 135 | 4687 | 3600 | 81 | 1074 | 13 | "-65572.7645244… | 2025-01-23 15:59:00 EST | "15:59:00" | -3.068594 | 0.135395 |
Nice! Now the interactive plotting library that I prefer in
these notebook settings is lets-plot, and lets-plot prefers
"narrow" DataFrames, so we are going to use a method called
melt()
to create a plot-ready DataFrame:
# lets-plot prefers "narrow" (long) data: one row per (time, series) pair.
# Melt the two cumulative-premium columns into a single value column keyed
# by a 'flow_type' label.
value_cols = [
    'cumsum_net_call_prem_in_mil',
    'cumsum_net_put_prem_in_mil',
]
plot_ready_npt_df = clean_npt_df.melt(
    id_vars=['hms_str'],
    value_vars=value_cols,
    variable_name='flow_type',
    value_name='cumsum_net_prem_in_mil',
)
plot_ready_npt_df
hms_str | flow_type | cumsum_net_prem_in_mil |
---|---|---|
str | str | f64 |
"09:30:00" | "cumsum_net_cal… | -0.167229 |
"09:31:00" | "cumsum_net_cal… | -0.185483 |
"09:32:00" | "cumsum_net_cal… | -0.19222 |
"09:33:00" | "cumsum_net_cal… | -0.194252 |
"09:34:00" | "cumsum_net_cal… | -0.205212 |
… | … | … |
"15:55:00" | "cumsum_net_put… | 0.327001 |
"15:56:00" | "cumsum_net_put… | 0.327589 |
"15:57:00" | "cumsum_net_put… | 0.162758 |
"15:58:00" | "cumsum_net_put… | 0.139641 |
"15:59:00" | "cumsum_net_put… | 0.135395 |
With that we are ready to plot (with some extra styling)!
# Unusual Whales dark-theme palette (hex colors used across all plots below).
UW_DARK_THEME = dict(
    red='#dc3545',         # put-flow line
    yellow='#ffc107',      # price line
    teal='#20c997',        # call-flow line
    black='#161c2d',       # plot/panel background
    gray_medium='#748196', # default line color
    gray_light='#f9fbfd',  # text and outlines
)
def uw_dark_theme(colors: dict, show_legend: bool = True) -> theme:
    """Build a dark lets-plot theme from the UW color palette.

    Args:
        colors: palette dict; reads the 'black', 'gray_medium' and
            'gray_light' keys.
        show_legend: when True the legend is placed below the plot,
            otherwise it is hidden entirely.

    Returns:
        A lets-plot ``theme`` feature ready to be added to a plot.
    """
    base = theme_none() + theme(
        plot_background=element_rect(fill=colors['black']),
        panel_background=element_rect(fill=colors['black']),
        panel_grid_major=element_blank(),
        panel_grid_minor=element_blank(),
        axis_ontop=True,
        axis_ticks=element_blank(),
        axis_tooltip=element_rect(color=colors['gray_light']),
        tooltip=element_rect(color=colors['gray_light'], fill=colors['black']),
        line=element_line(color=colors['gray_medium'], size=1),
        rect=element_rect(color=colors['black'], fill=colors['black'], size=2),
        text=element_text(color=colors['gray_light'], size=10),
        legend_background=element_rect(color=colors['gray_light'], fill=colors['black'], size=2),
        plot_title=element_text(hjust=0.5, size=16, color=colors['gray_light']),
    )
    legend_position = 'bottom' if show_legend else 'none'
    return base + theme(legend_position=legend_position)
# Series-name -> line-color lookup shared by every plot below.
color_mapping = dict(
    cumsum_net_call_prem_in_mil=UW_DARK_THEME['teal'],
    cumsum_net_put_prem_in_mil=UW_DARK_THEME['red'],
    price=UW_DARK_THEME['yellow'],
)
# Cumulative net premium chart: one colored line per flow_type. Legend is
# suppressed; the call/put color convention is fixed by color_mapping.
npt_plot = (
    ggplot(plot_ready_npt_df)
    + aes(x='hms_str', y='cumsum_net_prem_in_mil', color='flow_type')
    + geom_line(size=1)
    + scale_color_manual(values=color_mapping)
    + uw_dark_theme(UW_DARK_THEME, show_legend=False)
    + ggtitle(f'{target_date}: {ticker}')
    + xlab('Timestamp')
    + ylab('Net Prem ($M)')
)
npt_plot.show()
Would be cool to see the price of the underlying stock on a similar chart, say right underneath this one, so let's make that happen. We can use the OHLC endpoint:
https://api.unusualwhales.com/docs#/operations/PublicApi.TickerController.ohlc
To get 1-min data and in this case just work with closing prices. We're going to use a lot of the same tactics that we did in the steps above, so the explanations will be a bit shorter over the next few cells:
# 1-minute OHLC candles for the same ticker/date; candle size is part of
# the URL path, the date is a query parameter.
candle_size = '1m'
ohlc_url = f'https://api.unusualwhales.com/api/stock/{ticker}/ohlc/{candle_size}'
ohlc_params = {'date': target_date}
ohlc_rsp = httpx.get(ohlc_url, headers=headers, params=ohlc_params)
ohlc_rsp.status_code
200
Success, now when we execute ohlc_rsp.json()
we get
results like this:
>>> ohlc_rsp.json()
{'data': [{'close': '265.6101',
'end_time': '2025-01-24T00:56:00Z',
'high': '265.6101',
'low': '265.6101',
'market_time': 'po',
'open': '265.6101',
'start_time': '2025-01-24T00:55:00Z',
'total_volume': 4704743,
'volume': 144},
{'close': '265.63',
'end_time': '2025-01-24T00:54:00Z',
'high': '265.63',
'low': '265.63',
'market_time': 'po',
'open': '265.63',
'start_time': '2025-01-24T00:53:00Z',
'total_volume': 4704599,
'volume': 2572},
{'close': '265.94',
'end_time': '2025-01-24T00:49:00Z',
'high': '265.94',
'low': '265.94',
'market_time': 'po',
'open': '265.94',
'start_time': '2025-01-24T00:48:00Z',
...
'market_time': 'po',
'open': '266.48',
'start_time': '2025-01-23T00:00:00Z',
'total_volume': 6595506,
'volume': 6595506}]}
So let's apply the same transform and enhance strategy from above:
# Clip the OHLC frame to exactly the net-prem ticks' time range by taking
# the first and last tape_time values (the OHLC endpoint also returns
# pre/post-market candles we don't want here).
npt_times = clean_npt_df.get_column('tape_time')
start_dt = npt_times[0]
end_dt = npt_times[-1]

raw_ohlc_df = pl.DataFrame(ohlc_rsp.json()['data'])
clean_ohlc_df = (
    raw_ohlc_df
    .with_columns(
        pl.col('open').cast(pl.Float64),
        pl.col('high').cast(pl.Float64),
        pl.col('low').cast(pl.Float64),
        pl.col('close').cast(pl.Float64),
        pl.col('start_time').cast(pl.Datetime),
    )
    # is_between is closed on both ends, matching start_dt <= t <= end_dt.
    .filter(pl.col('start_time').is_between(start_dt, end_dt))
    .with_columns(
        pl.col('start_time').dt.convert_time_zone('America/New_York').alias('tape_time_tz')
    )
    .with_columns(pl.col('tape_time_tz').dt.strftime('%H:%M:%S').alias('hms_str'))
    .select('start_time', 'tape_time_tz', 'hms_str', 'open', 'high', 'low', 'close')
    .sort('tape_time_tz')
)
clean_ohlc_df
start_time | tape_time_tz | hms_str | open | high | low | close |
---|---|---|---|---|---|---|
datetime[μs] | datetime[μs, America/New_York] | str | f64 | f64 | f64 | f64 |
2025-01-23 14:30:00 | 2025-01-23 09:30:00 EST | "09:30:00" | 263.235 | 263.235 | 262.14 | 262.23 |
2025-01-23 14:31:00 | 2025-01-23 09:31:00 EST | "09:31:00" | 262.22 | 262.39 | 261.9 | 262.23 |
2025-01-23 14:32:00 | 2025-01-23 09:32:00 EST | "09:32:00" | 262.12 | 262.26 | 261.96 | 262.26 |
2025-01-23 14:33:00 | 2025-01-23 09:33:00 EST | "09:33:00" | 262.255 | 262.3891 | 262.07 | 262.34 |
2025-01-23 14:34:00 | 2025-01-23 09:34:00 EST | "09:34:00" | 262.43 | 262.99 | 262.22 | 262.895 |
… | … | … | … | … | … | … |
2025-01-23 20:55:00 | 2025-01-23 15:55:00 EST | "15:55:00" | 266.34 | 266.685 | 266.34 | 266.68 |
2025-01-23 20:56:00 | 2025-01-23 15:56:00 EST | "15:56:00" | 266.7 | 266.725 | 266.57 | 266.63 |
2025-01-23 20:57:00 | 2025-01-23 15:57:00 EST | "15:57:00" | 266.65 | 266.7 | 266.62 | 266.62 |
2025-01-23 20:58:00 | 2025-01-23 15:58:00 EST | "15:58:00" | 266.62 | 266.685 | 266.615 | 266.63 |
2025-01-23 20:59:00 | 2025-01-23 15:59:00 EST | "15:59:00" | 266.64 | 266.86 | 266.62 | 266.86 |
Now that we have our OHLC data collected and ready, we need to create a plot-friendly melted DataFrame:
# Narrow form for plotting: keep only the close, labeled by 'ohlc_type'.
plot_ready_ohlc_df = clean_ohlc_df.melt(
    id_vars=['hms_str'],
    value_vars=['close'],
    variable_name='ohlc_type',
    value_name='price',
)
plot_ready_ohlc_df
hms_str | ohlc_type | price |
---|---|---|
str | str | f64 |
"09:30:00" | "close" | 262.23 |
"09:31:00" | "close" | 262.23 |
"09:32:00" | "close" | 262.26 |
"09:33:00" | "close" | 262.34 |
"09:34:00" | "close" | 262.895 |
… | … | … |
"15:55:00" | "close" | 266.68 |
"15:56:00" | "close" | 266.63 |
"15:57:00" | "close" | 266.62 |
"15:58:00" | "close" | 266.63 |
"15:59:00" | "close" | 266.86 |
And now we are ready to create our price plot and bring it together with the net prem ticks plot! Let's nest them right on top of one another to make it very simple to compare price and net premium flows on the same time-axis:
# Price line styled like the premium chart; legend shown on this panel.
price_plot = (
    ggplot(plot_ready_ohlc_df)
    + aes(x='hms_str', y='price')
    + geom_line(size=1, color=color_mapping['price'])
    + uw_dark_theme(UW_DARK_THEME, show_legend=True)
    + ggtitle(f'{target_date}: {ticker}')
    + xlab('Timestamp')
    + ylab('Close Price')
)

# Stack premium over price so both panels share the same time axis.
plot_grid = (
    gggrid([npt_plot, price_plot], ncol=1, align=True)
    + ggsize(800, 800)
    + theme(plot_background=element_rect(fill=UW_DARK_THEME['black']))
)
plot_grid.show()
Extremely interesting to see SMH price rallying all day while Calls and Puts were net sold, right? My suspicion is that market participants are holding a subset of SMH names and using the ETF as a hedging instrument.
To check this hypothesis, let's create a dashboard of the largest semiconductor names using these exact same tactics, except looping through all the tickers (instead of just working with SMH like we have been doing up to now).
Since we have already done this work once the commentary will be a bit more sparse, but I will add notes as I go along just to make sure everything is clear.
# Sector universe: the largest semiconductor names. Re-run the net-prem-ticks
# request for each ticker, keeping only those that actually returned data.
tickers = ['NVDA', 'TSM', 'AVGO', 'AMD', 'QCOM', 'MU', 'INTC', 'ASML']
target_date = '2025-01-23'

raw_npt_dfs = {}
for ticker in tickers:
    net_prem_ticks_url = f'https://api.unusualwhales.com/api/stock/{ticker}/net-prem-ticks'
    net_prem_ticks_params = {'date': target_date}
    net_prem_ticks_rsp = httpx.get(net_prem_ticks_url, headers=headers, params=net_prem_ticks_params)
    # Parse the body once (the original called .json() twice per response);
    # non-200 responses are treated as "no data" without touching the body.
    tick_records = net_prem_ticks_rsp.json()['data'] if net_prem_ticks_rsp.status_code == 200 else []
    if tick_records:
        raw_npt_dfs[ticker] = pl.DataFrame(tick_records)
    else:
        rprint(f'{ticker} data not found for {target_date}')
OK great, now we have all of our raw DataFrames, let's clean them up then create "melted" versions for the eventual plots:
def _clean_npt(raw_df: pl.DataFrame) -> pl.DataFrame:
    """Apply the single-ticker cleanup: numeric casts, Eastern-time columns,
    and cumulative net premium (in $M) for calls and puts."""
    return (
        raw_df
        .with_columns(
            pl.col('net_call_premium').cast(pl.Float64),
            pl.col('net_put_premium').cast(pl.Float64),
            pl.col('tape_time').cast(pl.Datetime),
        )
        .with_columns(
            pl.col('tape_time').dt.convert_time_zone('America/New_York').alias('tape_time_tz')
        )
        .with_columns(
            # Thumbnails are small, so HH:MM (no seconds) keeps labels short.
            pl.col('tape_time_tz').dt.strftime('%H:%M').alias('hms_str'),
            (pl.col('net_call_premium').cum_sum() / 1_000_000).alias('cumsum_net_call_prem_in_mil'),
            (pl.col('net_put_premium').cum_sum() / 1_000_000).alias('cumsum_net_put_prem_in_mil'),
        )
    )

clean_npt_dfs = {ticker: _clean_npt(raw_df) for ticker, raw_df in raw_npt_dfs.items()}
# Melt each cleaned frame to the narrow (time, flow_type, value) layout.
plot_ready_npt_dfs = {}
for sym, frame in clean_npt_dfs.items():
    plot_ready_npt_dfs[sym] = frame.melt(
        id_vars=['hms_str'],
        value_vars=['cumsum_net_call_prem_in_mil', 'cumsum_net_put_prem_in_mil'],
        variable_name='flow_type',
        value_name='cumsum_net_prem_in_mil',
    )
And let's repeat this for our OHLC data as well, collecting the raw data for each ticker, cleaning each DataFrame, then melting it for plotting purposes:
# OHLC request per ticker. Mirror the net-prem loop's guard so a failed
# request or empty payload is reported instead of raising KeyError('data')
# or storing an empty frame.
raw_ohlc_dfs = {}
for ticker in tickers:
    ohlc_url = f'https://api.unusualwhales.com/api/stock/{ticker}/ohlc/{candle_size}'
    ohlc_params = {'date': target_date}
    ohlc_rsp = httpx.get(ohlc_url, headers=headers, params=ohlc_params)
    candles = ohlc_rsp.json().get('data', []) if ohlc_rsp.status_code == 200 else []
    if candles:
        raw_ohlc_dfs[ticker] = pl.DataFrame(candles)
    else:
        rprint(f'{ticker} OHLC data not found for {target_date}')
# Clean each ticker's OHLC frame and clip it to that ticker's own net-prem
# tick window so both charts share an identical time range.
clean_ohlc_dfs = {}
for ticker, raw_df in raw_ohlc_dfs.items():
    # Guard: a ticker can have OHLC data but no net-prem data (the npt loop
    # skips empty responses); there is no window to align against then.
    if ticker not in clean_npt_dfs:
        continue
    start_date = clean_npt_dfs[ticker].select(pl.col('tape_time').head(1)).item()
    end_date = clean_npt_dfs[ticker].select(pl.col('tape_time').tail(1)).item()
    clean_ohlc_dfs[ticker] = (
        raw_df
        .with_columns(
            pl.col('open').cast(pl.Float64),
            pl.col('high').cast(pl.Float64),
            pl.col('low').cast(pl.Float64),
            pl.col('close').cast(pl.Float64),
            pl.col('start_time').cast(pl.Datetime)
        )
        # BUG FIX: this filter previously used the stale module-level
        # start_dt/end_dt left over from the single-ticker SMH walkthrough,
        # silently discarding the per-ticker bounds computed above.
        .filter(
            (pl.col('start_time') >= start_date) & (pl.col('start_time') <= end_date)
        )
        .with_columns(
            pl.col('start_time').dt.convert_time_zone('America/New_York').alias('tape_time_tz')
        )
        .with_columns(
            pl.col('tape_time_tz').dt.strftime('%H:%M').alias('hms_str')
        )
        .select(
            [
                'start_time', 'tape_time_tz', 'hms_str',
                'open', 'high', 'low', 'close'
            ]
        )
        .sort(['tape_time_tz'], descending=False)
    )
# Narrow/melted close-price frames, one per ticker.
plot_ready_ohlc_dfs = {
    sym: frame.melt(
        id_vars=['hms_str'],
        value_vars=['close'],
        variable_name='ohlc_type',
        value_name='price',
    )
    for sym, frame in clean_ohlc_dfs.items()
}
We are now ready to pair off our net prem tick and price DataFrames to create our dashboard, so let's take care of that then create our thumbnail-sized dashboard:
# Build one stacked (net premium over price) mini-grid per ticker.
grouped_plots = {}
for sym, npt_frame in plot_ready_npt_dfs.items():
    flow_panel = (
        ggplot(npt_frame)
        + aes(x='hms_str', y='cumsum_net_prem_in_mil', color='flow_type')
        + geom_line(size=1)
        + scale_color_manual(values=color_mapping)
        + ggtitle(f'{target_date}: {sym}')
        + xlab('Timestamp')
        + ylab('Net Prem $M')
        + uw_dark_theme(UW_DARK_THEME, show_legend=False)
        # Thumbnails are cramped; drop the x-axis title.
        + theme(axis_title_x=element_blank())
    )
    price_panel = (
        ggplot(plot_ready_ohlc_dfs[sym])
        + aes(x='hms_str', y='price')
        + geom_line(size=1, color=color_mapping['price'])
        + ggtitle(f'{sym} Price')
        + xlab('Timestamp')
        + ylab(f'{sym} Price')
        + uw_dark_theme(UW_DARK_THEME, show_legend=False)
        # The top panel already carries the date/ticker title.
        + theme(
            axis_title_x=element_blank(),
            plot_title=element_blank(),
        )
    )
    grouped_plots[sym] = (
        gggrid([flow_panel, price_panel], ncol=1, align=True)
        + theme(plot_background=element_rect(fill=UW_DARK_THEME['black']))
    )
# Final dashboard: tile every per-ticker mini-grid 4-across.
# (The manual append loop over .items() that discarded the keys is exactly
# list(grouped_plots.values()).)
final_plots = list(grouped_plots.values())
dashboard = (
    gggrid(final_plots, ncol=4, align=True)
    + ggsize(900, 600)
    + theme(plot_background=element_rect(fill=UW_DARK_THEME['black']))
)
dashboard.show()