avax_oi_funding.py
# avax_oi_funding.py
import requests
import pandas as pd
import time
import argparse
import os
from datetime import datetime, timezone
BASE = "https://api.bybit.com/v5/market"
def fetch_open_interest(symbol="AVAXUSDT", category="linear", intervalTime="15min",
                        startTime=None, endTime=None, limit=200, sleep=0.15):
    """
    Returns a DataFrame with columns: timestamp (UTC datetime), openInterest (float).
    Uses cursor pagination if the API returns nextPageCursor.
    intervalTime: one of ['5min','15min','30min','1h','4h','1d']
    """
    url = f"{BASE}/open-interest"
    params = {
        "category": category,
        "symbol": symbol,
        "intervalTime": intervalTime,
        "limit": limit,
    }
    if startTime:
        params["startTime"] = int(startTime)
    if endTime:
        params["endTime"] = int(endTime)
    all_rows = []
    cursor = None
    while True:
        if cursor:
            params["cursor"] = cursor
        r = requests.get(url, params=params, timeout=20)
        r.raise_for_status()
        j = r.json()
        if j.get("retCode") != 0:
            print("OpenInterest API error:", j)
            break
        res = j.get("result", {})
        lst = res.get("list", [])
        if not lst:
            break
        all_rows.extend(lst)
        cursor = res.get("nextPageCursor")
        if not cursor:
            break
        time.sleep(sleep)
    if not all_rows:
        return pd.DataFrame(columns=["timestamp", "openInterest"])
    df = pd.DataFrame(all_rows)
    # Both fields come back as strings -> convert to proper types
    df["timestamp"] = pd.to_datetime(df["timestamp"].astype("int64"), unit="ms", utc=True)
    df["openInterest"] = df["openInterest"].astype(float)
    return df.sort_values("timestamp").reset_index(drop=True)
def fetch_funding_history(symbol="AVAXUSDT", category="linear",
                          startTime=None, endTime=None, limit=200, sleep=0.15):
    """
    Returns a DataFrame with columns: fundingRateTimestamp (UTC datetime), fundingRate (float).
    The API returns at most `limit` rows per call, newest first, so for a long
    span we page backwards by moving endTime to just before the oldest row seen.
    """
    url = f"{BASE}/funding/history"
    params = {
        "category": category,
        "symbol": symbol,
        "limit": limit,
    }
    if startTime:
        params["startTime"] = int(startTime)
    if endTime:
        params["endTime"] = int(endTime)
    all_rows = []
    while True:
        r = requests.get(url, params=params, timeout=20)
        r.raise_for_status()
        j = r.json()
        if j.get("retCode") != 0:
            print("Funding API error:", j)
            break
        res = j.get("result", {})
        lst = res.get("list", [])
        if not lst:
            break
        all_rows.extend(lst)
        # Fewer rows than the limit means the window is exhausted
        if len(lst) < limit:
            break
        # The list is sorted newest-first, so page backwards: move endTime
        # to just before the oldest timestamp in this batch
        oldest_ts = int(lst[-1]["fundingRateTimestamp"])
        params["endTime"] = oldest_ts - 1
        time.sleep(sleep)
    if not all_rows:
        return pd.DataFrame(columns=["fundingRateTimestamp", "fundingRate", "symbol"])
    df = pd.DataFrame(all_rows)
    df["fundingRateTimestamp"] = pd.to_datetime(df["fundingRateTimestamp"].astype("int64"), unit="ms", utc=True)
    df["fundingRate"] = df["fundingRate"].astype(float)
    return df.sort_values("fundingRateTimestamp").reset_index(drop=True)
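# Note: funding on Bybit USDT perpetuals typically accrues every 8 hours
# (3 events per day; some symbols use a different interval). A rough,
# back-of-the-envelope annualization over the fetched window, assuming
# that 8h cadence (df_f here is the DataFrame returned above):
#   annualized = df_f["fundingRate"].mean() * 3 * 365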
def ts_from_days_ago(days):
    """Return (start_ms, end_ms) for a window of `days` days ending now."""
    end = int(time.time() * 1000)
    start = end - days * 24 * 60 * 60 * 1000
    return start, end
def main():
    parser = argparse.ArgumentParser(description="Download AVAX OI and Funding from Bybit")
    parser.add_argument("--symbol", default="AVAXUSDT", help="symbol (default AVAXUSDT)")
    parser.add_argument("--days", type=int, default=30, help="how many days back to fetch (default 30)")
    parser.add_argument("--oi_interval", default="15min", choices=["5min", "15min", "30min", "1h", "4h", "1d"],
                        help="interval for open interest (default 15min)")
    parser.add_argument("--outdir", default=".", help="output directory (default current)")
    args = parser.parse_args()
    symbol = args.symbol
    days = args.days
    oi_interval = args.oi_interval
    outdir = args.outdir
    os.makedirs(outdir, exist_ok=True)
    start_ms, end_ms = ts_from_days_ago(days)
    start_dt = datetime.fromtimestamp(start_ms / 1000, tz=timezone.utc)
    end_dt = datetime.fromtimestamp(end_ms / 1000, tz=timezone.utc)
    print(f"Fetching OI {symbol} {oi_interval} from {start_dt} to {end_dt}")
    df_oi = fetch_open_interest(symbol=symbol, intervalTime=oi_interval, startTime=start_ms, endTime=end_ms)
    oi_file = os.path.join(outdir, f"{symbol}_open_interest_{oi_interval}.csv")
    df_oi.to_csv(oi_file, index=False)
    print("Saved OI:", oi_file, "rows:", len(df_oi))
    print(f"Fetching Funding history {symbol} for last {days} days")
    df_f = fetch_funding_history(symbol=symbol, startTime=start_ms, endTime=end_ms)
    f_file = os.path.join(outdir, f"{symbol}_funding_history.csv")
    df_f.to_csv(f_file, index=False)
    print("Saved Funding history:", f_file, "rows:", len(df_f))
if __name__ == "__main__":
    main()
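With both CSVs on disk, the funding series (one row per funding event) can be aligned onto the denser OI timeline. A minimal sketch, assuming the file names and column names produced by the script above; merge_asof with backward matching is just one reasonable choice here:

import pandas as pd

oi = pd.read_csv("AVAXUSDT_open_interest_15min.csv", parse_dates=["timestamp"])
f = pd.read_csv("AVAXUSDT_funding_history.csv", parse_dates=["fundingRateTimestamp"])
# Attach the most recent funding rate at or before each OI bar
merged = pd.merge_asof(
    oi.sort_values("timestamp"),
    f.sort_values("fundingRateTimestamp").rename(columns={"fundingRateTimestamp": "timestamp"}),
    on="timestamp",
    direction="backward",
)
print(merged[["timestamp", "openInterest", "fundingRate"]].tail())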
How the second script (avax_trades_to_cvd.py) works
- Connects to the Bybit API endpoint /v5/market/recent-trade (list of recent trades).
- Downloads trades in batches (limit=1000).
- Saves them to CSV with the columns: timestamp (UTC), side (Buy/Sell), size (trade size in contracts), price.
- Then aggregates them into 1-minute bars: timestamp, taker_buy_volume, taker_sell_volume.
- Result: the CSV file avax_takers_1m.csv.
avax_trades_to_cvd.py
# avax_trades_to_cvd.py
import requests
import pandas as pd
import time
from datetime import datetime, timedelta, timezone
BASE = "https://api.bybit.com/v5/market"
def fetch_trades(symbol="AVAXUSDT", limit=1000, cursor=None):
    """
    Fetch the list of most recent trades (max 1000 per request).
    Returns the raw JSON response.
    """
    url = f"{BASE}/recent-trade"
    params = {
        "category": "linear",  # USDT perpetuals
        "symbol": symbol,
        "limit": limit,
    }
    if cursor:
        params["cursor"] = cursor
    r = requests.get(url, params=params, timeout=15)
    r.raise_for_status()
    return r.json()
def download_trades(symbol="AVAXUSDT", minutes=60):
    """
    Download trades from the last N minutes.
    IMPORTANT: this endpoint only serves the most recent trades (up to
    `limit` per request), so for longer history use WebSockets or
    third-party datasets instead.
    """
    end_time = datetime.now(timezone.utc)
    start_time = end_time - timedelta(minutes=minutes)
    all_trades = []
    cursor = None
    while True:
        j = fetch_trades(symbol, cursor=cursor)
        if j.get("retCode") != 0:
            print("API error:", j)
            break
        data = j["result"]["list"]
        if not data:
            break
        # The API returns trades in reverse order (newest first)
        for t in data:
            ts = int(t["time"])
            dt = datetime.fromtimestamp(ts / 1000, tz=timezone.utc)
            if dt < start_time:
                return all_trades
            all_trades.append({
                "timestamp": dt,
                "side": t["side"],
                "size": float(t["size"]),
                "price": float(t["price"]),
            })
        # If the response carries no nextPageCursor (recent-trade may not
        # paginate), we stop after the first batch
        cursor = j["result"].get("nextPageCursor")
        if not cursor:
            break
        time.sleep(0.2)
    return all_trades
def trades_to_cvd(trades):
    """
    Aggregate trades into 1-minute bins: taker_buy_volume / taker_sell_volume.
    """
    if not trades:
        return pd.DataFrame(columns=["timestamp", "taker_buy_volume", "taker_sell_volume"])
    df = pd.DataFrame(trades).set_index("timestamp")
    buys = df[df["side"] == "Buy"].resample("1min").agg({"size": "sum"})
    sells = df[df["side"] == "Sell"].resample("1min").agg({"size": "sum"})
    out = pd.DataFrame(index=pd.date_range(df.index.min(), df.index.max(), freq="1min"))
    out["taker_buy_volume"] = buys["size"]
    out["taker_sell_volume"] = sells["size"]
    return out.fillna(0).reset_index().rename(columns={"index": "timestamp"})
if __name__ == "__main__":
    print("Downloading the last 120 minutes of AVAX trades...")
    trades = download_trades("AVAXUSDT", minutes=120)
    print("Total trades:", len(trades))
    df = trades_to_cvd(trades)
    df.to_csv("avax_takers_1m.csv", index=False)
    print("Saved:", len(df), "1-minute bars to avax_takers_1m.csv")