feedhandlers extended

This commit is contained in:
2026-04-23 03:11:52 +00:00
parent 408a63fe58
commit 539e6004cf
22 changed files with 1340 additions and 205 deletions

View File

@@ -2,17 +2,17 @@
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"execution_count": null,
"id": "3a269644",
"metadata": {},
"outputs": [],
"source": [
"import modules.aster_auth_api as aster_auth_api"
"import modules.aster_auth as aster_auth"
]
},
{
"cell_type": "code",
"execution_count": 8,
"execution_count": 27,
"id": "4395fabb",
"metadata": {},
"outputs": [],
@@ -57,17 +57,17 @@
},
{
"cell_type": "code",
"execution_count": 9,
"execution_count": null,
"id": "2122885a",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"{'orderId': 17340952438,\n",
"{'orderId': 17341253499,\n",
" 'symbol': 'ETHUSDT',\n",
" 'status': 'NEW',\n",
" 'clientOrderId': '57UkyWpQE4iRP74eAwt7Hf',\n",
" 'clientOrderId': 'J1SvsvvDhlDAqwGau1T77F',\n",
" 'price': '2100',\n",
" 'avgPrice': '0.00000',\n",
" 'origQty': '0.010',\n",
@@ -84,18 +84,18 @@
" 'workingType': 'CONTRACT_PRICE',\n",
" 'priceProtect': False,\n",
" 'origType': 'LIMIT',\n",
" 'updateTime': 1776835456300,\n",
" 'newChainData': {'hash': '0x12456fb0e14021975aa03e56adb339f8b906710ec3a8604a73a8b697c7ee1225'}}"
" 'updateTime': 1776838867250,\n",
" 'newChainData': {'hash': '0xa2c23513491587b2961bfa77d2077489a14522d7ce359a9e3635e7c4f0d12a22'}}"
]
},
"execution_count": 9,
"execution_count": 28,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"j = aster_auth_api.post_authenticated_url(post_order)\n",
"j"
"# j = aster_auth_api.post_authenticated_url(post_order)\n",
"# j"
]
},
{
@@ -103,9 +103,197 @@
"execution_count": null,
"id": "45d68bf4",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1776836583531"
]
},
"execution_count": 15,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"from datetime import datetime\n",
"round(datetime.now().timestamp())"
]
},
{
"cell_type": "code",
"execution_count": 16,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1800"
]
},
"execution_count": 16,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"30*60"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "67a3bbb2",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": 23,
"id": "e958e7da",
"metadata": {},
"outputs": [],
"source": [
"import json\n",
"\n",
"### User Data Stream ###\n",
"# After posting limit order to rest\n",
"after_order_post = json.loads('{\"e\":\"ORDER_TRADE_UPDATE\",\"T\":1776836992350,\"E\":1776836992388,\"o\":{\"s\":\"ETHUSDT\",\"c\":\"KFi375tZh5kzHsQJWKMJHe\",\"S\":\"BUY\",\"o\":\"LIMIT\",\"f\":\"GTC\",\"q\":\"0.010\",\"p\":\"2100\",\"ap\":\"0\",\"sp\":\"0\",\"x\":\"NEW\",\"X\":\"NEW\",\"i\":17341121450,\"l\":\"0\",\"z\":\"0\",\"L\":\"0\",\"T\":1776836992350,\"t\":0,\"b\":\"21\",\"a\":\"0\",\"m\":false,\"R\":false,\"wt\":\"CONTRACT_PRICE\",\"ot\":\"LIMIT\",\"ps\":\"BOTH\",\"cp\":false,\"rp\":\"0\",\"pP\":false,\"si\":0,\"ss\":0,\"h\":\"0xeec41a368072d5a47c13b3ad83703eded2e2b8cf9ff427095beb8b7684968db0\"}}')\n",
"\n",
"# After order cancellation in GUI\n",
"after_order_cxl = json.loads('{\"e\":\"ORDER_TRADE_UPDATE\",\"T\":1776836998500,\"E\":1776836998533,\"o\":{\"s\":\"ETHUSDT\",\"c\":\"KFi375tZh5kzHsQJWKMJHe\",\"S\":\"BUY\",\"o\":\"LIMIT\",\"f\":\"GTC\",\"q\":\"0.010\",\"p\":\"2100\",\"ap\":\"0\",\"sp\":\"0\",\"x\":\"CANCELED\",\"X\":\"CANCELED\",\"i\":17341121450,\"l\":\"0\",\"z\":\"0\",\"L\":\"0\",\"T\":1776836998500,\"t\":0,\"b\":\"0\",\"a\":\"0\",\"m\":false,\"R\":false,\"wt\":\"CONTRACT_PRICE\",\"ot\":\"LIMIT\",\"ps\":\"BOTH\",\"cp\":false,\"rp\":\"0\",\"pP\":false,\"si\":0,\"ss\":0,\"h\":\"0x116bb48a4b41420aebbc76343d6f55f22d1ccbc36b04462e6172571fda836599\"}}')\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 24,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"{'e': 'ORDER_TRADE_UPDATE',\n",
" 'T': 1776836992350,\n",
" 'E': 1776836992388,\n",
" 'o': {'s': 'ETHUSDT',\n",
" 'c': 'KFi375tZh5kzHsQJWKMJHe',\n",
" 'S': 'BUY',\n",
" 'o': 'LIMIT',\n",
" 'f': 'GTC',\n",
" 'q': '0.010',\n",
" 'p': '2100',\n",
" 'ap': '0',\n",
" 'sp': '0',\n",
" 'x': 'NEW',\n",
" 'X': 'NEW',\n",
" 'i': 17341121450,\n",
" 'l': '0',\n",
" 'z': '0',\n",
" 'L': '0',\n",
" 'T': 1776836992350,\n",
" 't': 0,\n",
" 'b': '21',\n",
" 'a': '0',\n",
" 'm': False,\n",
" 'R': False,\n",
" 'wt': 'CONTRACT_PRICE',\n",
" 'ot': 'LIMIT',\n",
" 'ps': 'BOTH',\n",
" 'cp': False,\n",
" 'rp': '0',\n",
" 'pP': False,\n",
" 'si': 0,\n",
" 'ss': 0,\n",
" 'h': '0xeec41a368072d5a47c13b3ad83703eded2e2b8cf9ff427095beb8b7684968db0'}}"
]
},
"execution_count": 24,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"after_order_post"
]
},
{
"cell_type": "code",
"execution_count": 25,
"id": "1ea320f2",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"{'e': 'ORDER_TRADE_UPDATE',\n",
" 'T': 1776836998500,\n",
" 'E': 1776836998533,\n",
" 'o': {'s': 'ETHUSDT',\n",
" 'c': 'KFi375tZh5kzHsQJWKMJHe',\n",
" 'S': 'BUY',\n",
" 'o': 'LIMIT',\n",
" 'f': 'GTC',\n",
" 'q': '0.010',\n",
" 'p': '2100',\n",
" 'ap': '0',\n",
" 'sp': '0',\n",
" 'x': 'CANCELED',\n",
" 'X': 'CANCELED',\n",
" 'i': 17341121450,\n",
" 'l': '0',\n",
" 'z': '0',\n",
" 'L': '0',\n",
" 'T': 1776836998500,\n",
" 't': 0,\n",
" 'b': '0',\n",
" 'a': '0',\n",
" 'm': False,\n",
" 'R': False,\n",
" 'wt': 'CONTRACT_PRICE',\n",
" 'ot': 'LIMIT',\n",
" 'ps': 'BOTH',\n",
" 'cp': False,\n",
" 'rp': '0',\n",
" 'pP': False,\n",
" 'si': 0,\n",
" 'ss': 0,\n",
" 'h': '0x116bb48a4b41420aebbc76343d6f55f22d1ccbc36b04462e6172571fda836599'}}"
]
},
"execution_count": 25,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"after_order_cxl"
]
},
{
"cell_type": "code",
"execution_count": 26,
"id": "07ef4360",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"Timestamp('2019-09-19 07:51:05.651000')"
]
},
"execution_count": 26,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"import pandas as pd\n",
"\n",
"pd.to_datetime(1568879465651, unit='ms')"
]
},
{
"cell_type": "code",
"execution_count": null,
@@ -116,7 +304,22 @@
{
"cell_type": "code",
"execution_count": null,
"id": "fd513311",
"id": "284b7266",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"id": "98ed3a27",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []

View File

@@ -1,3 +1,5 @@
# tail -f Fund_Rate_Aster_User.log Fund_Rate_Aster.log Fund_Rate_Extended_FR.log Fund_Rate_Extended_OB.log Fund_Rate_Extended_User.log
services:
ws_aster:
container_name: ws_aster
@@ -9,6 +11,16 @@ services:
- /home/ubuntu/data:/home/ubuntu/data:rw # Read-write access to data
- /home/ubuntu/logs:/home/ubuntu/logs:rw # Read-write access to data
network_mode: "host"
ws_aster_user:
container_name: ws_aster_user
restart: "unless-stopped"
build:
context: ./
dockerfile: ./ws_aster_user/Dockerfile
volumes:
- /home/ubuntu/data:/home/ubuntu/data:rw # Read-write access to data
- /home/ubuntu/logs:/home/ubuntu/logs:rw # Read-write access to data
network_mode: "host"
ws_extended_fund_rate:
container_name: ws_extended_fund_rate
restart: "unless-stopped"
@@ -29,18 +41,18 @@ services:
- /home/ubuntu/data:/home/ubuntu/data:rw # Read-write access to data
- /home/ubuntu/logs:/home/ubuntu/logs:rw # Read-write access to data
network_mode: "host"
ws_extended_user:
container_name: ws_extended_user
restart: "unless-stopped"
build:
context: ./
dockerfile: ./ws_extended_user/Dockerfile
volumes:
- /home/ubuntu/data:/home/ubuntu/data:rw # Read-write access to data
- /home/ubuntu/logs:/home/ubuntu/logs:rw # Read-write access to data
network_mode: "host"
# ws_user:
# container_name: ws_user
# restart: "unless-stopped"
# build:
# context: ./
# dockerfile: ./ws_user/Dockerfile
# volumes:
# - /home/ubuntu/data:/home/ubuntu/data:rw # Read-write access to data
# - /home/ubuntu/logs:/home/ubuntu/logs:rw # Read-write access to data
# network_mode: "host"
# ng:
# container_name: ng
# restart: "unless-stopped"

View File

@@ -2,15 +2,104 @@
"cells": [
{
"cell_type": "code",
"execution_count": 15,
"id": "c9606e56",
"execution_count": null,
"id": "6c70a8c3",
"metadata": {},
"outputs": [],
"source": [
"import math\n",
"from datetime import datetime, timezone"
"\n",
"import asyncio\n",
"import requests\n",
"from x10.config import MAINNET_CONFIG, TESTNET_CONFIG\n",
"from x10.core.stark_account import StarkPerpetualAccount\n",
"from x10.perpetual.trading_client import PerpetualTradingClient\n",
"from x10.models.order import OrderSide, OrderType\n",
"import time\n",
"from dotenv import load_dotenv\n",
"import os\n",
"import uuid\n",
"import asyncio\n",
"import logging\n",
"from decimal import Decimal\n",
"\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ff971ca9",
"metadata": {},
"outputs": [],
"source": [
"load_dotenv()\n",
"\n",
"API_KEY = os.getenv('EXTENDED_API_KEY')\n",
"PUBLIC_KEY = os.getenv('EXTENDED_STARK_KEY_PUBLIC') # public Stark key (l2Key from account info)\n",
"PRIVATE_KEY = os.getenv('EXTENDED_STARK_KEY_PRIVATE') # private Stark key (hex)\n",
"VAULT = int(os.getenv('EXTENDED_VAULT_NUMBER')) # l2Vault from account info (integer)\n",
"\n",
"CONFIG = MAINNET_CONFIG\n",
"\n",
"ORDER_MARKET = \"BTC-USD\"\n",
"ORDER_SIDE = OrderSide.BUY\n",
"ORDER_QTY = Decimal(\"0.001\")\n",
"ORDER_PRICE = Decimal(\"75000\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "c366706f",
"metadata": {},
"outputs": [],
"source": [
"placed_order = await trading_client.place_order(\n",
" market_name=ORDER_MARKET,\n",
" amount_of_synthetic=ORDER_QTY,\n",
" price=ORDER_PRICE,\n",
" side=ORDER_SIDE,\n",
" taker_fee=Decimal(\"0.00025\")\n",
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"id": "7cd3413d",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"id": "a7212988",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": 1,
@@ -121,6 +210,132 @@
"(time_round_down(dt=datetime.now(timezone.utc), interval_mins=60)+(60*60))*1000"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "086dbfb3",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"id": "2d734f79",
"metadata": {},
"outputs": [],
"source": [
"\n"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "e6b59c51",
"metadata": {},
"outputs": [],
"source": [
"import json\n",
"\n",
"### USER DATA STREAM ###\n",
"order = json.loads('{\"type\":\"ORDER\",\"data\":{\"isSnapshot\":true,\"orders\":[]},\"ts\":1776839215443,\"seq\":1}')\n",
"position = json.loads('{\"type\":\"POSITION\",\"data\":{\"isSnapshot\":true,\"positions\":[]},\"ts\":1776839215443,\"seq\":2}')\n",
"balance = json.loads('{\"type\":\"BALANCE\",\"data\":{\"isSnapshot\":true,\"balance\":{\"collateralName\":\"USD\",\"balance\":\"25.979998\",\"status\":\"ACTIVE\",\"equity\":\"25.979998\",\"spotEquity\":\"0\",\"spotEquityForAvailableForTrade\":\"0\",\"availableForTrade\":\"25.979998\",\"availableForWithdrawal\":\"25.979998\",\"unrealisedPnl\":\"0\",\"initialMargin\":\"0.000000\",\"marginRatio\":\"0.0000\",\"updatedTime\":1776822250184,\"exposure\":\"0\",\"leverage\":\"0.0000\"}},\"ts\":1776839215443,\"seq\":3}')"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "ee8420c4",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"{'type': 'ORDER',\n",
" 'data': {'isSnapshot': True, 'orders': []},\n",
" 'ts': 1776839215443,\n",
" 'seq': 1}"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"order"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "e8d20d9f",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"{'type': 'POSITION',\n",
" 'data': {'isSnapshot': True, 'positions': []},\n",
" 'ts': 1776839215443,\n",
" 'seq': 2}"
]
},
"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"position"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"{'type': 'BALANCE',\n",
" 'data': {'isSnapshot': True,\n",
" 'balance': {'collateralName': 'USD',\n",
" 'balance': '25.979998',\n",
" 'status': 'ACTIVE',\n",
" 'equity': '25.979998',\n",
" 'spotEquity': '0',\n",
" 'spotEquityForAvailableForTrade': '0',\n",
" 'availableForTrade': '25.979998',\n",
" 'availableForWithdrawal': '25.979998',\n",
" 'unrealisedPnl': '0',\n",
" 'initialMargin': '0.000000',\n",
" 'marginRatio': '0.0000',\n",
" 'updatedTime': 1776822250184,\n",
" 'exposure': '0',\n",
" 'leverage': '0.0000'}},\n",
" 'ts': 1776839215443,\n",
" 'seq': 3}"
]
},
"execution_count": 5,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"balance"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "fb16dfb9",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,

23
main.py
View File

@@ -36,15 +36,22 @@ async def run_algo():
loop_start = time.time()
print('__________Start___________')
ASTER_FUND_RATE = json.loads(VAL_KEY.get('fund_rate_aster'))
ASTER_TICKER = json.loads(VAL_KEY.get('fut_ticker_aster'))
print(f'ASTER FUND RATE: {ASTER_FUND_RATE}')
print(f'ASTER TICKER: {ASTER_TICKER}')
ASTER_FUND_RATE_DICT = json.loads(VAL_KEY.get('fund_rate_aster'))
ASTER_TICKER_DICT = json.loads(VAL_KEY.get('fut_ticker_aster'))
# print(f'ASTER FUND RATE: {ASTER_FUND_RATE}')
# print(f'ASTER TICKER: {ASTER_TICKER}')
EXTENDED_FUND_RATE = json.loads(VAL_KEY.get('fund_rate_extended'))
EXTENDED_TICKER = json.loads(VAL_KEY.get('fut_ticker_extended'))
print(f'EXTENDED FUND RATE: {EXTENDED_FUND_RATE}')
print(f'EXTENDED TICKER: {EXTENDED_TICKER}')
EXTENDED_FUND_RATE_DICT = json.loads(VAL_KEY.get('fund_rate_extended'))
EXTENDED_TICKER_DICT = json.loads(VAL_KEY.get('fut_ticker_extended'))
# print(f'EXTENDED FUND RATE: {EXTENDED_FUND_RATE}')
# print(f'EXTENDED TICKER: {EXTENDED_TICKER}')
ASTER_FUND_RATE = float(ASTER_FUND_RATE_DICT.get('funding_rate', 0))
EXTEND_FUND_RATE = float(EXTENDED_FUND_RATE_DICT.get('funding_rate', 0))
print(f'''
ASTER FR: {ASTER_FUND_RATE:.6%} | EXTEND FR: {EXTEND_FUND_RATE:.6%}
''')
time.sleep(5)

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

147
modules/aster_db.py Normal file
View File

@@ -0,0 +1,147 @@
import logging
from typing import AsyncContextManager
from sqlalchemy import text
### Orders and Trades Tables ####
async def create_fr_aster_user_order_trade_table(
    CON: AsyncContextManager,
    engine: str = 'mysql',  # mysql | duckdb
) -> None:
    """Create the fr_aster_user_order_trade table if it does not already exist.

    No-op (with an info log) when CON is None; raises ValueError for any
    engine other than 'mysql'.
    """
    # Guard clauses instead of nested if/else.
    if CON is None:
        logging.info("NO DB CONNECTION, SKIPPING Create Statements")
        return
    if engine != 'mysql':
        raise ValueError('Only MySQL engine is implemented')
    logging.info('Creating Table if Does Not Exist: fr_aster_user_order_trade')
    await CON.execute(text("""
        CREATE TABLE IF NOT EXISTS fr_aster_user_order_trade (
            timestamp_arrival BIGINT,
            timestamp_msg BIGINT,
            timestamp_value BIGINT,
            symbol VARCHAR(20),
            client_order_id VARCHAR(100),
            side VARCHAR(20),
            order_type VARCHAR(100),
            time_in_force VARCHAR(20),
            original_qty DOUBLE,
            original_price DOUBLE,
            avg_price DOUBLE,
            stop_price DOUBLE,
            execution_type VARCHAR(100),
            order_status VARCHAR(100),
            order_id BIGINT,
            last_filled_qty DOUBLE,
            filled_accumulated_qty DOUBLE,
            last_filled_price DOUBLE,
            commission_asset VARCHAR(20),
            commission DOUBLE,
            order_trade_time_ts BIGINT,
            trade_id VARCHAR(100),
            bid_notional DOUBLE,
            ask_notional DOUBLE,
            trade_is_maker BOOL,
            trade_is_reduce_only BOOL,
            stop_px_working_type VARCHAR(100),
            original_order_type VARCHAR(100),
            position_side VARCHAR(100),
            pushed_w_conditional_order BOOL,
            activation_price DOUBLE,
            callback_rate DOUBLE,
            realized_profit DOUBLE
        );
    """))
    await CON.commit()
### Margin Calls Table ####
async def create_fr_aster_user_margin_table(
    CON: AsyncContextManager,
    engine: str = 'mysql',  # mysql | duckdb
) -> None:
    """Create the fr_aster_user_margin table (margin-call events) if missing.

    Skips with an info log when CON is None; only the MySQL engine is
    supported, otherwise a ValueError is raised.
    """
    if CON is None:
        logging.info("NO DB CONNECTION, SKIPPING Create Statements")
        return
    if engine != 'mysql':
        raise ValueError('Only MySQL engine is implemented')
    logging.info('Creating Table if Does Not Exist: fr_aster_user_margin')
    await CON.execute(text("""
        CREATE TABLE IF NOT EXISTS fr_aster_user_margin (
            timestamp_arrival BIGINT,
            timestamp_msg BIGINT,
            cross_wallet_balance DOUBLE,
            symbol VARCHAR(20),
            position_side VARCHAR(20),
            position_amount DOUBLE,
            margin_type VARCHAR(20),
            isolated_wallet DOUBLE,
            mark_price DOUBLE,
            unrealized_pnl DOUBLE,
            maint_margin_required DOUBLE
        );
    """))
    await CON.commit()
### Account Balance Table ####
async def create_fr_aster_user_account_bal(
    CON: AsyncContextManager,
    engine: str = 'mysql',  # mysql | duckdb
) -> None:
    """Create the fr_aster_user_account_bal table (balance updates) if missing.

    Skips with an info log when CON is None; raises ValueError for any
    engine other than 'mysql'.
    """
    if CON is None:
        logging.info("NO DB CONNECTION, SKIPPING Create Statements")
        return
    if engine != 'mysql':
        raise ValueError('Only MySQL engine is implemented')
    logging.info('Creating Table if Does Not Exist: fr_aster_user_account_bal')
    await CON.execute(text("""
        CREATE TABLE IF NOT EXISTS fr_aster_user_account_bal (
            timestamp_arrival BIGINT,
            timestamp_msg BIGINT,
            timestamp_transaction BIGINT,
            event_reason_type VARCHAR(20),
            asset VARCHAR(20),
            wallet_balance DOUBLE,
            cross_wallet_balance DOUBLE,
            balance_change_excl_pnl_comms DOUBLE
        );
    """))
    await CON.commit()
### Account Positions Table ####
async def create_fr_aster_user_account_pos(
    CON: AsyncContextManager,
    engine: str = 'mysql',  # mysql | duckdb
) -> None:
    """Create the fr_aster_user_account_pos table (position updates) if missing.

    Skips with an info log when CON is None; raises ValueError for any
    engine other than 'mysql'.
    """
    if CON is None:
        logging.info("NO DB CONNECTION, SKIPPING Create Statements")
        return
    if engine != 'mysql':
        raise ValueError('Only MySQL engine is implemented')
    logging.info('Creating Table if Does Not Exist: fr_aster_user_account_pos')
    await CON.execute(text("""
        CREATE TABLE IF NOT EXISTS fr_aster_user_account_pos (
            timestamp_arrival BIGINT,
            timestamp_msg BIGINT,
            timestamp_transaction BIGINT,
            event_reason_type VARCHAR(20),
            symbol VARCHAR(20),
            position_amount DOUBLE,
            entry_price DOUBLE,
            accumulated_realized_pre_fees DOUBLE,
            unrealized_pnl DOUBLE,
            margin_type VARCHAR(20),
            isolated_wallet DOUBLE,
            position_side VARCHAR(20)
        );
    """))
    await CON.commit()

27
modules/db.py Normal file
View File

@@ -0,0 +1,27 @@
### Database Funcs ###
import logging
from typing import AsyncContextManager
import pandas as pd
async def insert_df_to_mysql(
table_name: str,
params: dict | list | pd.DataFrame,
CON: AsyncContextManager,
engine: str = 'mysql', # mysql | duckdb
) -> None:
if CON is None:
logging.info("NO DB CONNECTION, SKIPPING Insert Statements")
else:
if engine == 'mysql':
if not isinstance(params, pd.DataFrame):
if isinstance(params, dict):
params = [params]
df = pd.DataFrame(params)
await CON.run_sync(
lambda sync_conn: df.to_sql(name=table_name, con=sync_conn, if_exists='append', index=False)
)
await CON.commit()
else:
raise ValueError('Only MySQL engine is implemented')

30
modules/extended_auth.py Normal file
View File

@@ -0,0 +1,30 @@
import os
from dotenv import load_dotenv
from x10.config import MAINNET_CONFIG
from x10.core.stark_account import StarkPerpetualAccount
from x10.perpetual.trading_client import PerpetualTradingClient
import logging
async def create_auth_account_and_trading_client() -> tuple[StarkPerpetualAccount, PerpetualTradingClient]:
    """Build an Extended (x10) Stark account plus a mainnet trading client from env vars.

    Loads .env, reads EXTENDED_API_KEY, EXTENDED_STARK_KEY_PUBLIC,
    EXTENDED_STARK_KEY_PRIVATE and EXTENDED_VAULT_NUMBER, then constructs the
    account and client and smoke-tests the credentials with a balance call.

    Returns:
        (stark_account, trading_client) tuple.
    """
    load_dotenv()
    API_KEY = os.getenv('EXTENDED_API_KEY')
    PUBLIC_KEY = os.getenv('EXTENDED_STARK_KEY_PUBLIC')  # public Stark key (l2Key)
    PRIVATE_KEY = os.getenv('EXTENDED_STARK_KEY_PRIVATE')  # private Stark key (hex)
    # NOTE(review): int(None) raises TypeError if EXTENDED_VAULT_NUMBER is unset — confirm env always populated
    VAULT = int(os.getenv('EXTENDED_VAULT_NUMBER'))
    stark_account = StarkPerpetualAccount(
        vault=VAULT,
        private_key=PRIVATE_KEY,
        public_key=PUBLIC_KEY,
        api_key=API_KEY,
    )
    trading_client = PerpetualTradingClient(MAINNET_CONFIG, stark_account)
    try:
        await trading_client.account.get_balance()
    except ValueError as e:
        # NOTE(review): the error is swallowed, so a client with bad credentials
        # is still returned to the caller — confirm this best-effort is intended.
        logging.critical(f'Failed to get balance after creation of trading account: {e}')
    return stark_account, trading_client

152
modules/extended_db.py Normal file
View File

@@ -0,0 +1,152 @@
import logging
from typing import AsyncContextManager
from sqlalchemy import text
### Orders Table ####
async def create_fr_extended_user_order(
    CON: AsyncContextManager,
    engine: str = 'mysql',  # mysql | duckdb
) -> None:
    """Create the fr_extended_user_order table if it does not already exist.

    Skips with an info log when CON is None; raises ValueError for any
    engine other than 'mysql'.
    """
    if CON is None:
        logging.info("NO DB CONNECTION, SKIPPING Create Statements")
        return
    if engine != 'mysql':
        raise ValueError('Only MySQL engine is implemented')
    logging.info('Creating Table if Does Not Exist: fr_extended_user_order')
    await CON.execute(text("""
        CREATE TABLE IF NOT EXISTS fr_extended_user_order (
            sequence_id INT,
            timestamp_arrival BIGINT,
            timestamp_msg BIGINT,
            order_id VARCHAR(100),
            account_id VARCHAR(100),
            external_id VARCHAR(100),
            market VARCHAR(20),
            type VARCHAR(20),
            side VARCHAR(20),
            status VARCHAR(20),
            status_reason VARCHAR(100),
            price DOUBLE,
            averagePrice DOUBLE,
            qty DOUBLE,
            filled_qty DOUBLE,
            payed_fee DOUBLE,
            tp_sl_type VARCHAR(20),
            reduce_only BOOL,
            post_only BOOL,
            created_time_ts BIGINT,
            updated_time_ts BIGINT,
            expire_time_ts BIGINT
        );
    """))
    await CON.commit()
### Trades Table ####
async def create_fr_extended_user_trade(
    CON: AsyncContextManager,
    engine: str = 'mysql',  # mysql | duckdb
) -> None:
    """Create the fr_extended_user_trade table if it does not already exist.

    Skips with an info log when CON is None; raises ValueError for any
    engine other than 'mysql'.
    """
    if CON is None:
        logging.info("NO DB CONNECTION, SKIPPING Create Statements")
        return
    if engine != 'mysql':
        raise ValueError('Only MySQL engine is implemented')
    logging.info('Creating Table if Does Not Exist: fr_extended_user_trade')
    await CON.execute(text("""
        CREATE TABLE IF NOT EXISTS fr_extended_user_trade (
            sequence_id INT,
            timestamp_arrival BIGINT,
            timestamp_msg BIGINT,
            trade_id VARCHAR(100),
            account_id VARCHAR(100),
            market VARCHAR(20),
            order_id VARCHAR(100),
            external_order_id VARCHAR(100),
            side VARCHAR(20),
            price DOUBLE,
            qty DOUBLE,
            value DOUBLE,
            fee DOUBLE,
            trade_type VARCHAR(20),
            created_time_ts BIGINT,
            is_taker BOOL
        );
    """))
    await CON.commit()
### Balance Table ####
async def create_fr_extended_user_balance(
    CON: AsyncContextManager,
    engine: str = 'mysql',  # mysql | duckdb
) -> None:
    """Create the fr_extended_user_balance table if it does not already exist.

    Skips with an info log when CON is None; raises ValueError for any
    engine other than 'mysql'.
    """
    if CON is None:
        logging.info("NO DB CONNECTION, SKIPPING Create Statements")
        return
    if engine != 'mysql':
        raise ValueError('Only MySQL engine is implemented')
    logging.info('Creating Table if Does Not Exist: fr_extended_user_balance')
    await CON.execute(text("""
        CREATE TABLE IF NOT EXISTS fr_extended_user_balance (
            sequence_id INT,
            timestamp_arrival BIGINT,
            timestamp_msg BIGINT,
            collateral_name VARCHAR(20),
            balance DOUBLE,
            equity DOUBLE,
            available_for_trade DOUBLE,
            available_for_withdrawal DOUBLE,
            unrealised_pnl DOUBLE,
            initial_margin DOUBLE,
            margin_ratio DOUBLE,
            updated_time_ts BIGINT,
            exposure DOUBLE,
            leverage DOUBLE
        );
    """))
    await CON.commit()
### Positions Table ####
async def create_fr_extended_user_position(
    CON: AsyncContextManager,
    engine: str = 'mysql',  # mysql | duckdb
) -> None:
    """Create the fr_extended_user_position table if it does not already exist.

    Skips with an info log when CON is None; raises ValueError for any
    engine other than 'mysql'.
    """
    if CON is None:
        logging.info("NO DB CONNECTION, SKIPPING Create Statements")
        return
    if engine != 'mysql':
        raise ValueError('Only MySQL engine is implemented')
    logging.info('Creating Table if Does Not Exist: fr_extended_user_position')
    await CON.execute(text("""
        CREATE TABLE IF NOT EXISTS fr_extended_user_position (
            sequence_id INT,
            timestamp_arrival BIGINT,
            timestamp_msg BIGINT,
            position_id VARCHAR(100),
            account_id VARCHAR(100),
            market VARCHAR(20),
            side VARCHAR(20),
            leverage DOUBLE,
            size DOUBLE,
            value DOUBLE,
            open_price DOUBLE,
            mark_price DOUBLE,
            liquidation_price DOUBLE,
            margin DOUBLE,
            unrealised_pnl DOUBLE,
            realised_pnl DOUBLE,
            tp_trigger_price DOUBLE,
            tp_limit_price DOUBLE,
            sl_trigger_price DOUBLE,
            sl_limit_price DOUBLE,
            adl_percentile INT,
            created_at_ts BIGINT,
            updated_at_ts BIGINT
        );
    """))
    await CON.commit()

View File

@@ -218,7 +218,7 @@ async def main():
logging.warning("VALKEY NOT BEING USED, NO DATA WILL BE PUBLISHED")
if USE_DB:
engine = create_async_engine('mysql+asyncmy://root:pwd@localhost/polymarket')
engine = create_async_engine('mysql+asyncmy://root:pwd@localhost/fund_rate')
async with engine.connect() as CON:
# await create_rtds_btcusd_table(CON=CON)
await ws_stream()

View File

@@ -128,7 +128,7 @@ async def ws_stream():
if channel is not None:
match channel:
case c if c == STREAM_MARKPRICE:
print(f'MP: {data}')
# print(f'MP: {data}')
VAL_KEY_OBJ = json.dumps({
'timestamp_arrival': ts_arrival,
'timestamp_msg': data['data']['E'],
@@ -186,7 +186,7 @@ async def main():
logging.warning("VALKEY NOT BEING USED, NO DATA WILL BE PUBLISHED")
if USE_DB:
engine = create_async_engine('mysql+asyncmy://root:pwd@localhost/polymarket')
engine = create_async_engine('mysql+asyncmy://root:pwd@localhost/fund_rate')
async with engine.connect() as CON:
# await create_rtds_btcusd_table(CON=CON)
await ws_stream()

View File

@@ -15,7 +15,9 @@ from sqlalchemy.ext.asyncio import create_async_engine
import valkey
import os
from dotenv import load_dotenv
import modules.aster_auth as aster_auth
import modules.aster_db as aster_db
import modules.db as db
### Allow only ipv4 ###
def allowed_gai_family():
@@ -23,9 +25,12 @@ def allowed_gai_family():
urllib3_cn.allowed_gai_family = allowed_gai_family
### Database ###
USE_DB: bool = False
USE_VK: bool = False
VK_USER = 'fund_rate_aster'
USE_DB: bool = True
USE_VK: bool = True
VK_ORDERS_TRADES = 'fr_aster_user_orders'
VK_MARGIN_CALLS = 'fr_aster_user_margin_calls'
VK_BALANCES = 'fr_aster_user_balances'
VK_POSITIONS = 'fr_aster_user_positions'
CON: AsyncContextManager | None = None
VAL_KEY = None
@@ -34,112 +39,205 @@ load_dotenv()
LOG_FILEPATH: str = os.getenv("LOGS_PATH") + '/Fund_Rate_Aster_User.log'
### CONSTANTS ###
WSS_URL = "wss://fstream.asterdex.com/ws/"
LOCAL_RECENT_UPDATES_LOOKBACK_SEC = 30
### Globals ###
WSS_URL = "wss://fstream.asterdex.com"
LISTEN_KEY: str | None = None
LISTEN_KEY_LAST_UPDATE_TS_S: int = 0
LISTEN_KEY_PUT_INTERVAL_SEC = 1800
# HIST_TRADES = np.empty((0, 3))
# HIST_TRADES_LOOKBACK_SEC = 6
LOCAL_RECENT_ORDERS: list = []
LOCAL_RECENT_MARGIN_CALLS: list = []
LOCAL_RECENT_BALANCES: list = []
LOCAL_RECENT_POSITIONS: list = []
# ### Database Funcs ###
# async def create_rtds_btcusd_table(
# CON: AsyncContextManager,
# engine: str = 'mysql', # mysql | duckdb
# ) -> None:
# if CON is None:
# logging.info("NO DB CONNECTION, SKIPPING Create Statements")
# else:
# if engine == 'mysql':
# logging.info('Creating Table if Does Not Exist: binance_btcusd_trades')
# await CON.execute(text("""
# CREATE TABLE IF NOT EXISTS binance_btcusd_trades (
# timestamp_arrival BIGINT,
# timestamp_msg BIGINT,
# timestamp_value BIGINT,
# value DOUBLE,
# qty DOUBLE
# );
# """))
# await CON.commit()
# else:
# raise ValueError('Only MySQL engine is implemented')
# async def insert_rtds_btcusd_table(
# timestamp_arrival: int,
# timestamp_msg: int,
# timestamp_value: int,
# value: float,
# qty: float,
# CON: AsyncContextManager,
# engine: str = 'mysql', # mysql | duckdb
# ) -> None:
# params={
# 'timestamp_arrival': timestamp_arrival,
# 'timestamp_msg': timestamp_msg,
# 'timestamp_value': timestamp_value,
# 'value': value,
# 'qty': qty,
# }
# if CON is None:
# logging.info("NO DB CONNECTION, SKIPPING Insert Statements")
# else:
# if engine == 'mysql':
# await CON.execute(text("""
# INSERT INTO binance_btcusd_trades
# (
# timestamp_arrival,
# timestamp_msg,
# timestamp_value,
# value,
# qty
# )
# VALUES
# (
# :timestamp_arrival,
# :timestamp_msg,
# :timestamp_value,
# :value,
# :qty
# )
# """),
# parameters=params
# )
# await CON.commit()
# else:
# raise ValueError('Only MySQL engine is implemented')
def upsert_list_of_dicts_by_id(list_of_dicts, new_dict, id='id'):
    """Upsert *new_dict* into *list_of_dicts*, keyed by the *id* field.

    Replaces the first entry whose value under key *id* equals new_dict's
    (both missing counts as equal, since .get yields None for each);
    appends when there is no match. Mutates and returns list_of_dicts.
    """
    target = new_dict.get(id)
    match_idx = next(
        (pos for pos, entry in enumerate(list_of_dicts) if entry.get(id) == target),
        None,
    )
    if match_idx is None:
        list_of_dicts.append(new_dict)
    else:
        list_of_dicts[match_idx] = new_dict
    return list_of_dicts
def get_new_listen_key() -> str:
    """Request a fresh user-data-stream listen key from the Aster REST API.

    Side effect: records the request time in the module-level
    LISTEN_KEY_LAST_UPDATE_TS_S (seconds since epoch, local clock).

    Returns:
        The new listen key string.

    Raises:
        ValueError: if the authenticated response contains no 'listenKey'.
    """
    global LISTEN_KEY_LAST_UPDATE_TS_S
    listen_key_request = {
        "url": "/fapi/v3/listenKey",
        "method": "POST",
        "params": {}
    }
    r = aster_auth.post_authenticated_url(listen_key_request)
    listen_key = r.get('listenKey', None)
    # NOTE(review): this prints the listen key to stdout — consider logging at
    # debug level instead if the key is considered sensitive.
    print(f'LISTEN KEY: {listen_key}')
    if listen_key is not None:
        LISTEN_KEY_LAST_UPDATE_TS_S = round(datetime.now().timestamp())
        return listen_key
    else:
        raise ValueError(f'Listen Key is None; Failed to Update. response: {r}')
async def listen_key_interval():
    """Background task: refresh the global LISTEN_KEY every
    LISTEN_KEY_PUT_INTERVAL_SEC seconds, forever.

    NOTE(review): this POSTs for a brand-new key each interval rather than
    sending a PUT keepalive for the existing key — confirm that is intended,
    since the active websocket stays subscribed to the old key's stream.
    """
    global LISTEN_KEY
    while True:
        await asyncio.sleep(LISTEN_KEY_PUT_INTERVAL_SEC)
        LISTEN_KEY = get_new_listen_key()
### Websocket ###
async def ws_stream():
async for websocket in websockets.connect(WSS_URL):
global LISTEN_KEY
global LOCAL_RECENT_ORDERS
global LOCAL_RECENT_MARGIN_CALLS
global LOCAL_RECENT_BALANCES
global LOCAL_RECENT_POSITIONS
LISTEN_KEY = get_new_listen_key()
async for websocket in websockets.connect(WSS_URL+LISTEN_KEY):
logging.info(f"Connected to {WSS_URL}")
asyncio.create_task(listen_key_interval())
try:
async for message in websocket:
ts_arrival = round(datetime.now().timestamp()*1000)
if isinstance(message, str):
try:
data = json.loads(message)
channel = data.get('stream', None)
channel = data.get('e', None)
if channel is not None:
LOOKBACK_MIN_TS_MS = ts_arrival - (LOCAL_RECENT_UPDATES_LOOKBACK_SEC*1000)
match channel:
case :
continue
case :
# print(f'BT: {data}')
VAL_KEY_OBJ = json.dumps({
case 'ORDER_TRADE_UPDATE':
logging.info(f'ORDER_TRADE_UPDATE: {data}')
new_order_update = {
'timestamp_arrival': ts_arrival,
'timestamp_msg': data['data']['E'],
'timestamp_transaction': data['data']['T'],
'orderbook_update_id': data['data']['u'],
'symbol': data['data']['s'],
'best_bid_px': data['data']['b'],
'best_bid_qty': data['data']['B'],
'best_ask_px': data['data']['a'],
'best_ask_qty': data['data']['A'],
})
VAL_KEY.set(VK_TICKER, VAL_KEY_OBJ)
'timestamp_msg': data['E'],
'timestamp_transaction': data['T'],
'symbol': data['o']["s"], # "BTCUSDT", // Symbol
'client_order_id': data['o']["c"], # "TEST", // Client Order Id
'side': data['o']["S"], # "SELL", // Side
'order_type': data['o']["o"], # "TRAILING_STOP_MARKET", // Order Type
'time_in_force': data['o']["f"], # "GTC", // Time in Force
'original_qty': float(data['o']["q"]), # "0.001", // Original Quantity
'original_price': float(data['o']["p"]), # "0", // Original Price
'avg_price': float(data['o']["ap"]), # :"0", // Average Price
'stop_price': float(data['o'].get("sp", 0)), # :"7103.04", // Stop Price. Please ignore with TRAILING_STOP_MARKET order
'execution_type': data['o']["x"], # "NEW", // Execution Type
'order_status': data['o']["X"], # "NEW", // Order Status
'order_id': data['o']["i"], # 8886774, // Order Id
'last_filled_qty': float(data['o']["l"]), # "0", // Order Last Filled Quantity
'filled_accumulated_qty': float(data['o']["z"]), # "0", // Order Filled Accumulated Quantity
'last_filled_price': float(data['o']["L"]), # "0", // Last Filled Price
'commission_asset': data['o'].get("N", None), # "USDT", // Commission Asset, will not push if no commission
'commission': float(data['o'].get("n",0)), # "0", // Commission, will not push if no commission
'order_trade_time_ts': data['o']["T"], # 1568879465651, // Order Trade Time
'trade_id': data['o']["t"], # 0, // Trade Id
'bid_notional': float(data['o']["b"]), # "0", // Bids Notional
'ask_notional': float(data['o']["a"]), # "9.91", // Ask Notional
'trade_is_maker': data['o']["m"], # false, // Is this trade the maker side?
'trade_is_reduce_only': data['o']["R"], # false, // Is this reduce only
'stop_px_working_type': data['o']["wt"], # :"CONTRACT_PRICE", // Stop Price Working Type
'original_order_type': data['o']["ot"], # :"TRAILING_STOP_MARKET", // Original Order Type
'position_side': data['o']["ps"], # :"LONG", // Position Side
'pushed_w_conditional_order': bool(data['o'].get("cp", False)), # :false, // If Close-All, pushed with conditional order
'activation_price': float(data['o'].get("AP", 0)), # :"7476.89", // Activation Price, only puhed with TRAILING_STOP_MARKET order
'callback_rate': float(data['o'].get("cr", 0)), # :"5.0", // Callback Rate, only puhed with TRAILING_STOP_MARKET order
'realized_profit': float(data['o']["rp"]), # :"0" // Realized Profit of the trade
}
LOCAL_RECENT_ORDERS = upsert_list_of_dicts_by_id(LOCAL_RECENT_ORDERS, new_order_update, id='client_order_id')
LOCAL_RECENT_ORDERS = [t for t in LOCAL_RECENT_ORDERS if t.get('timestamp_arrival', 0) >= LOOKBACK_MIN_TS_MS]
VAL_KEY_OBJ = json.dumps(LOCAL_RECENT_ORDERS)
VAL_KEY.set(VK_ORDERS_TRADES, VAL_KEY_OBJ)
await db.insert_df_to_mysql(table_name='fr_aster_user_order_trade', params=new_order_update, CON=CON)
continue
case 'MARGIN_CALL':
logging.info(f'MARGIN_CALL: {data}')
list_for_df = []
for p in list(data['p']):
margin_call_update = {
'timestamp_arrival': ts_arrival,
'timestamp_msg': data['E'],
'cross_wallet_balance': float(data.get('cw', 0)),
'symbol': p["s"], # "ETHUSDT", // Symbol
'position_side': p["ps"], # :"LONG", // Position Side
'position_amount': float(p["pa"]), # :"1.327", // Position Amount
'margin_type': p["mt"], # :"CROSSED", // Margin Type
'isolated_wallet': float(p.get("iw", 0)), # :"0", // Isolated Wallet (if isolated position)
'mark_price': float(p["mp"]), # :"187.17127", // Mark Price
'unrealized_pnl': float(p["up"]), # :"-1.166074", // Unrealized PnL
'maint_margin_required': float(p["mm"]), # :"1.614445" // Maintenance Margin Required
}
list_for_df.append(margin_call_update)
LOCAL_RECENT_MARGIN_CALLS = upsert_list_of_dicts_by_id(LOCAL_RECENT_MARGIN_CALLS, margin_call_update, id='symbol')
LOCAL_RECENT_MARGIN_CALLS = [t for t in LOCAL_RECENT_MARGIN_CALLS if t.get('timestamp_arrival', 0) >= LOOKBACK_MIN_TS_MS]
VAL_KEY_OBJ = json.dumps(LOCAL_RECENT_MARGIN_CALLS)
VAL_KEY.set(VK_MARGIN_CALLS, VAL_KEY_OBJ)
await db.insert_df_to_mysql(table_name='fr_aster_user_margin', params=list_for_df, CON=CON)
continue
case 'ACCOUNT_UPDATE':
logging.info(f'ACCOUNT_UPDATE: {data}')
list_for_df_bal = []
list_for_df_pos = []
### Balance Updates ###
if len(list(data['a']['B'])) > 0:
for b in list(data['a']['B']):
balance_update = {
'timestamp_arrival': ts_arrival,
'timestamp_msg': data['E'],
'timestamp_transaction': data['T'],
'event_reason_type': data['a']["m"],
'asset': b['a'],
'wallet_balance': float(b['wb']),
'cross_wallet_balance': float(b.get('cw', 0)),
'balance_change_excl_pnl_comms': float(b['bc']),
}
list_for_df_bal.append(balance_update)
LOCAL_RECENT_BALANCES = upsert_list_of_dicts_by_id(LOCAL_RECENT_BALANCES, balance_update, id='asset')
LOCAL_RECENT_BALANCES = [t for t in LOCAL_RECENT_BALANCES if t.get('timestamp_arrival', 0) >= LOOKBACK_MIN_TS_MS]
VAL_KEY.set(VK_BALANCES, json.dumps(LOCAL_RECENT_BALANCES))
### Position Updates ###
if len(list(data['a']['P'])) > 0:
for p in list(data['a']['P']):
position_update = {
'timestamp_arrival': ts_arrival,
'timestamp_msg': data['E'],
'timestamp_transaction': data['T'],
'event_reason_type': data['a']["m"],
'symbol': p['s'],
'position_amount': float(p['pa']),
'entry_price': float(p['ep']),
'accumulated_realized_pre_fees': float(p['cr']),
'unrealized_pnl': float(p['up']),
'margin_type': p['mt'],
'isolated_wallet': float(p.get('iw', 0)),
'position_side': p['ps'],
}
list_for_df_pos.append(position_update)
LOCAL_RECENT_POSITIONS = upsert_list_of_dicts_by_id(LOCAL_RECENT_POSITIONS, position_update, id='symbol')
LOCAL_RECENT_POSITIONS = [t for t in LOCAL_RECENT_POSITIONS if t.get('timestamp_arrival', 0) >= LOOKBACK_MIN_TS_MS]
VAL_KEY.set(VK_POSITIONS, json.dumps(LOCAL_RECENT_POSITIONS))
if balance_update:
await db.insert_df_to_mysql(table_name='fr_aster_user_account_bal', params=list_for_df_bal, CON=CON)
if position_update:
await db.insert_df_to_mysql(table_name='fr_aster_user_account_pos', params=list_for_df_bal, CON=CON)
continue
case 'listenKeyExpired':
raise('Listen Key Has Expired; Failed to Update Properly. Restarting.')
case _:
logging.warning(f'UNMATCHED OTHER MSG: {data}')
else:
@@ -170,9 +268,12 @@ async def main():
logging.warning("VALKEY NOT BEING USED, NO DATA WILL BE PUBLISHED")
if USE_DB:
engine = create_async_engine('mysql+asyncmy://root:pwd@localhost/polymarket')
engine = create_async_engine('mysql+asyncmy://root:pwd@localhost/fund_rate')
async with engine.connect() as CON:
# await create_rtds_btcusd_table(CON=CON)
await aster_db.create_fr_aster_user_order_trade_table(CON=CON)
await aster_db.create_fr_aster_user_margin_table(CON=CON)
await aster_db.create_fr_aster_user_account_bal(CON=CON)
await aster_db.create_fr_aster_user_account_pos(CON=CON)
await ws_stream()
else:
CON = None

19
ws_aster_user/Dockerfile Normal file
View File

@@ -0,0 +1,19 @@
FROM python:3.13-slim

# Build toolchain for pip packages that compile C extensions.
# The apt cache must be removed in the SAME layer as the install:
# a separate `RUN rm -rf /var/lib/apt/lists/*` leaves the cache baked
# into the earlier layer and does not shrink the image.
RUN apt-get update && \
    apt-get install -y build-essential && \
    rm -rf /var/lib/apt/lists/*
# Sanity check that the compiler is available.
RUN gcc --version

WORKDIR /app

# Copy requirements alone first so source-only edits keep the pip layer cached.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

COPY . .

# Run the stream worker directly (gunicorn entrypoint kept for reference).
CMD [ "python", "ws_aster_user.py"]
# CMD [ "gunicorn", "--workers=5", "--threads=1", "-b 0.0.0.0:8000", "app:server"]

View File

@@ -132,7 +132,7 @@ async def ws_stream():
try:
data = json.loads(message)
if data.get('data', None) is not None:
print(f'FR: {data}')
# print(f'FR: {data}')
fr_next_update_ts = (time_round_down(dt=datetime.now(timezone.utc), interval_mins=60)+(60*60))*1000
VAL_KEY_OBJ = json.dumps({
'sequence_id': data['seq'],
@@ -173,7 +173,7 @@ async def main():
logging.warning("VALKEY NOT BEING USED, NO DATA WILL BE PUBLISHED")
if USE_DB:
engine = create_async_engine('mysql+asyncmy://root:pwd@localhost/polymarket')
engine = create_async_engine('mysql+asyncmy://root:pwd@localhost/fund_rate')
async with engine.connect() as CON:
# await create_rtds_btcusd_table(CON=CON)
await ws_stream()

View File

@@ -32,7 +32,7 @@ VAL_KEY = None
### Logging ###
load_dotenv()
LOG_FILEPATH: str = os.getenv("LOGS_PATH") + '/Fund_Rate_Extended.log'
LOG_FILEPATH: str = os.getenv("LOGS_PATH") + '/Fund_Rate_Extended_OB.log'
### CONSTANTS ###
WS_SYMBOL: str = 'ETH-USD'
@@ -40,76 +40,6 @@ WS_SYMBOL: str = 'ETH-USD'
### Globals ###
WSS_URL = f"wss://api.starknet.extended.exchange/stream.extended.exchange/v1/orderbooks/{WS_SYMBOL}?depth=1"
# HIST_TRADES = np.empty((0, 3))
# HIST_TRADES_LOOKBACK_SEC = 6
# ### Database Funcs ###
# async def create_rtds_btcusd_table(
# CON: AsyncContextManager,
# engine: str = 'mysql', # mysql | duckdb
# ) -> None:
# if CON is None:
# logging.info("NO DB CONNECTION, SKIPPING Create Statements")
# else:
# if engine == 'mysql':
# logging.info('Creating Table if Does Not Exist: binance_btcusd_trades')
# await CON.execute(text("""
# CREATE TABLE IF NOT EXISTS binance_btcusd_trades (
# timestamp_arrival BIGINT,
# timestamp_msg BIGINT,
# timestamp_value BIGINT,
# value DOUBLE,
# qty DOUBLE
# );
# """))
# await CON.commit()
# else:
# raise ValueError('Only MySQL engine is implemented')
# async def insert_rtds_btcusd_table(
# timestamp_arrival: int,
# timestamp_msg: int,
# timestamp_value: int,
# value: float,
# qty: float,
# CON: AsyncContextManager,
# engine: str = 'mysql', # mysql | duckdb
# ) -> None:
# params={
# 'timestamp_arrival': timestamp_arrival,
# 'timestamp_msg': timestamp_msg,
# 'timestamp_value': timestamp_value,
# 'value': value,
# 'qty': qty,
# }
# if CON is None:
# logging.info("NO DB CONNECTION, SKIPPING Insert Statements")
# else:
# if engine == 'mysql':
# await CON.execute(text("""
# INSERT INTO binance_btcusd_trades
# (
# timestamp_arrival,
# timestamp_msg,
# timestamp_value,
# value,
# qty
# )
# VALUES
# (
# :timestamp_arrival,
# :timestamp_msg,
# :timestamp_value,
# :value,
# :qty
# )
# """),
# parameters=params
# )
# await CON.commit()
# else:
# raise ValueError('Only MySQL engine is implemented')
### Websocket ###
async def ws_stream():
async for websocket in websockets.connect(WSS_URL):
@@ -162,7 +92,7 @@ async def main():
logging.warning("VALKEY NOT BEING USED, NO DATA WILL BE PUBLISHED")
if USE_DB:
engine = create_async_engine('mysql+asyncmy://root:pwd@localhost/polymarket')
engine = create_async_engine('mysql+asyncmy://root:pwd@localhost/fund_rate')
async with engine.connect() as CON:
# await create_rtds_btcusd_table(CON=CON)
await ws_stream()

273
ws_extended_user.py Normal file
View File

@@ -0,0 +1,273 @@
import asyncio
import json
import logging
import socket
import traceback
from datetime import datetime, timezone
from typing import AsyncContextManager
import math
import numpy as np
import pandas as pd
import requests.packages.urllib3.util.connection as urllib3_cn # type: ignore
from sqlalchemy import text
import websockets
from sqlalchemy.ext.asyncio import create_async_engine
import valkey
import os
from dotenv import load_dotenv
import modules.extended_db as extended_db
import modules.db as db
### Allow only ipv4 ###
def allowed_gai_family():
    # Force urllib3's getaddrinfo to IPv4 so requests never picks an IPv6 route.
    return socket.AF_INET
urllib3_cn.allowed_gai_family = allowed_gai_family
### Database ###
# Feature flags: toggle MySQL persistence and Valkey publishing respectively.
USE_DB: bool = True
USE_VK: bool = True
# Valkey keys under which the stream publishes recent state as JSON.
VK_ORDERS = 'fr_extended_user_orders'
VK_TRADES = 'fr_extended_user_trades'
VK_BALANCES = 'fr_extended_user_balances'
VK_POSITIONS = 'fr_extended_user_positions'
# Shared handles, assigned in main(): async DB connection and Valkey client.
CON: AsyncContextManager | None = None
VAL_KEY = None
### Logging ###
load_dotenv()
# NOTE(review): os.getenv returns None when LOGS_PATH is unset, which would
# raise TypeError here — assumes the env var is always present.
LOG_FILEPATH: str = os.getenv("LOGS_PATH") + '/Fund_Rate_Extended_User.log'
### CONSTANTS ###
WSS_URL = "wss://api.starknet.extended.exchange/stream.extended.exchange/v1/account"
API_KEY = os.getenv('EXTENDED_API_KEY')
# How far back (seconds, by arrival time) the LOCAL_RECENT_* caches retain entries.
LOCAL_RECENT_UPDATES_LOOKBACK_SEC = 30
### Globals ###
# Rolling caches of the most recent account events, trimmed on each message.
LOCAL_RECENT_ORDERS: list = []
LOCAL_RECENT_TRADES: list = []
LOCAL_RECENT_BALANCES: list = []
LOCAL_RECENT_POSITIONS: list = []
def upsert_list_of_dicts_by_id(list_of_dicts, new_dict, id='id', seq_check_field: str | None = None):
for index, item in enumerate(list_of_dicts):
if item.get(id) == new_dict.get(id):
if seq_check_field is not None:
if item.get(seq_check_field) < new_dict.get(seq_check_field):
logging.info('Skipping out of sequence msg')
return list_of_dicts
list_of_dicts[index] = new_dict
return list_of_dicts
list_of_dicts.append(new_dict)
return list_of_dicts
### Websocket ###
async def ws_stream():
global LOCAL_RECENT_ORDERS
global LOCAL_RECENT_TRADES
global LOCAL_RECENT_BALANCES
global LOCAL_RECENT_POSITIONS
async for websocket in websockets.connect(WSS_URL, extra_headers={'X-Api-Key': API_KEY}):
logging.info(f"Connected to {WSS_URL}")
try:
async for message in websocket:
ts_arrival = round(datetime.now().timestamp()*1000)
if isinstance(message, str):
try:
data = json.loads(message)
channel = data.get('type', None)
if channel is not None:
LOOKBACK_MIN_TS_MS = ts_arrival - (LOCAL_RECENT_UPDATES_LOOKBACK_SEC*1000)
match channel:
case 'ORDER':
list_for_df = []
for o in data['data']['orders']:
order_update = {
'sequence_id': data['seq'],
'timestamp_arrival': ts_arrival,
'timestamp_msg': data['ts'],
'order_id': o['id'],
'account_id': o['accountId'],
'external_id': o.get('externalId', None),
'market': o['market'],
'type': o['type'],
'side': o['side'],
'status': o['status'],
'status_reason': o.get('statusReason', None),
'price': float(o.get('price', 0)),
'averagePrice': float(o.get('averagePrice', 0)),
'qty': float(o['qty']),
'filled_qty': float(o.get('filledQty', 0)),
'payed_fee': float(o.get('payedFee', 0)),
# 'trigger_dict': o.get('trigger', None),
'tp_sl_type': o.get('tpSlType', None),
# 'take_profit_dict': o.get('takeProfit', None),
# 'stop_loss_dict': o.get('stopLoss', None),
'reduce_only': o.get('reduceOnly', False),
'post_only': o.get('postOnly', False),
'created_time_ts': o['createdTime'],
'updated_time_ts': o['updatedTime'],
'expire_time_ts': o['expireTime'],
}
list_for_df.append(order_update)
LOCAL_RECENT_ORDERS = upsert_list_of_dicts_by_id(LOCAL_RECENT_ORDERS, order_update, id='order_id', seq_check_field='sequence_id')
LOCAL_RECENT_ORDERS = [t for t in LOCAL_RECENT_ORDERS if t.get('timestamp_arrival', 0) >= LOOKBACK_MIN_TS_MS]
VAL_KEY_OBJ = json.dumps(LOCAL_RECENT_ORDERS)
VAL_KEY.set(VK_ORDERS, VAL_KEY_OBJ)
await db.insert_df_to_mysql(table_name='fr_extended_user_order', params=list_for_df, CON=CON)
continue
case 'TRADE':
list_for_df = []
for t in data['data']['trades']:
trade_update = {
'sequence_id': data['seq'],
'timestamp_arrival': ts_arrival,
'timestamp_msg': data['ts'],
'trade_id': t['id'],
'account_id': t['accountId'],
'market': t['market'],
'order_id': t['orderId'],
'external_order_id': t.get('externalOrderId', None),
'side': t['side'],
'price': float(t['price']),
'qty': float(t['qty']),
'value': float(t['value']),
'fee': float(t['fee']),
'trade_type': t['tradeType'],
'created_time_ts': t['createdTime'],
'is_taker': t['isTaker'],
}
LOCAL_RECENT_TRADES = upsert_list_of_dicts_by_id(LOCAL_RECENT_TRADES, trade_update, id='trade_id', seq_check_field='sequence_id')
LOCAL_RECENT_TRADES = [t for t in LOCAL_RECENT_TRADES if t.get('timestamp_arrival', 0) >= LOOKBACK_MIN_TS_MS]
VAL_KEY_OBJ = json.dumps(LOCAL_RECENT_TRADES)
VAL_KEY.set(VK_TRADES, VAL_KEY_OBJ)
await db.insert_df_to_mysql(table_name='fr_extended_user_trade', params=list_for_df, CON=CON)
continue
case 'BALANCE':
balance_update = {
'sequence_id': data['seq'],
'timestamp_arrival': ts_arrival,
'timestamp_msg': data['ts'],
'collateral_name': data['data']['balance']['collateralName'],
'balance': float(data['data']['balance']['balance']),
'equity': float(data['data']['balance']['equity']),
'available_for_trade': float(data['data']['balance']['availableForTrade']),
'available_for_withdrawal': float(data['data']['balance']['availableForWithdrawal']),
'unrealised_pnl': float(data['data']['balance']['unrealisedPnl']),
'initial_margin': float(data['data']['balance']['initialMargin']),
'margin_ratio': float(data['data']['balance']['marginRatio']),
'updated_time_ts': data['data']['balance']['updatedTime'],
'exposure': float(data['data']['balance']['exposure']),
'leverage': float(data['data']['balance']['leverage']),
}
LOCAL_RECENT_BALANCES = upsert_list_of_dicts_by_id(LOCAL_RECENT_BALANCES, balance_update, id='collateral_name', seq_check_field='sequence_id')
LOCAL_RECENT_BALANCES = [t for t in LOCAL_RECENT_BALANCES if t.get('timestamp_arrival', 0) >= LOOKBACK_MIN_TS_MS]
VAL_KEY_OBJ = json.dumps(LOCAL_RECENT_BALANCES)
VAL_KEY.set(VK_BALANCES, VAL_KEY_OBJ)
await db.insert_df_to_mysql(table_name='fr_extended_user_balance', params=balance_update, CON=CON)
continue
case 'POSITION':
list_for_df = []
for p in data['data']['positions']:
position_update = {
'sequence_id': data['seq'],
'timestamp_arrival': ts_arrival,
'timestamp_msg': data['ts'],
'position_id': p['id'],
'account_id': p['accountId'],
'market': p['market'],
'side': p['side'],
'leverage': float(p['leverage']),
'size': float(p['size']),
'value': float(p['value']),
'open_price': float(p['openPrice']),
'mark_price': float(p['markPrice']),
'liquidation_price': float(p['liquidationPrice']),
'margin': float(p['margin']),
'unrealised_pnl': float(p['unrealisedPnl']),
'realised_pnl': float(p['realisedPnl']),
'tp_trigger_price': float(p.get('tpTriggerPrice', 0)),
'tp_limit_price': float(p.get('tpLimitPrice', 0)),
'sl_trigger_price': float(p.get('slTriggerPrice', 0)),
'sl_limit_price': float(p.get('slLimitPrice', 0)),
'adl_percentile': p['adl'], # closer to 100 means higher chance of auto-deleveraging
'created_at_ts': p['createdAt'],
'updated_at_ts': p['updatedAt'],
}
LOCAL_RECENT_POSITIONS = upsert_list_of_dicts_by_id(LOCAL_RECENT_POSITIONS, position_update, id='position_id', seq_check_field='sequence_id')
LOCAL_RECENT_POSITIONS = [t for t in LOCAL_RECENT_POSITIONS if t.get('timestamp_arrival', 0) >= LOOKBACK_MIN_TS_MS]
VAL_KEY_OBJ = json.dumps(LOCAL_RECENT_POSITIONS)
VAL_KEY.set(VK_POSITIONS, VAL_KEY_OBJ)
await db.insert_df_to_mysql(table_name='fr_extended_user_position', params=list_for_df, CON=CON)
continue
case _:
logging.warning(f'UNMATCHED OTHER MSG: {data}')
else:
logging.info(f'Initial or unexpected data struct, skipping: {data}')
continue
except (json.JSONDecodeError, ValueError):
logging.warning(f'Message not in JSON format, skipping: {message}')
continue
else:
raise ValueError(f'Type: {type(data)} not expected: {message}')
except websockets.ConnectionClosed as e:
logging.error(f'Connection closed: {e}')
logging.error(traceback.format_exc())
continue
except Exception as e:
logging.error(f'Connection closed: {e}')
logging.error(traceback.format_exc())
async def main():
    """Wire up Valkey and MySQL per the USE_VK / USE_DB flags, then stream.

    The ``async with ... as CON`` assigns the module-level connection handle
    (declared global) that ws_stream's DB inserts rely on.
    """
    global VAL_KEY
    global CON
    VAL_KEY = valkey.Valkey(host='localhost', port=6379, db=0) if USE_VK else None
    if VAL_KEY is None:
        logging.warning("VALKEY NOT BEING USED, NO DATA WILL BE PUBLISHED")
    if not USE_DB:
        CON = None
        logging.warning("DATABASE NOT BEING USED, NO DATA WILL BE RECORDED")
        await ws_stream()
        return
    engine = create_async_engine('mysql+asyncmy://root:pwd@localhost/fund_rate')
    async with engine.connect() as CON:
        # Ensure every target table exists before the stream starts inserting.
        for create_table in (
            extended_db.create_fr_extended_user_balance,
            extended_db.create_fr_extended_user_order,
            extended_db.create_fr_extended_user_position,
            extended_db.create_fr_extended_user_trade,
        ):
            await create_table(CON=CON)
        await ws_stream()
if __name__ == '__main__':
    # Epoch-milliseconds start stamp, taken before logging is configured.
    START_TIME = round(datetime.now().timestamp()*1000)
    # NOTE(review): this info call precedes basicConfig, so it goes through
    # the default handler (stderr), not the log file — confirm intended.
    logging.info(f'Log FilePath: {LOG_FILEPATH}')
    # force=True replaces any handlers a library may have installed;
    # filemode='w' truncates the previous run's log file.
    logging.basicConfig(
        force=True,
        filename=LOG_FILEPATH,
        level=logging.INFO,
        format='%(asctime)s - %(levelname)s - %(message)s',
        filemode='w'
    )
    logging.info(f"STARTED: {START_TIME}")
    try:
        asyncio.run(main())
    except KeyboardInterrupt:
        # Clean shutdown on Ctrl-C.
        logging.info("Stream stopped")

View File

@@ -0,0 +1,19 @@
FROM python:3.13-slim

# Build toolchain for pip packages that compile C extensions.
# The apt cache must be removed in the SAME layer as the install:
# a separate `RUN rm -rf /var/lib/apt/lists/*` leaves the cache baked
# into the earlier layer and does not shrink the image.
RUN apt-get update && \
    apt-get install -y build-essential && \
    rm -rf /var/lib/apt/lists/*
# Sanity check that the compiler is available.
RUN gcc --version

WORKDIR /app

# Copy requirements alone first so source-only edits keep the pip layer cached.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

COPY . .

# Run the stream worker directly (gunicorn entrypoint kept for reference).
CMD [ "python", "ws_extended_user.py"]
# CMD [ "gunicorn", "--workers=5", "--threads=1", "-b 0.0.0.0:8000", "app:server"]