From 539e6004cf52933ec920d47025eecafc570a7dba Mon Sep 17 00:00:00 2001 From: stevekeyharvey Date: Thu, 23 Apr 2026 03:11:52 +0000 Subject: [PATCH] feedhandlers extended --- aster.ipynb | 227 +++++++++++++- docker-compose.yml | 34 ++- extended.ipynb | 223 +++++++++++++- main.py | 23 +- .../__pycache__/aster_auth.cpython-313.pyc | Bin 0 -> 3979 bytes modules/__pycache__/aster_db.cpython-313.pyc | Bin 0 -> 6045 bytes modules/__pycache__/db.cpython-313.pyc | Bin 0 -> 1532 bytes .../__pycache__/extended_auth.cpython-313.pyc | Bin 0 -> 1707 bytes .../__pycache__/extended_db.cpython-313.pyc | Bin 0 -> 6159 bytes modules/{aster_auth_api.py => aster_auth.py} | 0 modules/aster_db.py | 147 +++++++++ modules/db.py | 27 ++ modules/extended_auth.py | 30 ++ modules/extended_db.py | 152 ++++++++++ ws_apex.py | 2 +- ws_aster.py | 4 +- ws_aster_user.py | 287 ++++++++++++------ ws_aster_user/Dockerfile | 19 ++ ws_extended_fund_rate.py | 4 +- ws_extended_orderbook.py | 74 +---- ws_extended_user.py | 273 +++++++++++++++++ ws_extended_user/Dockerfile | 19 ++ 22 files changed, 1340 insertions(+), 205 deletions(-) create mode 100644 modules/__pycache__/aster_auth.cpython-313.pyc create mode 100644 modules/__pycache__/aster_db.cpython-313.pyc create mode 100644 modules/__pycache__/db.cpython-313.pyc create mode 100644 modules/__pycache__/extended_auth.cpython-313.pyc create mode 100644 modules/__pycache__/extended_db.cpython-313.pyc rename modules/{aster_auth_api.py => aster_auth.py} (100%) create mode 100644 modules/aster_db.py create mode 100644 modules/db.py create mode 100644 modules/extended_auth.py create mode 100644 modules/extended_db.py create mode 100644 ws_aster_user/Dockerfile create mode 100644 ws_extended_user.py create mode 100644 ws_extended_user/Dockerfile diff --git a/aster.ipynb b/aster.ipynb index 9f1d669..a01e580 100644 --- a/aster.ipynb +++ b/aster.ipynb @@ -2,17 +2,17 @@ "cells": [ { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "id": 
"3a269644", "metadata": {}, "outputs": [], "source": [ - "import modules.aster_auth_api as aster_auth_api" + "import modules.aster_auth as aster_auth" ] }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 27, "id": "4395fabb", "metadata": {}, "outputs": [], @@ -57,17 +57,17 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "id": "2122885a", "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "{'orderId': 17340952438,\n", + "{'orderId': 17341253499,\n", " 'symbol': 'ETHUSDT',\n", " 'status': 'NEW',\n", - " 'clientOrderId': '57UkyWpQE4iRP74eAwt7Hf',\n", + " 'clientOrderId': 'J1SvsvvDhlDAqwGau1T77F',\n", " 'price': '2100',\n", " 'avgPrice': '0.00000',\n", " 'origQty': '0.010',\n", @@ -84,18 +84,18 @@ " 'workingType': 'CONTRACT_PRICE',\n", " 'priceProtect': False,\n", " 'origType': 'LIMIT',\n", - " 'updateTime': 1776835456300,\n", - " 'newChainData': {'hash': '0x12456fb0e14021975aa03e56adb339f8b906710ec3a8604a73a8b697c7ee1225'}}" + " 'updateTime': 1776838867250,\n", + " 'newChainData': {'hash': '0xa2c23513491587b2961bfa77d2077489a14522d7ce359a9e3635e7c4f0d12a22'}}" ] }, - "execution_count": 9, + "execution_count": 28, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "j = aster_auth_api.post_authenticated_url(post_order)\n", - "j" + "# j = aster_auth_api.post_authenticated_url(post_order)\n", + "# j" ] }, { @@ -103,9 +103,197 @@ "execution_count": null, "id": "45d68bf4", "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "1776836583531" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from datetime import datetime\n", + "round(datetime.now().timestamp())" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "1800" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "30*60" + ] + }, + { 
+ "cell_type": "code", + "execution_count": null, + "id": "67a3bbb2", + "metadata": {}, "outputs": [], "source": [] }, + { + "cell_type": "code", + "execution_count": 23, + "id": "e958e7da", + "metadata": {}, + "outputs": [], + "source": [ + "import json\n", + "\n", + "### User Data Stream ###\n", + "# After posting limit order to rest\n", + "after_order_post = json.loads('{\"e\":\"ORDER_TRADE_UPDATE\",\"T\":1776836992350,\"E\":1776836992388,\"o\":{\"s\":\"ETHUSDT\",\"c\":\"KFi375tZh5kzHsQJWKMJHe\",\"S\":\"BUY\",\"o\":\"LIMIT\",\"f\":\"GTC\",\"q\":\"0.010\",\"p\":\"2100\",\"ap\":\"0\",\"sp\":\"0\",\"x\":\"NEW\",\"X\":\"NEW\",\"i\":17341121450,\"l\":\"0\",\"z\":\"0\",\"L\":\"0\",\"T\":1776836992350,\"t\":0,\"b\":\"21\",\"a\":\"0\",\"m\":false,\"R\":false,\"wt\":\"CONTRACT_PRICE\",\"ot\":\"LIMIT\",\"ps\":\"BOTH\",\"cp\":false,\"rp\":\"0\",\"pP\":false,\"si\":0,\"ss\":0,\"h\":\"0xeec41a368072d5a47c13b3ad83703eded2e2b8cf9ff427095beb8b7684968db0\"}}')\n", + "\n", + "# After order cancellation in GUI\n", + "after_order_cxl = json.loads('{\"e\":\"ORDER_TRADE_UPDATE\",\"T\":1776836998500,\"E\":1776836998533,\"o\":{\"s\":\"ETHUSDT\",\"c\":\"KFi375tZh5kzHsQJWKMJHe\",\"S\":\"BUY\",\"o\":\"LIMIT\",\"f\":\"GTC\",\"q\":\"0.010\",\"p\":\"2100\",\"ap\":\"0\",\"sp\":\"0\",\"x\":\"CANCELED\",\"X\":\"CANCELED\",\"i\":17341121450,\"l\":\"0\",\"z\":\"0\",\"L\":\"0\",\"T\":1776836998500,\"t\":0,\"b\":\"0\",\"a\":\"0\",\"m\":false,\"R\":false,\"wt\":\"CONTRACT_PRICE\",\"ot\":\"LIMIT\",\"ps\":\"BOTH\",\"cp\":false,\"rp\":\"0\",\"pP\":false,\"si\":0,\"ss\":0,\"h\":\"0x116bb48a4b41420aebbc76343d6f55f22d1ccbc36b04462e6172571fda836599\"}}')\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'e': 'ORDER_TRADE_UPDATE',\n", + " 'T': 1776836992350,\n", + " 'E': 1776836992388,\n", + " 'o': {'s': 'ETHUSDT',\n", + " 'c': 'KFi375tZh5kzHsQJWKMJHe',\n", + " 'S': 'BUY',\n", + " 'o': 'LIMIT',\n", + " 'f': 
'GTC',\n", + " 'q': '0.010',\n", + " 'p': '2100',\n", + " 'ap': '0',\n", + " 'sp': '0',\n", + " 'x': 'NEW',\n", + " 'X': 'NEW',\n", + " 'i': 17341121450,\n", + " 'l': '0',\n", + " 'z': '0',\n", + " 'L': '0',\n", + " 'T': 1776836992350,\n", + " 't': 0,\n", + " 'b': '21',\n", + " 'a': '0',\n", + " 'm': False,\n", + " 'R': False,\n", + " 'wt': 'CONTRACT_PRICE',\n", + " 'ot': 'LIMIT',\n", + " 'ps': 'BOTH',\n", + " 'cp': False,\n", + " 'rp': '0',\n", + " 'pP': False,\n", + " 'si': 0,\n", + " 'ss': 0,\n", + " 'h': '0xeec41a368072d5a47c13b3ad83703eded2e2b8cf9ff427095beb8b7684968db0'}}" + ] + }, + "execution_count": 24, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "after_order_post" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "id": "1ea320f2", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'e': 'ORDER_TRADE_UPDATE',\n", + " 'T': 1776836998500,\n", + " 'E': 1776836998533,\n", + " 'o': {'s': 'ETHUSDT',\n", + " 'c': 'KFi375tZh5kzHsQJWKMJHe',\n", + " 'S': 'BUY',\n", + " 'o': 'LIMIT',\n", + " 'f': 'GTC',\n", + " 'q': '0.010',\n", + " 'p': '2100',\n", + " 'ap': '0',\n", + " 'sp': '0',\n", + " 'x': 'CANCELED',\n", + " 'X': 'CANCELED',\n", + " 'i': 17341121450,\n", + " 'l': '0',\n", + " 'z': '0',\n", + " 'L': '0',\n", + " 'T': 1776836998500,\n", + " 't': 0,\n", + " 'b': '0',\n", + " 'a': '0',\n", + " 'm': False,\n", + " 'R': False,\n", + " 'wt': 'CONTRACT_PRICE',\n", + " 'ot': 'LIMIT',\n", + " 'ps': 'BOTH',\n", + " 'cp': False,\n", + " 'rp': '0',\n", + " 'pP': False,\n", + " 'si': 0,\n", + " 'ss': 0,\n", + " 'h': '0x116bb48a4b41420aebbc76343d6f55f22d1ccbc36b04462e6172571fda836599'}}" + ] + }, + "execution_count": 25, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "after_order_cxl" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "id": "07ef4360", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Timestamp('2019-09-19 07:51:05.651000')" + ] + }, 
+ "execution_count": 26, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import pandas as pd\n", + "\n", + "pd.to_datetime(1568879465651, unit='ms')" + ] + }, { "cell_type": "code", "execution_count": null, @@ -116,7 +304,22 @@ { "cell_type": "code", "execution_count": null, - "id": "fd513311", + "id": "284b7266", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "98ed3a27", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, "metadata": {}, "outputs": [], "source": [] diff --git a/docker-compose.yml b/docker-compose.yml index 8abfeae..379722e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,3 +1,5 @@ +# tail -f Fund_Rate_Aster_User.log Fund_Rate_Aster.log Fund_Rate_Extended_FR.log Fund_Rate_Extended_OB.log Fund_Rate_Extended_User.log + services: ws_aster: container_name: ws_aster @@ -9,6 +11,16 @@ services: - /home/ubuntu/data:/home/ubuntu/data:rw # Read-write access to data - /home/ubuntu/logs:/home/ubuntu/logs:rw # Read-write access to data network_mode: "host" + ws_aster_user: + container_name: ws_aster_user + restart: "unless-stopped" + build: + context: ./ + dockerfile: ./ws_aster_user/Dockerfile + volumes: + - /home/ubuntu/data:/home/ubuntu/data:rw # Read-write access to data + - /home/ubuntu/logs:/home/ubuntu/logs:rw # Read-write access to data + network_mode: "host" ws_extended_fund_rate: container_name: ws_extended_fund_rate restart: "unless-stopped" @@ -29,18 +41,18 @@ services: - /home/ubuntu/data:/home/ubuntu/data:rw # Read-write access to data - /home/ubuntu/logs:/home/ubuntu/logs:rw # Read-write access to data network_mode: "host" + ws_extended_user: + container_name: ws_extended_user + restart: "unless-stopped" + build: + context: ./ + dockerfile: ./ws_extended_user/Dockerfile + volumes: + - /home/ubuntu/data:/home/ubuntu/data:rw # Read-write access to data + - 
/home/ubuntu/logs:/home/ubuntu/logs:rw # Read-write access to data + network_mode: "host" - - # ws_user: - # container_name: ws_user - # restart: "unless-stopped" - # build: - # context: ./ - # dockerfile: ./ws_user/Dockerfile - # volumes: - # - /home/ubuntu/data:/home/ubuntu/data:rw # Read-write access to data - # - /home/ubuntu/logs:/home/ubuntu/logs:rw # Read-write access to data - # network_mode: "host" + # ng: # container_name: ng # restart: "unless-stopped" diff --git a/extended.ipynb b/extended.ipynb index d0c0e03..2a1270e 100644 --- a/extended.ipynb +++ b/extended.ipynb @@ -2,15 +2,104 @@ "cells": [ { "cell_type": "code", - "execution_count": 15, - "id": "c9606e56", + "execution_count": null, + "id": "6c70a8c3", "metadata": {}, "outputs": [], "source": [ - "import math\n", - "from datetime import datetime, timezone" + "\n", + "import asyncio\n", + "import requests\n", + "from x10.config import MAINNET_CONFIG, TESTNET_CONFIG\n", + "from x10.core.stark_account import StarkPerpetualAccount\n", + "from x10.perpetual.trading_client import PerpetualTradingClient\n", + "from x10.models.order import OrderSide, OrderType\n", + "import time\n", + "from dotenv import load_dotenv\n", + "import os\n", + "import uuid\n", + "import asyncio\n", + "import logging\n", + "from decimal import Decimal\n", + "\n", + "\n" ] }, + { + "cell_type": "code", + "execution_count": null, + "id": "ff971ca9", + "metadata": {}, + "outputs": [], + "source": [ + "load_dotenv()\n", + "\n", + "API_KEY = os.getenv('EXTENDED_API_KEY')\n", + "PUBLIC_KEY = os.getenv('EXTENDED_STARK_KEY_PUBLIC') # public Stark key (l2Key from account info)\n", + "PRIVATE_KEY = os.getenv('EXTENDED_STARK_KEY_PRIVATE') # private Stark key (hex)\n", + "VAULT = int(os.getenv('EXTENDED_VAULT_NUMBER')) # l2Vault from account info (integer)\n", + "\n", + "CONFIG = MAINNET_CONFIG\n", + "\n", + "ORDER_MARKET = \"BTC-USD\"\n", + "ORDER_SIDE = OrderSide.BUY\n", + "ORDER_QTY = Decimal(\"0.001\")\n", + "ORDER_PRICE = 
Decimal(\"75000\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c366706f", + "metadata": {}, + "outputs": [], + "source": [ + "placed_order = await trading_client.place_order(\n", + " market_name=ORDER_MARKET,\n", + " amount_of_synthetic=ORDER_QTY,\n", + " price=ORDER_PRICE,\n", + " side=ORDER_SIDE,\n", + " taker_fee=Decimal(\"0.00025\")\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7cd3413d", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a7212988", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, { "cell_type": "code", "execution_count": 1, @@ -121,6 +210,132 @@ "(time_round_down(dt=datetime.now(timezone.utc), interval_mins=60)+(60*60))*1000" ] }, + { + "cell_type": "code", + "execution_count": null, + "id": "086dbfb3", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2d734f79", + "metadata": {}, + "outputs": [], + "source": [ + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "e6b59c51", + "metadata": {}, + "outputs": [], + "source": [ + "import json\n", + "\n", + "### USER DATA STREAM ###\n", + "order = json.loads('{\"type\":\"ORDER\",\"data\":{\"isSnapshot\":true,\"orders\":[]},\"ts\":1776839215443,\"seq\":1}')\n", + "position = json.loads('{\"type\":\"POSITION\",\"data\":{\"isSnapshot\":true,\"positions\":[]},\"ts\":1776839215443,\"seq\":2}')\n", + "balance = 
json.loads('{\"type\":\"BALANCE\",\"data\":{\"isSnapshot\":true,\"balance\":{\"collateralName\":\"USD\",\"balance\":\"25.979998\",\"status\":\"ACTIVE\",\"equity\":\"25.979998\",\"spotEquity\":\"0\",\"spotEquityForAvailableForTrade\":\"0\",\"availableForTrade\":\"25.979998\",\"availableForWithdrawal\":\"25.979998\",\"unrealisedPnl\":\"0\",\"initialMargin\":\"0.000000\",\"marginRatio\":\"0.0000\",\"updatedTime\":1776822250184,\"exposure\":\"0\",\"leverage\":\"0.0000\"}},\"ts\":1776839215443,\"seq\":3}')" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "ee8420c4", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'type': 'ORDER',\n", + " 'data': {'isSnapshot': True, 'orders': []},\n", + " 'ts': 1776839215443,\n", + " 'seq': 1}" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "order" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "e8d20d9f", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'type': 'POSITION',\n", + " 'data': {'isSnapshot': True, 'positions': []},\n", + " 'ts': 1776839215443,\n", + " 'seq': 2}" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "position" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'type': 'BALANCE',\n", + " 'data': {'isSnapshot': True,\n", + " 'balance': {'collateralName': 'USD',\n", + " 'balance': '25.979998',\n", + " 'status': 'ACTIVE',\n", + " 'equity': '25.979998',\n", + " 'spotEquity': '0',\n", + " 'spotEquityForAvailableForTrade': '0',\n", + " 'availableForTrade': '25.979998',\n", + " 'availableForWithdrawal': '25.979998',\n", + " 'unrealisedPnl': '0',\n", + " 'initialMargin': '0.000000',\n", + " 'marginRatio': '0.0000',\n", + " 'updatedTime': 1776822250184,\n", + " 'exposure': '0',\n", + " 'leverage': '0.0000'}},\n", + " 'ts': 
1776839215443,\n", + " 'seq': 3}" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "balance" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fb16dfb9", + "metadata": {}, + "outputs": [], + "source": [] + }, { "cell_type": "code", "execution_count": null, diff --git a/main.py b/main.py index 7f77fe1..b10f595 100644 --- a/main.py +++ b/main.py @@ -36,15 +36,22 @@ async def run_algo(): loop_start = time.time() print('__________Start___________') - ASTER_FUND_RATE = json.loads(VAL_KEY.get('fund_rate_aster')) - ASTER_TICKER = json.loads(VAL_KEY.get('fut_ticker_aster')) - print(f'ASTER FUND RATE: {ASTER_FUND_RATE}') - print(f'ASTER TICKER: {ASTER_TICKER}') + ASTER_FUND_RATE_DICT = json.loads(VAL_KEY.get('fund_rate_aster')) + ASTER_TICKER_DICT = json.loads(VAL_KEY.get('fut_ticker_aster')) + # print(f'ASTER FUND RATE: {ASTER_FUND_RATE}') + # print(f'ASTER TICKER: {ASTER_TICKER}') - EXTENDED_FUND_RATE = json.loads(VAL_KEY.get('fund_rate_extended')) - EXTENDED_TICKER = json.loads(VAL_KEY.get('fut_ticker_extended')) - print(f'EXTENDED FUND RATE: {EXTENDED_FUND_RATE}') - print(f'EXTENDED TICKER: {EXTENDED_TICKER}') + EXTENDED_FUND_RATE_DICT = json.loads(VAL_KEY.get('fund_rate_extended')) + EXTENDED_TICKER_DICT = json.loads(VAL_KEY.get('fut_ticker_extended')) + # print(f'EXTENDED FUND RATE: {EXTENDED_FUND_RATE}') + # print(f'EXTENDED TICKER: {EXTENDED_TICKER}') + + ASTER_FUND_RATE = float(ASTER_FUND_RATE_DICT.get('funding_rate', 0)) + EXTEND_FUND_RATE = float(EXTENDED_FUND_RATE_DICT.get('funding_rate', 0)) + + print(f''' + ASTER FR: {ASTER_FUND_RATE:.6%} | EXTEND FR: {EXTEND_FUND_RATE:.6%} + ''') time.sleep(5) diff --git a/modules/__pycache__/aster_auth.cpython-313.pyc b/modules/__pycache__/aster_auth.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5002381060537f782401ef1e22d85c5470c0b6c6 GIT binary patch literal 3979 
zcmb7H-EULL6`%Wk{q5LEhyx)en6L(H2NoIvEUY1ch5#G++FRHl+>3olyc=JexpqiW zRkBtq)!ls{p+bZ%Qqwmo@xVj>f&K?^{eU}UrL?6|w@*&lYOAUbJ#+mLHp_}K);%+G z=FFKhbI$qA_@%>PMNmHb=*GbxQ`U1FcS#Vnw?-SvY7QZcpT=; zF&^_~EnvZ{MJz(iO-L7IESs|Ygyo_YTm9%dJAjTg)kBYqb+;DXst1rF9Ag@NZ2v(S zu`T2HK^dtn<5bXX7kq9-zTTl&u6G>e6>Bg0n8%)JCTRPbE_Wr!5Zi1{O()a2f~L>A;)cAJP;D-^MwYHrxvwgi-e@8uI9H`p884gQ93Lj+ea z3afBoR^gkqponH)Qe;|3c*W8fi7<*4-xS;Gz)|(mqsj^iHo+zMgpd%A3d&C4$vTr@22cViyGED* z3ie$gysl<7A{qsp(WiqfhUh|srw2%+ISm_`oKB?FjGEEM(?lv}^um#2KO!A4l)1G4 zQ_kh|0#;K6BB|*#)(nGiSpyUi``Gy8@Zlq)xh%{j(q$M@r!`C?h;&Acf`*mMv>wN* zZoqW940Aa1sR*M7hUSO<&l2QKgRu*=VmBooAd7r}+Q1Wm?!zFqJkDfQyb!0m64OnK zvJHzC_o(^&Y$l~r8zS?eJ9qAcZsl+`RK&B5a7r&aCKn1bIX#-sM-GRFg6yK5Zd=m} zA*yoGnlLmRicSM#(Kk~lC57%4JhvS-ZeKkr%#~{{V6yZFw@* z_G4QXZ6%=&zGIOeO$n{hDXPgVv39G)L9fhgr525VDY+$a;wnNaK+BW~FO;h=AqPUY zg%9`ya~tCJwvb>HE(Ggs$%H`8U;#AXcdTiz9|h1JbO^oERGo$lxdqVE57zq8GgBb`+IXMewn9Lf4O=cjf zY)L(*r?liOl@s;c9T+mGGQXdkjLhV+TBLXrlD!xi)#kL>Ts{jtrLE&47jY$!lV zXuZ4tfqqY~b`O?qe{uAzclA8#{EhF|zAtxHyY`jOZ@9W1^;BJZ*4>^Zby-?wH+Jq> zl51VvOYDZ%w=!Pw?t8*kya&GZBB$@})ZMB77&H@CqHHw4{UKnD9+3!70nH5NIj|}D ziMEh>HCWmqbsZ)`NYQgP3#2FgGS#9DVsLxD*L(A7PP zIKl#KNQ*->cm0szAxLpQ&9bzgTIVksaL)ek!EhMw0wreh<6he80}|wjt&xl-R1-%t zBTe`=h`rTq;5a^HPQlbPOpPXX1B^&ErGgwjNJprYsokKE?DzXIjY#4$WlcjHi$&h# z1q=a60_SSlKtz3QRtnCb}rXLUgHv4qEGI)Be_e{;#w~~3-zbWxvYgzin zirhP%x%U3iwRiPG#Wl7jkG%v9=sULu-HM3>kHDFyh5HnpO?SoG4y1D$rF9wWOxy}` zES*fVQE$6e=cee!%@UWlkfW9Vkj?yl7@can7&wB-Y2ycQM3d9TiC7ySnyR6S0Zwc$ z38uA<&L{Y|U6IW2lBu@1Z2-mXQ_j|$7INrtPbn6Y>B@9;gS&J7R5fo>QX|A`C77DbxU&qfmX+Bw9-)CR^WDh8?DaQ zXr-yLt-PgH*K4%y-lh_|Z=-b@1>IMOm{+k1V9Ws?5;3d6t(Yc)3CV;n!Yv3d2}nu} zn<pv`IpE$F}&s$I%1B-5Ex0T0s!k5d+1NX<6)lqT}6;_NKF3g}zVz(FcU zA%XBSpcEgWP3soM7!F_n0-2>mO9P8+hNh>JHy2Dz2j7T8Tf!GXpK%gk;ol`x^K>oE zEQg+X0u@i7>KQCcHK%7uS?OB-sN(D|bG44nrK`(*W%0$%-47SO{8@G9!Lqa@Rpg%a z-rdj=f9YD^H2@8{BKuyryC2w>?a$nU75CukovJ%h7B==DDqELEDstb7R*?gL>hQ1j zKAEa^jFiOaYOEZ0S3_bes8 
zIJ55QeQ@aBp_TckJ1U-$@;R9Q;P~?KU!7R9_thNUrMar3kIIDZg;wRK`zxLk-~GE1ZnsiPl5cmgz7T#^sM$&Ik=GPESm)SI>!!=<6zkonvv0#aSo3z*ygO=rQdTFRKvHe|#|k%Z zW5c|riS3o4#+@xfCQ@bXq;6c;0jdoU8)0~mBJ=QmhfFIPr!1dpyxLCVpTao37!4gr zonsj0IdXl4c0Wg!=g9UPxt}A)-_Vh-Q2(Ei?K8)QWckIyrwdEFtCGLO*DSWXzR!JY y-cZ#NE{Qd#`)=m*%vw*R>KrQBHf;8iToc3+Tj#GbjDMA{BU(T1{Fhmq_WTb6*Ozqw literal 0 HcmV?d00001 diff --git a/modules/__pycache__/aster_db.cpython-313.pyc b/modules/__pycache__/aster_db.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a4f7c12bab8627e962e03d3d1934a55470f0a2a1 GIT binary patch literal 6045 zcmeHL&2JM&6rYV9$JkB+2@pQRP*Ox7BD5-PfeLUwpeDhFtOH0Otv2hivt&P{Xz4l7~f>clyxga667jBNCwkoH-S+5;C@!AelNR?R0#bjj>JcSg^)e$Q|315&9!9_$$Wy zg?x9E=kZk`GW~+0;;4+}ZKJR``7dzm<@H z&;f*Dwv$NdLFhUkb&iQjynF2(@W^qvgC7`W(4-)?oViAS%+#H8qL1s|$1!L3=UPM9U@SxKf;os%^@6gwY_CkN^vb(U!Y1-5MB ziDNl^%GB_c=)~}+(TPLvAM0QLL`74ccC|)_0C#>blAp zpSsK#vC0#wxp1p!&-JLtKgkIRi+Rr8$x^9dkWQ&7$uRlyK&E-d0h?(Iav#VfU7jHn z4jSS0q9kQ2L^2I6=K(E-)`B!|(iz~W=Ay?9>!uScYckb|&8lRr9dKEjO+#$V zONwcvoVy^)oF{qAKvpauFvom@K#?_VN>*kh%8A-*6@bPybrIemOU<-uxwtA?>_1g? 
z`VuF&e=p|5%j39MSh32ms%JH)5|Gq`K)y{gGr$;`yk9laX1=YMuY~%c?&JebR9gAa z6;7U`l$!KG3*}{y2B2ZQ++v(RF*t4NWYC_n4a*)JA#+4Cv%J;}p0mMMuzC|Pvq9ZV z*&1Pk#gdYmI-1Sp4=PT1k~}MoWbxueLG~bCYS|(*FVN?x5JbW77oR;1cHWNMiY#p# z{v|l_JiO)WtB=Bao<_QFkKYApm9>Fzx&PS?Pnve;M66 zMzK8JuaL4QykT)C}+XqsuDDF82i~nYC3YA5HKd<=UA>c+&-q%T}}-`uY7ed8gzA zhjL(Nfll)K=AmhtaxOoPOjJ-#2dA74PB|T%aymHW@TRvO;~W6Qaxdo_x(%NFmP0w;At#Cu#D#M}>NL<&1~=C?{W8LX@Hhto+|IenYyWftUcQp}7Xd zA6!@7aIeY$N~AP_k5FE_pr$1Q*M|S0nkmbus+}TGXH}(GvjM9oK3+Bd^%k)opegtSLHG@IJVOVc zq1}ZD>gio>JtjnjC+)c4^hLI8z2{%qsd3O5A@xT^%@9S6ueF3{ zZ)phwKFedry2S%LXCKWcati7yf(?*CCi6hq%*~UJIm(F*8&8E^kV7(`@ySBQp9#DZ z0LTcb*5*d%Gh2~;8BX>iHz1RdupGPy_k??dhKfWAA2VuFB5wS;T{ZL5mg#6K&P~Ns ziW+spj*>I9tf`bV*I!nsQnFosdLilhHM6LjXr!7`p=MqBYH) zIUNY)>#!NxD8k&ssFBf zB!EROjXjew&-aCXkOY*5QZ8$zIx7jT;0cG%Tc#V*3t4SNw^=aVRJ9e9oZqps7*^ul z5Yt{3d8W&$1e~?D~3IRpus|6g8e!U|E$7x-xKMv z^y|gn-q^jpyj%EkTiWY0>bM3E#0Ar+iZ`qBr=N(83n1#YsF%uyS4T~iLT7t|yykLc)eX%kjxviENpl5N&pU3AR?I9*-}UFMQb~8*@NLDYXg4Ue zsO0l%L7DU^(>!Hup{y@9vwb0o;w0@rC%uFV%)#Ek;l(kmLM;lx$Twdf1}}Uc z{VuxOIsJQZ<|xv(_St@<`!L?KRropg#CU8R#E19d!wrLa5Wlt;zxK4Zf9uZL>|tlu zX6A>?_LcokX^lH-@7x?*AAHto)Oju*`y05HSj`*!5PxlRV13}n!Gmb;UbOem_U_%D z5BJ+AccYUh5u}~?q2;X?c0Zo%;Z$JiJ+c!aQzODow2R@(WNJ*<>2G6rjNm8{%5JR` zrmRgUooX42Oqod7pUG4eo4(0Dc-x`0hatg0xzTQfULk$~%G%thR&czbmkIhd+L*5G z&vB^ngplVDdj?njfOstiUA-rv4sxA5?dYoU3>>v~tU2qEt=vz>BV(_1pyp%Nnjd19 XY5@-hp{=9lIn}}tj{Oy3Rxk9w?N(W$ literal 0 HcmV?d00001 diff --git a/modules/__pycache__/extended_auth.cpython-313.pyc b/modules/__pycache__/extended_auth.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..746f0892c384dd46f478af93116a9245a835ac32 GIT binary patch literal 1707 zcmah}%}*Og6rc5e+J@!R1QQaLmQ=VOm>5VxMN`?vM7D!V{Xw8cS}pe4Y_jZH<26cp zvf7?{se~HkZ>ZEmk3GY+hb2CMHb$z{_P~vZR%+EAIzy`?Y!Uo=Dm44Z{C}W z-d-O-5_xo{{6zxr3!b<_b-=|I0?$DXfzI;o%B9W@Tw)iw z`AVW{*dYPfBc78Px(8)-BQfup0vyP&lb|^0+Ot5q1%SpBhg~K+;-X89vda`09?W&~ zCV&YPL1Utfv#*0ILZCHu8zga$;GGt|i5EV$%wCnOr`aO=C`6 
zE5sfq!EoGDI(R{)Rbl29Sg@<`DQ&^29vLVW%_oRkuSpFJR_9R0VpUBTFU#+pN zDkU4UQcc0&^TicOQRT8==t5OCgl$Qcw2~}H6+?zX3Cfb8=$fEc1j8v=L2}CZLm|`y zQ7VDu(d!n2d{H}Cq@o!R_YP$6fJ@S1J06xF8Ht?$mTz5B8*&0d9a^laSF5NUEUp9< zLn%pWh_zTZH9jW~cAETa&cW>oZJ*idt>bnR-6(wLN5zuUT!dW5^(i=F-4E~RyK=a( zjasr1UY7S{Rj=)$E)1_UG~AP77Ue6vtCt(9To21n(YeZUS(F;a&P?r@^^Tnr*`B*G zMM*1*Z-#}FC?>A#`b#vNfhK5EeP6A%`TlQ|uN8Cj-4_2|^Fe!H=tXcpXkNe58o1j` zwFie^g!V(`_`TNPVl&HF66!n^M^$8NX!BdwmfX5!c3&bAPt|feSz&k2cqL%#FU}M$PMav#@UFADO|8V{X&rHh=byIxc^ov)J^XdJG-w zix?N5A`bjFVi9mKO~huYgAj%B2PEca35+AY_#}C-6pW9PhXEGxhvPJ24kxh8D7I7Xrob6W&2*f z0~d$^P{2I5UZMR7>@Vm9(@+Ky(xyqgOI2;tc(v!8xa*HNPKz}00!z6(_I>^M ze7^fU&)sr+dlO??I0og**6xQ65bRV$1Q%2+0?lxYPIFol|I%w_x3I z#p!<}`W>zFpM;*D^&vDb-1DUZ5qLWI*tX6AoF1)R+NjT?D{YQm3^;y?5JIT|_;r5b z%L%by>eXN>jYo$u^k8aiB$G_12C+Dvyl^3zI)g_Tkxha{6UqY7Orsnql#Ds`=BIWI z)$%wa&!_~`93It)fm6DP#}=qz9>+N*!KWdbLKMj|2$OWC5GLP(mkQ%m$H?SZA~S|F ziQ$PcoIH(F=?osboD?&niwiuk=6>$VAafScvP7Z^PNp)08w4gT5W|!UMM-9i&dVwu zPM+~l7L2^B%6=FAwCTj;$l1i?z(GrfhprvYsS42?Xw&`>mImRud= zsH(z1Ya}VkB~avudn?SMWT=vN?=hlSFxYfWE#cvGdSc`I6m`RFRG+mcmLlWgQgckV=PM;3RD{2Us6(bnqGc8gCUnWvq*zCU>P z;C-cny8Cx5qwc;AD59Nzqwe-jxCX@$6y5#(fx8F39sDKI`%|R%_s+eG`%XUYJhd1( zwe%S%mX4tIzJHec;n`m&b`JZ5N4tf@N&lm+E^Z$Zh6Vqlcn8=&3IddG15#4~iSa1M zc+#?^C}T2NA@Y3~eQU=1W}=Mk;!TJ$c0;^FLeo*Enb^@}WLpVXP3pY^PU8U42<#Bf zEqACl1U?@uO#r`HTvJ0bcE}bv3!*k%T(!c1s%}?WYZrLU~@MDqo*SIh{#Wth>2dtGz+ZS%t|rV4>i4 zgQYXWvVAnPI;p;5&9WQSJt(>;}&qJ`R=htdO$O@g*58p8_TWHb+ZhaPPLp-N$S>mlRY1x@(_VKe?@MU{$# zWnq0xVOy>44waLP%4Rb=2dhy4R9hy@P!e$QaF)-i}Du-d`QdYCGq)@7EVfYM_*?S0f zQf)^o`%NC8WzeH)IvzSv-JAHFy$jd)X0Y)C6cwK!2*0B3&(Z$pXip`Ax_Xxa9|#HI zX*;g?xbb2~v|?YW1W|bVpRN2_B_r6vK1aw6TgZ)?&^FIjh5H;KH*6s{YC_u_tqS)$ SLT=bXZrH-VTDi$EwEAD)7DcE4 literal 0 HcmV?d00001 diff --git a/modules/aster_auth_api.py b/modules/aster_auth.py similarity index 100% rename from modules/aster_auth_api.py rename to modules/aster_auth.py diff --git a/modules/aster_db.py b/modules/aster_db.py new file mode 100644 index 0000000..70e7214 --- 
/dev/null +++ b/modules/aster_db.py @@ -0,0 +1,147 @@ +import logging +from typing import AsyncContextManager +from sqlalchemy import text + + +### Orders and Trades Tables #### +async def create_fr_aster_user_order_trade_table( + CON: AsyncContextManager, + engine: str = 'mysql', # mysql | duckdb + ) -> None: + if CON is None: + logging.info("NO DB CONNECTION, SKIPPING Create Statements") + else: + if engine == 'mysql': + logging.info('Creating Table if Does Not Exist: fr_aster_user_order_trade') + await CON.execute(text(""" + CREATE TABLE IF NOT EXISTS fr_aster_user_order_trade ( + timestamp_arrival BIGINT, + timestamp_msg BIGINT, + timestamp_value BIGINT, + symbol VARCHAR(20), + client_order_id VARCHAR(100), + side VARCHAR(20), + order_type VARCHAR(100), + time_in_force VARCHAR(20), + original_qty DOUBLE, + original_price DOUBLE, + avg_price DOUBLE, + stop_price DOUBLE, + execution_type VARCHAR(100), + order_status VARCHAR(100), + order_id BIGINT, + last_filled_qty DOUBLE, + filled_accumulated_qty DOUBLE, + last_filled_price DOUBLE, + commission_asset VARCHAR(20), + commission DOUBLE, + order_trade_time_ts BIGINT, + trade_id VARCHAR(100), + bid_notional DOUBLE, + ask_notional DOUBLE, + trade_is_maker BOOL, + trade_is_reduce_only BOOL, + stop_px_working_type VARCHAR(100), + original_order_type VARCHAR(100), + position_side VARCHAR(100), + pushed_w_conditional_order BOOL, + activation_price DOUBLE, + callback_rate DOUBLE, + realized_profit DOUBLE + ); + """)) + await CON.commit() + else: + raise ValueError('Only MySQL engine is implemented') + +### Margin Calls Table #### +async def create_fr_aster_user_margin_table( + CON: AsyncContextManager, + engine: str = 'mysql', # mysql | duckdb + ) -> None: + if CON is None: + logging.info("NO DB CONNECTION, SKIPPING Create Statements") + else: + if engine == 'mysql': + logging.info('Creating Table if Does Not Exist: fr_aster_user_margin') + await CON.execute(text(""" + CREATE TABLE IF NOT EXISTS fr_aster_user_margin ( + 
timestamp_arrival BIGINT, + timestamp_msg BIGINT, + cross_wallet_balance DOUBLE, + symbol VARCHAR(20), + position_side VARCHAR(20), + position_amount DOUBLE, + margin_type VARCHAR(20), + isolated_wallet DOUBLE, + mark_price DOUBLE, + unrealized_pnl DOUBLE, + maint_margin_required DOUBLE + ); + """)) + await CON.commit() + else: + raise ValueError('Only MySQL engine is implemented') + +### Account Balance Table #### +async def create_fr_aster_user_account_bal( + CON: AsyncContextManager, + engine: str = 'mysql', # mysql | duckdb + ) -> None: + if CON is None: + logging.info("NO DB CONNECTION, SKIPPING Create Statements") + else: + if engine == 'mysql': + logging.info('Creating Table if Does Not Exist: fr_aster_user_account_bal') + await CON.execute(text(""" + CREATE TABLE IF NOT EXISTS fr_aster_user_account_bal ( + timestamp_arrival BIGINT, + timestamp_msg BIGINT, + timestamp_transaction BIGINT, + event_reason_type VARCHAR(20), + asset VARCHAR(20), + wallet_balance DOUBLE, + cross_wallet_balance DOUBLE, + balance_change_excl_pnl_comms DOUBLE + ); + """)) + await CON.commit() + else: + raise ValueError('Only MySQL engine is implemented') + +### Account Positions Table #### +async def create_fr_aster_user_account_pos( + CON: AsyncContextManager, + engine: str = 'mysql', # mysql | duckdb + ) -> None: + if CON is None: + logging.info("NO DB CONNECTION, SKIPPING Create Statements") + else: + if engine == 'mysql': + logging.info('Creating Table if Does Not Exist: fr_aster_user_account_pos') + await CON.execute(text(""" + CREATE TABLE IF NOT EXISTS fr_aster_user_account_pos ( + timestamp_arrival BIGINT, + timestamp_msg BIGINT, + timestamp_transaction BIGINT, + event_reason_type VARCHAR(20), + symbol VARCHAR(20), + position_amount DOUBLE, + entry_price DOUBLE, + accumulated_realized_pre_fees DOUBLE, + unrealized_pnl DOUBLE, + margin_type VARCHAR(20), + isolated_wallet DOUBLE, + position_side VARCHAR(20) + ); + """)) + await CON.commit() + else: + raise ValueError('Only 
MySQL engine is implemented') + + + + + + + diff --git a/modules/db.py b/modules/db.py new file mode 100644 index 0000000..19ca86d --- /dev/null +++ b/modules/db.py @@ -0,0 +1,27 @@ +### Database Funcs ### +import logging +from typing import AsyncContextManager +import pandas as pd + + +async def insert_df_to_mysql( + table_name: str, + params: dict | list | pd.DataFrame, + CON: AsyncContextManager, + engine: str = 'mysql', # mysql | duckdb + ) -> None: + if CON is None: + logging.info("NO DB CONNECTION, SKIPPING Insert Statements") + else: + if engine == 'mysql': + if not isinstance(params, pd.DataFrame): + if isinstance(params, dict): + params = [params] + df = pd.DataFrame(params) + + await CON.run_sync( + lambda sync_conn: df.to_sql(name=table_name, con=sync_conn, if_exists='append', index=False) + ) + await CON.commit() + else: + raise ValueError('Only MySQL engine is implemented') \ No newline at end of file diff --git a/modules/extended_auth.py b/modules/extended_auth.py new file mode 100644 index 0000000..de31724 --- /dev/null +++ b/modules/extended_auth.py @@ -0,0 +1,30 @@ +import os + +from dotenv import load_dotenv +from x10.config import MAINNET_CONFIG +from x10.core.stark_account import StarkPerpetualAccount +from x10.perpetual.trading_client import PerpetualTradingClient +import logging + +async def create_auth_account_and_trading_client() -> tuple[StarkPerpetualAccount, PerpetualTradingClient]: + load_dotenv() + API_KEY = os.getenv('EXTENDED_API_KEY') + PUBLIC_KEY = os.getenv('EXTENDED_STARK_KEY_PUBLIC') + PRIVATE_KEY = os.getenv('EXTENDED_STARK_KEY_PRIVATE') + VAULT = int(os.getenv('EXTENDED_VAULT_NUMBER')) + + stark_account = StarkPerpetualAccount( + vault=VAULT, + private_key=PRIVATE_KEY, + public_key=PUBLIC_KEY, + api_key=API_KEY, + ) + + trading_client = PerpetualTradingClient(MAINNET_CONFIG, stark_account) + + try: + await trading_client.account.get_balance() + except ValueError as e: + logging.critical(f'Failed to get balance after creation of 
trading account: {e}') + + return stark_account, trading_client \ No newline at end of file diff --git a/modules/extended_db.py b/modules/extended_db.py new file mode 100644 index 0000000..fd5f67d --- /dev/null +++ b/modules/extended_db.py @@ -0,0 +1,152 @@ +import logging +from typing import AsyncContextManager +from sqlalchemy import text + + +### Orders Table #### +async def create_fr_extended_user_order( + CON: AsyncContextManager, + engine: str = 'mysql', # mysql | duckdb + ) -> None: + if CON is None: + logging.info("NO DB CONNECTION, SKIPPING Create Statements") + else: + if engine == 'mysql': + logging.info('Creating Table if Does Not Exist: fr_extended_user_order') + await CON.execute(text(""" + CREATE TABLE IF NOT EXISTS fr_extended_user_order ( + sequence_id INT, + timestamp_arrival BIGINT, + timestamp_msg BIGINT, + order_id VARCHAR(100), + account_id VARCHAR(100), + external_id VARCHAR(100), + market VARCHAR(20), + type VARCHAR(20), + side VARCHAR(20), + status VARCHAR(20), + status_reason VARCHAR(100), + price DOUBLE, + averagePrice DOUBLE, + qty DOUBLE, + filled_qty DOUBLE, + payed_fee DOUBLE, + tp_sl_type VARCHAR(20), + reduce_only BOOL, + post_only BOOL, + created_time_ts BIGINT, + updated_time_ts BIGINT, + expire_time_ts BIGINT + ); + """)) + await CON.commit() + else: + raise ValueError('Only MySQL engine is implemented') + +### Orders Table #### +async def create_fr_extended_user_trade( + CON: AsyncContextManager, + engine: str = 'mysql', # mysql | duckdb + ) -> None: + if CON is None: + logging.info("NO DB CONNECTION, SKIPPING Create Statements") + else: + if engine == 'mysql': + logging.info('Creating Table if Does Not Exist: fr_extended_user_trade') + await CON.execute(text(""" + CREATE TABLE IF NOT EXISTS fr_extended_user_trade ( + sequence_id INT, + timestamp_arrival BIGINT, + timestamp_msg BIGINT, + trade_id VARCHAR(100), + account_id VARCHAR(100), + market VARCHAR(20), + order_id VARCHAR(100), + external_order_id VARCHAR(100), + side 
### Balance Table ####
async def create_fr_extended_user_balance(
        CON: AsyncContextManager,
        engine: str = 'mysql',  # mysql | duckdb
    ) -> None:
    """Ensure the `fr_extended_user_balance` table exists (MySQL only).

    Logs and returns when no DB connection is supplied; raises ValueError
    for any engine other than 'mysql'.
    """
    if CON is None:
        logging.info("NO DB CONNECTION, SKIPPING Create Statements")
        return
    if engine != 'mysql':
        raise ValueError('Only MySQL engine is implemented')

    logging.info('Creating Table if Does Not Exist: fr_extended_user_balance')
    ddl = text("""
        CREATE TABLE IF NOT EXISTS fr_extended_user_balance (
            sequence_id INT,
            timestamp_arrival BIGINT,
            timestamp_msg BIGINT,
            collateral_name VARCHAR(20),
            balance DOUBLE,
            equity DOUBLE,
            available_for_trade DOUBLE,
            available_for_withdrawal DOUBLE,
            unrealised_pnl DOUBLE,
            initial_margin DOUBLE,
            margin_ratio DOUBLE,
            updated_time_ts BIGINT,
            exposure DOUBLE,
            leverage DOUBLE
        );
    """)
    await CON.execute(ddl)
    await CON.commit()
adl_percentile INT, + created_at_ts BIGINT, + updated_at_ts BIGINT + ); + """)) + await CON.commit() + else: + raise ValueError('Only MySQL engine is implemented') + diff --git a/ws_apex.py b/ws_apex.py index 5ed877e..abccf76 100644 --- a/ws_apex.py +++ b/ws_apex.py @@ -218,7 +218,7 @@ async def main(): logging.warning("VALKEY NOT BEING USED, NO DATA WILL BE PUBLISHED") if USE_DB: - engine = create_async_engine('mysql+asyncmy://root:pwd@localhost/polymarket') + engine = create_async_engine('mysql+asyncmy://root:pwd@localhost/fund_rate') async with engine.connect() as CON: # await create_rtds_btcusd_table(CON=CON) await ws_stream() diff --git a/ws_aster.py b/ws_aster.py index cc529bc..47514d6 100644 --- a/ws_aster.py +++ b/ws_aster.py @@ -128,7 +128,7 @@ async def ws_stream(): if channel is not None: match channel: case c if c == STREAM_MARKPRICE: - print(f'MP: {data}') + # print(f'MP: {data}') VAL_KEY_OBJ = json.dumps({ 'timestamp_arrival': ts_arrival, 'timestamp_msg': data['data']['E'], @@ -186,7 +186,7 @@ async def main(): logging.warning("VALKEY NOT BEING USED, NO DATA WILL BE PUBLISHED") if USE_DB: - engine = create_async_engine('mysql+asyncmy://root:pwd@localhost/polymarket') + engine = create_async_engine('mysql+asyncmy://root:pwd@localhost/fund_rate') async with engine.connect() as CON: # await create_rtds_btcusd_table(CON=CON) await ws_stream() diff --git a/ws_aster_user.py b/ws_aster_user.py index 8b144e1..8c4f9f4 100644 --- a/ws_aster_user.py +++ b/ws_aster_user.py @@ -15,7 +15,9 @@ from sqlalchemy.ext.asyncio import create_async_engine import valkey import os from dotenv import load_dotenv - +import modules.aster_auth as aster_auth +import modules.aster_db as aster_db +import modules.db as db ### Allow only ipv4 ### def allowed_gai_family(): @@ -23,9 +25,12 @@ def allowed_gai_family(): urllib3_cn.allowed_gai_family = allowed_gai_family ### Database ### -USE_DB: bool = False -USE_VK: bool = False -VK_USER = 'fund_rate_aster' +USE_DB: bool = True +USE_VK: 
bool = True +VK_ORDERS_TRADES = 'fr_aster_user_orders' +VK_MARGIN_CALLS = 'fr_aster_user_margin_calls' +VK_BALANCES = 'fr_aster_user_balances' +VK_POSITIONS = 'fr_aster_user_positions' CON: AsyncContextManager | None = None VAL_KEY = None @@ -34,112 +39,205 @@ load_dotenv() LOG_FILEPATH: str = os.getenv("LOGS_PATH") + '/Fund_Rate_Aster_User.log' ### CONSTANTS ### +WSS_URL = "wss://fstream.asterdex.com/ws/" +LOCAL_RECENT_UPDATES_LOOKBACK_SEC = 30 ### Globals ### -WSS_URL = "wss://fstream.asterdex.com" +LISTEN_KEY: str | None = None +LISTEN_KEY_LAST_UPDATE_TS_S: int = 0 +LISTEN_KEY_PUT_INTERVAL_SEC = 1800 -# HIST_TRADES = np.empty((0, 3)) -# HIST_TRADES_LOOKBACK_SEC = 6 +LOCAL_RECENT_ORDERS: list = [] +LOCAL_RECENT_MARGIN_CALLS: list = [] +LOCAL_RECENT_BALANCES: list = [] +LOCAL_RECENT_POSITIONS: list = [] -# ### Database Funcs ### -# async def create_rtds_btcusd_table( -# CON: AsyncContextManager, -# engine: str = 'mysql', # mysql | duckdb -# ) -> None: -# if CON is None: -# logging.info("NO DB CONNECTION, SKIPPING Create Statements") -# else: -# if engine == 'mysql': -# logging.info('Creating Table if Does Not Exist: binance_btcusd_trades') -# await CON.execute(text(""" -# CREATE TABLE IF NOT EXISTS binance_btcusd_trades ( -# timestamp_arrival BIGINT, -# timestamp_msg BIGINT, -# timestamp_value BIGINT, -# value DOUBLE, -# qty DOUBLE -# ); -# """)) -# await CON.commit() -# else: -# raise ValueError('Only MySQL engine is implemented') -# async def insert_rtds_btcusd_table( -# timestamp_arrival: int, -# timestamp_msg: int, -# timestamp_value: int, -# value: float, -# qty: float, -# CON: AsyncContextManager, -# engine: str = 'mysql', # mysql | duckdb -# ) -> None: -# params={ -# 'timestamp_arrival': timestamp_arrival, -# 'timestamp_msg': timestamp_msg, -# 'timestamp_value': timestamp_value, -# 'value': value, -# 'qty': qty, -# } -# if CON is None: -# logging.info("NO DB CONNECTION, SKIPPING Insert Statements") -# else: -# if engine == 'mysql': -# await 
def upsert_list_of_dicts_by_id(list_of_dicts, new_dict, id='id'):
    """Insert `new_dict` into `list_of_dicts`, replacing the first entry
    whose `id` field matches; append it when no entry matches.

    Mutates and returns `list_of_dicts`.
    """
    target = new_dict.get(id)
    for idx, existing in enumerate(list_of_dicts):
        if existing.get(id) == target:
            list_of_dicts[idx] = new_dict
            break
    else:
        list_of_dicts.append(new_dict)
    return list_of_dicts
response: {r}') + +async def listen_key_interval(): + global LISTEN_KEY + + while True: + await asyncio.sleep(LISTEN_KEY_PUT_INTERVAL_SEC) + LISTEN_KEY = get_new_listen_key() ### Websocket ### async def ws_stream(): - async for websocket in websockets.connect(WSS_URL): + global LISTEN_KEY + global LOCAL_RECENT_ORDERS + global LOCAL_RECENT_MARGIN_CALLS + global LOCAL_RECENT_BALANCES + global LOCAL_RECENT_POSITIONS + + LISTEN_KEY = get_new_listen_key() + + async for websocket in websockets.connect(WSS_URL+LISTEN_KEY): logging.info(f"Connected to {WSS_URL}") - - + asyncio.create_task(listen_key_interval()) try: async for message in websocket: ts_arrival = round(datetime.now().timestamp()*1000) + if isinstance(message, str): try: data = json.loads(message) - channel = data.get('stream', None) + channel = data.get('e', None) if channel is not None: + LOOKBACK_MIN_TS_MS = ts_arrival - (LOCAL_RECENT_UPDATES_LOOKBACK_SEC*1000) + match channel: - case : - continue - case : - # print(f'BT: {data}') - VAL_KEY_OBJ = json.dumps({ + case 'ORDER_TRADE_UPDATE': + logging.info(f'ORDER_TRADE_UPDATE: {data}') + new_order_update = { 'timestamp_arrival': ts_arrival, - 'timestamp_msg': data['data']['E'], - 'timestamp_transaction': data['data']['T'], - 'orderbook_update_id': data['data']['u'], - 'symbol': data['data']['s'], - 'best_bid_px': data['data']['b'], - 'best_bid_qty': data['data']['B'], - 'best_ask_px': data['data']['a'], - 'best_ask_qty': data['data']['A'], - }) - VAL_KEY.set(VK_TICKER, VAL_KEY_OBJ) + 'timestamp_msg': data['E'], + 'timestamp_transaction': data['T'], + + 'symbol': data['o']["s"], # "BTCUSDT", // Symbol + 'client_order_id': data['o']["c"], # "TEST", // Client Order Id + 'side': data['o']["S"], # "SELL", // Side + 'order_type': data['o']["o"], # "TRAILING_STOP_MARKET", // Order Type + 'time_in_force': data['o']["f"], # "GTC", // Time in Force + 'original_qty': float(data['o']["q"]), # "0.001", // Original Quantity + 'original_price': float(data['o']["p"]), # "0", 
// Original Price + 'avg_price': float(data['o']["ap"]), # :"0", // Average Price + 'stop_price': float(data['o'].get("sp", 0)), # :"7103.04", // Stop Price. Please ignore with TRAILING_STOP_MARKET order + 'execution_type': data['o']["x"], # "NEW", // Execution Type + 'order_status': data['o']["X"], # "NEW", // Order Status + 'order_id': data['o']["i"], # 8886774, // Order Id + 'last_filled_qty': float(data['o']["l"]), # "0", // Order Last Filled Quantity + 'filled_accumulated_qty': float(data['o']["z"]), # "0", // Order Filled Accumulated Quantity + 'last_filled_price': float(data['o']["L"]), # "0", // Last Filled Price + 'commission_asset': data['o'].get("N", None), # "USDT", // Commission Asset, will not push if no commission + 'commission': float(data['o'].get("n",0)), # "0", // Commission, will not push if no commission + 'order_trade_time_ts': data['o']["T"], # 1568879465651, // Order Trade Time + 'trade_id': data['o']["t"], # 0, // Trade Id + 'bid_notional': float(data['o']["b"]), # "0", // Bids Notional + 'ask_notional': float(data['o']["a"]), # "9.91", // Ask Notional + 'trade_is_maker': data['o']["m"], # false, // Is this trade the maker side? 
+ 'trade_is_reduce_only': data['o']["R"], # false, // Is this reduce only + 'stop_px_working_type': data['o']["wt"], # :"CONTRACT_PRICE", // Stop Price Working Type + 'original_order_type': data['o']["ot"], # :"TRAILING_STOP_MARKET", // Original Order Type + 'position_side': data['o']["ps"], # :"LONG", // Position Side + 'pushed_w_conditional_order': bool(data['o'].get("cp", False)), # :false, // If Close-All, pushed with conditional order + 'activation_price': float(data['o'].get("AP", 0)), # :"7476.89", // Activation Price, only puhed with TRAILING_STOP_MARKET order + 'callback_rate': float(data['o'].get("cr", 0)), # :"5.0", // Callback Rate, only puhed with TRAILING_STOP_MARKET order + 'realized_profit': float(data['o']["rp"]), # :"0" // Realized Profit of the trade + } + LOCAL_RECENT_ORDERS = upsert_list_of_dicts_by_id(LOCAL_RECENT_ORDERS, new_order_update, id='client_order_id') + LOCAL_RECENT_ORDERS = [t for t in LOCAL_RECENT_ORDERS if t.get('timestamp_arrival', 0) >= LOOKBACK_MIN_TS_MS] + + VAL_KEY_OBJ = json.dumps(LOCAL_RECENT_ORDERS) + VAL_KEY.set(VK_ORDERS_TRADES, VAL_KEY_OBJ) + + await db.insert_df_to_mysql(table_name='fr_aster_user_order_trade', params=new_order_update, CON=CON) continue + case 'MARGIN_CALL': + logging.info(f'MARGIN_CALL: {data}') + list_for_df = [] + for p in list(data['p']): + margin_call_update = { + 'timestamp_arrival': ts_arrival, + 'timestamp_msg': data['E'], + 'cross_wallet_balance': float(data.get('cw', 0)), + + 'symbol': p["s"], # "ETHUSDT", // Symbol + 'position_side': p["ps"], # :"LONG", // Position Side + 'position_amount': float(p["pa"]), # :"1.327", // Position Amount + 'margin_type': p["mt"], # :"CROSSED", // Margin Type + 'isolated_wallet': float(p.get("iw", 0)), # :"0", // Isolated Wallet (if isolated position) + 'mark_price': float(p["mp"]), # :"187.17127", // Mark Price + 'unrealized_pnl': float(p["up"]), # :"-1.166074", // Unrealized PnL + 'maint_margin_required': float(p["mm"]), # :"1.614445" // Maintenance Margin 
Required + } + list_for_df.append(margin_call_update) + LOCAL_RECENT_MARGIN_CALLS = upsert_list_of_dicts_by_id(LOCAL_RECENT_MARGIN_CALLS, margin_call_update, id='symbol') + LOCAL_RECENT_MARGIN_CALLS = [t for t in LOCAL_RECENT_MARGIN_CALLS if t.get('timestamp_arrival', 0) >= LOOKBACK_MIN_TS_MS] + + VAL_KEY_OBJ = json.dumps(LOCAL_RECENT_MARGIN_CALLS) + VAL_KEY.set(VK_MARGIN_CALLS, VAL_KEY_OBJ) + + await db.insert_df_to_mysql(table_name='fr_aster_user_margin', params=list_for_df, CON=CON) + continue + case 'ACCOUNT_UPDATE': + logging.info(f'ACCOUNT_UPDATE: {data}') + list_for_df_bal = [] + list_for_df_pos = [] + ### Balance Updates ### + if len(list(data['a']['B'])) > 0: + for b in list(data['a']['B']): + balance_update = { + 'timestamp_arrival': ts_arrival, + 'timestamp_msg': data['E'], + 'timestamp_transaction': data['T'], + + 'event_reason_type': data['a']["m"], + + 'asset': b['a'], + 'wallet_balance': float(b['wb']), + 'cross_wallet_balance': float(b.get('cw', 0)), + 'balance_change_excl_pnl_comms': float(b['bc']), + } + list_for_df_bal.append(balance_update) + LOCAL_RECENT_BALANCES = upsert_list_of_dicts_by_id(LOCAL_RECENT_BALANCES, balance_update, id='asset') + LOCAL_RECENT_BALANCES = [t for t in LOCAL_RECENT_BALANCES if t.get('timestamp_arrival', 0) >= LOOKBACK_MIN_TS_MS] + VAL_KEY.set(VK_BALANCES, json.dumps(LOCAL_RECENT_BALANCES)) + ### Position Updates ### + if len(list(data['a']['P'])) > 0: + for p in list(data['a']['P']): + position_update = { + 'timestamp_arrival': ts_arrival, + 'timestamp_msg': data['E'], + 'timestamp_transaction': data['T'], + + 'event_reason_type': data['a']["m"], + + 'symbol': p['s'], + 'position_amount': float(p['pa']), + 'entry_price': float(p['ep']), + 'accumulated_realized_pre_fees': float(p['cr']), + 'unrealized_pnl': float(p['up']), + 'margin_type': p['mt'], + 'isolated_wallet': float(p.get('iw', 0)), + 'position_side': p['ps'], + } + list_for_df_pos.append(position_update) + LOCAL_RECENT_POSITIONS = 
upsert_list_of_dicts_by_id(LOCAL_RECENT_POSITIONS, position_update, id='symbol') + LOCAL_RECENT_POSITIONS = [t for t in LOCAL_RECENT_POSITIONS if t.get('timestamp_arrival', 0) >= LOOKBACK_MIN_TS_MS] + VAL_KEY.set(VK_POSITIONS, json.dumps(LOCAL_RECENT_POSITIONS)) + if balance_update: + await db.insert_df_to_mysql(table_name='fr_aster_user_account_bal', params=list_for_df_bal, CON=CON) + if position_update: + await db.insert_df_to_mysql(table_name='fr_aster_user_account_pos', params=list_for_df_bal, CON=CON) + continue + case 'listenKeyExpired': + raise('Listen Key Has Expired; Failed to Update Properly. Restarting.') case _: logging.warning(f'UNMATCHED OTHER MSG: {data}') else: @@ -170,9 +268,12 @@ async def main(): logging.warning("VALKEY NOT BEING USED, NO DATA WILL BE PUBLISHED") if USE_DB: - engine = create_async_engine('mysql+asyncmy://root:pwd@localhost/polymarket') + engine = create_async_engine('mysql+asyncmy://root:pwd@localhost/fund_rate') async with engine.connect() as CON: - # await create_rtds_btcusd_table(CON=CON) + await aster_db.create_fr_aster_user_order_trade_table(CON=CON) + await aster_db.create_fr_aster_user_margin_table(CON=CON) + await aster_db.create_fr_aster_user_account_bal(CON=CON) + await aster_db.create_fr_aster_user_account_pos(CON=CON) await ws_stream() else: CON = None diff --git a/ws_aster_user/Dockerfile b/ws_aster_user/Dockerfile new file mode 100644 index 0000000..4639422 --- /dev/null +++ b/ws_aster_user/Dockerfile @@ -0,0 +1,19 @@ +FROM python:3.13-slim + +RUN apt-get update && \ + apt-get install -y build-essential + +RUN gcc --version +RUN rm -rf /var/lib/apt/lists/* + +WORKDIR /app + +COPY requirements.txt . + +RUN pip install --no-cache-dir -r requirements.txt + +COPY . . + +# Finally, run gunicorn. 
+CMD [ "python", "ws_aster_user.py"] +# CMD [ "gunicorn", "--workers=5", "--threads=1", "-b 0.0.0.0:8000", "app:server"] \ No newline at end of file diff --git a/ws_extended_fund_rate.py b/ws_extended_fund_rate.py index e3d4992..64b8f17 100644 --- a/ws_extended_fund_rate.py +++ b/ws_extended_fund_rate.py @@ -132,7 +132,7 @@ async def ws_stream(): try: data = json.loads(message) if data.get('data', None) is not None: - print(f'FR: {data}') + # print(f'FR: {data}') fr_next_update_ts = (time_round_down(dt=datetime.now(timezone.utc), interval_mins=60)+(60*60))*1000 VAL_KEY_OBJ = json.dumps({ 'sequence_id': data['seq'], @@ -173,7 +173,7 @@ async def main(): logging.warning("VALKEY NOT BEING USED, NO DATA WILL BE PUBLISHED") if USE_DB: - engine = create_async_engine('mysql+asyncmy://root:pwd@localhost/polymarket') + engine = create_async_engine('mysql+asyncmy://root:pwd@localhost/fund_rate') async with engine.connect() as CON: # await create_rtds_btcusd_table(CON=CON) await ws_stream() diff --git a/ws_extended_orderbook.py b/ws_extended_orderbook.py index 8abc140..d58bb2b 100644 --- a/ws_extended_orderbook.py +++ b/ws_extended_orderbook.py @@ -32,7 +32,7 @@ VAL_KEY = None ### Logging ### load_dotenv() -LOG_FILEPATH: str = os.getenv("LOGS_PATH") + '/Fund_Rate_Extended.log' +LOG_FILEPATH: str = os.getenv("LOGS_PATH") + '/Fund_Rate_Extended_OB.log' ### CONSTANTS ### WS_SYMBOL: str = 'ETH-USD' @@ -40,76 +40,6 @@ WS_SYMBOL: str = 'ETH-USD' ### Globals ### WSS_URL = f"wss://api.starknet.extended.exchange/stream.extended.exchange/v1/orderbooks/{WS_SYMBOL}?depth=1" -# HIST_TRADES = np.empty((0, 3)) -# HIST_TRADES_LOOKBACK_SEC = 6 - -# ### Database Funcs ### -# async def create_rtds_btcusd_table( -# CON: AsyncContextManager, -# engine: str = 'mysql', # mysql | duckdb -# ) -> None: -# if CON is None: -# logging.info("NO DB CONNECTION, SKIPPING Create Statements") -# else: -# if engine == 'mysql': -# logging.info('Creating Table if Does Not Exist: binance_btcusd_trades') -# await 
CON.execute(text(""" -# CREATE TABLE IF NOT EXISTS binance_btcusd_trades ( -# timestamp_arrival BIGINT, -# timestamp_msg BIGINT, -# timestamp_value BIGINT, -# value DOUBLE, -# qty DOUBLE -# ); -# """)) -# await CON.commit() -# else: -# raise ValueError('Only MySQL engine is implemented') - -# async def insert_rtds_btcusd_table( -# timestamp_arrival: int, -# timestamp_msg: int, -# timestamp_value: int, -# value: float, -# qty: float, -# CON: AsyncContextManager, -# engine: str = 'mysql', # mysql | duckdb -# ) -> None: -# params={ -# 'timestamp_arrival': timestamp_arrival, -# 'timestamp_msg': timestamp_msg, -# 'timestamp_value': timestamp_value, -# 'value': value, -# 'qty': qty, -# } -# if CON is None: -# logging.info("NO DB CONNECTION, SKIPPING Insert Statements") -# else: -# if engine == 'mysql': -# await CON.execute(text(""" -# INSERT INTO binance_btcusd_trades -# ( -# timestamp_arrival, -# timestamp_msg, -# timestamp_value, -# value, -# qty -# ) -# VALUES -# ( -# :timestamp_arrival, -# :timestamp_msg, -# :timestamp_value, -# :value, -# :qty -# ) -# """), -# parameters=params -# ) -# await CON.commit() -# else: -# raise ValueError('Only MySQL engine is implemented') - ### Websocket ### async def ws_stream(): async for websocket in websockets.connect(WSS_URL): @@ -162,7 +92,7 @@ async def main(): logging.warning("VALKEY NOT BEING USED, NO DATA WILL BE PUBLISHED") if USE_DB: - engine = create_async_engine('mysql+asyncmy://root:pwd@localhost/polymarket') + engine = create_async_engine('mysql+asyncmy://root:pwd@localhost/fund_rate') async with engine.connect() as CON: # await create_rtds_btcusd_table(CON=CON) await ws_stream() diff --git a/ws_extended_user.py b/ws_extended_user.py new file mode 100644 index 0000000..881790c --- /dev/null +++ b/ws_extended_user.py @@ -0,0 +1,273 @@ +import asyncio +import json +import logging +import socket +import traceback +from datetime import datetime, timezone +from typing import AsyncContextManager +import math + +import numpy as 
np +import pandas as pd +import requests.packages.urllib3.util.connection as urllib3_cn # type: ignore +from sqlalchemy import text +import websockets +from sqlalchemy.ext.asyncio import create_async_engine +import valkey +import os +from dotenv import load_dotenv +import modules.extended_db as extended_db +import modules.db as db + +### Allow only ipv4 ### +def allowed_gai_family(): + return socket.AF_INET +urllib3_cn.allowed_gai_family = allowed_gai_family + +### Database ### +USE_DB: bool = True +USE_VK: bool = True +VK_ORDERS = 'fr_extended_user_orders' +VK_TRADES = 'fr_extended_user_trades' +VK_BALANCES = 'fr_extended_user_balances' +VK_POSITIONS = 'fr_extended_user_positions' +CON: AsyncContextManager | None = None +VAL_KEY = None + +### Logging ### +load_dotenv() +LOG_FILEPATH: str = os.getenv("LOGS_PATH") + '/Fund_Rate_Extended_User.log' + +### CONSTANTS ### +WSS_URL = "wss://api.starknet.extended.exchange/stream.extended.exchange/v1/account" +API_KEY = os.getenv('EXTENDED_API_KEY') +LOCAL_RECENT_UPDATES_LOOKBACK_SEC = 30 + +### Globals ### +LOCAL_RECENT_ORDERS: list = [] +LOCAL_RECENT_TRADES: list = [] +LOCAL_RECENT_BALANCES: list = [] +LOCAL_RECENT_POSITIONS: list = [] + +def upsert_list_of_dicts_by_id(list_of_dicts, new_dict, id='id', seq_check_field: str | None = None): + for index, item in enumerate(list_of_dicts): + if item.get(id) == new_dict.get(id): + if seq_check_field is not None: + if item.get(seq_check_field) < new_dict.get(seq_check_field): + logging.info('Skipping out of sequence msg') + return list_of_dicts + list_of_dicts[index] = new_dict + return list_of_dicts + + list_of_dicts.append(new_dict) + return list_of_dicts + +### Websocket ### +async def ws_stream(): + global LOCAL_RECENT_ORDERS + global LOCAL_RECENT_TRADES + global LOCAL_RECENT_BALANCES + global LOCAL_RECENT_POSITIONS + + async for websocket in websockets.connect(WSS_URL, extra_headers={'X-Api-Key': API_KEY}): + logging.info(f"Connected to {WSS_URL}") + try: + async for 
message in websocket: + ts_arrival = round(datetime.now().timestamp()*1000) + if isinstance(message, str): + try: + data = json.loads(message) + channel = data.get('type', None) + if channel is not None: + LOOKBACK_MIN_TS_MS = ts_arrival - (LOCAL_RECENT_UPDATES_LOOKBACK_SEC*1000) + match channel: + case 'ORDER': + list_for_df = [] + for o in data['data']['orders']: + order_update = { + 'sequence_id': data['seq'], + 'timestamp_arrival': ts_arrival, + 'timestamp_msg': data['ts'], + + 'order_id': o['id'], + 'account_id': o['accountId'], + 'external_id': o.get('externalId', None), + 'market': o['market'], + 'type': o['type'], + 'side': o['side'], + 'status': o['status'], + 'status_reason': o.get('statusReason', None), + 'price': float(o.get('price', 0)), + 'averagePrice': float(o.get('averagePrice', 0)), + 'qty': float(o['qty']), + 'filled_qty': float(o.get('filledQty', 0)), + 'payed_fee': float(o.get('payedFee', 0)), + # 'trigger_dict': o.get('trigger', None), + 'tp_sl_type': o.get('tpSlType', None), + # 'take_profit_dict': o.get('takeProfit', None), + # 'stop_loss_dict': o.get('stopLoss', None), + 'reduce_only': o.get('reduceOnly', False), + 'post_only': o.get('postOnly', False), + 'created_time_ts': o['createdTime'], + 'updated_time_ts': o['updatedTime'], + 'expire_time_ts': o['expireTime'], + } + list_for_df.append(order_update) + LOCAL_RECENT_ORDERS = upsert_list_of_dicts_by_id(LOCAL_RECENT_ORDERS, order_update, id='order_id', seq_check_field='sequence_id') + LOCAL_RECENT_ORDERS = [t for t in LOCAL_RECENT_ORDERS if t.get('timestamp_arrival', 0) >= LOOKBACK_MIN_TS_MS] + + VAL_KEY_OBJ = json.dumps(LOCAL_RECENT_ORDERS) + VAL_KEY.set(VK_ORDERS, VAL_KEY_OBJ) + await db.insert_df_to_mysql(table_name='fr_extended_user_order', params=list_for_df, CON=CON) + continue + case 'TRADE': + list_for_df = [] + for t in data['data']['trades']: + trade_update = { + 'sequence_id': data['seq'], + 'timestamp_arrival': ts_arrival, + 'timestamp_msg': data['ts'], + + 'trade_id': t['id'], 
+ 'account_id': t['accountId'], + 'market': t['market'], + 'order_id': t['orderId'], + 'external_order_id': t.get('externalOrderId', None), + 'side': t['side'], + 'price': float(t['price']), + 'qty': float(t['qty']), + 'value': float(t['value']), + 'fee': float(t['fee']), + 'trade_type': t['tradeType'], + 'created_time_ts': t['createdTime'], + 'is_taker': t['isTaker'], + } + LOCAL_RECENT_TRADES = upsert_list_of_dicts_by_id(LOCAL_RECENT_TRADES, trade_update, id='trade_id', seq_check_field='sequence_id') + LOCAL_RECENT_TRADES = [t for t in LOCAL_RECENT_TRADES if t.get('timestamp_arrival', 0) >= LOOKBACK_MIN_TS_MS] + + VAL_KEY_OBJ = json.dumps(LOCAL_RECENT_TRADES) + VAL_KEY.set(VK_TRADES, VAL_KEY_OBJ) + await db.insert_df_to_mysql(table_name='fr_extended_user_trade', params=list_for_df, CON=CON) + continue + case 'BALANCE': + balance_update = { + 'sequence_id': data['seq'], + 'timestamp_arrival': ts_arrival, + 'timestamp_msg': data['ts'], + + 'collateral_name': data['data']['balance']['collateralName'], + 'balance': float(data['data']['balance']['balance']), + 'equity': float(data['data']['balance']['equity']), + 'available_for_trade': float(data['data']['balance']['availableForTrade']), + 'available_for_withdrawal': float(data['data']['balance']['availableForWithdrawal']), + 'unrealised_pnl': float(data['data']['balance']['unrealisedPnl']), + 'initial_margin': float(data['data']['balance']['initialMargin']), + 'margin_ratio': float(data['data']['balance']['marginRatio']), + 'updated_time_ts': data['data']['balance']['updatedTime'], + 'exposure': float(data['data']['balance']['exposure']), + 'leverage': float(data['data']['balance']['leverage']), + } + LOCAL_RECENT_BALANCES = upsert_list_of_dicts_by_id(LOCAL_RECENT_BALANCES, balance_update, id='collateral_name', seq_check_field='sequence_id') + LOCAL_RECENT_BALANCES = [t for t in LOCAL_RECENT_BALANCES if t.get('timestamp_arrival', 0) >= LOOKBACK_MIN_TS_MS] + + VAL_KEY_OBJ = json.dumps(LOCAL_RECENT_BALANCES) + 
VAL_KEY.set(VK_BALANCES, VAL_KEY_OBJ) + await db.insert_df_to_mysql(table_name='fr_extended_user_balance', params=balance_update, CON=CON) + continue + case 'POSITION': + list_for_df = [] + for p in data['data']['positions']: + position_update = { + 'sequence_id': data['seq'], + 'timestamp_arrival': ts_arrival, + 'timestamp_msg': data['ts'], + + 'position_id': p['id'], + 'account_id': p['accountId'], + 'market': p['market'], + 'side': p['side'], + 'leverage': float(p['leverage']), + 'size': float(p['size']), + 'value': float(p['value']), + 'open_price': float(p['openPrice']), + 'mark_price': float(p['markPrice']), + 'liquidation_price': float(p['liquidationPrice']), + 'margin': float(p['margin']), + 'unrealised_pnl': float(p['unrealisedPnl']), + 'realised_pnl': float(p['realisedPnl']), + 'tp_trigger_price': float(p.get('tpTriggerPrice', 0)), + 'tp_limit_price': float(p.get('tpLimitPrice', 0)), + 'sl_trigger_price': float(p.get('slTriggerPrice', 0)), + 'sl_limit_price': float(p.get('slLimitPrice', 0)), + 'adl_percentile': p['adl'], # closer to 100 means higher chance of auto-deleveraging + 'created_at_ts': p['createdAt'], + 'updated_at_ts': p['updatedAt'], + } + LOCAL_RECENT_POSITIONS = upsert_list_of_dicts_by_id(LOCAL_RECENT_POSITIONS, position_update, id='position_id', seq_check_field='sequence_id') + LOCAL_RECENT_POSITIONS = [t for t in LOCAL_RECENT_POSITIONS if t.get('timestamp_arrival', 0) >= LOOKBACK_MIN_TS_MS] + + VAL_KEY_OBJ = json.dumps(LOCAL_RECENT_POSITIONS) + VAL_KEY.set(VK_POSITIONS, VAL_KEY_OBJ) + await db.insert_df_to_mysql(table_name='fr_extended_user_position', params=list_for_df, CON=CON) + continue + case _: + logging.warning(f'UNMATCHED OTHER MSG: {data}') + else: + logging.info(f'Initial or unexpected data struct, skipping: {data}') + continue + except (json.JSONDecodeError, ValueError): + logging.warning(f'Message not in JSON format, skipping: {message}') + continue + else: + raise ValueError(f'Type: {type(data)} not expected: {message}') + 
except websockets.ConnectionClosed as e: + logging.error(f'Connection closed: {e}') + logging.error(traceback.format_exc()) + continue + except Exception as e: + logging.error(f'Connection closed: {e}') + logging.error(traceback.format_exc()) + + +async def main(): + global VAL_KEY + global CON + + if USE_VK: + VAL_KEY = valkey.Valkey(host='localhost', port=6379, db=0) + else: + VAL_KEY = None + logging.warning("VALKEY NOT BEING USED, NO DATA WILL BE PUBLISHED") + + if USE_DB: + engine = create_async_engine('mysql+asyncmy://root:pwd@localhost/fund_rate') + async with engine.connect() as CON: + await extended_db.create_fr_extended_user_balance(CON=CON) + await extended_db.create_fr_extended_user_order(CON=CON) + await extended_db.create_fr_extended_user_position(CON=CON) + await extended_db.create_fr_extended_user_trade(CON=CON) + await ws_stream() + else: + CON = None + logging.warning("DATABASE NOT BEING USED, NO DATA WILL BE RECORDED") + await ws_stream() + + +if __name__ == '__main__': + START_TIME = round(datetime.now().timestamp()*1000) + + logging.info(f'Log FilePath: {LOG_FILEPATH}') + + logging.basicConfig( + force=True, + filename=LOG_FILEPATH, + level=logging.INFO, + format='%(asctime)s - %(levelname)s - %(message)s', + filemode='w' + ) + logging.info(f"STARTED: {START_TIME}") + + try: + asyncio.run(main()) + except KeyboardInterrupt: + logging.info("Stream stopped") \ No newline at end of file diff --git a/ws_extended_user/Dockerfile b/ws_extended_user/Dockerfile new file mode 100644 index 0000000..a00f065 --- /dev/null +++ b/ws_extended_user/Dockerfile @@ -0,0 +1,19 @@ +FROM python:3.13-slim + +RUN apt-get update && \ + apt-get install -y build-essential + +RUN gcc --version +RUN rm -rf /var/lib/apt/lists/* + +WORKDIR /app + +COPY requirements.txt . + +RUN pip install --no-cache-dir -r requirements.txt + +COPY . . + +# Finally, run gunicorn. 
+CMD [ "python", "ws_extended_user.py"] +# CMD [ "gunicorn", "--workers=5", "--threads=1", "-b 0.0.0.0:8000", "app:server"] \ No newline at end of file