saving
This commit is contained in:
400
database.ipynb
400
database.ipynb
@@ -14,7 +14,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"execution_count": 11,
|
||||
"id": "f5040527",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
@@ -36,195 +36,236 @@
|
||||
" print(f\"Connection failed: {e}\") "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 7,
|
||||
"id": "5c23110d",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"query = '''\n",
|
||||
"SELECT timestamp_msg,timestamp_value,`value` FROM poly_rtds_cl_btcusd;\n",
|
||||
"'''"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 8,
|
||||
"id": "a866e9ca",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df = pd.read_sql(query, con=engine)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 10,
|
||||
"id": "954a3c3c",
|
||||
"id": "72059b3f",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"Connection successful\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"### MYSQL ###\n",
|
||||
"engine_inter_storage = create_engine('mysql+pymysql://root:pwd@100.84.226.40/polymarket')\n",
|
||||
"try:\n",
|
||||
" with engine.connect() as conn:\n",
|
||||
" print(\"Connection successful\")\n",
|
||||
"except Exception as e:\n",
|
||||
" print(f\"Connection failed: {e}\") "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 48,
|
||||
"id": "b723a51f",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df['ts'] = pd.to_datetime(df['timestamp_value'], unit='ms')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"id": "f11fd680",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"1774752413939"
|
||||
]
|
||||
},
|
||||
"execution_count": 6,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"round(datetime.now().timestamp()*1000)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 11,
|
||||
"id": "eadb0364",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"Timestamp('2026-03-29 03:22:03.145000')"
|
||||
]
|
||||
},
|
||||
"execution_count": 11,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"pd.to_datetime(1774754523145, unit='ms')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 14,
|
||||
"id": "3fbf525c",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"d = {'id': '1753563', 'question': 'Bitcoin Up or Down - March 29, 12:40AM-12:45AM ET', 'conditionId': '0x4cf6815a61939a9a0ee308772e727bbaf8e95803577aad7b9a9d3e028e37f13b', 'slug': 'btc-updown-5m-1774759200', 'resolutionSource': 'https://data.chain.link/streams/btc-usd', 'endDate': '2026-03-29T04:45:00Z', 'liquidity': '40412.2984', 'startDate': '2026-03-28T04:48:26.823506Z', 'image': 'https://polymarket-upload.s3.us-east-2.amazonaws.com/BTC+fullsize.png', 'icon': 'https://polymarket-upload.s3.us-east-2.amazonaws.com/BTC+fullsize.png', 'description': 'This market will resolve to \"Up\" if the Bitcoin price at the end of the time range specified in the title is greater than or equal to the price at the beginning of that range. Otherwise, it will resolve to \"Down\".\\nThe resolution source for this market is information from Chainlink, specifically the BTC/USD data stream available at https://data.chain.link/streams/btc-usd.\\nPlease note that this market is about the price according to Chainlink data stream BTC/USD, not according to other sources or spot markets.', 'outcomes': '[\"Up\", \"Down\"]', 'outcomePrices': '[\"0.505\", \"0.495\"]', 'volume': '1154.594883', 'active': True, 'closed': False, 'marketMakerAddress': '', 'createdAt': '2026-03-28T04:47:13.404208Z', 'updatedAt': '2026-03-29T04:38:45.64501Z', 'new': False, 'featured': False, 'archived': False, 'restricted': True, 'groupItemThreshold': '0', 'questionID': '0x7e86ce5e7ba0eb24758756db7c1443bf48041d08ac10364cf6771ef3f9c26733', 'enableOrderBook': True, 'orderPriceMinTickSize': 0.01, 'orderMinSize': 5, 'volumeNum': 1154.594883, 'liquidityNum': 40412.2984, 'endDateIso': '2026-03-29', 'startDateIso': '2026-03-28', 'hasReviewedDates': True, 'volume24hr': 1154.594883, 'volume1wk': 1154.594883, 'volume1mo': 1154.594883, 'volume1yr': 1154.594883, 'clobTokenIds': '[\"3132608671382208432230794800974499111421928258370863811882545679011841068490\", 
\"6273455409880805876304408793123549040642604317110995252288748628688958052125\"]', 'volume24hrClob': 1154.594883, 'volume1wkClob': 1154.594883, 'volume1moClob': 1154.594883, 'volume1yrClob': 1154.594883, 'volumeClob': 1154.594883, 'liquidityClob': 40412.2984, 'makerBaseFee': 1000, 'takerBaseFee': 1000, 'acceptingOrders': True, 'negRisk': False, 'ready': False, 'funded': False, 'acceptingOrdersTimestamp': '2026-03-28T04:47:21Z', 'cyom': False, 'competitive': 0.9999750006249843, 'pagerDutyNotificationEnabled': False, 'approved': True, 'rewardsMinSize': 50, 'rewardsMaxSpread': 4.5, 'spread': 0.01, 'lastTradePrice': 0.51, 'bestBid': 0.5, 'bestAsk': 0.51, 'automaticallyActive': True, 'clearBookOnStart': False, 'showGmpSeries': False, 'showGmpOutcome': False, 'manualActivation': False, 'negRiskOther': False, 'umaResolutionStatuses': '[]', 'pendingDeployment': False, 'deploying': False, 'rfqEnabled': False, 'eventStartTime': '2026-03-29T04:40:00Z', 'holdingRewardsEnabled': False, 'feesEnabled': True, 'requiresTranslation': False, 'makerRebatesFeeShareBps': 10000, 'feeType': 'crypto_fees', 'feeSchedule': {'exponent': 2, 'rate': 0.25, 'takerOnly': True, 'rebateRate': 0.2}}"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 15,
|
||||
"id": "699031ee",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"{'id': '1753563',\n",
|
||||
" 'question': 'Bitcoin Up or Down - March 29, 12:40AM-12:45AM ET',\n",
|
||||
" 'conditionId': '0x4cf6815a61939a9a0ee308772e727bbaf8e95803577aad7b9a9d3e028e37f13b',\n",
|
||||
" 'slug': 'btc-updown-5m-1774759200',\n",
|
||||
" 'resolutionSource': 'https://data.chain.link/streams/btc-usd',\n",
|
||||
" 'endDate': '2026-03-29T04:45:00Z',\n",
|
||||
" 'liquidity': '40412.2984',\n",
|
||||
" 'startDate': '2026-03-28T04:48:26.823506Z',\n",
|
||||
" 'image': 'https://polymarket-upload.s3.us-east-2.amazonaws.com/BTC+fullsize.png',\n",
|
||||
" 'icon': 'https://polymarket-upload.s3.us-east-2.amazonaws.com/BTC+fullsize.png',\n",
|
||||
" 'description': 'This market will resolve to \"Up\" if the Bitcoin price at the end of the time range specified in the title is greater than or equal to the price at the beginning of that range. Otherwise, it will resolve to \"Down\".\\nThe resolution source for this market is information from Chainlink, specifically the BTC/USD data stream available at https://data.chain.link/streams/btc-usd.\\nPlease note that this market is about the price according to Chainlink data stream BTC/USD, not according to other sources or spot markets.',\n",
|
||||
" 'outcomes': '[\"Up\", \"Down\"]',\n",
|
||||
" 'outcomePrices': '[\"0.505\", \"0.495\"]',\n",
|
||||
" 'volume': '1154.594883',\n",
|
||||
" 'active': True,\n",
|
||||
" 'closed': False,\n",
|
||||
" 'marketMakerAddress': '',\n",
|
||||
" 'createdAt': '2026-03-28T04:47:13.404208Z',\n",
|
||||
" 'updatedAt': '2026-03-29T04:38:45.64501Z',\n",
|
||||
" 'new': False,\n",
|
||||
" 'featured': False,\n",
|
||||
" 'archived': False,\n",
|
||||
" 'restricted': True,\n",
|
||||
" 'groupItemThreshold': '0',\n",
|
||||
" 'questionID': '0x7e86ce5e7ba0eb24758756db7c1443bf48041d08ac10364cf6771ef3f9c26733',\n",
|
||||
" 'enableOrderBook': True,\n",
|
||||
" 'orderPriceMinTickSize': 0.01,\n",
|
||||
" 'orderMinSize': 5,\n",
|
||||
" 'volumeNum': 1154.594883,\n",
|
||||
" 'liquidityNum': 40412.2984,\n",
|
||||
" 'endDateIso': '2026-03-29',\n",
|
||||
" 'startDateIso': '2026-03-28',\n",
|
||||
" 'hasReviewedDates': True,\n",
|
||||
" 'volume24hr': 1154.594883,\n",
|
||||
" 'volume1wk': 1154.594883,\n",
|
||||
" 'volume1mo': 1154.594883,\n",
|
||||
" 'volume1yr': 1154.594883,\n",
|
||||
" 'clobTokenIds': '[\"3132608671382208432230794800974499111421928258370863811882545679011841068490\", \"6273455409880805876304408793123549040642604317110995252288748628688958052125\"]',\n",
|
||||
" 'volume24hrClob': 1154.594883,\n",
|
||||
" 'volume1wkClob': 1154.594883,\n",
|
||||
" 'volume1moClob': 1154.594883,\n",
|
||||
" 'volume1yrClob': 1154.594883,\n",
|
||||
" 'volumeClob': 1154.594883,\n",
|
||||
" 'liquidityClob': 40412.2984,\n",
|
||||
" 'makerBaseFee': 1000,\n",
|
||||
" 'takerBaseFee': 1000,\n",
|
||||
" 'acceptingOrders': True,\n",
|
||||
" 'negRisk': False,\n",
|
||||
" 'ready': False,\n",
|
||||
" 'funded': False,\n",
|
||||
" 'acceptingOrdersTimestamp': '2026-03-28T04:47:21Z',\n",
|
||||
" 'cyom': False,\n",
|
||||
" 'competitive': 0.9999750006249843,\n",
|
||||
" 'pagerDutyNotificationEnabled': False,\n",
|
||||
" 'approved': True,\n",
|
||||
" 'rewardsMinSize': 50,\n",
|
||||
" 'rewardsMaxSpread': 4.5,\n",
|
||||
" 'spread': 0.01,\n",
|
||||
" 'lastTradePrice': 0.51,\n",
|
||||
" 'bestBid': 0.5,\n",
|
||||
" 'bestAsk': 0.51,\n",
|
||||
" 'automaticallyActive': True,\n",
|
||||
" 'clearBookOnStart': False,\n",
|
||||
" 'showGmpSeries': False,\n",
|
||||
" 'showGmpOutcome': False,\n",
|
||||
" 'manualActivation': False,\n",
|
||||
" 'negRiskOther': False,\n",
|
||||
" 'umaResolutionStatuses': '[]',\n",
|
||||
" 'pendingDeployment': False,\n",
|
||||
" 'deploying': False,\n",
|
||||
" 'rfqEnabled': False,\n",
|
||||
" 'eventStartTime': '2026-03-29T04:40:00Z',\n",
|
||||
" 'holdingRewardsEnabled': False,\n",
|
||||
" 'feesEnabled': True,\n",
|
||||
" 'requiresTranslation': False,\n",
|
||||
" 'makerRebatesFeeShareBps': 10000,\n",
|
||||
" 'feeType': 'crypto_fees',\n",
|
||||
" 'feeSchedule': {'exponent': 2,\n",
|
||||
" 'rate': 0.25,\n",
|
||||
" 'takerOnly': True,\n",
|
||||
" 'rebateRate': 0.2}}"
|
||||
]
|
||||
},
|
||||
"execution_count": 15,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"d"
|
||||
"# with engine.connect() as conn:\n",
|
||||
"# print(\"Connection successful\")\n",
|
||||
"# sql = text(\"TRUNCATE TABLE coinbase_btcusd_trades;\")\n",
|
||||
"# conn.execute(sql)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "a620fa17",
|
||||
"id": "5c23110d",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"q_binance = '''\n",
|
||||
"SELECT * FROM binance_btcusd_trades;\n",
|
||||
"'''\n",
|
||||
"q_coinbase = '''\n",
|
||||
"SELECT * FROM coinbase_btcusd_trades;\n",
|
||||
"'''\n",
|
||||
"q_rtds = '''\n",
|
||||
"SELECT * FROM poly_rtds_cl_btcusd;\n",
|
||||
"'''\n",
|
||||
"q_clob = '''\n",
|
||||
"SELECT * FROM poly_btcusd_trades;\n",
|
||||
"'''"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 24,
|
||||
"id": "a866e9ca",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# df_binance = pd.read_sql(q_binance, con=engine)\n",
|
||||
"df_coinbase = pd.read_sql(q_coinbase, con=engine)\n",
|
||||
"df_rtds = pd.read_sql(q_rtds, con=engine)\n",
|
||||
"df_clob = pd.read_sql(q_clob, con=engine)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "954a3c3c",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# df_binance['timestamp_arrival'] = pd.to_datetime(df_binance['timestamp_arrival'], unit='ms')\n",
|
||||
"df_coinbase['timestamp_arrival'] = pd.to_datetime(df_coinbase['timestamp_arrival'], unit='ms')\n",
|
||||
"df_rtds['timestamp_arrival'] = pd.to_datetime(df_rtds['timestamp_arrival'], unit='ms')\n",
|
||||
"df_clob['timestamp_arrival'] = pd.to_datetime(df_clob['timestamp_arrival'], unit='ms')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 57,
|
||||
"id": "50c6339f",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def copy_table_data_btw_servers(df, table_name, engine_destination) -> None:\n",
|
||||
" rows_imported = df.to_sql(name=table_name, con=engine_destination, if_exists='append')\n",
|
||||
" if rows_imported == len(df):\n",
|
||||
" print(f'SUCCESS: COPIED {rows_imported} to table \"{table_name}\" on INTERSERVER_STORAGE')\n",
|
||||
" else:\n",
|
||||
" raise ValueError(f'FAILED: COPIED {rows_imported} rows to table {table_name} on INTERSERVER_STORAGE; EXPECTED {len(df)}')\n",
|
||||
" \n",
|
||||
"def truncate_table(engine, table):\n",
|
||||
" with engine.connect() as conn:\n",
|
||||
" sql = text(f\"TRUNCATE TABLE {table};\")\n",
|
||||
" conn.execute(sql)\n",
|
||||
" conn.commit()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 61,
|
||||
"id": "d0399a96",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def backup_all_tables(engine_origin, engine_destination, tables_to_copy):\n",
|
||||
" for t in tables_to_copy:\n",
|
||||
" q = f'''\n",
|
||||
" SELECT * FROM {t};\n",
|
||||
" '''\n",
|
||||
" df = pd.read_sql(q, con=engine_origin)\n",
|
||||
" print('-------------------------------------------------------------------------')\n",
|
||||
" print(f'Loaded Data for Table: {t}...Attempting to Transfer to Destination Server')\n",
|
||||
" copy_table_data_btw_servers(\n",
|
||||
" df=df,\n",
|
||||
" table_name=t,\n",
|
||||
" engine_destination=engine_destination,\n",
|
||||
" )\n",
|
||||
" print(f'Attempting to Truncate Table: {t}...')\n",
|
||||
" \n",
|
||||
" ### FOR REALTIME - instead of truncate, need to delete rows using a conditon (e.g. delete all rows <= max timestamp arrival in the DF)\n",
|
||||
" \n",
|
||||
" truncate_table(\n",
|
||||
" engine=engine_origin,\n",
|
||||
" table=t,\n",
|
||||
" )\n",
|
||||
" print(f'...Successfully Truncated Table: {t}')\n",
|
||||
" print(f'Done Transferring Data for Table: {t}')\n",
|
||||
" \n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 59,
|
||||
"id": "0de1629a",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"tables_to_copy = [\n",
|
||||
" # 'binance_btcusd_trades',\n",
|
||||
" # 'coinbase_btcusd_trades',\n",
|
||||
" 'poly_btcusd_trades',\n",
|
||||
" 'poly_rtds_cl_btcusd',\n",
|
||||
" # 'user_stream_orders',\n",
|
||||
" # 'user_stream_trades',\n",
|
||||
"]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 60,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"-------------------------------------------------------------------------\n",
|
||||
"Loaded Data for Table: poly_btcusd_trades...Attempting to Transfer to Destination Server\n",
|
||||
"SUCCESS: COPIED 720568 to table \"poly_btcusd_trades\" on INTERSERVER_STORAGE\n",
|
||||
"Attempting to Truncate Table: poly_btcusd_trades...\n",
|
||||
"...Successfully Truncated Table: poly_btcusd_trades\n",
|
||||
"Done Transferring Data for Table: poly_btcusd_trades\n",
|
||||
"-------------------------------------------------------------------------\n",
|
||||
"Loaded Data for Table: poly_rtds_cl_btcusd...Attempting to Transfer to Destination Server\n",
|
||||
"SUCCESS: COPIED 73771 to table \"poly_rtds_cl_btcusd\" on INTERSERVER_STORAGE\n",
|
||||
"Attempting to Truncate Table: poly_rtds_cl_btcusd...\n",
|
||||
"...Successfully Truncated Table: poly_rtds_cl_btcusd\n",
|
||||
"Done Transferring Data for Table: poly_rtds_cl_btcusd\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"backup_all_tables(\n",
|
||||
" engine_origin=engine,\n",
|
||||
" engine_destination=engine_inter_storage,\n",
|
||||
" tables_to_copy=tables_to_copy\n",
|
||||
")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "cd0b40d2",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"SUCCESS COPIED 326007 to binance_btcusd_trades to INTERSERVER_STORAGE\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "48b47799",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "ad030f88",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "cafc5060",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
@@ -232,7 +273,6 @@
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "2071f014",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
|
||||
51
docker-compose.yml
Normal file
51
docker-compose.yml
Normal file
@@ -0,0 +1,51 @@
|
||||
services:
|
||||
ws_binance:
|
||||
container_name: ws_binance
|
||||
restart: "unless-stopped"
|
||||
build:
|
||||
context: ./
|
||||
dockerfile: ./ws_binance/Dockerfile
|
||||
volumes:
|
||||
- /home/ubuntu/data:/home/ubuntu/data:rw # Read-write access to data
|
||||
- /home/ubuntu/logs:/home/ubuntu/logs:rw # Read-write access to data
|
||||
network_mode: "host"
|
||||
ws_clob:
|
||||
container_name: ws_clob
|
||||
restart: "unless-stopped"
|
||||
build:
|
||||
context: ./
|
||||
dockerfile: ./ws_clob/Dockerfile
|
||||
volumes:
|
||||
- /home/ubuntu/data:/home/ubuntu/data:rw # Read-write access to data
|
||||
- /home/ubuntu/logs:/home/ubuntu/logs:rw # Read-write access to data
|
||||
network_mode: "host"
|
||||
ws_rtds:
|
||||
container_name: ws_rtds
|
||||
restart: "unless-stopped"
|
||||
build:
|
||||
context: ./
|
||||
dockerfile: ./ws_rtds/Dockerfile
|
||||
volumes:
|
||||
- /home/ubuntu/data:/home/ubuntu/data:rw # Read-write access to data
|
||||
- /home/ubuntu/logs:/home/ubuntu/logs:rw # Read-write access to data
|
||||
network_mode: "host"
|
||||
ws_user:
|
||||
container_name: ws_user
|
||||
restart: "unless-stopped"
|
||||
build:
|
||||
context: ./
|
||||
dockerfile: ./ws_user/Dockerfile
|
||||
volumes:
|
||||
- /home/ubuntu/data:/home/ubuntu/data:rw # Read-write access to data
|
||||
- /home/ubuntu/logs:/home/ubuntu/logs:rw # Read-write access to data
|
||||
network_mode: "host"
|
||||
ng:
|
||||
container_name: ng
|
||||
restart: "unless-stopped"
|
||||
build:
|
||||
context: ./
|
||||
dockerfile: ./ng/Dockerfile
|
||||
volumes:
|
||||
- /home/ubuntu/data:/home/ubuntu/data:rw # Read-write access to data
|
||||
- /home/ubuntu/logs:/home/ubuntu/logs:rw # Read-write access to data
|
||||
network_mode: "host"
|
||||
808
main.py
Normal file
808
main.py
Normal file
@@ -0,0 +1,808 @@
|
||||
import asyncio
|
||||
import json
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
import math
|
||||
import os
|
||||
import time
|
||||
from datetime import datetime, timezone
|
||||
from typing import AsyncContextManager
|
||||
import traceback
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
import requests
|
||||
import talib
|
||||
import valkey
|
||||
from dotenv import load_dotenv
|
||||
from py_clob_client.clob_types import (
|
||||
OrderArgs,
|
||||
OrderType,
|
||||
PartialCreateOrderOptions,
|
||||
PostOrdersArgs,
|
||||
BalanceAllowanceParams
|
||||
)
|
||||
from py_clob_client.order_builder.constants import BUY, SELL
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.ext.asyncio import create_async_engine
|
||||
from functools import wraps
|
||||
import modules.api as api
|
||||
|
||||
### Custom Order Args ###
|
||||
@dataclass
|
||||
class Custom_OrderArgs(OrderArgs):
|
||||
max_price: float = 0.00
|
||||
|
||||
|
||||
### Database ###
|
||||
CLIENT = None
|
||||
CON: AsyncContextManager | None = None
|
||||
VAL_KEY = None
|
||||
|
||||
### Logging ###
|
||||
load_dotenv()
|
||||
LOG_FILEPATH: str = os.getenv("LOGS_PATH") + '/Polymarket_5min_Algo.log'
|
||||
|
||||
### ALGO CONFIG / CONSTANTS ###
|
||||
SLOPE_YES_THRESH = 0.01 # In Percent % Chg (e.g. 0.02 == 0.02%)
|
||||
ENDTIME_BUFFER_SEC = 30 # Stop trading, cancel all open orders and exit positions this many seconds before mkt settles.
|
||||
TGT_PX_INDEX_DIFF_THRESH = 0.1 # In Percent % Chg (e.g. 0.02 == 0.02%)
|
||||
DEFAULT_ORDER_SIZE = 10 # In USDe
|
||||
MIN_ORDER_SIZE = 5 # In USDe
|
||||
TGT_PROFIT_CENTS = 0.02
|
||||
CHASE_TO_BUY_CENTS = 0.05
|
||||
MAX_ALLOWED_POLY_PX = 0.90
|
||||
|
||||
### GLOBALS ###
|
||||
ORDER_LOCK = 0
|
||||
|
||||
SLUG_END_TIME = 0
|
||||
|
||||
FREE_CASH: float = 0
|
||||
|
||||
POLY_BINANCE = {}
|
||||
POLY_REF = {}
|
||||
POLY_CLOB = {}
|
||||
POLY_CLOB_DOWN = {}
|
||||
USER_TRADES = {}
|
||||
USER_ORDERS = {}
|
||||
SLOPE_HIST = []
|
||||
|
||||
LOCAL_ACTIVE_ORDERS = []
|
||||
LOCAL_TOKEN_BALANCES = {}
|
||||
# LOCAL_ACTIVE_POSITIONS = []
|
||||
|
||||
ACTIVE_BALANCES_EXIST = False ### REMOVE
|
||||
|
||||
|
||||
|
||||
### Decorators ###
|
||||
def async_timeit(func):
|
||||
@wraps(func)
|
||||
async def wrapper(*args, **kwargs):
|
||||
start_time = time.perf_counter()
|
||||
try:
|
||||
return await func(*args, **kwargs)
|
||||
finally:
|
||||
end_time = time.perf_counter()
|
||||
total_time = (end_time - start_time)*1000
|
||||
print(f"Function '{func.__name__}' executed in {total_time:.4f} ms")
|
||||
|
||||
return wrapper
|
||||
|
||||
### Database Funcs ###
|
||||
# @async_timeit
|
||||
async def create_executions_orders_table(
|
||||
CON: AsyncContextManager,
|
||||
engine: str = 'mysql', # mysql | duckdb
|
||||
) -> None:
|
||||
if CON is None:
|
||||
logging.info("NO DB CONNECTION, SKIPPING Create Statements")
|
||||
else:
|
||||
if engine == 'mysql':
|
||||
logging.info('Creating Table if Does Not Exist: executions_orders')
|
||||
await CON.execute(text("""
|
||||
CREATE TABLE IF NOT EXISTS executions_orders (
|
||||
timestamp_sent BIGINT,
|
||||
token_id VARCHAR(100),
|
||||
limit_price DOUBLE,
|
||||
size DOUBLE,
|
||||
side VARCHAR(8),
|
||||
order_type VARCHAR(8),
|
||||
post_only BOOL,
|
||||
resp_errorMsg VARCHAR(100),
|
||||
resp_orderID VARCHAR(100),
|
||||
resp_takingAmount DOUBLE,
|
||||
resp_makingAmount DOUBLE,
|
||||
resp_status VARCHAR(20),
|
||||
resp_success BOOL
|
||||
);
|
||||
"""))
|
||||
await CON.commit()
|
||||
else:
|
||||
raise ValueError('Only MySQL engine is implemented')
|
||||
|
||||
# @async_timeit
|
||||
async def insert_executions_orders_table(
|
||||
timestamp_sent: int,
|
||||
token_id: str,
|
||||
limit_price: float,
|
||||
size: float,
|
||||
side: str,
|
||||
order_type: str,
|
||||
post_only: bool,
|
||||
resp_errorMsg: str,
|
||||
resp_orderID: str,
|
||||
resp_takingAmount: float,
|
||||
resp_makingAmount: float,
|
||||
resp_status: str,
|
||||
resp_success: bool,
|
||||
CON: AsyncContextManager,
|
||||
engine: str = 'mysql', # mysql | duckdb
|
||||
) -> None:
|
||||
params={
|
||||
'timestamp_sent': timestamp_sent,
|
||||
'token_id': token_id,
|
||||
'limit_price': limit_price,
|
||||
'size': size,
|
||||
'side': side,
|
||||
'order_type': order_type,
|
||||
'post_only': post_only,
|
||||
'resp_errorMsg': resp_errorMsg,
|
||||
'resp_orderID': resp_orderID,
|
||||
'resp_takingAmount': resp_takingAmount,
|
||||
'resp_makingAmount': resp_makingAmount,
|
||||
'resp_status': resp_status,
|
||||
'resp_success': resp_success,
|
||||
}
|
||||
if CON is None:
|
||||
logging.info("NO DB CONNECTION, SKIPPING Insert Statements")
|
||||
else:
|
||||
if engine == 'mysql':
|
||||
await CON.execute(text("""
|
||||
INSERT INTO executions_orders
|
||||
(
|
||||
timestamp_sent,
|
||||
token_id,
|
||||
limit_price,
|
||||
size,
|
||||
side,
|
||||
order_type,
|
||||
post_only,
|
||||
resp_errorMsg,
|
||||
resp_orderID,
|
||||
resp_takingAmount,
|
||||
resp_makingAmount,
|
||||
resp_status,
|
||||
resp_success
|
||||
)
|
||||
VALUES
|
||||
(
|
||||
:timestamp_sent,
|
||||
:token_id,
|
||||
:limit_price,
|
||||
:size,
|
||||
:side,
|
||||
:order_type,
|
||||
:post_only,
|
||||
:resp_errorMsg,
|
||||
:resp_orderID,
|
||||
:resp_takingAmount,
|
||||
:resp_makingAmount,
|
||||
:resp_status,
|
||||
:resp_success
|
||||
)
|
||||
"""),
|
||||
parameters=params
|
||||
)
|
||||
await CON.commit()
|
||||
else:
|
||||
raise ValueError('Only MySQL engine is implemented')
|
||||
|
||||
### Functions ###
|
||||
# @async_timeit
|
||||
async def slope_decision() -> list[bool, str]:
|
||||
hist_trades = np.array(POLY_BINANCE.get('hist_trades', []))
|
||||
|
||||
if ( np.max(hist_trades[:, 0] )*1000 ) - ( np.min(hist_trades[:, 0])*1000 ) < 5:
|
||||
logging.info('Max - Min Trade In History is < 5 Seconds Apart')
|
||||
return False, ''
|
||||
|
||||
last_px = POLY_BINANCE['value']
|
||||
last_px_ts = POLY_BINANCE['timestamp_value']
|
||||
|
||||
ts_min_1_sec = last_px_ts - 1000
|
||||
price_min_1_sec_index = (np.abs(hist_trades[:, 0] - ts_min_1_sec)).argmin()
|
||||
price_min_1_sec = hist_trades[:, 1][price_min_1_sec_index]
|
||||
|
||||
ts_min_5_sec = last_px_ts - 5000
|
||||
price_min_5_sec_index = (np.abs(hist_trades[:, 0] - ts_min_5_sec)).argmin()
|
||||
price_min_5_sec = hist_trades[:, 1][price_min_5_sec_index]
|
||||
|
||||
slope = (last_px - price_min_1_sec) / price_min_1_sec
|
||||
slope_5 = (last_px - price_min_5_sec) / price_min_5_sec
|
||||
SLOPE_HIST.append(slope)
|
||||
|
||||
# print(f'Avg Binance: {np.mean(hist_trades[:, 1])}')
|
||||
# print(f'Len Hist : {len(hist_trades[:, 1])}')
|
||||
# print(f'First Hist : {pd.to_datetime(np.min(hist_trades[:, 0]), unit='ms')}')
|
||||
# print(f'Latest Hist: {pd.to_datetime(np.max(hist_trades[:, 0]), unit='ms')}')
|
||||
print(f'Slope Hist Avg: {np.mean(SLOPE_HIST):.4%}')
|
||||
print(f'Slope Hist Max: {np.max(SLOPE_HIST):.4%}')
|
||||
print(f'Slope Hist Std: {np.std(SLOPE_HIST):.4%}')
|
||||
slope_1_buy = abs(slope) >= ( SLOPE_YES_THRESH / 100)
|
||||
slope_5_buy = abs(slope_5) >= ( SLOPE_YES_THRESH / 100)
|
||||
|
||||
print(f'SLOPE_1: {slope:.4%} == {slope_1_buy}; SLOPE_5: {slope_5:.4%} == {slope_5_buy};')
|
||||
|
||||
### DECISION ###
|
||||
if slope_1_buy and slope_5_buy:
|
||||
print(f'🤑🤑🤑🤑🤑🤑🤑🤑🤑🤑 Slope: {slope_5:.4%};')
|
||||
side = 'UP' if slope > 0.00 else 'DOWN'
|
||||
return True, side
|
||||
else:
|
||||
return False, ''
|
||||
|
||||
# @async_timeit
|
||||
async def cancel_all_orders(CLIENT):
|
||||
logging.info('Attempting to Cancel All Orders')
|
||||
cxl_resp = CLIENT.cancel_all()
|
||||
if bool(cxl_resp.get('not_canceled', True)):
|
||||
logging.warning(f'*** Cancel Request FAILED, trying again and shutting down: {cxl_resp}')
|
||||
cxl_resp = CLIENT.cancel_all()
|
||||
raise Exception('*** Cancel Request FAILED')
|
||||
logging.info(f'Cancel Successful: {cxl_resp}')
|
||||
|
||||
# @async_timeit
|
||||
async def cancel_single_order_by_id(CLIENT, order_id):
|
||||
global LOCAL_ACTIVE_ORDERS
|
||||
|
||||
logging.info(f'Attempting to Cancel Single Order: {order_id}')
|
||||
cxl_resp = CLIENT.cancel(order_id=order_id)
|
||||
|
||||
for idx, o in enumerate(LOCAL_ACTIVE_ORDERS):
|
||||
if o.get('orderID') == order_id:
|
||||
if bool(cxl_resp.get('not_canceled', True)):
|
||||
if cxl_resp.get('not_canceled', {}).get(order_id, None) == "matched orders can't be canceled":
|
||||
logging.info(f'Cancel request failed b/c already matched: {cxl_resp}')
|
||||
return False
|
||||
elif cxl_resp.get('not_canceled', {}).get(order_id, None) == "order can't be found - already canceled or matched":
|
||||
logging.info(f'Cancel request failed b/c already matched or cancelled: {cxl_resp}')
|
||||
LOCAL_ACTIVE_ORDERS.pop(idx)
|
||||
return False
|
||||
else:
|
||||
logging.warning(f'*** Cancel Request FAILED, shutting down: {cxl_resp}')
|
||||
raise Exception('*** Cancel Request FAILED - SHUTDONW')
|
||||
else:
|
||||
LOCAL_ACTIVE_ORDERS.pop(idx)
|
||||
logging.info(f'Cancel Successful: {cxl_resp}')
|
||||
return False
|
||||
|
||||
# @async_timeit
|
||||
async def flatten_open_positions(CLIENT, token_id_up, token_id_down):
|
||||
up = await get_balance_by_token_id(CLIENT=CLIENT, token_id=token_id_up)
|
||||
down = await get_balance_by_token_id(CLIENT=CLIENT, token_id=token_id_down)
|
||||
|
||||
### Submit orders to flatten outstanding balances ###
|
||||
if up > MIN_ORDER_SIZE:
|
||||
logging.info(f'Flattening Up Position: {up}')
|
||||
await post_order(
|
||||
CLIENT = CLIENT,
|
||||
tick_size = POLY_CLOB['tick_size'],
|
||||
neg_risk = POLY_CLOB['neg_risk'],
|
||||
OrderArgs_list = [Custom_OrderArgs(
|
||||
token_id=token_id_up,
|
||||
price=float(POLY_CLOB['price'])-0.01,
|
||||
size=up,
|
||||
side=SELL,
|
||||
)]
|
||||
)
|
||||
if down > MIN_ORDER_SIZE:
|
||||
logging.info(f'Flattening Down Position: {down}')
|
||||
await post_order(
|
||||
CLIENT = CLIENT,
|
||||
tick_size = POLY_CLOB['tick_size'],
|
||||
neg_risk = POLY_CLOB['neg_risk'],
|
||||
OrderArgs_list = [Custom_OrderArgs(
|
||||
token_id=token_id_down,
|
||||
price=float(POLY_CLOB_DOWN['price'])-0.01,
|
||||
size=down,
|
||||
side=SELL,
|
||||
|
||||
)]
|
||||
)
|
||||
|
||||
# @async_timeit
|
||||
async def get_balance_by_token_id(CLIENT, token_id):
|
||||
collateral = CLIENT.get_balance_allowance(
|
||||
BalanceAllowanceParams(
|
||||
asset_type='CONDITIONAL',
|
||||
token_id=token_id,
|
||||
)
|
||||
)
|
||||
return int(collateral['balance']) / 1_000_000
|
||||
|
||||
# @async_timeit
|
||||
async def get_usde_balance(CLIENT):
|
||||
collateral = CLIENT.get_balance_allowance(
|
||||
BalanceAllowanceParams(
|
||||
asset_type='COLLATERAL'
|
||||
)
|
||||
)
|
||||
return int(collateral['balance']) / 1_000_000
|
||||
|
||||
@async_timeit
|
||||
async def check_for_open_positions(CLIENT, token_id_up, token_id_down):
|
||||
global LOCAL_TOKEN_BALANCES
|
||||
|
||||
if token_id_up is None or token_id_down is None:
|
||||
logging.critical('Token Id is None, Exiting')
|
||||
raise ValueError('Token Id is None, Exiting')
|
||||
# return False
|
||||
up = await get_balance_by_token_id(CLIENT=CLIENT, token_id=token_id_up)
|
||||
down = await get_balance_by_token_id(CLIENT=CLIENT, token_id=token_id_down)
|
||||
|
||||
LOCAL_TOKEN_BALANCES = {
|
||||
token_id_up: up if up else 0,
|
||||
token_id_down: down if down else 0,
|
||||
}
|
||||
|
||||
logging.info(f'LOCAL_TOKEN_BALANCES: {LOCAL_TOKEN_BALANCES}')
|
||||
|
||||
if ( abs(up) > 0 ) or ( abs(down) > 0 ):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
@async_timeit
|
||||
async def post_order(CLIENT, OrderArgs_list: list[Custom_OrderArgs], tick_size: float | str, neg_risk: bool):
|
||||
global LOCAL_ACTIVE_ORDERS
|
||||
global LOCAL_TOKEN_BALANCES
|
||||
|
||||
orders = []
|
||||
for oa in OrderArgs_list:
|
||||
orders.append(
|
||||
PostOrdersArgs(
|
||||
order=CLIENT.create_order(
|
||||
order_args=oa,
|
||||
options=PartialCreateOrderOptions(
|
||||
tick_size=str(tick_size),
|
||||
neg_risk=neg_risk
|
||||
),
|
||||
),
|
||||
orderType=OrderType.GTC,
|
||||
postOnly=False,
|
||||
),
|
||||
)
|
||||
|
||||
### POST
|
||||
response = CLIENT.post_orders(orders)
|
||||
for idx, d in enumerate(response):
|
||||
if d['errorMsg'] == '':
|
||||
d['token_id'] = OrderArgs_list[idx].token_id
|
||||
d['price'] = OrderArgs_list[idx].price
|
||||
d['size'] = OrderArgs_list[idx].size
|
||||
d['side'] = str(OrderArgs_list[idx].side).upper()
|
||||
|
||||
if d['status'].upper() =='MATCHED':
|
||||
### Order Immediately Matched, Can Put in Offsetting Order Depending on State ###
|
||||
print('******** ORDER APPEND TO LOCAL - MATCHED ********* ')
|
||||
LOCAL_ACTIVE_ORDERS.append(d)
|
||||
|
||||
if d['status'].upper() == 'CONFIRMED':
|
||||
current_balance = float(LOCAL_TOKEN_BALANCES.get(d['token_id'], 0.00))
|
||||
if d['side'] == 'BUY':
|
||||
size = float(d['size'])
|
||||
else:
|
||||
size = float(d['size']) * -1
|
||||
|
||||
LOCAL_TOKEN_BALANCES[d['token_id']] = current_balance + size
|
||||
print('******** TRADE FILLED, BAL UPDATED ********* ')
|
||||
else:
|
||||
print('******** ORDER APPEND TO LOCAL - LIVE ********* ')
|
||||
LOCAL_ACTIVE_ORDERS.append(d)
|
||||
else:
|
||||
raise ValueError(f'Order entry failed: {d}')
|
||||
|
||||
logging.info(f'Order Posted Resp: {response}')
|
||||
print(f'Order Posted Resp: {response}')
|
||||
|
||||
|
||||
### Routes ###
async def no_orders_no_positions_route():
    """Entry route: when flat with no working orders, look for a new buy.

    Gate checks (any failure returns False without trading):
      1. Both UP and DOWN market prices must be within MAX_ALLOWED_POLY_PX.
      2. Target price must be within TGT_PX_INDEX_DIFF_THRESH% of the index.
      3. slope_decision() must approve and pick a side ('UP' or 'DOWN').

    On success posts a GTC buy one tick above the chosen side's market price,
    unless ORDER_LOCK blocks entry.
    """
    global ORDER_LOCK

    ### Check for Price Bands ###
    up_px = float(POLY_CLOB.get('price', 0))
    down_px = float(POLY_CLOB_DOWN.get('price', 0))

    if (up_px > MAX_ALLOWED_POLY_PX) or (down_px > MAX_ALLOWED_POLY_PX):
        logging.info(f'Outside max allowed px: {MAX_ALLOWED_POLY_PX}')
        return False

    ### Check for Index vs. Target Px ###
    tgt_px = float(POLY_CLOB.get('target_price', 0))
    ref_px = float(POLY_REF.get('value'))
    tgt_px_diff_to_index = ( abs( tgt_px - ref_px ) / tgt_px)
    if tgt_px_diff_to_index > (TGT_PX_INDEX_DIFF_THRESH / 100):
        logging.info(f'Tgt Diff to Index Outside Limit ({TGT_PX_INDEX_DIFF_THRESH}%); Diff {tgt_px_diff_to_index:.4%}; Index: {ref_px:.2f}; Tgt: {tgt_px:.2f}')
        return False

    ### Check Slope ###
    slope_bool, slope_side = await slope_decision()
    if not slope_bool:
        logging.info('Failed Slope Check')
        return False
    token_id = POLY_CLOB.get('token_id_up', None) if slope_side == 'UP' else POLY_CLOB.get('token_id_down', None)

    ### Order Entry ###
    # BUGFIX: price the order off the book actually being bought -- the
    # original always used the UP book's price even when slope_side
    # selected the DOWN token.
    px = (up_px if slope_side == 'UP' else down_px) + 0.01
    order = Custom_OrderArgs(
        token_id=token_id,
        price=px,
        size=DEFAULT_ORDER_SIZE,
        side=BUY,
        max_price=px + CHASE_TO_BUY_CENTS,
    )

    ### ADD CHECK FOR MKT MOVED AWAY FROM OPPORTUNITY ###

    if ORDER_LOCK:
        logging.info(f'BUY ORDER BLOCKED BY LOCK: {order}')
    else:
        logging.info(f'Attempting BUY Order {order}')
        await post_order(
            CLIENT=CLIENT,
            tick_size=POLY_CLOB['tick_size'],
            neg_risk=POLY_CLOB['neg_risk'],
            OrderArgs_list=[order],
        )
        # ORDER_LOCK = ORDER_LOCK + 1
|
||||
|
||||
async def active_orders_no_positions_route():
    """Manage resting orders while flat (no open positions).

    Hard sanity limits first: more than two active orders, or more than one
    active order per side, kills the algo. Then each working order is chased:
    a BUY whose book price has reached/passed its limit is cancelled and
    re-bid one tick higher while the market stays below the order's
    max_price, otherwise abandoned; a SELL whose book price has dropped to
    its limit is cancelled and re-offered one tick lower.
    """
    if len(LOCAL_ACTIVE_ORDERS) > 2:
        logging.critical('More than two active orders, shutting down')
        await kill_algo()

    b_c = 0
    s_c = 0
    for o in LOCAL_ACTIVE_ORDERS:
        if o['side'] == 'BUY':
            b_c = b_c + 1
        elif o['side'] == 'SELL':
            s_c = s_c + 1
    if (b_c > 1) or (s_c > 1):
        logging.critical(f'More than one active buy or more than one active sell: b_c {b_c}; s_c{s_c}')
        await kill_algo()

    for o in LOCAL_ACTIVE_ORDERS:
        logging.info(f"Working on order ({o['side']}): {o['orderID']}")

        if o.get('status').upper() == 'MATCHED':
            logging.info('Order is matched, awaiting confirm or kickback')
        elif o.get('status').upper() == 'FAILED':
            raise ValueError(f'Trade FAILED after matching: {o}')
        else:
            orig_px = float(o['price'])
            orig_size = float(o['size'])
            if o['side'] == 'BUY':
                # Last price on the book this order is working in.
                if POLY_CLOB['token_id_up'] == o['token_id']:
                    clob_px = float(POLY_CLOB['price'])
                else:
                    clob_px = float(POLY_CLOB_DOWN['price'])

                if clob_px >= orig_px:
                    logging.info(f"Market px: ({clob_px} is above buy order px: {orig_px:.2f})")
                    if o.get('max_price', 0) > clob_px:
                        # Market still inside the chase band: cancel and
                        # re-bid one tick higher.
                        # BUGFIX: the original log text said the market had
                        # "moved too far away", which described the opposite
                        # (abandon) branch below.
                        logging.info(f"Market px ({clob_px}) still below max chase px ({o.get('max_price', 0):.2f}); cancelling and re-bidding one tick higher")

                        order_matched = await cancel_single_order_by_id(CLIENT=CLIENT, order_id=o['orderID'])
                        if order_matched:
                            # Cancel raced a fill -- mark matched and wait.
                            o['status'] = 'MATCHED'
                        else:
                            px = orig_px + 0.01
                            await post_order(
                                CLIENT=CLIENT,
                                tick_size=POLY_CLOB['tick_size'],
                                neg_risk=POLY_CLOB['neg_risk'],
                                OrderArgs_list=[Custom_OrderArgs(
                                    token_id=o['token_id'],
                                    price=px,
                                    size=orig_size,
                                    side=BUY,
                                    max_price=o['max_price'],
                                )],
                            )
                    else:
                        # Market at/over the chase limit: abandon the order.
                        await cancel_single_order_by_id(CLIENT=CLIENT, order_id=o['orderID'])
            elif o['side'] == 'SELL':
                if POLY_CLOB['token_id_up'] == o['token_id']:
                    clob_px = float(POLY_CLOB['price'])
                else:
                    clob_px = float(POLY_CLOB_DOWN['price'])

                if clob_px <= orig_px:
                    logging.info(f"Market px: ({clob_px} is below sell order px: {orig_px:.2f})")

                    order_filled = await cancel_single_order_by_id(CLIENT=CLIENT, order_id=o['orderID'])
                    if not order_filled:
                        # Chase the market down one tick with the same size.
                        await post_order(
                            CLIENT=CLIENT,
                            tick_size=POLY_CLOB['tick_size'],
                            neg_risk=POLY_CLOB['neg_risk'],
                            OrderArgs_list=[Custom_OrderArgs(
                                token_id=o['token_id'],
                                price=orig_px - 0.01,
                                size=orig_size,
                                side=SELL,
                                max_price=0.00,
                            )],
                        )
|
||||
|
||||
|
||||
async def no_orders_active_positions_route():
    """Successful buy: post take-profit sells to exit open token balances.

    For every locally-tracked token balance, the real size is re-checked
    against the exchange; sizes at or above MIN_ORDER_SIZE are offered out in
    full at the current book price plus TGT_PROFIT_CENTS. Dust balances below
    the minimum are zeroed locally and skipped.
    """
    global LOCAL_TOKEN_BALANCES

    OrderArgs_list = []

    logging.warning(f'LOCAL_TOKEN_BALANCES: {LOCAL_TOKEN_BALANCES}')

    for k, v in LOCAL_TOKEN_BALANCES.items():
        # Confirm the real balance with the exchange before offering it out.
        size = await get_balance_by_token_id(CLIENT=CLIENT, token_id=k)
        if size >= MIN_ORDER_SIZE:
            if POLY_CLOB['token_id_up'] == k:
                clob_px = float(POLY_CLOB['price'])
            else:
                clob_px = float(POLY_CLOB_DOWN['price'])

            OrderArgs_list.append(
                Custom_OrderArgs(
                    token_id=k,
                    price=clob_px + TGT_PROFIT_CENTS,
                    # CONSISTENCY: use the SELL constant and explicit
                    # max_price like the closing sell in the chase route
                    # (original passed the literal 'SELL' and no max_price).
                    size=size,
                    side=SELL,
                    max_price=0.00,
                )
            )
        else:
            LOCAL_TOKEN_BALANCES[k] = 0.00
            logging.info(f'Wants to flatten small amount, skipping: {v}')

    if OrderArgs_list:
        logging.info(f'Posting orders to close: {OrderArgs_list}')
        await post_order(
            CLIENT=CLIENT,
            tick_size=POLY_CLOB['tick_size'],
            neg_risk=POLY_CLOB['neg_risk'],
            OrderArgs_list=OrderArgs_list,
        )
|
||||
|
||||
async def active_orders_active_positions_route():
    """Route for the orders-and-positions state; not yet implemented (no-op).

    run_algo() currently falls back to active_orders_no_positions_route()
    for this state instead of calling this stub.
    """
    pass
|
||||
|
||||
async def kill_algo():
    """Emergency shutdown: cancel all orders, flatten all positions, raise.

    Raises:
        Exception: always, to abort the calling loop after cleanup.
    """
    logging.info('Killing algo...')
    # Pull every resting order first so flattening can't race new fills.
    await cancel_all_orders(CLIENT=CLIENT)
    await flatten_open_positions(
        CLIENT=CLIENT,
        token_id_up = POLY_CLOB.get('token_id_up', None),
        token_id_down = POLY_CLOB.get('token_id_down', None),
    )
    logging.info('...algo killed')
    raise Exception('Algo Killed')
|
||||
|
||||
async def run_algo():
    """Main trading loop: sync state from valkey, reconcile local orders with
    the user stream, then dispatch to the route matching the current state
    (orders yes/no x positions yes/no) once per second.

    On KeyboardInterrupt or any crash, cancels all resting orders.
    """
    global POLY_BINANCE
    global POLY_REF
    global POLY_CLOB
    global POLY_CLOB_DOWN
    global USER_TRADES
    global USER_ORDERS

    global SLOPE_HIST
    global ACTIVE_BALANCES_EXIST

    global LOCAL_ACTIVE_ORDERS
    global LOCAL_TOKEN_BALANCES

    # NOTE: these prints show the POLY_CLOB value from before the refresh below.
    print(f"token_id_up: {POLY_CLOB.get('token_id_up', None)}")
    print(f"token_id_down: {POLY_CLOB.get('token_id_down', None)}")

    POLY_CLOB = json.loads(VAL_KEY.get('poly_5min_btcusd'))

    ACTIVE_BALANCES_EXIST = await check_for_open_positions(
        CLIENT=CLIENT,
        token_id_up=POLY_CLOB.get('token_id_up', None),
        token_id_down=POLY_CLOB.get('token_id_down', None),
    )

    try:
        while True:
            loop_start = time.time()
            print('__________Start___________')
            # Refresh market / user-stream snapshots from valkey.
            POLY_BINANCE = json.loads(VAL_KEY.get('poly_binance_btcusd'))
            POLY_REF = json.loads(VAL_KEY.get('poly_rtds_cl_btcusd'))
            POLY_CLOB = json.loads(VAL_KEY.get('poly_5min_btcusd'))
            POLY_CLOB_DOWN = json.loads(VAL_KEY.get('poly_5min_btcusd_down'))
            USER_TRADES = json.loads(VAL_KEY.get('poly_user_trades'))
            USER_ORDERS = VAL_KEY.get('poly_user_orders')
            USER_ORDERS = json.loads(USER_ORDERS) if USER_ORDERS is not None else []

            ### CHANGE METHOD FROM BUY-SELL TO BUY UP - BUY DOWN
            ### DO THIS TO AVOID DELAY WITH FILL CONFIRMS

            ### Manage Local vs User Stream Orders ###
            print(f'LOCAL_ACTIVE_ORDERS: {LOCAL_ACTIVE_ORDERS}')
            # BUGFIX: iterate over a snapshot -- the original popped from
            # LOCAL_ACTIVE_ORDERS by enumerate index while iterating it,
            # which skips the next element after each removal and can pop
            # the wrong entry once indices shift.
            for o in list(LOCAL_ACTIVE_ORDERS):
                user_order = next((item for item in USER_ORDERS if item["id"] == o['orderID']), None)
                user_trade = next( ( item for item in USER_TRADES if ( o['orderID'] == item['taker_order_id'] ) or ( o["orderID"] == json.loads(item['maker_orders'])[0]['order_id'] ) ), None )

                print(f'USER TRADE: {user_trade}')

                if user_trade is not None:
                    trade_status = str(user_trade['status']).upper()
                    logging.info(f"Updated Trade Status: {o['status']} --> {trade_status}; {o['orderID']}")
                    if trade_status == 'CONFIRMED':
                        LOCAL_ACTIVE_ORDERS.remove(o)

                        token_id = user_trade['asset_id']
                        current_balance = float(LOCAL_TOKEN_BALANCES.get(token_id, 0.00))

                        if user_trade['side'] == 'BUY':
                            size = float(user_trade['size'])
                        else:
                            size = float(user_trade['size']) * -1

                        LOCAL_TOKEN_BALANCES[token_id] = current_balance + size
                        # (Commented-out LOCAL_ACTIVE_POSITIONS mirror removed.)
                        logging.info('Order FILLED!')
                    elif trade_status == 'MATCHED':
                        logging.info(f'Order Matched...awaiting confirm: {trade_status}')
                    else:
                        logging.info(f'Trade status but not filled: trade= {user_trade}; order={o}')

                elif user_order is not None:
                    order_status = str(user_order['status']).upper()
                    logging.info(f"Updated Order Status: {o['status']} --> {order_status}; {o['orderID']}")
                    if order_status == 'CANCELED':
                        LOCAL_ACTIVE_ORDERS.remove(o)
                        logging.info('Order Canceled')
                else:
                    logging.info('Order Live or Trade Awaiting Confirm')

            ### UPDATES CAN COME THRU EITHER ORDER OR TRADE CHANNELS - NEED TO UPDATE TO HANDLE TRADE CHANNEL

            # BUGFIX: default to None (was 0) so a MISSING key also takes the
            # retry branch below instead of keying balances on 0.
            token_id_up = POLY_CLOB.get('token_id_up', None)
            token_id_down = POLY_CLOB.get('token_id_down', None)

            if (token_id_up is None) or (token_id_down is None):
                print('Missing Token Ids for Market, sleeping 1 sec and retrying...')
                # BUGFIX: time.sleep() blocked the event loop inside a coroutine.
                await asyncio.sleep(1)
                # BUGFIX: was reset to {} -- keep the flag a bool like everywhere else.
                ACTIVE_BALANCES_EXIST = False
                continue
            else:
                if LOCAL_TOKEN_BALANCES.get(token_id_up) is None:
                    LOCAL_TOKEN_BALANCES[token_id_up] = 0.00
                if LOCAL_TOKEN_BALANCES.get(token_id_down) is None:
                    LOCAL_TOKEN_BALANCES[token_id_down] = 0.00
                ACTIVE_BALANCES_EXIST = (LOCAL_TOKEN_BALANCES.get(token_id_up) > 0) or (LOCAL_TOKEN_BALANCES.get(token_id_down) > 0)

            ### Check for Endtime Buffer ###
            if ENDTIME_BUFFER_SEC > POLY_CLOB.get('sec_remaining', 0):
                if LOCAL_ACTIVE_ORDERS:
                    print('buffer zone - orders cancel')
                    await cancel_all_orders(CLIENT=CLIENT)
                if ACTIVE_BALANCES_EXIST:
                    print('buffer zone - flatten positions')
                    await flatten_open_positions(
                        CLIENT=CLIENT,
                        token_id_up=POLY_CLOB.get('token_id_up', None),
                        token_id_down=POLY_CLOB.get('token_id_down', None),
                    )

                print('buffer zone, sleeping until next session')
                await asyncio.sleep(1)
                continue

            ### Execution Route ###
            if not LOCAL_ACTIVE_ORDERS and not ACTIVE_BALANCES_EXIST:  # No Orders, No Positions
                print('ROUTE: no_orders_no_positions_route')
                await no_orders_no_positions_route()

            ### Open Orders Route ###
            elif LOCAL_ACTIVE_ORDERS and not ACTIVE_BALANCES_EXIST:  # Orders, No Positions
                print('ROUTE: active_orders_no_positions_route')
                await active_orders_no_positions_route()

            ### Open Positions Route ###
            elif not LOCAL_ACTIVE_ORDERS and ACTIVE_BALANCES_EXIST:  # No Orders, Positions
                # BUGFIX: the original printed 'no_orders_no_positions_route' here.
                print('ROUTE: no_orders_active_positions_route')
                await no_orders_active_positions_route()

            ### Open Orders and Open Positions Route ###
            else:  # Orders and Positions
                print('ROUTE: active_orders_active_positions_route')
                # NOTE(review): intentionally reuses the orders-only handler;
                # active_orders_active_positions_route() is still a stub -- confirm.
                await active_orders_no_positions_route()

            print(f'__________________________ (Algo Engine ms: {(time.time() - loop_start)*1000})')
            # BUGFIX: time.sleep() blocked the event loop inside a coroutine.
            await asyncio.sleep(1)
    except KeyboardInterrupt:
        print('...algo stopped')
        await cancel_all_orders(CLIENT=CLIENT)
    except Exception as e:
        logging.critical(f'*** ALGO ENGINE CRASHED: {e}')
        logging.error(traceback.format_exc())
        await cancel_all_orders(CLIENT=CLIENT)
|
||||
|
||||
|
||||
async def main():
    """Wire up the trading client, valkey, and the async DB, then run the algo."""
    global CLIENT
    global VAL_KEY
    global CON

    CLIENT = api.create_client()
    VAL_KEY = valkey.Valkey(host='localhost', port=6379, db=0, decode_responses=True)
    # engine stays local; only the connection is shared via the global CON.
    engine = create_async_engine('mysql+asyncmy://root:pwd@localhost/polymarket')

    # `as CON` binds the declared global, so run_algo() sees the live connection.
    async with engine.connect() as CON:
        await create_executions_orders_table(CON=CON)
        await run_algo()
|
||||
|
||||
if __name__ == '__main__':
    # Epoch milliseconds at startup, logged once logging is configured.
    START_TIME = round(datetime.now().timestamp()*1000)

    logging.basicConfig(
        force=True,
        filename=LOG_FILEPATH,
        level=logging.INFO,
        format='%(asctime)s - %(levelname)s - %(message)s',
        filemode='w'
    )
    # BUGFIX: this info line originally ran BEFORE basicConfig, so it was
    # dropped (implicit root config defaults to WARNING). Log after config.
    logging.info(f'Log FilePath: {LOG_FILEPATH}')
    logging.info(f"STARTED: {START_TIME}")

    asyncio.run(main())
|
||||
|
||||
Binary file not shown.
4
ng.py
4
ng.py
@@ -23,7 +23,7 @@ ALLOW_BODY_SCROLL: bool = True
|
||||
LOOKBACK: int = 60
|
||||
LOOKBACK_RT_TV_MAX_POINTS: int = 300
|
||||
REFRESH_INTERVAL_SEC: int = 10
|
||||
REFRESH_INTERVAL_RT_SEC: int = 0.1
|
||||
REFRESH_INTERVAL_RT_SEC: int = 1/30
|
||||
|
||||
ENGINE = create_engine('mysql+pymysql://root:pwd@localhost/polymarket')
|
||||
VALKEY_R = valkey.Valkey(host='localhost', port=6379, db=0, decode_responses=True)
|
||||
@@ -51,7 +51,7 @@ def root():
|
||||
|
||||
async def update_tv():
|
||||
series_update = json.loads(VALKEY_R.get('poly_rtds_cl_btcusd'))
|
||||
series_update_b = json.loads(VALKEY_R.get('poly_coinbase_btcusd'))
|
||||
series_update_b = json.loads(VALKEY_R.get('poly_binance_btcusd'))
|
||||
series_update_c = json.loads(VALKEY_R.get('poly_5min_btcusd'))
|
||||
timestamp = round( ( series_update['timestamp_arrival'] / 1000 ) , 2)
|
||||
timestamp_b = round( ( series_update_b['timestamp_arrival'] / 1000 ) , 2)
|
||||
|
||||
19
ng/Dockerfile
Normal file
19
ng/Dockerfile
Normal file
@@ -0,0 +1,19 @@
|
||||
FROM python:3.13-slim
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install -y build-essential
|
||||
|
||||
RUN gcc --version
|
||||
RUN rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY requirements.txt .
|
||||
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
COPY . .
|
||||
|
||||
# Finally, run gunicorn.
|
||||
CMD [ "python", "ng.py"]
|
||||
# CMD [ "gunicorn", "--workers=5", "--threads=1", "-b 0.0.0.0:8000", "app:server"]
|
||||
@@ -59,10 +59,11 @@ async function create_tv() {
|
||||
secondsVisible: true // Optional: show seconds
|
||||
},
|
||||
rightPriceScale: {
|
||||
visible: true
|
||||
visible: true,
|
||||
autoScale: true
|
||||
},
|
||||
leftPriceScale: {
|
||||
visible: true
|
||||
visible: true
|
||||
},
|
||||
|
||||
layout: {
|
||||
@@ -102,10 +103,12 @@ async function create_tv() {
|
||||
window.lineSeries_c = chart.addSeries(LightweightCharts.LineSeries, {
|
||||
color: '#ea0707',
|
||||
priceScaleId: 'left',
|
||||
priceRange: {
|
||||
minValue: 0,
|
||||
maxValue: 1
|
||||
},
|
||||
autoscaleInfoProvider: () => ({
|
||||
priceRange: {
|
||||
minValue: 0.0,
|
||||
maxValue: 1.0
|
||||
}
|
||||
})
|
||||
// topColor: '#94fcdf',
|
||||
// bottomColor: 'rgba(112, 171, 249, 0.28)',
|
||||
// invertFilledArea: false
|
||||
|
||||
1197
order_entry.ipynb
1197
order_entry.ipynb
File diff suppressed because one or more lines are too long
22
requirements.txt
Normal file
22
requirements.txt
Normal file
@@ -0,0 +1,22 @@
|
||||
pandas
|
||||
rel
|
||||
websockets
|
||||
pyarrow
|
||||
plotly
|
||||
mysql-connector-python
|
||||
sqlalchemy
|
||||
requests
|
||||
pymysql
|
||||
scipy
|
||||
asyncmy
|
||||
cryptography
|
||||
TA-Lib
|
||||
valkey
|
||||
nicegui
|
||||
py_clob_client
|
||||
# google
|
||||
# google-api-core==2.30.0
|
||||
# google-api-python-client==2.190.0
|
||||
# googleapis-common-protos==1.72.0
|
||||
# grpcio==1.76.0
|
||||
# grpcio-tools==1.76.0
|
||||
63
test.py
Normal file
63
test.py
Normal file
@@ -0,0 +1,63 @@
|
||||
import asyncio
|
||||
import json
|
||||
import websockets
|
||||
import time
|
||||
|
||||
# Credentials
# NOTE(review): SECURITY -- live API credentials are hard-coded and committed
# to source control here. Rotate these keys and load them from the
# environment (e.g. dotenv, like the other scripts in this repo) instead.
API_KEY = "019d2ad3-3755-744b-ace8-ad0f08c958dd"
API_SECRET = "vXT1UeliaP89z9vcxDtdv47422mftijJkrJYE7CFqvA="
API_PASSPHRASE = "57e703b801f22333d1a66a48c3a71773d3d3a42825ddcf330c3325856bc99756"
# Polymarket user-channel websocket endpoint.
WS_URL = "wss://ws-subscriptions-clob.polymarket.com/ws/user"
|
||||
|
||||
async def heartbeat(websocket):
    """Keep-alive loop: push an empty JSON frame every 10 s until sending fails."""
    keepalive_frame = json.dumps({})
    while True:
        try:
            await asyncio.sleep(10)
            await websocket.send(keepalive_frame)
        except Exception:
            # Socket gone (or send failed) -- stop pinging.
            return
|
||||
|
||||
async def connect_polymarket_user_ws():
    """Connect to the Polymarket user channel and print updates forever.

    Reconnects after any failure with a 5 s backoff. A background heartbeat
    task keeps the socket alive while messages are consumed.
    """
    while True:  # Outer loop for reconnection
        try:
            async with websockets.connect(WS_URL) as websocket:
                # Authenticated subscription; empty markets list = all markets.
                subscribe_message = {
                    "type": "user",
                    "auth": {
                        "apiKey": API_KEY,
                        "secret": API_SECRET,
                        "passphrase": API_PASSPHRASE
                    },
                    "markets": []
                }

                await websocket.send(json.dumps(subscribe_message))
                print(f"[{time.strftime('%H:%M:%S')}] Subscription sent...")

                # Start the heartbeat task in the background
                heartbeat_task = asyncio.create_task(heartbeat(websocket))

                async for message in websocket:
                    data = json.loads(message)

                    # Log the specific reason if it's an error message
                    if data.get("type") == "error":
                        print(f"Server Error: {data.get('message')}")
                        break

                    if data:  # Ignore empty heartbeat responses from server
                        print(f"Update: {data}")

                # NOTE(review): if the async-for raises, this cancel is skipped;
                # the heartbeat then exits on its own send failure -- confirm.
                heartbeat_task.cancel()

        except Exception as e:
            print(f"Connection lost: {e}. Retrying in 5s...")
            await asyncio.sleep(5)
|
||||
|
||||
# Run the reconnecting websocket client until Ctrl-C.
if __name__ == "__main__":
    try:
        asyncio.run(connect_polymarket_user_ws())
    except KeyboardInterrupt:
        print("Stopped by user.")
|
||||
@@ -13,6 +13,9 @@ from sqlalchemy import text
|
||||
import websockets
|
||||
from sqlalchemy.ext.asyncio import create_async_engine
|
||||
import valkey
|
||||
import os
|
||||
from dotenv import load_dotenv
|
||||
|
||||
|
||||
### Allow only ipv4 ###
|
||||
def allowed_gai_family():
|
||||
@@ -26,13 +29,14 @@ VK_CHANNEL = 'poly_binance_btcusd'
|
||||
CON: AsyncContextManager | None = None
|
||||
VAL_KEY = None
|
||||
|
||||
|
||||
### Logging ###
|
||||
LOG_FILEPATH: str = '/root/logs/Polymarket_Binance_Trades.log'
|
||||
load_dotenv()
|
||||
LOG_FILEPATH: str = os.getenv("LOGS_PATH") + '/Polymarket_Binance_Trades.log'
|
||||
|
||||
### Globals ###
|
||||
WSS_URL = "wss://stream.binance.com:9443/ws/BTCUSDT@trade"
|
||||
# HIST_TRADES = np.empty((0, 2))
|
||||
WSS_URL = "wss://stream.binance.com:9443/ws/BTCUSDT@aggTrade"
|
||||
HIST_TRADES = np.empty((0, 3))
|
||||
HIST_TRADES_LOOKBACK_SEC = 5
|
||||
|
||||
### Database Funcs ###
|
||||
async def create_rtds_btcusd_table(
|
||||
@@ -46,6 +50,7 @@ async def create_rtds_btcusd_table(
|
||||
logging.info('Creating Table if Does Not Exist: binance_btcusd_trades')
|
||||
await CON.execute(text("""
|
||||
CREATE TABLE IF NOT EXISTS binance_btcusd_trades (
|
||||
timestamp_arrival BIGINT,
|
||||
timestamp_msg BIGINT,
|
||||
timestamp_value BIGINT,
|
||||
value DOUBLE,
|
||||
@@ -57,6 +62,7 @@ async def create_rtds_btcusd_table(
|
||||
raise ValueError('Only MySQL engine is implemented')
|
||||
|
||||
async def insert_rtds_btcusd_table(
|
||||
timestamp_arrival: int,
|
||||
timestamp_msg: int,
|
||||
timestamp_value: int,
|
||||
value: float,
|
||||
@@ -65,6 +71,7 @@ async def insert_rtds_btcusd_table(
|
||||
engine: str = 'mysql', # mysql | duckdb
|
||||
) -> None:
|
||||
params={
|
||||
'timestamp_arrival': timestamp_arrival,
|
||||
'timestamp_msg': timestamp_msg,
|
||||
'timestamp_value': timestamp_value,
|
||||
'value': value,
|
||||
@@ -77,6 +84,7 @@ async def insert_rtds_btcusd_table(
|
||||
await CON.execute(text("""
|
||||
INSERT INTO binance_btcusd_trades
|
||||
(
|
||||
timestamp_arrival,
|
||||
timestamp_msg,
|
||||
timestamp_value,
|
||||
value,
|
||||
@@ -84,6 +92,7 @@ async def insert_rtds_btcusd_table(
|
||||
)
|
||||
VALUES
|
||||
(
|
||||
:timestamp_arrival,
|
||||
:timestamp_msg,
|
||||
:timestamp_value,
|
||||
:value,
|
||||
@@ -96,7 +105,6 @@ async def insert_rtds_btcusd_table(
|
||||
else:
|
||||
raise ValueError('Only MySQL engine is implemented')
|
||||
|
||||
|
||||
### Websocket ###
|
||||
async def binance_trades_stream():
|
||||
global HIST_TRADES
|
||||
@@ -106,7 +114,7 @@ async def binance_trades_stream():
|
||||
|
||||
subscribe_msg = {
|
||||
"method": "SUBSCRIBE",
|
||||
"params": ["btcusdt@trade"],
|
||||
"params": ["btcusdt@aggTrade"],
|
||||
"id": 1
|
||||
}
|
||||
|
||||
@@ -114,30 +122,36 @@ async def binance_trades_stream():
|
||||
|
||||
try:
|
||||
async for message in websocket:
|
||||
ts_arrival = round(datetime.now().timestamp()*1000)
|
||||
if isinstance(message, str):
|
||||
try:
|
||||
data = json.loads(message)
|
||||
if data.get('t', None) is not None:
|
||||
if data.get('T', None) is not None:
|
||||
timestamp_msg = data['E']
|
||||
timestamp_value = data['T']
|
||||
last_px = float(data['p'])
|
||||
print(f'🤑 BTC Binance Last Px: {last_px:_.4f}; TS: {pd.to_datetime(data['T'], unit='ms')}')
|
||||
VAL_KEY.publish(VK_CHANNEL, json.dumps({
|
||||
'timestamp_msg': data['E'],
|
||||
'timestamp_value': data['T'],
|
||||
qty = float(data['q'])
|
||||
# print(f'🤑 BTC Binance Last Px: {last_px:_.4f}; TS: {pd.to_datetime(data['T'], unit='ms')}')
|
||||
HIST_TRADES = np.append(HIST_TRADES, np.array([[timestamp_value, last_px, qty]]), axis=0)
|
||||
hist_trades_lookback_ts_ms = round(datetime.now().timestamp() - HIST_TRADES_LOOKBACK_SEC)*1000
|
||||
HIST_TRADES = HIST_TRADES[HIST_TRADES[:, 0] >= hist_trades_lookback_ts_ms]
|
||||
VAL_KEY_OBJ = json.dumps({
|
||||
'timestamp_arrival': ts_arrival,
|
||||
'timestamp_msg': timestamp_msg,
|
||||
'timestamp_value': timestamp_value,
|
||||
'value': last_px,
|
||||
'qty': data['q'],
|
||||
}))
|
||||
VAL_KEY.set(VK_CHANNEL, json.dumps({
|
||||
'timestamp_msg': data['E'],
|
||||
'timestamp_value': data['T'],
|
||||
'value': last_px,
|
||||
'qty': data['q'],
|
||||
}))
|
||||
'qty': qty,
|
||||
'hist_trades': HIST_TRADES.tolist()
|
||||
})
|
||||
# VAL_KEY.publish(VK_CHANNEL, VAL_KEY_OBJ)
|
||||
VAL_KEY.set(VK_CHANNEL, VAL_KEY_OBJ)
|
||||
await insert_rtds_btcusd_table(
|
||||
CON=CON,
|
||||
timestamp_msg=data['E'],
|
||||
timestamp_value=data['T'],
|
||||
timestamp_arrival=ts_arrival,
|
||||
timestamp_msg=timestamp_msg,
|
||||
timestamp_value=timestamp_value,
|
||||
value=last_px,
|
||||
qty=data['q'],
|
||||
qty=qty,
|
||||
)
|
||||
else:
|
||||
logging.info(f'Initial or unexpected data struct, skipping: {data}')
|
||||
@@ -162,8 +176,8 @@ async def main():
|
||||
|
||||
if USE_VK:
|
||||
VAL_KEY = valkey.Valkey(host='localhost', port=6379, db=0)
|
||||
published_count = VAL_KEY.publish(VK_CHANNEL,f"Hola, starting to publish to valkey: {VK_CHANNEL} @ {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
|
||||
logging.info(f"Valkey message published to {published_count} subscribers of {VK_CHANNEL}")
|
||||
# published_count = VAL_KEY.publish(VK_CHANNEL,f"Hola, starting to publish to valkey: {VK_CHANNEL} @ {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
|
||||
# logging.info(f"Valkey message published to {published_count} subscribers of {VK_CHANNEL}")
|
||||
else:
|
||||
VAL_KEY = None
|
||||
logging.warning("VALKEY NOT BEING USED, NO DATA WILL BE PUBLISHED")
|
||||
|
||||
19
ws_binance/Dockerfile
Normal file
19
ws_binance/Dockerfile
Normal file
@@ -0,0 +1,19 @@
|
||||
FROM python:3.13-slim
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install -y build-essential
|
||||
|
||||
RUN gcc --version
|
||||
RUN rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY requirements.txt .
|
||||
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
COPY . .
|
||||
|
||||
# Finally, run gunicorn.
|
||||
CMD [ "python", "ws_binance.py"]
|
||||
# CMD [ "gunicorn", "--workers=5", "--threads=1", "-b 0.0.0.0:8000", "app:server"]
|
||||
142
ws_clob.py
142
ws_clob.py
@@ -14,22 +14,34 @@ from sqlalchemy.ext.asyncio import create_async_engine
|
||||
from sqlalchemy import text
|
||||
import valkey
|
||||
|
||||
import os
|
||||
from dotenv import load_dotenv
|
||||
|
||||
### Database ###
|
||||
USE_DB: bool = True
|
||||
USE_VK: bool = True
|
||||
VK_CHANNEL = 'poly_5min_btcusd'
|
||||
VK_CHANNEL_DOWN = 'poly_5min_btcusd_down'
|
||||
CON: AsyncContextManager | None = None
|
||||
VAL_KEY = None
|
||||
|
||||
### Logging ###
|
||||
LOG_FILEPATH: str = '/root/logs/Polymarket_5min.log'
|
||||
load_dotenv()
|
||||
LOG_FILEPATH: str = os.getenv("LOGS_PATH") + '/Polymarket_5min.log'
|
||||
|
||||
WSS_URL = "wss://ws-subscriptions-clob.polymarket.com/ws/market"
|
||||
SLUG_END_TIME = 0
|
||||
|
||||
HIST_TRADES = np.empty((0, 2))
|
||||
HIST_TRADES_DOWN = np.empty((0, 2))
|
||||
MIN_TICK_SIZE = 0.01
|
||||
NEG_RISK = False
|
||||
|
||||
TARGET_PX = 0
|
||||
|
||||
TARGET_ASSET_ID = None
|
||||
TARGET_ASSET_ID_DOWN = None
|
||||
|
||||
def format_timestamp(total_seconds) -> str:
|
||||
minutes, seconds = divmod(total_seconds, 60)
|
||||
|
||||
@@ -86,7 +98,8 @@ async def create_poly_btcusd_trades_table(
|
||||
timestamp_value BIGINT,
|
||||
price DOUBLE,
|
||||
qty DOUBLE,
|
||||
side_taker VARCHAR(8)
|
||||
side_taker VARCHAR(8),
|
||||
up_or_down VARCHAR(8)
|
||||
);
|
||||
"""))
|
||||
await CON.commit()
|
||||
@@ -100,6 +113,7 @@ async def insert_poly_btcusd_trades_table(
|
||||
price: float,
|
||||
qty: float,
|
||||
side_taker: str,
|
||||
up_or_down: str,
|
||||
CON: AsyncContextManager,
|
||||
engine: str = 'mysql', # mysql | duckdb
|
||||
) -> None:
|
||||
@@ -110,6 +124,7 @@ async def insert_poly_btcusd_trades_table(
|
||||
'price': price,
|
||||
'qty': qty,
|
||||
'side_taker': side_taker,
|
||||
'up_or_down': up_or_down,
|
||||
}
|
||||
if CON is None:
|
||||
logging.info("NO DB CONNECTION, SKIPPING Insert Statements")
|
||||
@@ -123,7 +138,8 @@ async def insert_poly_btcusd_trades_table(
|
||||
timestamp_value,
|
||||
price,
|
||||
qty,
|
||||
side_taker
|
||||
side_taker,
|
||||
up_or_down
|
||||
)
|
||||
VALUES
|
||||
(
|
||||
@@ -132,7 +148,8 @@ async def insert_poly_btcusd_trades_table(
|
||||
:timestamp_value,
|
||||
:price,
|
||||
:qty,
|
||||
:side_taker
|
||||
:side_taker,
|
||||
:up_or_down
|
||||
)
|
||||
"""),
|
||||
parameters=params
|
||||
@@ -146,24 +163,33 @@ async def polymarket_stream():
|
||||
global SLUG_END_TIME
|
||||
global TARGET_PX
|
||||
global HIST_TRADES
|
||||
global HIST_TRADES_DOWN
|
||||
global MIN_TICK_SIZE
|
||||
global NEG_RISK
|
||||
global TARGET_ASSET_ID
|
||||
global TARGET_ASSET_ID_DOWN
|
||||
|
||||
slug_full = gen_slug()
|
||||
market_details, market = get_mkt_details_by_slug(slug_full)
|
||||
market_details, _ = get_mkt_details_by_slug(slug_full)
|
||||
CONDITION_ID = market_details['ConditionId']
|
||||
TARGET_ASSET_ID = market_details['Up']
|
||||
TARGET_ASSET_ID_DOWN = market_details['Down']
|
||||
MIN_TICK_SIZE = market_details['MinTickSize']
|
||||
NEG_RISK = market_details['isNegRisk']
|
||||
SLUG_END_TIME = round(datetime.strptime(market_details['EndDateTime'], '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=timezone.utc).timestamp())
|
||||
print(f'********* NEW MKT - END DATETIME: {pd.to_datetime(SLUG_END_TIME, unit='s')} *********')
|
||||
|
||||
async with websockets.connect(WSS_URL) as websocket:
|
||||
async for websocket in websockets.connect(WSS_URL):
|
||||
print(f"Connected to {WSS_URL}")
|
||||
|
||||
subscribe_msg = {
|
||||
"assets_ids": [TARGET_ASSET_ID],
|
||||
"assets_ids": [TARGET_ASSET_ID, TARGET_ASSET_ID_DOWN],
|
||||
"type": "market",
|
||||
"custom_feature_enabled": True
|
||||
"custom_feature_enabled": False
|
||||
}
|
||||
|
||||
await websocket.send(json.dumps(subscribe_msg))
|
||||
print(f"Subscribed to Asset: {TARGET_ASSET_ID}")
|
||||
print(f"Subscribed to Assets: Up {TARGET_ASSET_ID}; Down: {TARGET_ASSET_ID_DOWN}")
|
||||
|
||||
try:
|
||||
async for message in websocket:
|
||||
@@ -178,21 +204,25 @@ async def polymarket_stream():
|
||||
print('*** Attempting to unsub from past 5min')
|
||||
update_unsub_msg = {
|
||||
"operation": 'unsubscribe',
|
||||
"assets_ids": [TARGET_ASSET_ID],
|
||||
"custom_feature_enabled": True
|
||||
"assets_ids": [TARGET_ASSET_ID, TARGET_ASSET_ID_DOWN],
|
||||
"custom_feature_enabled": False
|
||||
}
|
||||
await websocket.send(json.dumps(update_unsub_msg))
|
||||
|
||||
print('*** Attempting to SUB to new 5min')
|
||||
slug_full = gen_slug()
|
||||
market_details, market = get_mkt_details_by_slug(slug_full)
|
||||
CONDITION_ID = market_details['ConditionId']
|
||||
TARGET_ASSET_ID = market_details['Up']
|
||||
TARGET_ASSET_ID_DOWN = market_details['Down']
|
||||
MIN_TICK_SIZE = market_details['MinTickSize']
|
||||
NEG_RISK = market_details['isNegRisk']
|
||||
SLUG_END_TIME = round(datetime.strptime(market_details['EndDateTime'], '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=timezone.utc).timestamp())
|
||||
|
||||
update_sub_msg = {
|
||||
"operation": 'subscribe',
|
||||
"assets_ids": [TARGET_ASSET_ID],
|
||||
"custom_feature_enabled": True
|
||||
"assets_ids": [TARGET_ASSET_ID, TARGET_ASSET_ID_DOWN],
|
||||
"custom_feature_enabled": False
|
||||
}
|
||||
await websocket.send(json.dumps(update_sub_msg))
|
||||
|
||||
@@ -208,34 +238,65 @@ async def polymarket_stream():
|
||||
if event_type == "price_change":
|
||||
# print("📈 Price Change")
|
||||
# print(pd.DataFrame(data['price_changes']))
|
||||
pass
|
||||
continue
|
||||
elif event_type == "best_bid_ask":
|
||||
# print(pd.DataFrame([data]))
|
||||
pass
|
||||
continue
|
||||
elif event_type == "last_trade_price":
|
||||
token_id = data['asset_id']
|
||||
ts_msg = int(data['timestamp'])
|
||||
ts_value = int(ts_msg)
|
||||
px = float(data['price'])
|
||||
qty = float(data['size'])
|
||||
side_taker = data['side']
|
||||
HIST_TRADES = np.append(HIST_TRADES, np.array([[px, qty]]), axis=0)
|
||||
# SMA = talib.ROC(HIST_TRADES[:,0], timeperiod=10)[-1]
|
||||
# print(f"✨ Last Px: {px:.2f}; ROC: {SMA:.4f}; Qty: {qty:6.2f}; Sec Left: {sec_remaining}")
|
||||
print(f"✨ Last Px: {px:.2f}; Qty: {qty:6.2f}; Sec Left: {sec_remaining}")
|
||||
|
||||
if USE_VK:
|
||||
VAL_KEY_OBJ = json.dumps({
|
||||
'timestamp_arrival': ts_arrival,
|
||||
'timestamp_msg': ts_msg,
|
||||
'timestamp_value': ts_value,
|
||||
'price': px,
|
||||
'qty': qty,
|
||||
'side_taker': side_taker,
|
||||
'sec_remaining': sec_remaining,
|
||||
'target_price': TARGET_PX
|
||||
})
|
||||
VAL_KEY.publish(VK_CHANNEL, VAL_KEY_OBJ)
|
||||
VAL_KEY.set(VK_CHANNEL, VAL_KEY_OBJ)
|
||||
if token_id == TARGET_ASSET_ID:
|
||||
up_or_down = 'UP'
|
||||
HIST_TRADES = np.append(HIST_TRADES, np.array([[px, qty]]), axis=0)
|
||||
# print(f"✨ Last Px: {px:.2f}; Qty: {qty:6.2f}; Sec Left: {sec_remaining}")
|
||||
# print(f'Up: {TARGET_ASSET_ID}')
|
||||
# print(f'Down: {TARGET_ASSET_ID_DOWN}')
|
||||
# SMA = talib.ROC(HIST_TRADES[:,0], timeperiod=10)[-1]
|
||||
# print(f"✨ Last Px: {px:.2f}; ROC: {SMA:.4f}; Qty: {qty:6.2f}; Sec Left: {sec_remaining}")
|
||||
if USE_VK:
|
||||
VAL_KEY_OBJ = json.dumps({
|
||||
'timestamp_arrival': ts_arrival,
|
||||
'timestamp_msg': ts_msg,
|
||||
'timestamp_value': ts_value,
|
||||
'price': px,
|
||||
'qty': qty,
|
||||
'side_taker': side_taker,
|
||||
'sec_remaining': sec_remaining,
|
||||
'target_price': TARGET_PX,
|
||||
'condition_id': CONDITION_ID,
|
||||
'token_id_up': TARGET_ASSET_ID,
|
||||
'token_id_down': TARGET_ASSET_ID_DOWN,
|
||||
'tick_size': MIN_TICK_SIZE,
|
||||
'neg_risk': NEG_RISK,
|
||||
})
|
||||
VAL_KEY.set(VK_CHANNEL, VAL_KEY_OBJ)
|
||||
elif token_id == TARGET_ASSET_ID_DOWN:
|
||||
up_or_down = 'DOWN'
|
||||
HIST_TRADES_DOWN = np.append(HIST_TRADES_DOWN, np.array([[px, qty]]), axis=0)
|
||||
if USE_VK:
|
||||
VAL_KEY_OBJ = json.dumps({
|
||||
'timestamp_arrival': ts_arrival,
|
||||
'timestamp_msg': ts_msg,
|
||||
'timestamp_value': ts_value,
|
||||
'price': px,
|
||||
'qty': qty,
|
||||
'side_taker': side_taker,
|
||||
'sec_remaining': sec_remaining,
|
||||
'target_price': TARGET_PX,
|
||||
'condition_id': CONDITION_ID,
|
||||
'token_id_up': TARGET_ASSET_ID,
|
||||
'token_id_down': TARGET_ASSET_ID_DOWN,
|
||||
'tick_size': MIN_TICK_SIZE,
|
||||
'neg_risk': NEG_RISK,
|
||||
})
|
||||
VAL_KEY.set(VK_CHANNEL_DOWN, VAL_KEY_OBJ)
|
||||
else:
|
||||
logging.warning('Token Id from Market Does Not Match Pricing Data Id')
|
||||
|
||||
if USE_DB:
|
||||
await insert_poly_btcusd_trades_table(
|
||||
CON=CON,
|
||||
@@ -245,29 +306,34 @@ async def polymarket_stream():
|
||||
price=px,
|
||||
qty=qty,
|
||||
side_taker=side_taker,
|
||||
up_or_down=up_or_down
|
||||
)
|
||||
|
||||
elif event_type == "book":
|
||||
pass
|
||||
continue
|
||||
elif event_type == "new_market":
|
||||
print('Received new_market')
|
||||
continue
|
||||
elif event_type == "market_resolved":
|
||||
print(f"Received: {event_type}")
|
||||
print(data)
|
||||
continue
|
||||
elif event_type == "tick_size_change": # may want for CLOB order routing
|
||||
print(f"Received: {event_type}")
|
||||
print(data)
|
||||
continue
|
||||
else:
|
||||
print(f"*********** REC UNMAPPED EVENT: {event_type}")
|
||||
print(data)
|
||||
continue
|
||||
elif isinstance(data, dict):
|
||||
# print(data.get("event_type", None))
|
||||
pass
|
||||
continue
|
||||
else:
|
||||
raise ValueError(f'Type: {type(data)} not expected: {message}')
|
||||
|
||||
except websockets.ConnectionClosed as e:
|
||||
print(f"Connection closed by server. Exception: {e}")
|
||||
continue
|
||||
|
||||
async def main():
|
||||
global VAL_KEY
|
||||
@@ -275,8 +341,8 @@ async def main():
|
||||
|
||||
if USE_VK:
|
||||
VAL_KEY = valkey.Valkey(host='localhost', port=6379, db=0)
|
||||
published_count = VAL_KEY.publish(VK_CHANNEL,f"Hola, starting to publish to valkey: {VK_CHANNEL} @ {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
|
||||
logging.info(f"Valkey message published to {published_count} subscribers of {VK_CHANNEL}")
|
||||
# published_count = VAL_KEY.publish(VK_CHANNEL,f"Hola, starting to publish to valkey: {VK_CHANNEL} @ {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
|
||||
# logging.info(f"Valkey message published to {published_count} subscribers of {VK_CHANNEL}")
|
||||
else:
|
||||
VAL_KEY = None
|
||||
logging.warning("VALKEY NOT BEING USED, NO DATA WILL BE PUBLISHED")
|
||||
|
||||
19
ws_clob/Dockerfile
Normal file
19
ws_clob/Dockerfile
Normal file
@@ -0,0 +1,19 @@
|
||||
FROM python:3.13-slim
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install -y build-essential
|
||||
|
||||
RUN gcc --version
|
||||
RUN rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY requirements.txt .
|
||||
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
COPY . .
|
||||
|
||||
# Finally, run gunicorn.
|
||||
CMD [ "python", "ws_clob.py"]
|
||||
# CMD [ "gunicorn", "--workers=5", "--threads=1", "-b 0.0.0.0:8000", "app:server"]
|
||||
@@ -5,14 +5,16 @@ import socket
|
||||
import traceback
|
||||
from datetime import datetime
|
||||
from typing import AsyncContextManager
|
||||
|
||||
import numpy as np
|
||||
import os
|
||||
# import numpy as np
|
||||
import pandas as pd
|
||||
import requests.packages.urllib3.util.connection as urllib3_cn # type: ignore
|
||||
from sqlalchemy import text
|
||||
import websockets
|
||||
from sqlalchemy.ext.asyncio import create_async_engine
|
||||
import valkey
|
||||
import os
|
||||
from dotenv import load_dotenv
|
||||
|
||||
### Allow only ipv4 ###
|
||||
def allowed_gai_family():
|
||||
@@ -27,7 +29,8 @@ CON: AsyncContextManager | None = None
|
||||
VAL_KEY = None
|
||||
|
||||
### Logging ###
|
||||
LOG_FILEPATH: str = '/root/logs/Polymarket_coinbase_Trades.log'
|
||||
load_dotenv()
|
||||
LOG_FILEPATH: str = os.getenv("LOGS_PATH") + '/Polymarket_coinbase_Trades.log'
|
||||
|
||||
### Globals ###
|
||||
WSS_URL = "wss://ws-feed.exchange.coinbase.com"
|
||||
|
||||
19
ws_coinbase/Dockerfile
Normal file
19
ws_coinbase/Dockerfile
Normal file
@@ -0,0 +1,19 @@
|
||||
FROM python:3.13-slim
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install -y build-essential
|
||||
|
||||
RUN gcc --version
|
||||
RUN rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY requirements.txt .
|
||||
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
COPY . .
|
||||
|
||||
# Finally, run gunicorn.
|
||||
CMD [ "python", "ws_coinbase.py"]
|
||||
# CMD [ "gunicorn", "--workers=5", "--threads=1", "-b 0.0.0.0:8000", "app:server"]
|
||||
@@ -13,6 +13,8 @@ from sqlalchemy import text
|
||||
import websockets
|
||||
from sqlalchemy.ext.asyncio import create_async_engine
|
||||
import valkey
|
||||
import os
|
||||
from dotenv import load_dotenv
|
||||
|
||||
### Allow only ipv4 ###
|
||||
def allowed_gai_family():
|
||||
@@ -28,7 +30,8 @@ VAL_KEY = None
|
||||
|
||||
|
||||
### Logging ###
|
||||
LOG_FILEPATH: str = '/root/logs/Polymarket_Pionex_Trades.log'
|
||||
load_dotenv()
|
||||
LOG_FILEPATH: str = os.getenv("LOGS_PATH") + '/Polymarket_Pionex_Trades.log'
|
||||
|
||||
### Globals ###
|
||||
WSS_URL = "wss://ws.pionex.com/wsPub"
|
||||
|
||||
13
ws_rtds.py
13
ws_rtds.py
@@ -13,6 +13,8 @@ from sqlalchemy import text
|
||||
import websockets
|
||||
from sqlalchemy.ext.asyncio import create_async_engine
|
||||
import valkey
|
||||
import os
|
||||
from dotenv import load_dotenv
|
||||
|
||||
### Allow only ipv4 ###
|
||||
def allowed_gai_family():
|
||||
@@ -28,7 +30,8 @@ VAL_KEY = None
|
||||
|
||||
|
||||
### Logging ###
|
||||
LOG_FILEPATH: str = '/root/logs/Polymarket_RTDS.log'
|
||||
load_dotenv()
|
||||
LOG_FILEPATH: str = os.getenv("LOGS_PATH") + '/Polymarket_RTDS.log'
|
||||
|
||||
### Globals ###
|
||||
WSS_URL = "wss://ws-live-data.polymarket.com"
|
||||
@@ -124,14 +127,14 @@ async def rtds_stream():
|
||||
data = json.loads(message)
|
||||
if data['payload'].get('value', None) is not None:
|
||||
ts_arrival = round(datetime.now().timestamp()*1000)
|
||||
print(f'🤑 BTC Chainlink Last Px: {data['payload']['value']:_.4f}; TS: {pd.to_datetime(data['payload']['timestamp'], unit='ms')}')
|
||||
# print(f'🤑 BTC Chainlink Last Px: {data['payload']['value']:_.4f}; TS: {pd.to_datetime(data['payload']['timestamp'], unit='ms')}')
|
||||
VAL_KEY_OBJ = json.dumps({
|
||||
'timestamp_arrival': ts_arrival,
|
||||
'timestamp_msg': data['timestamp'],
|
||||
'timestamp_value': data['payload']['timestamp'],
|
||||
'value': data['payload']['value'],
|
||||
})
|
||||
VAL_KEY.publish(VK_CHANNEL, VAL_KEY_OBJ)
|
||||
# VAL_KEY.publish(VK_CHANNEL, VAL_KEY_OBJ)
|
||||
VAL_KEY.set(VK_CHANNEL, VAL_KEY_OBJ)
|
||||
await insert_rtds_btcusd_table(
|
||||
CON=CON,
|
||||
@@ -164,8 +167,8 @@ async def main():
|
||||
|
||||
if USE_VK:
|
||||
VAL_KEY = valkey.Valkey(host='localhost', port=6379, db=0)
|
||||
published_count = VAL_KEY.publish(VK_CHANNEL,f"Hola, starting to publish to valkey: {VK_CHANNEL} @ {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
|
||||
logging.info(f"Valkey message published to {published_count} subscribers of {VK_CHANNEL}")
|
||||
# published_count = VAL_KEY.publish(VK_CHANNEL,f"Hola, starting to publish to valkey: {VK_CHANNEL} @ {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
|
||||
# logging.info(f"Valkey message published to {published_count} subscribers of {VK_CHANNEL}")
|
||||
else:
|
||||
VAL_KEY = None
|
||||
logging.warning("VALKEY NOT BEING USED, NO DATA WILL BE PUBLISHED")
|
||||
|
||||
19
ws_rtds/Dockerfile
Normal file
19
ws_rtds/Dockerfile
Normal file
@@ -0,0 +1,19 @@
|
||||
FROM python:3.13-slim
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install -y build-essential
|
||||
|
||||
RUN gcc --version
|
||||
RUN rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY requirements.txt .
|
||||
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
COPY . .
|
||||
|
||||
# Finally, run gunicorn.
|
||||
CMD [ "python", "ws_rtds.py"]
|
||||
# CMD [ "gunicorn", "--workers=5", "--threads=1", "-b 0.0.0.0:8000", "app:server"]
|
||||
431
ws_user.py
Normal file
431
ws_user.py
Normal file
@@ -0,0 +1,431 @@
|
||||
import asyncio
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from datetime import datetime
|
||||
from typing import AsyncContextManager
|
||||
|
||||
import numpy as np
|
||||
import valkey
|
||||
import websockets
|
||||
from dotenv import load_dotenv
|
||||
from py_clob_client.client import ClobClient
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.ext.asyncio import create_async_engine
|
||||
|
||||
### Database ###
|
||||
USE_DB: bool = True
|
||||
USE_VK: bool = True
|
||||
|
||||
LOCAL_LIVE_ORDERS = []
|
||||
LOCAL_RECENT_TRADES = []
|
||||
LOCAL_RECENT_TRADES_LOOKBACK_SEC = 10
|
||||
|
||||
VK_LIVE_ORDERS = 'poly_user_orders'
|
||||
VK_RECENT_TRADES = 'poly_user_trades'
|
||||
CON: AsyncContextManager | None = None
|
||||
VAL_KEY = None
|
||||
|
||||
### Logging ###
|
||||
load_dotenv()
|
||||
LOG_FILEPATH: str = os.getenv("LOGS_PATH") + '/Polymarket_User.log'
|
||||
|
||||
# https://docs.polymarket.com/market-data/websocket/user-channel
|
||||
WSS_URL = "wss://ws-subscriptions-clob.polymarket.com/ws/user"
|
||||
API_CREDS = {}
|
||||
|
||||
HIST_TRADES = np.empty((0, 2))
|
||||
TARGET_PX = 0
|
||||
|
||||
### Database Funcs ###
|
||||
async def create_user_trades_table(
|
||||
CON: AsyncContextManager,
|
||||
engine: str = 'mysql', # mysql | duckdb
|
||||
) -> None:
|
||||
if CON is None:
|
||||
logging.info("NO DB CONNECTION, SKIPPING Create Statements")
|
||||
else:
|
||||
if engine == 'mysql':
|
||||
logging.info('Creating Table if Does Not Exist: user_stream_trades')
|
||||
await CON.execute(text("""
|
||||
CREATE TABLE IF NOT EXISTS user_stream_trades (
|
||||
-- event_type VARCHAR(8),
|
||||
timestamp_arrival BIGINT,
|
||||
type VARCHAR(20),
|
||||
id VARCHAR(100),
|
||||
taker_order_id VARCHAR(100),
|
||||
market VARCHAR(100),
|
||||
asset_id VARCHAR(100),
|
||||
side VARCHAR(8),
|
||||
size DOUBLE,
|
||||
price DOUBLE,
|
||||
fee_rate_bps DOUBLE,
|
||||
status VARCHAR(20),
|
||||
matchtime BIGINT,
|
||||
last_update BIGINT,
|
||||
outcome VARCHAR(20),
|
||||
owner VARCHAR(100),
|
||||
trade_owner VARCHAR(100),
|
||||
maker_address VARCHAR(100),
|
||||
transaction_hash VARCHAR(100),
|
||||
bucket_index INT,
|
||||
maker_orders JSON NULL,
|
||||
trader_side VARCHAR(8),
|
||||
timestamp BIGINT
|
||||
);
|
||||
"""))
|
||||
await CON.commit()
|
||||
else:
|
||||
raise ValueError('Only MySQL engine is implemented')
|
||||
|
||||
async def insert_user_trades_table(
|
||||
params: dict,
|
||||
CON: AsyncContextManager,
|
||||
engine: str = 'mysql', # mysql | duckdb
|
||||
) -> None:
|
||||
if CON is None:
|
||||
logging.info("NO DB CONNECTION, SKIPPING Insert Statements")
|
||||
else:
|
||||
if engine == 'mysql':
|
||||
await CON.execute(text("""
|
||||
INSERT INTO user_stream_trades
|
||||
(
|
||||
timestamp_arrival,
|
||||
type,
|
||||
id,
|
||||
taker_order_id,
|
||||
market,
|
||||
asset_id,
|
||||
side,
|
||||
size,
|
||||
price,
|
||||
fee_rate_bps,
|
||||
status,
|
||||
matchtime,
|
||||
last_update,
|
||||
outcome,
|
||||
owner,
|
||||
trade_owner,
|
||||
maker_address,
|
||||
transaction_hash,
|
||||
bucket_index,
|
||||
maker_orders,
|
||||
trader_side,
|
||||
timestamp
|
||||
)
|
||||
VALUES
|
||||
(
|
||||
:timestamp_arrival,
|
||||
:type,
|
||||
:id,
|
||||
:taker_order_id,
|
||||
:market,
|
||||
:asset_id,
|
||||
:side,
|
||||
:size,
|
||||
:price,
|
||||
:fee_rate_bps,
|
||||
:status,
|
||||
:matchtime,
|
||||
:last_update,
|
||||
:outcome,
|
||||
:owner,
|
||||
:trade_owner,
|
||||
:maker_address,
|
||||
:transaction_hash,
|
||||
:bucket_index,
|
||||
:maker_orders,
|
||||
:trader_side,
|
||||
:timestamp
|
||||
)
|
||||
"""),
|
||||
parameters=params
|
||||
)
|
||||
await CON.commit()
|
||||
else:
|
||||
raise ValueError('Only MySQL engine is implemented')
|
||||
|
||||
|
||||
async def create_user_orders_table(
|
||||
CON: AsyncContextManager,
|
||||
engine: str = 'mysql', # mysql | duckdb
|
||||
) -> None:
|
||||
if CON is None:
|
||||
logging.info("NO DB CONNECTION, SKIPPING Create Statements")
|
||||
else:
|
||||
if engine == 'mysql':
|
||||
logging.info('Creating Table if Does Not Exist: user_stream_orders')
|
||||
await CON.execute(text("""
|
||||
CREATE TABLE IF NOT EXISTS user_stream_orders (
|
||||
-- event_type VARCHAR(8),
|
||||
timestamp_arrival BIGINT,
|
||||
id VARCHAR(100),
|
||||
owner VARCHAR(100),
|
||||
market VARCHAR(100),
|
||||
asset_id VARCHAR(100),
|
||||
side VARCHAR(8),
|
||||
order_owner VARCHAR(100),
|
||||
original_size DOUBLE,
|
||||
size_matched DOUBLE,
|
||||
price DOUBLE,
|
||||
associate_trades JSON NULL,
|
||||
outcome VARCHAR(20),
|
||||
type VARCHAR(20),
|
||||
created_at BIGINT,
|
||||
expiration VARCHAR(20),
|
||||
order_type VARCHAR(8),
|
||||
status VARCHAR(20),
|
||||
maker_address VARCHAR(100),
|
||||
timestamp BIGINT
|
||||
);
|
||||
"""))
|
||||
await CON.commit()
|
||||
else:
|
||||
raise ValueError('Only MySQL engine is implemented')
|
||||
|
||||
async def insert_user_orders_table(
|
||||
params: dict,
|
||||
CON: AsyncContextManager,
|
||||
engine: str = 'mysql', # mysql | duckdb
|
||||
) -> None:
|
||||
if CON is None:
|
||||
logging.info("NO DB CONNECTION, SKIPPING Insert Statements")
|
||||
else:
|
||||
if engine == 'mysql':
|
||||
await CON.execute(text("""
|
||||
INSERT INTO user_stream_orders
|
||||
(
|
||||
timestamp_arrival,
|
||||
id,
|
||||
owner,
|
||||
market,
|
||||
asset_id,
|
||||
side,
|
||||
order_owner,
|
||||
original_size,
|
||||
size_matched,
|
||||
price,
|
||||
associate_trades,
|
||||
outcome,
|
||||
type,
|
||||
created_at,
|
||||
expiration,
|
||||
order_type,
|
||||
status,
|
||||
maker_address,
|
||||
timestamp
|
||||
)
|
||||
VALUES
|
||||
(
|
||||
:timestamp_arrival,
|
||||
:id,
|
||||
:owner,
|
||||
:market,
|
||||
:asset_id,
|
||||
:side,
|
||||
:order_owner,
|
||||
:original_size,
|
||||
:size_matched,
|
||||
:price,
|
||||
:associate_trades,
|
||||
:outcome,
|
||||
:type,
|
||||
:created_at,
|
||||
:expiration,
|
||||
:order_type,
|
||||
:status,
|
||||
:maker_address,
|
||||
:timestamp
|
||||
)
|
||||
"""),
|
||||
parameters=params
|
||||
)
|
||||
await CON.commit()
|
||||
else:
|
||||
raise ValueError('Only MySQL engine is implemented')
|
||||
|
||||
### Helpers ###
|
||||
def live_orders_only(orders: list[dict]) -> list[dict]:
|
||||
return [d for d in orders if d.get('status')=='LIVE']
|
||||
|
||||
def upsert_list_of_dicts_by_id(list_of_dicts, new_dict):
|
||||
for index, item in enumerate(list_of_dicts):
|
||||
if item.get('id') == new_dict.get('id'):
|
||||
list_of_dicts[index] = new_dict
|
||||
return list_of_dicts
|
||||
|
||||
list_of_dicts.append(new_dict)
|
||||
return list_of_dicts
|
||||
|
||||
|
||||
async def polymarket_stream():
|
||||
global TARGET_PX
|
||||
global HIST_TRADES
|
||||
global LOCAL_LIVE_ORDERS
|
||||
global LOCAL_RECENT_TRADES
|
||||
|
||||
POLY_API_KEY = API_CREDS.api_key
|
||||
POLY_API_SECRET = API_CREDS.api_secret
|
||||
POLY_API_PASS = API_CREDS.api_passphrase
|
||||
|
||||
async for websocket in websockets.connect(WSS_URL):
|
||||
print(f"Connected to {WSS_URL}")
|
||||
|
||||
subscribe_msg = {
|
||||
"auth": {
|
||||
"apiKey": POLY_API_KEY,
|
||||
"secret": POLY_API_SECRET,
|
||||
"passphrase": POLY_API_PASS,
|
||||
},
|
||||
"type": "user",
|
||||
"markets": []
|
||||
}
|
||||
|
||||
await websocket.send(json.dumps(subscribe_msg))
|
||||
print("Subscribed to User Data")
|
||||
|
||||
|
||||
try:
|
||||
async for message in websocket:
|
||||
ts_arrival = round(datetime.now().timestamp()*1000)
|
||||
if isinstance(message, str):
|
||||
data = json.loads(message)
|
||||
if data == {}: # Handle empty server ping - return pong
|
||||
await websocket.send(json.dumps({}))
|
||||
print('SENT HEARTBEAT PING')
|
||||
continue
|
||||
data['timestamp_arrival'] = ts_arrival
|
||||
|
||||
event_type = data.get('event_type', None)
|
||||
match event_type:
|
||||
case 'trade':
|
||||
logging.info(f'TRADE: {data}')
|
||||
# trade_status = data.get('status')
|
||||
# match trade_status: # Raise TELEGRAM ALERT ???
|
||||
# case 'MATCHED':
|
||||
# pass
|
||||
# case 'MINED':
|
||||
# pass
|
||||
# case 'CONFIRMED':
|
||||
# pass
|
||||
# case 'RETRYING':
|
||||
# pass
|
||||
# case 'FAILED':
|
||||
# pass
|
||||
|
||||
### Convert Datatypes ###
|
||||
data['size'] = float(data['size'])
|
||||
data['price'] = float(data['price'])
|
||||
data['fee_rate_bps'] = float(data['fee_rate_bps'])
|
||||
data['matchtime'] = int(data['match_time'])
|
||||
data['last_update'] = int(data['last_update'])
|
||||
data['timestamp'] = int(data['timestamp'])
|
||||
data['maker_orders'] = json.dumps(data['maker_orders']) if data['maker_orders'] else None
|
||||
|
||||
LOCAL_RECENT_TRADES = upsert_list_of_dicts_by_id(LOCAL_RECENT_TRADES, data)
|
||||
LOOKBACK_MIN_TS_MS = ts_arrival-LOCAL_RECENT_TRADES_LOOKBACK_SEC*1000
|
||||
LOCAL_RECENT_TRADES = [t for t in LOCAL_RECENT_TRADES if t.get('timestamp_arrival', 0) >= LOOKBACK_MIN_TS_MS]
|
||||
|
||||
print("---------------------")
|
||||
print(LOCAL_RECENT_TRADES)
|
||||
print("---------------------")
|
||||
|
||||
VAL_KEY_OBJ = json.dumps(LOCAL_RECENT_TRADES)
|
||||
# VAL_KEY.publish(VK_CHANNEL, VAL_KEY_OBJ)
|
||||
VAL_KEY.set(VK_RECENT_TRADES, VAL_KEY_OBJ)
|
||||
|
||||
logging.info(f'User Trade Update: {data}')
|
||||
|
||||
### Insert into DB ###
|
||||
await insert_user_trades_table(
|
||||
params=data,
|
||||
CON=CON
|
||||
)
|
||||
case 'order':
|
||||
logging.info(f'ORDER: {data}')
|
||||
### Convert Datatypes ###
|
||||
data['original_size'] = float(data['original_size'])
|
||||
data['size_matched'] = float(data['size_matched'])
|
||||
data['price'] = float(data['price'])
|
||||
data['associate_trades'] = json.dumps(data['associate_trades']) if data['associate_trades'] else None
|
||||
data['created_at'] = int(data['created_at'])
|
||||
data['timestamp'] = int(data['timestamp'])
|
||||
|
||||
### Match on Status - Pass Live orders to Valkey for Algo Engine ###
|
||||
order_status = data.get('status')
|
||||
match order_status:
|
||||
case 'live':
|
||||
LOCAL_LIVE_ORDERS = upsert_list_of_dicts_by_id(LOCAL_LIVE_ORDERS, data)
|
||||
LOCAL_LIVE_ORDERS = live_orders_only(LOCAL_LIVE_ORDERS)
|
||||
VAL_KEY_OBJ = json.dumps(LOCAL_LIVE_ORDERS)
|
||||
# VAL_KEY.publish(VK_CHANNEL, VAL_KEY_OBJ)
|
||||
VAL_KEY.set(VK_LIVE_ORDERS, VAL_KEY_OBJ)
|
||||
logging.info(f'Order(s) RESTING: {data}')
|
||||
case 'matched':
|
||||
logging.info(f'Order(s) MATCHED: {data}')
|
||||
case 'delayed':
|
||||
raise ValueError(f'Order Status of "delayed" which is not expected for non-sports orders: {data}')
|
||||
case 'unmatched':
|
||||
raise ValueError(f'Order Status of "unmatched" which is not expected for non-sports orders: {data}')
|
||||
|
||||
### Insert into DB ###
|
||||
await insert_user_orders_table(
|
||||
params=data,
|
||||
CON=CON,
|
||||
)
|
||||
else:
|
||||
raise ValueError(f'Type: {type(data)} not expected: {message}')
|
||||
|
||||
except websockets.ConnectionClosed as e:
|
||||
print(f"Connection closed by server. Exception: {e}")
|
||||
|
||||
async def main():
|
||||
global VAL_KEY
|
||||
global CON
|
||||
global API_CREDS
|
||||
|
||||
private_key = os.getenv("PRIVATE_KEY")
|
||||
host = "https://clob.polymarket.com"
|
||||
chain_id = 137 # Polygon mainnet
|
||||
|
||||
temp_client = ClobClient(host, key=private_key, chain_id=chain_id)
|
||||
API_CREDS = temp_client.create_or_derive_api_creds()
|
||||
|
||||
if USE_VK:
|
||||
VAL_KEY = valkey.Valkey(host='localhost', port=6379, db=0)
|
||||
# published_count = VAL_KEY.publish(VK_CHANNEL,f"Hola, starting to publish to valkey: {VK_CHANNEL} @ {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
|
||||
# logging.info(f"Valkey message published to {published_count} subscribers of {VK_CHANNEL}")
|
||||
else:
|
||||
VAL_KEY = None
|
||||
logging.warning("VALKEY NOT BEING USED, NO DATA WILL BE PUBLISHED")
|
||||
|
||||
if USE_DB:
|
||||
engine = create_async_engine('mysql+asyncmy://root:pwd@localhost/polymarket')
|
||||
async with engine.connect() as CON:
|
||||
await create_user_trades_table(CON=CON)
|
||||
await create_user_orders_table(CON=CON)
|
||||
await polymarket_stream()
|
||||
else:
|
||||
CON = None
|
||||
logging.warning("DATABASE NOT BEING USED, NO DATA WILL BE RECORDED")
|
||||
await polymarket_stream()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
START_TIME = round(datetime.now().timestamp()*1000)
|
||||
|
||||
logging.info(f'Log FilePath: {LOG_FILEPATH}')
|
||||
|
||||
logging.basicConfig(
|
||||
force=True,
|
||||
filename=LOG_FILEPATH,
|
||||
level=logging.INFO,
|
||||
format='%(asctime)s - %(levelname)s - %(message)s',
|
||||
filemode='w'
|
||||
)
|
||||
logging.info(f"STARTED: {START_TIME}")
|
||||
|
||||
try:
|
||||
asyncio.run(main())
|
||||
except KeyboardInterrupt as e:
|
||||
print(f"Stream stopped: {e}")
|
||||
19
ws_user/Dockerfile
Normal file
19
ws_user/Dockerfile
Normal file
@@ -0,0 +1,19 @@
|
||||
FROM python:3.13-slim
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install -y build-essential
|
||||
|
||||
RUN gcc --version
|
||||
RUN rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY requirements.txt .
|
||||
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
COPY . .
|
||||
|
||||
# Finally, run gunicorn.
|
||||
CMD [ "python", "ws_user.py"]
|
||||
# CMD [ "gunicorn", "--workers=5", "--threads=1", "-b 0.0.0.0:8000", "app:server"]
|
||||
Reference in New Issue
Block a user