Skip to main content
Version: Upcoming

Common MLink Queries

Example — Description

AAPL Firehose — Streams the NBBO as filtered for AAPL only.
MLinkStream Surfaces — Streams surface information from LiveImpliedQuoteAdjust to display best bid and ask.
Child Orders — Displays working child orders (if any exist).
MLinkStream Symbol Risk — Streams risk information on symbols in their account (if any exist).
Parent Limit Order — Details a three-stage process to send orders with an AUX limit.

AAPL Firehose

import asyncio
import json
import time

import websockets
import nest_asyncio
import threading
import datetime
from IPython.display import display
import pandas as pd
from pandas import json_normalize
# Allow asyncio.run() inside an already-running event loop (e.g. Jupyter).
nest_asyncio.apply()

# Live MLink JSON websocket endpoint and API token.
uriJson = "wss://mlink-live.nms.venus.spiderrockconnect.com/mlink/json"
authentication_key = ""

# Global accumulator: one row per NBBO update received.
stream_df = pd.DataFrame(columns=["ticker", "bidPrice1", "askPrice1"])

async def recv_msg(websocket):
    """Receive one frame from the MLink websocket; if it is a StockBookQuote,
    append ticker/bid/ask to the global stream_df and display the newest row.

    Always returns True so the caller's receive loop keeps running.
    """
    global stream_df

    raw = await websocket.recv()
    parsed = json.loads(raw)

    if isinstance(parsed, dict) and parsed.get("header", {}).get("mTyp") == "StockBookQuote":
        msg = parsed.get("message", {})
        tkr = msg.get("pkey", {}).get("ticker", {})
        record = {
            "ticker": f"{tkr.get('tk')}-{tkr.get('ts')}-{tkr.get('at')}",
            "bidPrice1": msg.get("bidPrice1"),
            "askPrice1": msg.get("askPrice1"),
        }

        # Only append if all values are present.
        # NOTE(review): truthiness also drops rows where a price is exactly 0
        # (0 is falsy) — confirm whether zero quotes should be kept.
        if all(record.values()):
            stream_df = pd.concat([stream_df, pd.DataFrame([record])], ignore_index=True)
            display(stream_df.tail(1))  # show the latest row

    return True

async def query_mlink(authentication_key):
    """Connect to MLink, subscribe to the AAPL NBBO stream, and hand every
    incoming frame to recv_msg().  Reconnects after a timeout, forever."""
    while True:
        try:
            async with websockets.connect(uriJson,
                                          additional_headers={"Authorization": f"Bearer {authentication_key}"},
                                          ping_timeout=None) as websocket:
                request = {
                    "header": {"mTyp": "MLinkStream"},
                    "message": {
                        "queryLabel": "ExampleStockNbbo",
                        "activeLatency": 1,            # stream AAPL with minimum latency
                        "msgName": "StockBookQuote",   # the message type to stream
                        "view": "ticker|bidprice1|askprice1",
                        # Equivalent filter: ticker.tk:eq:AAPL & ticker.at:eq:EQT & ticker.ts:eq:NMS
                        "where": "ticker:eq:AAPL-NMS-EQT",
                    },
                }

                # Stamp send/encode times as "YYYY-mm-dd HH:MM:SS.micros" (UTC).
                now_ns = time.time_ns()
                stamp = '.'.join([
                    time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(now_ns / 1000000000)),
                    "%06d" % ((now_ns / 1000) % 1000000),
                ])
                request['header']['sTim'] = stamp
                request['header']['encT'] = stamp

                await websocket.send(json.dumps(request))

                # recv_msg always returns True, so this loops until an exception.
                while await recv_msg(websocket):
                    pass

        except asyncio.exceptions.TimeoutError:
            print("timeout occurred, retrying...")


if __name__ == "__main__":
    asyncio.run(query_mlink(authentication_key))

MLinkStream Surfaces

import asyncio
import json
import time
import websockets
import nest_asyncio
import pandas as pd
from pandas import json_normalize
from IPython.display import display

# Permit nested event loops (Jupyter) and configure the live MLink endpoint.
nest_asyncio.apply()

uriJson = "wss://mlink-live.nms.venus.spiderrockconnect.com/mlink/json"
authentication_key = ""

async def recv_msg(websocket):
    """Print one raw JSON frame from the websocket and return True so the
    caller keeps looping.

    NOTE(review): this definition is immediately shadowed by the second
    recv_msg defined below, so it never actually runs.
    """
    raw = await websocket.recv()
    print(json.loads(raw))
    return True

# Accumulator for surface rows (appending is currently disabled below; each
# update is displayed as its own one-row frame instead).
stream_df = pd.DataFrame(columns=["ticker", "Call/Put", "strike", "opt_exp", "delta",
                                  "uprc", "obid", "oask", "obiv", "oaiv", "svol", "timestamp"])

async def recv_msg(websocket):
    """Receive one frame; for LiveImpliedQuoteAdj messages, build a one-row
    DataFrame of option-surface fields and display it.

    Always returns True so the receive loop continues.
    """
    global stream_df

    raw = await websocket.recv()
    parsed = json.loads(raw)

    if isinstance(parsed, dict) and parsed.get("header", {}).get("mTyp") == "LiveImpliedQuoteAdj":
        msg = parsed.get("message", {})
        okey = msg.get("pkey", {}).get("okey", {})  # option key: tk/xx/cp/dt

        record = {
            "ticker": okey.get("tk"),
            "strike": okey.get("xx"),
            "Call/Put": okey.get("cp"),
            "opt_exp": okey.get("dt"),
            "uprc": msg.get("uprc"),
            "obid": msg.get("obid"),
            "oask": msg.get("oask"),
            "obiv": msg.get("obiv"),
            "oaiv": msg.get("oaiv"),
            "svol": msg.get("svol"),
            "delta": msg.get("de"),
            "timestamp": msg.get("timestamp"),
        }

        # Accumulation into stream_df is intentionally disabled; show the
        # single latest record instead.
        display(pd.DataFrame([record]))

    return True

async def send_signal(websocket):
    """Every 20 seconds send an MLinkSignalReady (FullScan) message so the
    server keeps pushing a full refresh of matching rows."""
    while True:
        await asyncio.sleep(20)

        signal = {
            "header": {"mTyp": "MLinkSignalReady"},
            "message": {"readyScan": "FullScan"},
        }

        # Stamp send/encode times as "YYYY-mm-dd HH:MM:SS.micros" (UTC).
        now_ns = time.time_ns()
        stamp = '.'.join([
            time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(now_ns / 1_000_000_000)),
            "%06d" % ((now_ns / 1_000) % 1_000_000),
        ])
        signal['header']['sTim'] = stamp
        signal['header']['encT'] = stamp

        await websocket.send(json.dumps(signal))
        await asyncio.sleep(0)  # yield to the event loop

async def query_mlink(authentication_key):
    """Subscribe to delta-banded, near-expiry SPY surface updates
    (LiveImpliedQuoteAdj) and process each frame with recv_msg().

    Reconnects forever; a timeout simply restarts the connection.  The
    periodic keep-alive task is tracked and cancelled on teardown — the
    original fire-and-forget create_task leaked one send_signal task per
    reconnect, each still holding the dead websocket.
    """
    while True:
        signal_task = None
        try:
            async with websockets.connect(
                uriJson,
                additional_headers={"Authorization": f"Bearer {authentication_key}"},
                ping_timeout=None
            ) as websocket:

                msg = {
                    "header": {
                        "mTyp": "MLinkStream"
                    },
                    "message": {
                        "queryLabel": "ExampleStockNbbo",
                        "activeLatency": 5000,
                        "msgName": "LiveImpliedQuoteAdj",
                        # |delta| between 0.40 and 0.60, expiry within ~2 days
                        # (years < .006).
                        "where": "ticker:eq:SPY-NMS-EQT & ((de:cb:-0.60$-0.40) | (de:cb:0.40$0.60)) & years:lt:.006"
                    }
                }

                # Stamp send/encode times as "YYYY-mm-dd HH:MM:SS.micros" (UTC).
                t = time.time_ns()
                tstr = '.'.join([
                    time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(t / 1_000_000_000)),
                    "%06d" % ((t / 1_000) % 1_000_000)
                ])
                msg['header']['sTim'] = tstr
                msg['header']['encT'] = tstr

                await websocket.send(json.dumps(msg))
                signal_task = asyncio.create_task(send_signal(websocket))

                while await recv_msg(websocket):
                    pass

        except asyncio.exceptions.TimeoutError:
            print("Timeout occurred, retrying...")
        finally:
            # Stop the keep-alive task before (re)connecting.
            if signal_task is not None:
                signal_task.cancel()

if __name__ == "__main__":
    asyncio.run(query_mlink(authentication_key))

Child Orders

import asyncio
import json
import time
import websockets
import nest_asyncio
import pandas as pd
from IPython.display import display

# Enable nested event loops (Jupyter) and point at the live MLink endpoint.
nest_asyncio.apply()

uriJson = "wss://mlink-live.nms.venus.spiderrockconnect.com/mlink/json"
authentication_key = ""

# Accumulator for child-order rows (the handler below displays per-row
# frames instead of appending here).
stream_df = pd.DataFrame(columns=["accnt", "userName", "ticker", "exp", "strike",
                                  "call/put", "orderSide", "child_px", "child_sz", "child_ex"])

async def recv_msg(websocket):
    """Receive one frame; for SpdrParentBrkrState messages, display the
    account, option key, side, and the cpx1/csz1/cex1 child-order fields.

    Always returns True so the receive loop continues.
    """
    global stream_df

    raw = await websocket.recv()
    parsed = json.loads(raw)

    if isinstance(parsed, dict) and parsed.get("header", {}).get("mTyp") == "SpdrParentBrkrState":
        msg = parsed.get("message", {})
        pkey = msg.get("pkey", {})
        sec_key = pkey.get("secKey", {})

        record = {
            "accnt": pkey.get("accnt"),
            "userName": msg.get("userName"),
            "ticker": sec_key.get("tk"),
            "exp": sec_key.get("dt"),
            "strike": sec_key.get("xx"),
            "call/put": sec_key.get("cp"),
            "orderSide": pkey.get("orderSide"),
            "child_px": msg.get("cpx1"),
            "child_sz": msg.get("csz1"),
            "child_ex": msg.get("cex1"),
        }

        # Show each update as its own one-row frame (accumulation disabled).
        display(pd.DataFrame([record]))

    return True

async def send_signal(websocket):
    """Send an MLinkSignalReady (FullScan) keep-alive every 20 seconds."""
    while True:
        await asyncio.sleep(20)

        signal = {
            "header": {"mTyp": "MLinkSignalReady"},
            "message": {"readyScan": "FullScan"},
        }

        # "YYYY-mm-dd HH:MM:SS.micros" timestamp (UTC).
        now_ns = time.time_ns()
        stamp = '.'.join([
            time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(now_ns / 1_000_000_000)),
            "%06d" % ((now_ns / 1_000) % 1_000_000),
        ])
        signal['header']['sTim'] = stamp
        signal['header']['encT'] = stamp

        await websocket.send(json.dumps(signal))
        await asyncio.sleep(0)  # yield to the event loop

async def query_mlink(authentication_key):
    """Stream active SpdrParentBrkrState rows for one account and hand each
    frame to recv_msg().

    Reconnects forever on timeout.  The keep-alive task is tracked and
    cancelled on teardown — the original fire-and-forget create_task leaked
    one send_signal task per reconnect.
    """
    while True:
        signal_task = None
        try:
            async with websockets.connect(
                uriJson,
                additional_headers={"Authorization": f"Bearer {authentication_key}"},
                ping_timeout=None
            ) as websocket:

                msg = {
                    "header": {
                        "mTyp": "MLinkStream"
                    },
                    "message": {
                        # NOTE(review): label appears copy-pasted from the
                        # symbol-risk example — confirm intended label.
                        "queryLabel": "ExampleSymbolRiskSummary",
                        "activeLatency": 900,
                        "msgName": "SpdrParentBrkrState",
                        "where": "accnt:eq:T.MTL.VEN & spdrBrokerStatus:eq:ACTIVE"
                    }
                }

                # "YYYY-mm-dd HH:MM:SS.micros" timestamp (UTC).
                t = time.time_ns()
                tstr = '.'.join([
                    time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(t / 1_000_000_000)),
                    "%06d" % ((t / 1_000) % 1_000_000)
                ])
                msg['header']['sTim'] = tstr
                msg['header']['encT'] = tstr

                await websocket.send(json.dumps(msg))
                signal_task = asyncio.create_task(send_signal(websocket))

                while True:
                    await recv_msg(websocket)

        except asyncio.exceptions.TimeoutError:
            print("Timeout occurred, retrying...")
        finally:
            # Stop the keep-alive task before (re)connecting.
            if signal_task is not None:
                signal_task.cancel()

if __name__ == "__main__":
    asyncio.run(query_mlink(authentication_key))

MLinkStream Symbol Risk

import asyncio
import json
import time
import websockets
import nest_asyncio
import pandas as pd
from IPython.display import display

# Enable nested event loops (Jupyter) and configure the live MLink endpoint.
nest_asyncio.apply()

uriJson = "wss://mlink-live.nms.venus.spiderrockconnect.com/mlink/json"
authentication_key = ""

# Accumulator for risk rows (the handler below displays per-row frames).
stream_df = pd.DataFrame(columns=["ticker", "tradeDate", "VaRsu50", "VaRsd50"])

async def recv_msg(websocket):
    """Receive one frame; for SymbolRiskSummaryV5 messages, display the
    ticker, trade date, and the VaRsu50/VaRsd50 fields.

    Always returns True so the receive loop continues.
    """
    global stream_df

    raw = await websocket.recv()
    parsed = json.loads(raw)

    if isinstance(parsed, dict) and parsed.get("header", {}).get("mTyp") == "SymbolRiskSummaryV5":
        msg = parsed.get("message", {})
        pkey = msg.get("pkey", {})

        record = {
            "ticker": pkey.get("ticker", {}).get("tk"),
            "tradeDate": pkey.get("tradeDate"),
            "VaRsu50": msg.get("VaRsu50"),
            "VaRsd50": msg.get("VaRsd50"),
        }

        # Show each update as its own one-row frame (accumulation disabled).
        display(pd.DataFrame([record]))

    return True

async def send_signal(websocket):
    """Every 20 seconds, ask the server for a FullScan refresh via
    MLinkSignalReady."""
    while True:
        await asyncio.sleep(20)

        signal = {
            "header": {"mTyp": "MLinkSignalReady"},
            "message": {"readyScan": "FullScan"},
        }

        # "YYYY-mm-dd HH:MM:SS.micros" timestamp (UTC).
        now_ns = time.time_ns()
        stamp = '.'.join([
            time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(now_ns / 1_000_000_000)),
            "%06d" % ((now_ns / 1_000) % 1_000_000),
        ])
        signal['header']['sTim'] = stamp
        signal['header']['encT'] = stamp

        await websocket.send(json.dumps(signal))
        await asyncio.sleep(0)  # yield to the event loop

async def query_mlink(authentication_key):
    """Stream SymbolRiskSummaryV5 rows for one account/trade date and hand
    each frame to recv_msg().

    Reconnects forever on timeout.  The keep-alive task is tracked and
    cancelled on teardown — the original fire-and-forget create_task leaked
    one send_signal task per reconnect.
    """
    while True:
        signal_task = None
        try:
            async with websockets.connect(
                uriJson,
                additional_headers={"Authorization": f"Bearer {authentication_key}"},
                ping_timeout=None
            ) as websocket:

                msg = {
                    "header": {
                        "mTyp": "MLinkStream"
                    },
                    "message": {
                        "queryLabel": "ExampleSymbolRiskSummary",
                        "activeLatency": 2500,
                        "msgName": "SymbolRiskSummaryV5",
                        "where": "accnt:eq:T.TJ.VEN3 & tradeDate:eq:2025-06-05"
                    }
                }

                # "YYYY-mm-dd HH:MM:SS.micros" timestamp (UTC).
                t = time.time_ns()
                tstr = '.'.join([
                    time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(t / 1_000_000_000)),
                    "%06d" % ((t / 1_000) % 1_000_000)
                ])
                msg['header']['sTim'] = tstr
                msg['header']['encT'] = tstr

                await websocket.send(json.dumps(msg))
                signal_task = asyncio.create_task(send_signal(websocket))

                while True:
                    await recv_msg(websocket)

        except asyncio.exceptions.TimeoutError:
            print("Timeout occurred, retrying...")
        finally:
            # Stop the keep-alive task before (re)connecting.
            if signal_task is not None:
                signal_task.cancel()

if __name__ == "__main__":
    asyncio.run(query_mlink(authentication_key))

Parent Limit Order

Step One: Send order to parentOrder table.

Initially, this order will do nothing because its orderLimitType is set to Aux (an auxiliary limit; a working limit is supplied in Step Two).

import requests
import json
import time
import copy

# MLink REST gateway endpoint and API token.
rest_url = "https://mlink-live.nms.venus.spiderrockconnect.com/rest/json"
authentication_key = ""

# Template for a SpdrParentOrder insert; each submitted order clones this
# and fills in its own groupingCode / riskGroupId.
base_order_payload = {
    "header": {"mTyp": "SpdrParentOrder"},
    "message": {
        # Option security key: AAPL 2025-07-18 200 Put.
        "secKey": {
            "at": "EQT",
            "ts": "NMS",
            "tk": "AAPL",
            "dt": "2025-07-18",
            "xx": 200,
            "cp": "Put",
        },
        "positionType": "Auto",
        "parentShape": "Single",
        "secType": "Option",
        "accnt": "T.MTL.VEN",  # <-- insert your account here
        "orderSide": "Sell",
        "clientFirm": "SRCORE",
        "spdrActionType": "Add",
        "startType": 0,
        "orderSize": 50,
        "orderActiveSize": 7,
        "marketSession": "RegMkt",
        "parentOrderHandling": "PostOnly",
        "parentBalanceHandling": "PostLimit",
        "orderLimitType": "Aux",  # AUX limit: order does nothing until Step Two
        "takeLimitClass": "SurfProb",
        "makeLimitClass": "SurfProb",
        "takeProbability": 0.1,
        "makeProbability": 0.3,
        "takeAlphaType": "Static",
        "makeAlphaType": "Static",
        "takeReachRule": "None",
        "publicSize": "FullSizeR",
        "numMakeExchanges": 1,
        "autohedge": "None",
        "userName": "matt.leli",
        "maxGrpDayDDeltaLn": 100000.0,
        "maxGrpDayDDeltaSh": 100000.0,
        "maxGrpDayVegaLn": 100000.0,
        "maxGrpDayVegaSh": 100000.0,
        "orderDuration": -1,
        "maxExposureSize": -1,
        "spdrStageType": "None",
        "progressExposeTime": 0,
        "userData1": "Default",
        "userData2": "EXAMPLE_STRATEGY",
        "orderVolLimit": 0.24,
    },
}

def generate_distinct_ids(count=10):
    """Generate `count` distinct grouping codes and risk group IDs.

    Generalized from the hard-coded 10: `count` defaults to 10, so existing
    callers are unchanged.  Both series increment the same final segment so
    order i's grouping code and risk group ID line up.

    Args:
        count: number of ID pairs to generate (default 10).

    Returns:
        (grouping_codes, risk_group_ids): two parallel lists of strings.
    """
    grouping_codes = [f"2137-1312-3867-{5309 + i}" for i in range(count)]
    risk_group_ids = [f"4124-3534-5867-{5309 + i}" for i in range(count)]
    return grouping_codes, risk_group_ids

def create_order_payload(grouping_code, risk_group_id):
    """Clone the base payload and stamp in the grouping code / risk group ID.

    Uses copy.deepcopy (consistent with Steps Two and Three) so nested dicts
    such as secKey are not shared between orders; the previous two-level
    manual .copy() still aliased them.
    """
    order_payload = copy.deepcopy(base_order_payload)
    order_payload["message"]["groupingCode"] = grouping_code
    order_payload["message"]["riskGroupId"] = risk_group_id
    return order_payload


def submit_order(order_payload, order_number):
    """POST one SpdrParentOrder to the MLink REST gateway.

    Returns True on HTTP success, False otherwise (details are printed).
    """
    try:
        headers = {
            "Authorization": f"Bearer {authentication_key}",
            "Content-Type": "application/json"
        }

        # postmsgs command; postaction "I" = (I)nsert, a required parameter.
        params = {
            "cmd": "postmsgs",
            "postaction": "I"
        }

        response = requests.post(rest_url, headers=headers, params=params, json=[order_payload])
        response.raise_for_status()
        print(f"Order {order_number} - Gateway response:", response.json())
        return True

    except requests.RequestException as e:
        print(f"Order {order_number} - Submission failed:", e)
        if hasattr(e, 'response') and e.response is not None:
            print(f"Order {order_number} - Response status:", e.response.status_code)
            print(f"Order {order_number} - Response text:", e.response.text)
        return False

def submit_multiple_orders():
    """Submit 10 orders with distinct grouping codes and risk group IDs.

    Throttles submissions to at most 2 orders per ~3-second window (these are
    the limits the code actually enforces; the original comment claimed
    "4 per second").
    """
    grouping_codes, risk_group_ids = generate_distinct_ids()

    successful_orders = 0
    failed_orders = 0
    orders_in_current_second = 0
    second_start_time = time.time()

    for i in range(10):
        current_time = time.time()
        time_elapsed = current_time - second_start_time

        if time_elapsed >= 3.0:
            # Window expired: reset the counter for a fresh window.
            orders_in_current_second = 0
            second_start_time = current_time
        elif orders_in_current_second >= 2:
            # Hit the per-window limit; pause before continuing.
            # Clamp at 0: the original 1.0 - time_elapsed went negative when
            # elapsed was between 1s and 3s (e.g. slow HTTP round-trips),
            # which made time.sleep() raise ValueError.
            sleep_time = max(0.0, 1.0 - time_elapsed)
            print(f"Rate limit reached, waiting {sleep_time:.2f} seconds...")
            time.sleep(sleep_time)
            orders_in_current_second = 0
            second_start_time = time.time()

        print(f"\nSubmitting order {i + 1}/10...")
        print(f"Grouping Code: {grouping_codes[i]}")
        print(f"Risk Group ID: {risk_group_ids[i]}")

        order_payload = create_order_payload(grouping_codes[i], risk_group_ids[i])

        if submit_order(order_payload, i + 1):
            successful_orders += 1
        else:
            failed_orders += 1

        orders_in_current_second += 1

    print(f"\n=== Summary ===")
    print(f"Successful orders: {successful_orders}")
    print(f"Failed orders: {failed_orders}")

if __name__ == "__main__":
    submit_multiple_orders()

Step Two: Add order to parentLimit table for further updates.

*Note: When first sending to spdrParentLimit, the orderLimitType and orderActiveSize are changed.*

# Template for the SpdrParentLimit Replace message (Step Two): switches the
# Step One orders from the inert Aux limit to a working Vol limit.
base_order_payload = {
    "header": {"mTyp": "SpdrParentLimit"},
    "message": {
        # Same option key as the Step One orders: AAPL 2025-07-18 200 Put.
        "secKey": {
            "at": "EQT",
            "ts": "NMS",
            "tk": "AAPL",
            "dt": "2025-07-18",
            "xx": 200,
            "cp": "Put",
        },
        "positionType": "Auto",
        "parentShape": "Single",
        "secType": "Option",
        "accnt": "T.MTL.VEN",
        "orderSide": "Sell",
        "groupingCode": "placeholder",  # replaced per order below
        "clientFirm": "SRCORE",
        "spdrActionType": "Replace",
        "orderLimitType": "Vol",
        "takeLimitClass": "SurfProb",
        "makeLimitClass": "SurfProb",
        "orderActiveSize": 27,
        "orderVolLimit": 0.275,
        "takeAlphaType": "Static",
        "makeAlphaType": "Static",
        "takeProbability": 0.1,
        "makeProbability": 0.3,
        "takeSurfVolOffset": 0.1,
        "makeSurfVolOffset": 0.1,
        "maxGrpDayDDeltaLn": 100000.0,
        "maxGrpDayDDeltaSh": 100000.0,
        "maxGrpDayVegaLn": 100000.0,
        "maxGrpDayVegaSh": 100000.0,
        "addCumFillQuantity": "No",
        "spdrSource": "MLink",
    },
}

def get_matching_grouping_codes():
    """Return the groupingCodes that the Step One insert orders used."""
    return [f"2137-1312-3867-{5309 + i}" for i in range(10)]


def create_order_payload(grouping_code):
    """Deep-copy the base Replace payload and stamp in the grouping code."""
    payload = copy.deepcopy(base_order_payload)
    payload["message"]["groupingCode"] = grouping_code
    return payload


def submit_order(payload, order_number):
    """POST one SpdrParentLimit Replace message; True on HTTP success."""
    try:
        headers = {
            "Authorization": f"Bearer {authentication_key}",
            "Content-Type": "application/json"
        }

        params = {
            "cmd": "postmsgs",
            "postaction": "I"  # (I)nsert into the parentLimit table
        }

        response = requests.post(rest_url, headers=headers, params=params, json=[payload])
        response.raise_for_status()
        print(f"Order {order_number} - Gateway response:", response.json())
        return True

    except requests.RequestException as e:
        print(f"Order {order_number} - Submission failed:", e)
        if hasattr(e, 'response') and e.response is not None:
            print("Response status:", e.response.status_code)
            print("Response text:", e.response.text)
        return False

def submit_replacement_orders():
    """Submit one SpdrParentLimit Replace order per Step One grouping code
    and print a success/failure summary."""
    success_count = 0
    failure_count = 0

    for i, code in enumerate(get_matching_grouping_codes(), start=1):
        print(f"\nSubmitting replacement order {i} with groupingCode: {code}")
        if submit_order(create_order_payload(code), i):
            success_count += 1
        else:
            failure_count += 1

    print("\n=== Summary ===")
    print(f"Successful replacements: {success_count}")
    print(f"Failed replacements: {failure_count}")

if __name__ == "__main__":
    submit_replacement_orders()

Step Three: Update order params.

# Template for the Step Three SpdrParentLimit Replace message: same orders,
# with a new active size and vol limit.
base_order_payload = {
    "header": {"mTyp": "SpdrParentLimit"},
    "message": {
        # Same option key as before: AAPL 2025-07-18 200 Put.
        "secKey": {
            "at": "EQT",
            "ts": "NMS",
            "tk": "AAPL",
            "dt": "2025-07-18",
            "xx": 200,
            "cp": "Put",
        },
        "positionType": "Auto",
        "parentShape": "Single",
        "secType": "Option",
        "accnt": "T.MTL.VEN",
        "orderSide": "Sell",
        "groupingCode": "placeholder",  # replaced per order below
        "clientFirm": "SRCORE",
        "spdrActionType": "Replace",
        "orderLimitType": "Vol",
        "takeLimitClass": "SurfProb",
        "makeLimitClass": "SurfProb",
        "orderActiveSize": 44,
        "orderVolLimit": 0.25,
        "takeAlphaType": "Static",
        "makeAlphaType": "Static",
        "takeProbability": 0.1,
        "makeProbability": 0.3,
        "takeSurfVolOffset": 0.1,
        "makeSurfVolOffset": 0.1,
        "maxGrpDayDDeltaLn": 100000.0,
        "maxGrpDayDDeltaSh": 100000.0,
        "maxGrpDayVegaLn": 100000.0,
        "maxGrpDayVegaSh": 100000.0,
        "addCumFillQuantity": "No",
        "spdrSource": "MLink",
    },
}

def get_matching_grouping_codes():
    """Return the groupingCodes that the Step One insert orders used."""
    return [f"2137-1312-3867-{5309 + i}" for i in range(10)]


def create_order_payload(grouping_code):
    """Deep-copy the base Replace payload and stamp in the grouping code."""
    payload = copy.deepcopy(base_order_payload)
    payload["message"]["groupingCode"] = grouping_code
    return payload


def submit_order(payload, order_number):
    """POST one SpdrParentLimit Replace message; True on HTTP success."""
    try:
        headers = {
            "Authorization": f"Bearer {authentication_key}",
            "Content-Type": "application/json"
        }

        params = {
            "cmd": "postmsgs",
            "postaction": "U"  # (U)pdate the existing parentLimit rows
        }

        response = requests.post(rest_url, headers=headers, params=params, json=[payload])
        response.raise_for_status()
        print(f"Order {order_number} - Gateway response:", response.json())
        return True

    except requests.RequestException as e:
        print(f"Order {order_number} - Submission failed:", e)
        if hasattr(e, 'response') and e.response is not None:
            print("Response status:", e.response.status_code)
            print("Response text:", e.response.text)
        return False

def submit_replacement_orders():
    """Submit one Step Three SpdrParentLimit update per grouping code and
    print a success/failure summary."""
    success_count = 0
    failure_count = 0

    for i, code in enumerate(get_matching_grouping_codes(), start=1):
        print(f"\nSubmitting replacement order {i} with groupingCode: {code}")
        if submit_order(create_order_payload(code), i):
            success_count += 1
        else:
            failure_count += 1

    print("\n=== Summary ===")
    print(f"Successful replacements: {success_count}")
    print(f"Failed replacements: {failure_count}")

if __name__ == "__main__":
    submit_replacement_orders()