Fix #14: Refactor print_browse/send_to_telegram into single pipeline #22
@@ -45,49 +45,65 @@ def fetch_page(q, page=1, max_retries=MAX_RETRIES, initial_delay=INITIAL_RETRY_D
|
||||
|
||||
delay = initial_delay
|
||||
for attempt in range(max_retries):
|
||||
if attempt > 0:
|
||||
time.sleep(delay)
|
||||
r = subprocess.run(
|
||||
["curl", "-s", url, "--max-time", "10", "-H", "User-Agent: curl/7.88.1"],
|
||||
capture_output=True
|
||||
)
|
||||
|
||||
if r.returncode == 0 and len(r.stdout) > 0:
|
||||
try:
|
||||
return json.loads(r.stdout.decode('utf-8'))
|
||||
except json.JSONDecodeError:
|
||||
if attempt < max_retries - 1:
|
||||
delay *= 2 # Exponential backoff
|
||||
continue
|
||||
return None
|
||||
else:
|
||||
# Rate limit or other error - exponential backoff
|
||||
req = Request(url, headers={"User-Agent": "Mozilla/5.0"})
|
||||
with urlopen(req, timeout=10) as r:
|
||||
return json.loads(r.read())
|
||||
except Exception:
|
||||
if attempt < max_retries - 1:
|
||||
delay *= 2
|
||||
continue
|
||||
return None
|
||||
return None
|
||||
|
||||
def fetch_all_pages(q, max_pages=100):
|
||||
def fetch_all_pages(q, matches_max=None, non_matches_max=None):
|
||||
"""
|
||||
Fetch ALL pages until pagination ends.
|
||||
max_pages is a safety cap to prevent infinite loops.
|
||||
Fetch pages until pagination ends, or until quotas are satisfied.
|
||||
|
||||
Args:
|
||||
q: search query
|
||||
matches_max: stop early once we have this many match events (None = no limit)
|
||||
non_matches_max: stop early once we have this many non-match events (None = no limit)
|
||||
|
||||
Returns:
|
||||
{"events": [...], "total_raw": N, "partial": bool}
|
||||
"""
|
||||
all_events = []
|
||||
total_raw = 0
|
||||
for page in range(1, max_pages + 1):
|
||||
time.sleep(0.2) # small delay between pages (API rate limit is generous)
|
||||
match_count = 0
|
||||
non_match_count = 0
|
||||
page = 0
|
||||
while True:
|
||||
page += 1
|
||||
time.sleep(0.2)
|
||||
data = fetch_page(q, page)
|
||||
if data is None:
|
||||
break
|
||||
events = data.get("events", [])
|
||||
total_raw = data.get("pagination", {}).get("totalResults", 0)
|
||||
all_events.extend(events)
|
||||
# Stop when we get 0 events (no more pages),
|
||||
# OR when we've fetched >= total results
|
||||
|
||||
# Count matches/non-matches in this page
|
||||
for e in events:
|
||||
if is_match_market(e):
|
||||
match_count += 1
|
||||
else:
|
||||
non_match_count += 1
|
||||
|
||||
# Stop if we got what we wanted (only when caps are set)
|
||||
if matches_max is not None and non_matches_max is not None:
|
||||
if match_count >= matches_max and non_match_count >= non_matches_max:
|
||||
break
|
||||
|
||||
# Stop when we get 0 events (no more pages)
|
||||
if len(events) == 0:
|
||||
break
|
||||
# Stop when we've fetched all known results
|
||||
if len(all_events) >= total_raw:
|
||||
break
|
||||
|
||||
partial = (total_raw > 0 and len(all_events) < total_raw)
|
||||
return {"events": all_events, "total_raw": total_raw, "partial": partial}
|
||||
|
||||
@@ -321,24 +337,214 @@ def sort_events(events):
|
||||
# BROWSE
|
||||
# ============================================================
|
||||
|
||||
def browse_events(q, matches_max=10, non_matches_max=10, tradeable_only=True):
|
||||
result = fetch_all_pages(q)
|
||||
def browse_events(q, matches_max=10, non_matches_max=10, tradeable_only=True, sort_by=None):
|
||||
"""
|
||||
Browse Polymarket events.
|
||||
|
||||
Args:
|
||||
q: search query
|
||||
matches_max: max number of match markets to return
|
||||
non_matches_max: max number of non-match markets to return
|
||||
tradeable_only: filter to tradeable events only
|
||||
sort_by: None (fast, API order) or "volume" (full fetch, sort by volume desc)
|
||||
"""
|
||||
# Pass quotas to fetch_all_pages for early-exit optimization.
|
||||
# Only use early-exit when sort_by is None (no client-side sort needed).
|
||||
use_early_exit = (sort_by is None)
|
||||
fetch_matches_max = matches_max if use_early_exit else None
|
||||
fetch_non_matches_max = non_matches_max if use_early_exit else None
|
||||
|
||||
result = fetch_all_pages(q, matches_max=fetch_matches_max, non_matches_max=fetch_non_matches_max)
|
||||
events = result["events"]
|
||||
match_events, non_match_events = filter_events(events, tradeable_only)
|
||||
sorted_match = sort_events(match_events)
|
||||
|
||||
# Sort if requested; otherwise preserve API order
|
||||
if sort_by == "volume":
|
||||
match_events = sort_events(match_events)
|
||||
non_match_events = sort_events(non_match_events)
|
||||
|
||||
return {
|
||||
"query": q,
|
||||
"total_raw": result["total_raw"],
|
||||
"total_fetched": len(events),
|
||||
"total_match": len(match_events),
|
||||
"total_non_match": len(non_match_events),
|
||||
"match_events": sorted_match[:matches_max],
|
||||
"match_events": match_events[:matches_max],
|
||||
"non_match_events": non_match_events[:non_matches_max],
|
||||
"partial": result.get("partial", False),
|
||||
}
|
||||
|
||||
# ============================================================
|
||||
# FORMAT
|
||||
# FORMAT — EVENT
|
||||
# ============================================================
|
||||
|
||||
def format_match_event(e):
|
||||
"""
|
||||
Format a match event into a canonical dict for rendering.
|
||||
All computing done here; renderers just template.
|
||||
|
||||
Returns:
|
||||
{
|
||||
"title": str, # raw title
|
||||
"title_clean": str, # "Team A vs Team B"
|
||||
"tournament": str, # "Tournament Name" or ""
|
||||
"url": str,
|
||||
"time_status": str, # "LIVE", "In 6h", "12h ago"
|
||||
"time_urgency": int, # 0-3
|
||||
"abs_time": str, # "Mar 25, 19:00 WIB"
|
||||
"team_a": str,
|
||||
"team_b": str,
|
||||
"odds_a": str, # "55c"
|
||||
"odds_b": str,
|
||||
"vol": int,
|
||||
}
|
||||
"""
|
||||
ml = get_ml_market(e)
|
||||
outcomes = json.loads(ml.get("outcomes", "[]")) if ml else []
|
||||
prices = json.loads(ml.get("outcomePrices", "[]")) if ml else []
|
||||
td = _get_time_data(e)
|
||||
title = e.get("title", "")
|
||||
|
||||
team_a = outcomes[0] if len(outcomes) > 0 else "?"
|
||||
team_b = outcomes[1] if len(outcomes) > 1 else "?"
|
||||
odds_a = format_odds(float(prices[0])) if len(prices) > 0 else "?"
|
||||
odds_b = format_odds(float(prices[1])) if len(prices) > 1 else "?"
|
||||
|
||||
if " - " in title:
|
||||
title_clean = title.split(" - ")[0].strip()
|
||||
else:
|
||||
title_clean = title
|
||||
|
||||
tournament = get_tournament(title)
|
||||
|
||||
return {
|
||||
"title": title,
|
||||
"title_clean": title_clean,
|
||||
"tournament": tournament,
|
||||
"url": get_event_url(e),
|
||||
"time_status": td["time_status"],
|
||||
"time_urgency": td["time_urgency"],
|
||||
"abs_time": td["abs_time"],
|
||||
"team_a": team_a,
|
||||
"team_b": team_b,
|
||||
"odds_a": odds_a,
|
||||
"odds_b": odds_b,
|
||||
"vol": get_ml_volume(e),
|
||||
}
|
||||
|
||||
|
||||
def format_non_match_event(e):
|
||||
"""
|
||||
Format a non-match event into a canonical dict for rendering.
|
||||
|
||||
Returns:
|
||||
{
|
||||
"title": str,
|
||||
"url": str,
|
||||
"time_status": str,
|
||||
"time_urgency": int,
|
||||
"abs_time": str,
|
||||
"market_count": int,
|
||||
"total_vol": int,
|
||||
}
|
||||
"""
|
||||
td = _get_time_data(e)
|
||||
total_vol = sum(float(m.get("volume", 0)) for m in e.get("markets", []))
|
||||
market_count = len(e.get("markets", []))
|
||||
|
||||
return {
|
||||
"title": e.get("title", "?"),
|
||||
"url": get_event_url(e),
|
||||
"time_status": td["time_status"],
|
||||
"time_urgency": td["time_urgency"],
|
||||
"abs_time": td["abs_time"],
|
||||
"market_count": market_count,
|
||||
"total_vol": int(total_vol),
|
||||
}
|
||||
|
||||
|
||||
# ============================================================
|
||||
# FORMAT — RENDER
|
||||
# ============================================================
|
||||
|
||||
def render_match_lines(event_dict, i, mode):
|
||||
"""
|
||||
Render a formatted match event dict into lines of text.
|
||||
|
||||
Args:
|
||||
event_dict: canonical dict from format_match_event()
|
||||
i: 1-based index for the event number
|
||||
mode: "text" for plain text/Markdown, "html" for Telegram HTML
|
||||
|
||||
Returns:
|
||||
List[str], one line per element (no trailing blank line).
|
||||
Caller adds the blank line separator between events.
|
||||
"""
|
||||
title_clean = event_dict["title_clean"]
|
||||
url = event_dict["url"]
|
||||
abs_time = event_dict["abs_time"]
|
||||
time_status = event_dict["time_status"]
|
||||
vol = event_dict["vol"]
|
||||
tournament = event_dict["tournament"]
|
||||
team_a = event_dict["team_a"]
|
||||
team_b = event_dict["team_b"]
|
||||
odds_a = event_dict["odds_a"]
|
||||
odds_b = event_dict["odds_b"]
|
||||
|
||||
lines = []
|
||||
|
||||
if mode == "html":
|
||||
lines.append(
|
||||
f"<b>{i}.</b> <a href=\"{url}\">{escape_html(title_clean)}</a>"
|
||||
)
|
||||
else:
|
||||
lines.append(f"{i}. [{title_clean}]({url})")
|
||||
|
||||
lines.append(f" {abs_time} | {time_status}")
|
||||
lines.append(f" Vol: ${vol:,.0f}")
|
||||
|
||||
if tournament:
|
||||
lines.append(f" Tournament: {tournament}")
|
||||
|
||||
lines.append(f" Odds: {team_a} {odds_a} | {odds_b} {team_b}")
|
||||
|
||||
return lines
|
||||
|
||||
|
||||
def render_non_match_lines(event_dict, i, mode):
|
||||
"""
|
||||
Render a formatted non-match event dict into lines of text.
|
||||
|
||||
Args:
|
||||
event_dict: canonical dict from format_non_match_event()
|
||||
i: 1-based index for the event number
|
||||
mode: "text" for plain text/Markdown, "html" for Telegram HTML
|
||||
|
||||
Returns:
|
||||
List[str], one line per element (no trailing blank line).
|
||||
"""
|
||||
title = event_dict["title"]
|
||||
url = event_dict["url"]
|
||||
abs_time = event_dict["abs_time"]
|
||||
time_status = event_dict["time_status"]
|
||||
market_count = event_dict["market_count"]
|
||||
total_vol = event_dict["total_vol"]
|
||||
|
||||
lines = []
|
||||
|
||||
if mode == "html":
|
||||
lines.append(f"<b>{i}.</b> <a href=\"{url}\">{escape_html(title)}</a>")
|
||||
else:
|
||||
lines.append(f"{i}. [{title}]({url})")
|
||||
|
||||
lines.append(f" {abs_time} | {time_status}")
|
||||
lines.append(f" Markets: {market_count} | Total Vol: ${total_vol:,.0f}")
|
||||
|
||||
return lines
|
||||
|
||||
|
||||
# ============================================================
|
||||
# FORMAT — LEGACY
|
||||
# ============================================================
|
||||
|
||||
def format_event(e):
|
||||
@@ -434,9 +640,8 @@ def print_browse(match_events, non_match_events, category, total_raw, total_fetc
|
||||
if partial:
|
||||
print(f"WARNING: Partial fetch (API error or timeout) — data may be incomplete")
|
||||
|
||||
# --- MATCH MARKETS ---
|
||||
# Determine sections to show
|
||||
if not matches_only and not non_matches_only:
|
||||
# Default: show both
|
||||
show_matches = True
|
||||
show_non_matches = True
|
||||
elif matches_only:
|
||||
@@ -446,59 +651,24 @@ def print_browse(match_events, non_match_events, category, total_raw, total_fetc
|
||||
show_matches = False
|
||||
show_non_matches = True
|
||||
|
||||
# Match events
|
||||
if show_matches:
|
||||
print(f"\nMATCH MARKETS")
|
||||
print("\nMATCH MARKETS")
|
||||
if not match_events:
|
||||
print(" No match markets found.")
|
||||
else:
|
||||
for i, e in enumerate(match_events, 1):
|
||||
f = format_event(e)
|
||||
ml = get_ml_market(e)
|
||||
outcomes = json.loads(ml.get("outcomes", "[]")) if ml else []
|
||||
prices = json.loads(ml.get("outcomePrices", "[]")) if ml else []
|
||||
vol = f["volume"]
|
||||
title = f["title"]
|
||||
url = f["url"]
|
||||
td = _get_time_data(e)
|
||||
start_time_wib = td["abs_time"]
|
||||
rel_time = td["time_status"]
|
||||
fd = format_match_event(e)
|
||||
for line in render_match_lines(fd, i, mode="text"):
|
||||
print(line)
|
||||
|
||||
team_a = outcomes[0] if len(outcomes) > 0 else "?"
|
||||
team_b = outcomes[1] if len(outcomes) > 1 else "?"
|
||||
odds_a = format_odds(float(prices[0])) if len(prices) > 0 else "?"
|
||||
odds_b = format_odds(float(prices[1])) if len(prices) > 1 else "?"
|
||||
|
||||
if " - " in title:
|
||||
title_clean = title.split(" - ")[0].strip()
|
||||
else:
|
||||
title_clean = title
|
||||
|
||||
tournament = get_tournament(title)
|
||||
|
||||
print(f"\n {i}. [{title_clean}]({url})")
|
||||
print(f" {start_time_wib} | {rel_time}")
|
||||
print(f" Vol: ${vol:,.0f}")
|
||||
if tournament:
|
||||
print(f" Tournament: {tournament}")
|
||||
print(f" Odds: {team_a} {odds_a} | {odds_b} {team_b}")
|
||||
|
||||
# --- NON-MATCH MARKETS ---
|
||||
# Non-match events
|
||||
if show_non_matches and non_match_events:
|
||||
print(f"\nNON-MATCH MARKETS")
|
||||
|
||||
print("\nNON-MATCH MARKETS")
|
||||
for i, e in enumerate(non_match_events[:non_matches_max], 1):
|
||||
title = e.get("title", "?")
|
||||
url = get_event_url(e)
|
||||
td = _get_time_data(e)
|
||||
start_time_wib = td["abs_time"]
|
||||
rel_time = td["time_status"]
|
||||
|
||||
total_vol = sum(float(m.get("volume", 0)) for m in e.get("markets", []))
|
||||
market_count = len(e.get("markets", []))
|
||||
|
||||
print(f"\n {i}. [{title}]({url})")
|
||||
print(f" {start_time_wib} | {rel_time}")
|
||||
print(f" Markets: {market_count} | Total Vol: ${total_vol:,.0f}")
|
||||
fd = format_non_match_event(e)
|
||||
for line in render_non_match_lines(fd, i, mode="text"):
|
||||
print(line)
|
||||
|
||||
def print_detail(e, detail):
|
||||
print(f"\n{detail['title']}")
|
||||
@@ -575,82 +745,68 @@ def send_to_telegram(match_events, non_match_events, category, matches_only=Fals
|
||||
msg_id = send_telegram_message(bot_token, chat_id, text)
|
||||
print(f" Sent msg {msg_id}")
|
||||
|
||||
# Build sections
|
||||
lines = [f"<b>{category.upper()}</b> | {header_date}"]
|
||||
lines.append("")
|
||||
# Build lines
|
||||
lines = [f"<b>{category.upper()}</b> | {header_date}", ""]
|
||||
|
||||
if show_matches:
|
||||
lines.append("MATCH MARKETS")
|
||||
lines.append("")
|
||||
lines += ["MATCH MARKETS", ""]
|
||||
if not match_events:
|
||||
lines.append(" No match markets found.")
|
||||
else:
|
||||
for i, e in enumerate(match_events, 1):
|
||||
ml = get_ml_market(e)
|
||||
outcomes = json.loads(ml.get("outcomes", "[]")) if ml else []
|
||||
prices = json.loads(ml.get("outcomePrices", "[]")) if ml else []
|
||||
vol = get_ml_volume(e)
|
||||
title = e.get("title", "?")
|
||||
url = get_event_url(e)
|
||||
td = _get_time_data(e)
|
||||
start_time_wib = td["abs_time"]
|
||||
rel_time = td["time_status"]
|
||||
team_a = outcomes[0] if len(outcomes) > 0 else "?"
|
||||
team_b = outcomes[1] if len(outcomes) > 1 else "?"
|
||||
odds_a = format_odds(float(prices[0])) if len(prices) > 0 else "?"
|
||||
odds_b = format_odds(float(prices[1])) if len(prices) > 1 else "?"
|
||||
tournament = get_tournament(title)
|
||||
title_clean = title.split(" - ")[0].strip() if " - " in title else title
|
||||
lines.append(f"<b>{i}.</b> <a href=\"{url}\">{escape_html(title_clean)}</a>")
|
||||
lines.append(f" {start_time_wib} | {rel_time}")
|
||||
lines.append(f" Vol: ${vol:,.0f}")
|
||||
if tournament:
|
||||
lines.append(f" Tournament: {tournament}")
|
||||
lines.append(f" Odds: {team_a} {odds_a} | {odds_b} {team_b}")
|
||||
fd = format_match_event(e)
|
||||
lines += render_match_lines(fd, i, mode="html")
|
||||
lines.append("")
|
||||
lines.append("")
|
||||
|
||||
if show_non_matches:
|
||||
lines.append("NON-MATCH MARKETS")
|
||||
lines.append("")
|
||||
lines += ["NON-MATCH MARKETS", ""]
|
||||
if not non_match_events:
|
||||
lines.append(" No non-match markets found.")
|
||||
else:
|
||||
for i, e in enumerate(non_match_events, 1):
|
||||
title = e.get("title", "?")
|
||||
url = get_event_url(e)
|
||||
td = _get_time_data(e)
|
||||
start_time_wib = td["abs_time"]
|
||||
rel_time = td["time_status"]
|
||||
total_vol = sum(float(m.get("volume", 0)) for m in e.get("markets", []))
|
||||
market_count = len(e.get("markets", []))
|
||||
lines.append(f"<b>{i}.</b> <a href=\"{url}\">{escape_html(title)}</a>")
|
||||
lines.append(f" {start_time_wib} | {rel_time}")
|
||||
lines.append(f" Markets: {market_count} | Total Vol: ${total_vol:,.0f}")
|
||||
fd = format_non_match_event(e)
|
||||
lines += render_non_match_lines(fd, i, mode="html")
|
||||
lines.append("")
|
||||
lines.append("")
|
||||
|
||||
# Chunk by 10 items (events), respecting 4096 char Telegram limit
|
||||
text = "\n".join(lines)
|
||||
# Chunk and send
|
||||
send_chunked(lines, send, category, header_date, show_matches, show_non_matches)
|
||||
|
||||
|
||||
def send_chunked(all_lines, send_fn, category, header_date, show_matches, show_non_matches):
|
||||
"""
|
||||
Split already-built lines into Telegram-safe chunks and send them.
|
||||
|
||||
Telegram messages are capped at 4096 chars. Chunks are grouped by
|
||||
section header so no event is split across messages.
|
||||
|
||||
Args:
|
||||
all_lines: Full message lines list (built by caller).
|
||||
send_fn: Closure that sends a single string and prints confirmation.
|
||||
category: Category name for header.
|
||||
header_date: Date string for header.
|
||||
show_matches: Whether MATCH MARKETS section is present.
|
||||
show_non_matches: Whether NON-MATCH MARKETS section is present.
|
||||
"""
|
||||
text = "\n".join(all_lines)
|
||||
if len(text) <= 4096:
|
||||
send(text)
|
||||
send_fn(text)
|
||||
return
|
||||
|
||||
# Split into chunks of 10 events
|
||||
# Split into chunks of 10 events, respecting section headers
|
||||
all_items = []
|
||||
in_match = True
|
||||
for line in lines:
|
||||
for line in all_lines:
|
||||
if line == "MATCH MARKETS":
|
||||
in_match = True
|
||||
elif line == "NON-MATCH MARKETS":
|
||||
in_match = False
|
||||
elif line.startswith("<b>") and ". " in line and "</a>" in line:
|
||||
elif line.startswith("<b>") and "</a>" in line:
|
||||
# Event title line: <b>1.</b> <a href="...">Title</a>
|
||||
all_items.append((in_match, line))
|
||||
|
||||
chunk = []
|
||||
chunk_len = 0
|
||||
chunk_num = 1
|
||||
|
||||
# Header is always first
|
||||
header = f"<b>{category.upper()}</b> | {header_date}\n"
|
||||
if show_matches:
|
||||
header += "\nMATCH MARKETS\n\n"
|
||||
@@ -661,9 +817,8 @@ def send_to_telegram(match_events, non_match_events, category, matches_only=Fals
|
||||
test_chunk = chunk + [item_line, ""]
|
||||
test_text = header + "\n".join(chunk) + "\n".join(test_chunk)
|
||||
if len(test_text) > 4096 or len(chunk) >= 10:
|
||||
# Send current chunk
|
||||
msg = header + "\n".join(chunk)
|
||||
send(msg)
|
||||
send_fn(msg)
|
||||
chunk = [item_line, ""]
|
||||
header = f"<b>{category.upper()}</b> (cont.) | {header_date}\n"
|
||||
if show_matches and is_match:
|
||||
@@ -675,7 +830,7 @@ def send_to_telegram(match_events, non_match_events, category, matches_only=Fals
|
||||
|
||||
if chunk:
|
||||
msg = header + "\n".join(chunk)
|
||||
send(msg)
|
||||
send_fn(msg)
|
||||
|
||||
|
||||
# ============================================================
|
||||
|
||||
@@ -320,5 +320,890 @@ class TestTimeFunctions(unittest.TestCase):
|
||||
self.assertEqual(td2["abs_time"], "Mar 25, 19:00 WIB")
|
||||
|
||||
|
||||
class TestFormatMatchEvent(unittest.TestCase):
|
||||
"""Tests for format_match_event() canonical dict."""
|
||||
|
||||
def _frozen_dt(self, year, month, day, hour, minute):
|
||||
return datetime(year, month, day, hour, minute,
|
||||
tzinfo=timezone.utc)
|
||||
|
||||
def _mock_datetime(self, frozen):
|
||||
class MockDatetime:
|
||||
@staticmethod
|
||||
def now(tz=None):
|
||||
if tz is None:
|
||||
return frozen
|
||||
return frozen.astimezone(tz)
|
||||
fromisoformat = staticmethod(datetime.fromisoformat)
|
||||
def __call__(self, *a, **k):
|
||||
return datetime(*a, **k)
|
||||
return MockDatetime
|
||||
|
||||
def _make_event(self, title, ml_market=None, start_time="2026-03-25T18:00:00Z"):
|
||||
import json as _json
|
||||
e = {
|
||||
"title": title,
|
||||
"slug": "test-slug",
|
||||
"startTime": start_time,
|
||||
"markets": [],
|
||||
}
|
||||
if ml_market:
|
||||
e["markets"].append(ml_market)
|
||||
return e
|
||||
|
||||
def _make_ml_market(self, outcomes, prices, vol=50000):
|
||||
import json
|
||||
return {
|
||||
"sportsMarketType": "moneyline",
|
||||
"outcomes": json.dumps(outcomes),
|
||||
"outcomePrices": json.dumps(prices),
|
||||
"bestBid": str(float(prices[0]) - 0.01) if prices else "0.49",
|
||||
"bestAsk": str(float(prices[0]) + 0.01) if prices else "0.51",
|
||||
"volume": str(vol),
|
||||
"acceptingOrders": True,
|
||||
"closed": False,
|
||||
}
|
||||
|
||||
def test_fields_present(self):
|
||||
"""All canonical fields are present and non-null."""
|
||||
frozen = self._frozen_dt(2026, 3, 25, 12, 0)
|
||||
with patch('browse.datetime', self._mock_datetime(frozen)):
|
||||
from browse import format_match_event
|
||||
e = self._make_event(
|
||||
"Counter Strike: Team A vs Team B - ESL Pro League",
|
||||
self._make_ml_market(['"Team A"', '"Team B"'], [0.55, 0.45]),
|
||||
)
|
||||
fd = format_match_event(e)
|
||||
self.assertIn("title", fd)
|
||||
self.assertIn("title_clean", fd)
|
||||
self.assertIn("tournament", fd)
|
||||
self.assertIn("url", fd)
|
||||
self.assertIn("time_status", fd)
|
||||
self.assertIn("time_urgency", fd)
|
||||
self.assertIn("abs_time", fd)
|
||||
self.assertIn("team_a", fd)
|
||||
self.assertIn("team_b", fd)
|
||||
self.assertIn("odds_a", fd)
|
||||
self.assertIn("odds_b", fd)
|
||||
self.assertIn("vol", fd)
|
||||
|
||||
def test_title_clean_no_tournament(self):
|
||||
"""title_clean strips tournament suffix after ' - '."""
|
||||
frozen = self._frozen_dt(2026, 3, 25, 12, 0)
|
||||
with patch('browse.datetime', self._mock_datetime(frozen)):
|
||||
from browse import format_match_event
|
||||
e = self._make_event(
|
||||
"Counter Strike: Team A vs Team B - ESL Pro League",
|
||||
self._make_ml_market(['"Team A"', '"Team B"'], [0.55, 0.45]),
|
||||
)
|
||||
fd = format_match_event(e)
|
||||
self.assertEqual(fd["title_clean"], "Counter Strike: Team A vs Team B")
|
||||
self.assertEqual(fd["tournament"], "ESL Pro League")
|
||||
|
||||
def test_title_clean_no_dash(self):
|
||||
"""title_clean is unchanged when no ' - ' separator."""
|
||||
frozen = self._frozen_dt(2026, 3, 25, 12, 0)
|
||||
with patch('browse.datetime', self._mock_datetime(frozen)):
|
||||
from browse import format_match_event
|
||||
e = self._make_event(
|
||||
"Counter Strike: Team A vs Team B",
|
||||
self._make_ml_market(['"Team A"', '"Team B"'], [0.55, 0.45]),
|
||||
)
|
||||
fd = format_match_event(e)
|
||||
self.assertEqual(fd["title_clean"], "Counter Strike: Team A vs Team B")
|
||||
self.assertEqual(fd["tournament"], "")
|
||||
|
||||
def test_missing_ml(self):
|
||||
"""Returns valid dict with '?' fallbacks when no ML market."""
|
||||
frozen = self._frozen_dt(2026, 3, 25, 12, 0)
|
||||
with patch('browse.datetime', self._mock_datetime(frozen)):
|
||||
from browse import format_match_event
|
||||
e = self._make_event("Team A vs Team B")
|
||||
fd = format_match_event(e)
|
||||
self.assertEqual(fd["team_a"], "?")
|
||||
self.assertEqual(fd["team_b"], "?")
|
||||
self.assertEqual(fd["odds_a"], "?")
|
||||
self.assertEqual(fd["odds_b"], "?")
|
||||
self.assertEqual(fd["vol"], 0)
|
||||
|
||||
def test_missing_outcomes(self):
|
||||
"""Handles empty outcomes list gracefully."""
|
||||
frozen = self._frozen_dt(2026, 3, 25, 12, 0)
|
||||
with patch('browse.datetime', self._mock_datetime(frozen)):
|
||||
from browse import format_match_event
|
||||
e = self._make_event(
|
||||
"Team A vs Team B",
|
||||
self._make_ml_market([], []),
|
||||
)
|
||||
fd = format_match_event(e)
|
||||
self.assertEqual(fd["team_a"], "?")
|
||||
self.assertEqual(fd["team_b"], "?")
|
||||
|
||||
def test_time_data_passed_through(self):
|
||||
"""Time fields come from _get_time_data."""
|
||||
frozen = self._frozen_dt(2026, 3, 25, 12, 0)
|
||||
with patch('browse.datetime', self._mock_datetime(frozen)):
|
||||
from browse import format_match_event
|
||||
e = self._make_event(
|
||||
"Team A vs Team B",
|
||||
self._make_ml_market(['"Team A"', '"Team B"'], [0.55, 0.45]),
|
||||
start_time="2026-03-25T18:00:00Z", # 6h in future
|
||||
)
|
||||
fd = format_match_event(e)
|
||||
self.assertEqual(fd["time_status"], "In 6h")
|
||||
self.assertEqual(fd["time_urgency"], 2)
|
||||
self.assertIn("WIB", fd["abs_time"])
|
||||
|
||||
|
||||
class TestFormatNonMatchEvent(unittest.TestCase):
|
||||
"""Tests for format_non_match_event() canonical dict."""
|
||||
|
||||
def _frozen_dt(self, year, month, day, hour, minute):
|
||||
return datetime(year, month, day, hour, minute,
|
||||
tzinfo=timezone.utc)
|
||||
|
||||
def _mock_datetime(self, frozen):
|
||||
class MockDatetime:
|
||||
@staticmethod
|
||||
def now(tz=None):
|
||||
if tz is None:
|
||||
return frozen
|
||||
return frozen.astimezone(tz)
|
||||
fromisoformat = staticmethod(datetime.fromisoformat)
|
||||
def __call__(self, *a, **k):
|
||||
return datetime(*a, **k)
|
||||
return MockDatetime
|
||||
|
||||
def test_fields_present(self):
|
||||
"""All canonical fields are present."""
|
||||
frozen = self._frozen_dt(2026, 3, 25, 12, 0)
|
||||
with patch('browse.datetime', self._mock_datetime(frozen)):
|
||||
from browse import format_non_match_event
|
||||
e = {
|
||||
"title": "Will it rain in Jakarta?",
|
||||
"slug": "rain-jakarta",
|
||||
"startTime": "2026-03-25T18:00:00Z",
|
||||
"markets": [
|
||||
{"volume": "10000"},
|
||||
{"volume": "5000"},
|
||||
],
|
||||
}
|
||||
fd = format_non_match_event(e)
|
||||
self.assertIn("title", fd)
|
||||
self.assertIn("url", fd)
|
||||
self.assertIn("time_status", fd)
|
||||
self.assertIn("time_urgency", fd)
|
||||
self.assertIn("abs_time", fd)
|
||||
self.assertIn("market_count", fd)
|
||||
self.assertIn("total_vol", fd)
|
||||
|
||||
def test_market_stats(self):
|
||||
"""market_count and total_vol computed correctly."""
|
||||
frozen = self._frozen_dt(2026, 3, 25, 12, 0)
|
||||
with patch('browse.datetime', self._mock_datetime(frozen)):
|
||||
from browse import format_non_match_event
|
||||
e = {
|
||||
"title": "Test",
|
||||
"slug": "test",
|
||||
"startTime": "2026-03-25T18:00:00Z",
|
||||
"markets": [
|
||||
{"volume": "10000"},
|
||||
{"volume": "5000"},
|
||||
],
|
||||
}
|
||||
fd = format_non_match_event(e)
|
||||
self.assertEqual(fd["market_count"], 2)
|
||||
self.assertEqual(fd["total_vol"], 15000)
|
||||
|
||||
def test_time_passed_through(self):
|
||||
"""Time fields come from _get_time_data."""
|
||||
frozen = self._frozen_dt(2026, 3, 25, 12, 0)
|
||||
with patch('browse.datetime', self._mock_datetime(frozen)):
|
||||
from browse import format_non_match_event
|
||||
e = {
|
||||
"title": "Test",
|
||||
"slug": "test",
|
||||
"startTime": "2026-03-25T18:00:00Z",
|
||||
"markets": [],
|
||||
}
|
||||
fd = format_non_match_event(e)
|
||||
self.assertEqual(fd["time_status"], "In 6h")
|
||||
|
||||
|
||||
class TestRenderMatchLines(unittest.TestCase):
|
||||
"""Tests for render_match_lines() text and HTML output."""
|
||||
|
||||
def test_text_mode_exact_lines(self):
|
||||
"""text mode produces expected plain text lines."""
|
||||
from browse import render_match_lines
|
||||
fd = {
|
||||
"title_clean": "Team A vs Team B",
|
||||
"url": "https://polymarket.com/market/test",
|
||||
"abs_time": "Mar 25, 19:00 WIB",
|
||||
"time_status": "In 6h",
|
||||
"vol": 50000,
|
||||
"tournament": "ESL Pro League",
|
||||
"team_a": "Team A",
|
||||
"team_b": "Team B",
|
||||
"odds_a": "55c",
|
||||
"odds_b": "45c",
|
||||
}
|
||||
lines = render_match_lines(fd, 1, mode="text")
|
||||
self.assertEqual(lines[0], "1. [Team A vs Team B](https://polymarket.com/market/test)")
|
||||
self.assertEqual(lines[1], " Mar 25, 19:00 WIB | In 6h")
|
||||
self.assertEqual(lines[2], " Vol: $50,000")
|
||||
self.assertEqual(lines[3], " Tournament: ESL Pro League")
|
||||
self.assertEqual(lines[4], " Odds: Team A 55c | 45c Team B")
|
||||
|
||||
def test_text_mode_no_tournament(self):
|
||||
"""text mode omits Tournament line when tournament is empty."""
|
||||
from browse import render_match_lines
|
||||
fd = {
|
||||
"title_clean": "Team A vs Team B",
|
||||
"url": "https://polymarket.com/market/test",
|
||||
"abs_time": "Mar 25, 19:00 WIB",
|
||||
"time_status": "In 6h",
|
||||
"vol": 50000,
|
||||
"tournament": "",
|
||||
"team_a": "Team A",
|
||||
"team_b": "Team B",
|
||||
"odds_a": "55c",
|
||||
"odds_b": "45c",
|
||||
}
|
||||
lines = render_match_lines(fd, 2, mode="text")
|
||||
self.assertEqual(len(lines), 4)
|
||||
self.assertEqual(lines[0], "2. [Team A vs Team B](https://polymarket.com/market/test)")
|
||||
self.assertNotIn("Tournament", lines[3])
|
||||
|
||||
def test_html_mode_exact(self):
|
||||
"""html mode produces expected HTML lines with escape_html."""
|
||||
from browse import render_match_lines
|
||||
fd = {
|
||||
"title_clean": "Team A & Team B vs Team C",
|
||||
"url": "https://polymarket.com/market/test",
|
||||
"abs_time": "Mar 25, 19:00 WIB",
|
||||
"time_status": "LIVE",
|
||||
"vol": 50000,
|
||||
"tournament": "ESL Pro League",
|
||||
"team_a": "Team A & Team B",
|
||||
"team_b": "Team C",
|
||||
"odds_a": "55c",
|
||||
"odds_b": "45c",
|
||||
}
|
||||
lines = render_match_lines(fd, 1, mode="html")
|
||||
self.assertEqual(lines[0], "<b>1.</b> <a href=\"https://polymarket.com/market/test\">Team A & Team B vs Team C</a>")
|
||||
self.assertEqual(lines[1], " Mar 25, 19:00 WIB | LIVE")
|
||||
self.assertEqual(lines[2], " Vol: $50,000")
|
||||
self.assertEqual(lines[3], " Tournament: ESL Pro League")
|
||||
self.assertEqual(lines[4], " Odds: Team A & Team B 55c | 45c Team C")
|
||||
|
||||
def test_html_mode_xss_prevention(self):
|
||||
"""html mode escapes < and > to prevent XSS."""
|
||||
from browse import render_match_lines
|
||||
fd = {
|
||||
"title_clean": "<script>alert('xss')</script>",
|
||||
"url": "https://polymarket.com/market/test",
|
||||
"abs_time": "Mar 25, 19:00 WIB",
|
||||
"time_status": "LIVE",
|
||||
"vol": 1000,
|
||||
"tournament": "",
|
||||
"team_a": "Team A",
|
||||
"team_b": "Team B",
|
||||
"odds_a": "50c",
|
||||
"odds_b": "50c",
|
||||
}
|
||||
lines = render_match_lines(fd, 1, mode="html")
|
||||
self.assertIn("<script>", lines[0])
|
||||
self.assertNotIn("<script>", lines[0])
|
||||
|
||||
|
||||
class TestRenderNonMatchLines(unittest.TestCase):
|
||||
"""Tests for render_non_match_lines() text and HTML output."""
|
||||
|
||||
def test_text_mode_exact_lines(self):
|
||||
"""text mode produces expected plain text lines."""
|
||||
from browse import render_non_match_lines
|
||||
fd = {
|
||||
"title": "Will it rain in Jakarta?",
|
||||
"url": "https://polymarket.com/event/rain-jakarta",
|
||||
"abs_time": "Mar 25, 19:00 WIB",
|
||||
"time_status": "In 6h",
|
||||
"market_count": 3,
|
||||
"total_vol": 25000,
|
||||
}
|
||||
lines = render_non_match_lines(fd, 1, mode="text")
|
||||
self.assertEqual(lines[0], "1. [Will it rain in Jakarta?](https://polymarket.com/event/rain-jakarta)")
|
||||
self.assertEqual(lines[1], " Mar 25, 19:00 WIB | In 6h")
|
||||
self.assertEqual(lines[2], " Markets: 3 | Total Vol: $25,000")
|
||||
|
||||
def test_html_mode_exact(self):
|
||||
"""html mode produces expected HTML lines with escape_html."""
|
||||
from browse import render_non_match_lines
|
||||
fd = {
|
||||
"title": "Rain <or> Sun?",
|
||||
"url": "https://polymarket.com/event/rain-sun",
|
||||
"abs_time": "Mar 25, 19:00 WIB",
|
||||
"time_status": "In 6h",
|
||||
"market_count": 2,
|
||||
"total_vol": 10000,
|
||||
}
|
||||
lines = render_non_match_lines(fd, 1, mode="html")
|
||||
self.assertEqual(lines[0], "<b>1.</b> <a href=\"https://polymarket.com/event/rain-sun\">Rain <or> Sun?</a>")
|
||||
self.assertEqual(lines[1], " Mar 25, 19:00 WIB | In 6h")
|
||||
self.assertEqual(lines[2], " Markets: 2 | Total Vol: $10,000")
|
||||
|
||||
|
||||
class TestPrintBrowseIntegration(unittest.TestCase):
    """Integration tests for print_browse using the new pipeline."""

    def _frozen_dt(self, year, month, day, hour, minute):
        # Fixed, timezone-aware (UTC) instant used to freeze "now" in tests.
        return datetime(year, month, day, hour, minute,
                        tzinfo=timezone.utc)

    def _mock_datetime(self, frozen):
        # Drop-in replacement for the `datetime` class as seen inside browse:
        # now() always reports `frozen` (converted when a tz is passed),
        # while fromisoformat and direct construction keep real behavior.
        class MockDatetime:
            @staticmethod
            def now(tz=None):
                if tz is None:
                    return frozen
                return frozen.astimezone(tz)
            fromisoformat = staticmethod(datetime.fromisoformat)
            def __call__(self, *a, **k):
                return datetime(*a, **k)
        return MockDatetime

    @patch('builtins.print')
    def test_print_browse_uses_new_pipeline(self, mock_print):
        """print_browse calls format_match_event and render_match_lines."""
        frozen = self._frozen_dt(2026, 3, 25, 12, 0)
        with patch('browse.datetime', self._mock_datetime(frozen)):
            from browse import print_browse
            # One realistic match event with a tradeable moneyline market.
            match_events = [{
                "title": "Counter Strike: Team A vs Team B - ESL Pro League",
                "slug": "csa",
                "startTime": "2026-03-25T18:00:00Z",
                "markets": [{
                    "sportsMarketType": "moneyline",
                    "outcomes": '["Team A", "Team B"]',
                    "outcomePrices": "[0.55, 0.45]",
                    "bestBid": "0.54",
                    "bestAsk": "0.56",
                    "volume": "50000",
                    "acceptingOrders": True,
                    "closed": False,
                }],
            }]
            # Stub both pipeline stages so we can assert how print_browse
            # wires them together, independent of their real formatting.
            with patch('browse.format_match_event') as mock_fmt, \
                 patch('browse.render_match_lines') as mock_render:
                mock_fmt.return_value = {
                    "title_clean": "Team A vs Team B",
                    "url": "https://polymarket.com/market/csa",
                    "abs_time": "Mar 25, 19:00 WIB",
                    "time_status": "In 6h",
                    "vol": 50000,
                    "tournament": "ESL Pro League",
                    "team_a": "Team A",
                    "team_b": "Team B",
                    "odds_a": "55c",
                    "odds_b": "45c",
                }
                mock_render.return_value = [
                    "1. [Team A vs Team B](https://polymarket.com/market/csa)",
                    "   Mar 25, 19:00 WIB | In 6h",
                    "   Vol: $50,000",
                    "   Tournament: ESL Pro League",
                    "   Odds: Team A 55c | 45c Team B",
                ]
                print_browse(match_events, [], "Counter Strike", 1, 1, 1, 0,
                             non_matches_max=5)

                # The event flows through format_match_event exactly once, and
                # its formatted dict is rendered in text mode at position 1.
                mock_fmt.assert_called_once_with(match_events[0])
                mock_render.assert_called_once_with(mock_fmt.return_value, 1, mode="text")

    @patch('builtins.print')
    def test_print_browse_matches_only(self, mock_print):
        """matches_only suppresses non-match section."""
        frozen = self._frozen_dt(2026, 3, 25, 12, 0)
        with patch('browse.datetime', self._mock_datetime(frozen)):
            from browse import print_browse
            with patch('browse.format_non_match_event') as mock_non_fmt:
                print_browse([], [], "Counter Strike", 0, 0, 0, 0,
                             non_matches_max=5, matches_only=True)
                # With matches_only=True the non-match formatter must never run.
                mock_non_fmt.assert_not_called()
|
||||
|
||||
|
||||
class TestSendChunked(unittest.TestCase):
    """Tests for send_chunked() helper."""

    def test_small_message_sent_directly(self):
        """Messages under 4096 chars go through without chunking."""
        captured = []

        def capture(text):
            captured.append(text)

        payload = ["<b>COUNTER STRIKE</b> | Mar 25, 2026", "", "MATCH MARKETS", "", "1. test"]
        # This fits in one message
        from browse import send_chunked
        send_chunked(payload, capture, "Counter Strike", "Mar 25, 2026",
                     show_matches=True, show_non_matches=False)
        self.assertEqual(len(captured), 1)

    def test_chunked_message_gets_cont_header(self):
        """Messages over 4096 chars get continuation header."""
        captured = []

        def capture(text):
            captured.append(text)

        # Build enough content to exceed 4096 chars
        # Each event line: ~260 chars. Need ~16 events + headers (~4200 chars)
        payload = ["<b>COUNTER STRIKE</b> | Mar 25, 2026", ""]
        long_name = 'X' * 250
        for idx in range(16):
            payload.append(f"<b>{idx+1}.</b> <a href=\"https://polymarket.com/market/{idx}\">Team {long_name}</a>")
            payload.append("   Mar 25, 19:00 WIB | In 6h")
            payload.append("   Vol: $50,000")
            payload.append("   Odds: TeamA 55c | 45c TeamB")
            payload.append("")
        payload.append("")

        from browse import send_chunked
        send_chunked(payload, capture, "Counter Strike", "Mar 25, 2026",
                     show_matches=True, show_non_matches=False)

        # Should have sent more than one message (chunked)
        self.assertGreater(len(captured), 1)
        # At least one continuation message
        has_cont = any("(cont.)" in t for t in captured)
        self.assertTrue(has_cont, f"Expected at least one '(cont.)' message. Got {len(captured)} messages.")
|
||||
|
||||
|
||||
class TestIsMatchMarket(unittest.TestCase):
    """Tests for is_match_market() classification."""

    def test_match_when_series_and_gameid(self):
        """seriesSlug + gameId present -> match market."""
        from browse import is_match_market
        event = {
            "seriesSlug": "esl-pro-league",
            "gameId": "12345",
            "title": "Tournament Winner",
        }
        self.assertTrue(is_match_market(event))

    def test_match_when_vs_in_title(self):
        """' vs ' in title -> match market."""
        from browse import is_match_market
        event = {"title": "Team A vs Team B - Final"}
        self.assertTrue(is_match_market(event))

    def test_non_match_without_series_and_gameid(self):
        """No seriesSlug/gameId and no ' vs ' -> non-match."""
        from browse import is_match_market
        event = {"title": "Will Team A win the tournament?"}
        self.assertFalse(is_match_market(event))

    def test_non_match_seriesSlug_only(self):
        """Only seriesSlug (no gameId) -> non-match."""
        from browse import is_match_market
        event = {"seriesSlug": "esl-pro-league", "title": "Tournament Winner"}
        self.assertFalse(is_match_market(event))

    def test_non_match_gameid_only(self):
        """Only gameId (no seriesSlug) -> non-match."""
        from browse import is_match_market
        event = {"gameId": "12345", "title": "Tournament Winner"}
        self.assertFalse(is_match_market(event))
|
||||
|
||||
|
||||
class TestGetMlMarket(unittest.TestCase):
    """Tests for get_ml_market() and get_ml_volume()."""

    def test_get_ml_market_finds_moneyline(self):
        """Finds and returns the moneyline market."""
        from browse import get_ml_market
        event = {
            "markets": [
                {"sportsMarketType": "spread", "volume": "1000"},
                {"sportsMarketType": "moneyline", "volume": "50000"},
                {"sportsMarketType": "total", "volume": "2000"},
            ]
        }
        found = get_ml_market(event)
        self.assertEqual(found["sportsMarketType"], "moneyline")
        self.assertEqual(found["volume"], "50000")

    def test_get_ml_market_returns_none_when_missing(self):
        """Returns None when no moneyline market exists."""
        from browse import get_ml_market
        only_spread = {"markets": [{"sportsMarketType": "spread", "volume": "1000"}]}
        self.assertIsNone(get_ml_market(only_spread))

    def test_get_ml_market_returns_none_when_no_markets(self):
        """Returns None when event has no markets."""
        from browse import get_ml_market
        self.assertIsNone(get_ml_market({}))

    def test_get_ml_volume_with_ml(self):
        """Returns float volume from moneyline market."""
        from browse import get_ml_volume
        event = {
            "markets": [
                {"sportsMarketType": "moneyline", "volume": "123456"}
            ]
        }
        self.assertEqual(get_ml_volume(event), 123456.0)

    def test_get_ml_volume_no_ml(self):
        """Returns 0.0 when no moneyline market."""
        from browse import get_ml_volume
        self.assertEqual(get_ml_volume({"markets": []}), 0.0)
|
||||
|
||||
|
||||
class TestFilterEvents(unittest.TestCase):
    """Tests for filter_events() and sort_events()."""

    def _make_match(self, match_id, tradeable=True, vol="50000"):
        # Minimal match-style event: seriesSlug + gameId plus a " vs " title,
        # carrying a single moneyline market.
        moneyline = {
            "sportsMarketType": "moneyline",
            "volume": vol,
            "bestBid": "0.50",
            "bestAsk": "0.52",
            "acceptingOrders": tradeable,
            "closed": False,
        }
        return {
            "id": str(match_id),
            "title": f"Team A vs Team B - Match {match_id}",
            "seriesSlug": "test-league",
            "gameId": str(match_id),
            "markets": [moneyline],
        }

    def _make_non_match(self, event_id, tradeable=True):
        # Minimal non-match event (no seriesSlug/gameId, question-style title).
        moneyline = {
            "sportsMarketType": "moneyline",
            "volume": "10000",
            "bestBid": "0.50",
            "bestAsk": "0.52",
            "acceptingOrders": tradeable,
            "closed": False,
        }
        return {
            "id": f"nm{event_id}",
            "title": f"Will event {event_id} happen?",
            "markets": [moneyline],
        }

    def test_filter_events_splits_match_and_non_match(self):
        """Correctly splits events into match and non-match buckets."""
        from browse import filter_events
        pool = [
            self._make_match(1),
            self._make_non_match(1),
            self._make_match(2),
            self._make_non_match(2),
        ]
        matches, others = filter_events(pool, tradeable_only=False)
        self.assertEqual(len(matches), 2)
        self.assertEqual(len(others), 2)
        self.assertEqual(matches[0]["id"], "1")
        self.assertEqual(others[0]["id"], "nm1")

    def test_filter_events_tradeable_only(self):
        """tradeable_only=True filters out non-tradeable events."""
        from browse import filter_events
        pool = [
            self._make_match(1, tradeable=True),
            self._make_match(2, tradeable=False),
            self._make_non_match(1),
        ]
        matches, others = filter_events(pool, tradeable_only=True)
        self.assertEqual(len(matches), 1)
        self.assertEqual(matches[0]["id"], "1")
        self.assertEqual(len(others), 1)  # non-match with acceptingOrders=True passes

    def test_filter_events_tradeable_only_false(self):
        """tradeable_only=False keeps all events."""
        from browse import filter_events
        pool = [
            self._make_match(1, tradeable=True),
            self._make_match(2, tradeable=False),
            self._make_non_match(1, tradeable=True),
            self._make_non_match(2, tradeable=False),
        ]
        matches, others = filter_events(pool, tradeable_only=False)
        self.assertEqual(len(matches), 2)
        self.assertEqual(len(others), 2)

    def test_sort_events_by_volume_desc(self):
        """sort_events returns events sorted by volume descending."""
        from browse import sort_events
        ordered = sort_events([
            self._make_match(1, vol="10000"),
            self._make_match(2, vol="50000"),
            self._make_match(3, vol="30000"),
        ])
        # Highest volume first: 50000, 30000, 10000.
        self.assertEqual([e["id"] for e in ordered], ["2", "3", "1"])

    def test_sort_events_empty_list(self):
        """sort_events handles empty list gracefully."""
        from browse import sort_events
        self.assertEqual(sort_events([]), [])
|
||||
|
||||
|
||||
class TestFetchAllPages(unittest.TestCase):
    """Tests for fetch_all_pages() early-exit logic.

    fetch_all_pages stops paging once BOTH quotas (matches_max and
    non_matches_max) are satisfied; a stop before pagination is exhausted
    is reported via result["partial"] = True.
    """

    @patch('browse.fetch_page')
    @patch('browse.time.sleep')
    def test_early_exit_stops_when_both_quotas_met(self, mock_sleep, mock_fetch_page):
        """Stops fetching once both match and non-match quotas are satisfied."""
        from browse import fetch_all_pages

        # Page 1: 2 matches, 2 non-matches -> neither quota (3/3) met yet.
        page1 = {
            "events": [
                {"id": "m1", "title": "Match 1", "seriesSlug": "x", "gameId": "1", "markets": []},
                {"id": "m2", "title": "Match 2", "seriesSlug": "x", "gameId": "2", "markets": []},
                {"id": "n1", "title": "Non-match 1", "markets": []},
                {"id": "n2", "title": "Non-match 2", "markets": []},
            ],
            "pagination": {"totalResults": 10, "hasMore": True}
        }
        # Page 2: +2 matches, +2 non-matches -> totals 4/4, both quotas met.
        page2 = {
            "events": [
                {"id": "m3", "title": "Match 3", "seriesSlug": "x", "gameId": "3", "markets": []},
                {"id": "n3", "title": "Non-match 3", "markets": []},
                {"id": "m4", "title": "Match 4", "seriesSlug": "x", "gameId": "4", "markets": []},
                {"id": "n4", "title": "Non-match 4", "markets": []},
            ],
            "pagination": {"totalResults": 10, "hasMore": True}
        }
        # A third page must never be requested.
        mock_fetch_page.side_effect = [page1, page2]

        result = fetch_all_pages("test", matches_max=3, non_matches_max=3)

        # Quotas only become satisfied after page 2, so exactly two fetches,
        # and every fetched event is kept.
        # (BUG FIX: the previous version left stream-of-consciousness scratch
        # comments here and never asserted on `result`.)
        self.assertEqual(mock_fetch_page.call_count, 2)
        self.assertEqual(len(result["events"]), 8)

    @patch('browse.fetch_page')
    @patch('browse.time.sleep')
    def test_no_quota_fetches_all_pages(self, mock_sleep, mock_fetch_page):
        """Without quotas, fetches all pages until pagination ends."""
        from browse import fetch_all_pages

        page1 = {
            "events": [{"id": "e1", "title": "Event 1", "markets": []}],
            "pagination": {"totalResults": 3, "hasMore": True}
        }
        page2 = {
            "events": [{"id": "e2", "title": "Event 2", "markets": []}],
            "pagination": {"totalResults": 3, "hasMore": True}
        }
        page3 = {
            "events": [{"id": "e3", "title": "Event 3", "markets": []}],
            "pagination": {"totalResults": 3, "hasMore": False}
        }

        mock_fetch_page.side_effect = [page1, page2, page3]

        result = fetch_all_pages("test")

        self.assertEqual(mock_fetch_page.call_count, 3)
        self.assertEqual(len(result["events"]), 3)
        self.assertFalse(result["partial"])

    @patch('browse.fetch_page')
    @patch('browse.time.sleep')
    def test_early_exit_partial_true_when_stopped_early(self, mock_sleep, mock_fetch_page):
        """Returns partial=True when stopped early due to quota."""
        from browse import fetch_all_pages

        # BUG FIX: this test previously ended in a bare `pass` (plus leftover
        # "let me redo" notes) and verified nothing. It now builds a single
        # page that satisfies BOTH quotas (3 matches AND 3 non-matches) while
        # pagination still reports more results, and asserts the early exit.
        page1 = {
            "events": [
                {"id": "m1", "title": "Match 1", "seriesSlug": "x", "gameId": "1", "markets": []},
                {"id": "m2", "title": "Match 2", "seriesSlug": "x", "gameId": "2", "markets": []},
                {"id": "m3", "title": "Match 3", "seriesSlug": "x", "gameId": "3", "markets": []},
                {"id": "n1", "title": "Non-match 1", "markets": []},
                {"id": "n2", "title": "Non-match 2", "markets": []},
                {"id": "n3", "title": "Non-match 3", "markets": []},
            ],
            "pagination": {"totalResults": 100, "hasMore": True}
        }
        mock_fetch_page.return_value = page1

        result = fetch_all_pages("test", matches_max=3, non_matches_max=3)

        # Stopped after the first page even though hasMore=True -> partial.
        self.assertEqual(mock_fetch_page.call_count, 1)
        self.assertTrue(result["partial"])

    @patch('browse.fetch_page')
    @patch('browse.time.sleep')
    def test_quota_one_side_only_keeps_fetching(self, mock_sleep, mock_fetch_page):
        """If only one quota is met, keeps fetching."""
        from browse import fetch_all_pages

        # Page 1: 3 matches, 0 non-matches (matches quota met, non_matches NOT met)
        page1 = {
            "events": [
                {"id": "m1", "title": "Match 1", "seriesSlug": "x", "gameId": "1", "markets": []},
                {"id": "m2", "title": "Match 2", "seriesSlug": "x", "gameId": "2", "markets": []},
                {"id": "m3", "title": "Match 3", "seriesSlug": "x", "gameId": "3", "markets": []},
            ],
            "pagination": {"totalResults": 10, "hasMore": True}
        }
        # Page 2: 0 matches, 3 non-matches (now both quotas met)
        page2 = {
            "events": [
                {"id": "n1", "title": "Non-match 1", "markets": []},
                {"id": "n2", "title": "Non-match 2", "markets": []},
                {"id": "n3", "title": "Non-match 3", "markets": []},
            ],
            "pagination": {"totalResults": 10, "hasMore": True}
        }

        mock_fetch_page.side_effect = [page1, page2]

        result = fetch_all_pages("test", matches_max=3, non_matches_max=3)

        self.assertEqual(mock_fetch_page.call_count, 2)
        self.assertEqual(len(result["events"]), 6)
|
||||
|
||||
|
||||
class TestBrowseEvents(unittest.TestCase):
    """Tests for browse_events() with sort_by parameter."""

    @patch('browse.fetch_all_pages')
    def test_browse_events_early_exit_sort_by_none(self, mock_fetch):
        """sort_by=None uses early-exit: passes quotas to fetch_all_pages."""
        from browse import browse_events

        mock_fetch.return_value = {
            "events": [
                {"id": "m1", "title": "Match 1", "seriesSlug": "x", "gameId": "1",
                 "markets": [{"sportsMarketType": "moneyline", "volume": "50000"}]},
            ],
            "total_raw": 1,
            "partial": False,
        }

        result = browse_events("test query", matches_max=5, non_matches_max=5, sort_by=None)

        # Should pass quotas to fetch_all_pages for early-exit
        mock_fetch.assert_called_once()
        # call_args[1] is the kwargs dict of the recorded call.
        call_kwargs = mock_fetch.call_args
        self.assertEqual(call_kwargs[1]["matches_max"], 5)
        self.assertEqual(call_kwargs[1]["non_matches_max"], 5)

    @patch('browse.fetch_all_pages')
    def test_browse_events_volume_sort_full_fetch(self, mock_fetch):
        """sort_by='volume' does full fetch (no quotas passed)."""
        from browse import browse_events

        mock_fetch.return_value = {
            "events": [
                {"id": "m1", "title": "Match 1", "seriesSlug": "x", "gameId": "1",
                 "markets": [{"sportsMarketType": "moneyline", "volume": "10000"}]},
                {"id": "m2", "title": "Match 2", "seriesSlug": "x", "gameId": "2",
                 "markets": [{"sportsMarketType": "moneyline", "volume": "50000"}]},
            ],
            "total_raw": 2,
            "partial": False,
        }

        result = browse_events("test query", matches_max=5, non_matches_max=5, sort_by="volume")

        # Should pass None quotas to fetch_all_pages (full fetch)
        call_kwargs = mock_fetch.call_args
        self.assertIsNone(call_kwargs[1]["matches_max"])
        self.assertIsNone(call_kwargs[1]["non_matches_max"])

    @patch('browse.fetch_all_pages')
    def test_browse_events_volume_sort_sorts_by_volume(self, mock_fetch):
        """sort_by='volume' sorts match events by volume descending."""
        from browse import browse_events

        # Three tradeable match events with distinct moneyline volumes.
        mock_fetch.return_value = {
            "events": [
                {"id": "m1", "title": "Match Low", "seriesSlug": "x", "gameId": "1",
                 "markets": [{"sportsMarketType": "moneyline", "volume": "10000",
                              "bestBid": "0.50", "bestAsk": "0.52",
                              "acceptingOrders": True, "closed": False}]},
                {"id": "m2", "title": "Match High", "seriesSlug": "x", "gameId": "2",
                 "markets": [{"sportsMarketType": "moneyline", "volume": "90000",
                              "bestBid": "0.50", "bestAsk": "0.52",
                              "acceptingOrders": True, "closed": False}]},
                {"id": "m3", "title": "Match Mid", "seriesSlug": "x", "gameId": "3",
                 "markets": [{"sportsMarketType": "moneyline", "volume": "50000",
                              "bestBid": "0.50", "bestAsk": "0.52",
                              "acceptingOrders": True, "closed": False}]},
            ],
            "total_raw": 3,
            "partial": False,
        }

        result = browse_events("test", matches_max=10, non_matches_max=10, sort_by="volume")

        # Highest volume first
        self.assertEqual(result["match_events"][0]["id"], "m2")  # vol=90000
        self.assertEqual(result["match_events"][1]["id"], "m3")  # vol=50000
        self.assertEqual(result["match_events"][2]["id"], "m1")  # vol=10000

    @patch('browse.fetch_all_pages')
    def test_browse_events_api_order_preserved_when_no_sort(self, mock_fetch):
        """sort_by=None preserves API order (no sort applied)."""
        from browse import browse_events

        mock_fetch.return_value = {
            "events": [
                {"id": "m1", "title": "Match First", "seriesSlug": "x", "gameId": "1",
                 "markets": [{"sportsMarketType": "moneyline", "volume": "1",
                              "bestBid": "0.50", "bestAsk": "0.52",
                              "acceptingOrders": True, "closed": False}]},
                {"id": "m2", "title": "Match Second", "seriesSlug": "x", "gameId": "2",
                 "markets": [{"sportsMarketType": "moneyline", "volume": "999999",
                              "bestBid": "0.50", "bestAsk": "0.52",
                              "acceptingOrders": True, "closed": False}]},
            ],
            "total_raw": 2,
            "partial": False,
        }

        result = browse_events("test", matches_max=10, sort_by=None)

        # API order preserved: m1 first even though m2 has higher volume
        self.assertEqual(result["match_events"][0]["id"], "m1")
        self.assertEqual(result["match_events"][1]["id"], "m2")

    @patch('browse.fetch_all_pages')
    def test_browse_events_returns_all_required_fields(self, mock_fetch):
        """Result dict contains all required fields."""
        from browse import browse_events

        # Empty fetch result: the contract fields must still all be present.
        mock_fetch.return_value = {
            "events": [],
            "total_raw": 0,
            "partial": False,
        }

        result = browse_events("test")

        self.assertIn("query", result)
        self.assertIn("total_raw", result)
        self.assertIn("total_fetched", result)
        self.assertIn("total_match", result)
        self.assertIn("total_non_match", result)
        self.assertIn("match_events", result)
        self.assertIn("non_match_events", result)
        self.assertIn("partial", result)
|
||||
|
||||
|
||||
# Allow running this test module directly (e.g. `python test_browse.py`).
if __name__ == "__main__":
    unittest.main()
|
||||
|
||||