Compare commits

19 Commits

feat/add-t...aef5f79dad

| SHA1 |
|---|
| aef5f79dad |
| b4148570f4 |
| 0a1aab7883 |
| dfad8d3072 |
| cc197b0c7e |
| c0f008ab8b |
| 53c268511a |
| 7f3b885521 |
| e0ae6bbd14 |
| f468a12ea4 |
| c75d123dfd |
| 9d1e328f53 |
| 09f3cb9066 |
| 1ae60f5661 |
| bab373ab8f |
| eafbdba4a5 |
| 0a7911653b |
| bae69043f8 |
| d6c0342c0f |

@@ -1,5 +1,6 @@
 ---
 name: polymarket-browse
 version: 0.0.2
+category: research
 description: Browse tradeable Polymarket events by game category. Shows active matches with ML odds (cents format), volume, tournament, and market URLs. Supports Counter Strike, League of Legends, Dota 2, Valorant, NBA, NFL, UFC, Tennis.
 ---

@@ -34,7 +35,7 @@ hermes mcp add polymarket https://docs.polymarket.com/mcp
 
 ## Usage
 
 ```
-polymarket-browse [--category "Counter Strike"] [--limit 5] [--matches N] [--non-matches N] [--search "TeamName"] [--matches-only] [--non-matches-only] [--detail N] [--raw] [--telegram]
+polymarket-browse [--category "Counter Strike"] [--limit 5] [--matches N] [--non-matches N] [--search "TeamName"] [--matches-only] [--non-matches-only] [--detail N] [--raw] [--telegram] [--no-cache] [--max-total N] [--starts-before TIMESTAMP] [--timezone UTC+X]
 ```
 
 ## Arguments
 
@@ -49,6 +50,10 @@ polymarket-browse [--category "Counter Strike"] [--limit 5] [--matches N] [--non
 - `--detail` : Index of match event (1-indexed) to show detailed markets. Default: 1. Set to 0 to disable.
 - `--list-categories` : List available game categories and exit
 - `--raw` : Show all events without tradeable filter (for debugging). Includes fetch stats.
+- `--no-cache` : Disable caching and fetch fresh data from the API.
+- `--max-total` : Maximum total events to fetch before early exit. Default: no limit. Useful for quick snapshots.
+- `--starts-before` : Unix timestamp filter. Only show match events starting before this time (LIVE events always shown regardless of timestamp).
+- `--timezone` : Timezone for displaying times. Format: `UTC+X` or `UTC-X` (e.g., `UTC+7`, `UTC-5`). Default: UTC+7 (WIB).
 - `--telegram` : Send results to Telegram. Requires `BOT_TOKEN` and `CHAT_ID` in environment variables.
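
For example, the new flags can be combined to take a quick, fresh snapshot of matches starting before a given time (the Unix timestamp below is illustrative):

```
polymarket-browse --category "Counter Strike" --no-cache --max-total 100 --starts-before 1735689600 --timezone UTC+7
```
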
 
 ## Output Format
 
@@ -118,13 +123,32 @@ Use `--raw` to disable the tradeable filter and see all match markets regardless
 
 ## Pagination
 
-The script fetches **ALL pages** until the API runs out of results (up to 100 pages as a safety cap).
+The script fetches **ALL pages** until the API runs out of results.
 
+### Parallel Fetching
+
+Pages are fetched in **parallel batches of 5** using ThreadPoolExecutor. This significantly reduces fetch time:
+
+| Scenario | Without Parallelization | With Parallelization |
+|----------|------------------------|---------------------|
+| 10 pages (50 events) | ~20s (2s per page × 10) | ~4s (2s per batch × 2 batches) |
+| 20 pages (100 events) | ~40s | ~8s |
+
+The script first fetches page 1 to determine total pages, then fetches remaining pages in parallel batches of 5.
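
The batching code lands later in this diff (see `fetch_all_pages`); as a standalone sketch of the same pattern, assuming `fetch_page(q, page)` is the script's single-page HTTP helper:

```
from concurrent.futures import ThreadPoolExecutor, as_completed

def fetch_remaining_pages(q, total_pages, max_workers=5):
    # Page 1 was already fetched to learn total_pages; fetch pages 2..N
    # with at most max_workers requests in flight at once.
    results = {}
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        futures = {executor.submit(fetch_page, q, p): p for p in range(2, total_pages + 1)}
        for future in as_completed(futures):
            data = future.result()
            if data is not None:
                results[futures[future]] = data
    return results
```
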
 
 ## Rate Limiting
 
 - Exponential backoff: 2s → 4s → 8s → 16s → 32s
 - Max 5 retries before aborting
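
The retry loop itself is outside this diff (the `fetch_page` body is not shown), but the documented schedule is a standard doubling backoff. A minimal sketch, with `do_request` as a hypothetical stand-in for the real HTTP call:

```
import time

MAX_RETRIES = 5
INITIAL_RETRY_DELAY = 2  # seconds

def fetch_with_backoff(url):
    delay = INITIAL_RETRY_DELAY
    for _ in range(MAX_RETRIES):
        result = do_request(url)  # hypothetical HTTP helper
        if result is not None:
            return result
        time.sleep(delay)  # 2s, 4s, 8s, 16s, 32s
        delay *= 2
    return None  # abort after MAX_RETRIES
```
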
 
+## Caching
+
+Results are cached in `~/.cache/polymarket-browse/` with a **5-minute TTL** to reduce redundant API calls.
+
+- Use `--no-cache` to bypass the cache and fetch fresh data
+- Cached data is automatically used when available and not expired
+- Useful when running the script repeatedly (e.g., for monitoring)
 
 ## Odds Format
 
 All odds are shown in **cents** format:
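
The example lines that follow this colon fall outside the diff context. Purely as an illustration, assuming the script's `format_odds` renders a 0–1 price as whole cents:

```
def format_odds_sketch(price):
    # Assumed behavior: a price of 0.62 displays as 62¢.
    return f"{round(price * 100)}¢"

print(format_odds_sketch(0.62))  # 62¢
print(format_odds_sketch(0.38))  # 38¢
```
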
@@ -8,6 +8,9 @@ import html
 import json
 import time
 import argparse
+import hashlib
+import os
+from concurrent.futures import ThreadPoolExecutor, as_completed
 from datetime import datetime, timezone, timedelta
 from typing import Any, Callable, TypedDict
 from urllib.parse import urlencode
@@ -95,6 +98,45 @@ PAGE_SIZE = 50
 MAX_RETRIES = 5
 INITIAL_RETRY_DELAY = 2  # exponential backoff starts at 2s
 WIB = timezone(timedelta(hours=7))  # UTC+7 for Indonesian users
+_DISPLAY_TZ = WIB  # Module-level timezone for display (configurable via --timezone)
+
+
+def parse_timezone(tz_str: str) -> timezone:
+    """
+    Parse timezone string to datetime.timezone.
+    Supports: UTC offset format (UTC+7, UTC-5).
+    Falls back to WIB (UTC+7) on parse failure.
+    """
+    tz_str = tz_str.strip()
+    if tz_str.startswith("UTC"):
+        offset_str = tz_str[3:].strip()
+        if not offset_str:
+            return timezone.utc
+        sign = -1 if offset_str[0] == "-" else 1
+        if offset_str[0] in "+-":
+            offset_str = offset_str[1:]
+        try:
+            if ":" in offset_str:
+                hours, minutes = offset_str.split(":")
+                hours = int(hours)
+                minutes = int(minutes)
+            else:
+                hours = int(offset_str)
+                minutes = 0
+            total_minutes = hours * 60 + minutes
+            if sign == -1:
+                total_minutes = -total_minutes
+            return timezone(timedelta(minutes=total_minutes))
+        except ValueError:
+            return WIB
+        return WIB
+    try:
+        from zoneinfo import ZoneInfo  # stdlib (Python 3.9+); ZoneInfo is not in datetime
+        return timezone(datetime.now(ZoneInfo(tz_str)).utcoffset())
+    except Exception:
+        return WIB
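
A quick sanity check of the parsing rules above (not part of the diff; assumes `parse_timezone` is in scope):

```
print(parse_timezone("UTC+7"))     # timezone(timedelta(hours=7)), i.e. WIB
print(parse_timezone("UTC-5:30"))  # timezone(timedelta(hours=-5, minutes=-30))
print(parse_timezone("UTC"))       # timezone.utc
print(parse_timezone("bogus"))     # no ZoneInfo match -> falls back to WIB
```
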
 
 
 GAME_CATEGORIES = {
     "All Esports": "Esports",
@@ -108,6 +150,48 @@ GAME_CATEGORIES = {
     "Tennis": "Tennis",
 }
 
+CACHE_DIR = os.path.join(os.path.expanduser("~"), ".cache", "polymarket-browse")
+CACHE_TTL = 300  # 5 minutes default
+MAX_PARALLEL_FETCHES = 5
+
+# ============================================================
+# CACHE
+# ============================================================
+
+
+def _get_cache_key(q: str) -> str:
+    return hashlib.md5(q.encode()).hexdigest()
+
+
+def _get_cache_path(q: str) -> str:
+    os.makedirs(CACHE_DIR, exist_ok=True)
+    return os.path.join(CACHE_DIR, f"{_get_cache_key(q)}.json")
+
+
+def _read_cache(q: str) -> dict[str, Any] | None:
+    cache_path = _get_cache_path(q)
+    if not os.path.exists(cache_path):
+        return None
+    try:
+        mtime = os.path.getmtime(cache_path)
+        age = time.time() - mtime
+        if age > CACHE_TTL:
+            return None
+        with open(cache_path) as f:
+            return json.load(f)
+    except Exception:
+        return None
+
+
+def _write_cache(q: str, data: dict[str, Any]) -> None:
+    try:
+        cache_path = _get_cache_path(q)
+        with open(cache_path, "w") as f:
+            json.dump(data, f)
+    except Exception:
+        pass
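
Round-tripping the helpers above (illustrative, not part of the diff):

```
_write_cache("Counter Strike", {"events": [], "total_raw": 0})
print(_read_cache("Counter Strike"))  # {'events': [], 'total_raw': 0} while younger than CACHE_TTL
# Once the file's mtime is more than 300s old, _read_cache returns None and callers refetch.
```
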
 
 
 # ============================================================
 # FETCH
 # ============================================================
@@ -142,8 +226,16 @@ def fetch_page(
         return None
 
 
+def _fetch_page_with_index(q: str, page: int) -> tuple[int, dict[str, Any] | None]:
+    return page, fetch_page(q, page)
+
+
 def fetch_all_pages(
-    q: str, matches_max: int | None = None, non_matches_max: int | None = None
+    q: str,
+    matches_max: int | None = None,
+    non_matches_max: int | None = None,
+    max_total: int | None = None,
+    use_cache: bool = True,
 ) -> FetchResult:
     """
     Fetch pages until pagination ends, or until quotas are satisfied.
@@ -152,46 +244,94 @@ def fetch_all_pages(
         q: search query
         matches_max: stop early once we have this many match events (None = no limit)
         non_matches_max: stop early once we have this many non-match events (None = no limit)
+        max_total: stop early once we have this many total events (None = no limit)
+        use_cache: whether to use cache (default True)
 
     Returns:
         FetchResult with events, total_raw, and partial flag
     """
-    all_events = []
+    cached = _read_cache(q) if use_cache else None
+    if cached is not None:
+        events = cached.get("events", [])
+        total_raw = cached.get("total_raw", 0)
+        if events:
+            return {"events": events, "total_raw": total_raw, "partial": False}
+
     total_raw = 0
-    match_count = 0
-    non_match_count = 0
-    page = 0
+    page_count = 0
+    page1_data = None
 
     while True:
-        page += 1
-        time.sleep(0.2)
-        data = fetch_page(q, page)
+        page_count += 1
+        data = fetch_page(q, page_count)
         if data is None:
             break
         events = data.get("events", [])
         total_raw = data.get("pagination", {}).get("totalResults", 0)
-        all_events.extend(events)
+        if page_count == 1:
+            page1_data = data
+            if total_raw > 0:
+                break
+        if not data.get("events"):
+            break
 
-        # Count matches/non-matches in this page
-        for e in events:
-            if is_match_market(e):
-                match_count += 1
-            else:
-                non_match_count += 1
+    if total_raw == 0 or page1_data is None:
+        return {"events": [], "total_raw": 0, "partial": False}
+
+    page1_events = page1_data.get("events", [])
+    actual_page_size = len(page1_events)
+
+    # Use actual events per page from API for ceiling division
+    # ceil(total_raw / actual_page_size) = (total_raw + actual_page_size - 1) // actual_page_size
+    total_pages = (total_raw + actual_page_size - 1) // actual_page_size
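+    # e.g. total_raw = 103 with 50 events per page -> (103 + 49) // 50 = 3 pages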
+    concurrency = min(MAX_PARALLEL_FETCHES, total_pages)
+
+    all_page_data: dict[int, list[Any]] = {1: page1_events}
+
+    if total_pages > 1:
+        with ThreadPoolExecutor(max_workers=concurrency) as executor:
+            futures = {
+                executor.submit(_fetch_page_with_index, q, page): page
+                for page in range(2, total_pages + 1)
+            }
+            for future in as_completed(futures):
+                try:
+                    page_num, data = future.result()
+                    if data is not None:
+                        all_page_data[page_num] = data.get("events", [])
+                except Exception:
+                    pass
+
+    all_events = []
+    for page_num in sorted(all_page_data.keys()):
+        all_events.extend(all_page_data[page_num])
+
+    _write_cache(q, {"events": all_events, "total_raw": total_raw})
+
+    match_count = 0
+    non_match_count = 0
+    filtered_events = []
+    total_seen = 0
+
+    for e in all_events:
+        is_match = is_match_market(e)
+        if is_match:
+            match_count += 1
+        else:
+            non_match_count += 1
+
+        filtered_events.append(e)
 
         # Stop if we got what we wanted (only when caps are set)
         if matches_max is not None and non_matches_max is not None:
             if match_count >= matches_max and non_match_count >= non_matches_max:
                 break
 
-        # Stop when we get 0 events (no more pages)
-        if len(events) == 0:
-            break
-        # Stop when we've fetched all known results
-        if len(all_events) >= total_raw:
-            break
+        if max_total is not None:
+            total_seen += 1
+            if total_seen >= max_total:
+                break
 
-    partial = total_raw > 0 and len(all_events) < total_raw
-    return {"events": all_events, "total_raw": total_raw, "partial": partial}
+    partial = len(all_events) < total_raw
+    return {"events": filtered_events, "total_raw": total_raw, "partial": partial}
 
 
 # ============================================================
@@ -352,12 +492,12 @@ def _get_time_data(e: dict[str, Any], tz: timezone | None = None) -> TimeData:
     Args:
         e: Event dict with 'startTime' or 'startDate' key.
         tz: datetime.timezone for abs_time formatting.
-            Defaults to WIB (UTC+7).
+            Defaults to _DISPLAY_TZ (set via --timezone, or WIB).
 
     Returns:
         TimeData with time_status, time_urgency, and abs_time
     """
-    tz = tz or WIB
+    tz = tz or _DISPLAY_TZ
     start_str = e.get("startTime") or e.get("startDate", "")
 
     if not start_str:
@@ -369,8 +509,8 @@ def _get_time_data(e: dict[str, Any], tz: timezone | None = None) -> TimeData:
     delta = start_dt - now_utc
     total_sec = delta.total_seconds()
 
-    if total_sec < 0:
-        # Event is in the past
+    if total_sec <= 0:
+        # Event is in the past or happening now
         hours_ago = abs(total_sec) / 3600
         if hours_ago < 1:
             time_status = "LIVE"
@@ -443,12 +583,56 @@ def sort_events(events: list[dict[str, Any]]) -> list[dict[str, Any]]:
 # ============================================================
 
 
+def _is_live_event(e: dict[str, Any]) -> bool:
+    """Check if event is LIVE (started within last 4 hours)."""
+    start_str = e.get("startTime") or e.get("startDate", "")
+    if not start_str:
+        return False
+    try:
+        start_dt = datetime.fromisoformat(start_str.replace("Z", "+00:00"))
+        now = datetime.now(timezone.utc)
+        delta = now - start_dt
+        if delta.total_seconds() < 0:
+            return False
+        hours_ago = delta.total_seconds() / 3600
+        return hours_ago < 4
+    except Exception:
+        return False
+
+
+def filter_by_starts_before(
+    events: list[dict[str, Any]], timestamp: int | None
+) -> list[dict[str, Any]]:
+    """Filter events to only include those starting before timestamp or LIVE events."""
+    if timestamp is None:
+        return events
+    filtered = []
+    for e in events:
+        start_str = e.get("startTime") or e.get("startDate", "")
+        if not start_str:
+            filtered.append(e)
+            continue
+        try:
+            start_dt = datetime.fromisoformat(start_str.replace("Z", "+00:00"))
+            start_ts = start_dt.timestamp()
+            if start_ts <= timestamp:
+                filtered.append(e)
+            elif _is_live_event(e):
+                filtered.append(e)
+        except Exception:
+            filtered.append(e)
+    return filtered
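
For instance, with the filter above (payloads trimmed to the one key it reads; values invented for the example):

```
cutoff = 1735689600  # illustrative Unix timestamp (2025-01-01 00:00 UTC)
events = [
    {"startTime": "2024-12-31T12:00:00Z"},  # starts before cutoff -> kept
    {"startTime": "2025-01-07T12:00:00Z"},  # after cutoff -> kept only if LIVE
    {},                                     # no start time -> kept
]
print(len(filter_by_starts_before(events, cutoff)))  # 2, unless the second event is LIVE at run time
```
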
 
 
 def browse_events(
     q: str,
     matches_max: int = 10,
     non_matches_max: int = 10,
     tradeable_only: bool = True,
     sort_by: str | None = None,
+    max_total: int | None = None,
+    use_cache: bool = True,
+    starts_before: int | None = None,
 ) -> BrowseResult:
     """
     Browse Polymarket events.
@@ -459,20 +643,26 @@ def browse_events(
         non_matches_max: max number of non-match markets to return
         tradeable_only: filter to tradeable events only
         sort_by: None (fast, API order) or "volume" (full fetch, sort by volume desc)
+        max_total: max total events to fetch before early exit (None = no limit)
+        use_cache: whether to use cache (default True)
+        starts_before: unix timestamp filter for match events (None = no filter)
     """
     # Pass quotas to fetch_all_pages for early-exit optimization.
     # Only use early-exit when sort_by is None (no client-side sort needed).
     use_early_exit = sort_by is None
     fetch_matches_max = matches_max if use_early_exit else None
     fetch_non_matches_max = non_matches_max if use_early_exit else None
 
     result = fetch_all_pages(
-        q, matches_max=fetch_matches_max, non_matches_max=fetch_non_matches_max
+        q,
+        matches_max=fetch_matches_max,
+        non_matches_max=fetch_non_matches_max,
+        max_total=max_total,
+        use_cache=use_cache,
     )
     events = result["events"]
     match_events, non_match_events = filter_events(events, tradeable_only)
 
-    # Sort if requested; otherwise preserve API order
+    match_events = filter_by_starts_before(match_events, starts_before)
 
     if sort_by == "volume":
         match_events = sort_events(match_events)
         non_match_events = sort_events(non_match_events)
@@ -712,11 +902,10 @@ def format_detail_event(e: dict[str, Any]) -> DetailEvent:
 
 
 def get_header_date() -> str:
-    """Return current date string like 'Mar 25, 2026'"""
+    """Return current date string like 'Mar 25, 2026' in display timezone."""
     now_utc = datetime.now(timezone.utc)
-    utc7 = timezone(timedelta(hours=7))
-    now_utc7 = now_utc.astimezone(utc7)
-    return now_utc7.strftime("%b %d, %Y")
+    now_display = now_utc.astimezone(_DISPLAY_TZ)
+    return now_display.strftime("%b %d, %Y")
 
 
 def get_tournament(title: str) -> str:
@@ -802,7 +991,9 @@ def print_detail(e: dict[str, Any], detail: DetailEvent) -> None:
     )
     print(f"\n{detail['time_status']}")
     print(
-        f"ML: {detail['outcomes'][0]} {format_odds(float(detail['prices'][0]))} vs {detail['outcomes'][1]} {format_odds(float(detail['prices'][1]))}"
+        f"ML: {detail['outcomes'][0]} "
+        f"{format_odds(float(detail['prices'][0]))} vs "
+        f"{detail['outcomes'][1]} {format_odds(float(detail['prices'][1]))}"
     )
     print(f"ML Vol: ${detail['volume']:,.0f} | {spread_str}")
@@ -815,7 +1006,9 @@ def print_detail(e: dict[str, Any], detail: DetailEvent) -> None:
     )
     print(f" [{m['type']}]")
     print(
-        f" {m['outcomes'][0]} {format_odds(float(m['prices'][0]))} vs {m['outcomes'][1]} {format_odds(float(m['prices'][1]))}"
+        f" {m['outcomes'][0]} "
+        f"{format_odds(float(m['prices'][0]))} vs "
+        f"{m['outcomes'][1]} {format_odds(float(m['prices'][1]))}"
     )
     print(f" Vol: ${m['volume']:,.0f} | {spread_str}")
     print(f" URL: {m['url']}")
@@ -1052,6 +1245,29 @@ def main() -> None:
         action="store_true",
         help="Show all events without tradeable filter (for debugging).",
     )
+    parser.add_argument(
+        "--no-cache",
+        action="store_true",
+        help="Disable cache and fetch fresh data from API.",
+    )
+    parser.add_argument(
+        "--max-total",
+        type=int,
+        default=None,
+        help="Max total events to fetch before early exit. Default: no limit.",
+    )
+    parser.add_argument(
+        "--starts-before",
+        type=int,
+        default=None,
+        help="Unix timestamp filter. Only show match events starting before this time (LIVE events always shown).",
+    )
+    parser.add_argument(
+        "--timezone",
+        type=str,
+        default="UTC+7",
+        help="Timezone for displaying times (e.g., UTC+7, UTC-5). Default: UTC+7",
+    )
     parser.add_argument(
         "--telegram",
         action="store_true",
@@ -1071,6 +1287,9 @@ def main() -> None:
     matches_max = args.matches if args.matches is not None else args.limit
     non_matches_max = args.non_matches if args.non_matches is not None else args.limit
 
+    global _DISPLAY_TZ
+    _DISPLAY_TZ = parse_timezone(args.timezone)
+
     if args.search:
         print(f"\nFetching {args.category} events matching '{args.search}'...")
     else:
@@ -1081,6 +1300,9 @@ def main() -> None:
         matches_max=matches_max,
         non_matches_max=non_matches_max,
         tradeable_only=tradeable_only,
+        max_total=args.max_total,
+        use_cache=not args.no_cache,
+        starts_before=args.starts_before,
     )
 
     print_browse(