Compare commits
10 Commits
feat/paral
...
aef5f79dad
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
aef5f79dad | ||
|
|
b4148570f4 | ||
|
|
0a1aab7883 | ||
|
|
dfad8d3072 | ||
|
|
cc197b0c7e | ||
| c0f008ab8b | |||
|
|
53c268511a | ||
|
|
7f3b885521 | ||
|
|
e0ae6bbd14 | ||
| f468a12ea4 |
@@ -1,5 +1,6 @@
|
|||||||
---
|
---
|
||||||
name: polymarket-browse
|
name: polymarket-browse
|
||||||
|
version: 0.0.2
|
||||||
category: research
|
category: research
|
||||||
description: Browse tradeable Polymarket events by game category. Shows active matches with ML odds (cents format), volume, tournament, and market URLs. Supports Counter Strike, League of Legends, Dota 2, Valorant, NBA, NFL, UFC, Tennis.
|
description: Browse tradeable Polymarket events by game category. Shows active matches with ML odds (cents format), volume, tournament, and market URLs. Supports Counter Strike, League of Legends, Dota 2, Valorant, NBA, NFL, UFC, Tennis.
|
||||||
---
|
---
|
||||||
@@ -34,7 +35,7 @@ hermes mcp add polymarket https://docs.polymarket.com/mcp
|
|||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
```
|
```
|
||||||
polymarket-browse [--category "Counter Strike"] [--limit 5] [--matches N] [--non-matches N] [--search "TeamName"] [--matches-only] [--non-matches-only] [--detail N] [--raw] [--telegram] [--no-cache] [--max-total N]
|
polymarket-browse [--category "Counter Strike"] [--limit 5] [--matches N] [--non-matches N] [--search "TeamName"] [--matches-only] [--non-matches-only] [--detail N] [--raw] [--telegram] [--no-cache] [--max-total N] [--starts-before TIMESTAMP] [--timezone UTC+X]
|
||||||
```
|
```
|
||||||
|
|
||||||
## Arguments
|
## Arguments
|
||||||
@@ -51,6 +52,8 @@ polymarket-browse [--category "Counter Strike"] [--limit 5] [--matches N] [--non
|
|||||||
- `--raw` : Show all events without tradeable filter (for debugging). Includes fetch stats.
|
- `--raw` : Show all events without tradeable filter (for debugging). Includes fetch stats.
|
||||||
- `--no-cache` : Disable caching and fetch fresh data from the API.
|
- `--no-cache` : Disable caching and fetch fresh data from the API.
|
||||||
- `--max-total` : Maximum total events to fetch before early exit. Default: no limit. Useful for quick snapshots.
|
- `--max-total` : Maximum total events to fetch before early exit. Default: no limit. Useful for quick snapshots.
|
||||||
|
- `--starts-before` : Unix timestamp filter. Only show match events starting before this time (LIVE events always shown regardless of timestamp).
|
||||||
|
- `--timezone` : Timezone for displaying times. Format: `UTC+X` or `UTC-X` (e.g., `UTC+7`, `UTC-5`). Default: UTC+7 (WIB).
|
||||||
- `--telegram` : Send results to Telegram. Requires `BOT_TOKEN` and `CHAT_ID` in environment variables.
|
- `--telegram` : Send results to Telegram. Requires `BOT_TOKEN` and `CHAT_ID` in environment variables.
|
||||||
|
|
||||||
## Output Format
|
## Output Format
|
||||||
@@ -120,7 +123,7 @@ Use `--raw` to disable the tradeable filter and see all match markets regardless
|
|||||||
|
|
||||||
## Pagination
|
## Pagination
|
||||||
|
|
||||||
The script fetches **ALL pages** until the API runs out of results (up to 100 pages as a safety cap).
|
The script fetches **ALL pages** until the API runs out of results.
|
||||||
|
|
||||||
### Parallel Fetching
|
### Parallel Fetching
|
||||||
|
|
||||||
|
|||||||
@@ -98,6 +98,45 @@ PAGE_SIZE = 50
|
|||||||
MAX_RETRIES = 5
|
MAX_RETRIES = 5
|
||||||
INITIAL_RETRY_DELAY = 2 # exponential backoff starts at 2s
|
INITIAL_RETRY_DELAY = 2 # exponential backoff starts at 2s
|
||||||
WIB = timezone(timedelta(hours=7)) # UTC+7 for Indonesian users
|
WIB = timezone(timedelta(hours=7)) # UTC+7 for Indonesian users
|
||||||
|
_DISPLAY_TZ = WIB # Module-level timezone for display (configurable via --timezone)
|
||||||
|
|
||||||
|
|
||||||
|
def parse_timezone(tz_str: str) -> timezone:
    """
    Parse a timezone string into a fixed-offset ``datetime.timezone``.

    Supports the UTC offset format only: ``UTC``, ``UTC+7``, ``UTC-5``,
    ``UTC+5:30`` (hours with optional ``:minutes``).

    Args:
        tz_str: Timezone string, e.g. ``"UTC+7"``. Surrounding whitespace
            is ignored.

    Returns:
        A fixed-offset timezone. Falls back to WIB (UTC+7) when the string
        is not in UTC-offset form or the offset fails to parse.
    """
    # NOTE: the previous version carried an unreachable block after the
    # final `return WIB` that did `from datetime import ZoneInfo` (wrong
    # module — ZoneInfo lives in `zoneinfo`) and returned a timedelta
    # instead of a tzinfo. That dead, broken tail has been removed; the
    # reachable behavior is unchanged.
    tz_str = tz_str.strip()
    if not tz_str.startswith("UTC"):
        return WIB
    offset_str = tz_str[3:].strip()
    if not offset_str:
        # Bare "UTC" means zero offset.
        return timezone.utc
    sign = -1 if offset_str[0] == "-" else 1
    if offset_str[0] in "+-":
        offset_str = offset_str[1:]
    try:
        if ":" in offset_str:
            hours_part, minutes_part = offset_str.split(":")
            hours = int(hours_part)
            minutes = int(minutes_part)
        else:
            hours = int(offset_str)
            minutes = 0
        total_minutes = hours * 60 + minutes
        if sign == -1:
            total_minutes = -total_minutes
        # timezone() raises ValueError for offsets beyond +/-24h; that is
        # deliberately caught below so absurd offsets fall back to WIB.
        return timezone(timedelta(minutes=total_minutes))
    except ValueError:
        # Malformed offset (e.g. "UTC+abc") -> default display timezone.
        return WIB
|
||||||
|
|
||||||
|
|
||||||
GAME_CATEGORIES = {
|
GAME_CATEGORIES = {
|
||||||
"All Esports": "Esports",
|
"All Esports": "Esports",
|
||||||
@@ -453,12 +492,12 @@ def _get_time_data(e: dict[str, Any], tz: timezone | None = None) -> TimeData:
|
|||||||
Args:
|
Args:
|
||||||
e: Event dict with 'startTime' or 'startDate' key.
|
e: Event dict with 'startTime' or 'startDate' key.
|
||||||
tz: datetime.timezone for abs_time formatting.
|
tz: datetime.timezone for abs_time formatting.
|
||||||
Defaults to WIB (UTC+7).
|
Defaults to _DISPLAY_TZ (set via --timezone, or WIB).
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
TimeData with time_status, time_urgency, and abs_time
|
TimeData with time_status, time_urgency, and abs_time
|
||||||
"""
|
"""
|
||||||
tz = tz or WIB
|
tz = tz or _DISPLAY_TZ
|
||||||
start_str = e.get("startTime") or e.get("startDate", "")
|
start_str = e.get("startTime") or e.get("startDate", "")
|
||||||
|
|
||||||
if not start_str:
|
if not start_str:
|
||||||
@@ -470,8 +509,8 @@ def _get_time_data(e: dict[str, Any], tz: timezone | None = None) -> TimeData:
|
|||||||
delta = start_dt - now_utc
|
delta = start_dt - now_utc
|
||||||
total_sec = delta.total_seconds()
|
total_sec = delta.total_seconds()
|
||||||
|
|
||||||
if total_sec < 0:
|
if total_sec <= 0:
|
||||||
# Event is in the past
|
# Event is in the past or happening now
|
||||||
hours_ago = abs(total_sec) / 3600
|
hours_ago = abs(total_sec) / 3600
|
||||||
if hours_ago < 1:
|
if hours_ago < 1:
|
||||||
time_status = "LIVE"
|
time_status = "LIVE"
|
||||||
@@ -544,6 +583,47 @@ def sort_events(events: list[dict[str, Any]]) -> list[dict[str, Any]]:
|
|||||||
# ============================================================
|
# ============================================================
|
||||||
|
|
||||||
|
|
||||||
|
def _is_live_event(e: dict[str, Any]) -> bool:
    """Return True when the event started within the last 4 hours ("LIVE")."""
    raw_start = e.get("startTime") or e.get("startDate", "")
    if not raw_start:
        return False
    try:
        started_at = datetime.fromisoformat(raw_start.replace("Z", "+00:00"))
        elapsed_sec = (datetime.now(timezone.utc) - started_at).total_seconds()
    except Exception:
        # Unparseable start time -> treat as not live.
        return False
    # Future events are not live; past events count as live for < 4 hours.
    return 0 <= elapsed_sec < 4 * 3600
|
||||||
|
|
||||||
|
|
||||||
|
def filter_by_starts_before(
|
||||||
|
events: list[dict[str, Any]], timestamp: int | None
|
||||||
|
) -> list[dict[str, Any]]:
|
||||||
|
"""Filter events to only include those starting before timestamp or LIVE events."""
|
||||||
|
if timestamp is None:
|
||||||
|
return events
|
||||||
|
filtered = []
|
||||||
|
for e in events:
|
||||||
|
start_str = e.get("startTime") or e.get("startDate", "")
|
||||||
|
if not start_str:
|
||||||
|
filtered.append(e)
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
start_dt = datetime.fromisoformat(start_str.replace("Z", "+00:00"))
|
||||||
|
start_ts = start_dt.timestamp()
|
||||||
|
if start_ts <= timestamp:
|
||||||
|
filtered.append(e)
|
||||||
|
elif _is_live_event(e):
|
||||||
|
filtered.append(e)
|
||||||
|
except Exception:
|
||||||
|
filtered.append(e)
|
||||||
|
return filtered
|
||||||
|
|
||||||
|
|
||||||
def browse_events(
|
def browse_events(
|
||||||
q: str,
|
q: str,
|
||||||
matches_max: int = 10,
|
matches_max: int = 10,
|
||||||
@@ -552,6 +632,7 @@ def browse_events(
|
|||||||
sort_by: str | None = None,
|
sort_by: str | None = None,
|
||||||
max_total: int | None = None,
|
max_total: int | None = None,
|
||||||
use_cache: bool = True,
|
use_cache: bool = True,
|
||||||
|
starts_before: int | None = None,
|
||||||
) -> BrowseResult:
|
) -> BrowseResult:
|
||||||
"""
|
"""
|
||||||
Browse Polymarket events.
|
Browse Polymarket events.
|
||||||
@@ -564,6 +645,7 @@ def browse_events(
|
|||||||
sort_by: None (fast, API order) or "volume" (full fetch, sort by volume desc)
|
sort_by: None (fast, API order) or "volume" (full fetch, sort by volume desc)
|
||||||
max_total: max total events to fetch before early exit (None = no limit)
|
max_total: max total events to fetch before early exit (None = no limit)
|
||||||
use_cache: whether to use cache (default True)
|
use_cache: whether to use cache (default True)
|
||||||
|
starts_before: unix timestamp filter for match events (None = no filter)
|
||||||
"""
|
"""
|
||||||
use_early_exit = sort_by is None
|
use_early_exit = sort_by is None
|
||||||
fetch_matches_max = matches_max if use_early_exit else None
|
fetch_matches_max = matches_max if use_early_exit else None
|
||||||
@@ -579,7 +661,8 @@ def browse_events(
|
|||||||
events = result["events"]
|
events = result["events"]
|
||||||
match_events, non_match_events = filter_events(events, tradeable_only)
|
match_events, non_match_events = filter_events(events, tradeable_only)
|
||||||
|
|
||||||
# Sort if requested; otherwise preserve API order
|
match_events = filter_by_starts_before(match_events, starts_before)
|
||||||
|
|
||||||
if sort_by == "volume":
|
if sort_by == "volume":
|
||||||
match_events = sort_events(match_events)
|
match_events = sort_events(match_events)
|
||||||
non_match_events = sort_events(non_match_events)
|
non_match_events = sort_events(non_match_events)
|
||||||
@@ -819,11 +902,10 @@ def format_detail_event(e: dict[str, Any]) -> DetailEvent:
|
|||||||
|
|
||||||
|
|
||||||
def get_header_date() -> str:
    """Return today's date in the display timezone, e.g. 'Mar 25, 2026'."""
    current = datetime.now(timezone.utc).astimezone(_DISPLAY_TZ)
    return current.strftime("%b %d, %Y")
|
|
||||||
|
|
||||||
|
|
||||||
def get_tournament(title: str) -> str:
|
def get_tournament(title: str) -> str:
|
||||||
@@ -1174,6 +1256,18 @@ def main() -> None:
|
|||||||
default=None,
|
default=None,
|
||||||
help="Max total events to fetch before early exit. Default: no limit.",
|
help="Max total events to fetch before early exit. Default: no limit.",
|
||||||
)
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--starts-before",
|
||||||
|
type=int,
|
||||||
|
default=None,
|
||||||
|
help="Unix timestamp filter. Only show match events starting before this time (LIVE events always shown).",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--timezone",
|
||||||
|
type=str,
|
||||||
|
default="UTC+7",
|
||||||
|
help="Timezone for displaying times (e.g., UTC+7, UTC-5). Default: UTC+7",
|
||||||
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--telegram",
|
"--telegram",
|
||||||
action="store_true",
|
action="store_true",
|
||||||
@@ -1193,6 +1287,9 @@ def main() -> None:
|
|||||||
matches_max = args.matches if args.matches is not None else args.limit
|
matches_max = args.matches if args.matches is not None else args.limit
|
||||||
non_matches_max = args.non_matches if args.non_matches is not None else args.limit
|
non_matches_max = args.non_matches if args.non_matches is not None else args.limit
|
||||||
|
|
||||||
|
global _DISPLAY_TZ
|
||||||
|
_DISPLAY_TZ = parse_timezone(args.timezone)
|
||||||
|
|
||||||
if args.search:
|
if args.search:
|
||||||
print(f"\nFetching {args.category} events matching '{args.search}'...")
|
print(f"\nFetching {args.category} events matching '{args.search}'...")
|
||||||
else:
|
else:
|
||||||
@@ -1205,6 +1302,7 @@ def main() -> None:
|
|||||||
tradeable_only=tradeable_only,
|
tradeable_only=tradeable_only,
|
||||||
max_total=args.max_total,
|
max_total=args.max_total,
|
||||||
use_cache=not args.no_cache,
|
use_cache=not args.no_cache,
|
||||||
|
starts_before=args.starts_before,
|
||||||
)
|
)
|
||||||
|
|
||||||
print_browse(
|
print_browse(
|
||||||
|
|||||||
@@ -309,6 +309,16 @@ class TestTimeFunctions(unittest.TestCase):
|
|||||||
self.assertEqual(td["time_urgency"], 3)
|
self.assertEqual(td["time_urgency"], 3)
|
||||||
self.assertIn("WIB", td["abs_time"])
|
self.assertIn("WIB", td["abs_time"])
|
||||||
|
|
||||||
|
def test_get_time_data_live_exactly_now(self):
|
||||||
|
"""Event starts exactly now -> 'LIVE', urgency 3 (not 'In 0m')."""
|
||||||
|
frozen = self._frozen_dt(2026, 3, 25, 12, 0, 0)
|
||||||
|
with patch("browse.datetime", self._mock_datetime(frozen)):
|
||||||
|
from browse import _get_time_data
|
||||||
|
|
||||||
|
td = _get_time_data(self._make_event("2026-03-25T12:00:00Z"))
|
||||||
|
self.assertEqual(td["time_status"], "LIVE")
|
||||||
|
self.assertEqual(td["time_urgency"], 3)
|
||||||
|
|
||||||
def test_get_time_data_started_2h_ago(self):
|
def test_get_time_data_started_2h_ago(self):
|
||||||
"""Started 2 hours ago -> 'LIVE 2h', urgency 3."""
|
"""Started 2 hours ago -> 'LIVE 2h', urgency 3."""
|
||||||
frozen = self._frozen_dt(2026, 3, 25, 14, 0, 0)
|
frozen = self._frozen_dt(2026, 3, 25, 14, 0, 0)
|
||||||
@@ -1829,5 +1839,135 @@ class TestBrowseEvents(unittest.TestCase):
|
|||||||
self.assertIn("partial", result)
|
self.assertIn("partial", result)
|
||||||
|
|
||||||
|
|
||||||
|
class TestStartsBeforeFilter(unittest.TestCase):
    """Tests for --starts-before filter in browse_events()."""

    # 2026-03-27T12:00:00Z as a unix timestamp, used as the cutoff below.
    CUTOFF = 1774612800

    def _make_event(self, event_id, start_time, volume="50000"):
        """Build a minimal tradeable match event with the given startTime."""
        moneyline = {
            "sportsMarketType": "moneyline",
            "volume": volume,
            "bestBid": "0.50",
            "bestAsk": "0.52",
            "acceptingOrders": True,
            "closed": False,
        }
        return {
            "id": event_id,
            "title": f"Match {event_id}",
            "seriesSlug": "x",
            "gameId": "1",
            "startTime": start_time,
            "markets": [moneyline],
        }

    def _mock_page(self, events):
        """Shape the fake fetch_all_pages() payload."""
        return {"events": events, "total_raw": len(events), "partial": False}

    @patch("browse.fetch_all_pages")
    def test_starts_before_filters_future_events(self, mock_fetch):
        """Events with startTime > timestamp should be filtered out."""
        from browse import browse_events

        # Both events start after the 2026-03-27T12:00:00Z cutoff.
        mock_fetch.return_value = self._mock_page(
            [
                self._make_event("m1", "2026-03-27T14:00:00Z"),
                self._make_event("m2", "2026-03-28T12:00:00Z"),
            ]
        )

        result = browse_events("test", starts_before=self.CUTOFF)

        self.assertEqual(len(result["match_events"]), 0)

    @patch("browse.fetch_all_pages")
    def test_starts_before_includes_past_events(self, mock_fetch):
        """Events with startTime <= timestamp should be included."""
        from browse import browse_events

        # Both events start before the 2026-03-27T12:00:00Z cutoff.
        mock_fetch.return_value = self._mock_page(
            [
                self._make_event("m1", "2026-03-27T10:00:00Z"),
                self._make_event("m2", "2026-03-27T11:00:00Z"),
            ]
        )

        result = browse_events("test", starts_before=self.CUTOFF)

        self.assertEqual(len(result["match_events"]), 2)

    @patch("browse.fetch_all_pages")
    def test_starts_before_without_timestamp(self, mock_fetch):
        """Without starts_before, all events should be returned."""
        from browse import browse_events

        mock_fetch.return_value = self._mock_page(
            [
                self._make_event("m1", "2026-03-27T14:00:00Z"),
                self._make_event("m2", "2026-03-28T12:00:00Z"),
            ]
        )

        result = browse_events("test")

        # No filter applied, so every event comes back.
        self.assertEqual(len(result["match_events"]), 2)
|
||||||
|
|
||||||
|
|
||||||
|
class TestTimezoneParsing(unittest.TestCase):
    """Tests for parse_timezone() and timezone display."""

    def test_parse_timezone_utc_plus7(self):
        """UTC+7 should parse to WIB."""
        from datetime import timedelta, timezone

        from browse import parse_timezone

        self.assertEqual(parse_timezone("UTC+7"), timezone(timedelta(hours=7)))

    def test_parse_timezone_utc_minus5(self):
        """UTC-5 should parse correctly."""
        from datetime import timedelta, timezone

        from browse import parse_timezone

        self.assertEqual(parse_timezone("UTC-5"), timezone(timedelta(hours=-5)))

    def test_parse_timezone_utc_no_offset(self):
        """UTC should return timezone.utc."""
        from browse import parse_timezone

        self.assertEqual(parse_timezone("UTC"), timezone.utc)

    def test_parse_timezone_with_minutes(self):
        """UTC+5:30 should parse correctly."""
        from datetime import timedelta, timezone

        from browse import parse_timezone

        self.assertEqual(
            parse_timezone("UTC+5:30"), timezone(timedelta(hours=5, minutes=30))
        )

    def test_parse_timezone_invalid_falls_back_to_wib(self):
        """Invalid timezone should fall back to WIB."""
        from datetime import timedelta, timezone

        from browse import parse_timezone

        self.assertEqual(
            parse_timezone("Invalid/Timezone"), timezone(timedelta(hours=7))
        )
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
unittest.main()
|
unittest.main()
|
||||||
|
|||||||
Reference in New Issue
Block a user