Merge branch 'pr-30' into 0.0.3-draft
This commit is contained in:
@@ -35,7 +35,7 @@ hermes mcp add polymarket https://docs.polymarket.com/mcp
|
|||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
```
|
```
|
||||||
polymarket-browse [--category "Counter Strike"] [--limit 5] [--matches N] [--non-matches N] [--search "TeamName"] [--matches-only] [--non-matches-only] [--detail N] [--raw] [--telegram] [--no-cache] [--max-total N]
|
polymarket-browse [--category "Counter Strike"] [--limit 5] [--matches N] [--non-matches N] [--search "TeamName"] [--matches-only] [--non-matches-only] [--detail N] [--raw] [--telegram] [--no-cache] [--max-total N] [--starts-before TIMESTAMP]
|
||||||
```
|
```
|
||||||
|
|
||||||
## Arguments
|
## Arguments
|
||||||
@@ -52,6 +52,7 @@ polymarket-browse [--category "Counter Strike"] [--limit 5] [--matches N] [--non
|
|||||||
- `--raw` : Show all events without tradeable filter (for debugging). Includes fetch stats.
|
- `--raw` : Show all events without tradeable filter (for debugging). Includes fetch stats.
|
||||||
- `--no-cache` : Disable caching and fetch fresh data from the API.
|
- `--no-cache` : Disable caching and fetch fresh data from the API.
|
||||||
- `--max-total` : Maximum total events to fetch before early exit. Default: no limit. Useful for quick snapshots.
|
- `--max-total` : Maximum total events to fetch before early exit. Default: no limit. Useful for quick snapshots.
|
||||||
|
- `--starts-before` : Unix timestamp filter. Only show match events starting before this time (LIVE events always shown regardless of timestamp).
|
||||||
- `--telegram` : Send results to Telegram. Requires `BOT_TOKEN` and `CHAT_ID` in environment variables.
|
- `--telegram` : Send results to Telegram. Requires `BOT_TOKEN` and `CHAT_ID` in environment variables.
|
||||||
|
|
||||||
## Output Format
|
## Output Format
|
||||||
|
|||||||
@@ -544,6 +544,47 @@ def sort_events(events: list[dict[str, Any]]) -> list[dict[str, Any]]:
|
|||||||
# ============================================================
|
# ============================================================
|
||||||
|
|
||||||
|
|
||||||
|
def _is_live_event(e: dict[str, Any]) -> bool:
|
||||||
|
"""Check if event is LIVE (started within last 4 hours)."""
|
||||||
|
start_str = e.get("startTime") or e.get("startDate", "")
|
||||||
|
if not start_str:
|
||||||
|
return False
|
||||||
|
try:
|
||||||
|
start_dt = datetime.fromisoformat(start_str.replace("Z", "+00:00"))
|
||||||
|
now = datetime.now(timezone.utc)
|
||||||
|
delta = now - start_dt
|
||||||
|
if delta.total_seconds() < 0:
|
||||||
|
return False
|
||||||
|
hours_ago = delta.total_seconds() / 3600
|
||||||
|
return hours_ago < 4
|
||||||
|
except Exception:
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def filter_by_starts_before(
|
||||||
|
events: list[dict[str, Any]], timestamp: int | None
|
||||||
|
) -> list[dict[str, Any]]:
|
||||||
|
"""Filter events to only include those starting before timestamp or LIVE events."""
|
||||||
|
if timestamp is None:
|
||||||
|
return events
|
||||||
|
filtered = []
|
||||||
|
for e in events:
|
||||||
|
start_str = e.get("startTime") or e.get("startDate", "")
|
||||||
|
if not start_str:
|
||||||
|
filtered.append(e)
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
start_dt = datetime.fromisoformat(start_str.replace("Z", "+00:00"))
|
||||||
|
start_ts = start_dt.timestamp()
|
||||||
|
if start_ts <= timestamp:
|
||||||
|
filtered.append(e)
|
||||||
|
elif _is_live_event(e):
|
||||||
|
filtered.append(e)
|
||||||
|
except Exception:
|
||||||
|
filtered.append(e)
|
||||||
|
return filtered
|
||||||
|
|
||||||
|
|
||||||
def browse_events(
|
def browse_events(
|
||||||
q: str,
|
q: str,
|
||||||
matches_max: int = 10,
|
matches_max: int = 10,
|
||||||
@@ -552,6 +593,7 @@ def browse_events(
|
|||||||
sort_by: str | None = None,
|
sort_by: str | None = None,
|
||||||
max_total: int | None = None,
|
max_total: int | None = None,
|
||||||
use_cache: bool = True,
|
use_cache: bool = True,
|
||||||
|
starts_before: int | None = None,
|
||||||
) -> BrowseResult:
|
) -> BrowseResult:
|
||||||
"""
|
"""
|
||||||
Browse Polymarket events.
|
Browse Polymarket events.
|
||||||
@@ -564,6 +606,7 @@ def browse_events(
|
|||||||
sort_by: None (fast, API order) or "volume" (full fetch, sort by volume desc)
|
sort_by: None (fast, API order) or "volume" (full fetch, sort by volume desc)
|
||||||
max_total: max total events to fetch before early exit (None = no limit)
|
max_total: max total events to fetch before early exit (None = no limit)
|
||||||
use_cache: whether to use cache (default True)
|
use_cache: whether to use cache (default True)
|
||||||
|
starts_before: unix timestamp filter for match events (None = no filter)
|
||||||
"""
|
"""
|
||||||
use_early_exit = sort_by is None
|
use_early_exit = sort_by is None
|
||||||
fetch_matches_max = matches_max if use_early_exit else None
|
fetch_matches_max = matches_max if use_early_exit else None
|
||||||
@@ -579,7 +622,8 @@ def browse_events(
|
|||||||
events = result["events"]
|
events = result["events"]
|
||||||
match_events, non_match_events = filter_events(events, tradeable_only)
|
match_events, non_match_events = filter_events(events, tradeable_only)
|
||||||
|
|
||||||
# Sort if requested; otherwise preserve API order
|
match_events = filter_by_starts_before(match_events, starts_before)
|
||||||
|
|
||||||
if sort_by == "volume":
|
if sort_by == "volume":
|
||||||
match_events = sort_events(match_events)
|
match_events = sort_events(match_events)
|
||||||
non_match_events = sort_events(non_match_events)
|
non_match_events = sort_events(non_match_events)
|
||||||
@@ -1174,6 +1218,12 @@ def main() -> None:
|
|||||||
default=None,
|
default=None,
|
||||||
help="Max total events to fetch before early exit. Default: no limit.",
|
help="Max total events to fetch before early exit. Default: no limit.",
|
||||||
)
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--starts-before",
|
||||||
|
type=int,
|
||||||
|
default=None,
|
||||||
|
help="Unix timestamp filter. Only show match events starting before this time (LIVE events always shown).",
|
||||||
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--telegram",
|
"--telegram",
|
||||||
action="store_true",
|
action="store_true",
|
||||||
@@ -1205,6 +1255,7 @@ def main() -> None:
|
|||||||
tradeable_only=tradeable_only,
|
tradeable_only=tradeable_only,
|
||||||
max_total=args.max_total,
|
max_total=args.max_total,
|
||||||
use_cache=not args.no_cache,
|
use_cache=not args.no_cache,
|
||||||
|
starts_before=args.starts_before,
|
||||||
)
|
)
|
||||||
|
|
||||||
print_browse(
|
print_browse(
|
||||||
|
|||||||
@@ -1839,5 +1839,92 @@ class TestBrowseEvents(unittest.TestCase):
|
|||||||
self.assertIn("partial", result)
|
self.assertIn("partial", result)
|
||||||
|
|
||||||
|
|
||||||
|
class TestStartsBeforeFilter(unittest.TestCase):
    """Tests for --starts-before filter in browse_events()."""

    def _make_event(self, ev_id, starts_at, volume="50000"):
        """Build a minimal tradeable match event whose startTime is *starts_at*."""
        moneyline = {
            "sportsMarketType": "moneyline",
            "volume": volume,
            "bestBid": "0.50",
            "bestAsk": "0.52",
            "acceptingOrders": True,
            "closed": False,
        }
        return {
            "id": ev_id,
            "title": f"Match {ev_id}",
            "seriesSlug": "x",
            "gameId": "1",
            "startTime": starts_at,
            "markets": [moneyline],
        }

    @patch("browse.fetch_all_pages")
    def test_starts_before_filters_future_events(self, mock_fetch):
        """Events starting strictly after the cutoff are dropped."""
        from browse import browse_events

        # Both events start after the 2026-03-27T12:00:00Z cutoff.
        mock_fetch.return_value = {
            "events": [
                self._make_event("m1", "2026-03-27T14:00:00Z"),
                self._make_event("m2", "2026-03-28T12:00:00Z"),
            ],
            "total_raw": 2,
            "partial": False,
        }

        # 2026-03-27T12:00:00Z = 1774612800
        result = browse_events("test", starts_before=1774612800)

        self.assertEqual(len(result["match_events"]), 0)

    @patch("browse.fetch_all_pages")
    def test_starts_before_includes_past_events(self, mock_fetch):
        """Events starting at or before the cutoff are kept."""
        from browse import browse_events

        # Both events start before the 12:00 cutoff (10:00 and 11:00).
        mock_fetch.return_value = {
            "events": [
                self._make_event("m1", "2026-03-27T10:00:00Z"),
                self._make_event("m2", "2026-03-27T11:00:00Z"),
            ],
            "total_raw": 2,
            "partial": False,
        }

        # 2026-03-27T12:00:00Z = 1774612800
        result = browse_events("test", starts_before=1774612800)

        self.assertEqual(len(result["match_events"]), 2)

    @patch("browse.fetch_all_pages")
    def test_starts_before_without_timestamp(self, mock_fetch):
        """With no starts_before argument, nothing is filtered out."""
        from browse import browse_events

        mock_fetch.return_value = {
            "events": [
                self._make_event("m1", "2026-03-27T14:00:00Z"),
                self._make_event("m2", "2026-03-28T12:00:00Z"),
            ],
            "total_raw": 2,
            "partial": False,
        }

        result = browse_events("test")

        # No filter supplied: every match event comes back.
        self.assertEqual(len(result["match_events"]), 2)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
unittest.main()
|
unittest.main()
|
||||||
|
|||||||
Reference in New Issue
Block a user