The easiest way to create your own agent that places bets on prediction markets is to subclass the DeployableTraderAgent class. This guide walks you through creating a custom agent, using an evidence-based AdvancedAgent as a worked example.
The DeployableTraderAgent is the base class that provides the framework for creating trading agents. Your custom agent needs to implement two key methods:
verify_market() - Validates whether the agent should trade on a given market
answer_binary_market() - Returns a prediction for a binary market question
from prediction_market_agent_tooling.deploy.agent import DeployableTraderAgent
from prediction_market_agent_tooling.gtypes import Probability
from prediction_market_agent_tooling.loggers import logger
from prediction_market_agent_tooling.markets.agent_market import AgentMarket
from prediction_market_agent_tooling.markets.data_models import ProbabilisticAnswer
from prediction_market_agent_tooling.tools.google_utils import search_google_serper
from prediction_market_agent_tooling.tools.openai_utils import get_openai_provider
from prediction_market_agent_tooling.tools.utils import utcnow
from pydantic_ai import Agent
from pydantic_ai.models.openai import OpenAIModel

from prediction_market_agent.tools.web_scrape.markdown import web_scrape
from prediction_market_agent.utils import APIKeys


class AdvancedAgent(DeployableTraderAgent):
    """
    This is the most basic agent that should be actually able to do some
    evidence-based predictions: it searches Google for the market question,
    scrapes the top results, and asks an LLM for a probability estimate.

    Use as a baseline for comparing with other agents.
    """

    # Number of markets to bet on during a single run of the agent.
    bet_on_n_markets_per_run = 4
    # How many of the top search results to scrape for evidence.
    max_urls_to_scrape = 5
    # Per-page character cap, so the LLM prompt stays bounded in size.
    max_chars_per_page = 10_000

    def answer_binary_market(self, market: AgentMarket) -> ProbabilisticAnswer | None:
        """Return a probabilistic answer for ``market``, or ``None`` when no
        usable evidence could be gathered.

        Pipeline: Google search -> drop Manifold pages (so the agent doesn't
        just read the market page itself) -> scrape the remaining URLs ->
        ask the LLM for a probability and confidence.
        """
        # Search for results on Google.
        google_results = search_google_serper(market.question)

        # Filter out Manifold results.
        google_results = [url for url in google_results if "manifold" not in url]
        if not google_results:
            logger.info(f"No results found for {market.question}.")
            return None

        # Scrape content from the top URLs; pages that yield nothing are dropped.
        contents = [
            scraped[: self.max_chars_per_page]
            for url in google_results[: self.max_urls_to_scrape]
            if (scraped := web_scrape(url))
        ]
        if not contents:
            logger.info(f"No contents found for {market.question}")
            return None

        # Use LLM to predict probability and confidence from the evidence.
        probability, confidence = llm(market.question, contents)
        return ProbabilisticAnswer(
            confidence=confidence,
            p_yes=Probability(probability),
            reasoning="I asked Google and LLM to do it!",
        )


def llm(question: str, contents: list[str]) -> tuple[float, float]:
    """Ask an OpenAI model to estimate ``(probability, confidence)`` for ``question``
    given the scraped ``contents`` as evidence.

    Raises:
        ValueError: if the model's reply is not exactly two parseable floats.
    """
    agent = Agent(
        OpenAIModel(
            "gpt-4o-mini",
            provider=get_openai_provider(api_key=APIKeys().openai_api_key),
        ),
        system_prompt="You are professional prediction market trading agent.",
    )
    result = agent.run_sync(
        f"""Today is {utcnow()}.

Given the following question and content from google search, what's the probability that the thing in the question will happen?

Question: {question}
Content: {contents}

Return only the probability float number and confidence float number, separated by space, nothing else."""
    ).output

    # Be defensive about the model's output: fail with a clear message instead
    # of an opaque unpacking error when it returns extra (or missing) tokens.
    parts = result.strip().split()
    if len(parts) != 2:
        raise ValueError(
            f"Expected 'probability confidence' as two floats, got: {result!r}"
        )
    probability, confidence = float(parts[0]), float(parts[1])
    return probability, confidence
1
Search for Information
Use search_google_serper() to find relevant URLs about the market question.
2
Scrape Content
Extract text content from the top URLs using web_scrape().
3
Analyze with LLM
Pass the question and scraped content to an LLM to generate a probability and confidence score.
4
Return Prediction
Return a ProbabilisticAnswer with the prediction results.