Planner¶
The Planner agent parses natural language queries into structured parameters (intent, location, time range, dataset).
geoagent.core.planner
¶
Planner agent for parsing natural language queries into structured parameters.
Planner
¶
Agent for parsing natural language queries into structured parameters.
Source code in geoagent/core/planner.py
class Planner:
    """Agent for parsing natural language queries into structured parameters.

    Wraps a chat model with a system prompt (optionally listing the STAC
    collections available) and structured-output chains that produce
    ``_PlannerLLMSchema`` instances, which are then converted into the
    canonical ``PlannerOutput`` model.
    """

    def __init__(
        self,
        llm: Optional[BaseChatModel] = None,
        collections: Optional[List[Dict[str, str]]] = None,
    ):
        """
        Initialize the planner agent.

        Args:
            llm: Language model to use. Uses default if None.
            collections: Optional list of collection metadata dicts with
                "id" and "title" keys; rendered into the system prompt so
                the model can choose a valid dataset.
        """
        self.llm = llm or get_default_llm(temperature=0.0)

        # Format collections into a readable list for the system prompt
        collections_text = ""
        if collections:
            lines = ["Available collections in the STAC catalog:"]
            for c in collections:
                cid = c.get("id", "")
                title = c.get("title", "")
                # Skip the title when it is absent or merely repeats the id.
                if title and title != cid:
                    lines.append(f"- {cid}: {title}")
                else:
                    lines.append(f"- {cid}")
            collections_text = "\n".join(lines)

        # Use replace instead of format to avoid conflicts with {{ }} in examples
        system_prompt = SYSTEM_PROMPT.replace("{collections}", collections_text)

        self.prompt = ChatPromptTemplate.from_messages(
            [("system", system_prompt), ("human", "{query}")]
        )

        # Build structured output chains — try strict first, json_mode as fallback.
        # Either constructor may raise for models lacking the capability; a
        # failure leaves that chain as None and parse_query skips it.
        self._chain_strict = None
        self._chain_json = None
        try:
            self._chain_strict = self.prompt | self.llm.with_structured_output(
                _PlannerLLMSchema
            )
        except Exception:
            pass
        try:
            self._chain_json = self.prompt | self.llm.with_structured_output(
                _PlannerLLMSchema, method="json_mode"
            )
        except Exception:
            pass

    @staticmethod
    def _convert_to_planner_output(result: _PlannerLLMSchema) -> PlannerOutput:
        """Convert LLM schema output to the canonical PlannerOutput model."""
        # Location: four comma-separated floats are treated as a bbox;
        # anything else (including non-numeric text) is kept as a place name.
        location = None
        if result.location:
            try:
                parts = [float(x) for x in result.location.split(",")]
                if len(parts) == 4:
                    location = {"bbox": parts}
                else:
                    location = {"name": result.location}
            except ValueError:
                location = {"name": result.location}

        # Time range arrives as a (start, end) pair; re-key it for PlannerOutput.
        time_range = None
        if result.time_range:
            time_range = {
                "start_date": result.time_range[0],
                "end_date": result.time_range[1],
            }

        # Build parameters dict from explicit fields
        parameters: Dict[str, Any] = {}
        if result.max_cloud_cover is not None:
            parameters["max_cloud_cover"] = result.max_cloud_cover
        if result.max_items is not None:
            parameters["max_items"] = result.max_items

        return PlannerOutput(
            intent=result.intent.value,
            location=location,
            time_range=time_range,
            dataset=result.dataset,
            analysis_type=result.analysis_type,
            parameters=parameters,
            confidence=1.0,
        )

    def parse_query(self, query: str) -> PlannerOutput:
        """
        Parse a natural language query into structured parameters.

        Tries the strict structured-output chain first, then the json_mode
        fallback; the first chain yielding a valid _PlannerLLMSchema wins.

        Args:
            query: Natural language query about Earth observation data

        Returns:
            PlannerOutput with extracted structured information

        Raises:
            Exception: If LLM fails to parse the query
        """
        last_err = None
        for chain in (self._chain_strict, self._chain_json):
            if chain is None:
                continue
            try:
                result = chain.invoke({"query": query})
                if isinstance(result, _PlannerLLMSchema):
                    return self._convert_to_planner_output(result)
            except Exception as e:
                # Record the failure and fall through to the next chain.
                last_err = e
                logger.debug(f"Structured output attempt failed: {e}")
                continue
        raise Exception(
            f"Failed to parse query: {last_err or 'no structured output chain available'}"
        )

    def parse_batch(self, queries: List[str]) -> List[PlannerOutput]:
        """
        Parse multiple queries in batch.

        A failing query does not abort the batch; it yields a minimal
        SEARCH-intent output carrying the error and the original text.

        Args:
            queries: List of natural language queries

        Returns:
            List of PlannerOutput objects
        """
        results = []
        for query in queries:
            try:
                result = self.parse_query(query)
                results.append(result)
            except Exception as e:
                # Create a minimal output for failed queries
                fallback = PlannerOutput(
                    intent=Intent.SEARCH.value,
                    parameters={"error": str(e), "original_query": query},
                )
                results.append(fallback)
        return results
__init__(self, llm=None, collections=None)
special
¶
Initialize the planner agent.
Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| `llm` | `Optional[langchain_core.language_models.chat_models.BaseChatModel]` | Language model to use. Uses default if None. | `None` |
| `collections` | `Optional[List[Dict[str, str]]]` | Collection metadata (`id`/`title`) listed in the system prompt. | `None` |
Source code in geoagent/core/planner.py
def __init__(
    self,
    llm: Optional[BaseChatModel] = None,
    collections: Optional[List[Dict[str, str]]] = None,
):
    """
    Initialize the planner agent.

    Args:
        llm: Language model to use. Uses default if None.
        collections: Optional list of collection metadata dicts with
            "id" and "title" keys; rendered into the system prompt so
            the model can choose a valid dataset.
    """
    self.llm = llm or get_default_llm(temperature=0.0)

    # Format collections into a readable list for the system prompt
    collections_text = ""
    if collections:
        lines = ["Available collections in the STAC catalog:"]
        for c in collections:
            cid = c.get("id", "")
            title = c.get("title", "")
            # Skip the title when it is absent or merely repeats the id.
            if title and title != cid:
                lines.append(f"- {cid}: {title}")
            else:
                lines.append(f"- {cid}")
        collections_text = "\n".join(lines)

    # Use replace instead of format to avoid conflicts with {{ }} in examples
    system_prompt = SYSTEM_PROMPT.replace("{collections}", collections_text)

    self.prompt = ChatPromptTemplate.from_messages(
        [("system", system_prompt), ("human", "{query}")]
    )

    # Build structured output chains — try strict first, json_mode as fallback.
    # Either constructor may raise for models lacking the capability; a
    # failure leaves that chain as None and parse_query skips it.
    self._chain_strict = None
    self._chain_json = None
    try:
        self._chain_strict = self.prompt | self.llm.with_structured_output(
            _PlannerLLMSchema
        )
    except Exception:
        pass
    try:
        self._chain_json = self.prompt | self.llm.with_structured_output(
            _PlannerLLMSchema, method="json_mode"
        )
    except Exception:
        pass
parse_batch(self, queries)
¶
Parse multiple queries in batch.
Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| `queries` | `List[str]` | List of natural language queries | *required* |

Returns:

| Type | Description |
|---|---|
| `List[geoagent.core.models.PlannerOutput]` | List of PlannerOutput objects |
Source code in geoagent/core/planner.py
def parse_batch(self, queries: List[str]) -> List[PlannerOutput]:
    """
    Parse several natural language queries, one at a time.

    A query that fails to parse does not abort the batch; it is replaced
    by a minimal SEARCH-intent output recording the error message and the
    original query text.

    Args:
        queries: List of natural language queries

    Returns:
        List of PlannerOutput objects, one per input query, in order
    """

    def _parse_one(text: str) -> PlannerOutput:
        try:
            return self.parse_query(text)
        except Exception as exc:
            # Degrade gracefully: record the failure instead of raising.
            return PlannerOutput(
                intent=Intent.SEARCH.value,
                parameters={"error": str(exc), "original_query": text},
            )

    return [_parse_one(q) for q in queries]
parse_query(self, query)
¶
Parse a natural language query into structured parameters.
Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| `query` | `str` | Natural language query about Earth observation data | *required* |

Returns:

| Type | Description |
|---|---|
| `PlannerOutput` | PlannerOutput with extracted structured information |

Exceptions:

| Type | Description |
|---|---|
| `Exception` | If LLM fails to parse the query |
Source code in geoagent/core/planner.py
def parse_query(self, query: str) -> PlannerOutput:
    """
    Parse a natural language query into structured parameters.

    The strict structured-output chain is attempted first, then the
    json_mode fallback; the first one producing a valid _PlannerLLMSchema
    is converted and returned.

    Args:
        query: Natural language query about Earth observation data

    Returns:
        PlannerOutput with extracted structured information

    Raises:
        Exception: If LLM fails to parse the query
    """
    failure = None
    candidates = [
        c for c in (self._chain_strict, self._chain_json) if c is not None
    ]
    for candidate in candidates:
        try:
            # Conversion stays inside the try so a bad payload also
            # falls through to the next candidate chain.
            parsed = candidate.invoke({"query": query})
            if isinstance(parsed, _PlannerLLMSchema):
                return self._convert_to_planner_output(parsed)
        except Exception as exc:
            failure = exc
            logger.debug(f"Structured output attempt failed: {exc}")
    raise Exception(
        f"Failed to parse query: {failure or 'no structured output chain available'}"
    )
create_planner(llm=None, collections=None)
¶
Create a planner instance.
Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| `llm` | `Optional[langchain_core.language_models.chat_models.BaseChatModel]` | Language model to use. Uses default if None. | `None` |
| `collections` | `Optional[List[Dict[str, str]]]` | Collection metadata forwarded to the Planner. | `None` |

Returns:

| Type | Description |
|---|---|
| `Planner` | Configured Planner instance |
Source code in geoagent/core/planner.py
def create_planner(
    llm: Optional[BaseChatModel] = None,
    collections: Optional[List[Dict[str, str]]] = None,
) -> Planner:
    """
    Create a planner instance.

    Thin factory around the Planner constructor; all arguments are
    forwarded unchanged.

    Args:
        llm: Language model to use. Uses default if None.
        collections: Collection metadata forwarded to the Planner.

    Returns:
        Configured Planner instance
    """
    planner = Planner(llm=llm, collections=collections)
    return planner
parse_query(query, llm=None, collections=None)
¶
Convenience function to parse a single query.
Parameters:

| Name | Type | Description | Default |
|---|---|---|---|
| `query` | `str` | Natural language query | *required* |
| `llm` | `Optional[langchain_core.language_models.chat_models.BaseChatModel]` | Language model to use. Uses default if None. | `None` |
| `collections` | `Optional[List[Dict[str, str]]]` | Collection metadata forwarded to the planner. | `None` |

Returns:

| Type | Description |
|---|---|
| `PlannerOutput` | PlannerOutput with extracted information |
Source code in geoagent/core/planner.py
def parse_query(
    query: str,
    llm: Optional[BaseChatModel] = None,
    collections: Optional[List[Dict[str, str]]] = None,
) -> PlannerOutput:
    """
    Convenience function to parse a single query.

    Builds a throwaway planner via create_planner and delegates to its
    parse_query method.

    Args:
        query: Natural language query
        llm: Language model to use. Uses default if None.
        collections: Collection metadata forwarded to the planner.

    Returns:
        PlannerOutput with extracted information
    """
    return create_planner(llm=llm, collections=collections).parse_query(query)