# Source: template.py — forked from i-am-bee/beeai-framework (GitHub page chrome and line-number gutter removed during extraction).
# Copyright 2025 © BeeAI a Series of LF Projects, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections.abc import Callable
from typing import Any, Generic, Self, TypeVar
import chevron
from pydantic import BaseModel, Field
from beeai_framework.errors import FrameworkError
from beeai_framework.utils.models import ModelLike, to_model_optional
# Type variable for the pydantic model that validates a template's input.
T = TypeVar("T", bound=BaseModel)


class PromptTemplateInput(BaseModel, Generic[T]):
    """Configuration for a :class:`PromptTemplate`."""

    # Pydantic model class used to validate/coerce render() inputs.
    # Exposed to callers under the alias "schema" (e.g. PromptTemplateInput(schema=MyModel, ...)).
    input_schema: type[T] = Field(..., alias="schema")
    # Mustache template string (rendered via chevron in PromptTemplate.render).
    template: str
    # Functions that derive additional template variables from the input data dict.
    # NOTE: pydantic v2 deep-copies mutable defaults per instance, so `{}` is safe here.
    functions: dict[str, Callable[[dict[str, Any]], str]] = {}
    # Fallback values applied in render() when the corresponding field is missing or None.
    defaults: dict[str, Any] = {}
class PromptTemplate(Generic[T]):
    """Mustache (chevron) template bound to a pydantic input schema.

    Rendering validates the input against the configured schema, fills in
    configured defaults, applies function-derived variables, then renders the
    template string.
    """

    def __init__(self, config: PromptTemplateInput[T]) -> None:
        self._config = config

    def render(self, template_input: ModelLike[T] | None = None, /, **kwargs: Any) -> str:
        """Render the template to a string.

        Args:
            template_input: Optional input (a model instance or something
                coercible to ``input_schema``). When omitted, ``kwargs`` are
                used directly as the template data.

        Returns:
            The rendered template.

        Raises:
            PromptTemplateError: If a configured function name clashes with an
                existing input data field.
        """
        input_model = to_model_optional(self._config.input_schema, template_input)
        data = input_model.model_dump() if input_model else kwargs

        # Fill missing or None fields with the configured defaults.
        for key, value in self._config.defaults.items():
            if data.get(key) is None:
                data[key] = value

        # Apply function-derived data. Functions run in declaration order, so a
        # later function can see values produced by earlier ones via `data`.
        for key, fn in self._config.functions.items():
            if key in data:
                raise PromptTemplateError(f"Function named '{key}' clashes with input data field!")
            data[key] = fn(data)

        return chevron.render(template=self._config.template, data=data)

    def fork(
        self, customizer: Callable[[PromptTemplateInput[Any]], PromptTemplateInput[Any]] | None
    ) -> "PromptTemplate[Any]":
        """Create a derived template, optionally transformed by ``customizer``.

        Bug fix: the config is deep-copied before being handed to the
        customizer (and when no customizer is given). Previously the fork
        aliased the live config, so ``fork(None).update(...)`` — or a
        customizer mutating its argument in place — silently mutated the
        original template as well.

        Raises:
            ValueError: If the customizer returns something other than a
                PromptTemplateInput.
        """
        config_copy = self._config.model_copy(deep=True)
        new_config = customizer(config_copy) if customizer else config_copy
        if not isinstance(new_config, PromptTemplateInput):
            raise ValueError("Return type from customizer must be a PromptTemplateInput or nothing.")
        return PromptTemplate(new_config)

    def update(
        self,
        *,
        functions: dict[str, Callable[[dict[str, Any]], str]] | None = None,
        defaults: dict[str, Any] | None = None,
    ) -> Self:
        """Merge additional functions/defaults into this template's config (in place).

        Returns:
            This template, to allow fluent chaining.
        """
        self._config.functions.update(functions or {})
        self._config.defaults.update(defaults or {})
        return self
class PromptTemplateError(FrameworkError):
    """Error raised for failures within PromptTemplate operations."""

    def __init__(
        self,
        message: str = "PromptTemplate error",
        *,
        cause: Exception | None = None,
        context: dict[str, Any] | None = None,
    ) -> None:
        # Template failures are treated as fatal and non-retryable: retrying
        # the same render with the same config cannot succeed.
        super().__init__(
            message,
            is_fatal=True,
            is_retryable=False,
            cause=cause,
            context=context,
        )
__all__ = ["PromptTemplate", "PromptTemplateError", "PromptTemplateInput"]