import json
from typing import Any, Optional

from shared.api.models import (
    WrappedBooleanResponse,
    WrappedGenericMessageResponse,
    WrappedPromptResponse,
    WrappedPromptsResponse,
)


class PromptsSDK:
    def __init__(self, client):
        self.client = client
    async def create(
        self, name: str, template: str, input_types: dict
    ) -> WrappedGenericMessageResponse:
        """Create a new prompt.

        Args:
            name (str): The name of the prompt
            template (str): The template string for the prompt
            input_types (dict): A dictionary mapping input names to their types

        Returns:
            WrappedGenericMessageResponse: Confirmation of the created prompt
        """
        data: dict[str, Any] = {
            "name": name,
            "template": template,
            "input_types": input_types,
        }
        response_dict = await self.client._make_request(
            "POST",
            "prompts",
            json=data,
            version="v3",
        )
        return WrappedGenericMessageResponse(**response_dict)
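
    # Usage sketch (illustrative; `client` is an assumed async HTTP client that
    # provides `_make_request`, as expected by __init__ above):
    #
    #   prompts_sdk = PromptsSDK(client)
    #   await prompts_sdk.create(
    #       name="greeting",
    #       template="Hello, {name}!",
    #       input_types={"name": "str"},
    #   )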
    async def list(self) -> WrappedPromptsResponse:
        """List all available prompts.

        Returns:
            WrappedPromptsResponse: A list of all available prompts
        """
        response_dict = await self.client._make_request(
            "GET",
            "prompts",
            version="v3",
        )
        return WrappedPromptsResponse(**response_dict)
    async def retrieve(
        self,
        name: str,
        inputs: Optional[dict] = None,
        prompt_override: Optional[str] = None,
    ) -> WrappedPromptResponse:
        """Get a specific prompt by name, optionally with inputs and an override.

        Args:
            name (str): The name of the prompt to retrieve
            inputs (Optional[dict]): Inputs for the prompt template
                (JSON-encoded by this method before sending)
            prompt_override (Optional[str]): An override for the prompt template

        Returns:
            WrappedPromptResponse: The requested prompt with applied inputs and/or override
        """
        params = {}
        if inputs:
            params["inputs"] = json.dumps(inputs)
        if prompt_override:
            params["prompt_override"] = prompt_override
        response_dict = await self.client._make_request(
            "POST",
            f"prompts/{name}",
            params=params,
            version="v3",
        )
        return WrappedPromptResponse(**response_dict)
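
    # Usage sketch (illustrative; same assumed `prompts_sdk` instance as above):
    #
    #   prompt = await prompts_sdk.retrieve(
    #       name="greeting",
    #       inputs={"name": "Ada"},
    #   )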
    async def update(
        self,
        name: str,
        template: Optional[str] = None,
        input_types: Optional[dict] = None,
    ) -> WrappedGenericMessageResponse:
        """Update an existing prompt's template and/or input types.

        Args:
            name (str): The name of the prompt to update
            template (Optional[str]): The updated template string for the prompt
            input_types (Optional[dict]): The updated dictionary mapping input
                names to their types

        Returns:
            WrappedGenericMessageResponse: Confirmation of the updated prompt
        """
        data: dict = {}
        if template:
            data["template"] = template
        if input_types:
            # Note: unlike create(), input_types is JSON-encoded here before sending.
            data["input_types"] = json.dumps(input_types)
        response_dict = await self.client._make_request(
            "PUT",
            f"prompts/{name}",
            json=data,
            version="v3",
        )
        return WrappedGenericMessageResponse(**response_dict)
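
    # Usage sketch (illustrative; same assumed `prompts_sdk` instance as above):
    #
    #   await prompts_sdk.update(
    #       name="greeting",
    #       template="Hi there, {name}!",
    #   )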
    async def delete(self, name: str) -> WrappedBooleanResponse:
        """Delete a prompt by name.

        Args:
            name (str): The name of the prompt to delete

        Returns:
            WrappedBooleanResponse: True if deletion was successful
        """
        response_dict = await self.client._make_request(
            "DELETE",
            f"prompts/{name}",
            version="v3",
        )
        return WrappedBooleanResponse(**response_dict)
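

# End-to-end sketch (illustrative only). The async client passed to PromptsSDK
# is not defined in this module, so its construction below is an assumption;
# only the PromptsSDK calls themselves mirror the methods above.
#
#   import asyncio
#
#   async def main():
#       client = SomeAsyncClient("http://localhost:7272")  # hypothetical client and URL
#       prompts_sdk = PromptsSDK(client)
#       await prompts_sdk.create(
#           name="summary",
#           template="Summarize: {text}",
#           input_types={"text": "str"},
#       )
#       all_prompts = await prompts_sdk.list()
#       await prompts_sdk.delete("summary")
#
#   asyncio.run(main())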