@@ -4,46 +4,16 @@ import (
44 "context"
55 "encoding/json"
66 "fmt"
7- "os"
87 "sync"
98 "sync/atomic"
109
11- "github.com/gptscript-ai/gptscript/pkg/openai"
10+ "github.com/gptscript-ai/gptscript/pkg/system"
1211 "github.com/gptscript-ai/gptscript/pkg/types"
1312 "github.com/gptscript-ai/gptscript/pkg/version"
1413)
1514
16- // InternalSystemPrompt is added to all threads. Changing this is very dangerous as it has a
17- // terrible global effect and changes the behavior of all scripts.
18- var InternalSystemPrompt = `
19- You are task oriented system.
20- You receive input from a user, process the input from the given instructions, and then output the result.
21- Your objective is to provide consistent and correct results.
22- You do not need to explain the steps taken, only provide the result to the given instructions.
23- You are referred to as a tool.
24- `
25-
26- var DefaultToolSchema = types.JSONSchema{
27- Property: types.Property{
28- Type: "object",
29- },
30- Properties: map[string]types.Property{
31- openai.DefaultPromptParameter: {
32- Description: "Prompt to send to the tool or assistant. This may be instructions or question.",
33- Type: "string",
34- },
35- },
36- Required: []string{openai.DefaultPromptParameter},
37- }
38-
3915 var completionID int64
4016
41- func init() {
42- if p := os.Getenv("GPTSCRIPT_INTERNAL_SYSTEM_PROMPT"); p != "" {
43- InternalSystemPrompt = p
44- }
45- }
46-
4717 type ErrToolNotFound struct {
4818 ToolName string
4919}
@@ -52,10 +22,14 @@ func (e *ErrToolNotFound) Error() string {
5222 return fmt.Sprintf("tool not found: %s", e.ToolName)
5323}
5424
25+ type Model interface {
26+ Call(ctx context.Context, messageRequest types.CompletionRequest, status chan<- types.CompletionStatus) (*types.CompletionMessage, error)
27+ }
28+
5529 type Engine struct {
56- Client *openai.Client
30+ Model Model
5731 Env []string
58- Progress chan<- openai.Status
32+ Progress chan<- types.CompletionStatus
5933 }
6034
6135 type State struct {
@@ -172,18 +146,12 @@ func (e *Engine) Start(ctx Context, input string) (*Return, error) {
172146 }
173147
174148 completion := types.CompletionRequest{
175- Model: tool.Parameters.ModelName,
176- MaxToken: tool.Parameters.MaxTokens,
177- JSONResponse: tool.Parameters.JSONResponse,
178- Cache: tool.Parameters.Cache,
179- Temperature: tool.Parameters.Temperature,
180- }
181-
182- if InternalSystemPrompt != "" && (tool.Parameters.InternalPrompt == nil || *tool.Parameters.InternalPrompt) {
183- completion.Messages = append(completion.Messages, types.CompletionMessage{
184- Role: types.CompletionMessageRoleTypeSystem,
185- Content: types.Text(InternalSystemPrompt),
186- })
149+ Model: tool.Parameters.ModelName,
150+ MaxTokens: tool.Parameters.MaxTokens,
151+ JSONResponse: tool.Parameters.JSONResponse,
152+ Cache: tool.Parameters.Cache,
153+ Temperature: tool.Parameters.Temperature,
154+ InternalSystemPrompt: tool.Parameters.InternalPrompt,
187155 }
188156
189157 for _, subToolName := range tool.Parameters.Tools {
@@ -193,10 +161,9 @@ func (e *Engine) Start(ctx Context, input string) (*Return, error) {
193161 }
194162 args := subTool.Parameters.Arguments
195163 if args == nil && !subTool.IsCommand() {
196- args = &DefaultToolSchema
164+ args = &system.DefaultToolSchema
197165 }
198166 completion.Tools = append(completion.Tools, types.CompletionTool{
199- Type: types.CompletionToolTypeFunction,
200167 Function: types.CompletionFunctionDefinition{
201168 Name: subToolName,
202169 Description: subTool.Parameters.Description,
@@ -207,12 +174,8 @@ func (e *Engine) Start(ctx Context, input string) (*Return, error) {
207174
208175 if tool.Instructions != "" {
209176 completion.Messages = append(completion.Messages, types.CompletionMessage{
210- Role: types.CompletionMessageRoleTypeSystem,
211- Content: []types.ContentPart{
212- {
213- Text: tool.Instructions,
214- },
215- },
177+ Role: types.CompletionMessageRoleTypeSystem,
178+ Content: types.Text(tool.Instructions),
216179 })
217180 }
218181
@@ -230,7 +193,7 @@ func (e *Engine) Start(ctx Context, input string) (*Return, error) {
230193
231194 func (e *Engine) complete(ctx context.Context, state *State) (*Return, error) {
232195 var (
233- progress = make(chan openai.Status)
196+ progress = make(chan types.CompletionStatus)
234197 ret = Return{
235198 State: state,
236199 Calls: map[string]Call{},
@@ -241,6 +204,7 @@ func (e *Engine) complete(ctx context.Context, state *State) (*Return, error) {
241204 // ensure we aren't writing to the channel anymore on exit
242205 wg.Add(1)
243206 defer wg.Wait()
207+ defer close(progress)
244208
245209 go func() {
246210 defer wg.Done()
@@ -251,8 +215,7 @@ func (e *Engine) complete(ctx context.Context, state *State) (*Return, error) {
251215 }
252216 }()
253217
254- resp, err := e.Client.Call(ctx, state.Completion, progress)
255- close(progress)
218+ resp, err := e.Model.Call(ctx, state.Completion, progress)
256219 if err != nil {
257220 return nil, err
258221 }
0 commit comments