@@ -18,8 +18,8 @@ running `npm install`.
 
 ## Usage
 
-To use the module and run gptscripts, you need to first set the OPENAI_API_KEY environment variable to your OpenAI API
-key.
+To use the module and run gptscripts, you need to first set the `OPENAI_API_KEY` environment variable to your OpenAI API
+key. You can also set the `GPTSCRIPT_BIN` environment variable to change which `gptscript` binary is used to execute the scripts.
 
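Both variables are read from the process environment, so a Node script can check them before using the module. A minimal sketch, where the fallback binary path is a hypothetical example and not a documented default:

```javascript
// Fail fast if the required API key is missing.
if (!process.env.OPENAI_API_KEY) {
  throw new Error('Set OPENAI_API_KEY before running gptscripts');
}

// Optionally point the module at a specific gptscript binary.
// The path below is purely illustrative.
process.env.GPTSCRIPT_BIN = process.env.GPTSCRIPT_BIN || '/usr/local/bin/gptscript';
```
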
 To ensure it is working properly, you can run the following command:
 
@@ -31,11 +31,10 @@ You will see "Hello, World!" in the output of the command.
 
 ## Client
 
-There are currently a couple "global" options, and the client helps to manage those. A client without any options is
-likely what you want. However, here are the current global options:
-
-- `gptscriptURL`: The URL (including `http(s)://`) of an "SDK server" to use instead of the fork/exec model.
-- `gptscriptBin`: The path to a `gptscript` binary to use instead of the bundled one.
+The client allows the caller to run gptscript files, tools, and other operations (see below). There are currently no
+options for this singleton client, so `await gptscript.Client.init()` is all you need. Although the intention is that a
+single client is all you need for the life of your application, you should call `close()` on the client when you are
+done.
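
A minimal sketch of that lifecycle, assuming the CommonJS require used in the examples below; the work between `init()` and `close()` is only a placeholder:

```javascript
const gptscript = require('@gptscript-ai/gptscript');

async function main() {
  // One client for the life of the application.
  const client = await gptscript.Client.init();
  try {
    // ... run files, evaluate tools, etc. (see the sections below)
  } finally {
    // Release the client when the application is done with it.
    client.close();
  }
}
```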
 
 ## Options
 
@@ -45,7 +44,6 @@ None of the options is required, and the defaults will reduce the number of call
 - `disableCache`: Enable or disable caching, default (true)
 - `cacheDir`: Specify the cache directory
 - `quiet`: No output logging
-- `chdir`: Change current working directory
 - `subTool`: Use tool of this name, not the first tool
 - `workspace`: Directory to use for the workspace, if specified it will not be deleted on exit
 
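A sketch of an options object using the fields above together with a `run` call like the one shown later in this README; the values are illustrative assumptions, not recommended defaults:

```javascript
const gptscript = require('@gptscript-ai/gptscript');

// Every field is optional; the values here are hypothetical.
const opts = {
  disableCache: true,                     // do not reuse cached responses for this run
  cacheDir: '/tmp/gptscript-cache',       // where cached responses are stored
  quiet: true,                            // suppress output logging
  subTool: 'summarize',                   // hypothetical tool name; runs this tool instead of the first one
  workspace: '/tmp/gptscript-workspace',  // kept on exit because it is specified
};

async function runWithOptions() {
  const client = await gptscript.Client.init();
  try {
    const run = client.run('./hello.gpt', opts);
    console.log(await run.text());
  } finally {
    client.close();
  }
}
```
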
@@ -61,9 +59,10 @@ Lists all the available built-in tools.
 const gptscript = require('@gptscript-ai/gptscript');
 
 async function listTools() {
-  const client = new gptscript.Client();
+  const client = await gptscript.Client.init();
   const tools = await client.listTools();
   console.log(tools);
+  client.close()
 }
 ```
 
@@ -78,12 +77,13 @@ const gptscript = require('@gptscript-ai/gptscript');
 
 async function listModels() {
   let models = [];
+  const client = await gptscript.Client.init();
   try {
-    const client = new gptscript.Client();
     models = await client.listModels();
   } catch (error) {
     console.error(error);
   }
+  client.close()
 }
 ```
 
@@ -97,12 +97,13 @@ Get the version of the current `gptscript` binary being used for the calls.
 const gptscript = require('@gptscript-ai/gptscript');
 
 async function version() {
+  const client = await gptscript.Client.init();
   try {
-    const client = new gptscript.Client();
     console.log(await client.version());
   } catch (error) {
     console.error(error);
   }
+  client.close()
 }
 ```
 
@@ -118,13 +119,14 @@ const t = {
   instructions: "Who was the president of the united states in 1928?"
 };
 
+const client = await gptscript.Client.init();
 try {
-  const client = new gptscript.Client();
   const run = client.evaluate(t);
   console.log(await run.text());
 } catch (error) {
   console.error(error);
 }
+client.close();
 ```
 
 ### run
@@ -140,13 +142,14 @@ const opts = {
 };
 
 async function execFile() {
+  const client = await gptscript.Client.init();
   try {
-    const client = new gptscript.Client();
     const run = client.run('./hello.gpt', opts);
     console.log(await run.text());
   } catch (e) {
     console.error(e);
   }
+  client.close();
 }
 ```
 
@@ -178,8 +181,8 @@ const opts = {
 };
 
 async function streamExecFileWithEvents() {
+  const client = await gptscript.Client.init();
   try {
-    const client = new gptscript.Client();
     const run = client.run('./test.gpt', opts);
 
     run.on(gptscript.RunEventType.Event, data => {
@@ -190,6 +193,7 @@ async function streamExecFileWithEvents() {
   } catch (e) {
     console.error(e);
   }
+  client.close();
 }
 ```
 
@@ -218,7 +222,7 @@ const t = {
 };
 
 async function streamExecFileWithEvents() {
-  const client = new gptscript.Client();
+  const client = await gptscript.Client.init();
   let run = client.evaluate(t, opts);
   try {
     // Wait for the initial run to complete.
@@ -238,6 +242,7 @@ async function streamExecFileWithEvents() {
     console.error(e);
   }
 
+  client.close();
 
   // The state here should either be RunState.Finished (on success) or RunState.Error (on error).
   console.log(run.state)