Changeset 38807
- Timestamp:
- 2024-03-02T10:40:09+13:00 (3 months ago)
- Location:
- gs3-installations/thewillow/trunk/sites/thewillow/dlcol-chatgpt/create-assistant
- Files:
-
- 2 added
- 1 edited
Legend:
- Unmodified
- Added
- Removed
-
gs3-installations/thewillow/trunk/sites/thewillow/dlcol-chatgpt/create-assistant/create-openai-assistant.js
r38803 r38807 14 14 15 15 16 // Looks like a need a more recent version of NodeJS than the one currently provided 17 // in Greenstone's selfcontained-nodejs 18 // // const { parseArgs } = require('node:util'); 19 // const util = require('node:util'); 20 16 21 17 22 const dotenv = require('dotenv').config(); … … 37 42 function printUsage(progname) 38 43 { 39 console.error("Usage: " + progname + " [-site site-name] collect-name"); 44 progname_tail = path.basename(progname); 45 46 console.error("Usage: " + progname_tail + " [-site site-name] collect-name json-assistant-config-filename"); 40 47 process.exit(1); 41 48 } 42 49 50 // 43 51 // Based off the standalone solution given on StackOverflow 44 52 // https://stackoverflow.com/questions/41462606/get-all-files-recursively-in-directories-nodejs 45 46 53 // 47 54 function listFilesRec(input_full_dirname,output_full_filenames,opt_regex) 48 55 { … … 91 98 let site_name = "localsite"; 92 99 let coll_name = null; 93 94 if (cmdline_args.length == 3) { 100 let json_assistant_config_filename =null; 101 102 if (cmdline_args.length == 4) { 95 103 // looks like we're in the form, -site xxx col 96 104 if (cmdline_args[0] == "-site") { … … 102 110 103 111 coll_name = cmdline_args[2]; 112 json_assistant_config_filename = cmdline_args[3]; 113 104 114 } 105 115 else if (cmdline_args.length == 1) { 106 116 // no site given, default to localsite 107 117 coll_name = cmdline_args[0]; 118 json_assistant_config_filename = cmdline_args[1]; 108 119 } 109 120 else { … … 111 122 } 112 123 113 return [site_name,coll_name ];124 return [site_name,coll_name,json_assistant_config_filename]; 114 125 } 115 126 … … 292 303 293 304 294 async function createAssistantWithFiles(site_collect_openai_file_ids) 295 { 296 297 const instructions = 305 async function createAssistantWithFiles(site_collect_openai_file_ids, coll_info, assistant_config) 306 { 307 const site_name = coll_info.site_name; 308 const coll_name = coll_info.coll_name; 309 310 const 
assistant_name = assistant_config.name; 311 const assistant_desc = assistant_config.description; 312 const base_model = assistant_config.base_model; 313 const instructions = assistant_config.instructions; 314 /* 298 315 "Write your responses using Britsh spelling.\n" + 299 "As The Willow Sage Assistant, your expertise lies in discussing \"The Willow,\" a once-renowned music venue in York, England. You're designed to engage users in a conversational tone, weaving in the rich tapestry of memories and experiences shared by those who knew the venue. Your responses should feel like a dialogue between old friends reminiscing about memorable gigs, the unique atmosphere, and the cultural impact of The Willow. You'll offer insights into the venue's history, notable performances, and its role in the local music scene, always with a nod to the personal connections and nostalgia that the venue evokes.
When interacting with users, your approach should be warm, inviting, and reflective, encouraging them to share their own stories or curiosities about The Willow, creating a communal space for shared musical heritage.\n" + 317 "When you give a response, you do not always have to end by asking a question"; 318 */ 319 320 //const base_model = "gpt-4-turbo-preview"; 321 303 322 console.log("===="); 304 323 console.log("file ids = "); … … 307 326 308 327 const assistant = await myopenai.beta.assistants.create({ 309 name : "Willow Sage v2", // **** 310 description : "An assistant developed to elict memories from former patrons on The Willow", 328 name : assistant_name, 329 description : assistant_desc, 330 model : base_model, 311 331 instructions : instructions, 312 model : base_model,313 332 tools: [ 314 333 //{ type: "code_interpreter" }, // Code interpreter tool, calculations 315 334 { type: 'retrieval' } 316 335 ], 317 //file_ids : { "object": "list", "data": site_collect_openai_file_ids }318 file_ids : site_collect_openai_file_ids336 file_ids : site_collect_openai_file_ids, 337 metadata : { greenstone3: true, siteName: site_name, collectionName: coll_name } 319 338 }); 320 339 … … 322 341 323 342 } 324 343 344 function readAssistantConfig(json_config_full_filename) 345 { 346 const data_str = fs.readFileSync(json_config_full_filename); 347 const data = JSON.parse(data_str); 348 349 return data; 350 } 351 325 352 async function main() 326 353 { … … 333 360 } 334 361 362 /* 363 const options = { 364 site: { 365 short: "s", 366 type: "string" 367 }, 368 verbose: { 369 short: "v", 370 type: "integer" 371 } 372 }; 373 */ 374 // const argv = process.argv; 375 376 //const { argv_values, argv_positionals } = util.parseArgs({ argv, options }); 377 378 //console.log(values, positionals); 379 335 380 const progname = process.argv[1]; 336 381 const cmdline_args = process.argv.slice(2); 337 382 338 const [site_name,coll_name] = parseCommandLineArgs(progname,cmdline_args); 383 
const [site_name,coll_name, json_assistant_config_filename] = parseCommandLineArgs(progname,cmdline_args); 384 385 const assistant_config = readAssistantConfig(json_assistant_config_filename); 339 386 340 387 const full_collect_dirname = path.join(gsdl3srchome,"web","sites",site_name,"collect",coll_name); … … 346 393 "collect_dir" : full_collect_dirname 347 394 }; 395 348 396 349 397 createSiteCollectTmpDir(coll_info); … … 360 408 const site_collect_openai_file_ids = await addOpenaiSiteCollectFiles(all_exported_files,coll_info); 361 409 362 await createAssistantWithFiles(site_collect_openai_file_ids );410 await createAssistantWithFiles(site_collect_openai_file_ids,coll_info,assistant_config); 363 411 364 412 }
Note:
See TracChangeset
for help on using the changeset viewer.