The askLlamaForAChapterSynopsis function asynchronously generates a list of chapter or character synopses for a given topic by prompting an LLM (Large Language Model), logging both the user's prompt and the LLM's response, and parsing that response to extract synopsis titles and descriptions. The function returns an object whose keys are the titles and whose values are the descriptions, assuming the LLM's response is a numbered list of titles each followed by a description.
npm run import -- "ask llm to write chapter titles and descriptions"
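For orientation, here is a hedged usage sketch; the topic string and the require path are invented for illustration, and the returned keys depend entirely on what the model answers:

```javascript
// Hypothetical usage; the topic and module path are illustrative only.
const askLlamaForAChapterSynopsis = require('./ask-llama-for-a-chapter-synopsis')

askLlamaForAChapterSynopsis('a detective novel set in Antarctica', 'chapters')
  .then((synopses) => {
    // Expected shape: { 'Some chapter title': 'Its description', ... }
    console.log(Object.keys(synopses).length + ' synopses generated')
  })
```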
```javascript
// TODO: parse title from descriptions and return in a nicely formatted object array
async function askLlamaForAChapterSynopsis(topic, chapterOrCharacterOrSections = 'chapters') {
  const {llmPrompt} = await importer.import("create llm session")

  // Build the prompt and log both sides of the exchange.
  let q1 = 'Brainstorm a list of 12 ' + chapterOrCharacterOrSections + ' synopsis:\n'
    + topic + '\nRespond with the numbered titles followed by the description.'
  console.log("User: " + q1);
  const a1 = await llmPrompt(q1);
  console.log("AI: " + a1);

  // Split the response on numbered list markers ("1. ", "2. ", ...).
  let titles = a1.trim().split(/(^|\n)\s*[0-9]+\.\s*/gi)
    .filter(a => a && a.trim().length > 0)

  let obj = {}
  for(let i = 0; i < titles.length; i++) {
    let next = i + 1 < titles.length ? titles[i + 1] : ''
    // Skip fragments that merely echo the topic when the following fragment is short.
    if(topic.includes(titles[i]) && next.length < 128) continue;
    // Strip markdown bold/heading markers and break the fragment into lines.
    let lines = titles[i].split(/\*\*|\#\#|\n/gi).filter(a => a && a.trim().length > 0)
    let key = lines[0]
    obj[key] = lines.slice(1).join('\n')
    // If the title had no inline description, treat the next long fragment as the description.
    if(obj[key].length == 0 && next.length > 128) {
      obj[key] = next
      i++
    }
  }

  console.log(obj)
  return obj
}

module.exports = askLlamaForAChapterSynopsis
```
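To make the split step concrete, here is a minimal sketch that runs the same regular expression on an invented response fragment (the sample text is made up; real model output will vary):

```javascript
// Minimal sketch of the split heuristic; the sample response text is invented.
const sample = '1. **The Frozen Clue**\nA body is found at the research station.\n' +
  '2. **White Silence**\nThe detective questions the overwintering crew.'

const parts = sample.trim().split(/(^|\n)\s*[0-9]+\.\s*/gi)
  .filter(a => a && a.trim().length > 0)

// parts now holds one "**Title**\nDescription" fragment per numbered entry,
// which the loop above further splits on '**', '##', or newlines.
console.log(parts)
```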
```javascript
/**
 * Asks Llama for a list of chapter (or character/section) synopses on the given topic.
 *
 * @param {string} topic The topic to generate synopses for.
 * @param {string} [chapterOrCharacterOrSections='chapters'] The type of synopsis to generate.
 * @returns {Promise<object>} A promise resolving to an object mapping titles to descriptions.
 */
async function askLlamaForAChapterSynopsis(topic, chapterOrCharacterOrSections = 'chapters') {
  const { llmPrompt } = await importer.import('create llm session');
  const prompt = `Brainstorm a list of 12 ${chapterOrCharacterOrSections} synopsis for: ${topic}\nPlease respond with numbered titles followed by the description.`;
  console.log('User:', prompt);
  const response = await llmPrompt(prompt);
  console.log('AI:', response);

  // A numbered entry looks like "3. **Title** ..."; capture everything after the number.
  const titleRegex = /^\s*[0-9]+\.\s*(.*)$/;
  const obj = {};
  let currentTitle = null;

  for (const line of response.trim().split('\n')) {
    const match = line.match(titleRegex);
    if (match) {
      // Start a new entry; the first "**"/"##"-delimited segment is the title,
      // anything after it on the same line is kept as an inline description.
      const segments = match[1].split(/\*\*|##/).map((s) => s.trim()).filter((s) => s.length > 0);
      if (segments.length === 0 || topic.includes(segments[0])) {
        currentTitle = null;
        continue;
      }
      currentTitle = segments[0];
      obj[currentTitle] = segments.slice(1).join(' ');
    } else if (currentTitle && line.trim().length > 0) {
      // Non-numbered lines following a title are appended to its description.
      const text = line.replace(/\*\*|##/g, '').trim();
      obj[currentTitle] = obj[currentTitle] ? obj[currentTitle] + '\n' + text : text;
    }
  }

  console.log(obj);
  return obj;
}

module.exports = askLlamaForAChapterSynopsis;
```
askLlamaForAChapterSynopsis(topic, chapterOrCharacterOrSections = 'chapters')

topic: The topic for which synopses are generated.
chapterOrCharacterOrSections: The type of synopses to generate. Defaults to 'chapters'.

Imports: create llm session.
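The TODO in the source mentions returning a nicely formatted object array; here is a hedged sketch of how a caller could derive that shape from the current { title: description } return value (the topic is again invented):

```javascript
// Sketch only: reshape the returned map into the array of objects the TODO describes.
askLlamaForAChapterSynopsis('a detective novel set in Antarctica')
  .then((synopses) => {
    const chapters = Object.entries(synopses)
      .map(([title, description]) => ({ title, description }))
    console.log(chapters)
  })
```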