The `classifyPrompt` function takes a prompt and image as input, matches the prompt to a specific function, and executes that function with the provided arguments to generate a response. It iterates over the matching functions, imports and parameterizes each one, and returns the result with additional memories — except for the `doStableRequest` function, whose result is merged into a combined object.
npm run import -- "add conversation context"
const { messageRecents } = importer.import("general chit chat")
const {doStableRequest} = importer.import("stable diffusion request")
//const messageOllamaVision = importer.import("describe an image")
const {askLlamaMatchingFunction, API_DESCRIPTION} = importer.import("classify llm prompt")
const getParameters = importer.import("function parameters")
const {listMemories} = importer.import("llm load memories")
/**
 * Matches a prompt (and optional image) to a registered tool function and
 * executes it, returning the result augmented with session memories.
 *
 * @param {*} promptModel - LLM interface used for classification.
 * @param {*} session - Current chat session identifier/state.
 * @param {string} prompt - User prompt to classify.
 * @param {*} image - Optional image accompanying the prompt.
 * @param {*} otr - Off-the-record flag passed through to handlers.
 * @returns {Promise<object>} Object with `memories` and `content` keys (plus
 *   any extra keys when the matched handler is `doStableRequest`).
 */
async function classifyPrompt(promptModel, session, prompt, image, otr) {
  let matchingFunctions = await askLlamaMatchingFunction(promptModel, prompt, image)
  let importedFunction
  let answer = ''
  // Named values available for by-name parameter binding below.
  let context = {
    promptModel, session, prompt, image, otr
  }
  // Drop out early if the matching function is ourselves; this is how we
  // return our function description for inquiries about capabilities.
  if (matchingFunctions == classifyPrompt) {
    return {
      memories: await listMemories(session),
      content: 'Given the following functions:\n' + API_DESCRIPTION
    }
  }
  //let historyFiles = await relevantHistory(promptModel, session, prompt)
  // TODO: convert to available parameters like Core/import.ipynb:run() style parameterization
  for (let i = 0; i < matchingFunctions.length; i++) {
    // FIX: was importer.import("matchingFunctions[i") — a garbled string
    // literal that could never resolve; import the matched module name instead.
    importedFunction = importer.import(matchingFunctions[i])
    // A module may export an object wrapping the function; unwrap the first export.
    if (typeof importedFunction == 'object' && typeof Object.values(importedFunction)[0] == 'function') {
      importedFunction = Object.values(importedFunction)[0]
    }
    // Bind the handler's declared parameters (skipping the first — presumably
    // the model handle; TODO confirm) by name from the context object.
    let params = getParameters(importedFunction.toString()).slice(1)
    let inputs = []
    for (let j = 0; j < params.length; j++)
      inputs[j] = context[params[j]]
    answer = await importedFunction.apply(null, inputs)
    // Only the first matching function is ever executed.
    break
  }
  if (importedFunction == doStableRequest) {
    // Stable-diffusion results carry extra keys, so merge memories and
    // recent-message content directly into the answer object.
    return Object.assign(answer, {
      memories: await listMemories(session),
      content: await messageRecents(session, prompt)
    })
  } else if (importedFunction != messageRecents) {
    if (typeof answer == 'object') {
      return Object.assign({}, {
        memories: await listMemories(session),
        content: await messageRecents(session, prompt)
      }, answer)
    } else {
      return {
        memories: await listMemories(session),
        content: (answer ? answer : '') + await messageRecents(session, prompt)
      }
    }
  } else {
    // Handler already was messageRecents; avoid calling it twice.
    return {
      memories: await listMemories(session),
      content: answer ? answer : await messageRecents(session, prompt)
    }
  }
}
module.exports = classifyPrompt
const { importer } = require('./importer');
const { messageRecents, listMemories } = importer.import('general chit chat');
const { doStableRequest, API_DESCRIPTION } = importer.import('stable diffusion request');
const { askLlamaMatchingFunction } = importer.import('classify llm prompt');
const { getParameters } = importer.import('function parameters');
/**
 * Refactored prompt classifier: finds functions matching the prompt and runs
 * the first one that succeeds, merging its answer with session memories.
 *
 * @param {object} args
 * @param {*} args.promptModel - LLM interface used for classification.
 * @param {*} args.session - Current chat session identifier/state.
 * @param {string} args.prompt - User prompt to classify.
 * @param {*} args.image - Optional image accompanying the prompt.
 * @param {*} args.otr - Off-the-record flag passed through to handlers.
 * @returns {Promise<object>} `{ memories, content }` style result.
 */
async function classifyPrompt({
  promptModel,
  session,
  prompt,
  image,
  otr,
}) {
  const matchingFunctions = await askLlamaMatchingFunction(promptModel, prompt, image);
  // If the matching function is ourselves, return function description for capabilities inquiry
  if (matchingFunctions.includes(classifyPrompt.name)) {
    return {
      memories: await listMemories(session),
      content: 'Given the following functions:\n' + API_DESCRIPTION,
    };
  }
  // FIX: `context` was referenced below but never defined in this version,
  // causing a ReferenceError; it maps handler parameter names to the values
  // supplied by the caller.
  const context = { promptModel, session, prompt, image, otr };
  for (const matchingFunction of matchingFunctions) {
    const importedFunction = importer.import(matchingFunction);
    // Bind the handler's declared parameters (skipping the first) by name.
    const params = getParameters(importedFunction.toString()).slice(1);
    const inputs = params.map((param) => context[param]);
    try {
      const answer = await importedFunction(...inputs);
      return mergeAnswer(answer, session);
    } catch (error) {
      // Deliberate best-effort: if this handler fails, try the next match.
      continue;
    }
  }
  // If no matching function is found, return a default message
  return {
    memories: await listMemories(session),
    content: 'No matching function found for the given prompt.',
  };
}
/**
 * Wraps a handler's result with the session's memories, falling back to
 * recent messages when the handler produced no usable content.
 *
 * @param {*} answer - Raw result from the matched handler.
 * @param {*} session - Current chat session.
 * @returns {Promise<object>} `{ memories, content }` result object.
 */
async function mergeAnswer(answer, session) {
  const memories = await listMemories(session);
  // A concrete (non-boolean) truthy result is used as the content directly.
  if (answer && answer !== true) {
    return { memories, content: answer };
  }
  // `true` signals success with nothing to say: use recent messages instead.
  if (answer === true) {
    return { memories, content: await messageRecents(session) };
  }
  // Falsy result: substitute recent messages for the missing content.
  return { memories, content: answer || (await messageRecents(session)) };
}
// Special handling for doStableRequest
/**
 * Special handling for doStableRequest results: merges memories and recent
 * messages into the answer object itself (mutating it), or returns just the
 * memories/content pair when no answer was produced.
 *
 * @param {*} answer - Result object from doStableRequest, or undefined.
 * @param {*} session - Current chat session.
 * @returns {Promise<object>} The (augmented) answer, or a fresh result object.
 */
async function specialHandling(answer, session) {
  const extras = {
    memories: await listMemories(session),
    content: await messageRecents(session),
  };
  // Merge onto the existing answer when one exists (mutates it, preserving
  // its extra keys); otherwise return the extras alone.
  return answer !== undefined ? Object.assign(answer, extras) : extras;
}
module.exports = classifyPrompt;
The code imports various functions and variables from different modules: `messageRecents` from "general chit chat", `listMemories` from "llm load memories", `doStableRequest` from "stable diffusion request", `askLlamaMatchingFunction` and `API_DESCRIPTION` from "classify llm prompt", and `getParameters` from "function parameters".

## The classifyPrompt Function

The `classifyPrompt` function takes five arguments: `promptModel`, `session`, `prompt`, `image`, and `otr`. It matches a given prompt to a specific function and executes that function with the provided arguments. Here's a step-by-step breakdown:
1. It calls `askLlamaMatchingFunction` to find functions matching the given prompt and image.
2. If the matching function is `classifyPrompt` itself, it returns a description of the available functions (`API_DESCRIPTION`) along with the session's memories.
3. Otherwise, each matching function is loaded via `importer.import`, its parameter names are extracted with `getParameters`, and their values are filled in from the `context` object.
4. The matched function is invoked and its result stored in `answer`.
5. If the invoked function is `doStableRequest`, a combined object with the function's result, memories, and recent messages is returned. Otherwise, the function's result is returned with additional memories.

Note that the code has a few TODO comments and appears to be in the process of being refactored.