The askLlamaGeneralizeCategories function generalizes a list of categories by asking the LLM to condense it, recursing until the list is short enough. If no categories are passed in, they are collected from the function cache, and the cache is updated with the new, generalized category of each cached function. It returns the refined list of categories; a usage sketch follows the code below.
npm run import -- "ask llm to generalize categories"
const { functionCache } = importer.import("cache rpc functions with llm descriptions")
const { askLlamaAboutFunctions } = importer.import("ask llm about functions")
const { storeLlamaFunction } = importer.import("store llama function")
const { askLlamaToGeneralizeAll } = importer.import("ask llm for a shorter list of categories")
async function askLlamaGeneralizeCategories(categories) {
  let update = false
  let keys = Object.keys(functionCache)
  // No categories passed in: collect the unique, single-line categories
  // from the function cache and remember to write the results back.
  if(!categories) {
    categories = keys.map(k => functionCache[k].categories)
      .filter((a, i, arr) => a && arr.indexOf(a) == i && !a.includes('\n'))
    update = true
  }
  if(categories.length == 0) {
    return []
  }
  // Ask the LLM to condense the list into a shorter set of categories.
  let uniqueValues = await askLlamaToGeneralizeAll(categories)
  console.log(uniqueValues)
  // Still too broad: recurse until the list is manageable, then de-duplicate.
  if(uniqueValues.length > 100) {
    uniqueValues = (await askLlamaGeneralizeCategories(uniqueValues))
      .filter((a, i, arr) => arr.indexOf(a) == i)
  }
  // When the categories came from the cache, match every cached function to
  // one of the generalized categories and store it alongside its metadata.
  if(update) {
    let convertedList = []
    for(let i = 0; i < keys.length; i++) {
      let newCategory = await askLlamaAboutFunctions(functionCache[keys[i]].categories, uniqueValues, [], true)
      if(newCategory) {
        storeLlamaFunction(keys[i], functionCache[keys[i]].mtime,
          functionCache[keys[i]].exports, functionCache[keys[i]].description,
          functionCache[keys[i]].summary, functionCache[keys[i]].categories, newCategory)
      }
      convertedList[i] = newCategory
    }
    console.log(convertedList)
  }
  return uniqueValues
}

module.exports = {
  askLlamaGeneralizeCategories
}
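For context, here is a minimal usage sketch; the surrounding cell and the example category list are assumptions for illustration, and only the import string from the command above is taken from this module.

// Hypothetical usage sketch (not part of this module).
const { askLlamaGeneralizeCategories } = importer.import('ask llm to generalize categories')

async function refreshCategories() {
  // No argument: categories are read from functionCache and the cache is updated.
  const generalized = await askLlamaGeneralizeCategories()
  console.log('generalized cache categories:', generalized)

  // Explicit list: only condenses the list, the cache is left untouched.
  const condensed = await askLlamaGeneralizeCategories(['http client', 'http server', 'web scraping'])
  console.log('condensed:', condensed)
}

refreshCategories().catch(console.error)

An annotated variant of the same cell, with JSDoc and error handling, follows.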
// Import the cached function metadata and the LLM helpers from their modules.
const { functionCache } = importer.import('cache rpc functions with llm descriptions');
const { askLlamaAboutFunctions } = importer.import('ask llm about functions');
const { storeLlamaFunction } = importer.import('store llama function');
const { askLlamaToGeneralizeAll } = importer.import('ask llm for a shorter list of categories');
/**
* Generalize categories using the LLM.
* @param {string[]} [categories] - List of categories to generalize.
* @returns {Promise<string[]>} A promise that resolves to a list of generalized categories.
*/
async function askLlamaGeneralizeCategories(categories = []) {
  // Only write back to the cache when the categories were read from it.
  const updateCache = !categories.length;
  if (!categories.length) {
    // If no categories are provided, retrieve the unique, single-line
    // categories from the function cache.
    categories = Object.keys(functionCache)
      .map((k) => functionCache[k].categories)
      .filter((a, i, arr) => a && arr.indexOf(a) === i && !a.includes('\n'));
  }
  if (categories.length === 0) {
    // If no categories are found, return an empty array.
    return [];
  }
  try {
    // Generalize the categories using the LLM.
    let uniqueValues = await askLlamaToGeneralizeAll(categories);
    console.log(uniqueValues);

    // If the list is still too large, recursively generalize it and remove duplicates.
    if (uniqueValues.length > 100) {
      const generalizedCategories = await askLlamaGeneralizeCategories(uniqueValues);
      uniqueValues = [...new Set(generalizedCategories)];
    }

    // Re-categorize every cached function against the generalized list and
    // store the new category alongside its existing metadata.
    if (updateCache) {
      const keys = Object.keys(functionCache);
      const convertedList = await Promise.all(
        keys.map(async (key) => {
          const newCategory = await askLlamaAboutFunctions(
            functionCache[key].categories, uniqueValues, [], true);
          if (newCategory) {
            storeLlamaFunction(
              key,
              functionCache[key].mtime,
              functionCache[key].exports,
              functionCache[key].description,
              functionCache[key].summary,
              functionCache[key].categories,
              newCategory
            );
          }
          return newCategory;
        })
      );
      console.log(convertedList);
    }

    return uniqueValues;
  } catch (error) {
    console.error(error);
    return [];
  }
}

module.exports = {
  askLlamaGeneralizeCategories,
};
Function Overview

The askLlamaGeneralizeCategories function condenses a list of categories into a shorter, generalized list by repeatedly asking the LLM, and optionally re-categorizes every cached function against the result.

Function Parameters

categories: The list of categories to be generalized (optional). When omitted, the unique categories are read from the function cache.

Function Return Value

A promise that resolves to the refined list of generalized categories, or an empty array when there is nothing to generalize.

Function Flow

1. If no categories are supplied, collect the unique, single-line categories from the function cache.
2. Ask the LLM to generalize the list with askLlamaToGeneralizeAll.
3. If the result still has more than 100 entries, recurse until it is small enough, removing duplicates.
4. When the categories came from the cache, ask the LLM to match each cached function to a generalized category and store it with storeLlamaFunction.
5. Return the generalized list.

Dependencies

functionCache: A cache of functions and their metadata.
askLlamaToGeneralizeAll: A function that asks the LLM for a shorter, generalized list of categories.
askLlamaAboutFunctions: A function that asks the LLM about a specific function or category.
storeLlamaFunction: A function that stores an LLM function's metadata and category in the cache.
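The helper signatures below are inferred only from how this cell calls them; the stub bodies are assumptions for illustration, useful for a dry run without a running LLM, not the real implementations.

// Hypothetical stubs mirroring the call shapes used above (assumptions, not the real helpers).
const stubs = {
  // askLlamaToGeneralizeAll(categories) -> Promise<string[]>
  askLlamaToGeneralizeAll: async (categories) => categories.slice(0, 10),

  // askLlamaAboutFunctions(categories, choices, args, single) -> Promise<string>
  askLlamaAboutFunctions: async (categories, choices, args, single) => choices[0],

  // storeLlamaFunction(key, mtime, exports, description, summary, categories, newCategory)
  storeLlamaFunction: (key, mtime, exports, description, summary, categories, newCategory) => {
    console.log('would store', key, '->', newCategory)
  }
}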