This function, askLlamaAboutCategories, queries the Llama language model about categories related to a given query and returns a list of notebook filenames containing matching categories.
npm run import -- "ask llm about categories"
const { functionCache } = importer.import("cache rpc functions with llm descriptions")
const { askLlamaAboutFunctions } = importer.import("ask llm about functions")

async function askLlamaAboutCategories(query) {
  // TODO: list all categories in database
  let keys = Object.keys(functionCache)
  // Collect every category value (both the singular and plural properties),
  // dropping empty values, duplicates, and multi-line entries
  let categories = keys.map(k => functionCache[k].categories)
    .concat(keys.map(k => functionCache[k].category))
    .filter((a, i, arr) => a && arr.indexOf(a) == i && !a.includes('\n'))

  // TODO: ask llm if any of the categories match, don't choose best one, choose all matches
  let returnValues = []
  let batch = []
  for (let i = 0; i < categories.length; i++) {
    batch.push(categories[i])
    // Ask the model about 20 categories at a time
    if (batch.length == 20) {
      let result = await askLlamaAboutFunctions(query, batch, [], true)
      batch = []
      if (result)
        returnValues = returnValues.concat(result)
    }
  }
  // Flush the final partial batch
  if (batch.length > 0) {
    let result = await askLlamaAboutFunctions(query, batch, [], true)
    if (result)
      returnValues = returnValues.concat(result)
  }

  // TODO: return notebook filenames that contain matching categories
  // Strip the cell index suffix (e.g. "[3]") from each cache key to get the notebook filename
  let matching = keys.filter(k => returnValues.includes(functionCache[k].category) || returnValues.includes(functionCache[k].categories))
    .map(k => k.replace(/\[[0-9]*\]/, ''))
    .filter((a, i, arr) => a && arr.indexOf(a) == i)
  return matching
}

module.exports = {
  askLlamaAboutCategories
}
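A minimal usage sketch, assuming the importer global provided by the notebook environment; the query string and the filenames in the comment are purely illustrative:

// Hypothetical usage; "database caching" is an illustrative query
const { askLlamaAboutCategories } = importer.import('ask llm about categories')

askLlamaAboutCategories('database caching')
  .then(notebooks => console.log(notebooks)) // e.g. ['cache.ipynb', 'llm.ipynb']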
A tidied version of the same cell, with deduplication handled by a Set and the batching pulled into a chunked helper:

const { functionCache } = importer.import('cache rpc functions with llm descriptions');
const { askLlamaAboutFunctions } = importer.import('ask llm about functions');
/**
* Asks the LLaMA model about categories and returns notebook filenames containing matching categories.
*
* @param {string} query - The query to ask the LLaMA model.
 * @returns {Promise<string[]>} - A promise resolving to an array of notebook filenames containing matching categories.
*/
async function askLlamaAboutCategories(query) {
// Get unique categories from function cache
  const categories = Array.from(new Set(
    Object.values(functionCache)
      // Treat both the singular and plural properties as scalar values
      .flatMap(fc => [fc.category, fc.categories])
  )).filter(c => typeof c === 'string' && !c.includes('\n'));
// Ask LLaMA model for matching categories
const results = [];
  const chunkSize = 20; // match the original batching of 20 categories per request
for (const chunk of chunked(categories, chunkSize)) {
const response = await askLlamaAboutFunctions(query, chunk, [], true);
if (response) results.push(...response);
}
// Get notebook filenames containing matching categories
const matching = Object.keys(functionCache)
.filter(k => results.includes(functionCache[k].category) || results.includes(functionCache[k].categories))
.map(k => k.replace(/\[[0-9]*\]/, ''))
.filter((a, i, arr) => a && arr.indexOf(a) == i);
return matching;
}
// Chunk an array into smaller arrays of a specified size
function chunked(array, size) {
return Array(Math.ceil(array.length / size))
.fill()
.map((_, i) => array.slice(i * size, (i + 1) * size));
}
module.exports = { askLlamaAboutCategories };
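For reference, chunked splits an array into consecutive fixed-size slices, with the last slice holding the remainder:

// chunked(['a', 'b', 'c', 'd', 'e'], 2)
// => [['a', 'b'], ['c', 'd'], ['e']]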
Code Breakdown

askLlamaAboutCategories queries the Llama language model about categories related to a given query.

Parameters:
- query (string): the query to ask the Llama language model.

Notes:
- The TODO comments indicate that the code is incomplete and requires further implementation.
- askLlamaAboutFunctions is called with true as its last argument; what this flag controls is not clear from this snippet alone.
- The functionCache object and the importer variable are not defined in this snippet; functionCache comes from the imported module, and importer is assumed to be provided by the surrounding notebook environment.
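For context, a hypothetical sketch of the cache shape this code implies: keys are notebook filenames with a bracketed cell index, and each entry carries a category and/or categories string. The filenames and category names below are invented for illustration:

// Hypothetical entries, showing only the fields this code reads
const functionCache = {
  'cache.ipynb[0]': { category: 'Caching' },
  'llm.ipynb[2]': { categories: 'Language Models' }
}
// k.replace(/\[[0-9]*\]/, '') then strips the "[0]" suffix, yielding 'cache.ipynb'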