llama vision | llm voice | ollama vision request | Search

The code is an asynchronous Python script that uses the custom modules browser_use and langchain_ollama to run a search task on the r/LocalLLaMA subreddit. The script defines two asynchronous functions: run_search() and main().

Cell 6


import asyncio
from browser_use import Agent
from browser_use.agent.views import AgentHistoryList
from langchain_ollama import ChatOllama


async def run_search() -> AgentHistoryList:
    """Drive a browser-use agent through the subreddit search task.

    Returns:
        AgentHistoryList: the full action history produced by the agent run.
    """
    # Local Ollama chat model; alternative model tags kept for easy swapping.
    llm = ChatOllama(
        # model="qwen2.5:32b-instruct-q4_K_M",
        # model="deepseek-r1:14b",
        model="qwen2.5:latest",
        num_ctx=32000,
    )
    agent = Agent(
        task="Search for a 'browser use' post on the r/LocalLLaMA subreddit and open it.",
        llm=llm,
    )
    return await agent.run()


async def main():
    """Run the search task and print the resulting agent history."""
    history = await run_search()
    print("\n\n", history)

__all__ = {
  "main": main,
  "run_search": run_search
}

What the code could have been:

# Import necessary libraries
import asyncio
from browser_use import Agent
from browser_use.agent.views import AgentHistoryList
from langchain_ollama import ChatOllama

# Define constants for LLama model and context
DEFAULT_LLM_MODEL = "qwen2.5:latest"  # default Ollama model tag handed to ChatOllama
DEFAULT_NUM_CTX = 32000  # context window size (num_ctx) requested from the model

class LlamaAgent:
    """Wraps a browser-use ``Agent`` driven by a local Ollama chat model."""

    def __init__(self, model=DEFAULT_LLM_MODEL, num_ctx=DEFAULT_NUM_CTX):
        """Build the underlying browser agent.

        Args:
            model (str): Ollama model tag to load. Defaults to DEFAULT_LLM_MODEL.
            num_ctx (int): Context window size for the model. Defaults to DEFAULT_NUM_CTX.
        """
        llm = ChatOllama(model=model, num_ctx=num_ctx)
        self.agent = Agent(
            task="Search for a 'browser use' post on the r/LocalLLaMA subreddit and open it.",
            llm=llm,
        )

    async def run_search(self) -> AgentHistoryList:
        """Execute the agent's task and hand back its run history.

        Returns:
            AgentHistoryList: The result of the search task.
        """
        return await self.agent.run()


async def main():
    """Script entry point: build an agent, run the search, print the outcome."""
    searcher = LlamaAgent()
    outcome = await searcher.run_search()
    print("\n\n", outcome)


if __name__ == "__main__":
    # Run the main function
    asyncio.run(main())

Code Breakdown

Imported Modules

Functions

run_search()

main()

Exported Functions

Notes