update tools

Olivier Chafik 2024-10-02 14:57:25 +01:00
parent 5b01402655
commit f3538e755b
7 changed files with 67 additions and 43 deletions

View file

@@ -44,13 +44,13 @@
     --chat-template-file tests/chat/templates/meta-llama-Llama-3.2-3B-Instruct.jinja
   ```
-- Run some tools inside a docker container (check http://localhost:8088/docs once running):
+- Run the tools in [examples/agent/tools](./examples/agent/tools) inside a docker container (check http://localhost:8088/docs once running):
   ```bash
   docker run -p 8088:8088 -w /src -v $PWD/examples/agent:/src \
     --env BRAVE_SEARCH_API_KEY=$BRAVE_SEARCH_API_KEY \
     --rm -it ghcr.io/astral-sh/uv:python3.12-alpine \
-    uv run fastify.py --port 8088 tools
+    uv run fastify.py --port 8088 tools/
   ```
 > [!WARNING]
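
For a quick sanity check once the container is up, something like the following could exercise one of the bound endpoints. This is a hypothetical example: it assumes fastify.py exposes each tool as `POST /<function name>` (as the binding code further below suggests) and that FastAPI maps a simple `url` string parameter to a query parameter.

```python
# Hypothetical smoke test against the fastify.py server started above.
import json
import urllib.parse
import urllib.request

query = urllib.parse.urlencode({'url': 'https://example.com'})
req = urllib.request.Request(f'http://localhost:8088/fetch_page?{query}', method='POST')
with urllib.request.urlopen(req) as res:
    print(json.loads(res.read()))  # FetchResult as JSON: markdown / content / error
```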

View file

@ -12,15 +12,29 @@
# /// # ///
''' '''
Discovers and binds python script functions as a FastAPI server. Discovers and binds python script functions as a FastAPI server.
Usage (docker isolation - with network access):
docker run -p 8088:8088 -w /src -v $PWD/examples/agent:/src \
--env BRAVE_SEARCH_API_KEY=$BRAVE_SEARCH_API_KEY \
--rm -it ghcr.io/astral-sh/uv:python3.12-alpine \
uv run fastify.py --port 8088 tools/
Usage (non-siloed, DANGEROUS):
uv run examples/agent/fastify.py --port 8088 examples/agent/tools
uv run examples/agent/fastify.py --port 8088 examples/agent/tools/python.py
''' '''
import fastapi
import importlib.util
import logging
import os import os
import sys
import fastapi, uvicorn
from pathlib import Path from pathlib import Path
import sys
import typer import typer
from typing import List from typing import List
import uvicorn
import importlib.util
def _load_source_as_module(source): def _load_source_as_module(source):
@ -45,11 +59,13 @@ def _load_module(f: str):
return importlib.import_module(f) return importlib.import_module(f)
def main(files: List[str], host: str = '0.0.0.0', port: int = 8000): def main(files: List[str], host: str = '0.0.0.0', port: int = 8000, verbose: bool = False):
logging.basicConfig(level=logging.DEBUG if verbose else logging.INFO)
app = fastapi.FastAPI() app = fastapi.FastAPI()
def load_python(f): def load_python(f):
print(f'Binding functions from {f}') logging.info(f'Binding functions from {f}')
module = _load_module(f) module = _load_module(f)
for k in dir(module): for k in dir(module):
if k.startswith('_'): if k.startswith('_'):
@ -66,11 +82,12 @@ def main(files: List[str], host: str = '0.0.0.0', port: int = 8000):
if vt.__module__ == 'langchain_core.tools' and vt.__name__.endswith('Tool') and hasattr(v, 'func') and callable(func := getattr(v, 'func')): if vt.__module__ == 'langchain_core.tools' and vt.__name__.endswith('Tool') and hasattr(v, 'func') and callable(func := getattr(v, 'func')):
v = func v = func
print(f'INFO: Binding /{k}')
try: try:
app.post('/' + k)(v) app.post('/' + k)(v)
logging.info(f'Bound /{k}')
except Exception as e: except Exception as e:
print(f'WARNING: Failed to bind /{k}\n\t{e}') logging.warning(f'Failed to bind /{k}\n\t{e}')
for f in files: for f in files:
if os.path.isdir(f): if os.path.isdir(f):
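
As a rough illustration of the binding idiom `load_python` relies on (decorating an already-existing callable with `app.post(...)`), a self-contained sketch might look like the following; the `add` tool and the port are made up for the example.

```python
import fastapi
import uvicorn

def add(a: int, b: int) -> int:
    'Toy tool: FastAPI derives the request parameters from the annotated signature.'
    return a + b

app = fastapi.FastAPI()
for name, fn in {'add': add}.items():
    app.post('/' + name)(fn)  # same idiom as app.post('/' + k)(v) above

if __name__ == '__main__':
    uvicorn.run(app, host='127.0.0.1', port=8000)
```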

View file

@@ -18,7 +18,7 @@ from openai.types.chat import ChatCompletionMessageParam, ChatCompletionToolMess
 from pydantic import BaseModel
 import sys
 import typer
-from typing import Annotated, Optional
+from typing import Optional
 import urllib.parse

 class OpenAPIMethod:

View file

@@ -1,9 +1,9 @@
 import aiohttp
-import sys
-from typing import Optional
-from pydantic import BaseModel
 import html2text
+import logging
+from pydantic import BaseModel
+from typing import Optional
+import sys

 class FetchResult(BaseModel):
@@ -18,11 +18,13 @@ async def fetch_page(url: str) -> FetchResult:
     '''
     try:
+        logging.debug(f'[fetch_page] Fetching %s', url)
         async with aiohttp.ClientSession() as session:
             async with session.get(url) as res:
                 res.raise_for_status()
                 content = await res.text()
     except aiohttp.ClientError as e:
+        logging.error('[fetch_page] Failed to fetch %s: %s', url, e)
         return FetchResult(error=str(e))

     # NOTE: Pyppeteer doesn't work great in docker, short of installing a bunch of dependencies
@@ -34,13 +36,13 @@ async def fetch_page(url: str) -> FetchResult:
     # response = await page.goto(url)
     # if not response.ok:
-    #     return FetchResult(error=f"HTTP {response.status} {response.statusText}")
+    #     return FetchResult(error=f'HTTP {response.status} {response.statusText}')
     # content=await page.content()
     # except TimeoutError:
-    #     return FetchResult(error="Page load timed out")
+    #     return FetchResult(error='Page load timed out')
     # except NetworkError:
-    #     return FetchResult(error="Network error occurred")
+    #     return FetchResult(error='Network error occurred')
     # except Exception as e:
     #     return FetchResult(error=str(e))
     # finally:
@@ -54,5 +56,5 @@ async def fetch_page(url: str) -> FetchResult:
         markdown = h.handle(content)
         return FetchResult(markdown=markdown)
     except Exception as e:
-        print(f'Failed to convert HTML of {url} to markdown: {e}', file=sys.stderr)
+        logging.warning('[fetch_page] Failed to convert HTML of %s to markdown: %s', url, e)
         return FetchResult(content=content)
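
A minimal way to exercise the fetch tool directly, outside the FastAPI server, could look like the sketch below; the `fetch` module name is an assumption for the example.

```python
# Hypothetical direct call to the tool above; the module name `fetch` is assumed.
import asyncio

from fetch import fetch_page

result = asyncio.run(fetch_page('https://example.com'))
print(result.error or result.markdown or result.content)
```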

View file

@@ -1,10 +1,11 @@
 from IPython.core.interactiveshell import InteractiveShell
 from io import StringIO
+import logging
 import sys

 def python(code: str) -> str:
-    """
+    '''
     Execute Python code in a siloed environment using IPython and returns the output.

     Parameters:
@@ -12,7 +13,8 @@ def python(code: str) -> str:
     Returns:
         str: The output of the executed code.
-    """
+    '''
+    logging.debug('[python] Executing %s', code)
     shell = InteractiveShell()

     old_stdout = sys.stdout
@@ -21,7 +23,8 @@ def python(code: str) -> str:
     try:
         shell.run_cell(code)
     except Exception as e:
-        return f"An error occurred: {e}"
+        logging.debug('[python] Execution failed: %s\nCode: %s', e, code)
+        return f'An error occurred: {e}'
     finally:
         sys.stdout = old_stdout
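
The tool's output capture rests on temporarily swapping `sys.stdout` for a `StringIO` buffer while the cell runs and restoring it afterwards; stripped of IPython, the pattern is just:

```python
import sys
from io import StringIO

buf, old_stdout = StringIO(), sys.stdout
sys.stdout = buf
try:
    exec('print(1 + 1)')  # stand-in for shell.run_cell(code)
finally:
    sys.stdout = old_stdout  # always restore, even if the cell raised
print('captured:', buf.getvalue().strip())  # -> captured: 2
```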

View file

@@ -1,8 +1,8 @@
 import aiohttp
 import itertools
 import json
+import logging
 import os
-import sys
 from typing import Dict, List
 import urllib.parse
@@ -19,17 +19,17 @@ def _extract_values(keys, obj):
 # Let's keep this tool aligned w/ llama_stack.providers.impls.meta_reference.agents.tools.builtin.BraveSearch
 # (see https://github.com/meta-llama/llama-stack/blob/main/llama_stack/providers/impls/meta_reference/agents/tools/builtin.py)
 _result_keys_by_type = {
-    "web": ("type", "title", "url", "description", "date", "extra_snippets"),
-    "videos": ("type", "title", "url", "description", "date"),
-    "news": ("type", "title", "url", "description"),
-    "infobox": ("type", "title", "url", "description", "long_desc"),
-    "locations": ("type", "title", "url", "description", "coordinates", "postal_address", "contact", "rating", "distance", "zoom_level"),
-    "faq": ("type", "title", "url", "question", "answer"),
+    'web': ('type', 'title', 'url', 'description', 'date', 'extra_snippets'),
+    'videos': ('type', 'title', 'url', 'description', 'date'),
+    'news': ('type', 'title', 'url', 'description'),
+    'infobox': ('type', 'title', 'url', 'description', 'long_desc'),
+    'locations': ('type', 'title', 'url', 'description', 'coordinates', 'postal_address', 'contact', 'rating', 'distance', 'zoom_level'),
+    'faq': ('type', 'title', 'url', 'question', 'answer'),
 }

 async def brave_search(query: str, max_results: int = 10) -> List[Dict]:
-    """
+    '''
     Search the Brave Search API for the specified query.

     Parameters:
@@ -38,9 +38,10 @@ async def brave_search(query: str, max_results: int = 10) -> List[Dict]:
     Returns:
         List[Dict]: The search results.
-    """
+    '''
+    logging.debug('[brave_search] Searching for %s', query)
-    url = f"https://api.search.brave.com/res/v1/web/search?q={urllib.parse.quote(query)}"
+    url = f'https://api.search.brave.com/res/v1/web/search?q={urllib.parse.quote(query)}'
     headers = {
         'Accept': 'application/json',
         'Accept-Encoding': 'gzip',
@@ -52,13 +53,13 @@ async def brave_search(query: str, max_results: int = 10) -> List[Dict]:
             result_type = m['type']
             keys = _result_keys_by_type.get(result_type)
             if keys is None:
-                print(f'[brave_search] Unknown result type: {result_type}', file=sys.stderr)
+                logging.warning(f'[brave_search] Unknown result type: %s', result_type)
                 continue
-            results_of_type = search_response[result_type]["results"]
-            if (idx := m.get("index")) is not None:
+            results_of_type = search_response[result_type]['results']
+            if (idx := m.get('index')) is not None:
                 yield _extract_values(keys, results_of_type[idx])
-            elif m["all"]:
+            elif m['all']:
                 for r in results_of_type:
                     yield _extract_values(keys, r)
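
Calling the search tool directly might look like the sketch below; the `search` module name is assumed, and the API key is presumably read from the `BRAVE_SEARCH_API_KEY` environment variable that the docker example above passes through.

```python
# Hypothetical direct invocation of the tool above; module name `search` is assumed.
import asyncio
import os

from search import brave_search

assert os.environ.get('BRAVE_SEARCH_API_KEY'), 'export BRAVE_SEARCH_API_KEY first'
results = asyncio.run(brave_search('llama.cpp tool calling', max_results=3))
for r in results:
    print(r)
```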

View file

@@ -1,7 +1,7 @@
 import asyncio
 import datetime
+import logging
 from pydantic import BaseModel
-import sys
 from typing import Optional

 class Duration(BaseModel):
@@ -40,12 +40,12 @@
 class WaitForDuration(BaseModel):
     duration: Duration

-    async def __call__(self):
-        sys.stderr.write(f"Waiting for {self.duration}...\n")
-        await asyncio.sleep(self.duration.get_total_seconds)

 async def wait_for_duration(duration: Duration) -> None:
-    'Wait for a certain amount of time before continuing.'
+    '''
+    Wait for a certain amount of time before continuing.
+    '''
+    logging.debug(f"[wait_for_duration] Waiting for %s...", duration.get_total_seconds)
     await asyncio.sleep(duration.get_total_seconds)

 async def wait_for_date(target_date: datetime.date) -> None:
@@ -55,10 +55,11 @@ async def wait_for_date(target_date: datetime.date) -> None:
     '''
     current_date = datetime.date.today()
     if target_date < current_date:
         raise ValueError("Target date cannot be in the past.")

+    logging.debug(f"[wait_for_date] Waiting until %s (current date = %s)...", target_date, current_date)
     time_diff = datetime.datetime.combine(target_date, datetime.time.min) - datetime.datetime.combine(current_date, datetime.time.min)
     days, seconds = time_diff.days, time_diff.seconds