update tools

Olivier Chafik 2024-10-02 14:57:25 +01:00
parent 5b01402655
commit f3538e755b
7 changed files with 67 additions and 43 deletions

View file

@@ -44,13 +44,13 @@
--chat-template-file tests/chat/templates/meta-llama-Llama-3.2-3B-Instruct.jinja
```
- Run some tools inside a docker container (check http://localhost:8088/docs once running):
- Run the tools in [examples/agent/tools](./examples/agent/tools) inside a docker container (check http://localhost:8088/docs once running):
```bash
docker run -p 8088:8088 -w /src -v $PWD/examples/agent:/src \
--env BRAVE_SEARCH_API_KEY=$BRAVE_SEARCH_API_KEY \
--rm -it ghcr.io/astral-sh/uv:python3.12-alpine \
uv run fastify.py --port 8088 tools
uv run fastify.py --port 8088 tools/
```
> [!WARNING]
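
Once the container is running, each discovered tool function is exposed as a POST endpoint on the FastAPI server. As a rough illustration (not part of this change), calling the `python` tool from `tools/` might look like the sketch below, assuming FastAPI maps its `code` argument to a query parameter:

```python
# Hypothetical client call against the tool server started above.
# Assumes the server is up on localhost:8088 and a `python(code: str)` tool is bound.
import urllib.parse
import urllib.request

query = urllib.parse.urlencode({'code': 'print(1 + 1)'})
req = urllib.request.Request(f'http://localhost:8088/python?{query}', method='POST')
with urllib.request.urlopen(req) as res:
    print(res.read().decode())
```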

View file

@@ -12,15 +12,29 @@
# ///
'''
Discovers and binds python script functions as a FastAPI server.
Usage (docker isolation - with network access):
docker run -p 8088:8088 -w /src -v $PWD/examples/agent:/src \
--env BRAVE_SEARCH_API_KEY=$BRAVE_SEARCH_API_KEY \
--rm -it ghcr.io/astral-sh/uv:python3.12-alpine \
uv run fastify.py --port 8088 tools/
Usage (non-siloed, DANGEROUS):
uv run examples/agent/fastify.py --port 8088 examples/agent/tools
uv run examples/agent/fastify.py --port 8088 examples/agent/tools/python.py
'''
import fastapi
import importlib.util
import logging
import os
import sys
import fastapi, uvicorn
from pathlib import Path
import sys
import typer
from typing import List
import importlib.util
import uvicorn
def _load_source_as_module(source):
@@ -45,11 +59,13 @@ def _load_module(f: str):
return importlib.import_module(f)
def main(files: List[str], host: str = '0.0.0.0', port: int = 8000):
def main(files: List[str], host: str = '0.0.0.0', port: int = 8000, verbose: bool = False):
logging.basicConfig(level=logging.DEBUG if verbose else logging.INFO)
app = fastapi.FastAPI()
def load_python(f):
print(f'Binding functions from {f}')
logging.info(f'Binding functions from {f}')
module = _load_module(f)
for k in dir(module):
if k.startswith('_'):
@@ -66,11 +82,12 @@ def main(files: List[str], host: str = '0.0.0.0', port: int = 8000):
if vt.__module__ == 'langchain_core.tools' and vt.__name__.endswith('Tool') and hasattr(v, 'func') and callable(func := getattr(v, 'func')):
v = func
print(f'INFO: Binding /{k}')
try:
app.post('/' + k)(v)
logging.info(f'Bound /{k}')
except Exception as e:
print(f'WARNING: Failed to bind /{k}\n\t{e}')
logging.warning(f'Failed to bind /{k}\n\t{e}')
for f in files:
if os.path.isdir(f):
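
The key mechanism above is that FastAPI lets any plain callable be registered as a route after the fact, which is what `app.post('/' + k)(v)` does for each discovered function. A minimal sketch of that pattern, using a hypothetical `add` tool rather than anything from this repo:

```python
# Minimal sketch of the route-binding pattern used by fastify.py.
import fastapi
import uvicorn

app = fastapi.FastAPI()

def add(a: int, b: int) -> int:
    '''Hypothetical tool function; FastAPI derives its schema from the signature.'''
    return a + b

# Same call shape as `app.post('/' + k)(v)` in fastify.py's load_python().
app.post('/add')(add)

if __name__ == '__main__':
    uvicorn.run(app, host='0.0.0.0', port=8000)
```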

View file

@@ -18,7 +18,7 @@ from openai.types.chat import ChatCompletionMessageParam, ChatCompletionToolMess
from pydantic import BaseModel
import sys
import typer
from typing import Annotated, Optional
from typing import Optional
import urllib.parse
class OpenAPIMethod:

View file

@@ -1,9 +1,9 @@
import aiohttp
import sys
from typing import Optional
from pydantic import BaseModel
import html2text
import logging
from pydantic import BaseModel
from typing import Optional
import sys
class FetchResult(BaseModel):
@@ -18,11 +18,13 @@ async def fetch_page(url: str) -> FetchResult:
'''
try:
logging.debug('[fetch_page] Fetching %s', url)
async with aiohttp.ClientSession() as session:
async with session.get(url) as res:
res.raise_for_status()
content = await res.text()
except aiohttp.ClientError as e:
logging.error('[fetch_page] Failed to fetch %s: %s', url, e)
return FetchResult(error=str(e))
# NOTE: Pyppeteer doesn't work great in docker, short of installing a bunch of dependencies
@@ -34,13 +36,13 @@ async def fetch_page(url: str) -> FetchResult:
# response = await page.goto(url)
# if not response.ok:
# return FetchResult(error=f"HTTP {response.status} {response.statusText}")
# return FetchResult(error=f'HTTP {response.status} {response.statusText}')
# content=await page.content()
# except TimeoutError:
# return FetchResult(error="Page load timed out")
# return FetchResult(error='Page load timed out')
# except NetworkError:
# return FetchResult(error="Network error occurred")
# return FetchResult(error='Network error occurred')
# except Exception as e:
# return FetchResult(error=str(e))
# finally:
@@ -54,5 +56,5 @@ async def fetch_page(url: str) -> FetchResult:
markdown = h.handle(content)
return FetchResult(markdown=markdown)
except Exception as e:
print(f'Failed to convert HTML of {url} to markdown: {e}', file=sys.stderr)
logging.warning('[fetch_page] Failed to convert HTML of %s to markdown: %s', url, e)
return FetchResult(content=content)
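
For reference, the overall flow of `fetch_page` (fetch with aiohttp, then convert the HTML to markdown with html2text) boils down to something like the sketch below; the `ignore_links` setting is an assumption, as the `HTML2Text` configuration sits outside this hunk:

```python
# Standalone sketch of the fetch-and-convert flow used by fetch_page.
import asyncio

import aiohttp
import html2text

async def page_to_markdown(url: str) -> str:
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as res:
            res.raise_for_status()
            content = await res.text()
    h = html2text.HTML2Text()
    h.ignore_links = False  # assumed setting; not shown in the diff
    return h.handle(content)

if __name__ == '__main__':
    print(asyncio.run(page_to_markdown('https://example.com')))
```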

View file

@@ -1,10 +1,11 @@
from IPython.core.interactiveshell import InteractiveShell
from io import StringIO
import logging
import sys
def python(code: str) -> str:
"""
'''
Execute Python code in a siloed environment using IPython and returns the output.
Parameters:
@@ -12,7 +13,8 @@ def python(code: str) -> str:
Returns:
str: The output of the executed code.
"""
'''
logging.debug('[python] Executing %s', code)
shell = InteractiveShell()
old_stdout = sys.stdout
@@ -21,7 +23,8 @@ def python(code: str) -> str:
try:
shell.run_cell(code)
except Exception as e:
return f"An error occurred: {e}"
logging.debug('[python] Execution failed: %s\nCode: %s', e, code)
return f'An error occurred: {e}'
finally:
sys.stdout = old_stdout
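
The surrounding lines implement a classic stdout-capture pattern: swap `sys.stdout` for a `StringIO` while `run_cell` executes, then restore it in `finally`. A self-contained sketch of the same idea (the `run_and_capture` name is illustrative):

```python
# Sketch of the stdout-capture pattern around InteractiveShell.run_cell().
from io import StringIO
import sys

from IPython.core.interactiveshell import InteractiveShell

def run_and_capture(code: str) -> str:
    shell = InteractiveShell()
    old_stdout = sys.stdout
    sys.stdout = StringIO()
    try:
        shell.run_cell(code)
        return sys.stdout.getvalue()
    finally:
        sys.stdout = old_stdout

if __name__ == '__main__':
    print(run_and_capture('print(21 * 2)'))
```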

View file

@@ -1,8 +1,8 @@
import aiohttp
import itertools
import json
import logging
import os
import sys
from typing import Dict, List
import urllib.parse
@@ -19,17 +19,17 @@ def _extract_values(keys, obj):
# Let's keep this tool aligned w/ llama_stack.providers.impls.meta_reference.agents.tools.builtin.BraveSearch
# (see https://github.com/meta-llama/llama-stack/blob/main/llama_stack/providers/impls/meta_reference/agents/tools/builtin.py)
_result_keys_by_type = {
"web": ("type", "title", "url", "description", "date", "extra_snippets"),
"videos": ("type", "title", "url", "description", "date"),
"news": ("type", "title", "url", "description"),
"infobox": ("type", "title", "url", "description", "long_desc"),
"locations": ("type", "title", "url", "description", "coordinates", "postal_address", "contact", "rating", "distance", "zoom_level"),
"faq": ("type", "title", "url", "question", "answer"),
'web': ('type', 'title', 'url', 'description', 'date', 'extra_snippets'),
'videos': ('type', 'title', 'url', 'description', 'date'),
'news': ('type', 'title', 'url', 'description'),
'infobox': ('type', 'title', 'url', 'description', 'long_desc'),
'locations': ('type', 'title', 'url', 'description', 'coordinates', 'postal_address', 'contact', 'rating', 'distance', 'zoom_level'),
'faq': ('type', 'title', 'url', 'question', 'answer'),
}
async def brave_search(query: str, max_results: int = 10) -> List[Dict]:
"""
'''
Search the Brave Search API for the specified query.
Parameters:
@@ -38,9 +38,10 @@ async def brave_search(query: str, max_results: int = 10) -> List[Dict]:
Returns:
List[Dict]: The search results.
"""
'''
logging.debug('[brave_search] Searching for %s', query)
url = f"https://api.search.brave.com/res/v1/web/search?q={urllib.parse.quote(query)}"
url = f'https://api.search.brave.com/res/v1/web/search?q={urllib.parse.quote(query)}'
headers = {
'Accept': 'application/json',
'Accept-Encoding': 'gzip',
@@ -52,13 +53,13 @@ async def brave_search(query: str, max_results: int = 10) -> List[Dict]:
result_type = m['type']
keys = _result_keys_by_type.get(result_type)
if keys is None:
print(f'[brave_search] Unknown result type: {result_type}', file=sys.stderr)
logging.warning('[brave_search] Unknown result type: %s', result_type)
continue
results_of_type = search_response[result_type]["results"]
if (idx := m.get("index")) is not None:
results_of_type = search_response[result_type]['results']
if (idx := m.get('index')) is not None:
yield _extract_values(keys, results_of_type[idx])
elif m["all"]:
elif m['all']:
for r in results_of_type:
yield _extract_values(keys, r)
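
For context, the request half of `brave_search` amounts to a single authenticated GET; a rough sketch follows. The `X-Subscription-Token` header name and the use of `BRAVE_SEARCH_API_KEY` are assumptions based on the README, since the full headers dict is outside this hunk:

```python
# Rough sketch of the request side of brave_search (response shaping omitted).
import asyncio
import json
import os
import urllib.parse

import aiohttp

async def raw_brave_search(query: str) -> dict:
    url = f'https://api.search.brave.com/res/v1/web/search?q={urllib.parse.quote(query)}'
    headers = {
        'Accept': 'application/json',
        'Accept-Encoding': 'gzip',
        'X-Subscription-Token': os.environ['BRAVE_SEARCH_API_KEY'],  # assumed header name
    }
    async with aiohttp.ClientSession() as session:
        async with session.get(url, headers=headers) as res:
            res.raise_for_status()
            return await res.json()

if __name__ == '__main__':
    print(json.dumps(asyncio.run(raw_brave_search('llama.cpp')), indent=2))
```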

View file

@@ -1,7 +1,7 @@
import asyncio
import datetime
import logging
from pydantic import BaseModel
import sys
from typing import Optional
class Duration(BaseModel):
@@ -40,12 +40,12 @@ class Duration(BaseModel):
class WaitForDuration(BaseModel):
duration: Duration
async def __call__(self):
sys.stderr.write(f"Waiting for {self.duration}...\n")
await asyncio.sleep(self.duration.get_total_seconds)
async def wait_for_duration(duration: Duration) -> None:
'Wait for a certain amount of time before continuing.'
'''
Wait for a certain amount of time before continuing.
'''
logging.debug(f"[wait_for_duration] Waiting for %s...", duration.get_total_seconds)
await asyncio.sleep(duration.get_total_seconds)
async def wait_for_date(target_date: datetime.date) -> None:
@@ -55,10 +55,11 @@ async def wait_for_date(target_date: datetime.date) -> None:
'''
current_date = datetime.date.today()
if target_date < current_date:
raise ValueError("Target date cannot be in the past.")
logging.debug(f"[wait_for_date] Waiting until %s (current date = %s)...", target_date, current_date)
time_diff = datetime.datetime.combine(target_date, datetime.time.min) - datetime.datetime.combine(current_date, datetime.time.min)
days, seconds = time_diff.days, time_diff.seconds
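
The tail of `wait_for_date` is cut off by the hunk; presumably the `days`/`seconds` split feeds an `asyncio.sleep`. A hedged sketch of that final step, under that assumption:

```python
# Sketch of turning the computed date difference into a sleep, assuming
# wait_for_date ends by awaiting the remaining interval.
import asyncio
import datetime

async def sleep_until_date(target_date: datetime.date) -> None:
    current_date = datetime.date.today()
    time_diff = (datetime.datetime.combine(target_date, datetime.time.min)
                 - datetime.datetime.combine(current_date, datetime.time.min))
    days, seconds = time_diff.days, time_diff.seconds
    await asyncio.sleep(days * 24 * 3600 + seconds)
```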