diff --git a/examples/agent/Dockerfile.tools b/examples/agent/Dockerfile.tools
index a26244f4b..d27b64803 100644
--- a/examples/agent/Dockerfile.tools
+++ b/examples/agent/Dockerfile.tools
@@ -4,6 +4,7 @@ RUN python -m pip install --upgrade pip && \
     apt clean cache
 
 COPY requirements.txt /root/
+# COPY . /root/
 WORKDIR /root
 RUN pip install -r requirements.txt
 
@@ -13,5 +14,5 @@ COPY ./tools/*.py /root/tools/
 COPY ./squid/ssl_cert/squidCA.crt /usr/local/share/ca-certificates/squidCA.crt
 RUN chmod 644 /usr/local/share/ca-certificates/squidCA.crt && update-ca-certificates
 
-# ENTRYPOINT [ "python" ]
-# CMD ["serve_tools.py"]
+ENTRYPOINT [ "uvicorn" ]
+CMD ["serve_tools:app", "--host", "0.0.0.0", "--port", "8088"]
\ No newline at end of file
diff --git a/examples/agent/docker-compose.yml b/examples/agent/docker-compose.yml
index df04b1fc2..fbbe005da 100644
--- a/examples/agent/docker-compose.yml
+++ b/examples/agent/docker-compose.yml
@@ -32,12 +32,11 @@ services:
     networks:
       - private_net
     environment:
-      - PORT=8088
+      - VERBOSE=1
       - BRAVE_SEARCH_API_KEY=${BRAVE_SEARCH_API_KEY}
+      - REQUESTS_CA_BUNDLE=/usr/local/share/ca-certificates/squidCA.crt
       - http_proxy=http://outgoing_proxy:3128
       - https_proxy=http://outgoing_proxy:3128
-    entrypoint: python
-    command: serve_tools.py
     # entrypoint: /usr/bin/bash
     # command: ["-c", "pip install --upgrade gguf && apt update && apt install -y curl && curl https://ochafik.com && pip install gguf"]
 
diff --git a/examples/agent/serve_tools.py b/examples/agent/serve_tools.py
index 70c4b0225..b20d6dcdf 100644
--- a/examples/agent/serve_tools.py
+++ b/examples/agent/serve_tools.py
@@ -12,6 +12,7 @@ uv run examples/agent/serve_tools.py --port 8088
 '''
 
+import asyncio
 import logging
 import re
 import fastapi
 
@@ -24,6 +25,11 @@ from tools.fetch import fetch_page
 from tools.search import brave_search
 from tools.python import python, python_tools
 
+# try:
+#     # https://github.com/aio-libs/aiohttp/discussions/6044
+#     setattr(asyncio.sslproto._SSLProtocolTransport, "_start_tls_compatible", True) # type: ignore
+# except Exception as e:
+#     print(f'Failed to patch asyncio: {e}', file=sys.stderr)
 
 verbose = os.environ.get('VERBOSE', '0') == '1'
 include = os.environ.get('INCLUDE_TOOLS')
diff --git a/examples/agent/squid/conf/squid.conf b/examples/agent/squid/conf/squid.conf
index ce649e10a..90f660feb 100755
--- a/examples/agent/squid/conf/squid.conf
+++ b/examples/agent/squid/conf/squid.conf
@@ -23,7 +23,7 @@ http_access allow all
 refresh_pattern -i ($|\.)(files\.pythonhosted\.org|pypi\.org)/.*?\.(whl|zip|tar\.gz)$ 10080 90% 43200 reload-into-ims
 
 # Cache Debian packages
-refresh_pattern \.debian\.org/.*?\.(deb|udeb|tar\.(gz|xz|bz2)$ 129600 100% 129600
+refresh_pattern \.debian\.org/.*?\.(deb|udeb|tar\.(gz|xz|bz2))$ 129600 100% 129600
 
 # Configure cache
 cache_dir ufs /var/spool/squid 10000 16 256
diff --git a/examples/agent/tools/fetch.py b/examples/agent/tools/fetch.py
index b354c4911..d1aff4887 100644
--- a/examples/agent/tools/fetch.py
+++ b/examples/agent/tools/fetch.py
@@ -1,6 +1,7 @@
-import aiohttp
+# import aiohttp
 import html2text
 import logging
+import requests
 
 
 async def fetch_page(url: str):
@@ -10,11 +11,16 @@
 
     try:
         logging.debug(f'[fetch_page] Fetching %s', url)
-        async with aiohttp.ClientSession() as session:
-            async with session.get(url) as res:
-                res.raise_for_status()
-                content = await res.text()
-    except aiohttp.ClientError as e:
+        response = requests.get(url)
+        response.raise_for_status()
+        content = response.text
+        # async with aiohttp.ClientSession(trust_env=True) as session:
+        #     async with session.get(url) as res:
+        #         res.raise_for_status()
+        #         content = await res.text()
+    # except aiohttp.ClientError as e:
+    #     raise Exception(f'Failed to fetch {url}: {e}')
+    except requests.exceptions.RequestException as e:
         raise Exception(f'Failed to fetch {url}: {e}')
 
 # NOTE: Pyppeteer doesn't work great in docker, short of installing a bunch of dependencies
diff --git a/examples/agent/tools/search.py b/examples/agent/tools/search.py
index 63c92d8a1..c36c2cbab 100644
--- a/examples/agent/tools/search.py
+++ b/examples/agent/tools/search.py
@@ -1,13 +1,13 @@
-import sys
-from pydantic import Field
-import aiohttp
+# import aiohttp
 import itertools
 import json
 import logging
 import os
-from typing import Annotated, Dict, List
+from typing import Dict, List
 import urllib.parse
+
+import requests
 
 
 def _extract_values(keys, obj):
     values = {}
@@ -66,13 +66,18 @@ async def brave_search(*, query: str) -> List[Dict]:
             for r in results_of_type:
                 yield _extract_values(keys, r)
 
-    async with aiohttp.ClientSession() as session:
-        async with session.get(url, headers=headers) as res:
-            if not res.ok:
-                raise Exception(await res.text())
-            res.raise_for_status()
-            response = await res.json()
+    res = requests.get(url, headers=headers)
+    if not res.ok:
+        raise Exception(res.text)
+    res.raise_for_status()
+    response = res.json()
+    # async with aiohttp.ClientSession(trust_env=True) as session:
+    #     async with session.get(url, headers=headers) as res:
+    #         if not res.ok:
+    #             raise Exception(await res.text())
+    #         res.raise_for_status()
+    #         response = await res.json()
 
-        results = list(itertools.islice(extract_results(response), max_results))
-        print(json.dumps(dict(query=query, response=response, results=results), indent=2))
-        return results
+    results = list(itertools.islice(extract_results(response), max_results))
+    print(json.dumps(dict(query=query, response=response, results=results), indent=2))
+    return results