diff --git a/mxAgent/agent_sdk/executor/recipe_executor/executor.py b/mxAgent/agent_sdk/executor/recipe_executor/executor.py
index 93581364dc1e6d4c93311a94997aa4795658bec0..fd3befb6e29f3aa29817e685728b21872827cf91 100644
--- a/mxAgent/agent_sdk/executor/recipe_executor/executor.py
+++ b/mxAgent/agent_sdk/executor/recipe_executor/executor.py
@@ -4,6 +4,7 @@ from typing import Dict
 import asyncio
 import time
+from datetime import datetime, timezone
 import re
 import traceback
 import json
@@ -143,8 +144,8 @@ class AgentExecutor():
     def run_task(self, action, executor_state, llm):
         graph = executor_state.sop_graph.actions
         sop_handler = self.operation_handler
-        mommt = time.time()
-        logger.debug(f'{action.name} start:{mommt}')
+        mommt = datetime.now(tz=timezone.utc)
+        logger.debug(f'{action.name} start: {mommt.strftime("%Y-%m-%d %H:%M:%S")}')
 
         output = sop_handler.invoke(action, llm=llm)
         parsed_out, history = self.parser_output(output, action)
@@ -156,8 +157,8 @@ class AgentExecutor():
             "history": history
         }
 
-        mommt = time.time()
-        logger.debug(f'step {action.step}. action: {action.name} has finished')
+        mommt = datetime.now(tz=timezone.utc)
+        logger.debug(f'step {action.step}. action: {action.name} has finished. {mommt.strftime("%Y-%m-%d %H:%M:%S")}')
         return res
 
@@ -187,9 +188,8 @@ class AgentExecutor():
         executor_state.activate_actions = activate_actions
         while executor_state.activate_actions:  # 活跃的
-            cur_operation = executor_state.activate_actions.pop(
-                0)  # starts from right
-            mommt = time.time()
+            cur_operation = executor_state.activate_actions.pop(0)
+            # starts from right
             logger.error("start run step %d, action [%s]", cur_operation.step, cur_operation.name)
             output = sop_handler.invoke(cur_operation)
             parsed_out, history = self.parser_output(output, cur_operation)
diff --git a/mxAgent/requirements.txt b/mxAgent/requirements.txt
index 701bfc1bf586708fb7d99074b8203349e5d08615..37bd91f112675de8fff761ac72a48fbdeb80e7dc 100644
--- a/mxAgent/requirements.txt
+++ b/mxAgent/requirements.txt
@@ -1,6 +1,5 @@
-requests=2.27.1
+requests==2.27.1
 tqdm
-selenium=4.9.0
 bs4
 transformers
 openai
@@ -9,11 +8,11 @@ datasets
 peft
 fschat
 langchain
-vllm
 rouge
 langchain_openai
 colorlog
 rouge-score
-langchain-community=0.38
+langchain-community==0.3.8
 loguru
-tiktoken
\ No newline at end of file
+tiktoken
+duckduckgo-search==6.3.7
\ No newline at end of file
diff --git a/mxAgent/samples/basic_demo/agent_test.py b/mxAgent/samples/basic_demo/agent_test.py
index 0d037fcb0d38794f7e6bfb8874413d8193b52cbf..fa79ab18fba0333a0818880ebfe9a3a97bb30f32 100644
--- a/mxAgent/samples/basic_demo/agent_test.py
+++ b/mxAgent/samples/basic_demo/agent_test.py
@@ -88,7 +88,7 @@ def get_default_react_reflect_agent(api_base, api_key, llm_name, max_context_len
 def test_react_agent():
     a = get_default_react_agent_fewshot(API_BASE, API_KEY, LLM_NAME, MAX_CONTEXT_LEN)
     response = a.run("Can you help with a 5 day trip from Orlando to Paris? Departure date is April 10, 2022.")
-    
+    a.save_agent_status("./react_execution_log.jsonl")
     logger.info(f"5 day trip from Orlando to Paris:{response.answer}")
 
 
diff --git a/mxAgent/samples/basic_demo/agent_traj_systhesis.py b/mxAgent/samples/basic_demo/agent_traj_systhesis.py
index 651a0a3162411bec0c39e7581ef5a3aa135d9518..2ca48e1e2cbe89bf75eea541a36f0334be31880f 100644
--- a/mxAgent/samples/basic_demo/agent_traj_systhesis.py
+++ b/mxAgent/samples/basic_demo/agent_traj_systhesis.py
@@ -10,11 +10,11 @@ from loguru import logger
 from tqdm import tqdm
 
 from agent_sdk.agentchain.react_agent import ReactAgent
-from agent_sdk.common.enum_type import AgentRunStatus
+from agent_sdk.common.constant import AgentRunStatus
 from agent_sdk.llms.llm import get_llm_backend, BACKEND_OPENAI_COMPATIBLE
 from samples.tools import QueryAttractions, QueryTransports, QueryAccommodations, \
     QueryRestaurants, QueryGoogleDistanceMatrix
-from mxAgent.samples.basic_demo.agent_test import EXAMPLE
+from samples.basic_demo.agent_test import EXAMPLE
 
 warnings.filterwarnings('ignore')
 
diff --git a/mxAgent/samples/tools/duck_search.py b/mxAgent/samples/tools/duck_search.py
index 9beba59bb656ea9835a10632cfd398ac2e40a6aa..f99a791292f513d981f7b046dd76b49b9332932e 100644
--- a/mxAgent/samples/tools/duck_search.py
+++ b/mxAgent/samples/tools/duck_search.py
@@ -1,8 +1,9 @@
 import json
 from typing import List
 import re
+import time
 
-from langchain_community.tools import DuckDuckGoSearchResults
+from duckduckgo_search import DDGS
 from loguru import logger
 
 from agent_sdk.toolmngt.api import API
@@ -51,10 +52,16 @@
 
 
 def call_duck_duck_go_search(query: str, count: int) -> List[str]:
-    try:
-        logger.debug(f"search DuckDuckGo({query}, {count})")
-        search = DuckDuckGoSearchResults(output_format="list", max_results=count)
-        return search.invoke(query)
-    except Exception as e:
-        logger.error(e)
-        return []
+    retry = 1
+    while retry <= 3:
+        try:
+            logger.debug(f"search DuckDuckGo({query}, {count})")
+            results = DDGS().text(query, backend="html", max_results=count)
+            return results
+        except Exception as e:
+            retry += 1
+            logger.warning("duck search error. will retry")
will retry") + time.sleep(1) + if retry > 3: + logger.error(e) + return [] diff --git a/mxAgent/samples/tools/tool_query_accommodations.py b/mxAgent/samples/tools/tool_query_accommodations.py index c4a5d86355c486da5df8d438162aec7b46ec5742..c8ed08a7443082abb86454eaff49a51a6685d5fb 100644 --- a/mxAgent/samples/tools/tool_query_accommodations.py +++ b/mxAgent/samples/tools/tool_query_accommodations.py @@ -64,7 +64,7 @@ class QueryAccommodations(API): filtered.append("住宿") webs = WebSummary.web_summary( filtered, search_num=3, summary_num=3, summary_prompt=prompt, llm=llm) - res = {"accommodation": json.dumps(webs)} + res = {"accommodation": json.dumps(webs, ensure_ascii=False)} return self.make_response(input_parameter, results=res, exception="") except Exception as e: logger.error(e) diff --git a/mxAgent/samples/tools/tool_query_attractions.py b/mxAgent/samples/tools/tool_query_attractions.py index b9c53b81c1ce9c3e56094fe899fa45a432aa9486..79b72a9862a417df7ec2a08dcfee6000208b5894 100644 --- a/mxAgent/samples/tools/tool_query_attractions.py +++ b/mxAgent/samples/tools/tool_query_attractions.py @@ -73,7 +73,7 @@ class QueryAttractions(API): filtered.append("景点") webs = WebSummary.web_summary( filtered, search_num=3, summary_num=3, summary_prompt=summary_prompt, llm=llm) - res = {'attractions': json.dumps(webs)} + res = {'attractions': json.dumps(webs, ensure_ascii=False)} return self.make_response(input_parameter, results=res, exception="") except Exception as e: logger.error(e) diff --git a/mxAgent/samples/tools/tool_query_transports.py b/mxAgent/samples/tools/tool_query_transports.py index bd693028f47bbd8f3adfbc87067f486188af9f30..f31e0c03ac45f858509c0b16556c28aa31fc7ae9 100644 --- a/mxAgent/samples/tools/tool_query_transports.py +++ b/mxAgent/samples/tools/tool_query_transports.py @@ -65,7 +65,7 @@ class QueryTransports(API): filtered.append("购票") webs = WebSummary.web_summary( filtered, search_num=3, summary_num=3, summary_prompt=prompt, llm=llm) - res = {'transport': json.dumps(webs)} + res = {'transport': json.dumps(webs, ensure_ascii=False)} return self.make_response(input_parameter, results=res, exception="") except Exception as e: logger.error(e) diff --git a/mxAgent/samples/tools/tool_query_weather.py b/mxAgent/samples/tools/tool_query_weather.py index 8c7cb2de1dd12823488f90913e44d4be9600ed56..2e982b8bee60781c0c7a40d4f54c970704ba0c0a 100644 --- a/mxAgent/samples/tools/tool_query_weather.py +++ b/mxAgent/samples/tools/tool_query_weather.py @@ -59,10 +59,6 @@ class QueryWeather(API): } """) - def __init__(self, ): - os.environ['CURL_CA_BUNDLE'] = '' # 关闭SSL证书验证 - urllib3.disable_warnings() - def get_forecast(self, url, param, city=""): headers = REQUEST_HEADERS response = requests.get(url, params=param, headers=headers, timeout=5) @@ -123,6 +119,8 @@ class QueryWeather(API): return params def call(self, input_parameter, **kwargs): + os.environ['CURL_CA_BUNDLE'] = '' + urllib3.disable_warnings() des = input_parameter.get('destination_city') departure_date = input_parameter.get("date") weather_type = "forecast_24h" @@ -133,6 +131,7 @@ class QueryWeather(API): try: data = self.get_city2province("https://wis.qq.com/city/like", des) except Exception as e: + logger.error(e) e = str(e) return self.make_response(input_parameter, results=e, success=False, exception=e) if len(data) == 0: @@ -158,7 +157,7 @@ class QueryWeather(API): formated_departure = datetime.datetime.strptime( departure_date, "%Y-%m-%d").date() except ValueError as e: - logger.warning(e) + # 默认当前日期 formated_departure = 
         gaps = (formated_departure - datetime.date.today()).days
         weather_summary = summary_copy[gaps + 1:]
diff --git a/mxAgent/samples/tools/web_summary_api.py b/mxAgent/samples/tools/web_summary_api.py
index d7bcce9fbdc42f2695a58fe66361a2a59a887baa..4937f10b8a115937dbfa26951c528001de4862d0 100644
--- a/mxAgent/samples/tools/web_summary_api.py
+++ b/mxAgent/samples/tools/web_summary_api.py
@@ -5,7 +5,7 @@ import asyncio
 import os
 import re
 
-import time
+from datetime import datetime, timezone
 from concurrent.futures import ThreadPoolExecutor, wait, as_completed
 
 import aiohttp
@@ -50,12 +50,12 @@ class WebSummary:
                 Chrome/126.0.0.0 Safari/537.36"
         }
         try:
-            mommt = time.time()
-            logger.info(f"start request website: {mommt},{url}")
+            mommt = datetime.now(tz=timezone.utc)
+            logger.info(f"start request website: {mommt.strftime('%Y-%m-%d %H:%M:%S')},{url}")
             response = requests.get(
                 url, headers=headers, timeout=(3, 3), stream=True)
-            mommt = time.time()
-            logger.info(f"finish request website: {mommt},{url}")
+            mommt = datetime.now(tz=timezone.utc)
+            logger.info(f"finish request website: {mommt.strftime('%Y-%m-%d %H:%M:%S')},{url}")
             if response.status_code != 200:
                 logger.error(f"获取网页{url}内容失败")
                 return '', f"获取网页{url}内容失败"
@@ -69,8 +69,8 @@ class WebSummary:
             logger.error(e)
             return '', e
         res = cls.generate_content(text, summary_prompt)
-        mommt = time.time()
-        logger.info(f"finish summary website: {mommt},{url}")
+        mommt = datetime.now(tz=timezone.utc)
+        logger.info(f"finish summary website: {mommt.strftime('%Y-%m-%d %H:%M:%S')},{url}")
         return res, None
 
     @classmethod
@@ -78,11 +78,11 @@ class WebSummary:
         os.environ['CURL_CA_BUNDLE'] = ''
         urllib3.disable_warnings()
         try:
-            mommt = time.time()
-            logger.info(f"start request website: {mommt},{url}")
+            mommt = datetime.now(tz=timezone.utc)
+            logger.info(f"start request website: {mommt.strftime('%Y-%m-%d %H:%M:%S')},{url}")
             response = await bai_du(url)
-            mommt = time.time()
-            logger.debug(f"finish request website: {mommt},{url}")
+            mommt = datetime.now(tz=timezone.utc)
+            logger.debug(f"finish request website: {mommt.strftime('%Y-%m-%d %H:%M:%S')},{url}")
             content = response
             bsobj = BeautifulSoup(content, 'html.parser')
             txt = bsobj.get_text()
@@ -96,15 +96,15 @@ class WebSummary:
         if len(text) == 0:
             return "", "no valid website content"
         res = cls.generate_content(text, summary_prompt)
-        mommt = time.time()
-        logger.info(f"finish summary website: {mommt},{url}")
+        mommt = datetime.now(tz=timezone.utc)
+        logger.info(f"finish summary website: {mommt.strftime('%Y-%m-%d %H:%M:%S')},{url}")
         return res, None
 
     @classmethod
     def summary_call(cls, web, max_summary_number, summary_prompt):
         title = web.get("title", "")
-        url = web.get("link")
-        snippet = web.get("snippet", "")
+        url = web.get("href")
+        snippet = web.get("body", "")
         web_summary = {}
         if url is None:
             return web_summary
@@ -127,13 +127,13 @@ class WebSummary:
         logger.add('app.log', level='DEBUG')
         cls.llm = llm
         try:
-            mommt = time.time()
-            logger.debug(f"start google search: {mommt}")
+            mommt = datetime.now(tz=timezone.utc)
+            logger.debug(f"start duck duck go search: {mommt.strftime('%Y-%m-%d %H:%M:%S')}")
             if isinstance(keys, list):
                 keys = ",".join(keys)
             search_result = call_duck_duck_go_search(keys, search_num)
-            mommt = time.time()
-            logger.debug(f"finish google search: {mommt}")
+            mommt = datetime.now(tz=timezone.utc)
+            logger.debug(f"finish duck duck go search: {mommt.strftime('%Y-%m-%d %H:%M:%S')}")
         except Exception as e:
             logger.error(e)
             return []