init
.gitignore (vendored, new file, 6 lines)
@@ -0,0 +1,6 @@
.idea
.env
./test/*
./db/*
./db/vec/*
./docs/files/*
Dockerfile (new file, 13 lines)
@@ -0,0 +1,13 @@
# pull official base image
FROM private_ai:base

ENV APP_CACHE /app/cache
# set work dir
WORKDIR /app
#RUN mkdir /app/cache
COPY . .
# install dependencies
RUN pip install --upgrade pip -i https://pypi.tuna.tsinghua.edu.cn/simple
RUN pip install -r requirements.txt -i https://pypi.tuna.tsinghua.edu.cn/simple
# run
ENTRYPOINT python http_test.py
api/__init__.py (new file, 4 lines)
@@ -0,0 +1,4 @@
from api import logger

from api import interface, schema, mcp, prompt, file
from api.base import app
api/base.py (new file, 71 lines)
@@ -0,0 +1,71 @@
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# @Filename: base.py
# @Author: lychang
# @Time: 2022/5/9 13:33
import functools
import json
import time
import os

from fastapi import FastAPI, Response
from starlette.middleware.cors import CORSMiddleware
from api import logger

logger.set_log_root_name("private_assistant")
log_level = os.getenv("LOG_LEVEL", "info")
if log_level == "debug":
    logger.set_log_level("debug")


def set_logger_config(api_path=None, business_type="api", source=""):
    def decorator(func, p=api_path):
        @functools.wraps(func)
        def wrapper(api=p, *args, **kwargs):
            deal_stage = func.__name__
            if api is None:
                api = deal_stage
            if "{" in api:
                for i in kwargs:
                    if i in api:
                        api = api.replace("{" + i + "}", str(kwargs[i]))
            x_trace_id = kwargs.get("x_trace_id", None)
            if x_trace_id is None:
                x_trace_id = ""
            _s = time.time()
            logger.set_extra(deal_stage, business_type, api, x_trace_id, source)
            data, status_code, headers = func(*args, **kwargs)
            x_server_time = str(time.time() - _s)
            headers["x-server-time"] = x_server_time
            ret = Response(content=data, status_code=status_code, headers=headers)
            logger.info(f"[{api}] spend time: {x_server_time}.")
            return ret

        return wrapper

    return decorator


def response(data: bytes = b'', status_code: int = 200, headers=None) -> (bytes, int, dict):
    code_mapping = {400: "Bad Request",
                    404: "The requested resource is not available",
                    409: "Original content exists, creation failed",
                    206: "Update succeeded",
                    201: "Creation succeeded",
                    500: "Internal Server Error"
                    }
    if headers is None:
        headers = {'Content-Type': 'application/json'}
    error_msg = code_mapping.get(status_code)
    if error_msg:
        data_dict = {"error_code": status_code, "error_msg": error_msg}
        data = json.dumps(data_dict).encode('utf-8')

    return data, status_code, headers


app = FastAPI()
app.add_middleware(CORSMiddleware, allow_origins=['*'], allow_credentials=True, allow_methods=['*'],
                   allow_headers=['*'])
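To make the contract above concrete: set_logger_config expects the wrapped endpoint to return the (data, status_code, headers) tuple built by response(), which the wrapper then converts into a fastapi Response and times. A minimal sketch, using a hypothetical /ping route that is not part of this commit:

import json

from api.base import app, response, set_logger_config


@app.get("/ping")
@set_logger_config("/ping")
def ping():
    # response() fills in default headers and error bodies; the decorator turns
    # the returned (data, status_code, headers) tuple into a Response object and
    # logs the elapsed time under the "/ping" label.
    payload = json.dumps({"pong": True}).encode("utf-8")
    return response(payload, 200, {"Content-Type": "application/json"})

The api/file.py and api/interface.py endpoints below follow exactly this pattern.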
api/file.py (new file, 40 lines)
@@ -0,0 +1,40 @@
import base64
from sys import prefix

from fastapi import File, UploadFile
from extension.standard import chat_file_manager
import json
from api.base import set_logger_config, response, app


@app.post("/api/file/upload")
@set_logger_config("/api/file/upload")
def upload_file(file: UploadFile = File(...)):
    try:
        file_name = file.filename
        file_content = file.file.read()
        file_id = chat_file_manager.c_create(file_name, file_content)
        file.file.close()
        if not file_id:
            return response(status_code=500)
        result = json.dumps({"file_id": file_id}, ensure_ascii=False).encode("utf-8")
        return response(result, 200, {'Content-Type': 'application/json'})
    except Exception as e:
        return response(status_code=500)


@app.get("/api/file/download/{file_id}")
@set_logger_config("/api/file/download/{file_id}")
def download_file(file_id: str):
    try:
        content_type = "application/octet-stream"
        file_content, postfix = chat_file_manager.c_get(file_id)
        if postfix:
            content_type = chat_file_manager.extension_mapping.get(postfix, "application/octet-stream")
        return response(file_content, 200, {'Content-Type': content_type,
                                            "Content-Disposition": f"attachment; filename={file_id}{postfix}"})

    except FileNotFoundError:
        return response(status_code=404)
    except Exception as e:
        return response(status_code=500)
api/interface.py (new file, 84 lines)
@@ -0,0 +1,84 @@
# -*- coding: UTF-8 -*-
import json
from typing import Optional

import requests
from pydantic import BaseModel

from api.base import set_logger_config, response, app
from api import logger
from core.config import conf
from extension.chat import model_manager, mcp_engine
from api.version import VERSION, DATETIME


class ChatBody(BaseModel):
    message: Optional[str] = ""
    type: Optional[str] = "text"
    chat_history: Optional[list[dict]] = None

    def to_dict(self):
        return self.__dict__


@app.get("/hello")
@set_logger_config("/hello")
def hello():
    try:
        result = {"version": VERSION, "datetime": DATETIME}
        result = json.dumps(result, ensure_ascii=False).encode("utf-8")
        return response(result, 200, {'Content-Type': 'application/json'})
    except Exception as e:
        logger.error(e)
        return response(status_code=500)


@app.post("/self")
@app.get("/self")
@set_logger_config("/self")
def self_info():
    try:
        result = {"my_name": conf.assistant_name,
                  "my_company": conf.company_name,
                  "my_role": "智能助理",
                  "my_tools": mcp_engine.pool}
        result = json.dumps(result, ensure_ascii=False).encode("utf-8")
        return response(result, 200, {'Content-Type': 'application/json', 'Content-Language': 'zh-CN'})
    except Exception as e:
        logger.error(e)
        return response(status_code=500)


@app.get("/raw")
@set_logger_config("/raw")
def raw(url: str):
    try:
        result = requests.get(url, headers={"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36"})
        return response(result.content, 200, {'Content-Type': 'text/html', 'Content-Language': 'zh-CN'})
    except Exception as e:
        logger.error(e)
        return response(status_code=500)


@app.post("/chat")
@app.post("/api/chat")
@set_logger_config("/chat")
def chat(item: ChatBody):
    try:
        result = model_manager.chat(item.message, item.type, item.chat_history)
        result = json.dumps(result, ensure_ascii=False).encode("utf-8")
        return response(result, 200, {'Content-Type': 'application/json'})
    except Exception as e:
        logger.error(e)
        return response(status_code=500)


@app.post("/completions")
@app.post("/api/call")
@set_logger_config("/completions")
def completions(item: ChatBody):
    try:
        result = model_manager.generate(item.message)
        result = json.dumps({"message": result}, ensure_ascii=False).encode("utf-8")
        return response(result, 200, {'Content-Type': 'application/json'})
    except Exception as e:
        logger.error(e)
        return response(status_code=500)
api/logger.py (new file, 139 lines)
@@ -0,0 +1,139 @@
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# @Filename: logger.py
# @Author: lychang
# @Time: 2021/7/15 16:41
import json
import logging
import sys
import time
from typing import Any, Dict


def _ensure_str(msg: Any) -> str:
    """Ensure the message is a string."""
    return msg if isinstance(msg, str) else str(msg)


class JsonFormatter(logging.Formatter):
    """Custom JSON log formatter."""
    default_msec_format = '%s.%03d'

    def __init__(self) -> None:
        super().__init__()

    def format(self, record: logging.LogRecord) -> str:
        """Format a log record as a JSON string."""
        formatted_record = self._build_record_dict(record)
        return json.dumps(formatted_record, ensure_ascii=False)

    def _build_record_dict(self, record: logging.LogRecord) -> Dict[str, Any]:
        """Build the dictionary structure of a log entry."""
        return {
            'deal_time': self.formatTime(record),
            'level': record.levelname,
            'app_type': record.name,
            'business_type': getattr(record, 'business_type', ''),
            'deal_stage': getattr(record, 'deal_stage', ''),
            'object_uuid': getattr(record, 'object_uuid', ''),
            'trace_id': getattr(record, 'trace_id', ''),
            'source': getattr(record, 'source', ''),
            'message': record.getMessage(),
        }

    def formatTime(self, record: logging.LogRecord, datefmt: str = None) -> str:
        """Format the record time, including milliseconds."""
        ct = self.converter(record.created)
        if datefmt:
            s = time.strftime(datefmt, ct)
        else:
            s = time.strftime(self.default_time_format, ct)
        return self.default_msec_format % (s, record.msecs)


# Initialize the logging system
INSTLOG = logging.getLogger("private_assistant")
INSTLOG.setLevel(logging.INFO)
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(JsonFormatter())
INSTLOG.addHandler(handler)
INSTLOG.propagate = False  # prevent records from propagating to the root logger

DEFAULT_EXTRA = {
    "business_type": "",
    "deal_stage": "",
    "object_uuid": "",
    "trace_id": "",
    "source": ""
}


def set_extra(
        deal_stage: str = "",
        business_type: str = "",
        object_uuid: str = "",
        trace_id: str = "",
        source: str = ""
) -> None:
    """Set the global extra fields attached to every log entry."""
    global DEFAULT_EXTRA
    DEFAULT_EXTRA.update({
        'deal_stage': deal_stage,
        'business_type': business_type,
        'object_uuid': object_uuid,
        'trace_id': trace_id,
        'source': source,
    })


def _merge_extra(**kwargs: Dict) -> Dict:
    """Merge user-supplied extra fields with the defaults."""
    extra = kwargs.get('extra', {})
    return {
        'business_type': extra.get('business_type', DEFAULT_EXTRA['business_type']),
        'deal_stage': extra.get('deal_stage', DEFAULT_EXTRA['deal_stage']),
        'object_uuid': extra.get('object_uuid', DEFAULT_EXTRA['object_uuid']),
        'trace_id': extra.get('trace_id', DEFAULT_EXTRA['trace_id']),
        'source': extra.get('source', DEFAULT_EXTRA['source']),
    }


def _log_wrapper(logger_method: Any) -> Any:
    """Decorator that normalizes the message and injects the extra fields."""

    def wrapper(msg: Any, *args: Any, **kwargs: Any) -> None:
        msg = _ensure_str(msg)
        kwargs['extra'] = _merge_extra(**kwargs)
        logger_method(msg, *args, **kwargs)

    return wrapper


# Wrap the standard logging methods with the decorator
info = _log_wrapper(INSTLOG.info)
debug = _log_wrapper(INSTLOG.debug)
warning = _log_wrapper(INSTLOG.warning)
error = _log_wrapper(INSTLOG.error)


def exception(msg: Any, *args: Any, **kwargs: Any) -> None:
    """Special handling for exception logging."""
    msg = _ensure_str(msg)
    kwargs['extra'] = _merge_extra(**kwargs)
    INSTLOG.error(msg, *args, exc_info=True, **kwargs)


def set_log_level(level: str) -> None:
    """Set the log level."""
    level_map = {
        'DEBUG': logging.DEBUG,
        'INFO': logging.INFO,
        'WARNING': logging.WARNING,
        'ERROR': logging.ERROR,
    }
    INSTLOG.setLevel(level_map.get(level.upper(), logging.INFO))


def set_log_root_name(name: str) -> None:
    """Set the root logger name."""
    INSTLOG.name = name
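For reference, a short sketch of how the module is driven by the rest of the code base (field values here are made up for illustration); each call emits one JSON object per line on stdout, with the merged extra fields included:

from api import logger

# Per-request context shared by subsequent log calls
logger.set_extra(deal_stage="upload_file", business_type="api",
                 object_uuid="/api/file/upload", trace_id="abc-123", source="web")
logger.info("file stored")        # formatted as a single JSON line by JsonFormatter
logger.set_log_level("debug")     # DEBUG records now pass the level filter
logger.debug("raw headers parsed")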
api/mcp.py (new file, 113 lines)
@@ -0,0 +1,113 @@
from typing import Dict

from fastapi import HTTPException

from api.base import app
from extension.mcp import mcp_manager

# manager
@app.get("/api/mcp/services", response_model=Dict)
async def get_mcp_services():
    """Return the status of all MCP services."""
    return {
        'services': [dict(service) for service in mcp_manager.list_services()],
        'status': mcp_manager.get_status()
    }


@app.post("/api/mcp/services", response_model=Dict)
async def create_mcp_service(data: Dict):
    """Create a new MCP service."""
    name = data.get('name')
    endpoint = data.get('endpoint')
    protocol = data.get('protocol')
    if not name or not endpoint or not protocol:
        raise HTTPException(status_code=400, detail='服务器名称、端点和协议不能为空')

    try:
        result = mcp_manager.create_service(name, endpoint, protocol, data.get('config'))
        return result
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e)) from e


@app.delete("/api/mcp/services/{service_id}", response_model=Dict)
async def delete_mcp_service(service_id: str):
    """Delete an MCP service."""
    try:
        result = mcp_manager.delete_service(service_id)
        return result
    except ValueError as e:
        raise HTTPException(status_code=404, detail=str(e)) from e


@app.get("/api/mcp/services/{service_id}/start", response_model=Dict)
async def start_mcp_service(service_id: str):
    """Start an MCP service."""
    try:
        result = mcp_manager.start_service(service_id)
        return result
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e)) from e
    except Exception as e:
        raise HTTPException(status_code=500, detail=f'启动失败: {str(e)}') from e


@app.get("/api/mcp/services/{service_id}/stop", response_model=Dict)
async def stop_mcp_service(service_id: str):
    """Stop an MCP service."""
    try:
        result = mcp_manager.stop_service(service_id)
        return result
    except ValueError as e:
        raise HTTPException(status_code=404, detail=str(e)) from e
    except Exception as e:
        raise HTTPException(status_code=500, detail=f'停止失败: {str(e)}') from e


@app.get("/api/mcp/services/{service_id}/restart", response_model=Dict)
async def restart_mcp_service(service_id: str):
    """Restart an MCP service."""
    try:
        result = mcp_manager.restart_service(service_id)
        return result
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e)) from e
    except Exception as e:
        raise HTTPException(status_code=500, detail=f'重启失败: {str(e)}') from e


@app.get("/api/mcp/services/{service_id}/status", response_model=Dict)
async def get_mcp_service_status(service_id: str):
    """Get the status of a specific MCP service."""
    try:
        # Find service by ID
        service = next((s for s in mcp_manager.services.values() if str(s.id) == service_id), None)
        if not service:
            raise HTTPException(status_code=404, detail=f'Service {service_id} not found')

        return {
            'id': service.id,
            'name': service.name,
            "instance": service.instance_name,
            'endpoint': service.endpoint,
            'protocol': service.protocol.name,
            'status': service.status,
            'config': service.config if service.config else {}
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e)) from e


@app.put("/api/mcp/services/{service_id}", response_model=Dict)
async def update_mcp_service(service_id: str, data: Dict):
    """Update the configuration of an MCP service."""
    try:
        result = mcp_manager.update_service(service_id, **data)
        return result
    except ValueError as e:
        raise HTTPException(status_code=404, detail=str(e)) from e
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
api/prompt.py (new file, 57 lines)
@@ -0,0 +1,57 @@
from typing import Dict

from fastapi import HTTPException
from api.base import app
from extension.standard import db_manager


@app.get("/api/prompts", response_model=Dict)
async def get_prompts(page: int = 1, per_page: int = 5):
    result = db_manager.prompts.get_all(page=page, per_page=per_page)
    return {
        'items': [{
            'id': row[0],
            'name': row[1],
            'content': '\n'.join(row[2].split('\n')[:2]) + ('\n…' if len(row[2].split('\n')) > 2 else ''),
            'created_at': row[3]
        } for row in result['items']],
        'pagination': {
            'total': result['total'],
            'page': result['page'],
            'per_page': result['per_page'],
            'total_pages': result['total_pages']
        }
    }


@app.post("/api/prompts", response_model=Dict)
async def create_prompt(data: Dict):
    content = data.get('content')
    if not content:
        raise HTTPException(status_code=400, detail='内容不能为空')
    prompt_id = db_manager.prompts.create(name=data.get('name', ''), content=content)
    return {'id': prompt_id}


@app.get("/api/prompts/{prompt_id}", response_model=Dict)
async def get_prompt(prompt_id: int):
    prompt = db_manager.prompts.get_by_id(prompt_id)
    if prompt:
        return dict(prompt)
    raise HTTPException(status_code=404, detail='Prompt not found')


@app.put("/api/prompts/{prompt_id}", response_model=Dict)
async def update_prompt(prompt_id: int, data: Dict):
    name = data.get('name', '')
    content = data.get('content')
    if not content:
        raise HTTPException(status_code=400, detail='内容不能为空')
    db_manager.prompts.update(prompt_id, name=name, content=content)
    return {'message': 'Prompt updated'}


@app.delete("/api/prompts/{prompt_id}", response_model=Dict)
async def delete_prompt(prompt_id: int):
    db_manager.prompts.delete(prompt_id)
    return {'message': 'Prompt deleted'}
api/schema.py (new file, 53 lines)
@@ -0,0 +1,53 @@
from typing import Dict

from fastapi import HTTPException

from api.base import app
from extension.standard import db_manager


@app.get("/api/schemas", response_model=Dict)
async def get_schemas(page: int = 1, per_page: int = 5):
    result = db_manager.schemas.get_all(page=page, per_page=per_page)
    return {
        'items': [dict(row) for row in result['items']],
        'pagination': {
            'total': result['total'],
            'page': result['page'],
            'per_page': result['per_page'],
            'total_pages': result['total_pages']
        }
    }


@app.post("/api/schemas", response_model=Dict)
async def create_schema(data: Dict):
    content = data.get('content')
    if not content:
        raise HTTPException(status_code=400, detail='内容不能为空')
    schema_id = db_manager.schemas.create(name=data.get('name', ''), content=content)
    return {'id': schema_id}


@app.get("/api/schemas/{schema_id}", response_model=Dict)
async def get_schema(schema_id: int):
    schema = db_manager.schemas.get_by_id(schema_id)
    if schema:
        return dict(schema)
    raise HTTPException(status_code=404, detail='Schema not found')


@app.put("/api/schemas/{schema_id}", response_model=Dict)
async def update_schema(schema_id: int, data: Dict):
    name = data.get('name', '')
    content = data.get('content')
    if not content:
        raise HTTPException(status_code=400, detail='内容不能为空')
    db_manager.schemas.update(schema_id, name=name, content=content)
    return {'message': 'Schema updated'}


@app.delete("/api/schemas/{schema_id}", response_model=Dict)
async def delete_schema(schema_id: int):
    db_manager.schemas.delete(schema_id)
    return {'message': 'Schema deleted'}
api/version.py (new file, 2 lines)
@@ -0,0 +1,2 @@
VERSION = "1.0.5"
DATETIME = "2025-02-06"
client_test.py (new file, 8 lines)
@@ -0,0 +1,8 @@
from langchain_community.utilities import SearxSearchWrapper

# Use an API proxy service to improve access stability
search = SearxSearchWrapper(searx_host="http://192.168.1.100:10001")

if __name__ == '__main__':
    result = search.run("谁是深田咏美")
    print(result)
core/config.py (new file, 94 lines)
@@ -0,0 +1,94 @@
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# @Filename: config.py
# @Author: lychang
# @Time: 7/5/2023 1:51 PM
import os
from dotenv import load_dotenv

load_dotenv()


class Configuration:
    def __init__(self):
        self.api_uri = os.getenv("OPENAI_API_BASE")
        self.api_key = os.getenv("OPENAI_API_KEY")
        self.think_model = os.getenv("THINK_MODEL")
        self.llm_model = os.getenv("LLM_MODEL")
        self.vision_model = os.getenv("VISION_MODEL")
        self.db_uri = os.getenv("DATABASE")
        self.assistant_name = os.getenv("ASSISTANT_NAME")
        self.company_name = os.getenv("COMPANY_NAME")


conf = Configuration()


class ModelConfiguration:

    def __init__(self):
        self.CONFIG = {
            "deepseek-chat": {
                "name": "deepseek-chat",
                "type": "llm",
                "temperature": 0.1
            },
            "deepseek-r1": {
                "name": "deepseek-r1",
                "type": "llm",
                "temperature": 0.1
            },
            "default": {
                "type": "llm",
                "temperature": 0.1
            },
            "qwen3:8b": {
                "type": "llm",
                "temperature": 0.1,
                "thinking": True
            },
            "minicpm-v": {
                "type": "vision",
                "temperature": 0.1
            },
            "text-embedding-ada-002": {
                "name": "text-embedding-ada-002",
                "type": "embedding"
            },
            "bge-large": {
                "name": "bge-large",
                "type": "embedding"
            }
        }

    @property
    def models(self) -> list[str]:
        return [i for i in self.CONFIG]

    def get(self, name) -> dict:
        temp = self.CONFIG.get(name, self.CONFIG["default"])
        if "name" not in temp:
            temp["name"] = name
        temp["host"] = conf.api_uri
        temp["key"] = conf.api_key
        return temp


model_conf = ModelConfiguration()
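As a quick illustration of the lookup above: get() falls back to the "default" entry for unknown model names and always injects the OpenAI-compatible endpoint and key taken from the environment (placeholder values shown in the comments):

from core.config import model_conf

cfg = model_conf.get("qwen3:8b")
# {'type': 'llm', 'temperature': 0.1, 'thinking': True,
#  'name': 'qwen3:8b', 'host': <OPENAI_API_BASE>, 'key': <OPENAI_API_KEY>}

unknown = model_conf.get("some-other-model")
# falls back to the "default" entry, so "type" stays "llm",
# "temperature" is 0.1, and "name" is set to "some-other-model"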
core/model.py (new file, 135 lines)
@@ -0,0 +1,135 @@
import base64

from langchain_openai import ChatOpenAI, OpenAIEmbeddings
from langchain.schema import BaseMessage

from core.config import model_conf, conf
from core.role import User, AI, System
from core.types import BaseModel

user = User()
ai = AI()
system = System()


class ChatModel(BaseModel):
    def __init__(self, config: dict):
        super().__init__(config)
        self._model = ChatOpenAI(
            base_url=config["host"],
            api_key=config["key"],
            model=config["name"],
            temperature=config["temperature"],
            extra_body={"enable_thinking": config.get("thinking", False)})
        self.dialogue = []

    def add_message(self, message: BaseMessage):
        self.dialogue.append(message)

    def load_chat_history(self, chat_history: list[dict]):
        self.dialogue = []
        for i in chat_history:
            if i["role"] == "user":
                msg = user.generate(i["message"])
                self.add_message(msg)
            elif i["role"] == "system":
                msg = ai.generate(i["message"])
                self.add_message(msg)
            elif i["role"] == "external":
                msg = system.generate(i["message"])
                self.add_message(msg)

    @staticmethod
    def _parser_llm_result(llm_result):
        content = ""
        for r in llm_result.generations[0]:
            content += r.message.content
        return content

    def llm(self, message: str) -> str:
        return self._model.invoke(message).content

    def chat(self, message: str) -> str:
        llm_result = self._model.generate([self.dialogue])
        return self._parser_llm_result(llm_result)


class EmbeddingModel(BaseModel):
    def __init__(self, config: dict):
        super().__init__(config)
        self._model = OpenAIEmbeddings(
            base_url=config["host"],
            api_key=config["key"],
            model=config["name"],
            check_embedding_ctx_length=False)

    def embed_query(self, text: str):
        return self._model.embed_query(text)

    def embed_documents(self, texts: list):
        return self._model.embed_documents(texts)


class VisionModel(BaseModel):
    def __init__(self, config: dict):
        super().__init__(config)
        self._model = ChatOpenAI(
            base_url=config["host"],
            api_key=config["key"],
            model=config["name"],
            temperature=config["temperature"])
        self._data = None

    @property
    def base64(self):
        if self._data is None:
            return ""
        return base64.b64encode(self._data).decode('utf-8')

    def load_image(self, bytes_data: bytes):
        self._data = bytes_data

    def _call_vision_function(self, message: str):
        messages = [
            user.generate([
                {"type": "text", "text": message},
                {"type": "image_url", "image_url": f"data:image/jpeg;base64,{self.base64}"},
            ])
        ]
        return self._model.invoke(messages).content

    def _call_vision_messages(self, messages: list):
        return self._model.invoke(messages).content

    def query(self, message: str) -> str:
        return self._call_vision_function(message)

    def describe(self):
        return self._call_vision_function("完整详细的描述图片中的信息")


def get_embedding_model(model_type: str = ""):
    return EmbeddingModel(model_conf.get(model_type))


def get_vision_model(model_type: str = ""):
    return VisionModel(model_conf.get(model_type))


def get_chat_model(model_type: str = ""):
    return ChatModel(model_conf.get(model_type))


think_instance = get_chat_model(conf.think_model)
llm_instance = get_chat_model(conf.llm_model)
vision_instance = get_vision_model(conf.vision_model)


def run_llm_by_message(message: str):
    llm_instance.load_chat_history([])
    return llm_instance.llm(message)


def think_by_message(message: str):
    think_instance.load_chat_history([])
    return think_instance.llm(message)
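A minimal usage sketch for the chat path above (the history entries mirror the role names handled by load_chat_history; model and endpoint values come from the .env configuration, and the dialogue content is illustrative):

from core.model import llm_instance, user

history = [
    {"role": "user", "message": "你好"},
    {"role": "system", "message": "你好,有什么可以帮你?"},  # mapped to an AIMessage
]
llm_instance.load_chat_history(history)
llm_instance.add_message(user.generate("介绍一下你自己"))  # append the new user turn
print(llm_instance.chat(""))  # chat() replies from self.dialogue; its argument is unused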
core/role.py (new file, 45 lines)
@@ -0,0 +1,45 @@
from core.types import BaseRole

from langchain.schema import HumanMessage, SystemMessage, AIMessage, ChatMessage


class User(BaseRole):
    def __init__(self):
        super().__init__("user")

    def generate(self, message):
        return HumanMessage(content=message)


class AI(BaseRole):
    def __init__(self):
        super().__init__("ai")

    def generate(self, message):
        return AIMessage(content=message)


class System(BaseRole):
    def __init__(self):
        super().__init__("system")

    def generate(self, message):
        return SystemMessage(content=message)


class Uploader(BaseRole):
    def __init__(self):
        super().__init__("uploader")
        self._file = None

    def generate(self, message):
        return HumanMessage(content=message)


class Sender(BaseRole):
    def __init__(self):
        super().__init__("sender")
        self._file = None

    def generate(self, message):
        return AIMessage(content=message)
core/types.py (new file, 189 lines)
@@ -0,0 +1,189 @@
import os
import time
import sqlite3
import abc

from langchain.schema import Document, BaseMessage

from pysmx.SM3 import hexdigest

from contextlib import contextmanager


class BaseCRUD:
    def __init__(self, table_name, db_path):
        self.table_name = table_name
        self.db_path = db_path

    @contextmanager
    def get_connection(self):
        conn = sqlite3.connect(self.db_path)
        conn.row_factory = sqlite3.Row
        try:
            yield conn
        finally:
            conn.close()

    def get_all(self, page=1, per_page=10):
        with self.get_connection() as conn:
            total = conn.execute(f'SELECT COUNT(*) FROM {self.table_name}').fetchone()[0]
            offset = (page - 1) * per_page
            items = conn.execute(
                f'SELECT * FROM {self.table_name} ORDER BY created_at DESC LIMIT ? OFFSET ?',
                (per_page, offset)
            ).fetchall()
            return {
                'items': items,
                'total': total,
                'page': page,
                'per_page': per_page,
                'total_pages': (total + per_page - 1) // per_page
            }

    def get_by_id(self, item_id):
        with self.get_connection() as conn:
            return conn.execute(f'SELECT * FROM {self.table_name} WHERE id = ?', (item_id,)).fetchone()

    def get_by_name(self, item_name):
        with self.get_connection() as conn:
            return conn.execute(f'SELECT * FROM {self.table_name} WHERE name = ?', (item_name,)).fetchone()

    def create(self, **kwargs):
        columns = ', '.join(kwargs.keys())
        placeholders = ', '.join(['?'] * len(kwargs))
        with self.get_connection() as conn:
            cursor = conn.execute(
                f'INSERT INTO {self.table_name} ({columns}) VALUES ({placeholders})',
                tuple(kwargs.values())
            )
            conn.commit()
            return cursor.lastrowid

    def update(self, item_id, **kwargs):
        set_clause = ', '.join([f'{k} = ?' for k in kwargs.keys()])
        with self.get_connection() as conn:
            conn.execute(
                f'UPDATE {self.table_name} SET {set_clause} WHERE id = ?',
                (*kwargs.values(), item_id)
            )
            conn.commit()

    def delete(self, item_id):
        with self.get_connection() as conn:
            conn.execute(f'DELETE FROM {self.table_name} WHERE id = ?', (item_id,))
            conn.commit()


class BaseLoader:
    def __init__(self):
        self.content = None

    @abc.abstractmethod
    def load_content(self, file_path):
        return b""

    @abc.abstractmethod
    def load_and_split(self):
        return []

    @staticmethod
    def generate_sm3(file_bytes: bytes):
        return "_" + hexdigest(file_bytes)

    @property
    def sm3(self):
        return self.generate_sm3(self.content)


class BaseRole:
    def __init__(self, role: str):
        self.type = role

    @abc.abstractmethod
    def generate(self, message) -> BaseMessage:
        pass


class BaseTool:
    def __init__(self, name: str, description: str):
        self.name = name
        self._file = None
        self.description = description
        self.execute = None

    def set_file_name(self, file_name: str):
        self._file = file_name

    def normal(self, data: str, data_type: str = "text") -> dict:
        return {"data": data, "tool": self.name, "type": data_type}


class BaseEngine:
    def __init__(self, name):
        self.name = name
        self.tool_pool = {}
        self._allow_tools = None

    def set_allow_list(self, allow_list: list) -> None:
        self._allow_tools = allow_list

    def check_tool_auth(self, name: str) -> bool:
        if not self._allow_tools:
            return True
        return name in self._allow_tools

    def add_tool(self, tool: BaseTool) -> None:
        self.tool_pool[tool.name] = tool

    @abc.abstractmethod
    def run(self, message: str, file_name=None, plugin_type=None) -> dict:
        pass


class BaseModel:
    def __init__(self, config: dict):
        pass


class BasePrompt:
    def __init__(self, name):
        self.name = name
        this_date = time.localtime()
        week_list = ["一", "二", "三", "四", "五", "六", "日"]
        self._year = this_date.tm_year
        self._month = this_date.tm_mon
        self._day = this_date.tm_mday
        self._week = "星期" + week_list[this_date.tm_wday]
        self._prompt = self._load_prompt()
        self._schema = self._load_schema()
        self._external = None
        self._role = None
        self._context = None

    @abc.abstractmethod
    def _load_prompt(self):
        pass

    @abc.abstractmethod
    def _load_schema(self):
        pass

    def set_context(self, context: str):
        self._context = context

    def set_role(self, role: str):
        self._role = role

    def set_external(self, external: str):
        self._external = external

    def generate(self, question: str):
        _field = ["schema", "year", "month", "day", "week", "role", "context", "external"]
        dic = self.__dict__
        result = self._prompt
        for i in _field:
            if "{{" + i + "}}" in result and dic.get("_" + i):
                result = result.replace("{{" + i + "}}", str(dic["_" + i]))
        result = result.replace("{{base_year}}", str(self._year - 1))
        result = result.replace("{{input}}", question)
        return result
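A minimal sketch of how BaseCRUD is intended to be used (the table name and columns are assumptions for illustration; the real tables live in db/system.db and are wired up elsewhere, for example by the db_manager referenced in api/prompt.py):

from core.types import BaseCRUD

# Assumes a "prompts" table with name/content/created_at columns already exists.
prompts = BaseCRUD("prompts", "db/system.db")

new_id = prompts.create(name="greeting", content="你好", created_at="2025-02-06")
page = prompts.get_all(page=1, per_page=10)   # dict with items/total/page/per_page/total_pages
prompts.update(new_id, content="hello")
prompts.delete(new_id)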
db/system.db (new binary file, not shown)
docs/json/city.json (new file, 13704 lines; diff suppressed because it is too large)
docs/json/file_extension.json (new file, 328 lines)
@@ -0,0 +1,328 @@
{
    ".001": "application/x-001",
    ".301": "application/x-301",
    ".323": "text/h323",
    ".906": "application/x-906",
    ".907": "drawing/907",
    ".a11": "application/x-a11",
    ".acp": "audio/x-mei-aac",
    ".ai": "application/postscript",
    ".aif": "audio/aiff",
    ".aifc": "audio/aiff",
    ".aiff": "audio/aiff",
    ".anv": "application/x-anv",
    ".asa": "text/asa",
    ".asf": "video/x-ms-asf",
    ".asp": "text/asp",
    ".asx": "video/x-ms-asf",
    ".au": "audio/basic",
    ".avi": "video/avi",
    ".awf": "application/vnd.adobe.workflow",
    ".biz": "text/xml",
    ".bmp": "application/x-bmp",
    ".bot": "application/x-bot",
    ".c4t": "application/x-c4t",
    ".c90": "application/x-c90",
    ".cal": "application/x-cals",
    ".cat": "application/vnd.ms-pki.seccat",
    ".cdf": "application/x-netcdf",
    ".cdr": "application/x-cdr",
    ".cel": "application/x-cel",
    ".cer": "application/x-x509-ca-cert",
    ".cg4": "application/x-g4",
    ".cgm": "application/x-cgm",
    ".cit": "application/x-cit",
    ".class": "java/*",
    ".cml": "text/xml",
    ".cmp": "application/x-cmp",
    ".cmx": "application/x-cmx",
    ".cot": "application/x-cot",
    ".crl": "application/pkix-crl",
    ".crt": "application/x-x509-ca-cert",
    ".csi": "application/x-csi",
    ".css": "text/css",
    ".cut": "application/x-cut",
    ".dbf": "application/x-dbf",
    ".dbm": "application/x-dbm",
    ".dbx": "application/x-dbx",
    ".dcd": "text/xml",
    ".dcx": "application/x-dcx",
    ".der": "application/x-x509-ca-cert",
    ".dgn": "application/x-dgn",
    ".dib": "application/x-dib",
    ".dll": "application/x-msdownload",
    ".doc": "application/msword",
    ".dot": "application/msword",
    ".drw": "application/x-drw",
    ".dtd": "text/xml",
    ".dwf": "Model/vnd.dwf",
    ".dwg": "application/x-dwg",
    ".dxb": "application/x-dxb",
    ".dxf": "application/x-dxf",
    ".edn": "application/vnd.adobe.edn",
    ".emf": "application/x-emf",
    ".eml": "message/rfc822",
    ".ent": "text/xml",
    ".epi": "application/x-epi",
    ".eps": "application/postscript",
    ".etd": "application/x-ebx",
    ".exe": "application/x-msdownload",
    ".fax": "image/fax",
    ".fdf": "application/vnd.fdf",
    ".fif": "application/fractals",
    ".fo": "text/xml",
    ".frm": "application/x-frm",
    ".g4": "application/x-g4",
    ".gbr": "application/x-gbr",
    ".gif": "image/gif",
    ".gl2": "application/x-gl2",
    ".gp4": "application/x-gp4",
    ".hgl": "application/x-hgl",
    ".hmr": "application/x-hmr",
    ".hpg": "application/x-hpgl",
    ".hpl": "application/x-hpl",
    ".hqx": "application/mac-binhex40",
    ".hrf": "application/x-hrf",
    ".hta": "application/hta",
    ".htc": "text/x-component",
    ".htm": "text/html",
    ".html": "text/html",
    ".htt": "text/webviewhtml",
    ".htx": "text/html",
    ".icb": "application/x-icb",
    ".ico": "image/x-icon",
    ".iff": "application/x-iff",
    ".ig4": "application/x-g4",
    ".igs": "application/x-igs",
    ".iii": "application/x-iphone",
    ".img": "application/x-img",
    ".ins": "application/x-internet-signup",
    ".isp": "application/x-internet-signup",
    ".IVF": "video/x-ivf",
    ".java": "java/*",
    ".jfif": "image/jpeg",
    ".jpe": "image/jpeg",
    ".jpeg": "image/jpeg",
    ".jpg": "image/jpeg",
    ".js": "application/x-javascript",
    ".jsp": "text/html",
    ".la1": "audio/x-liquid-file",
    ".lar": "application/x-laplayer-reg",
    ".latex": "application/x-latex",
    ".lavs": "audio/x-liquid-secure",
    ".lbm": "application/x-lbm",
    ".lmsff": "audio/x-la-lms",
    ".ls": "application/x-javascript",
    ".ltr": "application/x-ltr",
    ".m1v": "video/x-mpeg",
    ".m2v": "video/x-mpeg",
    ".m3u": "audio/mpegurl",
    ".m4e": "video/mpeg4",
    ".mac": "application/x-mac",
    ".man": "application/x-troff-man",
    ".math": "text/xml",
    ".mdb": "application/msaccess",
    ".mfp": "application/x-shockwave-flash",
    ".mht": "message/rfc822",
    ".mhtml": "message/rfc822",
    ".mi": "application/x-mi",
    ".mid": "audio/mid",
    ".midi": "audio/mid",
    ".mil": "application/x-mil",
    ".mml": "text/xml",
    ".mnd": "audio/x-musicnet-download",
    ".mns": "audio/x-musicnet-stream",
    ".mocha": "application/x-javascript",
    ".movie": "video/x-sgi-movie",
    ".mp1": "audio/mp1",
    ".mp2": "audio/mp2",
    ".mp2v": "video/mpeg",
    ".mp3": "audio/mp3",
    ".mp4": "video/mpeg4",
    ".mpa": "video/x-mpg",
    ".mpd": "application/vnd.ms-project",
    ".mpe": "video/x-mpeg",
    ".mpeg": "video/mpg",
    ".mpg": "video/mpg",
    ".mpga": "audio/rn-mpeg",
    ".mpp": "application/vnd.ms-project",
    ".mps": "video/x-mpeg",
    ".mpt": "application/vnd.ms-project",
    ".mpv": "video/mpg",
    ".mpv2": "video/mpeg",
    ".mpw": "application/vnd.ms-project",
    ".mpx": "application/vnd.ms-project",
    ".mtx": "text/xml",
    ".mxp": "application/x-mmxp",
    ".net": "image/pnetvue",
    ".nrf": "application/x-nrf",
    ".nws": "message/rfc822",
    ".odc": "text/x-ms-odc",
    ".out": "application/x-out",
    ".p10": "application/pkcs10",
    ".p12": "application/x-pkcs12",
    ".p7b": "application/x-pkcs7-certificates",
    ".p7c": "application/pkcs7-mime",
    ".p7m": "application/pkcs7-mime",
    ".p7r": "application/x-pkcs7-certreqresp",
    ".p7s": "application/pkcs7-signature",
    ".pc5": "application/x-pc5",
    ".pci": "application/x-pci",
    ".pcl": "application/x-pcl",
    ".pcx": "application/x-pcx",
    ".pdf": "application/pdf",
    ".pdx": "application/vnd.adobe.pdx",
    ".pfx": "application/x-pkcs12",
    ".pgl": "application/x-pgl",
    ".pic": "application/x-pic",
    ".pko": "application/vnd.ms-pki.pko",
    ".pl": "application/x-perl",
    ".plg": "text/html",
    ".pls": "audio/scpls",
    ".plt": "application/x-plt",
    ".png": "image/png",
    ".pot": "application/vnd.ms-powerpoint",
    ".ppa": "application/vnd.ms-powerpoint",
    ".ppm": "application/x-ppm",
    ".pps": "application/vnd.ms-powerpoint",
    ".ppt": "application/x-ppt",
    ".pr": "application/x-pr",
    ".prf": "application/pics-rules",
    ".prn": "application/x-prn",
    ".prt": "application/x-prt",
    ".ps": "application/x-ps",
    ".ptn": "application/x-ptn",
    ".pwz": "application/vnd.ms-powerpoint",
    ".r3t": "text/vnd.rn-realtext3d",
    ".ra": "audio/vnd.rn-realaudio",
    ".ram": "audio/x-pn-realaudio",
    ".ras": "application/x-ras",
    ".rat": "application/rat-file",
    ".rdf": "text/xml",
    ".rec": "application/vnd.rn-recording",
    ".red": "application/x-red",
    ".rgb": "application/x-rgb",
    ".rjs": "application/vnd.rn-realsystem-rjs",
    ".rjt": "application/vnd.rn-realsystem-rjt",
    ".rlc": "application/x-rlc",
    ".rle": "application/x-rle",
    ".rm": "application/vnd.rn-realmedia",
    ".rmf": "application/vnd.adobe.rmf",
    ".rmi": "audio/mid",
    ".rmj": "application/vnd.rn-realsystem-rmj",
    ".rmm": "audio/x-pn-realaudio",
    ".rmp": "application/vnd.rn-rn_music_package",
    ".rms": "application/vnd.rn-realmedia-secure",
    ".rmvb": "application/vnd.rn-realmedia-vbr",
    ".rmx": "application/vnd.rn-realsystem-rmx",
    ".rnx": "application/vnd.rn-realplayer",
    ".rp": "image/vnd.rn-realpix",
    ".rpm": "audio/x-pn-realaudio-plugin",
    ".rsml": "application/vnd.rn-rsml",
    ".rt": "text/vnd.rn-realtext",
    ".rtf": "application/x-rtf",
    ".rv": "video/vnd.rn-realvideo",
    ".sam": "application/x-sam",
    ".sat": "application/x-sat",
    ".sdp": "application/sdp",
    ".sdw": "application/x-sdw",
    ".sit": "application/x-stuffit",
    ".slb": "application/x-slb",
    ".sld": "application/x-sld",
    ".slk": "drawing/x-slk",
    ".smi": "application/smil",
    ".smil": "application/smil",
    ".smk": "application/x-smk",
    ".snd": "audio/basic",
    ".sol": "text/plain",
    ".sor": "text/plain",
    ".spc": "application/x-pkcs7-certificates",
    ".spl": "application/futuresplash",
    ".spp": "text/xml",
    ".ssm": "application/streamingmedia",
    ".sst": "application/vnd.ms-pki.certstore",
    ".stl": "application/vnd.ms-pki.stl",
    ".stm": "text/html",
    ".sty": "application/x-sty",
    ".svg": "text/xml",
    ".swf": "application/x-shockwave-flash",
    ".tdf": "application/x-tdf",
    ".tg4": "application/x-tg4",
    ".tga": "application/x-tga",
    ".tif": "image/tiff",
    ".tiff": "image/tiff",
    ".tld": "text/xml",
    ".top": "drawing/x-top",
    ".torrent": "application/x-bittorrent",
    ".tsd": "text/xml",
    ".txt": "text/plain",
    ".uin": "application/x-icq",
    ".uls": "text/iuls",
    ".vcf": "text/x-vcard",
    ".vda": "application/x-vda",
    ".vdx": "application/vnd.visio",
    ".vml": "text/xml",
    ".vpg": "application/x-vpeg005",
    ".vsd": "application/vnd.visio",
    ".vss": "application/vnd.visio",
    ".vst": "application/vnd.visio",
    ".vsw": "application/vnd.visio",
    ".vsx": "application/vnd.visio",
    ".vtx": "application/vnd.visio",
    ".vxml": "text/xml",
    ".wav": "audio/wav",
    ".wax": "audio/x-ms-wax",
    ".wb1": "application/x-wb1",
    ".wb2": "application/x-wb2",
    ".wb3": "application/x-wb3",
    ".wbmp": "image/vnd.wap.wbmp",
    ".wiz": "application/msword",
    ".wk3": "application/x-wk3",
    ".wk4": "application/x-wk4",
    ".wkq": "application/x-wkq",
    ".wks": "application/x-wks",
    ".wm": "video/x-ms-wm",
    ".wma": "audio/x-ms-wma",
    ".wmd": "application/x-ms-wmd",
    ".wmf": "application/x-wmf",
    ".wml": "text/vnd.wap.wml",
    ".wmv": "video/x-ms-wmv",
    ".wmx": "video/x-ms-wmx",
    ".wmz": "application/x-ms-wmz",
    ".wp6": "application/x-wp6",
    ".wpd": "application/x-wpd",
    ".wpg": "application/x-wpg",
    ".wpl": "application/vnd.ms-wpl",
    ".wq1": "application/x-wq1",
    ".wr1": "application/x-wr1",
    ".wri": "application/x-wri",
    ".wrk": "application/x-wrk",
    ".ws": "application/x-ws",
    ".ws2": "application/x-ws",
    ".wsc": "text/scriptlet",
    ".wsdl": "text/xml",
    ".wvx": "video/x-ms-wvx",
    ".xdp": "application/vnd.adobe.xdp",
    ".xdr": "text/xml",
    ".xfd": "application/vnd.adobe.xfd",
    ".xfdf": "application/vnd.adobe.xfdf",
    ".xhtml": "text/html",
    ".xls": "application/vnd.ms-excel",
    ".xlw": "application/x-xlw",
    ".xml": "text/xml",
    ".xpl": "audio/scpls",
    ".xq": "text/xml",
    ".xql": "text/xml",
    ".xquery": "text/xml",
    ".xsd": "text/xml",
    ".xsl": "text/xml",
    ".xslt": "text/xml",
    ".xwd": "application/x-xwd",
    ".x_b": "application/x-x_b",
    ".sis": "application/vnd.symbian.install",
    ".sisx": "application/vnd.symbian.install",
    ".x_t": "application/x-x_t",
    ".ipa": "application/vnd.iphone",
    ".apk": "application/vnd.android.package-archive",
    ".xap": "application/x-silverlight-app"
}
extension/agent/learner.py (new file, 70 lines)
@@ -0,0 +1,70 @@
# -*- coding: UTF-8 -*-
from urllib.parse import quote

from core.model import get_chat_model
from function.web_tool import WebScraper
from extension.rag import RAGPipeline

# Initialize the agent components
coder_instance = get_chat_model("qwen2.5-coder:14b")
llm_instance = get_chat_model("deepseek-r1:14b")

rag_pipeline = RAGPipeline(table_name="learner_knowledge")

web_scraper = WebScraper()


def learn_from_web(urls):
    result = web_scraper.search(urls)
    rag_pipeline.insert_text_content(result['data'])


def query_agent(question):
    response = rag_pipeline.query(question)
    return response


def get_relevant_content(url: str, tasks: list[str]) -> list:
    list_result = []
    query_result = web_scraper.search(url)
    list_result.append(query_result['data'])
    for task in tasks:
        llm_result = llm_instance.llm(f"{query_result['data']}\n{task}")
        list_result.append(llm_result)
    return list_result


# Example usage
if __name__ == "__main__":
    topic = "fastgpt"

    # Bing first-level index
    bing_url = f"https://cn.bing.com/search?q={quote(topic)}"
    task_1 = f"找到{topic}相关的知识的网址,只输出网址地址列表"
    # task_2 = f"找到{topic}相关的知识的,输出整理好的内容,尽可能多的保留原文"
    first_result, first_urls = get_relevant_content(bing_url, [task_1])
    print(first_urls)

    # Crawl the first-level index URLs to collect second-level index addresses and summarize the retrieved content
    web_urls = web_scraper.split_urls(first_urls)
    print(len(web_urls))
    second_urls = ""
    for url in web_urls:
        temp_result, temp_urls = get_relevant_content(url, [task_1])
        print(first_urls)
        rag_pipeline.insert_text_content(temp_result, url)
        second_urls += temp_urls + "\n"

    web_urls = web_scraper.split_urls(second_urls)
    print(len(web_urls))
    for url in web_urls:
        temp_result = get_relevant_content(url, [])[0]
        print(first_urls)
        rag_pipeline.insert_text_content(temp_result, url)

    # Summarize the content retrieved from the second-level index URLs

    question = f"{topic}的技术架构是什么?"
    answer = query_agent(question)
    print(answer)
    print("Agent believes it has learned enough.")
123
extension/chat.py
Normal file
123
extension/chat.py
Normal file
@@ -0,0 +1,123 @@
|
|||||||
|
from extension.rag import rag_pipline
|
||||||
|
from core.model import run_llm_by_message
|
||||||
|
from core.types import BaseEngine
|
||||||
|
from extension.mcp import mcp_engine
|
||||||
|
from extension.standard import chat_file_manager, run_llm_by_template
|
||||||
|
|
||||||
|
|
||||||
|
class ChatEngine(BaseEngine):
|
||||||
|
def __init__(self):
|
||||||
|
super().__init__("chat_engine")
|
||||||
|
self.dialogue = []
|
||||||
|
self.file = {}
|
||||||
|
self._rag = rag_pipline
|
||||||
|
self._body = None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def context(self):
|
||||||
|
contexts = self.dialogue[-16:] if len(self.dialogue) > 16 else self.dialogue
|
||||||
|
return "\n##\n\n".join([msg["role"]+": "+msg["message"] for msg in contexts])
|
||||||
|
|
||||||
|
def set_body(self, body):
|
||||||
|
self._body = body
|
||||||
|
|
||||||
|
def load_chat_history(self, chat_history):
|
||||||
|
self.dialogue = []
|
||||||
|
self.file.clear()
|
||||||
|
for msg in chat_history:
|
||||||
|
if msg["role"] in ["user", "system", "external"]:
|
||||||
|
self.dialogue.append(msg)
|
||||||
|
elif msg["role"] == "uploader":
|
||||||
|
file_id, _ = chat_file_manager.parse_file_id(msg["message"])
|
||||||
|
self.file["uploader"] = file_id
|
||||||
|
elif msg["role"] == "sender":
|
||||||
|
self.file["sender"] = msg["message"]
|
||||||
|
|
||||||
|
def add_message(self, message: str, role: str):
|
||||||
|
self.dialogue.append({"role": role, "message": message})
|
||||||
|
|
||||||
|
def add_user_message(self, message: str):
|
||||||
|
self.add_message(message, "user")
|
||||||
|
|
||||||
|
def add_system_message(self, message: str):
|
||||||
|
self.add_message(message, "system")
|
||||||
|
|
||||||
|
def load_file(self, message: str):
|
||||||
|
try:
|
||||||
|
file_id, ext = chat_file_manager.parse_file_id(message)
|
||||||
|
self._rag.insert_document(file_id, file_id, ext)
|
||||||
|
self.file["uploader"] = file_id
|
||||||
|
return {"message": "文件上传成功!请继续提问。", "role": "system"}
|
||||||
|
except Exception as e:
|
||||||
|
return {"message": "文件上传失败!请联系管理员。", "role": "system"}
|
||||||
|
|
||||||
|
def run(self, message: str, file_name=None, plugin_type=None) -> dict:
|
||||||
|
current_context = self.context
|
||||||
|
self.add_user_message(message)
|
||||||
|
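# The context snapshot taken before appending the new user message, plus any uploaded file, is handed to the MCP engine for tool routing.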
mcp_engine.set_context(current_context)
|
||||||
|
mcp_engine.set_file(self.file.get("uploader"))
|
||||||
|
|
||||||
|
try:
|
||||||
|
mcp_data = mcp_engine.run(message)
|
||||||
|
action = mcp_data["tool"]
|
||||||
|
if action == "answer":
|
||||||
|
data = mcp_data.get("data", "")
|
||||||
|
if not data:
|
||||||
|
data = self._generate_chat(message, current_context, "")["data"]
|
||||||
|
return self._chat(data)
|
||||||
|
print(mcp_data)
|
||||||
|
external_data = mcp_data.get("data", "")
|
||||||
|
generate_data = self._generate_chat(message, current_context, external_data)
|
||||||
|
return self._data_parse(generate_data)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
return {"message": f"处理错误: {str(e)}", "role": "system"}
|
||||||
|
|
||||||
|
def _chat(self, answer: str):
|
||||||
|
self.add_system_message(answer)
|
||||||
|
return {"message": answer, "role": "system"}
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _generate_chat(message: str, memory: str, external: str):
|
||||||
|
context = ""
|
||||||
|
if memory:
|
||||||
|
context += f"<content>{memory}</content>\n"
|
||||||
|
if external:
|
||||||
|
context += f"<external>{external}</external>\n"
|
||||||
|
context += f"<input>{message}</input>"
|
||||||
|
result = run_llm_by_template(context, "text_generate")
|
||||||
|
print(result)
|
||||||
|
return {"type": "text", "data": result }
|
||||||
|
|
||||||
|
def _data_parse(self, data: dict) -> dict:
|
||||||
|
if data["type"] == "text":
|
||||||
|
self.add_system_message(data["data"])
|
||||||
|
return {"message": data["data"], "role": "system"}
|
||||||
|
elif data["type"] == "file":
|
||||||
|
self.file["sender"] = data["data"]
|
||||||
|
return {"message": data["data"], "role": "sender"}
|
||||||
|
else:
|
||||||
|
raise ValueError("Unsupported data type")
|
||||||
|
|
||||||
|
|
||||||
|
class ModelManager:
|
||||||
|
def __init__(self):
|
||||||
|
self.allow_list = []
|
||||||
|
|
||||||
|
def chat(self, message: str, message_type: str = "text", chat_history=None):
|
||||||
|
ce = ChatEngine()
|
||||||
|
ce.set_allow_list(self.allow_list)
|
||||||
|
ce.load_chat_history(chat_history if chat_history else [])
|
||||||
|
if message_type == "text":
|
||||||
|
return ce.run(message)
|
||||||
|
elif message_type == "file":
|
||||||
|
return ce.load_file(message)
|
||||||
|
else:
|
||||||
|
raise Exception("Message type not supported!")
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def generate(prompt):
|
||||||
|
return run_llm_by_message(prompt)
|
||||||
|
|
||||||
|
|
||||||
|
model_manager = ModelManager()
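# Minimal usage sketch: model_manager.chat("你好", "text", []) runs one text turn against an empty history and returns a {"message", "role"} dict.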
|
70
extension/document_loader.py
Normal file
70
extension/document_loader.py
Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
from langchain_community.document_loaders import PyPDFLoader, TextLoader, Docx2txtLoader
|
||||||
|
from langchain_text_splitters import RecursiveCharacterTextSplitter
|
||||||
|
from core.model import vision_instance
|
||||||
|
from core.types import BaseLoader
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
class ImageLoader:
|
||||||
|
def __init__(self, file_path: str):
|
||||||
|
self.file_path = file_path
|
||||||
|
|
||||||
|
def load(self):
|
||||||
|
from langchain.schema import Document
|
||||||
|
|
||||||
|
with open(self.file_path, "rb") as f:
|
||||||
|
image_data = f.read()
|
||||||
|
|
||||||
|
vision_instance.load_image(image_data)
|
||||||
|
description = vision_instance.describe()
|
||||||
|
|
||||||
|
return [Document(
|
||||||
|
page_content=description,
|
||||||
|
metadata={"source": self.file_path}
|
||||||
|
)]
|
||||||
|
|
||||||
|
|
||||||
|
class DocumentLoader(BaseLoader):
|
||||||
|
SUPPORTED_LOADERS = {
|
||||||
|
".pdf": PyPDFLoader,
|
||||||
|
".txt": TextLoader,
|
||||||
|
".docx": Docx2txtLoader,
|
||||||
|
".doc": Docx2txtLoader,
|
||||||
|
".jpeg": ImageLoader,
|
||||||
|
".jpg": ImageLoader,
|
||||||
|
".png": ImageLoader
|
||||||
|
}
|
||||||
|
|
||||||
|
def __init__(self, config=None):
|
||||||
|
super().__init__()
|
||||||
|
config = config or {}
|
||||||
|
self.file_path = None
|
||||||
|
self.extension = None
|
||||||
|
self._document = None
|
||||||
|
self.chunk_size = config.get("chunk_size", 512)
|
||||||
|
self.chunk_overlap = config.get("chunk_overlap", 200)
|
||||||
|
self.text_splitter = RecursiveCharacterTextSplitter(
|
||||||
|
chunk_size=self.chunk_size,
|
||||||
|
chunk_overlap=self.chunk_overlap,
|
||||||
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def loader(self):
|
||||||
|
if self.extension not in self.SUPPORTED_LOADERS:
|
||||||
|
raise ValueError(f"不支持的文件类型: {self.extension}")
|
||||||
|
return self.SUPPORTED_LOADERS[self.extension](self.file_path)
|
||||||
|
|
||||||
|
def load_and_split(self):
|
||||||
|
return self.text_splitter.split_documents(self._document)
|
||||||
|
|
||||||
|
def load_content(self, file_path, extension=None):
|
||||||
|
self.extension = extension if extension else ""
|
||||||
|
self.file_path = "./docs/files/" + file_path
|
||||||
|
with open(self.file_path, "rb") as f:
|
||||||
|
self.content = f.read()
|
||||||
|
|
||||||
|
self._document = self.loader.load()
|
||||||
|
|
||||||
|
def split_text(self, content: str) -> list[str]:
|
||||||
|
return self.text_splitter.split_text(content)
|
444
extension/mcp.py
Normal file
444
extension/mcp.py
Normal file
@@ -0,0 +1,444 @@
|
|||||||
|
import json
|
||||||
|
|
||||||
|
from typing import Dict, List, Optional
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from urllib.parse import quote
|
||||||
|
|
||||||
|
import requests
|
||||||
|
from core.config import conf
|
||||||
|
from core.model import run_llm_by_message, think_by_message
|
||||||
|
from core.types import BaseCRUD, BaseEngine
|
||||||
|
from extension.standard import parse_json_string, OnlinePrompt, db_manager
|
||||||
|
from function.context import rag_search
|
||||||
|
from function.weather import weather_search
|
||||||
|
from function.web_tool import web_scraper
|
||||||
|
|
||||||
|
|
||||||
|
class ProtocolAdapter(ABC):
|
||||||
|
name: str
|
||||||
|
|
||||||
|
@property
|
||||||
|
@abstractmethod
|
||||||
|
def name(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def handle_request(self, request: dict) -> dict:
|
||||||
|
pass
|
||||||
|
|
||||||
|
def request(self, request: dict) -> str:
|
||||||
|
result = self.handle_request(request)
|
||||||
|
if result["status"] == 500:
|
||||||
|
return ""
|
||||||
|
else:
|
||||||
|
return result["message"]
|
||||||
|
|
||||||
|
|
||||||
|
class HttpProtocolAdapter(ProtocolAdapter):
|
||||||
|
def __init__(self, secure: bool = False):
|
||||||
|
self.secure = secure
|
||||||
|
self.headers = {
|
||||||
|
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/133.0.0.0 Safari/537.36 Edg/133.0.0.0",
|
||||||
|
"Accept": "*/*",
|
||||||
|
"Connection": "keep-alive"
|
||||||
|
}
|
||||||
|
|
||||||
|
@property
|
||||||
|
def name(self):
|
||||||
|
return 'https' if self.secure else 'http'
|
||||||
|
|
||||||
|
def handle_request(self, request: dict) -> dict:
|
||||||
|
method = request['method']
|
||||||
|
uri = f"{self.name}://{request['uri']}"
|
||||||
|
try:
|
||||||
|
if method == 'get':
|
||||||
|
params = request['parameters']
|
||||||
|
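# URL-encode each parameter value and append the pairs as a query string for GET requests.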
uri += f"?{'&'.join([f'{k}={quote(str(v))}' for k, v in params.items()])}"
|
||||||
|
return {"message": web_scraper.get_uri_resource(uri), "status": 200}
|
||||||
|
elif method == 'post':
|
||||||
|
data = requests.post(uri, json=request['parameters'], headers=self.headers).json()
|
||||||
|
return {'status': 200, 'message': str(data)}
|
||||||
|
else:
|
||||||
|
return {"status": 500, "message": f"Unsupported method: {method}"}
|
||||||
|
except Exception as e:
|
||||||
|
return {"status": 500, "message": str(e)}
|
||||||
|
|
||||||
|
|
||||||
|
class LocalProtocolAdapter(ProtocolAdapter):
|
||||||
|
@property
|
||||||
|
def name(self):
|
||||||
|
return "local"
|
||||||
|
|
||||||
|
def handle_request(self, request: dict) -> dict:
|
||||||
|
return {"status": "success", "message": "Local request handled successfully"}
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class MCPService:
|
||||||
|
instance_name: str
|
||||||
|
name: str
|
||||||
|
endpoint: str
|
||||||
|
id: int = -1
|
||||||
|
protocol: Optional[ProtocolAdapter] = None
|
||||||
|
status: int = 0
|
||||||
|
config: dict = None
|
||||||
|
|
||||||
|
def set_protocol(self, protocol: str):
|
||||||
|
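# A trailing 's' (e.g. "https") selects the TLS variant; the remaining base name picks the adapter class.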
secure = protocol.endswith('s')
|
||||||
|
base_protocol = protocol[:-1] if secure else protocol
|
||||||
|
|
||||||
|
if base_protocol == 'http':
|
||||||
|
self.protocol = HttpProtocolAdapter(secure=secure)
|
||||||
|
elif base_protocol == "local":
|
||||||
|
self.protocol = LocalProtocolAdapter()
|
||||||
|
else:
|
||||||
|
raise ValueError(f"Unsupported protocol: {protocol}")
|
||||||
|
|
||||||
|
def start_service(self) -> dict:
|
||||||
|
self.status = 1
|
||||||
|
return {
|
||||||
|
'status': 'success',
|
||||||
|
'message': f'Service {self.id} started successfully',
|
||||||
|
'service': {
|
||||||
|
'id': self.id,
|
||||||
|
'name': self.name,
|
||||||
|
'endpoint': self.endpoint,
|
||||||
|
'protocol': self.protocol.__class__.__name__,
|
||||||
|
'status': 'running'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def stop_service(self) -> dict:
|
||||||
|
self.status = 0
|
||||||
|
return {
|
||||||
|
'status': 'success',
|
||||||
|
'message': f'Service {self.id} stopped successfully',
|
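# Build the tool menu injected into the routing prompt: built-in local tools plus every registered MCP service that is currently running.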
||||||
|
'service': {
|
||||||
|
'id': self.id,
|
||||||
|
'name': self.name,
|
||||||
|
'endpoint': self.endpoint,
|
||||||
|
'protocol': self.protocol.__class__.__name__,
|
||||||
|
'status': 'stopped'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
def execute(self, message: str) -> str:
|
||||||
|
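# Look up the parameter schema configured for this service, have the LLM generate concrete parameters from the message, then send the request through the protocol adapter.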
schema = self._get_schema()
|
||||||
|
params = self._generate_parameters(message, schema)
|
||||||
|
try:
|
||||||
|
return self.protocol.request({
|
||||||
|
"method": self.config.get('method', ""),
|
||||||
|
"parameters": params,
|
||||||
|
"uri": self.endpoint
|
||||||
|
})
|
||||||
|
except Exception as e:
|
||||||
|
return f"Execution error: {str(e)}"
|
||||||
|
|
||||||
|
def _get_schema(self) -> str:
|
||||||
|
schema_name = self.config.get('schema', "")
|
||||||
|
if schema_name:
|
||||||
|
schema = db_manager.schemas.get_by_name(schema_name)
|
||||||
|
if schema:
|
||||||
|
schema = dict(schema)
|
||||||
|
return schema.get('content') if schema else ""
|
||||||
|
|
||||||
|
return ""
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _generate_parameters(message: str, schema: str) -> dict:
|
||||||
|
prompt = OnlinePrompt("parameter_generate").generate(message)
|
||||||
|
prompt = prompt.replace("{{schema}}", schema)
|
||||||
|
return parse_json_string(run_llm_by_message(prompt))
|
||||||
|
|
||||||
|
|
||||||
|
class MCPManager:
|
||||||
|
def __init__(self, db_path: str):
|
||||||
|
self.db = BaseCRUD('services', db_path)
|
||||||
|
self._init_db()
|
||||||
|
self.services: Dict[str, MCPService] = {}
|
||||||
|
self._sync_from_db()
|
||||||
|
|
||||||
|
def _init_db(self):
|
||||||
|
with self.db.get_connection() as conn:
|
||||||
|
conn.execute('''
|
||||||
|
CREATE TABLE IF NOT EXISTS services (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
name TEXT UNIQUE NOT NULL,
|
||||||
|
instance_name TEXT NOT NULL,
|
||||||
|
instance_status INTEGER NOT NULL,
|
||||||
|
endpoint TEXT NOT NULL,
|
||||||
|
protocol TEXT NOT NULL,
|
||||||
|
config JSON,
|
||||||
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||||
|
)
|
||||||
|
''')
|
||||||
|
|
||||||
|
def _sync_from_db(self):
|
||||||
|
services = self.db.get_all()['items']
|
||||||
|
for service in services:
|
||||||
|
mcp_service = MCPService(
|
||||||
|
id=service['id'],
|
||||||
|
instance_name=service['instance_name'],
|
||||||
|
name=service['name'],
|
||||||
|
endpoint=service['endpoint'],
|
||||||
|
status=service['instance_status'],
|
||||||
|
config=json.loads(service['config'])
|
||||||
|
)
|
||||||
|
mcp_service.set_protocol(service['protocol'])
|
||||||
|
self.services[str(mcp_service.id)] = mcp_service
|
||||||
|
|
||||||
|
def list_services(self) -> List[dict]:
|
||||||
|
return [dict(item) for item in self.db.get_all()['items']]
|
||||||
|
|
||||||
|
def get_status(self) -> dict:
|
||||||
|
with self.db.get_connection() as conn:
|
||||||
|
total_services = conn.execute('SELECT COUNT(*) FROM services').fetchone()[0]
|
||||||
|
return {"total_services": total_services}
|
||||||
|
|
||||||
|
def register_service(self, name: str, endpoint: str, protocol: str, config: dict = None) -> None:
|
||||||
|
if protocol not in ['http', 'https', 'local']:
|
||||||
|
raise ValueError(f"Unsupported protocol: {protocol}")
|
||||||
|
|
||||||
|
instance_name = f"mcp-{name.lower().replace(' ', '-')}"
|
||||||
|
config = config if config else {}
|
||||||
|
|
||||||
|
_id = self.db.create(
|
||||||
|
name=name,
|
||||||
|
instance_name=instance_name,
|
||||||
|
instance_status=0,
|
||||||
|
endpoint=endpoint,
|
||||||
|
protocol=protocol,
|
||||||
|
config=json.dumps(config)
|
||||||
|
)
|
||||||
|
|
||||||
|
service = MCPService(
|
||||||
|
id=_id,
|
||||||
|
instance_name=instance_name,
|
||||||
|
name=name,
|
||||||
|
endpoint=endpoint,
|
||||||
|
config=config
|
||||||
|
)
|
||||||
|
service.set_protocol(protocol)
|
||||||
|
self.services[str(_id)] = service
|
||||||
|
|
||||||
|
def unregister_service(self, service_id: str) -> None:
|
||||||
|
self.db.delete(service_id)
|
||||||
|
if service_id in self.services:
|
||||||
|
del self.services[service_id]
|
||||||
|
|
||||||
|
def create_service(self, name: str, endpoint: str, protocol: str, config: dict = None) -> dict:
|
||||||
|
try:
|
||||||
|
self.register_service(name, endpoint, protocol, config)
|
||||||
|
return {'status': 'success', 'message': f'Service {name} created successfully'}
|
||||||
|
except Exception as e:
|
||||||
|
return {'status': 'error', 'message': str(e)}
|
||||||
|
|
||||||
|
def delete_service(self, service_id: str) -> dict:
|
||||||
|
try:
|
||||||
|
self.unregister_service(service_id)
|
||||||
|
return {'status': 'success', 'message': f'Service {service_id} deleted successfully'}
|
||||||
|
except Exception as e:
|
||||||
|
return {'status': 'error', 'message': str(e)}
|
||||||
|
|
||||||
|
def start_service(self, service_id: str) -> dict:
|
||||||
|
if service_id not in self.services:
|
||||||
|
return {'status': 'error', 'message': f"Service {service_id} not found"}
|
||||||
|
|
||||||
|
service = self.services[service_id]
|
||||||
|
try:
|
||||||
|
result = service.start_service()
|
||||||
|
self.db.update(service.id, instance_status=1)
|
||||||
|
return result
|
||||||
|
except Exception as e:
|
||||||
|
return {'status': 'error', 'message': str(e)}
|
||||||
|
|
||||||
|
def stop_service(self, service_id: str) -> dict:
|
||||||
|
if service_id not in self.services:
|
||||||
|
return {'status': 'error', 'message': f"Service {service_id} not found"}
|
||||||
|
|
||||||
|
service = self.services[service_id]
|
||||||
|
try:
|
||||||
|
result = service.stop_service()
|
||||||
|
self.db.update(service.id, instance_status=0)
|
||||||
|
return result
|
||||||
|
except Exception as e:
|
||||||
|
return {'status': 'error', 'message': str(e)}
|
||||||
|
|
||||||
|
def restart_service(self, service_id: str) -> dict:
|
||||||
|
if service_id not in self.services:
|
||||||
|
return {'status': 'error', 'message': f"Service {service_id} not found"}
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.stop_service(service_id)
|
||||||
|
return self.start_service(service_id)
|
||||||
|
except Exception as e:
|
||||||
|
return {'status': 'error', 'message': str(e)}
|
||||||
|
|
||||||
|
def check_health(self, service_id: str) -> dict:
|
||||||
|
if service_id not in self.services:
|
||||||
|
return {'status': 'error', 'message': f"Service {service_id} not found"}
|
||||||
|
|
||||||
|
service = self.services[service_id]
|
||||||
|
is_healthy = service.status == 1
|
||||||
|
return {
|
||||||
|
'status': 'success',
|
||||||
|
'healthy': is_healthy,
|
||||||
|
'message': f'Service {service_id} is {"healthy" if is_healthy else "unhealthy"}'
|
||||||
|
}
|
||||||
|
|
||||||
|
def update_service(self, service_id: str, **kwargs) -> dict:
|
||||||
|
if service_id not in self.services:
|
||||||
|
return {'status': 'error', 'message': f"Service {service_id} not found"}
|
||||||
|
|
||||||
|
service = self.services[service_id]
|
||||||
|
update_fields = {}
|
||||||
|
|
||||||
|
if 'name' in kwargs:
|
||||||
|
service.name = kwargs['name']
|
||||||
|
update_fields['name'] = kwargs['name']
|
||||||
|
|
||||||
|
if 'endpoint' in kwargs:
|
||||||
|
service.endpoint = kwargs['endpoint']
|
||||||
|
update_fields['endpoint'] = kwargs['endpoint']
|
||||||
|
|
||||||
|
if 'protocol' in kwargs:
|
||||||
|
service.set_protocol(kwargs['protocol'])
|
||||||
|
update_fields['protocol'] = kwargs['protocol']
|
||||||
|
|
||||||
|
if 'config' in kwargs:
|
||||||
|
service.config = kwargs['config']
|
||||||
|
update_fields['config'] = json.dumps(kwargs['config'])
|
||||||
|
|
||||||
|
if 'status' in kwargs:
|
||||||
|
service.status = kwargs['status']
|
||||||
|
update_fields['instance_status'] = kwargs['status']
|
||||||
|
|
||||||
|
try:
|
||||||
|
if update_fields:
|
||||||
|
self.db.update(service.id, **update_fields)
|
||||||
|
|
||||||
|
return {
|
||||||
|
'status': 'success',
|
||||||
|
'message': f'Service {service_id} updated successfully',
|
||||||
|
'service': {
|
||||||
|
'id': service.id,
|
||||||
|
'name': service.name,
|
||||||
|
'endpoint': service.endpoint,
|
||||||
|
'protocol': service.protocol.name,
|
||||||
|
'status': service.status,
|
||||||
|
'config': service.config
|
||||||
|
}
|
||||||
|
}
|
||||||
|
except Exception as e:
|
||||||
|
return {'status': 'error', 'message': str(e)}
|
||||||
|
|
||||||
|
|
||||||
|
mcp_manager = MCPManager(conf.db_uri)
|
||||||
|
|
||||||
|
|
||||||
|
class MCPPredictError(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class MCPEngine(BaseEngine):
|
||||||
|
def __init__(self):
|
||||||
|
super().__init__("mcp_engine")
|
||||||
|
self._manager = mcp_manager
|
||||||
|
self._context = None
|
||||||
|
self._file = None
|
||||||
|
self.pool = {}
|
||||||
|
self.services = {}
|
||||||
|
|
||||||
|
def set_context(self, context: str):
|
||||||
|
self._context = context
|
||||||
|
|
||||||
|
def set_file(self, file: str):
|
||||||
|
self._file = file
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _load_services_info() -> Dict[str, dict]:
|
||||||
|
services = mcp_manager.list_services()
|
||||||
|
return {i['name']: i for i in services if i['instance_status'] == 1}
|
||||||
|
|
||||||
|
# def _rewrite(self, message: str):
|
||||||
|
# op = OnlinePrompt("rewrite_question")
|
||||||
|
# op.set_external(self._context)
|
||||||
|
# prompt = op.generate(message)
|
||||||
|
# response = run_llm_by_message(prompt)
|
||||||
|
# try:
|
||||||
|
# return parse_json_string(response)
|
||||||
|
# except Exception as e:
|
||||||
|
# return {"rewrite": message, "keywords": []}
|
||||||
|
|
||||||
|
def _predict(self, message: str) -> List[dict]:
|
||||||
|
self.services = self._load_services_info()
|
||||||
|
external_str = "- [chat](优先级2) 根据上下文回答一些简单的问题\n- [context](优先级5) 涉及前文,前面内容时,必须调用\n"
|
||||||
|
|
||||||
|
for name, info in self.services.items():
|
||||||
|
if info['protocol'] == 'local':
|
||||||
|
tool = self.tool_pool.get(name)
|
||||||
|
if tool:
|
||||||
|
self.pool[name] = tool.description
|
||||||
|
external_str += f"- [{name}](优先级4) {tool.description} \n"
|
||||||
|
else:
|
||||||
|
config = json.loads(info.get('config', "{}"))
|
||||||
|
desc = config.get('description', '')
|
||||||
|
self.pool[name] = desc
|
||||||
|
external_str += f"- [{name}](优先级3) {desc} \n"
|
||||||
|
|
||||||
|
op = OnlinePrompt(self.name)
|
||||||
|
op.set_external(external_str)
|
||||||
|
prompt = op.generate(message)
|
||||||
|
response = think_by_message(prompt)
|
||||||
|
try:
|
||||||
|
return parse_json_string(response)
|
||||||
|
except Exception as e:
|
||||||
|
return []
|
||||||
|
|
||||||
|
def _run_task(self, message: str, task: dict, external_data: str = "") -> dict:
|
||||||
|
tool_name = task.get("tool", "")
|
||||||
|
question = task.get("question", "")
|
||||||
|
|
||||||
|
if not tool_name:
|
||||||
|
return {"tool": "error", "output": "text", "data": f"Tool:{tool_name} execute error"}
|
||||||
|
|
||||||
|
task_prompt = f"main_task: {message}\nsub_task: {question}"
|
||||||
|
if external_data:
|
||||||
|
task_prompt = f"{external_data}\n{task_prompt}"
|
||||||
|
if tool_name == "chat":
|
||||||
|
return {"tool": "answer", "output": "text", "data": task.get("answer", "")}
|
||||||
|
elif tool_name == "context":
|
||||||
|
return {"tool": "context", "output": "text", "data": self._context}
|
||||||
|
if tool_name in self.tool_pool:
|
||||||
|
tool = self.tool_pool[tool_name]
|
||||||
|
tool.set_file_name(self._file)
|
||||||
|
return tool.execute(task_prompt)
|
||||||
|
|
||||||
|
if tool_name in self.services:
|
||||||
|
service = self._manager.services[str(self.services[tool_name]['id'])]
|
||||||
|
return {"tool": tool_name, "output": "text", "data": service.execute(task_prompt)}
|
||||||
|
|
||||||
|
return {"tool": "chat", "output": "text", "data": message}
|
||||||
|
|
||||||
|
def run(self, message: str, file_name: str = None, plugin_type: str = None) -> dict:
|
||||||
|
# question = self._rewrite(message)["rewrite"]
|
||||||
|
question = message
|
||||||
|
tasks = self._predict(question)
|
||||||
|
if isinstance(tasks, dict):
|
||||||
|
tasks = [tasks]
|
||||||
|
|
||||||
|
external_data = ""
|
||||||
|
result = {"tool": "error", "output": "text", "data": "MCP engine execute error"}
|
||||||
|
print(tasks)
|
||||||
|
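# Run the predicted tasks in order; output from non-chat tools is accumulated as external data for the tasks that follow, and the last result is returned.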
for task in tasks:
|
||||||
|
result = self._run_task(message, task, external_data)
|
||||||
|
if result['tool'] not in ["chat", "error"]:
|
||||||
|
external_data += result['data']
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
mcp_engine = MCPEngine()
|
||||||
|
mcp_engine.add_tool(weather_search)
|
||||||
|
mcp_engine.add_tool(web_scraper)
|
||||||
|
mcp_engine.add_tool(rag_search)
|
142
extension/rag.py
Normal file
142
extension/rag.py
Normal file
@@ -0,0 +1,142 @@
|
|||||||
|
import time
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
import jieba
|
||||||
|
import jieba.analyse
|
||||||
|
from lancedb import connect
|
||||||
|
from core.model import get_embedding_model, get_chat_model
|
||||||
|
from extension.document_loader import DocumentLoader
|
||||||
|
from extension.standard import OnlinePrompt, parse_json_string
|
||||||
|
|
||||||
|
|
||||||
|
class RAGPipeline:
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
db_path: str = "./db/vec",
|
||||||
|
default_table_name: str = "documents",
|
||||||
|
embedding_model: str = "bge-large",
|
||||||
|
llm_model: str = "qwen2.5:7b",
|
||||||
|
chunk_size: int = 512,
|
||||||
|
chunk_overlap: int = 200,
|
||||||
|
):
|
||||||
|
self.embeddings = get_embedding_model(embedding_model)
|
||||||
|
self.llm = get_chat_model(llm_model)
|
||||||
|
self.db = connect(db_path)
|
||||||
|
self.default_table_name = default_table_name
|
||||||
|
self.document_loader = DocumentLoader(config={"chunk_size": chunk_size, "chunk_overlap": chunk_overlap})
|
||||||
|
|
||||||
|
def _insert(self, texts: list, extension: str, table_name: str = None):
|
||||||
|
table_name = table_name or self.default_table_name
|
||||||
|
embeddings = self.embeddings.embed_documents(texts)
|
||||||
|
time_stamp = int(time.time())
|
||||||
|
data = [
|
||||||
|
{
|
||||||
|
"text": text.replace("\n", "").replace("\t", "").replace("\r", ""),
|
||||||
|
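# text_fts is a whitespace-stripped copy of the chunk kept for the full-text (FTS) index.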
"text_fts":text.replace("\n", "").replace("\t", "").replace("\r", "").replace(" ", ""),
|
||||||
|
"vector": embedding,
|
||||||
|
"extension": extension,
|
||||||
|
"time_stamp": time_stamp
|
||||||
|
}
|
||||||
|
for text, embedding in zip(texts, embeddings)
|
||||||
|
]
|
||||||
|
|
||||||
|
if table_name in self.db.table_names():
|
||||||
|
table = self.db.open_table(table_name)
|
||||||
|
table.add(data)
|
||||||
|
else:
|
||||||
|
# Create table with explicit schema including FTS index
|
||||||
|
self.db.create_table(
|
||||||
|
table_name,
|
||||||
|
data=data,
|
||||||
|
mode="overwrite"
|
||||||
|
)
|
||||||
|
|
||||||
|
def insert_document(self, file_path: str, table_name: str = None, extension: str = None):
|
||||||
|
self.document_loader.load_content(file_path, extension)
|
||||||
|
chunks = self.document_loader.load_and_split()
|
||||||
|
texts = [chunk.page_content for chunk in chunks]
|
||||||
|
self._insert(texts, self.document_loader.extension, table_name)
|
||||||
|
|
||||||
|
def insert_text_content(self, content: str, table_name: str = None):
|
||||||
|
chunks = self.document_loader.split_text(content)
|
||||||
|
texts = [chunk for chunk in chunks]
|
||||||
|
self._insert(texts, ".txt", table_name)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _extract_keywords(question: str) -> List[str]:
|
||||||
|
# Use TF-IDF and TextRank combination for keyword extraction
|
||||||
|
tfidf_kws = jieba.analyse.extract_tags(
|
||||||
|
question, topK=3, withWeight=False, allowPOS=('n', 'vn', 'v'))
|
||||||
|
textrank_kws = jieba.analyse.textrank(
|
||||||
|
question, topK=3, withWeight=False, allowPOS=('n', 'vn', 'v'))
|
||||||
|
|
||||||
|
# Merge and deduplicate keywords
|
||||||
|
combined_kws = list(set(tfidf_kws + textrank_kws))
|
||||||
|
return [kw for kw in combined_kws if len(kw) > 1] # filter short keywords
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _build_keyword_condition(keywords: List[str]) -> str:
|
||||||
|
if not keywords:
|
||||||
|
return ""
|
||||||
|
conditions = [f"text LIKE '%{kw}%'" for kw in keywords]
|
||||||
|
return " OR ".join(conditions)
|
||||||
|
|
||||||
|
def _rewrite_question(self, question: str) -> dict:
|
||||||
|
# Use LLM to rewrite the question
|
||||||
|
op = OnlinePrompt("rewrite_question")
|
||||||
|
prompt = op.generate(question)
|
||||||
|
response = self.llm.llm(prompt)
|
||||||
|
return parse_json_string(response)
|
||||||
|
|
||||||
|
def query(self, question: str, k: int = 10, table_name: str = None) -> list[dict]:
|
||||||
|
# Extract keywords and parse question
|
||||||
|
keywords = self._extract_keywords(question)
|
||||||
|
rewritten_data = self._rewrite_question(question)
|
||||||
|
keywords = [k for k in rewritten_data.get("keywords", []) if k in keywords]
|
||||||
|
rewritten_question = rewritten_data.get("rewrite", "")
|
||||||
|
|
||||||
|
# Vector search with rewritten question
|
||||||
|
question_embedding = self.embeddings.embed_query(rewritten_question)
|
||||||
|
table_name = table_name or self.default_table_name
|
||||||
|
table = self.db.open_table(table_name)
|
||||||
|
|
||||||
|
indices = table.list_indices()
|
||||||
|
index_exists = any(
|
||||||
|
index["column_name"] == "text_fts" and index["index_type"] == "INVERTED"
|
||||||
|
for index in indices
|
||||||
|
)
|
||||||
|
|
||||||
|
if not index_exists:
|
||||||
|
try:
|
||||||
|
table.create_fts_index("text_fts")
|
||||||
|
except ValueError as e:
|
||||||
|
if "Index already exists" in str(e):
|
||||||
|
# If index exists but was not detected, try replacing it
|
||||||
|
table.create_fts_index("text_fts", replace=True)
|
||||||
|
else:
|
||||||
|
raise e
|
||||||
|
combined = []
|
||||||
|
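# One hybrid (vector + full-text) search per keyword, all against the rewritten-question embedding; results are merged and sorted newest-first below.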
for key in keywords:
|
||||||
|
combined += (table.search(query_type="hybrid")
|
||||||
|
.vector(question_embedding)
|
||||||
|
.text(key)
|
||||||
|
.select(["text", "extension", "time_stamp"])
|
||||||
|
.limit(k)
|
||||||
|
.to_list())
|
||||||
|
# print(table.search(query_type="fts")
|
||||||
|
# .vector([])
|
||||||
|
# .text(key)
|
||||||
|
# .select(["text", "extension", "time_stamp"])
|
||||||
|
# .limit(k)
|
||||||
|
# .to_list())
|
||||||
|
# Rerank results (simple time-weighted sort)
|
||||||
|
return sorted(combined,
|
||||||
|
key=lambda x: -x['time_stamp'])
|
||||||
|
|
||||||
|
|
||||||
|
rag_pipline = RAGPipeline()
|
||||||
|
if __name__ == "__main__":
|
||||||
|
# Example: insert raw text content directly
|
||||||
|
rag_pipline.insert_text_content("这是一个要嵌入的示例文本。")
|
||||||
|
result = rag_pipline.query("示例文本是什么?")
|
||||||
|
print(result)
|
208
extension/standard.py
Normal file
208
extension/standard.py
Normal file
@@ -0,0 +1,208 @@
|
|||||||
|
import base64
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
|
||||||
|
from pysmx.SM3 import hexdigest
|
||||||
|
|
||||||
|
from core.config import conf
|
||||||
|
from core.types import BaseCRUD
|
||||||
|
from core.model import run_llm_by_message
|
||||||
|
from core.types import BasePrompt
|
||||||
|
|
||||||
|
|
||||||
|
class LocalResourceManager:
|
||||||
|
|
||||||
|
def __init__(self, base_dir: str = "/docs"):
|
||||||
|
self.BASE_DIR = os.getcwd() + base_dir
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def generate_sm3(file_bytes: bytes):
|
||||||
|
return hexdigest(file_bytes)
|
||||||
|
|
||||||
|
def exists(self, resource_type: str, resource_name: str) -> bool:
|
||||||
|
path = "\\".join([i for i in [self.BASE_DIR, resource_type, resource_name] if i != ""])
|
||||||
|
return os.path.exists(path)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _write(path: str, file: bytes):
|
||||||
|
with open(path, "wb") as f:
|
||||||
|
f.write(file)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _read(path: str) -> bytes:
|
||||||
|
with open(path, "rb") as f:
|
||||||
|
return f.read()
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _remove(path) -> bool:
|
||||||
|
try:
|
||||||
|
os.remove(path)
|
||||||
|
return True
|
||||||
|
except Exception as e:
|
||||||
|
return False
|
||||||
|
|
||||||
|
def create(self, resource_type: str, resource_name: str, file: bytes) -> bool:
|
||||||
|
exists = self.exists(resource_type, resource_name)
|
||||||
|
if exists:
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
file_path = "/".join([i for i in [self.BASE_DIR, resource_type, resource_name] if i != ""])
|
||||||
|
try:
|
||||||
|
self._write(file_path, file)
|
||||||
|
return True
|
||||||
|
except Exception as e:
|
||||||
|
raise Exception(f"Create file failed: {file_path}")
|
||||||
|
|
||||||
|
def update(self, resource_type: str, resource_name: str, file: bytes) -> bool:
|
||||||
|
exists = self.exists(resource_type, resource_name)
|
||||||
|
if not exists:
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
file_path = "/".join([i for i in [self.BASE_DIR, resource_type, resource_name] if i != ""])
|
||||||
|
try:
|
||||||
|
self._write(file_path, file)
|
||||||
|
return True
|
||||||
|
except Exception as e:
|
||||||
|
raise Exception(f"Update file failed: {file_path}")
|
||||||
|
|
||||||
|
def get(self, resource_type: str, resource_name: str) -> bytes:
|
||||||
|
file_path = "\\".join([i for i in [self.BASE_DIR, resource_type, resource_name] if i != ""])
|
||||||
|
exists = self.exists(resource_type, resource_name)
|
||||||
|
if exists:
|
||||||
|
return self._read(file_path)
|
||||||
|
else:
|
||||||
|
raise FileNotFoundError(f"File does not exist: {file_path}")
|
||||||
|
|
||||||
|
def delete(self, resource_type: str, resource_name: str) -> bool:
|
||||||
|
file_path = "/".join([i for i in [self.BASE_DIR, resource_type, resource_name] if i != ""])
|
||||||
|
exists = self.exists(resource_type, resource_name)
|
||||||
|
if exists:
|
||||||
|
return self._remove(file_path)
|
||||||
|
else:
|
||||||
|
raise FileNotFoundError(f"File does not exist: {file_path}")
|
||||||
|
|
||||||
|
def download_bytes(self, path: str, file_type: str = ""):
|
||||||
|
return self.get(file_type, path)
|
||||||
|
|
||||||
|
|
||||||
|
class ChatFileManager(LocalResourceManager):
|
||||||
|
def __init__(self, base_dir: str = "/docs"):
|
||||||
|
super().__init__(base_dir)
|
||||||
|
self.type = "files"
|
||||||
|
self.extension_mapping = json.loads(self.get("json", "file_extension.json"))
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def encode_postfix(content: str) -> str:
|
||||||
|
return base64.b64encode(content.encode("utf-8")).decode("utf-8")
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def decode_postfix(content: str) -> str:
|
||||||
|
return base64.b64decode(content).decode("utf-8")
|
||||||
|
|
||||||
|
def parse_file_id(self, file_id):
|
||||||
|
postfix = ""
|
||||||
|
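# File ids are stored as "<sm3 hash>_<base64-encoded extension>"; split off and decode the extension when present.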
if "_" in file_id:
|
||||||
|
file_id, postfix = file_id.split("_")
|
||||||
|
postfix = "." + self.decode_postfix(postfix)
|
||||||
|
return file_id, postfix
|
||||||
|
|
||||||
|
def _generate_file_id(self, file_name: str, file_content: bytes):
|
||||||
|
postfix = ""
|
||||||
|
if "." in file_name:
|
||||||
|
postfix = file_name.split(".")[-1]
|
||||||
|
file_id = resource_manager.generate_sm3(file_content)
|
||||||
|
return file_id, self.encode_postfix(postfix)
|
||||||
|
|
||||||
|
def c_get(self, file_id: str) -> (bytes, str):
|
||||||
|
file_id, postfix = self.parse_file_id(file_id)
|
||||||
|
return self.get(self.type, file_id), postfix
|
||||||
|
|
||||||
|
def c_create(self, file_name: str, file_content: bytes) -> str:
|
||||||
|
file_id, postfix = self._generate_file_id(file_name, file_content)
|
||||||
|
if self.exists(self.type, file_id):
|
||||||
|
return file_id + "_" + postfix
|
||||||
|
if self.create(self.type, file_id, file_content):
|
||||||
|
return file_id + "_" + postfix
|
||||||
|
else:
|
||||||
|
return ""
|
||||||
|
|
||||||
|
|
||||||
|
class DatabaseManager:
|
||||||
|
def __init__(self, db_path):
|
||||||
|
self.db_path = db_path
|
||||||
|
self.prompts = BaseCRUD('prompts', db_path)
|
||||||
|
self.schemas = BaseCRUD('schemas', db_path)
|
||||||
|
|
||||||
|
def init_db(self):
|
||||||
|
if not os.path.exists(self.db_path):
|
||||||
|
with self.prompts.get_connection() as conn:
|
||||||
|
conn.execute('''
|
||||||
|
CREATE TABLE prompts (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
name TEXT UNIQUE NOT NULL,
|
||||||
|
content TEXT NOT NULL,
|
||||||
|
variables TEXT, -- stores variable information
|
||||||
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||||
|
)
|
||||||
|
''')
|
||||||
|
conn.execute('''
|
||||||
|
CREATE TABLE schemas (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
name TEXT UNIQUE NOT NULL,
|
||||||
|
content TEXT NOT NULL,
|
||||||
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||||
|
)
|
||||||
|
''')
|
||||||
|
conn.commit()
|
||||||
|
|
||||||
|
|
||||||
|
class Prompt(BasePrompt):
|
||||||
|
def _load_prompt(self):
|
||||||
|
return resource_manager.get("prompt", self.name)
|
||||||
|
|
||||||
|
def _load_schema(self):
|
||||||
|
return resource_manager.get("schema", self.name)
|
||||||
|
|
||||||
|
|
||||||
|
class OnlinePrompt(BasePrompt):
|
||||||
|
def _load_prompt(self):
|
||||||
|
result = db_manager.prompts.get_by_name(self.name)
|
||||||
|
if result is None:
|
||||||
|
return None
|
||||||
|
result = dict(result)
|
||||||
|
return result.get("content")
|
||||||
|
|
||||||
|
def _load_schema(self):
|
||||||
|
result = db_manager.schemas.get_by_name(self.name)
|
||||||
|
if result is None:
|
||||||
|
return None
|
||||||
|
result = dict(result)
|
||||||
|
return result.get("content")
|
||||||
|
|
||||||
|
|
||||||
|
db_manager = DatabaseManager(conf.db_uri)
|
||||||
|
db_manager.init_db()
|
||||||
|
|
||||||
|
resource_manager = LocalResourceManager()
|
||||||
|
chat_file_manager = ChatFileManager()
|
||||||
|
|
||||||
|
|
||||||
|
def run_llm_by_template(message: str, template_name: str):
|
||||||
|
pmt = OnlinePrompt(template_name)
|
||||||
|
prompt = pmt.generate(message)
|
||||||
|
return run_llm_by_message(prompt)
|
||||||
|
|
||||||
|
|
||||||
|
def parse_json_string(text: str):
|
||||||
|
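# Strip any chain-of-thought prefix emitted by reasoning models, then pull the JSON payload out of a fenced code block when one is present.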
if "</think>" in text:
|
||||||
|
text = text.split("</think>")[-1]
|
||||||
|
if "```" in text:
|
||||||
|
text = re.findall("```(.*?){0,1}\n(.*?)\n```", text, re.S)[0][1]
|
||||||
|
else:
|
||||||
|
if "'" in text and '"' not in text:
|
||||||
|
text = text.replace("'", "\"")
|
||||||
|
try:
|
||||||
|
return json.loads(text)
|
||||||
|
except Exception:
|
||||||
|
return {}
|
37
function/context.py
Normal file
37
function/context.py
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
# @Filename: context.py
|
||||||
|
# @Author: lychang
|
||||||
|
# @Time: 7/5/2023 5:53 PM
|
||||||
|
|
||||||
|
|
||||||
|
from extension.rag import rag_pipline
|
||||||
|
|
||||||
|
from core.types import BaseTool
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
class RAGSearch(BaseTool):
|
||||||
|
def __init__(self):
|
||||||
|
name = "rag_search"
|
||||||
|
description = "提及在文件中搜索时,使用此工具。"
|
||||||
|
super(RAGSearch, self).__init__(name, description)
|
||||||
|
self.execute = self.search
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _get_sub_task(message: str):
|
||||||
|
return message.split("sub_task:")[-1] if "sub_task:" in message else message
|
||||||
|
|
||||||
|
def search(self, message: str):
|
||||||
|
sub_task = self._get_sub_task(message)
|
||||||
|
result = "不包含相关内容"
|
||||||
|
if self._file:
|
||||||
|
response = rag_pipline.query(sub_task, 10, self._file)
|
||||||
|
results = set([r["text"] for r in response])
|
||||||
|
result = "\n\n##\n".join(results)
|
||||||
|
return self.normal(result)
|
||||||
|
|
||||||
|
|
||||||
|
rag_search = RAGSearch()
|
0
function/file_persist.py
Normal file
0
function/file_persist.py
Normal file
83
function/weather.py
Normal file
83
function/weather.py
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
# @Filename: weather.py
|
||||||
|
# @Author: lychang
|
||||||
|
# @Time: 7/5/2023 5:53 PM
|
||||||
|
import re
|
||||||
|
import json
|
||||||
|
import requests
|
||||||
|
from fuzzywuzzy import process
|
||||||
|
|
||||||
|
from core.types import BaseTool
|
||||||
|
from extension.standard import resource_manager
|
||||||
|
|
||||||
|
|
||||||
|
class CityMatcher:
|
||||||
|
def __init__(self):
|
||||||
|
self.data = json.loads(resource_manager.get("json", "city.json").decode('utf-8'))
|
||||||
|
self.area_map = self._build_area_map()
|
||||||
|
|
||||||
|
def _build_area_map(self):
|
||||||
|
area_map = {}
|
||||||
|
for province, cities in self.data.items():
|
||||||
|
for city, districts in cities.items():
|
||||||
|
for district, info in districts.items():
|
||||||
|
area_map[info['NAMECN']] = info['AREAID']
|
||||||
|
return area_map
|
||||||
|
|
||||||
|
def find_area_id(self, area_name, threshold=80):
|
||||||
|
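# Fuzzy-match the place name against every known district name and return the area IDs of matches scoring at or above the threshold.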
match = process.extract(area_name, self.area_map.keys(), limit=5)
|
||||||
|
return [self.area_map[i[0]] for i in match if i[1] >= threshold]
|
||||||
|
|
||||||
|
|
||||||
|
matcher = CityMatcher()
|
||||||
|
|
||||||
|
|
||||||
|
class WeatherSearch(BaseTool):
|
||||||
|
headers = {
|
||||||
|
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
|
||||||
|
"(KHTML, like Gecko) Chrome/115.0.0.0 Safari/537.36 Edg/115.0.0.0"}
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
name = "weather_search"
|
||||||
|
description = "在提及天气时,对天气数据进行查询"
|
||||||
|
super(WeatherSearch, self).__init__(name, description)
|
||||||
|
self.execute = self.search
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _get_city_code(message: str):
|
||||||
|
area_id = matcher.find_area_id(message)
|
||||||
|
if area_id:
|
||||||
|
return area_id
|
||||||
|
else:
|
||||||
|
return ["101030100"]
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _get_sub_task(message: str):
|
||||||
|
return message.split("sub_task:")[-1] if "sub_task:" in message else message
|
||||||
|
|
||||||
|
def _get_city_info(self, city_code: str):
|
||||||
|
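# Scrape the forecast page for this area id and pull the date, condition and temperature out of the <ul class="t clearfix"> block.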
url = f"http://www.weather.com.cn/weather/{city_code}.shtml"
|
||||||
|
resp = requests.get(url, headers=self.headers)
|
||||||
|
resp.encoding = "utf-8"
|
||||||
|
weather_html = re.findall('<ul class="t clearfix">(.*?)</ul>', resp.text, re.S)[0].replace("\n", "")
|
||||||
|
weather_info = re.findall(
|
||||||
|
'<h1>(.*?)</h1>.*?<p.*?>(.*?)</p><p class="tem">(.*?)</p><p '
|
||||||
|
'class="win">',
|
||||||
|
weather_html)
|
||||||
|
result = city_code + "\n"
|
||||||
|
for wea in weather_info:
|
||||||
|
result += " ".join(wea) + "\n"
|
||||||
|
result = result.replace("</span>", "<span>").replace("<span>", "")
|
||||||
|
result = result.replace("</i>", "<i>").replace("<i>", "")
|
||||||
|
return result
|
||||||
|
|
||||||
|
def search(self, message: str):
|
||||||
|
city_info = ""
|
||||||
|
sub_task = self._get_sub_task(message)
|
||||||
|
for city_code in self._get_city_code(sub_task):
|
||||||
|
city_info += self._get_city_info(city_code)
|
||||||
|
return self.normal(city_info)
|
||||||
|
|
||||||
|
|
||||||
|
weather_search = WeatherSearch()
|
139
function/web_tool.py
Normal file
139
function/web_tool.py
Normal file
@@ -0,0 +1,139 @@
|
|||||||
|
import re
|
||||||
|
from urllib.parse import quote
|
||||||
|
|
||||||
|
import html2text
|
||||||
|
import requests
|
||||||
|
from urlextract import URLExtract
|
||||||
|
|
||||||
|
from core.types import BaseTool
|
||||||
|
|
||||||
|
|
||||||
|
class SplashDriver:
|
||||||
|
def __init__(self, base_url: str):
|
||||||
|
self.base_url = base_url
|
||||||
|
self.headers = None
|
||||||
|
|
||||||
|
def set_options(self, headers: dict = None):
|
||||||
|
"""Set options for splash"""
|
||||||
|
|
||||||
|
headers_lua = """
|
||||||
|
local headers = {
|
||||||
|
["User-Agent"] = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36 Edg/134.0.0.0",
|
||||||
|
["Accept-Language"] = "zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6",
|
||||||
|
["Connection"] = "keep-alive",
|
||||||
|
["Cache-Control"] = "max-age=0",
|
||||||
|
["Upgrade-Insecure-Requests"] = "1",
|
||||||
|
["Accept"] = "text/html,application/xhtml+xml,application/xml,*/*;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
|
||||||
|
{{extra_headers}}
|
||||||
|
}
|
||||||
|
"""
|
||||||
|
if headers:
|
||||||
|
headers_lua = headers_lua.replace("{{extra_headers}}",
|
||||||
|
'\n'.join([f'["{k}"] = "{v}"' for k, v in headers.items()]))
|
||||||
|
else:
|
||||||
|
headers_lua = headers_lua.replace("{{extra_headers}}", "")
|
||||||
|
self.headers = headers_lua
|
||||||
|
|
||||||
|
def _set_lua_script(self, url: str):
|
||||||
|
"""Set lua script for splash"""
|
||||||
|
if not self.headers:
|
||||||
|
self.set_options()
|
||||||
|
lua = f'''
|
||||||
|
function main(splash, args)
|
||||||
|
splash.images_enabled = false
|
||||||
|
splash.private_mode_enabled = true
|
||||||
|
splash.resource_timeout = 10.0
|
||||||
|
local url = "{url}"
|
||||||
|
{self.headers}
|
||||||
|
''' + '''
|
||||||
|
-- make the request
|
||||||
|
local ok, reason = splash:go({url, headers = headers })
|
||||||
|
if not ok then
|
||||||
|
return { error = reason }
|
||||||
|
end
|
||||||
|
|
||||||
|
-- return the result
|
||||||
|
return {
|
||||||
|
html = splash:html(),
|
||||||
|
url = splash:url(),
|
||||||
|
}
|
||||||
|
end
|
||||||
|
'''
|
||||||
|
return lua
|
||||||
|
|
||||||
|
def get(self, url: str):
|
||||||
|
"""Get url with splash"""
|
||||||
|
lua = self._set_lua_script(url)
|
||||||
|
url = f'{self.base_url}/execute?lua_source=' + quote(lua)
|
||||||
|
|
||||||
|
response = requests.get(url)
|
||||||
|
|
||||||
|
return response.json()["html"]
|
||||||
|
|
||||||
|
|
||||||
|
class WebScraper(BaseTool):
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
name = "web_scraper"
|
||||||
|
description = "在提到某些具体的网站地址时,可以从该网站上获取相关信息。"
|
||||||
|
super(WebScraper, self).__init__(name, description)
|
||||||
|
self.extractor = URLExtract()
|
||||||
|
self.driver = SplashDriver(base_url="http://192.168.1.100:8050")
|
||||||
|
|
||||||
|
self.execute = self.search
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _parse_html(html: str) -> str:
|
||||||
|
"""解析HTML"""
|
||||||
|
html = re.sub("<style.*?>.*?</style>", "", html, flags=re.S)
|
||||||
|
html = re.sub("<script.*?>.*?</script>", "", html, flags=re.S)
|
||||||
|
html = re.sub("<textarea.*?>.*?</textarea>", "", html, flags=re.S)
|
||||||
|
html = re.sub("<link.*?/>", "", html, flags=re.S)
|
||||||
|
h = html2text.HTML2Text()
|
||||||
|
h.ignore_links = False
|
||||||
|
h.ignore_images = True
|
||||||
|
h.ignore_tables = False
|
||||||
|
h.ignore_emphasis = True
|
||||||
|
h.ignore_headers = True
|
||||||
|
h.ignore_br = True
|
||||||
|
h.body_width = 0
|
||||||
|
text = h.handle(html)
|
||||||
|
text = re.sub("\[]\(.*?\)", "", text, flags=re.S)
|
||||||
|
return text
|
||||||
|
|
||||||
|
def set_headers(self, headers: dict):
|
||||||
|
self.driver.set_options(headers)
|
||||||
|
|
||||||
|
def split_urls(self, message: str):
|
||||||
|
"""Get urls from message"""
|
||||||
|
urls = self.extractor.find_urls(message)
|
||||||
|
return list(set(urls))
|
||||||
|
|
||||||
|
def get_uri_resource(self, url):
|
||||||
|
"""Fetch and process URI resource"""
|
||||||
|
try:
|
||||||
|
html = self.driver.get(url)
|
||||||
|
protocol = url.split("://")[0]
|
||||||
|
markdown = self._parse_html(html)
|
||||||
|
base_url = url[:-1] if url.endswith("/") else url
|
||||||
|
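# Rewrite protocol-relative (//...) and root-relative (/...) links in the markdown to absolute URLs.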
markdown = markdown.replace("(//", f"({protocol}://")
|
||||||
|
markdown = markdown.replace("(/", f"({base_url}/")
|
||||||
|
return markdown
|
||||||
|
except Exception as e:
|
||||||
|
print(f"Error fetching {url}: {e}")
|
||||||
|
return f"获取网页信息失败\n"
|
||||||
|
|
||||||
|
def search(self, message: str):
|
||||||
|
urls = self.extractor.find_urls(message)
|
||||||
|
uri_resource = "网页信息:\n"
|
||||||
|
if urls:
|
||||||
|
for url in urls:
|
||||||
|
uri_resource += self.get_uri_resource(url)
|
||||||
|
return self.normal(uri_resource)
|
||||||
|
|
||||||
|
|
||||||
|
web_scraper = WebScraper()
|
||||||
|
if __name__ == '__main__':
|
||||||
|
question = "https://cn.bing.com/search?q=%E5%8B%92%E5%B8%83%E6%9C%97%E8%A9%B9%E5%A7%86%E6%96%AF%E6%9C%80%E8%BF%91%E7%9A%84%E6%88%98%E7%BB%A9"
|
||||||
|
result = web_scraper.search(question)
|
||||||
|
print(result["data"])
|
4
http_test.py
Normal file
4
http_test.py
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
import uvicorn
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
uvicorn.run("api:app", host="0.0.0.0", port=80)
|
27
readme.md
Normal file
27
readme.md
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
# chatbot
|
||||||
|
|
||||||
|
## api
|
||||||
|
- base `api config`
|
||||||
|
- chat `chat interface`
|
||||||
|
- mcp `model control protocol interface`
|
||||||
|
- ui `user interface design`
|
||||||
|
- prompt `prompt interface`
|
||||||
|
- schema `schema interface`
|
||||||
|
|
||||||
|
## core
|
||||||
|
- config `config parameters`
|
||||||
|
- model `model parameters`
|
||||||
|
- types `types definition`
|
||||||
|
- role `role definition`
|
||||||
|
- mcp `model control protocol`
|
||||||
|
## ui
|
||||||
|
- index.html `index page`
|
||||||
|
- mcp.html `mcp page`
|
||||||
|
- schema_manager.html `schema manager page`
|
||||||
|
- static `static files`
|
||||||
|
## function
|
||||||
|
- mcp `model control protocol`
|
||||||
|
- chat `chat`
|
||||||
|
- prompt `prompt`
|
||||||
|
- schema `schema`
|
||||||
|
- utils `utils functions`
|
32
requirements.txt
Normal file
32
requirements.txt
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
langchain
|
||||||
|
langchain-ollama
|
||||||
|
langchain-community
|
||||||
|
langchain-openai
|
||||||
|
langchain-core
|
||||||
|
langchain-text-splitters
|
||||||
|
openai
|
||||||
|
requests
|
||||||
|
lancedb
|
||||||
|
pydantic
|
||||||
|
pypdf
|
||||||
|
jieba
|
||||||
|
tantivy
|
||||||
|
minio
|
||||||
|
|
||||||
|
################
|
||||||
|
docx2txt
|
||||||
|
snowland-smx
|
||||||
|
PyJWT
|
||||||
|
fastapi
|
||||||
|
uvicorn
|
||||||
|
cn2an
|
||||||
|
pandas
|
||||||
|
|
||||||
|
python-Levenshtein
|
||||||
|
python-multipart
|
||||||
|
starlette
|
||||||
|
python-dotenv
|
||||||
|
docling
|
||||||
|
fuzzywuzzy
|
||||||
|
html2text
|
||||||
|
urlextract
|
13
structure.md
Normal file
13
structure.md
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
# chat-bot structure
|
||||||
|
|
||||||
|
## agent
|
||||||
|
|
||||||
|
## api
|
||||||
|
|
||||||
|
## db (database)
|
||||||
|
|
||||||
|
## docs
|
||||||
|
|
||||||
|
## function
|
||||||
|
|
||||||
|
## ui
|
BIN
test/1.pdf
Normal file
BIN
test/1.pdf
Normal file
Binary file not shown.
4491
test/a.html
Normal file
4491
test/a.html
Normal file
File diff suppressed because one or more lines are too long
247
test/a.py
Normal file
247
test/a.py
Normal file
@@ -0,0 +1,247 @@
|
|||||||
|
import re
|
||||||
|
import requests
|
||||||
|
from multiprocessing import Pool
|
||||||
|
|
||||||
|
|
||||||
|
def try_url(url_address):
|
||||||
|
try:
|
||||||
|
response = requests.head(url_address)  # use the generated URL
|
||||||
|
if response.headers.get('Content-Type') == 'image/jpeg':
|
||||||
|
print(f"成功获取 {url_address}")
|
||||||
|
return url_address
|
||||||
|
except requests.exceptions.RequestException as e:
|
||||||
|
print(f"请求 {url_address} 时发生错误: {e}")
|
||||||
|
return ""
|
||||||
|
|
||||||
|
|
||||||
|
def find_url_end(url_address):
|
||||||
|
end = 50
|
||||||
|
_, _, _, db, table, start = url_address.split('/')
|
||||||
|
url = f"https://i.tuiimg.net/{db}/{table}/{end}.jpg"
|
||||||
|
first_try = 1 if try_url(url) else -1
|
||||||
|
flag = True
|
||||||
|
while flag:
|
||||||
|
end += first_try
|
||||||
|
url = f"https://i.tuiimg.net/{db}/{table}/{end}.jpg"
|
||||||
|
this_try = 1 if try_url(url) else -1
|
||||||
|
if (this_try + first_try) == 0:
|
||||||
|
flag = False
|
||||||
|
return db, table, start.split('.')[0], end
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
# urls = []
|
||||||
|
# for i in range(8):
|
||||||
|
# for j in range(1, 501):
|
||||||
|
# it = i * 500 + j
|
||||||
|
# db = f"{(i + 1):03}"  # use string formatting to keep this concise
|
||||||
|
# table = f"{it:04}"  # same as above
|
||||||
|
# url = f"https://i.tuiimg.net/{db}/{table}/1.jpg"
|
||||||
|
# urls.append(url)
|
||||||
|
#
|
||||||
|
|
||||||
|
x = [('007', '3446', '1', 41), ('007', '3076', '1', 1), ('004', '1975', '1', 1), ('006', '2764', '1', 1),
|
||||||
|
('007', '3231', '1', 41), ('007', '3187', '1', 35), ('006', '2513', '1', 6), ('007', '3241', '1', 35),
|
||||||
|
('006', '2576', '1', 1), ('007', '3369', '1', 38), ('007', '3416', '1', 36), ('007', '3456', '1', 15),
|
||||||
|
('007', '3120', '1', 4), ('007', '3171', '1', 15), ('007', '3447', '1', 21), ('006', '2980', '1', 1),
|
||||||
|
('006', '2610', '1', 1), ('006', '2923', '1', 1), ('007', '3334', '1', 38), ('007', '3047', '1', 1),
|
||||||
|
('006', '2921', '1', 1), ('007', '3271', '1', 41), ('006', '2864', '1', 1), ('007', '3170', '1', 22),
|
||||||
|
('003', '1282', '1', 1), ('005', '2324', '1', 2), ('006', '2766', '1', 1), ('006', '2985', '1', 1),
|
||||||
|
('007', '3494', '1', 44), ('006', '2749', '1', 1), ('007', '3393', '1', 42), ('006', '2792', '1', 1),
|
||||||
|
('007', '3250', '1', 51), ('007', '3433', '1', 33), ('007', '3078', '1', 1), ('007', '3408', '1', 46),
|
||||||
|
('007', '3130', '1', 9), ('006', '2742', '1', 9), ('007', '3443', '1', 43), ('004', '1791', '1', 1),
|
||||||
|
('006', '2845', '1', 1), ('005', '2341', '1', 4), ('007', '3432', '1', 48), ('008', '3514', '1', 27),
|
||||||
|
('003', '1272', '1', 1), ('005', '2239', '1', 1), ('007', '3259', '1', 44), ('006', '2747', '1', 7),
|
||||||
|
('007', '3053', '1', 1), ('007', '3459', '1', 30), ('007', '3161', '1', 18), ('007', '3206', '1', 47),
|
||||||
|
('007', '3214', '1', 39), ('004', '1520', '1', 1), ('007', '3097', '1', 1), ('007', '3221', '1', 42),
|
||||||
|
('007', '3325', '1', 47), ('004', '1732', '1', 4), ('004', '1941', '1', 1), ('007', '3145', '1', 15),
|
||||||
|
('006', '2658', '1', 1), ('007', '3335', '1', 44), ('007', '3406', '1', 36), ('007', '3101', '1', 1),
|
||||||
|
('006', '2794', '1', 1), ('007', '3151', '1', 20), ('007', '3418', '1', 41), ('007', '3463', '1', 20),
|
||||||
|
('007', '3485', '1', 31), ('006', '2933', '1', 1), ('007', '3184', '1', 24), ('006', '2564', '1', 1),
|
||||||
|
('007', '3125', '1', 5), ('007', '3402', '1', 27), ('007', '3209', '1', 51), ('007', '3083', '1', 1),
|
||||||
|
('007', '3476', '1', 30), ('005', '2368', '1', 1), ('007', '3058', '1', 1), ('007', '3167', '1', 20),
|
||||||
|
('007', '3424', '1', 30), ('007', '3046', '1', 1), ('006', '2834', '1', 1), ('006', '2784', '1', 1),
|
||||||
|
('006', '2667', '1', 1), ('006', '2519', '1', 6), ('007', '3479', '1', 22), ('007', '3499', '1', 55),
|
||||||
|
('007', '3026', '1', 8), ('007', '3034', '1', 6), ('007', '3390', '1', 42), ('007', '3382', '1', 53),
|
||||||
|
('007', '3194', '1', 41), ('006', '2943', '1', 1), ('006', '2638', '1', 1), ('008', '3513', '1', 45),
|
||||||
|
('007', '3141', '1', 11), ('004', '1630', '1', 2), ('007', '3383', '1', 51), ('007', '3210', '1', 44),
|
||||||
|
('005', '2412', '1', 1), ('005', '2165', '1', 1), ('004', '1636', '1', 4), ('006', '2684', '1', 1),
|
||||||
|
('006', '2780', '1', 1), ('006', '2914', '1', 1), ('007', '3379', '1', 39), ('003', '1161', '1', 1),
|
||||||
|
('007', '3378', '1', 43), ('005', '2476', '1', 1), ('006', '2657', '1', 1), ('007', '3277', '1', 36),
|
||||||
|
('007', '3150', '1', 15), ('007', '3331', '1', 39), ('006', '2844', '1', 1), ('007', '3222', '1', 45),
|
||||||
|
('007', '3478', '1', 38), ('007', '3191', '1', 39), ('005', '2365', '1', 3), ('007', '3066', '1', 1),
|
||||||
|
('007', '3264', '1', 45), ('007', '3055', '1', 1), ('007', '3285', '1', 43), ('007', '3050', '1', 1),
|
||||||
|
('007', '3471', '1', 44), ('006', '2920', '1', 1), ('007', '3260', '1', 45), ('006', '2963', '1', 1),
|
||||||
|
('007', '3352', '1', 39), ('007', '3032', '1', 5), ('007', '3468', '1', 23), ('007', '3403', '1', 48),
|
||||||
|
('007', '3243', '1', 39), ('007', '3483', '1', 28), ('005', '2455', '1', 1), ('007', '3138', '1', 13),
|
||||||
|
('007', '3339', '1', 39), ('007', '3018', '1', 1), ('007', '3126', '1', 7), ('007', '3258', '1', 49),
|
||||||
|
('007', '3280', '1', 29), ('007', '3401', '1', 33), ('007', '3491', '1', 32), ('003', '1086', '1', 1),
|
||||||
|
('007', '3128', '1', 9), ('007', '3088', '1', 1), ('007', '3102', '1', 1), ('007', '3351', '1', 41),
|
||||||
|
('007', '3080', '1', 1), ('007', '3237', '1', 41), ('006', '2717', '1', 1), ('007', '3196', '1', 41),
|
||||||
|
('006', '2869', '1', 1), ('004', '1871', '1', 1), ('007', '3027', '1', 1), ('007', '3411', '1', 40),
|
||||||
|
('007', '3440', '1', 37), ('007', '3131', '1', 11), ('006', '2863', '1', 1), ('006', '2946', '1', 1),
|
||||||
|
('004', '1756', '1', 1), ('007', '3136', '1', 14), ('004', '1574', '1', 8), ('007', '3168', '1', 20),
|
||||||
|
('007', '3395', '1', 44), ('007', '3090', '1', 1), ('007', '3367', '1', 42), ('007', '3014', '1', 1),
|
||||||
|
('007', '3118', '1', 11), ('007', '3200', '1', 43), ('007', '3024', '1', 1), ('007', '3336', '1', 35),
|
||||||
|
('006', '2824', '1', 1), ('007', '3410', '1', 29), ('007', '3435', '1', 24), ('007', '3012', '1', 12),
|
||||||
|
('007', '3291', '1', 35), ('006', '2651', '1', 1), ('007', '3436', '1', 58), ('006', '2714', '1', 1),
|
||||||
|
('006', '2790', '1', 1), ('007', '3279', '1', 39), ('005', '2362', '1', 4), ('007', '3069', '1', 1),
|
||||||
|
('007', '3300', '1', 51), ('008', '3512', '1', 36), ('005', '2356', '1', 1), ('006', '2581', '1', 1),
|
||||||
|
('007', '3391', '1', 40), ('007', '3052', '1', 1), ('007', '3165', '1', 18), ('006', '2740', '1', 9),
|
||||||
|
('006', '2798', '1', 1), ('007', '3375', '1', 40), ('007', '3158', '1', 16), ('006', '2935', '1', 1),
|
||||||
|
('006', '2689', '1', 1), ('005', '2424', '1', 1), ('007', '3085', '1', 1), ('007', '3318', '1', 45),
|
||||||
|
('007', '3430', '1', 35), ('007', '3109', '1', 1), ('007', '3450', '1', 30), ('004', '1565', '1', 1),
|
||||||
|
('007', '3354', '1', 43), ('007', '3488', '1', 33), ('006', '2512', '1', 1), ('007', '3333', '1', 39),
|
||||||
|
('007', '3174', '1', 23), ('006', '2970', '1', 1), ('007', '3452', '1', 30), ('006', '2637', '1', 1),
|
||||||
|
('006', '2841', '1', 1), ('006', '2915', '1', 1), ('007', '3016', '1', 1), ('007', '3091', '1', 1),
|
||||||
|
('007', '3328', '1', 44), ('007', '3147', '1', 15), ('007', '3301', '1', 45), ('007', '3162', '1', 19),
|
||||||
|
('007', '3068', '1', 1), ('003', '1240', '1', 1), ('004', '1953', '1', 1), ('007', '3114', '1', 1),
|
||||||
|
('007', '3124', '1', 7), ('007', '3282', '1', 41), ('006', '2823', '1', 14), ('007', '3195', '1', 39),
|
||||||
|
('003', '1035', '1', 1), ('007', '3361', '1', 45), ('007', '3293', '1', 42), ('004', '1536', '1', 3),
|
||||||
|
('007', '3313', '1', 46), ('007', '3453', '1', 42), ('007', '3481', '1', 49), ('007', '3442', '1', 30),
|
||||||
|
('007', '3057', '1', 1), ('007', '3340', '1', 36), ('007', '3105', '1', 1), ('006', '2875', '1', 1),
|
||||||
|
('007', '3079', '1', 1), ('007', '3358', '1', 44), ('006', '2785', '1', 1), ('006', '2902', '1', 1),
|
||||||
|
('006', '2991', '1', 1), ('006', '2907', '1', 1), ('006', '2857', '1', 1), ('007', '3213', '1', 39),
|
||||||
|
('007', '3245', '1', 39), ('007', '3357', '1', 42), ('006', '2672', '1', 1), ('007', '3148', '1', 10),
|
||||||
|
('007', '3486', '1', 32), ('006', '2752', '1', 1), ('006', '2774', '1', 1), ('006', '2958', '1', 1),
|
||||||
|
('007', '3226', '1', 42), ('007', '3374', '1', 43), ('005', '2296', '1', 7), ('005', '2437', '1', 1),
|
||||||
|
('007', '3087', '1', 1), ('007', '3341', '1', 39), ('005', '2475', '1', 1), ('007', '3013', '1', 1),
|
||||||
|
('007', '3084', '1', 1), ('007', '3460', '1', 27), ('007', '3225', '1', 39), ('007', '3065', '1', 5),
|
||||||
|
('007', '3135', '1', 13), ('007', '3190', '1', 27), ('008', '3516', '1', 33), ('007', '3425', '1', 32),
|
||||||
|
('007', '3475', '1', 45), ('007', '3474', '1', 24), ('006', '2858', '1', 1), ('004', '1900', '1', 1),
|
||||||
|
('007', '3123', '1', 4), ('005', '2121', '1', 1), ('007', '3344', '1', 40), ('007', '3428', '1', 35),
|
||||||
|
('006', '2691', '1', 1), ('006', '2702', '1', 1), ('007', '3297', '1', 43), ('006', '2791', '1', 1),
|
||||||
|
('007', '3247', '1', 49), ('007', '3113', '1', 1), ('006', '2679', '1', 1), ('007', '3033', '1', 17),
|
||||||
|
('007', '3077', '1', 1), ('006', '2760', '1', 1), ('007', '3234', '1', 44), ('007', '3160', '1', 20),
|
||||||
|
('006', '2817', '1', 1), ('007', '3322', '1', 46), ('007', '3464', '1', 26), ('007', '3239', '1', 41),
|
||||||
|
('006', '2854', '1', 1), ('006', '2734', '1', 1), ('007', '3265', '1', 51), ('007', '3489', '1', 29),
|
||||||
|
('007', '3480', '1', 30), ('007', '3415', '1', 29), ('007', '3299', '1', 46), ('006', '2886', '1', 4),
|
||||||
|
('007', '3204', '1', 53), ('006', '2642', '1', 1), ('007', '3388', '1', 36), ('006', '2663', '1', 1),
|
||||||
|
('006', '2815', '1', 1), ('006', '2715', '1', 1), ('006', '2856', '1', 1), ('007', '3177', '1', 26),
|
||||||
|
('007', '3342', '1', 39), ('005', '2034', '1', 10), ('007', '3154', '1', 14), ('006', '2964', '1', 1),
|
||||||
|
('006', '2853', '1', 1), ('007', '3327', '1', 38), ('006', '2913', '1', 7), ('006', '2762', '1', 1),
|
||||||
|
('007', '3248', '1', 31), ('007', '3405', '1', 40), ('006', '2832', '1', 1), ('007', '3372', '1', 44),
|
||||||
|
('006', '2934', '1', 1), ('007', '3030', '1', 1), ('007', '3186', '1', 30), ('007', '3208', '1', 32),
|
||||||
|
('006', '2705', '1', 1), ('007', '3255', '1', 40), ('005', '2177', '1', 1), ('003', '1342', '1', 3),
|
||||||
|
('006', '2669', '1', 1), ('006', '2685', '1', 1), ('006', '2729', '1', 1), ('007', '3044', '1', 5),
|
||||||
|
('005', '2294', '1', 6), ('007', '3037', '1', 1), ('007', '3266', '1', 49), ('007', '3278', '1', 40),
|
||||||
|
('007', '3311', '1', 51), ('007', '3253', '1', 55), ('007', '3107', '1', 1), ('007', '3202', '1', 42),
|
||||||
|
('007', '3407', '1', 53), ('007', '3272', '1', 45), ('004', '1947', '1', 1), ('005', '2281', '1', 6),
|
||||||
|
('006', '2931', '1', 1), ('007', '3347', '1', 44), ('007', '3178', '1', 27), ('008', '3509', '1', 21),
|
||||||
|
('007', '3377', '1', 38), ('007', '3059', '1', 1), ('007', '3438', '1', 28), ('006', '2969', '1', 1),
|
||||||
|
('007', '3146', '1', 14), ('007', '3249', '1', 47), ('007', '3455', '1', 34), ('006', '2905', '1', 1),
|
||||||
|
('007', '3309', '1', 44), ('007', '3121', '1', 3), ('007', '3244', '1', 46), ('007', '3427', '1', 35),
|
||||||
|
('006', '2619', '1', 1), ('006', '2928', '1', 1), ('007', '3337', '1', 42), ('007', '3317', '1', 45),
|
||||||
|
('007', '3039', '1', 4), ('007', '3404', '1', 49), ('006', '2903', '1', 1), ('006', '2656', '1', 1),
|
||||||
|
('007', '3070', '1', 1), ('006', '2768', '1', 9), ('007', '3197', '1', 35), ('008', '3503', '1', 34),
|
||||||
|
('007', '3320', '1', 46), ('006', '2972', '1', 1), ('007', '3261', '1', 43), ('007', '3086', '1', 1),
|
||||||
|
('007', '3353', '1', 28), ('007', '3286', '1', 40), ('006', '2604', '1', 1), ('007', '3095', '1', 1),
|
||||||
|
('007', '3010', '1', 1), ('007', '3431', '1', 32), ('006', '2924', '1', 1), ('007', '3099', '1', 8),
|
||||||
|
('006', '2986', '1', 1), ('007', '3462', '1', 25), ('007', '3230', '1', 43), ('007', '3363', '1', 49),
|
||||||
|
('003', '1353', '1', 6), ('007', '3380', '1', 37), ('004', '1679', '1', 1), ('007', '3263', '1', 43),
|
||||||
|
('007', '3296', '1', 39), ('006', '2866', '1', 1), ('007', '3129', '1', 11), ('006', '2661', '1', 1),
|
||||||
|
('005', '2472', '1', 1), ('007', '3038', '1', 1), ('007', '3396', '1', 37), ('007', '3139', '1', 14),
|
||||||
|
('008', '3507', '1', 32), ('007', '3305', '1', 41), ('007', '3314', '1', 45), ('006', '2833', '1', 1),
|
||||||
|
('007', '3500', '1', 31), ('007', '3180', '1', 36), ('006', '2753', '1', 10), ('006', '2622', '1', 1),
|
||||||
|
('007', '3042', '1', 1), ('004', '1502', '1', 1), ('007', '3175', '1', 21), ('007', '3089', '1', 1),
|
||||||
|
('006', '2660', '1', 8), ('003', '1196', '1', 1), ('007', '3212', '1', 41), ('007', '3346', '1', 47),
|
||||||
|
('006', '2620', '1', 1), ('007', '3412', '1', 26), ('007', '3142', '1', 14), ('006', '2997', '1', 1),
|
||||||
|
('007', '3022', '1', 4), ('007', '3386', '1', 37), ('007', '3398', '1', 16), ('006', '2593', '1', 1),
|
||||||
|
('005', '2452', '1', 1), ('006', '2837', '1', 1), ('006', '2850', '1', 1), ('006', '2925', '1', 1),
|
||||||
|
('007', '3469', '1', 21), ('007', '3306', '1', 43), ('007', '3143', '1', 15), ('007', '3417', '1', 47),
|
||||||
|
('006', '2582', '1', 1), ('006', '2874', '1', 1), ('007', '3072', '1', 1), ('007', '3270', '1', 42),
|
||||||
|
('007', '3426', '1', 32), ('006', '2635', '1', 1), ('006', '2879', '1', 1), ('007', '3302', '1', 36),
|
||||||
|
('007', '3497', '1', 45), ('007', '3330', '1', 46), ('007', '3111', '1', 1), ('007', '3067', '1', 1),
|
||||||
|
('007', '3423', '1', 39), ('006', '2503', '1', 1), ('004', '1589', '1', 1), ('007', '3173', '1', 25),
|
||||||
|
('003', '1339', '1', 1), ('007', '3185', '1', 32), ('006', '2554', '1', 1), ('005', '2499', '1', 1),
|
||||||
|
('006', '2968', '1', 1), ('007', '3294', '1', 48), ('007', '3470', '1', 33), ('007', '3048', '1', 1),
|
||||||
|
('007', '3227', '1', 49), ('007', '3295', '1', 43), ('007', '3268', '1', 51), ('007', '3045', '1', 1),
|
||||||
|
('006', '2992', '1', 1), ('007', '3399', '1', 31), ('007', '3366', '1', 38), ('007', '3267', '1', 44),
|
||||||
|
('007', '3183', '1', 26), ('007', '3495', '1', 30), ('006', '2952', '1', 1), ('007', '3473', '1', 30),
|
||||||
|
('007', '3133', '1', 10), ('006', '2835', '1', 1), ('006', '2563', '1', 1), ('007', '3332', '1', 45),
|
||||||
|
('007', '3188', '1', 39), ('007', '3419', '1', 24), ('004', '1577', '1', 14), ('006', '2825', '1', 1),
|
||||||
|
('006', '2848', '1', 1), ('007', '3219', '1', 39), ('007', '3326', '1', 45), ('006', '2951', '1', 1),
|
||||||
|
('006', '2893', '1', 1), ('007', '3273', '1', 54), ('005', '2070', '1', 7), ('006', '2718', '1', 1),
|
||||||
|
('007', '3207', '1', 46), ('007', '3062', '1', 1), ('007', '3284', '1', 46), ('003', '1124', '1', 1),
|
||||||
|
('005', '2438', '1', 1), ('005', '2168', '1', 1), ('006', '2602', '1', 1), ('007', '3017', '1', 14),
|
||||||
|
('006', '2682', '1', 1), ('007', '3073', '1', 1), ('006', '2754', '1', 1), ('007', '3021', '1', 1),
|
||||||
|
('006', '2797', '1', 1), ('006', '2881', '1', 1), ('006', '2818', '1', 1), ('007', '3229', '1', 40),
|
||||||
|
('007', '3252', '1', 49), ('007', '3329', '1', 39), ('007', '3137', '1', 14), ('007', '3155', '1', 12),
|
||||||
|
('007', '3275', '1', 42), ('008', '3502', '1', 45), ('006', '2767', '1', 6), ('006', '2671', '1', 1),
|
||||||
|
('007', '3355', '1', 45), ('007', '3198', '1', 22), ('007', '3100', '1', 1), ('007', '3220', '1', 38),
|
||||||
|
('003', '1147', '1', 1), ('007', '3233', '1', 43), ('006', '2805', '1', 1), ('006', '2736', '1', 1),
|
||||||
|
('007', '3451', '1', 42), ('007', '3119', '1', 7), ('006', '2540', '1', 1), ('007', '3304', '1', 38),
|
||||||
|
('007', '3181', '1', 37), ('008', '3511', '1', 45), ('006', '2575', '1', 1), ('007', '3345', '1', 45),
|
||||||
|
('006', '2829', '1', 1), ('007', '3028', '1', 1), ('007', '3164', '1', 16), ('007', '3246', '1', 55),
|
||||||
|
('007', '3149', '1', 18), ('007', '3287', '1', 40), ('007', '3472', '1', 39), ('007', '3323', '1', 44),
|
||||||
|
('006', '2793', '1', 1), ('007', '3482', '1', 41), ('006', '2861', '1', 1), ('007', '3029', '1', 1),
|
||||||
|
('007', '3223', '1', 43), ('007', '3093', '1', 1), ('007', '3132', '1', 12), ('007', '3218', '1', 39),
|
||||||
|
('007', '3228', '1', 48), ('007', '3312', '1', 45), ('007', '3454', '1', 23), ('007', '3115', '1', 1),
|
||||||
|
('007', '3216', '1', 39), ('007', '3307', '1', 47), ('007', '3496', '1', 31), ('006', '2782', '1', 1),
|
||||||
|
('006', '2855', '1', 1), ('006', '2998', '1', 1), ('007', '3292', '1', 39), ('007', '3082', '1', 1),
|
||||||
|
('007', '3169', '1', 17), ('007', '3276', '1', 45), ('007', '3251', '1', 44), ('005', '2474', '1', 1),
|
||||||
|
('005', '2465', '1', 1), ('006', '2772', '1', 1), ('007', '3338', '1', 41), ('006', '2571', '1', 1),
|
||||||
|
('006', '2888', '1', 1), ('007', '3257', '1', 36), ('007', '3303', '1', 42), ('007', '3394', '1', 37),
|
||||||
|
('006', '2867', '1', 1), ('007', '3199', '1', 54), ('006', '2826', '1', 1), ('007', '3360', '1', 38),
|
||||||
|
('007', '3281', '1', 40), ('007', '3074', '1', 7), ('007', '3444', '1', 25), ('007', '3144', '1', 11),
|
||||||
|
('004', '1541', '1', 1), ('007', '3364', '1', 44), ('006', '2724', '1', 1), ('007', '3384', '1', 35),
|
||||||
|
('007', '3236', '1', 45), ('007', '3315', '1', 43), ('007', '3420', '1', 21), ('006', '2683', '1', 1),
|
||||||
|
('007', '3116', '1', 1), ('007', '3461', '1', 35), ('006', '2831', '1', 1), ('007', '3172', '1', 22),
|
||||||
|
('007', '3434', '1', 26), ('007', '3256', '1', 48), ('006', '2643', '1', 1), ('003', '1248', '1', 1),
|
||||||
|
('007', '3140', '1', 14), ('006', '2917', '1', 1), ('008', '3506', '1', 36), ('006', '2981', '1', 1),
|
||||||
|
('007', '3441', '1', 44), ('007', '3163', '1', 16), ('007', '3400', '1', 32), ('006', '2650', '1', 1),
|
||||||
|
('007', '3477', '1', 31), ('007', '3448', '1', 25), ('006', '2967', '1', 1), ('007', '3201', '1', 45),
|
||||||
|
('006', '2725', '1', 1), ('007', '3298', '1', 38), ('007', '3465', '1', 29), ('007', '3075', '1', 1),
|
||||||
|
('007', '3466', '1', 29), ('006', '2865', '1', 1), ('006', '2584', '1', 1), ('007', '3321', '1', 44),
|
||||||
|
('006', '2960', '1', 1), ('008', '3508', '1', 37), ('007', '3011', '1', 1), ('006', '2956', '1', 1),
|
||||||
|
('005', '2192', '1', 1), ('006', '2955', '1', 1), ('006', '2577', '1', 1), ('007', '3397', '1', 41),
|
||||||
|
('007', '3081', '1', 1), ('007', '3290', '1', 38), ('007', '3389', '1', 38), ('008', '3515', '1', 34),
|
||||||
|
('007', '3005', '1', 1), ('007', '3371', '1', 34), ('007', '3224', '1', 41), ('007', '3343', '1', 37),
|
||||||
|
('006', '2950', '1', 1), ('007', '3487', '1', 31), ('006', '2878', '1', 1), ('006', '2558', '1', 1),
|
||||||
|
('007', '3152', '1', 20), ('007', '3283', '1', 43), ('007', '3240', '1', 39), ('007', '3274', '1', 44),
|
||||||
|
('008', '3505', '1', 32), ('006', '2809', '1', 1), ('007', '3413', '1', 40), ('007', '3288', '1', 41),
|
||||||
|
('006', '2680', '1', 1), ('006', '2912', '1', 1), ('008', '3504', '1', 33), ('007', '3356', '1', 55),
|
||||||
|
('006', '2652', '1', 1), ('007', '3319', '1', 40), ('007', '3035', '1', 9), ('007', '3365', '1', 49),
|
||||||
|
('006', '2871', '1', 1), ('006', '2803', '1', 1), ('006', '2966', '1', 1), ('006', '2748', '1', 10),
|
||||||
|
('006', '2974', '1', 1), ('007', '3238', '1', 49), ('007', '3490', '1', 31), ('005', '2463', '1', 1),
|
||||||
|
('007', '3370', '1', 30), ('006', '2988', '1', 1), ('006', '2601', '1', 1), ('007', '3467', '1', 30),
|
||||||
|
('007', '3007', '1', 1), ('007', '3381', '1', 48), ('006', '2801', '1', 1), ('007', '3362', '1', 48),
|
||||||
|
('007', '3232', '1', 59), ('006', '2948', '1', 1), ('007', '3117', '1', 1), ('007', '3254', '1', 48),
|
||||||
|
('007', '3324', '1', 44), ('005', '2490', '1', 5), ('007', '3445', '1', 33), ('007', '3205', '1', 38),
|
||||||
|
('007', '3106', '1', 1), ('006', '2949', '1', 1), ('005', '2443', '1', 1), ('007', '3004', '1', 1),
|
||||||
|
('007', '3019', '1', 1), ('006', '2665', '1', 1), ('007', '3063', '1', 1), ('007', '3211', '1', 47),
|
||||||
|
('007', '3385', '1', 59), ('007', '3457', '1', 28), ('003', '1024', '1', 1), ('006', '2588', '1', 1),
|
||||||
|
('007', '3112', '1', 1), ('007', '3308', '1', 43), ('007', '3159', '1', 13), ('007', '3421', '1', 40),
|
||||||
|
('007', '3449', '1', 17), ('007', '3484', '1', 20), ('006', '2975', '1', 1), ('005', '2366', '1', 1),
|
||||||
|
('007', '3203', '1', 36), ('005', '2498', '1', 1), ('007', '3493', '1', 39), ('006', '2795', '1', 1),
|
||||||
|
('007', '3376', '1', 38), ('006', '2822', '1', 1), ('007', '3189', '1', 38), ('007', '3009', '1', 1),
|
||||||
|
('007', '3349', '1', 51), ('007', '3103', '1', 1), ('006', '2509', '1', 1), ('006', '2959', '1', 1),
|
||||||
|
('006', '2900', '1', 1), ('007', '3182', '1', 26), ('007', '3269', '1', 51), ('007', '3310', '1', 45),
|
||||||
|
('006', '2901', '1', 1), ('007', '3235', '1', 49), ('007', '3458', '1', 30), ('006', '2779', '1', 1),
|
||||||
|
('007', '3387', '1', 44), ('007', '3492', '1', 38), ('006', '2836', '1', 1), ('007', '3392', '1', 34),
|
||||||
|
('006', '2501', '1', 1), ('004', '1852', '1', 1), ('007', '3316', '1', 42), ('006', '2945', '1', 1),
|
||||||
|
('007', '3242', '1', 39), ('007', '3350', '1', 35), ('007', '3110', '1', 1), ('006', '2787', '1', 1),
|
||||||
|
('005', '2299', '1', 6), ('007', '3289', '1', 41), ('007', '3008', '1', 1), ('006', '2618', '1', 1),
|
||||||
|
('007', '3043', '1', 4), ('007', '3368', '1', 42), ('007', '3414', '1', 37), ('007', '3193', '1', 36),
|
||||||
|
('008', '3501', '1', 41), ('008', '3510', '1', 32), ('007', '3156', '1', 12), ('007', '3498', '1', 31),
|
||||||
|
('006', '2897', '1', 1), ('006', '2614', '1', 1), ('007', '3157', '1', 12), ('007', '3215', '1', 42),
|
||||||
|
('006', '2585', '1', 1), ('007', '3422', '1', 28), ('007', '3134', '1', 12), ('007', '3262', '1', 42),
|
||||||
|
('004', '1633', '1', 1), ('007', '3429', '1', 26), ('006', '2745', '1', 11), ('006', '2693', '1', 3),
|
||||||
|
('007', '3437', '1', 28), ('007', '3071', '1', 1), ('007', '3409', '1', 40), ('007', '3166', '1', 20),
|
||||||
|
('007', '3153', '1', 15), ('007', '3348', '1', 40), ('007', '3359', '1', 27), ('007', '3217', '1', 40),
|
||||||
|
('005', '2237', '1', 1), ('007', '3176', '1', 24), ('007', '3122', '1', 4), ('006', '2739', '1', 1),
|
||||||
|
('006', '2843', '1', 10), ('007', '3179', '1', 28), ('007', '3373', '1', 41), ('007', '3439', '1', 24),
|
||||||
|
('007', '3023', '1', 1), ('007', '3192', '1', 30), ('007', '3127', '1', 11)]

# Index each tuple by its first two fields; the value is the (start, end) page
# range, with the start cast from str to int so it can be fed to range() below.
mapping = {}
for i in x:
    if i[0] in mapping:
        mapping[i[0]][i[1]] = (int(i[2]), i[3])
    else:
        mapping[i[0]] = {i[1]: (int(i[2]), i[3])}

# Walk the nested mapping and print every image URL implied by each page range.
for k, v in mapping.items():
    print(f"{k}:")
    for i, j in v.items():
        print(f"    {i}:")
        for m in range(j[0], j[1] + 1):
            print(f"        https://i.tuiimg.net/{k}/{i}/{m}.jpg")
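
A minimal companion sketch, in case the expanded URLs are needed as values rather than printed output: the helper name `expand_urls` is illustrative and not part of the original script, and the optional fetch at the end assumes network access, using only the standard library.

from urllib.request import urlopen

def expand_urls(mapping):
    # Re-derive the same URLs the loop above prints, but return them as a list.
    urls = []
    for group, albums in mapping.items():
        for album, (start, end) in albums.items():
            urls.extend(f"https://i.tuiimg.net/{group}/{album}/{n}.jpg"
                        for n in range(start, end + 1))
    return urls

# Optional check: fetch the first URL and report how many bytes came back.
first = expand_urls(mapping)[0]
print(first, len(urlopen(first, timeout=10).read()), "bytes")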
|
BIN
test/img.png
Normal file
BIN
test/img.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 89 KiB |
869
test/news.md
Normal file
869
test/news.md
Normal file
@@ -0,0 +1,869 @@
|
|||||||
|
[](//www.chinanews.com.cn/gn/2023/03-21/9975606.shtml)
|
||||||
|
|
||||||
|
* 头条
|
||||||
|
* 要闻
|
||||||
|
* 专题
|
||||||
|
* 视频
|
||||||
|
* 精彩图片
|
||||||
|
* 时政
|
||||||
|
* 社会
|
||||||
|
* 国际
|
||||||
|
* 财经
|
||||||
|
* 健康生活
|
||||||
|
* 创意
|
||||||
|
* 大湾区
|
||||||
|
* 融视听
|
||||||
|
* 文体
|
||||||
|
* 华人
|
||||||
|
* 直播
|
||||||
|
* English
|
||||||
|
* 融媒体矩阵
|
||||||
|
* 品牌栏目
|
||||||
|
|
||||||
|
搜 索
|
||||||
|
|
||||||
|
[WAP版](https://m.chinanews.com/)
|
||||||
|
[客户端](//www.chinanews.com.cn/app/chinanews/index.shtml)
|
||||||
|
|
||||||
|
融媒矩阵
|
||||||
|
|
||||||
|
* [_中新社新浪法人微博_](//weibo.com/u/3604378011)
|
||||||
|
* [_中新网新浪法人微博_](//weibo.com/chinanewsv)
|
||||||
|
* _中新社微信公众号_
|
||||||
|
* 中新网微信公众号
|
||||||
|
|
||||||
|
[企业邮箱](http://mail.chinanews.com.cn/) [English](//www.ecns.cn/)
|
||||||
|
|
||||||
|
* [即时](/scroll-news/news1.html)
|
||||||
|
* [时政](/china/)
|
||||||
|
* [理论](/theory.shtml)
|
||||||
|
* [东西问](/dxw/)
|
||||||
|
* [财经](/finance/)
|
||||||
|
* [国际](/world/)
|
||||||
|
* [社会](/society/)
|
||||||
|
* [大湾区](/dwq/)
|
||||||
|
* [华人](//www.chinaqw.com/)
|
||||||
|
* [文娱](/wy/)
|
||||||
|
* [体育](/sports/)
|
||||||
|
* [教育](/edu/)
|
||||||
|
* [法治](/fazhi/)
|
||||||
|
* [健康](/health/)
|
||||||
|
* [生活](/life/)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
* [同心圆](/txy/)
|
||||||
|
* [铸牢中华民族共同体意识](/zlgtt/)
|
||||||
|
* [一带一路](/ydyl/index.shtml)
|
||||||
|
* [中国侨网](//www.chinaqw.com/)
|
||||||
|
* [中国新闻周刊](//www.inewsweek.cn/)
|
||||||
|
* [国是直通车](/gsztc/)
|
||||||
|
* [视频](/shipin/)
|
||||||
|
* [直播](/live.shtml)
|
||||||
|
* [创意](/chuangyi/)
|
||||||
|
* [图片](/photo/)
|
||||||
|
* [专栏](/zhuanlan/)
|
||||||
|
* 各地
|
||||||
|
|
||||||
|
[安徽](//www.ah.chinanews.com.cn/)|[北京](//www.bj.chinanews.com.cn/)|[重庆](//www.cq.chinanews.com.cn/)|[福建](//www.fj.chinanews.com.cn/)|[甘肃](//www.gs.chinanews.com.cn/)|
|
||||||
|
[贵州](//www.gz.chinanews.com.cn/)|[广东](//www.gd.chinanews.com.cn/)|[广西](//www.gx.chinanews.com.cn/)|[海南](//www.hi.chinanews.com.cn/)|[河北](//www.heb.chinanews.com.cn/)|
|
||||||
|
[河南](//www.ha.chinanews.com.cn/)|
|
||||||
|
[湖北](//www.hb.chinanews.com.cn/)|[湖南](//www.hn.chinanews.com.cn/)|[黑龙江](//www.hlj.chinanews.com.cn/)|[江苏](//www.js.chinanews.com.cn/)|[江西](//www.jx.chinanews.com.cn/)|
|
||||||
|
[吉林](//www.jl.chinanews.com.cn/)|
|
||||||
|
[辽宁](//www.ln.chinanews.com.cn/)|[内蒙古](//www.nmg.chinanews.com.cn/)|[宁夏](//www.nx.chinanews.com.cn/)|[青海](//www.qh.chinanews.com.cn/)|[山东](//www.sd.chinanews.com.cn/)|
|
||||||
|
[山西](//www.sx.chinanews.com.cn/)|
|
||||||
|
[陕西](//www.shx.chinanews.com.cn/)|[上海](//www.sh.chinanews.com.cn/)|[四川](//www.sc.chinanews.com.cn/)|[新疆](//www.xj.chinanews.com.cn/)|
|
||||||
|
[兵团](//www.bt.chinanews.com.cn/)|
|
||||||
|
[云南](//www.yn.chinanews.com.cn/)|[浙江](//www.zj.chinanews.com.cn/)
|
||||||
|
|
||||||
|
**[即时](/scroll-news/news1.html)**
|
||||||
|
|
||||||
|
**[时政](/china/)**
|
||||||
|
|
||||||
|
[高层](//channel.chinanews.com.cn/cns/cl/gn-gcdt.shtml) _|_
|
||||||
|
[人事](//channel.chinanews.com.cn/cns/cl/gn-rsbd.shtml) _|_
|
||||||
|
[反腐](//channel.chinanews.com.cn/cns/cl/fz-ffcl.shtml) _|_
|
||||||
|
[深度](//channel.chinanews.com.cn/u/sdbd.shtml) _|_
|
||||||
|
[两岸](//channel.chinanews.com.cn/u/gn-la.shtml) _|_
|
||||||
|
[科教](//channel.chinanews.com.cn/cns/cl/gn-kjww.shtml) _|_
|
||||||
|
[军事](//channel.chinanews.com.cn/cns/cl/gn-js.shtml)
|
||||||
|
|
||||||
|
**[东西问](/dxw/)**
|
||||||
|
|
||||||
|
[中外对话](//channel.chinanews.com.cn/u/dxw-wetalk.shtml)
|
||||||
|
_|_[世界观](//channel.chinanews.com.cn/u/dxw-sjg.shtml)
|
||||||
|
_|_[广角镜](//channel.chinanews.com.cn/u/dxw-gjj.shtml)
|
||||||
|
_|_[Z世代](//channel.chinanews.com.cn/u/dxw-wszsd.shtml)
|
||||||
|
_|_[洋腔队](//channel.chinanews.com.cn/u/dxw-yqd.shtml)
|
||||||
|
_|_[舆论场](//channel.chinanews.com.cn/u/dxw-ylc.shtml)
|
||||||
|
_|_[新漫评](//channel.chinanews.com.cn/u/dxw-xmp.shtml)
|
||||||
|
_|_[思享家](//channel.chinanews.com.cn/u/dxw-sxj.shtml) _|_
|
||||||
|
[会客厅](//channel.chinanews.com.cn/u/dxw-hkt.shtml)
|
||||||
|
_|_[研究院](//channel.chinanews.com.cn/u/dxw-yjy.shtml)
|
||||||
|
_|_[问西东](//channel.chinanews.com.cn/u/dxw-wxd.shtml)
|
||||||
|
|
||||||
|
**[财经](/finance/)**
|
||||||
|
|
||||||
|
[宏观](//channel.chinanews.com.cn/u/cj/cj-hongguan.shtml)
|
||||||
|
_|_[产经](//channel.chinanews.com.cn/u/cj/cj-chanjing.shtml)
|
||||||
|
_|_[中国新观察](//channel.chinanews.com.cn/cns/cl/cj-hgds.shtml)
|
||||||
|
_|_[三里河](//channel.chinanews.com.cn/cns/cl/cj-slh.shtml)
|
||||||
|
_|_[民生调查局](//channel.chinanews.com.cn/cns/cl/cj-msrd.shtml)
|
||||||
|
_|_[中新财评](//channel.chinanews.com.cn/cns/cl/cj-zxcp.shtml) _|_
|
||||||
|
[商业风云录](//channel.chinanews.com.cn/cns/cl/cj-fyrw.shtml)
|
||||||
|
_|_[智库](//channel.chinanews.com.cn/cns/cl/cj-zk.shtml)
|
||||||
|
_|_[公益](//channel.chinanews.com.cn/cns/cl/cj-gy.shtml)
|
||||||
|
|
||||||
|
**[国际](/world/)**
|
||||||
|
|
||||||
|
[世界观](//channel.chinanews.com.cn/cns/cl/gj-sjg.shtml)
|
||||||
|
_|_[国际识局](//channel.chinanews.com.cn/cns/cl/gj-zxsjg.shtml)
|
||||||
|
_|_[国际人物](//channel.chinanews.com.cn/u/gj-rw.shtml)
|
||||||
|
_|_[国际热评](//channel.chinanews.com.cn/cns/cl/gj-gjrp.shtml)
|
||||||
|
_|_[新漫评](//channel.chinanews.com.cn/cns/cl/gj-xmp.shtml)
|
||||||
|
_|_[国际视野](//www.chinanews.com.cn/shipin/m/gj/views.shtml)
|
||||||
|
|
||||||
|
**[社会](/society/)**
|
||||||
|
|
||||||
|
[热点追踪](//channel.chinanews.com.cn/u/rdzz.shtml)
|
||||||
|
_|_[中新真探](//channel.chinanews.com.cn/cns/cl/sh-zxzt.shtml) _|_
|
||||||
|
[新闻浮世绘](//channel.chinanews.com.cn/u/fsh.shtml) _|_
|
||||||
|
[中新法治](//channel.chinanews.com.cn/cns/cl/fz-jdrw.shtml) _|_
|
||||||
|
[第一现场](//channel.chinanews.com.cn/u/gn-dyxc.shtml)
|
||||||
|
|
||||||
|
**[大湾区](/dwq/)**
|
||||||
|
|
||||||
|
[港澳](//channel.chinanews.com.cn/u/dwq-ga.shtml)
|
||||||
|
_|_[风向标](//channel.chinanews.com.cn/cns/cl/dwq-fxb.shtml)
|
||||||
|
_|_[快车道](//channel.chinanews.com.cn/cns/cl/dwq-kcd.shtml)
|
||||||
|
_|_[青年说](//channel.chinanews.com.cn/cns/cl/dwq-qns.shtml)
|
||||||
|
_|_[专家建言](//channel.chinanews.com.cn/u/dwq-wqzjjy.shtml)
|
||||||
|
_|_[湾得福](//channel.chinanews.com.cn/u/dwq-wqwdf.shtml)
|
||||||
|
_|_[Video](//channel.chinanews.com.cn/cns/cl/dwq-video.shtml)
|
||||||
|
_|_[湾区MAP](//www.chinanews.com.cn/dwq/#wqmap)
|
||||||
|
|
||||||
|
**[健康](/health/)**
|
||||||
|
|
||||||
|
[大医生来了](//channel.chinanews.com.cn/cns/cl/life-doctoriscoming.shtml)
|
||||||
|
_|_[医学的温度](//channel.chinanews.com.cn/cns/cl/life-temperatureofmedicine.shtml)
|
||||||
|
_|_[医药新观察](//channel.chinanews.com.cn/cns/cl/life-
|
||||||
|
observationsonmedicine.shtml)
|
||||||
|
|
||||||
|
**[教育](/edu/)**
|
||||||
|
|
||||||
|
**[法治](/fazhi/)**
|
||||||
|
|
||||||
|
**[生活](/life/)**
|
||||||
|
|
||||||
|
**[华人](//www.chinaqw.com/)**
|
||||||
|
|
||||||
|
**[同心圆](/txy/)**
|
||||||
|
|
||||||
|
[统战要闻](//channel.chinanews.com.cn/cns/cl/txy-tzyw.shtml)
|
||||||
|
_|_[统战时讯](//channel.chinanews.com.cn/cns/cl/txy-tzsx.shtml)
|
||||||
|
_|_[各地统战](//channel.chinanews.com.cn/cns/cl/txy-gdtz.shtml)
|
||||||
|
|
||||||
|
**[铸牢中华民族共同体意识](/zlgtt/)**
|
||||||
|
|
||||||
|
[民族传承](//channel.chinanews.com.cn/cns/cl/mz-cc.shtml)
|
||||||
|
_|_[政策法规](//channel.chinanews.com.cn/cns/cl/mz-fg.shtml)
|
||||||
|
_|_[学术动态](//channel.chinanews.com.cn/cns/cl/mz-xs.shtml)
|
||||||
|
_|_[地方实践](//channel.chinanews.com.cn/cns/cl/mz-df.shtml)
|
||||||
|
_|_[国际传播](//channel.chinanews.com.cn/cns/cl/mz-gj.shtml)
|
||||||
|
|
||||||
|
**[一带一路](/ydyl/)**
|
||||||
|
|
||||||
|
[热点聚焦](//channel.chinanews.com.cn/cns/cl/ydyl-rdjj.shtml)
|
||||||
|
_|_[深度访谈](//channel.chinanews.com.cn/cns/cl/ydyl-sdft.shtml)
|
||||||
|
_|_[各方关注](//channel.chinanews.com.cn/cns/cl/ydyl-gfgz.shtml)
|
||||||
|
_|_[海外视角](//channel.chinanews.com.cn/u/ydyl-hwsj.shtml)
|
||||||
|
_|_[项目动态](//channel.chinanews.com.cn/cns/cl/ydyl-xmdt.shtml)
|
||||||
|
_|_[丰硕成果](//channel.chinanews.com.cn/cns/cl/ydyl-fscg.shtml)
|
||||||
|
|
||||||
|
**[文娱](/wy/)**
|
||||||
|
|
||||||
|
[文化漫谈](//channel.chinanews.com.cn/cns/cl/cul-whmt.shtml)
|
||||||
|
_|_[艺苑大观](//channel.chinanews.com.cn/cns/cl/cul-xjgw.shtml)
|
||||||
|
_|_[收藏考古](//channel.chinanews.com.cn/cns/cl/cul-sckgd.shtml)
|
||||||
|
_|_[星闻联播](//channel.chinanews.com.cn/cns/cl/yl-mxnd.shtml)
|
||||||
|
_|_[影音综艺](//channel.chinanews.com.cn/cns/cl/yl-ypkb.shtml)
|
||||||
|
|
||||||
|
**[体育](/sports/)**
|
||||||
|
|
||||||
|
[独家视角](//channel.chinanews.com.cn/cns/cl/ty-bdjj.shtml)
|
||||||
|
_|_[绿茵赛场](//channel.chinanews.com.cn/cns/cl/ty-gnzq.shtml)
|
||||||
|
_|_[篮坛竞技](//channel.chinanews.com.cn/cns/cl/ty-klsk.shtml)
|
||||||
|
_|_[综合其他](//channel.chinanews.com.cn/cns/cl/ty-zhqt.shtml)
|
||||||
|
_|_[视频播报](/shipin/m/tt/views.shtml)
|
||||||
|
|
||||||
|
**[视频](/shipin/)**
|
||||||
|
|
||||||
|
[热点](/shipin/m/rd/views.shtml) _|_[国内](/shipin/m/gn/views.shtml)
|
||||||
|
_|_[社会](/shipin/m/sh/views.shtml) _|_[国际](/shipin/m/gj/views.shtml)
|
||||||
|
_|_[军事](/shipin/m/jq/views.shtml) _|_[文娱](/shipin/m/wy/views.shtml)
|
||||||
|
_|_[体育](/shipin/m/tt/views.shtml) _|_[财经](/shipin/m/cj/views.shtml)
|
||||||
|
_|_[港澳台侨](/shipin/m/ga/views.shtml) _|_[微视界](/shipin/minidocu.shtml) _|_
|
||||||
|
[洋腔队](/shipin/yqd.shtml) _|_[Z世代](/shipin/zsd.shtml)
|
||||||
|
_|_[澜湄印象](/gn/z/lanmeiyinxiang/index.shtml)
|
||||||
|
_|_[中国风](/shipin/AuthenticChina.shtml) _|_[中国新视野](/shipin/chinarevealed.shtml)
|
||||||
|
|
||||||
|
**[图片](/photo/)**
|
||||||
|
|
||||||
|
[中新画报](//channel.chinanews.com.cn/u/zxhb.shtml)
|
||||||
|
_|_[国内](//channel.chinanews.com.cn/u/pic/gn.shtml)
|
||||||
|
_|_[社会](//channel.chinanews.com.cn/u/pic/sh.shtml)
|
||||||
|
_|_[国际](//channel.chinanews.com.cn/u/pic/gj.shtml)
|
||||||
|
_|_[娱乐](//channel.chinanews.com.cn/u/pic/yl.shtml)
|
||||||
|
_|_[体育](//channel.chinanews.com.cn/u/pic/ty.shtml)
|
||||||
|
_|_[军事](//channel.chinanews.com.cn/u/pic/js.shtml)
|
||||||
|
_|_[科技](//channel.chinanews.com.cn/u/pic/kj.shtml) _|_
|
||||||
|
[港澳台侨](//channel.chinanews.com.cn/u/pic/gatq.shtml)
|
||||||
|
_|_[凡人歌](//channel.chinanews.com.cn/u/pic/frg.shtml)
|
||||||
|
|
||||||
|
**[创意](/chuangyi/)**
|
||||||
|
|
||||||
|
**[理论](/theory.shtml)**
|
||||||
|
|
||||||
|
**[直播](/live.shtml)**
|
||||||
|
|
||||||
|
**[专题](/allspecial/)**
|
||||||
|
|
||||||
|
**[专栏](/zhuanlan/)**
|
||||||
|
|
||||||
|
[](//www.chinaqw.com/) [](//www.jwview.com/) [](//www.ecns.cn/) [](/gsztc/)
|
||||||
|
[](//www.inewsweek.cn/)[](http://epaper.chinanews.com/)
|
||||||
|
[](http://www.cnsphoto.com/)
|
||||||
|
|
||||||
|
[【新思想引领新征程】传统文化赋能城市更新
|
||||||
|
展现新时代风采](https://www.chinanews.com.cn/gn/2025/02-13/10368229.shtml)
|
||||||
|
|
||||||
|
[习言道|祝你们的生活像吃汤圆一样,安逸!](//www.chinanews.com.cn/gn/shipin/cns/2025/02-12/news1013039.shtml)[学习进行时丨花灯长廊
|
||||||
|
情深意长](//www.chinanews.com.cn/gn/2025/02-13/10367926.shtml)[学习知行·中国式现代化丨绿杨平江
|
||||||
|
古城新韵](//www.chinanews.com.cn/gn/2025/02-13/10367977.shtml)
|
||||||
|
|
||||||
|
PreviousNext
|
||||||
|
|
||||||
|
* [](//www.chinanews.com.cn/tp/hd2011/2025/02-13/1140505.shtml)
|
||||||
|
* [](https://www.chinanews.com.cn/tp/hd2011/2025/02-12/1140334.shtml)
|
||||||
|
* [](https://www.chinanews.com.cn/tp/hd2011/2025/02-12/1140232.shtml)
|
||||||
|
* [](//www.chinanews.com.cn/tp/hd2011/2025/02-13/1140469.shtml)
|
||||||
|
|
||||||
|
* [【中新画报】亚冬会上的冰上之“花”](//www.chinanews.com.cn/tp/hd2011/2025/02-13/1140505.shtml)
|
||||||
|
* [【中新画报】元宵夜 赏灯时 光影交错间体验满满中式浪漫](https://www.chinanews.com.cn/tp/hd2011/2025/02-12/1140334.shtml)
|
||||||
|
* [【这个城市有点潮】南京:灯影秦淮畔 一梦入金陵](https://www.chinanews.com.cn/tp/hd2011/2025/02-12/1140232.shtml)
|
||||||
|
* [海口元宵烟花晚会点亮夜空](//www.chinanews.com.cn/tp/hd2011/2025/02-13/1140469.shtml)
|
||||||
|
|
||||||
|
* * * *
|
||||||
|
|
||||||
|
[陪哪吒一起冲刺百亿元票房](//www.chinanews.com.cn/shipin/spfts/20250213/5766.shtml)
|
||||||
|
|
||||||
|
[专访《哪吒2》太乙真人配音演员张珈铭:录音前半小时才拿到剧本](//www.chinanews.com.cn/shipin/cns/2025/02-13/news1013146.shtml)
|
||||||
|
|
||||||
|
[【丝路·华章】从“冰天雪地”到“金山银山”,北京“冰雪经济”热力足](//www.chinanews.com.cn/ty/shipin/cns/2025/02-13/news1013182.shtml)
|
||||||
|
|
||||||
|
[《习近平总书记关于加强和改进民族工作的重要思想学习读本》出版发行](https://www.chinanews.com.cn/gn/2025/02-13/10368248.shtml)
|
||||||
|
|
||||||
|
[李强将出席哈尔滨第九届亚洲冬季运动会闭幕式并举行外事活动](https://www.chinanews.com.cn/gn/2025/02-13/10368205.shtml)
|
||||||
|
|
||||||
|
[石泰峰在北京会见班禅额尔德尼·确吉杰布](https://www.chinanews.com.cn/gn/2025/02-13/10368204.shtml)
|
||||||
|
|
||||||
|
[中央纪委国家监委规范中管高校联合审查调查工作](https://www.chinanews.com.cn/gn/2025/02-13/10367939.shtml)
|
||||||
|
|
||||||
|
[亚冬会丨](//www.chinanews.com.cn/ty/z/ydh9/index.shtml)[徐梦桃将担任闭幕式中国体育代表团旗手](https://www.chinanews.com.cn/ty/2025/02-13/10368169.shtml)
|
||||||
|
|
||||||
|
[近40天的年俗长卷,让世界看见这抹“非遗”中国红!](https://www.chinanews.com.cn/cul/2025/02-13/10367954.shtml)
|
||||||
|
|
||||||
|
[优化营商环境,多地让领导干部“换位”体验](https://www.chinanews.com.cn/sh/2025/02-13/10368273.shtml)
|
||||||
|
|
||||||
|
[《哪吒2》观影人次破2亿](https://www.chinanews.com.cn/cj/2025/02-13/10368287.shtml)[这些细节看哭许多人](https://www.chinanews.com.cn/cul/2025/02-13/10368046.shtml)
|
||||||
|
|
||||||
|
[商务部回应美对钢铁和铝进口加征25%关税](https://www.chinanews.com.cn/cj/2025/02-13/10368311.shtml)
|
||||||
|
|
||||||
|
[普京与特朗普通电话](https://www.chinanews.com.cn/gj/2025/02-13/10367873.shtml)[中方:乐见俄美加强沟通对话](https://www.chinanews.com.cn/gn/2025/02-13/10368209.shtml)
|
||||||
|
|
||||||
|
[缅甸向泰国移交261名电诈园区被解救人员](https://www.chinanews.com.cn/gj/2025/02-13/10368189.shtml)
|
||||||
|
|
||||||
|
[美国南加州一名中国留学生不幸遇害](https://www.chinanews.com.cn/gj/2025/02-13/10368066.shtml)[驻洛杉矶总领馆通报](https://www.chinanews.com.cn/gj/2025/02-13/10367935.shtml)
|
||||||
|
|
||||||
|
[即时新闻精选](/scroll-news/news1.html)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
* [外媒:哈马斯称“渴望”继续执行停火协议](//www.chinanews.com.cn/gj/2025/02-13/10368392.shtml "外媒:哈马斯称“渴望”继续执行停火协议")
|
||||||
|
* [德国慕尼黑一汽车冲撞人群 多人受伤](//www.chinanews.com.cn/gj/2025/02-13/10368386.shtml "德国慕尼黑一汽车冲撞人群 多人受伤")
|
||||||
|
* [掀起岛内文化思考,台湾网友为何如此期待...](//www.chinanews.com.cn/sh/2025/02-13/10368347.shtml "掀起岛内文化思考,台湾网友为何如此期待《哪吒2》上映?")
|
||||||
|
* [外眼看亚冬会:绿色环保、宾至如归,“中...](//www.chinanews.com.cn/gj/2025/02-13/10368344.shtml "外眼看亚冬会:绿色环保、宾至如归,“中国送给世界的礼物”")
|
||||||
|
|
||||||
|
* [孩子何时该去学习困难门诊?专家解读](//www.chinanews.com.cn/jk/2025/02-13/10368324.shtml "孩子何时该去学习困难门诊?专家解读")
|
||||||
|
* [《哪吒2》观影人次破2亿](//www.chinanews.com.cn/cj/2025/02-13/10368287.shtml "《哪吒2》观影人次破2亿")
|
||||||
|
* [优化营商环境,多地让领导干部“换位”体...](//www.chinanews.com.cn/sh/2025/02-13/10368273.shtml "优化营商环境,多地让领导干部“换位”体验")
|
||||||
|
* [特鲁多:加拿大绝无可能成为美国的第51个...](//www.chinanews.com.cn/gj/2025/02-13/10368253.shtml "特鲁多:加拿大绝无可能成为美国的第51个州")
|
||||||
|
|
||||||
|
* [《哪吒2》进入全球动画电影票房榜前三](//www.chinanews.com.cn/cul/2025/02-13/10368247.shtml "《哪吒2》进入全球动画电影票房榜前三")
|
||||||
|
* [“哪吒闹海”最早的故事图像在辽宁省...](//www.chinanews.com.cn/shipin/cns-d/2025/02-13/news1013180.shtml "“哪吒闹海”最早的故事图像在辽宁省博物馆展出")
|
||||||
|
* [澳军机蓄意侵闯中国西沙群岛领空 中方已提...](//www.chinanews.com.cn/gn/2025/02-13/10368223.shtml "澳军机蓄意侵闯中国西沙群岛领空 中方已提出严正交涉")
|
||||||
|
* [敦促美方回到正确轨道 商务部回应美对进口...](//www.chinanews.com.cn/cj/2025/02-13/10368211.shtml "敦促美方回到正确轨道 商务部回应美对进口钢铝征收25%关税")
|
||||||
|
|
||||||
|
* [李强将出席哈尔滨第九届亚洲冬季运动会闭...](//www.chinanews.com.cn/gn/2025/02-13/10368205.shtml "李强将出席哈尔滨第九届亚洲冬季运动会闭幕式并举行外事活动")
|
||||||
|
* [缅甸向泰国移交261名电诈园区被解救人员](//www.chinanews.com.cn/gj/2025/02-13/10368189.shtml "缅甸向泰国移交261名电诈园区被解救人员")
|
||||||
|
* [湖北检察机关依法对李勇涉嫌受贿案提起公...](//www.chinanews.com.cn/gn/2025/02-13/10368186.shtml "湖北检察机关依法对李勇涉嫌受贿案提起公诉")
|
||||||
|
* [依法严惩家族宗族势力和常见高发领域黑恶...](//www.chinanews.com.cn/gn/2025/02-13/10368185.shtml "依法严惩家族宗族势力和常见高发领域黑恶犯罪!典型案例发布→")
|
||||||
|
|
||||||
|
* [再创纪录!《哪吒2》成为中国影史首部观影...](//www.chinanews.com.cn/cul/2025/02-13/10368183.shtml "再创纪录!《哪吒2》成为中国影史首部观影人次破2亿电影")
|
||||||
|
* [徐梦桃将担任亚冬会闭幕式中国体育代表团...](//www.chinanews.com.cn/ty/2025/02-13/10368169.shtml "徐梦桃将担任亚冬会闭幕式中国体育代表团旗手")
|
||||||
|
* [“攒着一股劲,把天花板顶上去” ——哪吒...](//www.chinanews.com.cn/cul/2025/02-13/10368153.shtml "“攒着一股劲,把天花板顶上去” ——哪吒之“火”如何点燃动漫产业信心?")
|
||||||
|
* [中国队获得亚冬会冬季两项男子4×7.5公里...](//www.chinanews.com.cn/ty/2025/02-13/10368155.shtml "中国队获得亚冬会冬季两项男子4×7.5公里接力铜牌")
|
||||||
|
|
||||||
|
[](//www.chinanews.com.cn/kong/z/zhongxinrenwu/index.shtml)
|
||||||
|
|
||||||
|
[](/ai-chinanews/)
|
||||||
|
|
||||||
|
[](/cnspp/pp-xyd/index.shtml)
|
||||||
|
|
||||||
|
[ 祝你们的生活像吃汤圆一样,安逸!
|
||||||
|
](//www.chinanews.com.cn/gn/shipin/cns/2025/02-12/news1013039.shtml)
|
||||||
|
|
||||||
|
* __[把各民族优秀传统文化发扬光大](/gn/2025/02-11/10366848.shtml)
|
||||||
|
* __[20天内两赴东北,习近平谈到几个关键词](/gn/2025/02-09/10365600.shtml)
|
||||||
|
* __[这个机遇,习近平强调要“紧紧抓住”](/gn/2025/02-08/10365481.shtml)
|
||||||
|
|
||||||
|
[](/cnspp/pp-zwdh/index.shtml)
|
||||||
|
|
||||||
|
[ 特朗普2.0时代,中美两国如何相处? ](/dxw/2025/01-19/10356210.shtml)
|
||||||
|
|
||||||
|
* __[畅聊互联网+乡村振兴,比利时前大使:我喜欢在云南当农民](/gj/2025/01-04/10347582.shtml)
|
||||||
|
* __[全球贸易面临挑战,中国如何引领区域发展?](/dxw/2024/12-27/10343533.shtml)
|
||||||
|
* __[联合国前副秘书长: 中国绿色转型成就惊艳世界](/dxw/2024/11-30/10328358.shtml)
|
||||||
|
|
||||||
|
[](/cnspp/pp-talk/index.shtml)
|
||||||
|
|
||||||
|
[ 当元宵节遇上Citywalk,古人玩得有多嗨?
|
||||||
|
](//www.chinanews.com.cn/sh/shipin/cns/2025/02-11/news1012967.shtml)
|
||||||
|
|
||||||
|
* __[1996亚冬会跟2025亚冬会换了个pin](//www.chinanews.com.cn/ty/shipin/cns/2025/02-10/news1012841.shtml)
|
||||||
|
* __[哈尔滨亚冬会开幕式门票长啥样?小新为你揭秘](//www.chinanews.com.cn/ty/shipin/cns-d/2025/02-07/news1012592.shtml)
|
||||||
|
* __[“年三十儿”消失之谜](//www.chinanews.com.cn/shipin/cns/2025/02-01/news1012208.shtml)
|
||||||
|
|
||||||
|
[要闻](/importnews.html)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
* __[公安机关持续打击防范邪教组织违法犯罪活动](/gn/2025/02-13/10368116.shtml)
|
||||||
|
* __[](/iframe/2025/02-08/10364991.shtml)[新春走基层丨](https://www.chinanews.com.cn/all/z/2025xczjc/index.shtml)[海口古韵新风年味浓 换花祈福喜闹春](//www.chinanews.com.cn/cul/2025/02-13/10367961.shtml)
|
||||||
|
* __[湖北检察机关依法对李勇涉嫌受贿案提起公诉](/gn/2025/02-13/10368186.shtml)
|
||||||
|
* __[](/iframe/2025/02-13/10368107.shtml)[国家安全部:冒充国安干警招摇撞骗?严肃查办!](https://www.chinanews.com.cn/gn/2025/02-13/10367891.shtml)
|
||||||
|
* __[整治春节网络环境 网信部门累计处置账号9.7万个](/gn/2025/02-13/10368053.shtml)
|
||||||
|
* __[计划满 任务重!今年中国航天这些“首飞”值得期待](/gn/2025/02-13/10368037.shtml)
|
||||||
|
* __[两地检察机关已对“油罐车运输食用油”案件提起公诉](/gn/2025/02-13/10368025.shtml)
|
||||||
|
* __[泰国总理佩通坦·钦那瓦一行参访华彬集团](/cj/2025/02-07/10364703.shtml)
|
||||||
|
* __[特朗普:乌克兰加入北约是不切实际的](/gj/2025/02-13/10368071.shtml)
|
||||||
|
* __[日本多地连日强降雪已致13人死亡173人受伤](/gj/2025/02-13/10368302.shtml)
|
||||||
|
* __[特鲁多:加拿大绝无可能成为美国的第51个州](/gj/2025/02-13/10368253.shtml)
|
||||||
|
* __[阿富汗首都喀布尔发生自杀式袭击 已致1死3伤](/gj/2025/02-13/10368231.shtml)
|
||||||
|
* __[美国8名被解雇的监察官员对特朗普政府提起诉讼](/gj/2025/02-13/10368072.shtml)
|
||||||
|
* __[马斯克再次把矛头对准北约,呼吁对其进行彻底改革](/gj/2025/02-13/10368047.shtml)
|
||||||
|
* __[李在明遇袭案行凶者终审获刑15年](/gj/2025/02-13/10368068.shtml)
|
||||||
|
* __[古茗在港上市,将持续投资产品研发及供应链能力](/cj/2025/02-12/10367413.shtml)
|
||||||
|
* __[盘点中国七大消费“万亿之城”:向新升级 向优进发](/cj/2025/02-13/10368165.shtml)
|
||||||
|
* __[国家发改委下达以工代赈中央投资50亿元](/cj/2025/02-13/10368054.shtml)
|
||||||
|
* __[2024年全国社会物流总额突破360万亿元](/gn/2025/02-13/10367999.shtml)
|
||||||
|
* __[“嗓子几乎说不出话”,汽车销售经历“最忙一天”](/cj/2025/02-13/10367903.shtml)
|
||||||
|
* __[热“雪”沸腾!尔滨的流量被这个经济大省狠狠抓住了!](/cj/2025/02-13/10367898.shtml)
|
||||||
|
* __[持续看好!华尔街预测DeepSeek将推动A股“牛市”](/cj/2025/02-12/10367688.shtml)
|
||||||
|
* __[中国多地“新春第一会”民企坐C位](/cj/2025/02-12/10367647.shtml)
|
||||||
|
* __[从衣食住行到吃喝玩乐,在这里看见多彩生活](/life/)
|
||||||
|
* __[河南南阳野生动物园售卖老虎毛?官方回应](/sh/2025/02-13/10368052.shtml)
|
||||||
|
* __[孩子何时该去学习困难门诊?专家解读](/jk/2025/02-13/10368324.shtml)
|
||||||
|
* __[人这一辈子,一定要到莆田闹一次元宵](/sh/2025/02-13/10368091.shtml)
|
||||||
|
* __[北京朝阳公安:28岁男子李某某酒后砸车,已被刑拘](/sh/2025/02-13/10368076.shtml)
|
||||||
|
* __[近期多发,家庭悲剧!“一老一小”千万警惕→](/sh/2025/02-13/10368034.shtml)
|
||||||
|
* __[节后快递行业迎业务高峰 警惕快递“空包诈骗”](/sh/2025/02-13/10367892.shtml)
|
||||||
|
* __[卖课卖相机卖药……一些“老年大学”怎就沦为了推销场](/sh/2025/02-13/10367879.shtml)
|
||||||
|
* __[铸牢中华民族共同体意识文物古籍展](https://www.neac.gov.cn/seac/c103611/wenwugujizhan.shtml)
|
||||||
|
|
||||||
|
* [台湾高校师生邂逅郎静山故里 感受光影魅...](//www.chinanews.com.cn/gn/2025/02-12/10367747.shtml "台湾高校师生邂逅郎静山故里 感受光影魅力")
|
||||||
|
* [大陆花灯点亮台湾多地元宵灯会](//www.chinanews.com.cn/gn/2025/02-12/10367667.shtml "大陆花灯点亮台湾多地元宵灯会")
|
||||||
|
* [“急急如律令”翻译引热议 折射中国文化...](//www.chinanews.com.cn/cul/2025/02-12/10367729.shtml "“急急如律令”翻译引热议 折射中国文化“走出去”难点")
|
||||||
|
* [持续看好!华尔街预测DeepSeek将...](//www.chinanews.com.cn/cj/2025/02-12/10367688.shtml "持续看好!华尔街预测DeepSeek将推动A股“牛市”")
|
||||||
|
* [国台办举办蛇年首场发布会 聚焦两岸交流...](//www.chinanews.com.cn/gn/2025/02-12/10367666.shtml "国台办举办蛇年首场发布会 聚焦两岸交流等热点")
|
||||||
|
* [受冷空气影响 福州马尾至马祖“小三通”...](//www.chinanews.com.cn/gn/2025/02-12/10367727.shtml "受冷空气影响 福州马尾至马祖“小三通”客运航线将停航")
|
||||||
|
* [(新春走基层)“闽台东石灯俗”在晋江举...](//www.chinanews.com.cn/gn/2025/02-12/10367759.shtml "(新春走基层)“闽台东石灯俗”在晋江举办 逾百位台胞同数一宫灯")
|
||||||
|
* [中国多地“新春第一会”民企坐C位](//www.chinanews.com.cn/cj/2025/02-12/10367647.shtml "中国多地“新春第一会”民企坐C位")
|
||||||
|
* [解放军南部战区:任何搅局南海的军事活动...](//www.chinanews.com.cn/gn/2025/02-12/10367645.shtml "解放军南部战区:任何搅局南海的军事活动尽在掌握")
|
||||||
|
* [食为天·年味丨独属海南人的“年味”,这...](//www.chinanews.com.cn/sh/2025/02-12/10367856.shtml "食为天·年味丨独属海南人的“年味”,这几道菜上桌氛围拉满!")
|
||||||
|
|
||||||
|
[](https://actshow.chinanews.com/act/2025recommendation)
|
||||||
|
|
||||||
|
* [(哈尔滨亚冬会)花样滑冰:双人滑中国组...](//www.chinanews.com.cn/tp/hd2011/2025/02-12/1140422.shtml "(哈尔滨亚冬会)花样滑冰:双人滑中国组合王瑀晨/朱磊位列第五")
|
||||||
|
* [(新春见闻)广州大熊猫“妹猪”乐享特制...](//www.chinanews.com.cn/tp/hd2011/2025/02-12/1140397.shtml "(新春见闻)广州大熊猫“妹猪”乐享特制“元宵大餐”")
|
||||||
|
* [(哈尔滨亚冬会)中国组合次旦玉珍/布鲁...](//www.chinanews.com.cn/tp/hd2011/2025/02-12/1140391.shtml "(哈尔滨亚冬会)中国组合次旦玉珍/布鲁尔夺得滑雪登山混合接力金牌")
|
||||||
|
* [亚冬会花样滑冰项目女子单人滑短节目比赛...](//www.chinanews.com.cn/tp/hd2011/2025/02-12/1140383.shtml "亚冬会花样滑冰项目女子单人滑短节目比赛举行")
|
||||||
|
* [喜庆氛围感拉满 多彩民俗“闹”元宵](//www.chinanews.com.cn/tp/hd2011/2025/02-12/1140413.shtml "喜庆氛围感拉满 多彩民俗“闹”元宵")
|
||||||
|
* [亚冬会花样滑冰项目冰上舞蹈自由舞比赛举...](//www.chinanews.com.cn/tp/hd2011/2025/02-12/1140407.shtml "亚冬会花样滑冰项目冰上舞蹈自由舞比赛举行")
|
||||||
|
* [海口元宵烟花晚会点亮夜空](//www.chinanews.com.cn/tp/hd2011/2025/02-13/1140469.shtml "海口元宵烟花晚会点亮夜空")
|
||||||
|
* [中外秧歌队伍齐聚陕西榆林 欢喜共舞“闹...](//www.chinanews.com.cn/tp/hd2011/2025/02-13/1140459.shtml "中外秧歌队伍齐聚陕西榆林 欢喜共舞“闹”元宵")
|
||||||
|
* [当无人机遇上烟花 阿拉善夜空开启“梦幻...](//www.chinanews.com.cn/tp/hd2011/2025/02-13/1140485.shtml "当无人机遇上烟花 阿拉善夜空开启“梦幻魔法”")
|
||||||
|
* [【中新画报】亚冬会上的冰上之“花”](//www.chinanews.com.cn/tp/hd2011/2025/02-13/1140505.shtml "【中新画报】亚冬会上的冰上之“花”")
|
||||||
|
|
||||||
|
[](/live.shtml)
|
||||||
|
|
||||||
|
* [【新春纪事】花灯如昼,点亮元宵团圆夜](//www.chinanews.com.cn/shipin/cns-d/2025/02-12/news1013113.shtml "【新春纪事】花灯如昼,点亮元宵团圆夜")
|
||||||
|
* [成都哪吒雕塑成网红打卡点 市民纷纷体验...](//www.chinanews.com.cn/cul/shipin/cns-d/2025/02-12/news1013108.shtml "成都哪吒雕塑成网红打卡点 市民纷纷体验中国动画文化魅力")
|
||||||
|
* [次旦玉珍:跟搭档布鲁尔配合良好,争取在...](//www.chinanews.com.cn/ty/shipin/cns-d/2025/02-12/news1013117.shtml "次旦玉珍:跟搭档布鲁尔配合良好,争取在国际赛场发挥得更好")
|
||||||
|
* [陕西社火脸谱绘制技艺代表性传承人:让东...](//www.chinanews.com.cn/sh/shipin/cns/2025/02-12/news1013105.shtml "陕西社火脸谱绘制技艺代表性传承人:让东方神韵的脸谱文化“走出去”")
|
||||||
|
* [【新春纪事】带你一览各地如何“闹”元宵](//www.chinanews.com.cn/sh/shipin/cns-d/2025/02-12/news1013114.shtml "【新春纪事】带你一览各地如何“闹”元宵")
|
||||||
|
* [梁振英“点赞”黑龙江援非眼科医疗队:传...](//www.chinanews.com.cn/sh/shipin/cns/2025/02-12/news1013118.shtml "梁振英“点赞”黑龙江援非眼科医疗队:传递光明 播种友谊")
|
||||||
|
* [云南昆明:动物家族享用“元宵大餐”](//www.chinanews.com.cn/sh/shipin/cns-d/2025/02-12/news1013097.shtml "云南昆明:动物家族享用“元宵大餐”")
|
||||||
|
* [【新春纪事】浙江杭州:萧山河上龙灯胜会...](//www.chinanews.com.cn/sh/shipin/cns-d/2025/02-12/news1013115.shtml "【新春纪事】浙江杭州:萧山河上龙灯胜会点燃喜庆元宵氛围")
|
||||||
|
* [“电力巡检机械狗”身着东北花袄助力亚冬...](//www.chinanews.com.cn/sh/shipin/cns-d/2025/02-12/news1013098.shtml "“电力巡检机械狗”身着东北花袄助力亚冬会")
|
||||||
|
* [【新春纪事】福建莆田上演元宵狂欢 村民...](//www.chinanews.com.cn/sh/shipin/cns-d/2025/02-12/news1013116.shtml "【新春纪事】福建莆田上演元宵狂欢 村民抬棕轿撞火 踏火祈福")
|
||||||
|
|
||||||
|
[ ](https://www.xuexi.cn/)
|
||||||
|
|
||||||
|
[ ](https://www.12377.cn/)
|
||||||
|
|
||||||
|
[ ](//www.piyao.org.cn/yybgt/index.htm)
|
||||||
|
|
||||||
|
[专题·活动](/allspecial/index.shtml)
|
||||||
|
|
||||||
|
[ ](https://www.chinanews.com.cn/gn/z/2024wangluowenming/index.shtml)
|
||||||
|
|
||||||
|
[ ](https://www.chinanews.com.cn/gj/z/COP29/index.shtml)
|
||||||
|
|
||||||
|
[ ](https://www.chinanews.com.cn/cj/z/CIIE2024/index.shtml)
|
||||||
|
|
||||||
|
[ ](https://www.chinanews.com.cn/gn/z/sazgtxyw/index.shtml)
|
||||||
|
|
||||||
|
[ ](https://www.chinanews.com.cn/gn/z/jsrh2024/index.shtml)
|
||||||
|
|
||||||
|
[ ](https://www.chinanews.com.cn/gn/z/ForeignersinChina/index.shtml)
|
||||||
|
|
||||||
|
[ ](https://www.chinanews.com.cn/gn/z/ChinaView/index.shtml)
|
||||||
|
|
||||||
|
[ ](https://www.chinanews.com.cn/gn/z/xiyandao/index.shtml)
|
||||||
|
|
||||||
|
[ ](https://www.chinanews.com.cn/gn/z/esjszqh/index.shtml)
|
||||||
|
|
||||||
|
[ ](//www.chinanews.com.cn/gn/z/jiaodian/index.shtml )
|
||||||
|
|
||||||
|
[ ](https://www.chinanews.com.cn/cj/z/Economic2024/index.shtml)
|
||||||
|
|
||||||
|
[ ](https://www.chinanews.com.cn/gn/z/focac/index.shtml)
|
||||||
|
|
||||||
|
[视频](/shipin/)
|
||||||
|
|
||||||
|
[微视界](/shipin/minidocu.shtml) [洋腔队](/shipin/yqd.shtml)
|
||||||
|
[Z世代](/shipin/zsd.shtml) [澜湄印象](/gn/z/lanmeiyinxiang/index.shtml)
|
||||||
|
[中国风](/shipin/AuthenticChina.shtml) [中国新视野](/shipin/chinarevealed.shtml)
|
||||||
|
|
||||||
|
[ 黄河壶口瀑布赏冰挂美景 ](/sh/shipin/cns-d/2025/02-13/news1013162.shtml)
|
||||||
|
|
||||||
|
[“哪吒闹海”最早的故事图像在辽宁省博物馆展出](/shipin/cns-d/2025/02-13/news1013180.shtml)
|
||||||
|
|
||||||
|
[中国空域面积最大的空管分局新工作区启用](/cj/shipin/cns/2025/02-13/news1013177.shtml)
|
||||||
|
|
||||||
|
[给电池“打针”就能增加“寿命”?](/shipin/cns-d/2025/02-13/news1013168.shtml)
|
||||||
|
|
||||||
|
[赣南小山村里的马术“骑”妙之旅](/ty/shipin/cns-d/2025/02-13/news1013163.shtml)
|
||||||
|
|
||||||
|
[中国高跷之乡的“00后”们:踩两米高跷如履平地](/sh/shipin/cns/2025/02-13/news1013143.shtml)
|
||||||
|
|
||||||
|
[外国游客点赞亚冬会上的便利支付服务](/ty/shipin/cns/2025/02-13/news1013136.shtml)
|
||||||
|
|
||||||
|
[精彩图片](/photo/)
|
||||||
|
|
||||||
|
* [](https://www.chinanews.com.cn/tp/hd2011/2025/02-13/1140459.shtml)
|
||||||
|
|
||||||
|
[](https://www.chinanews.com.cn/tp/hd2011/2025/02-13/1140459.shtml)
|
||||||
|
|
||||||
|
[中外秧歌队伍齐聚陕西榆林
|
||||||
|
欢喜共舞“闹”元宵](https://www.chinanews.com.cn/tp/hd2011/2025/02-13/1140459.shtml)
|
||||||
|
|
||||||
|
* [](https://www.chinanews.com.cn/tp/2025/02-13/10367976.shtml)
|
||||||
|
|
||||||
|
[](https://www.chinanews.com.cn/tp/2025/02-13/10367976.shtml)
|
||||||
|
|
||||||
|
[(新春见闻)重庆荣昌:“火树银花”闹元宵](https://www.chinanews.com.cn/tp/2025/02-13/10367976.shtml)
|
||||||
|
|
||||||
|
* [](https://www.chinanews.com.cn/tp/hd2011/2025/02-13/1140495.shtml)
|
||||||
|
|
||||||
|
[](https://www.chinanews.com.cn/tp/hd2011/2025/02-13/1140495.shtml)
|
||||||
|
|
||||||
|
[(哈尔滨亚冬会)中国队摘冬季两项女子4x6公里接力金牌](https://www.chinanews.com.cn/tp/hd2011/2025/02-13/1140495.shtml)
|
||||||
|
|
||||||
|
* [](https://www.chinanews.com.cn/tp/hd2011/2025/02-13/1140505.shtml)
|
||||||
|
|
||||||
|
[](https://www.chinanews.com.cn/tp/hd2011/2025/02-13/1140505.shtml)
|
||||||
|
|
||||||
|
[【中新画报】亚冬会上的冰上之“花”](https://www.chinanews.com.cn/tp/hd2011/2025/02-13/1140505.shtml)
|
||||||
|
|
||||||
|
* [](https://www.chinanews.com.cn/tp/hd2011/2025/02-13/1140455.shtml)
|
||||||
|
|
||||||
|
[](https://www.chinanews.com.cn/tp/hd2011/2025/02-13/1140455.shtml)
|
||||||
|
|
||||||
|
[(哈尔滨亚冬会)自由式滑雪女子大跳台:中国队包揽金银铜牌](https://www.chinanews.com.cn/tp/hd2011/2025/02-13/1140455.shtml)
|
||||||
|
|
||||||
|
* [](https://www.chinanews.com.cn/tp/hd2011/2025/02-13/1140436.shtml)
|
||||||
|
|
||||||
|
[](https://www.chinanews.com.cn/tp/hd2011/2025/02-13/1140436.shtml)
|
||||||
|
|
||||||
|
[(新春见闻)北京市民艺术园区内“闹元宵”](https://www.chinanews.com.cn/tp/hd2011/2025/02-13/1140436.shtml)
|
||||||
|
|
||||||
|
* [](https://www.chinanews.com.cn/tp/hd2011/2025/02-13/1140485.shtml)
|
||||||
|
|
||||||
|
[](https://www.chinanews.com.cn/tp/hd2011/2025/02-13/1140485.shtml)
|
||||||
|
|
||||||
|
[当无人机遇上烟花
|
||||||
|
阿拉善夜空开启“梦幻魔法”](https://www.chinanews.com.cn/tp/hd2011/2025/02-13/1140485.shtml)
|
||||||
|
|
||||||
|
[时政](/china/)
|
||||||
|
|
||||||
|
[](/gn/2025/02-13/10367928.shtml)
|
||||||
|
|
||||||
|
[**中国科研团队为作物抵御寄生植物威胁提出解决方案**](/gn/2025/02-13/10367928.shtml)
|
||||||
|
|
||||||
|
* [去年前十一个月 检察机关起诉职务犯罪23000余人](/gn/2025/02-13/10367917.shtml)
|
||||||
|
* [力促“睡得安心” 我国已建成宁静小区2132个 ](/gn/2025/02-13/10367912.shtml)
|
||||||
|
* [主食吃得少了,我们还需要这么多粮食吗?](/gn/2025/02-13/10367890.shtml)
|
||||||
|
* [我国科研团队新发现:锂电池“打一针”就能“重生”](/gn/2025/02-13/10367889.shtml)
|
||||||
|
|
||||||
|
[社会](/society/)
|
||||||
|
|
||||||
|
[](/sh/2025/02-13/10368210.shtml)
|
||||||
|
|
||||||
|
[**四川筠连:滑坡山体再现险情 科技助力撤离及时零伤亡**](/sh/2025/02-13/10368210.shtml)
|
||||||
|
|
||||||
|
* [大年夜深山救老人 浙江长兴温暖寒冬传递大爱](/sh/2025/02-13/10367983.shtml)
|
||||||
|
* [主人欲取回“网红狗” 被要求签百万赔偿合同?](/sh/2025/02-13/10367946.shtml)
|
||||||
|
* [赶作业用AI 学生乐家长忧](/sh/2025/02-13/10367930.shtml)
|
||||||
|
* [交警提醒:返程“满载而归”或存安全隐患](/sh/2025/02-13/10367929.shtml)
|
||||||
|
|
||||||
|
[国际](/world/)
|
||||||
|
|
||||||
|
[](/gj/2025/02-13/10368350.shtml)
|
||||||
|
|
||||||
|
[**以防长:若哈马斯不按时释放被扣押人员,以将重启战事**](/gj/2025/02-13/10368350.shtml)
|
||||||
|
|
||||||
|
* [苏丹快速支援部队袭击多座村庄 致数十名平民死亡](/gj/2025/02-13/10368246.shtml)
|
||||||
|
* [日本连日强降雪已致13人死亡173人受伤](/gj/2025/02-13/10368222.shtml)
|
||||||
|
* [尹锡悦再次申请韩国总理韩德洙作为弹劾案证人作证](/gj/2025/02-13/10368118.shtml)
|
||||||
|
* [萨尔瓦多允许被判有组织犯罪的未成年人关押在成人监狱](/gj/2025/02-13/10368167.shtml)
|
||||||
|
|
||||||
|
[财经](/finance/)
|
||||||
|
|
||||||
|
[ 火热的亚冬会,为南方城市带来了什么?](/gsztc/2025/02-13/10368122.shtml)
|
||||||
|
|
||||||
|
* [为全年经济开好局起好步——各地力争首季“开门红”](/cj/2025/02-13/10368004.shtml)
|
||||||
|
* [国家铁路春节假期完成货物发送量7525.1万吨](/cj/2025/02-13/10367953.shtml)
|
||||||
|
* [中国与世界共享“冰雪机遇”](/cj/2025/02-13/10367927.shtml)
|
||||||
|
* [即时配送 “跑”向万亿元大市场](/cj/2025/02-13/10367924.shtml)
|
||||||
|
* [冰雪经济的中国“冰法”](/cj/2025/02-13/10367922.shtml)
|
||||||
|
* [从春节消费看中国市场活力](/cj/2025/02-13/10367921.shtml)
|
||||||
|
* [27岁女孩接班成“厂二代” 成功带领服装工厂转型](/cj/2025/02-13/10367907.shtml)
|
||||||
|
|
||||||
|
[ 全国铁路今天预计发送旅客1260万人次 ](/sh/2025/02-13/10367919.shtml)
|
||||||
|
|
||||||
|
[ 税费红利全力托举企业攀“高”向“新” ](/cj/2025/02-13/10367916.shtml)
|
||||||
|
|
||||||
|
[ 人勤春来早 各地有序推进春季农业生产 ](/cj/2025/02-13/10367914.shtml)
|
||||||
|
|
||||||
|
[ 便利支付让消费更顺畅 ](/cj/2025/02-13/10367904.shtml)
|
||||||
|
|
||||||
|
[健康](/health/) · [生活](/life/)
|
||||||
|
|
||||||
|
* [ 胎龄仅24周 武汉一“巴掌宝宝”获成功救治](/jk/2025/02-13/10368114.shtml)
|
||||||
|
* [ 寒假伊始立下读书Flag,如今没完成怎么办](/life/2025/02-13/10367947.shtml)
|
||||||
|
* [ 北京今年推动实现区级托育综合服务中心全覆盖](/jk/2025/02-13/10367906.shtml)
|
||||||
|
* [ 北京优化国际医疗服务 开设预约挂号平台英文版](/jk/2025/02-13/10367909.shtml)
|
||||||
|
|
||||||
|
[创意](/chuangyi/)
|
||||||
|
|
||||||
|
* [ 开开心心闹元宵|春到人间暖,节来万家圆 ](https://www.chinanews.com.cn/sh/shipin/cns/2025/02-12/news1013000.shtml)
|
||||||
|
|
||||||
|
* [ 胖虎元宵乐翻天,福气满满一整年! ](https://www.chinanews.com.cn/shipin/cns/2025/02-12/news1013002.shtml)
|
||||||
|
|
||||||
|
* [ 世界老乡@义乌|“花式”闹元宵 ](https://www.chinanews.com.cn/chuangyi/2025/02-12/10367110.shtml)
|
||||||
|
|
||||||
|
* [ 元宵佳节至,幸福团圆时 ](https://www.chinanews.com.cn/chuangyi/2025/02-12/10367111.shtml)
|
||||||
|
|
||||||
|
* [ 公路会唱歌?一组AI海报看亚冬会中的高科技 ](https://www.chinanews.com.cn/chuangyi/2025/02-12/10367458.shtml)
|
||||||
|
|
||||||
|
[大湾区](/dwq/)
|
||||||
|
|
||||||
|
[粤港澳台青年同台献艺闹元宵](/dwq/2025/02-13/10367970.shtml)
|
||||||
|
|
||||||
|
* [香港中文大学(深圳)成立两个人工智能平台](/dwq/2025/02-13/10368363.shtml)
|
||||||
|
* [李家超:苏港携手共进 推动两地合作交流](/dwq/2025/02-13/10368207.shtml)
|
||||||
|
* [深圳港外贸集装箱吞吐量创新高](/dwq/2025/02-13/10368086.shtml)
|
||||||
|
* [粤港澳大湾区涉外律师学院在深揭牌](/dwq/2025/02-13/10368081.shtml)
|
||||||
|
* [南沙国际邮轮母港出入境旅客破5万人次](/dwq/2025/02-13/10367965.shtml)
|
||||||
|
* [广州多校调整课表、安排活动 用好“课间15分钟”](/dwq/2025/02-13/10367964.shtml)
|
||||||
|
* [广州筹建10万套保障性租赁住房](/dwq/2025/02-13/10367959.shtml)
|
||||||
|
|
||||||
|
[ 2025内地高校澳门大学生大湾区科创考察团赴横琴深圳研学交流 ](/dwq/2025/02-13/10368144.shtml)
|
||||||
|
|
||||||
|
[ 广东省金融机构累计发放碳减排贷款1236亿元 ](/dwq/2025/02-13/10368139.shtml)
|
||||||
|
|
||||||
|
[ 春运30天深圳铁路到发旅客近1700万人次 ](/dwq/2025/02-13/10368126.shtml)
|
||||||
|
|
||||||
|
[ 深港金融市场互联互通持续扩面提质 ](/dwq/2025/02-13/10368084.shtml)
|
||||||
|
|
||||||
|
[两岸](/taiwan.shtml)
|
||||||
|
|
||||||
|
* [ 评论:恢复两岸旅游,“小两会”沟通并非先决条件](/gn/2025/02-13/10368151.shtml)
|
||||||
|
* [ 从方言碰撞到文化共融 两岸夫妻成网红博主](/gn/2025/02-13/10368067.shtml)
|
||||||
|
* [ 领略古法生新奇——台中雾峰三代陶艺师的两岸足迹](/gn/2025/02-13/10368057.shtml)
|
||||||
|
* [ 台湾艺术家在江南古镇品非遗、闹元宵、共团圆](/sh/2025/02-13/10368000.shtml)
|
||||||
|
|
||||||
|
[融视听](/rst/)
|
||||||
|
|
||||||
|
[ ](//www.chinanews.com.cn/sh/shipin/cns/2025/01-23/news1011370.shtml)
|
||||||
|
|
||||||
|
[ **在北京中轴线上找“年味儿”** 跟着中新网一起穿梭北京中轴线,来一场别样的“年味儿”探索之旅。
|
||||||
|
](//www.chinanews.com.cn/sh/shipin/cns/2025/01-23/news1011370.shtml)
|
||||||
|
|
||||||
|
[ 义乌老板娘有多“卷”?她用36种语言把伞卖到全世界
|
||||||
|
](//www.chinanews.com.cn/sh/shipin/cns/2025/01-27/news1011813.shtml)
|
||||||
|
|
||||||
|
[ 春运大考,这个候车室的创新细节已上线!
|
||||||
|
](https://www.chinanews.com.cn/sh/shipin/cns/2025/01-27/news1011812.shtml)
|
||||||
|
|
||||||
|
[ 世界老乡@义乌丨这位约旦商人用美食连起两种“年味”
|
||||||
|
](https://www.chinanews.com.cn/sh/shipin/cns/2025/01-26/news1011707.shtml)
|
||||||
|
|
||||||
|
[文娱](/wy/)
|
||||||
|
|
||||||
|
[](/cul/2025/02-13/10368164.shtml)
|
||||||
|
|
||||||
|
[**赵涛、贾樟柯法国维苏尔领取终身成就“金三轮车”奖**](/cul/2025/02-13/10368164.shtml)
|
||||||
|
|
||||||
|
* [湖北省博物馆开启夜游 元宵奇妙夜精彩上演](/cul/2025/02-13/10367982.shtml)
|
||||||
|
* [海口古韵新风年味浓 换花祈福喜闹春](/cul/2025/02-13/10367961.shtml)
|
||||||
|
* [元宵节:传统与创新碰撞出怎样的新风味?](//www.chinanews.com.cn/shipin/2025/02-13/news1013123.shtml)
|
||||||
|
* [北京:四地非遗贺新春](/cul/2025/02-13/10367925.shtml)
|
||||||
|
|
||||||
|
[体育](/sports/)
|
||||||
|
|
||||||
|
[](/ty/2025/02-13/10368162.shtml)
|
||||||
|
|
||||||
|
[**亚冬会pin到底是啥?解读“别针上的文化名片”**](/ty/2025/02-13/10368162.shtml)
|
||||||
|
|
||||||
|
* [揭秘亚冬会赛事背后的气象保障](/ty/2025/02-13/10368137.shtml)
|
||||||
|
* [亚团赛:中国香港队取首胜 中国台北队力克新加坡队](/ty/2025/02-13/10367978.shtml)
|
||||||
|
* [开门红!中国国青力克卡塔尔队](/ty/2025/02-13/10367956.shtml)
|
||||||
|
* [擦干眼泪再出发 中国女冰很能拼](/ty/2025/02-13/10367936.shtml)
|
||||||
|
|
||||||
|
[华人](//www.chinaqw.com/)
|
||||||
|
|
||||||
|
[](//www.chinaqw.com/hqhr/2025/02-13/389916.shtml)
|
||||||
|
|
||||||
|
[**海外华侨华人:遥寄乡情于元宵**](//www.chinaqw.com/hqhr/2025/02-13/389916.shtml)
|
||||||
|
|
||||||
|
* [成都彭州市举办港澳台侨同胞闹元宵活动](//www.chinaqw.com/qx/2025/02-13/389923.shtml)
|
||||||
|
* [侨助广东高质量发展“新春第一会”将在广州举行](//www.chinaqw.com/qwxs/2025/02-13/389922.shtml)
|
||||||
|
* [华侨牵挂的,检察官都放在心上!](//www.chinaqw.com/qwxs/2025/02-13/389921.shtml)
|
||||||
|
* [2024年“汉语桥”参赛师生见面会圆满举行](//www.chinaqw.com/hwjy/2025/02-13/389918.shtml)
|
||||||
|
|
||||||
|
[直播](/live.shtml)
|
||||||
|
|
||||||
|
* [](/shipin/spfts/20250213/5768.shtml) [ 人生必去内蒙古 **瞰中国·我的家乡丨长调悠扬心自舒,人生必去内蒙古** ](/shipin/spfts/20250213/5768.shtml)
|
||||||
|
* [](/shipin/spfts/20250213/5766.shtml) [ 陪哪吒一起冲刺百亿元票房 **陪哪吒一起冲刺百亿元票房** ](/shipin/spfts/20250213/5766.shtml)
|
||||||
|
* [](/shipin/spfts/20250211/5765.shtml) [ 正月十五闹元宵 **吾乡 | 元宵节,体验灯月交辉里的中式浪漫** ](/shipin/spfts/20250211/5765.shtml)
|
||||||
|
* [](/shipin/spfts/20250209/5762.shtml) [ 深切缅怀黄旭华院士 **社会各界深切缅怀黄旭华院士** ](/shipin/spfts/20250209/5762.shtml)
|
||||||
|
* [](/shipin/spfts/20250208/5761.shtml) [ 春运返程路 **瞰中国 | 春运返程路 温情满归途** ](/shipin/spfts/20250208/5761.shtml)
|
50
test/ts.py
Normal file
50
test/ts.py
Normal file
@@ -0,0 +1,50 @@
import json

import yaml

import base64
import requests
import os
from openai import OpenAI

api_key = "ollama"  # example: masked API key
api_url = "http://192.168.1.95:11434/v1"
client = OpenAI(api_key=api_key, base_url=api_url)


def get_common_advice(image_path, prompt):
    # Inner helper: encode the image file as a base64 string
    def encode_image(image_path):
        with open(image_path, "rb") as image_file:
            return base64.b64encode(image_file.read()).decode('utf-8')

    # Get the base64 string and send it as a data URL in a multimodal chat request
    base64_image = encode_image(image_path)
    response = client.chat.completions.create(
        model="minicpm-v:latest",
        messages=[
            {
                "role": "user",
                "content": [
                    {
                        "type": "text",
                        "text": prompt
                    },
                    {
                        "type": "image_url",
                        "image_url": {
                            "url": f"data:image/png;base64,{base64_image}"
                        }
                    }
                ]
            }
        ]
    )
    return response.choices[0].message.content


if __name__ == '__main__':
    image_path = "img.png"
    prompt = "图片说了什么信息"  # "What information does the image show?"
    advice = get_common_advice(image_path, prompt)
    print(advice)
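
For reference, the same vision request can also be issued against the OpenAI-compatible /v1/chat/completions route with plain requests, which may be why requests and json are imported above; the following is only a minimal sketch assuming the same local Ollama server at http://192.168.1.95:11434 and the minicpm-v:latest model, and the ask_about_image helper name is illustrative rather than part of the repository.

import base64
import json

import requests

API_URL = "http://192.168.1.95:11434/v1/chat/completions"  # same local Ollama endpoint as above


def ask_about_image(image_path, prompt, model="minicpm-v:latest"):
    # Encode the image so it can be embedded as a data URL in the message content
    with open(image_path, "rb") as image_file:
        base64_image = base64.b64encode(image_file.read()).decode("utf-8")

    payload = {
        "model": model,
        "messages": [
            {
                "role": "user",
                "content": [
                    {"type": "text", "text": prompt},
                    {"type": "image_url",
                     "image_url": {"url": f"data:image/png;base64,{base64_image}"}},
                ],
            }
        ],
    }
    # Ollama ignores the bearer token, but an OpenAI-compatible gateway may require one
    headers = {"Authorization": "Bearer ollama", "Content-Type": "application/json"}
    response = requests.post(API_URL, headers=headers, data=json.dumps(payload), timeout=120)
    response.raise_for_status()
    return response.json()["choices"][0]["message"]["content"]


if __name__ == "__main__":
    print(ask_about_image("img.png", "What information does the image show?"))

Pointed at an OpenAI-compatible base_url, the openai client used in test/ts.py sends essentially this same JSON payload, so the two approaches should be interchangeable for local testing.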