diff --git a/apps/ichat/serializers.py b/apps/ichat/serializers.py new file mode 100644 index 00000000..1e7a6b33 --- /dev/null +++ b/apps/ichat/serializers.py @@ -0,0 +1,4 @@ +from rest_framework import serializers + +class CustomLLMrequestSerializer(serializers.Serializer): + prompt = serializers.CharField() \ No newline at end of file diff --git a/apps/ichat/urls.py b/apps/ichat/urls.py new file mode 100644 index 00000000..581a7a6f --- /dev/null +++ b/apps/ichat/urls.py @@ -0,0 +1,12 @@ +from apps.ichat.views import QueryLLMview +from django.urls import path, include +from rest_framework.routers import DefaultRouter + +API_BASE_URL = 'api/hrm/' +HTML_BASE_URL = 'dhtml/hrm/' + +router = DefaultRouter() +router.register('llm/query/', QueryLLMview, basename='llm_query') +urlpatterns = [ + path(API_BASE_URL, include(router.urls)), +] \ No newline at end of file diff --git a/apps/ichat/views.py b/apps/ichat/views.py index 2a7a2a0f..25cb5d80 100644 --- a/apps/ichat/views.py +++ b/apps/ichat/views.py @@ -4,11 +4,20 @@ from langchain_core.language_models import LLM from langchain_core.outputs import LLMResult, Generation from langchain_experimental.sql import SQLDatabaseChain from langchain_community.utilities import SQLDatabase +from server.conf import DATABASES +from serializers import CustomLLMrequestSerializer +from rest_framework.views import APIView +from urllib.parse import quote_plus # fastapi from fastapi import FastAPI from pydantic import BaseModel -db = SQLDatabase.from_uri("postgresql+pg8000://postgres:zcDsj%402024@127.0.0.1:5432/factory", include_tables=["enm_mpoint", "enm_mpointstat"]) + +db_conf = DATABASES['default'] +# 密码需要 URL 编码(因为有特殊字符如 @) +password_encodeed = quote_plus(db_conf['password']) + +db = SQLDatabase.from_uri(f"postgresql+psycopg2://{db_conf['user']}:{password_encodeed}@{db_conf['host']}/{db_conf['name']}", include_tables=["enm_mpoint", "enm_mpointstat"]) # model_url = "http://14.22.88.72:11025/v1/chat/completions" model_url = "http://139.159.180.64:11434/v1/chat/completions" @@ -59,17 +68,12 @@ class CustomLLM(LLM): return "custom_llm" -# 实例化 -app = FastAPI() - -class CustomLLMRequest(BaseModel): - prompt: str - -@app.post("/llm/query/") -def query(custom_llm_request: CustomLLMRequest): - prompt = custom_llm_request.prompt - llm = CustomLLM(model_url=model_url) - db_chain = SQLDatabaseChain.from_llm(llm, db, verbose=True) - result = db_chain.invoke(prompt) - print('result--', result, prompt) - return {"result": result} +class QueryLLMview(APIView): + def post(self, request): + serializer = CustomLLMrequestSerializer(data=request.data) + serializer.is_valid(raise_exception=True) + prompt = serializer.validated_data['prompt'] + llm = CustomLLM(model_url=model_url) + chain = SQLDatabaseChain(llm=llm, database=db, verbose=True) + result = chain.invoke(prompt) + return result \ No newline at end of file