feat:修改大模型文件

This commit is contained in:
zty 2025-04-23 16:54:49 +08:00
parent 8183073026
commit 7e6ca8cf70
1 changed file with 23 additions and 17 deletions

View File

@@ -1,15 +1,19 @@
from django.shortcuts import render
# Create your views here.
import requests
from pydantic import Field
from langchain_core.language_models import LLM
from langchain_core.outputs import LLMResult, Generation
import requests
from langchain_experimental.sql import SQLDatabaseChain
from langchain_community.utilities import SQLDatabase
# fastapi
from fastapi import FastAPI
from pydantic import BaseModel
db = SQLDatabase.from_uri("postgresql+pg8000://postgres:zcDsj%402024@127.0.0.1:5432/factory", include_tables=["enm_mpoint", "enm_mpointstat"])
# model_url = "http://14.22.88.72:11025/v1/chat/completions"
model_url = "http://139.159.180.64:11434/v1/chat/completions"
class CustomLLM(LLM):
model_url: str
def _call(self, prompt: str, stop: list = None) -> str:
data = {
"model": "glm4",
@@ -30,7 +34,6 @@ class CustomLLM(LLM):
response.raise_for_status()
content = response.json()["choices"][0]["message"]["content"]
clean_sql = self.strip_sql_markdown(content)
print('clean content----------', clean_sql)
return clean_sql
def _generate(self, prompts: list, stop: list = None) -> LLMResult:
@@ -54,16 +57,19 @@ class CustomLLM(LLM):
@property
def _llm_type(self) -> str:
return "custom_llm"
# 实例化
# model_url = "http://14.22.88.72:11025/v1/chat/completions"
model_url = "http://139.159.180.64:11434/v1/chat/completions"
llm = CustomLLM(model_url=model_url)
app = FastAPI()
# 测试数据库查询
from langchain_experimental.sql import SQLDatabaseChain
from langchain_community.utilities import SQLDatabase
class CustomLLMRequest(BaseModel):
prompt: str
db = SQLDatabase.from_uri("postgresql+pg8000://postgres:zcDsj%402024@127.0.0.1:5432/factory")
db_chain = SQLDatabaseChain.from_llm(llm, db, verbose=True)
result = db_chain.invoke("帮我生成一份数据库的表结构")
print('res-------------------------------',result)
@app.post("/llm/query/")
def query(custom_llm_request: CustomLLMRequest):
prompt = custom_llm_request.prompt
llm = CustomLLM(model_url=model_url)
db_chain = SQLDatabaseChain.from_llm(llm, db, verbose=True)
result = db_chain.invoke(prompt)
print('result--', result, prompt)
return {"result": result}