package aiSearch

import (
	"context"
	"fmt"
	"io"
	"strings"
	"time"

	"aiChat/api/aiSearch/v1"
	"aiChat/internal/model"
	"aiChat/internal/model/bidSearch"
	"aiChat/utility"

	. "app.yhyue.com/moapp/jybase/encrypt"
	"github.com/gogf/gf/v2/database/gdb"
	"github.com/gogf/gf/v2/encoding/gjson"
	"github.com/gogf/gf/v2/frame/g"
	"github.com/gogf/gf/v2/os/gtime"
	"github.com/gogf/gf/v2/util/gconv"
)

const (
	// DateTimeMill is the gtime layout used for millisecond timestamps
	// (PHP-style tokens: "h" is the 12-hour clock, "u" is milliseconds).
	DateTimeMill = "Y-m-d h:i:s.u"

	// Model names as stored in the chat records and call logs.
	largeModelDouBao = "豆包"
	largeModelZhiPu  = "智普"
)
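
// Chat answers an AI-search question. It decodes the session id, asks the Doubao
// model to turn the question into structured search conditions (falling back to
// Zhipu when Doubao fails or hits its daily call limit), runs the bid search, and
// saves the chat record, the matched biddings and the model call logs in a single
// transaction before returning the capped result list to the client.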
func (c *ControllerV1) Chat(ctx context.Context, req *v1.ChatReq) (res *v1.ChatRes, err error) {
	res = &v1.ChatRes{Status: 0}
	sid := gconv.Int64(SE.Decode4HexByCheck(req.SId))
	if sid == 0 {
		g.Log().Error(ctx, "无效的sid参数", req.SId)
		return
	}
	sess, sessErr := model.GetSession(g.RequestFromCtx(ctx))
	if sessErr != nil {
		g.Log().Error(ctx, "获取session出错", sessErr)
		return
	}
	startTime := gtime.Now().Format(DateTimeMill)
	answerStatus := 0
	largeModel := largeModelDouBao
	callLogs := g.List{}
	// Ask Doubao first and record the call unless it was skipped because of the daily limit.
	content, largeModelReply, err, isLimit := c.doubao(ctx, largeModelDouBao, req.Question)
	if !isLimit {
		largeModelSuccess := 1
		errorMsg := ""
		if err != nil {
			errorMsg = err.Error()
			largeModelSuccess = 0
		}
		callLogs = append(callLogs, g.Map{
			"position_id":           sess.PositionId,
			"large_model":           largeModel,
			"large_model_reply":     largeModelReply,
			"large_model_starttime": startTime,
			"large_model_endtime":   gtime.Now().Format(DateTimeMill),
			"large_model_success":   largeModelSuccess,
			"error_msg":             errorMsg,
		})
	}
	// Fall back to Zhipu when Doubao hit its daily limit or returned an error.
	if isLimit || err != nil {
		content, largeModelReply, err, _ = c.zhipu(ctx, largeModelZhiPu, req.Question)
		largeModel = largeModelZhiPu
		largeModelSuccess := 1
		errorMsg := ""
		if err != nil {
			errorMsg = err.Error()
			largeModelSuccess = 0
		}
		callLogs = append(callLogs, g.Map{
			"position_id":           sess.PositionId,
			"large_model":           largeModel,
			"large_model_reply":     largeModelReply,
			"large_model_starttime": startTime,
			"large_model_endtime":   gtime.Now().Format(DateTimeMill),
			"large_model_success":   largeModelSuccess,
			"error_msg":             errorMsg,
		})
	}
	largeModelEndtime := gtime.Now().Format(DateTimeMill)
	if err == nil {
		answerStatus = 1
	} else {
		largeModel = ""
	}
	bs, bsErr := bidSearch.NewBidSearch(ctx, sess.PersonId, content)
	if bsErr != nil {
		return
	}
	query, list := bs.Search()
	answer := ""
	var bestBids []*v1.ResBidding
	if len(list) > 0 {
		// Cap the list returned to the client. The copy shares its underlying elements
		// with list, so the InfoId encoding below is visible through list as well.
		if bestBidListMaxLen := g.Cfg("ai_search.yaml").MustGet(ctx, "bestBidListMaxLen").Int(); len(list) > bestBidListMaxLen {
			bestBids = make([]*v1.ResBidding, bestBidListMaxLen)
			copy(bestBids, list[:bestBidListMaxLen])
		} else {
			bestBids = make([]*v1.ResBidding, len(list))
			copy(bestBids, list)
		}
		answer = gconv.String(bestBids)
		// Flag biddings the user has already collected and encode the ids for the client.
		collection := utility.GetMyBidCollect(ctx, sess.PositionId)
		for _, v := range bestBids {
			if collection[v.InfoId] {
				v.Collect = 1
			}
			v.InfoId = EncodeArticleId2ByCheck(v.InfoId)
		}
	}
	var chatId int64
	if err := g.DB().Transaction(ctx, func(ctx context.Context, tx gdb.TX) error {
		var chatErr error
		chatId, chatErr = tx.InsertAndGetId("ai_search_chat", g.Map{
			"position_id":         sess.PositionId,
			"item":                req.Item,
			"question":            req.Question,
			"answer":              answer,
			"starttime":           startTime,
			"large_model_endtime": largeModelEndtime,
			"endtime":             gtime.Now().Format(DateTimeMill),
			"es_query":            query,
			"list_count":          len(list),
			"session_id":          sid,
			"status":              1,
			"large_model":         largeModel,
			"answer_status":       answerStatus,
			"create_time":         gtime.Datetime(),
		})
		if chatErr != nil {
			g.Log().Error(ctx, "ai_search_chat保存出错", chatErr)
			return chatErr
		}
		// Save every matched bidding for this chat (batched inserts of 200 rows).
		bids := g.List{}
		for _, v := range list {
			bids = append(bids, g.Map{
				"position_id": sess.PositionId,
				"chat_id":     chatId,
				"infoid":      v.InfoId,
				"title":       v.Title,
				"area":        v.Area,
				"city":        v.City,
				"district":    v.District,
				"subtype":     v.Subtype,
				"industry":    v.Industry,
				"annex":       v.Annex,
				"buyerclass":  v.Buyerclass,
				"budget":      v.Budget,
				"bidamount":   v.Bidamount,
				"publishtime": v.Publishtime,
				"create_time": gtime.Datetime(),
			})
		}
		if len(bids) > 0 {
			if _, bidsErr := tx.Insert("ai_search_bidding", bids, 200); bidsErr != nil {
				g.Log().Error(ctx, "ai_search_bidding保存出错", bidsErr)
				return bidsErr
			}
		}
		// Save the large-model call logs recorded above.
		for _, v := range callLogs {
			v["chat_id"] = chatId
			v["create_time"] = gtime.Datetime()
		}
		if len(callLogs) > 0 {
			if _, callLogsErr := tx.Insert("ai_search_log", callLogs); callLogsErr != nil {
				g.Log().Error(ctx, "ai_search_log保存出错", callLogsErr)
				return callLogsErr
			}
		}
		return nil
	}); err == nil {
		res.Id = SE.Encode2HexByCheck(gconv.String(chatId))
		res.Status = 1
		res.List = bestBids
		res.LargeModelName = largeModel
		res.LargeModelReply = gconv.Map(largeModelReply)
	} else {
		g.Log().Error(ctx, sess.PositionId, "保存数据库出错", err)
	}
	return
}
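
// The code in this file reads its tunables from ai_search.yaml. The keys below are the
// ones referenced here; the values are only an illustrative sketch (assumed, not taken
// from the real configuration):
//
//	bestBidListMaxLen: 10   # max biddings returned to the client (assumed value)
//	doubaoCallMax: 10000    # daily Doubao call cap (assumed value)
//	zhipuCallMax: 10000     # daily Zhipu call cap (assumed value)
//	timeout: 60             # HTTP timeout in seconds (assumed value)
//	doubaoPrompt: "..."     # template with two %s placeholders: current date (Ymd) and the question
//	zhipuPrompt: "..."      # template with two %s placeholders: current date (Ymd) and the question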

// doubao asks the Doubao (Volcengine Ark) chat-completions API to parse the question.
// Example response:
// {"choices":[{"finish_reason":"stop","index":0,"logprobs":null,"message":{"content":"```json\n{\n \"关键词\": {\n \"选择\": [\"华为\"],\n \"排除\": []\n },\n \"发布时间范围\": \"20250128-20250227\",\n \"信息类型\": [\"招标公告\"],\n \"地区\": {\n \"选择\": [],\n \"排除\": []\n },\n \"金额\": \"不限\",\n \"搜索范围\": [\"标题\", \"正文\"],\n \"附件\": \"不限\",\n \"匹配模式\": \"精准匹配\",\n \"中标单位\": \"不限\",\n \"采购单位\": \"华为\"\n}\n```","role":"assistant"}}],"created":1740638708,"id":"02174063870434056d8eac7f9446a6e5013632558f9a2392ad2ee","model":"deepseek-v3-241226","object":"chat.completion","usage":{"completion_tokens":115,"prompt_tokens":1036,"total_tokens":1151,"prompt_tokens_details":{"cached_tokens":0},"completion_tokens_details":{"reasoning_tokens":0}}}
func (c *ControllerV1) doubao(ctx context.Context, largeModel, question string) (string, string, error, bool) {
	content := fmt.Sprintf(g.Cfg("ai_search.yaml").MustGet(ctx, "doubaoPrompt").String(), gtime.Now().Format("Ymd"), question)
	// Build the chat-completions request body.
	messages := []map[string]interface{}{
		{
			"role":    "user",
			"content": content,
		},
	}
	requestData := map[string]interface{}{
		"model":       "ep-20250207170552-g8dsx", // Volcengine Ark inference endpoint id
		"temperature": 0.1,
		"top_p":       0.7,
		"messages":    messages,
	}
	return c.post(ctx, largeModel, "aiSearch_doubaoCall_%s", "doubaoCallMax", "https://ark.cn-beijing.volces.com/api/v3/chat/completions", "3dd861bf-b8a7-41d4-bb0b-5076362c572d", requestData)
}

// zhipu asks the Zhipu (BigModel) chat-completions API to parse the question. It is the
// fallback when the Doubao call fails or its daily limit is reached.
// Example response:
// {"choices":[{"finish_reason":"stop","index":0,"message":{"content":"```json\n{\n \"关键词\": {\n \"选择\": [\"华为\"]\n },\n \"发布时间范围\": \"20250201-20250227\",\n \"信息类型\": [\"招标公告\"],\n \"地区\": {\n \"选择\": []\n },\n \"金额\": \"不限\",\n \"搜索范围\": [\"标题\", \"正文\"],\n \"附件\": \"不限\",\n \"匹配模式\": \"精准匹配\",\n \"中标单位\": \"不限\",\n \"采购单位\": \"不限\"\n}\n```","role":"assistant"}}],"created":1740639272,"id":"20250227145429c1507e742fc643f1","model":"glm-4-flash","request_id":"20250227145429c1507e742fc643f1","usage":{"completion_tokens":110,"prompt_tokens":1033,"total_tokens":1143}}
func (c *ControllerV1) zhipu(ctx context.Context, largeModel, question string) (string, string, error, bool) {
	content := fmt.Sprintf(g.Cfg("ai_search.yaml").MustGet(ctx, "zhipuPrompt").String(), gtime.Now().Format("Ymd"), question)
	// Build the chat-completions request body.
	messages := []map[string]interface{}{
		{
			"role":    "user",
			"content": content,
		},
	}
	// Alternative models: glm-4-air, glm-4-0520, glm-4-flash.
	requestData := map[string]interface{}{
		"model":       "glm-4-flash",
		"messages":    messages,
		"temperature": 0.1,
		"max_tokens":  4096,
	}
	return c.post(ctx, largeModel, "aiSearch_zhipuCall_%s", "zhipuCallMax", "https://open.bigmodel.cn/api/paas/v4/chat/completions", "3d84d30b7ab4c94dbf71853cb7e44719.hLLS4CA2MqVQs6kR", requestData)
}
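
// post increments a per-day call counter in Redis (redisKey is a format string that takes
// the current Ymd date) and refuses the request once the counter exceeds the limit read
// from callMaxConf in ai_search.yaml. Otherwise it POSTs requestData to apiURL with a
// Bearer token, logs the raw reply, and returns the extracted choices[0].message.content
// (with the markdown code fences stripped), the raw reply, any error, and a flag telling
// the caller whether the daily limit was hit.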
func (c *ControllerV1) post(ctx context.Context, largeModel, redisKey, callMaxConf, apiURL, pass string, requestData map[string]interface{}) (string, string, error, bool) {
	count, err := g.Redis("main").Incr(ctx, fmt.Sprintf(redisKey, gtime.Now().Format("Ymd")))
	if err != nil {
		g.Log().Error(ctx, largeModel, "从redis获取调用次数出错", err)
		return "", "", err, true
	} else if callMax := g.Cfg("ai_search.yaml").MustGet(ctx, callMaxConf).Int64(); count > callMax {
		g.Log().Info(ctx, largeModel, "调用次数达到上限", callMax, count)
		return "", "", nil, true
	}
	resp, err := g.Client().Timeout(time.Duration(g.Cfg("ai_search.yaml").MustGet(ctx, "timeout").Int())*time.Second).
		SetHeader("Authorization", fmt.Sprintf("Bearer %s", pass)).
		ContentType("application/json").
		Post(ctx, apiURL, requestData)
	if err != nil {
		g.Log().Error(ctx, largeModel, "请求出错", err)
		return "", "", err, false
	}
	defer resp.Body.Close()
	b, be := io.ReadAll(resp.Body)
	if be != nil {
		g.Log().Error(ctx, largeModel, "读取响应出错", be)
		return "", "", be, false
	}
	largeModelReply := string(b)
	g.Log().Info(ctx, largeModel, "请求回复", largeModelReply)
	r, re := gjson.LoadJson(b)
	if re != nil {
		g.Log().Error(ctx, largeModel, largeModelReply, "gjson.LoadJson出错", re)
		return "", largeModelReply, re, false
	}
	choices := r.GetJsons("choices")
	if len(choices) == 0 {
		return "", largeModelReply, nil, false
	}
	message := choices[0].GetJson("message")
	if message == nil {
		return "", largeModelReply, nil, false
	}
	// The models wrap the JSON answer in a markdown code fence; strip it before returning.
	content := message.Get("content").String()
	content = strings.ReplaceAll(content, "```json", "")
	content = strings.ReplaceAll(content, "```", "")
	return content, largeModelReply, nil, false
}