Преглед на файлове

Merge branch 'hotfix/v4.9.19.1_ws' of qmx/jy into hotfix/v4.9.19.1

wangshan преди 1 година
родител
ревизия
6b977c2893
променени са 2 файла, в които са добавени 248 реда и са изтрити 78 реда
  1. 56 49
      src/jfw/front/pcIndex.go
  2. 192 29
      src/jfw/front/tags.go

+ 56 - 49
src/jfw/front/pcIndex.go

@@ -22,6 +22,7 @@ import (
 	"regexp"
 	"strconv"
 	"strings"
+	"sync"
 	"time"
 )
 
@@ -311,6 +312,8 @@ func GetNewArticle(typ int, pageSize int) (list []map[string]interface{}) {
 		typ = 1
 	}
 	rediskey := fmt.Sprintf("pcindex_newArticle_%d", typ)
+	redisKeySL := fmt.Sprintf("pcindex_newArticle_sl_%d", typ)
+	rl := &sync.Mutex{}
 	subtype := ""
 	switch typ {
 	case 1:
@@ -328,32 +331,45 @@ func GetNewArticle(typ int, pageSize int) (list []map[string]interface{}) {
 	}
 	if l, ok := redis.Get("other", rediskey).([]interface{}); ok && l != nil && len(l) > 0 {
 		list = util.ObjArrToMapArr(l)
-	} else {
-		var (
-			now         = time.Now()
-			startTime   = fmt.Sprint(time.Date(now.Year(), now.Month(), now.Day()-7, now.Hour(), now.Minute(), now.Second(), 0, time.Local).Unix()) //最近7天
-			endTime     = fmt.Sprint(now.Unix())
-			publishTime = fmt.Sprintf("%s_%s", startTime, endTime)
-		)
-		_, _, lists := bidsearch.GetPcBidSearchData("", "", "", publishTime, subtype, "", "", "", "", "", "", "", "", 1, false, nil, bidSearch_field_1, "", false, false, "", pageSize, "")
-		if lists != nil && len(*lists) > 5 {
-			*lists = (*lists)[0:6]
-			for _, v := range *lists {
-				// v["_id"] = EncodeArticleId2ByCheck(v["_id"].(string))
-				v["_id"] = encrypt.CommonEncodeArticle("indexcontent", v["_id"].(string))
-				delete(v, "toptype")
-				delete(v, "s_subscopeclass")
-				tmpdate := v["publishtime"]
-				v["publishtime"] = util.Int64All(tmpdate.(float64))
-				if v["budget"] != nil {
-					v["budget"] = ConversionMoeny(v["budget"])
-				} else if v["bidamount"] != nil {
-					v["budget"] = ConversionMoeny(v["bidamount"])
+	}
+	if list == nil || len(list) == 0 {
+		rl.Lock()
+		if l, ok := redis.Get("other", redisKeySL).([]interface{}); ok && l != nil && len(l) > 0 {
+			list = util.ObjArrToMapArr(l)
+		}
+		rl.Unlock()
+		if list == nil || len(list) == 0 {
+			var (
+				now         = time.Now()
+				startTime   = fmt.Sprint(time.Date(now.Year(), now.Month(), now.Day()-7, now.Hour(), now.Minute(), now.Second(), 0, time.Local).Unix()) //最近7天
+				endTime     = fmt.Sprint(now.Unix())
+				publishTime = fmt.Sprintf("%s_%s", startTime, endTime)
+			)
+			_, _, lists := bidsearch.GetPcBidSearchData("", "", "", publishTime, subtype, "", "", "", "", "", "", "", "", 1, false, nil, bidSearch_field_1, "", false, false, "", pageSize, "")
+			if lists != nil && len(*lists) > 5 {
+				*lists = (*lists)[0:6]
+				for _, v := range *lists {
+					// v["_id"] = EncodeArticleId2ByCheck(v["_id"].(string))
+					v["_id"] = encrypt.CommonEncodeArticle("indexcontent", v["_id"].(string))
+					delete(v, "toptype")
+					delete(v, "s_subscopeclass")
+					tmpdate := v["publishtime"]
+					v["publishtime"] = util.Int64All(tmpdate.(float64))
+					if v["budget"] != nil {
+						v["budget"] = ConversionMoeny(v["budget"])
+					} else if v["bidamount"] != nil {
+						v["budget"] = ConversionMoeny(v["bidamount"])
+					}
 				}
-			}
 
-			list = *lists
+				list = *lists
+			}
+		}
+		if list != nil && len(list) > 0 {
+			rl.Lock()
 			redis.Put("other", rediskey, list, 2*60*60)
+			redis.Put("other", redisKeySL, list, -1)
+			rl.Unlock()
 		}
 	}
 	return list
@@ -588,28 +604,18 @@ func (f *PcIndex) SearchResult(at, name string) error {
 		return f.Redirect("/swordfish/searchinfolist.html")
 	}
 	if area != "" || stype != "" || industry != "" || city != "" || keywords != "" {
+		var (
+			sl bool
+			rl = &sync.Mutex{}
+		)
 		//
 		list := redis.Get("other", "classify_"+name)
-		query1 := `{"query": {"bool": {"must":[`
-		if area != "" {
-			query1 += `{"term":{"area":"` + area + `"}}`
-		} else if stype != "" {
-			stype = getstype(stype)
-			query1 += `{"terms":{"subtype":[`
-			for k, v := range strings.Split(stype, ",") {
-				if k > 0 {
-					query1 += `,`
-				}
-				query1 += `"` + v + `"`
-			}
-			query1 += `]}}`
-			// query1 += `{"term":{"subtype":"` + stype + `"}}`
-		} else if industry != "" {
-			query1 += `{"term":{"industry":"` + industry + `"}}`
-		} else if city != "" {
-			query1 += `{"term":{"city":"` + city + `"}}`
+		if list == nil || len(list.([]interface{})) == 0 {
+			rl.Lock()
+			sl = true
+			list = redis.Get("other", "classify_sl_"+name)
+			rl.Unlock()
 		}
-		query1 += `],"should": [],"minimum_should_match": 0}}}`
 		//p353包含采购意向与拟建
 		//获取到昨天数据
 		if stype == "" {
@@ -619,14 +625,10 @@ func (f *PcIndex) SearchResult(at, name string) error {
 		}
 		var datas *[]map[string]interface{}
 		if list == nil || len(list.([]interface{})) == 0 {
+			sl = true
 			if keywords == "" {
-				//count := elastic.Count(INDEX, TYPE, query1)
 				currentPage = no
 				startPage = rand.New(rand.NewSource(time.Now().UnixNano())).Intn(currentPage * limitcount)
-				//count1 := util.IntAll(count)
-				//if count1 < startPage || startPage < 0 {
-				//	startPage = 0
-				//}
 				query := getLastNewsQuery(area, "", stype, industry, city)
 				datas = elastic.GetPage(INDEX, TYPE, query, bidSearch_sort, `"_id","title","publishtime","toptype","subtype","type","area","href","bidopentime","winner","buyer","bidamount","budget","s_subscopeclass","projectname"`, startPage, limitcount)
 			} else {
@@ -647,13 +649,18 @@ func (f *PcIndex) SearchResult(at, name string) error {
 					v["_id"] = encrypt.CommonEncodeArticle("content", util.ObjToString(v["_id"]))
 				}
 			}
-			timeout := util.IntAllDef(config.Sysconfig["pcIndexHotCacheTime"], 7200)
-			redis.Put("other", "classify_"+name, datas, timeout)
 		} else {
 			b, _ := json.Marshal(list)
 			json.Unmarshal(b, &datas)
 		}
 		if datas != nil && len(*datas) > 0 {
+			if sl {
+				rl.Lock()
+				timeout := util.IntAllDef(config.Sysconfig["pcIndexHotCacheTime"], 7200)
+				redis.Put("other", "classify_"+name, datas, timeout)
+				redis.Put("other", "classify_sl_"+name, datas, -1) //二级缓存
+				rl.Unlock()
+			}
 			public.BidListConvert(industry, datas)
 			res = searchresulthtml(datas)
 		}

+ 192 - 29
src/jfw/front/tags.go

@@ -12,6 +12,7 @@ import (
 	"net/http"
 	"strconv"
 	"strings"
+	"sync"
 	"time"
 
 	qu "app.yhyue.com/moapp/jybase/common"
@@ -459,10 +460,12 @@ func IsInArr(arr []string, s string) bool {
 
 // 获取最新招标信息
 func (this *Tags) GetNewBidInfo() (list []map[string]interface{}) {
-	rediskey := fmt.Sprintf("pcindex_newArticle")
-	if l, ok := redis.Get("seoCache", rediskey).([]interface{}); ok && l != nil && len(l) > 0 {
-		list = qu.ObjArrToMapArr(l)
-	} else {
+	var (
+		redisKey   = fmt.Sprintf("pcindex_newArticle")
+		redisKeySL = fmt.Sprintf("pcindex_newArticle_second_level") //二级缓存
+		redisLock  = &sync.Mutex{}
+	)
+	var newBidInfos = func() (list []map[string]interface{}) {
 		// p397 未登录详情页最新招投标信息去掉拟建
 		var (
 			now         = time.Now()
@@ -471,9 +474,11 @@ func (this *Tags) GetNewBidInfo() (list []map[string]interface{}) {
 			publishTime = fmt.Sprintf("%s_%s", startTime, endTime)
 		)
 		_, _, lists := bidsearch.GetPcBidSearchData("", "", "", publishTime, "招标预告,招标公告,招标结果,招标信用信息", "", "", "", "", "", "", "", "", 1, false, nil, bidSearch_field_1, "", false, false, "", 50, "")
-		if lists != nil && len(*lists) > 10 {
-			*lists = (*lists)[0:10]
-			for _, v := range *lists {
+		if lists != nil && len(*lists) > 0 {
+			if len(*lists) > 10 {
+				*lists = (*lists)[:10]
+			}
+			for _, v := range *lists {
 				v["_id"] = encrypt.CommonEncodeArticle("content", v["_id"].(string))
 				delete(v, "toptype")
 				delete(v, "s_subscopeclass")
@@ -487,9 +492,66 @@ func (this *Tags) GetNewBidInfo() (list []map[string]interface{}) {
 				}
 			}
 			list = *lists
-			redis.Put("seoCache", rediskey, list, cacheTime)
+		}
+		return
+	}
+	var bidInfosByCache = func(rk string) (list []map[string]interface{}) {
+		redisLock.Lock()
+		defer redisLock.Unlock()
+		if l, ok := redis.Get("seoCache", rk).([]interface{}); ok && l != nil && len(l) > 0 {
+			list = qu.ObjArrToMapArr(l)
+		}
+		return
+	}
+	var bidInfosToCache = func(list []map[string]interface{}) {
+		redisLock.Lock()
+		defer redisLock.Unlock()
+		redis.Put("seoCache", redisKey, list, cacheTime)
+		redis.Put("seoCache", redisKeySL, list, -1)
+
+	}
+
+	list = bidInfosByCache(redisKey)
+	if len(list) == 0 {
+		list = bidInfosByCache(redisKeySL)
+		if len(list) == 0 {
+			list = newBidInfos()
+		}
+		if len(list) > 0 {
+			bidInfosToCache(list)
 		}
 	}
+	//rediskey := fmt.Sprintf("pcindex_newArticle")
+	//if l, ok := redis.Get("seoCache", rediskey).([]interface{}); ok && l != nil && len(l) > 0 {
+	//	list = qu.ObjArrToMapArr(l)
+	//} else {
+	//	// p397 未登录详情页最新招投标信息去掉拟建
+	//	var (
+	//		now         = time.Now()
+	//		startTime   = fmt.Sprint(time.Date(now.Year(), now.Month(), now.Day()-7, now.Hour(), now.Minute(), now.Second(), 0, time.Local).Unix()) //最近7天
+	//		endTime     = fmt.Sprint(now.Unix())
+	//		publishTime = fmt.Sprintf("%s_%s", startTime, endTime)
+	//	)
+	//	_, _, lists := bidsearch.GetPcBidSearchData("", "", "", publishTime, "招标预告,招标公告,招标结果,招标信用信息", "", "", "", "", "", "", "", "", 1, false, nil, bidSearch_field_1, "", false, false, "", 50, "")
+	//	if lists != nil && len(*lists) > 10 {
+	//		*lists = (*lists)[0:10]
+	//		for _, v := range *lists {
+	//			v["_id"] = encrypt.CommonEncodeArticle("content", v["_id"].(string))
+	//			delete(v, "toptype")
+	//			delete(v, "s_subscopeclass")
+	//			tmpdate := v["publishtime"]
+	//			v["publishtime"] = qu.Int64All(tmpdate.(float64))
+	//			v["date"] = time.Unix(qu.Int64All(tmpdate.(float64)), 0).Format(date.Date_Short_Layout)
+	//			if v["budget"] != nil {
+	//				v["budget"] = ConversionMoeny(v["budget"])
+	//			} else if v["bidamount"] != nil {
+	//				v["budget"] = ConversionMoeny(v["bidamount"])
+	//			}
+	//		}
+	//		list = *lists
+	//		redis.Put("seoCache", rediskey, list, cacheTime)
+	//	}
+	//}
 	return list
 }
 
@@ -661,13 +723,17 @@ func (r *reqLimit) Release() {
 // GetBidding
 // 金额限制在"1000"~"100000000";需求因es压力过大,只返回60%的数据
 func (this *Tags) GetBidding(industry, area, city, stype, keyword string, request *http.Request, responseWriter http.ResponseWriter, session *httpsession.Session) ([]map[string]interface{}, int64, bool) {
-	rediskey := fmt.Sprintf("pcseo_getbidding_%s_%s_%s_%s_%s", industry, area, city, stype, keyword)
-	rediskeyCount := fmt.Sprintf("pcseo_getbidding_count_%s_%s_%s_%s_%s", industry, area, city, stype, keyword)
-
-	if l, ok := redis.Get("seoCache", rediskey).([]interface{}); ok && l != nil {
-		count := redis.GetInt("seoCache", rediskeyCount)
-		return qu.ObjArrToMapArr(l), int64(count), false
-	} else {
+	var (
+		rediskey        = fmt.Sprintf("pcseo_getbidding_%s_%s_%s_%s_%s", industry, area, city, stype, keyword)
+		rediskeyCount   = fmt.Sprintf("pcseo_getbidding_count_%s_%s_%s_%s_%s", industry, area, city, stype, keyword)
+		redisKeySL      = fmt.Sprintf("pcseo_getbidding_sl_%s_%s_%s_%s_%s", industry, area, city, stype, keyword)
+		redisKeyCountSL = fmt.Sprintf("pcseo_getbidding_count_sl_%s_%s_%s_%s_%s", industry, area, city, stype, keyword)
+		redisLock       = &sync.Mutex{}
+		data            []map[string]interface{}
+		count           int64
+		b               bool
+	)
+	var biddingData = func() ([]map[string]interface{}, int64, bool) {
 		if area != "" || stype != "" || industry != "" || city != "" || keyword != "" {
 			if flag := reqLimitInit.Limit(context.Background()); flag == 1 {
 				defer reqLimitInit.Release()
@@ -689,21 +755,10 @@ func (this *Tags) GetBidding(industry, area, city, stype, keyword string, reques
 			starttime := fmt.Sprint(time.Date(now.Year()-1, now.Month(), now.Day(), now.Hour(), now.Minute(), now.Second(), 0, time.Local).Unix())
 			endtime := fmt.Sprint(now.Unix())
 			var datas *[]map[string]interface{}
-			//var startPage int
 			var count int64
-			//currentPage := 5
 			limitCount := qu.IntAllDef(config.Seoconfig["tagsLimitCount"], 50)
 			if keyword == "" {
 				query := bidsearch.GetSearchQuery("", industry, "0.1", "1000", "", "", "", "", bidsearch.GetBidSearchQuery(area, city, fmt.Sprintf("%s_%s", starttime, endtime), stype, "", ""), "", false, seoBidField)
-				//count = elastic.Count(INDEX, TYPE, query)
-				//if count == 0 {
-				//	return nil, 0, false
-				//}
-				//startPage = rand.New(rand.NewSource(time.Now().UnixNano())).Intn(currentPage * limitCount)
-				//count1 := qu.IntAll(count)
-				//if count1 < startPage || startPage < 0 {
-				//	startPage = 0
-				//}
 				var countTmp int
 				datas, countTmp = elastic.GetOAPage(INDEX, TYPE, query, bidSearch_sort, seoBidField, 0, limitCount)
 				count = qu.Int64All(countTmp)
@@ -733,13 +788,121 @@ func (this *Tags) GetBidding(industry, area, city, stype, keyword string, reques
 					industry = strings.Split(industry, "_")[0]
 				}
 				public.BidListConvert(industry, datas)
-				redis.Put("seoCache", rediskey, datas, cacheTime)      //生产环境配置4G单独redis,超出4G自动清除
-				redis.Put("seoCache", rediskeyCount, count, cacheTime) //生产环境配置4G单独redis,超出4G自动清除
+				//redis.Put("seoCache", rediskey, datas, cacheTime)      //生产环境配置4G单独redis,超出4G自动清除
+				//redis.Put("seoCache", rediskeyCount, count, cacheTime) //生产环境配置4G单独redis,超出4G自动清除
 				return *datas, count, false
 			}
 		}
+		return nil, 0, false
+	}
+	var biddingByCache = func(rk, rc string) ([]map[string]interface{}, int64, bool) {
+		redisLock.Lock()
+		defer redisLock.Unlock()
+		if l, ok := redis.Get("seoCache", rk).([]interface{}); ok && l != nil {
+			count := redis.GetInt("seoCache", rc)
+			return qu.ObjArrToMapArr(l), int64(count), false
+		}
+		return nil, 0, false
 	}
-	return nil, 0, false
+	var biddingToCache = func(datas []map[string]interface{}, count int64) {
+		redisLock.Lock()
+		defer redisLock.Unlock()
+		redis.Put("seoCache", rediskey, datas, cacheTime)      //生产环境配置4G单独redis,超出4G自动清除
+		redis.Put("seoCache", rediskeyCount, count, cacheTime) //生产环境配置4G单独redis,超出4G自动清除
+		redis.Put("seoCache", redisKeySL, datas, -1)           //生产环境配置4G单独redis,超出4G自动清除
+		redis.Put("seoCache", redisKeyCountSL, count, -1)      //生产环境配置4G单独redis,超出4G自动清除
+
+	}
+	//缓存数据
+	data, count, b = biddingByCache(rediskey, rediskeyCount)
+	if data == nil || len(data) == 0 || count == 0 {
+		data, count, b = biddingByCache(redisKeySL, redisKeyCountSL)
+		if data == nil || len(data) == 0 || count == 0 {
+			//获取数据库数据
+			data, count, b = biddingData()
+		}
+		//更新缓存
+		if data != nil && len(data) > 0 && count > 0 {
+			biddingToCache(data, count)
+		}
+	}
+	return data, count, b
+	//if l, ok := redis.Get("seoCache", rediskey).([]interface{}); ok && l != nil {
+	//    count := redis.GetInt("seoCache", rediskeyCount)
+	//    return qu.ObjArrToMapArr(l), int64(count), false
+	//} else {
+	//    if area != "" || stype != "" || industry != "" || city != "" || keyword != "" {
+	//        if flag := reqLimitInit.Limit(context.Background()); flag == 1 {
+	//            defer reqLimitInit.Release()
+	//        } else {
+	//            if flag == -2 {
+	//                log.Println("等待队列已满")
+	//            } else if flag == -1 {
+	//                log.Println("等待超时")
+	//            }
+	//            return nil, 0, true
+	//        }
+	//
+	//        if public.Lst.IsLimited(request, responseWriter, session, false) == 1 { //没有被限制
+	//            defer public.Lst.Limit()
+	//        } else {
+	//            return nil, 0, true
+	//        }
+	//        now := time.Now()
+	//        starttime := fmt.Sprint(time.Date(now.Year()-1, now.Month(), now.Day(), now.Hour(), now.Minute(), now.Second(), 0, time.Local).Unix())
+	//        endtime := fmt.Sprint(now.Unix())
+	//        var datas *[]map[string]interface{}
+	//        //var startPage int
+	//        var count int64
+	//        //currentPage := 5
+	//        limitCount := qu.IntAllDef(config.Seoconfig["tagsLimitCount"], 50)
+	//        if keyword == "" {
+	//            query := bidsearch.GetSearchQuery("", industry, "0.1", "1000", "", "", "", "", bidsearch.GetBidSearchQuery(area, city, fmt.Sprintf("%s_%s", starttime, endtime), stype, "", ""), "", false, seoBidField)
+	//            //count = elastic.Count(INDEX, TYPE, query)
+	//            //if count == 0 {
+	//            //	return nil, 0, false
+	//            //}
+	//            //startPage = rand.New(rand.NewSource(time.Now().UnixNano())).Intn(currentPage * limitCount)
+	//            //count1 := qu.IntAll(count)
+	//            //if count1 < startPage || startPage < 0 {
+	//            //	startPage = 0
+	//            //}
+	//            var countTmp int
+	//            datas, countTmp = elastic.GetOAPage(INDEX, TYPE, query, bidSearch_sort, seoBidField, 0, limitCount)
+	//            count = qu.Int64All(countTmp)
+	//            if count == 0 {
+	//                return nil, 0, false
+	//            }
+	//        } else {
+	//            count, _, datas = bidsearch.GetPcBidSearchData(keyword, area, city, fmt.Sprintf("%s_%s", starttime, endtime), stype, industry, "", "", "", "", "", "", "", 0, true, []string{"title", "detail", "purchasing"}, seoBidField, "", false, false, "", limitCount, "")
+	//            if datas != nil && len(*datas) > limitCount {
+	//                *datas = (*datas)[0:limitCount]
+	//            }
+	//        }
+	//        if datas != nil && len(*datas) > 0 {
+	//            for _, v := range *datas {
+	//                v["_id"] = encrypt.CommonEncodeArticle("content", qu.ObjToString(v["_id"]))
+	//                v["date"] = time.Unix(qu.Int64All(v["publishtime"]), 0).Format(date.Date_Short_Layout)
+	//                highlight, _ := v["highlight"].(map[string][]string)
+	//                detail := ""
+	//                for _, val := range highlight["detail"] {
+	//                    detail += public.ClearHtml.ReplaceAllString(val, "")
+	//                }
+	//                if detail != "" {
+	//                    v["detail"] = detail
+	//                }
+	//            }
+	//            if strings.Contains(industry, "_") {
+	//                industry = strings.Split(industry, "_")[0]
+	//            }
+	//            public.BidListConvert(industry, datas)
+	//            redis.Put("seoCache", rediskey, datas, cacheTime)      //生产环境配置4G单独redis,超出4G自动清除
+	//            redis.Put("seoCache", rediskeyCount, count, cacheTime) //生产环境配置4G单独redis,超出4G自动清除
+	//            return *datas, count, false
+	//        }
+	//    }
+	//}
+	//return nil, 0, false
 }
 
 // 获取关键词