@@ -310,13 +310,13 @@ func (this *Tags) GetSEOArea() (provinceArr, cityArr, districtArr []string, name
codeArr = map[int64][]int64{} // code-to-pcode (parent code) relationship
data := []map[string]interface{}{}
rediskey := fmt.Sprintf("pcseo_area")
- if l, ok := redis.Get("other", rediskey).([]interface{}); ok && l != nil && len(l) > 0 {
+ if l, ok := redis.Get("seoCache", rediskey).([]interface{}); ok && l != nil && len(l) > 0 {
data = qu.ObjArrToMapArr(l)
} else {
list := public.BaseMysql.SelectBySql(`select name,code,level,pcode from seo_words.seo_area order by code`)
// assemble the rows into the required parameters
if list != nil && len(*list) > 0 {
- redis.Put("other", rediskey, *list, 2*60*60)
+ redis.Put("seoCache", rediskey, *list, -1)
data = *list
}
}
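Every cache site in this change follows the same pattern: reads move from the shared "other" pool to a dedicated "seoCache" pool, and writes drop their fixed TTL in favour of -1 (no per-key expiry), leaving eviction to the cache instance itself. Below is a minimal sketch of that pattern, assuming only the redis.Get / redis.Put / qu.ObjArrToMapArr signatures visible in this diff; the cachedMapArr wrapper is hypothetical and for illustration only.

// --- illustrative sketch, not part of this diff ---
// Cache-aside helper mirroring the pattern used at each call site below.
func cachedMapArr(rediskey string, load func() []map[string]interface{}) []map[string]interface{} {
	// cache hit: values come back as []interface{} and are converted
	if l, ok := redis.Get("seoCache", rediskey).([]interface{}); ok && len(l) > 0 {
		return qu.ObjArrToMapArr(l)
	}
	// cache miss: load from MySQL/ES, then store with no per-key TTL (-1);
	// the dedicated seoCache redis handles eviction (see the 4G note at the end of the diff)
	data := load()
	if len(data) > 0 {
		redis.Put("seoCache", rediskey, data, -1)
	}
	return data
}
// --- end sketch ---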
@@ -384,7 +384,7 @@ func (this *Tags) GetkeysMap() []map[string][]map[string]interface{} {

func (this *Tags) GetIndustry(industryHref string) interface{} {
rediskey := fmt.Sprintf("pcindex_getIndustry_%s", industryHref)
- if l := redis.Get("other", rediskey); l != nil {
+ if l := redis.Get("seoCache", rediskey); l != nil {
return l
} else {
data := public.BaseMysql.SelectBySql(`select a.id,a.name,b.id class_id,b.name class_1 from seo_words.seo_industry a inner join seo_words.seo_industry_class b on a.class_1=b.name and a.class_2 !='药品' order by a.class_1`)
@@ -413,7 +413,7 @@ func (this *Tags) GetIndustry(industryHref string) interface{} {
v: industryMap[v],
})
}
- redis.Put("other", rediskey, m, 2*60)
+ redis.Put("seoCache", rediskey, m, -1)
return m
}
return nil
@@ -432,7 +432,7 @@ func IsInArr(arr []string, s string) bool {
// Fetch the latest bidding information
func (this *Tags) GetNewBidInfo() (list []map[string]interface{}) {
rediskey := fmt.Sprintf("pcindex_newArticle")
- if l, ok := redis.Get("other", rediskey).([]interface{}); ok && l != nil && len(l) > 0 {
+ if l, ok := redis.Get("seoCache", rediskey).([]interface{}); ok && l != nil && len(l) > 0 {
list = qu.ObjArrToMapArr(l)
} else {
_, _, lists := bidsearch.GetPcBidSearchData("", "", "", "", "招标预告,招标公告,招标结果,招标信用信息", "", "", "", "", "", "", "", "", 1, false, nil, bidSearch_field_1, "", false, false, "", 50, "")
@@ -452,7 +452,7 @@ func (this *Tags) GetNewBidInfo() (list []map[string]interface{}) {
}
}
list = *lists
- redis.Put("other", rediskey, list, 2*60*60)
+ redis.Put("seoCache", rediskey, list, -1)
}
}
return list
@@ -461,7 +461,7 @@ func (this *Tags) GetNewBidInfo() (list []map[string]interface{}) {
// Get the URLs related to each information type
func (this *Tags) GetStype(href string) (list []map[string]interface{}) {
rediskey := fmt.Sprintf("pcseo_stypelist_%s", href)
- if l, ok := redis.Get("other", rediskey).([]interface{}); ok && l != nil && len(l) > 0 {
+ if l, ok := redis.Get("seoCache", rediskey).([]interface{}); ok && l != nil && len(l) > 0 {
list = qu.ObjArrToMapArr(l)
} else {
m := []map[string]interface{}{}
@@ -498,7 +498,7 @@ func (this *Tags) GetStype(href string) (list []map[string]interface{}) {
}
}
list = m
- redis.Put("other", rediskey, list, 2*60)
+ redis.Put("seoCache", rediskey, list, -1)
}
return list
}
@@ -506,7 +506,7 @@ func (this *Tags) GetStype(href string) (list []map[string]interface{}) {
// Jianyu (剑鱼) blog
func (this *Tags) GetConsult() (list []map[string]interface{}) {
rediskey := fmt.Sprintf("pcseo_jybk")
- if l, ok := redis.Get("other", rediskey).([]interface{}); ok && l != nil && len(l) > 0 {
+ if l, ok := redis.Get("seoCache", rediskey).([]interface{}); ok && l != nil && len(l) > 0 {
list = qu.ObjArrToMapArr(l)
} else {

@@ -521,7 +521,7 @@ func (this *Tags) GetConsult() (list []map[string]interface{}) {
v["url"] = fmt.Sprintf("/jyblog/%s.html", qu.ObjToString(v["_id"]))
}
list = *rs
- redis.Put("other", rediskey, list, 2*60*60)
+ redis.Put("seoCache", rediskey, list, -1)
}
}
return list
@@ -529,8 +529,8 @@ func (this *Tags) GetConsult() (list []map[string]interface{}) {

func (this *Tags) GetLetterMap(pageSize, pageNum int64, letter string) ([]map[string]interface{}, int64) {
m := []map[string]interface{}{}
- sql := `select id,name,letter from seo_words.seo_resource where letter = ? order by id desc`
- cql := `select count(1) from seo_words.seo_resource where letter = ?`
+ sql := `select id,name,letter from seo_words.seo_resource where letter = ? and state=1 order by id desc`
+ cql := `select count(1) from seo_words.seo_resource where letter = ? and state=1 `
offset := (pageNum - 1) * pageSize
sql += fmt.Sprintf(" limit %v,%v", offset, pageSize)
data := public.BaseMysql.SelectBySql(sql, letter)
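For clarity, a worked example of the statement GetLetterMap now assembles (the letter stays a bound parameter; only the numeric offset and page size are formatted in):

// --- illustrative sketch, not part of this diff ---
// With pageNum = 3 and pageSize = 20: offset = (3-1)*20 = 40, so the query becomes
//   select id,name,letter from seo_words.seo_resource
//   where letter = ? and state=1 order by id desc limit 40,20
// The count query carries the same state=1 filter, so totals stay consistent with the page rows.
// --- end sketch ---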
@@ -552,7 +552,7 @@ func (this *Tags) GetLetterMap(pageSize, pageNum int64, letter string) ([]map[st

func (this *Tags) GetHotLabel(length int64) []map[string]interface{} {
rediskey := fmt.Sprintf("pcseo_getHotLabel_%v", length)
- if l, ok := redis.Get("other", rediskey).([]interface{}); ok && l != nil && len(l) > 0 {
+ if l, ok := redis.Get("seoCache", rediskey).([]interface{}); ok && l != nil && len(l) > 0 {
return qu.ObjArrToMapArr(l)
} else {
m := []map[string]interface{}{}
@@ -575,7 +575,7 @@ func (this *Tags) GetHotLabel(length int64) []map[string]interface{} {
})
}
}
- redis.Put("other", rediskey, m, 60)
+ redis.Put("seoCache", rediskey, m, -1)
return m
}
return nil
@@ -601,8 +601,8 @@ func (this *Tags) GetBidding(industry, area, city, stype, keyword string, reques
rediskey := fmt.Sprintf("pcseo_getbidding_%s_%s_%s_%s_%s", industry, area, city, stype, keyword)
rediskeyCount := fmt.Sprintf("pcseo_getbidding_count_%s_%s_%s_%s_%s", industry, area, city, stype, keyword)

- if l, ok := redis.Get("other", rediskey).([]interface{}); ok && l != nil {
- count := redis.GetInt("other", rediskeyCount)
+ if l, ok := redis.Get("seoCache", rediskey).([]interface{}); ok && l != nil {
+ count := redis.GetInt("seoCache", rediskeyCount)
return qu.ObjArrToMapArr(l), int64(count), false
} else {
if area != "" || stype != "" || industry != "" || city != "" || keyword != "" {
@@ -611,9 +611,14 @@ func (this *Tags) GetBidding(industry, area, city, stype, keyword string, reques
} else {
return nil, 0, true
}
+ now := time.Now()
+ starttime := fmt.Sprint(time.Date(now.Year()-1, now.Month(), now.Day(), now.Hour(), now.Minute(), now.Second(), 0, time.Local).Unix())
+ endtime := fmt.Sprint(now.Unix())
+
//
query1 := `{"query": {"bool": {"must":[`
query_start := `{"query": {"bool": {"must":[`
+
if area != "" {
query1 += `{"term":{"area":"` + area + `"}}`
}
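The added starttime/endtime pair limits every query in GetBidding to roughly the trailing year. A sketch that restates, in one place, what those two values are and where they end up; it mirrors the diff above and below rather than introducing anything new:

// --- illustrative sketch, not part of this diff ---
now := time.Now()
// the same wall-clock moment one year earlier; time.Date normalizes
// out-of-range days (e.g. Feb 29 in a non-leap year becomes Mar 1)
oneYearAgo := time.Date(now.Year()-1, now.Month(), now.Day(),
	now.Hour(), now.Minute(), now.Second(), 0, time.Local)
starttime := fmt.Sprint(oneYearAgo.Unix()) // Unix seconds, rendered to string for splicing into the JSON body
endtime := fmt.Sprint(now.Unix())
// appended to query1 further down as {"range":{"publishtime":{"gte":<starttime>,"lt":<endtime>}}}
// and passed to getLastNewsQuery / GetPcBidSearchData as "<starttime>_<endtime>"
// --- end sketch ---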
@@ -647,11 +652,16 @@ func (this *Tags) GetBidding(industry, area, city, stype, keyword string, reques
if query1 != query_start {
query1 += ","
}
- query1 += `{ "bool": {"must": [ { "multi_match": { "query": "` + keyword + `","type": "phrase", "fields": [ "title","detail" ]}}]}}`
+ query1 += `{ "bool": {"must": [ { "multi_match": { "query": "` + keyword + `","type": "phrase", "fields": [ "title","detail","purchasing" ]}}]}}`
}
+ if query1 != query_start {
+ query1 += ","
+ }
+ query1 += `{"range":{"publishtime":{"gte":` + starttime + `,"lt":` + endtime + `}}}`
+
query1 += `],"should": [],"minimum_should_match": 0}}}`
log.Println("~~~query1:", query1)
- query := getLastNewsQuery(area, "", stype, industry, city)
+ query := getLastNewsQuery(area, fmt.Sprintf("%s_%s", starttime, endtime), stype, industry, city)
var datas *[]map[string]interface{}
var startPage int
currentPage := 5
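For a concrete sense of what the hand-built query1 now contains when both area and keyword are set, a hedged example with placeholder values in angle brackets; the stype/industry/city clauses handled in the elided part of the function are omitted, and whitespace is added for readability:

// --- illustrative sketch, not part of this diff ---
// {"query":{"bool":{"must":[
//   {"term":{"area":"<area>"}},
//   {"bool":{"must":[{"multi_match":{"query":"<keyword>","type":"phrase",
//                                    "fields":["title","detail","purchasing"]}}]}},
//   {"range":{"publishtime":{"gte":<starttime>,"lt":<endtime>}}}
// ],"should":[],"minimum_should_match":0}}}
// --- end sketch ---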
@@ -667,7 +677,7 @@ func (this *Tags) GetBidding(industry, area, city, stype, keyword string, reques
datas = elastic.GetPage(INDEX, TYPE, query, bidSearch_sort, bidField, startPage, limitcount)
} else {
log.Println(keyword, area, city, industry)
- _, _, datas = bidsearch.GetPcBidSearchData(keyword, area, city, "", stype, industry, "", "", "", "", "", "", "", 0, true, []string{"title", "detail"}, bidField, "", false, false, "", limitcount, "")
+ _, _, datas = bidsearch.GetPcBidSearchData(keyword, area, city, fmt.Sprintf("%s_%s", starttime, endtime), stype, industry, "", "", "", "", "", "", "", 0, true, []string{"title", "detail", "purchasing"}, bidField, "", false, false, "", limitcount, "")

if datas != nil && len(*datas) > limitcount {
*datas = (*datas)[0:limitcount]
@@ -690,8 +700,8 @@ func (this *Tags) GetBidding(industry, area, city, stype, keyword string, reques
industry = strings.Split(industry, "_")[0]
}
public.BidListConvert(industry, datas)
- redis.Put("other", rediskey, datas, 2*60)
- redis.Put("other", rediskeyCount, count, 2*60)
+ redis.Put("seoCache", rediskey, datas, -1) // production uses a dedicated 4G redis for this pool; entries beyond 4G are evicted automatically
+ redis.Put("seoCache", rediskeyCount, count, -1) // production uses a dedicated 4G redis for this pool; entries beyond 4G are evicted automatically
return *datas, count, false
}
}
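The -1 TTLs only stay bounded because, per the comment above, the "seoCache" pool points at a dedicated redis instance with a memory cap. The corresponding server-side settings would look roughly like the following; the exact policy is an assumption and is not taken from this repository's configuration:

// --- illustrative sketch, not part of this diff ---
// redis.conf for the dedicated seoCache instance (assumed):
//   maxmemory 4gb
//   maxmemory-policy allkeys-lru   // evict least-recently-used keys once the cap is reached
// Without such a cap and policy, keys written with ttl -1 would accumulate indefinitely.
// --- end sketch ---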