|
@@ -7,6 +7,7 @@ import (
|
|
|
"jfw/jylabutil"
|
|
|
"jfw/public"
|
|
|
"jfw/wx"
|
|
|
+ "log"
|
|
|
"math/rand"
|
|
|
"qfw/util"
|
|
|
"qfw/util/bidsearch"
|
|
@@ -365,6 +366,7 @@ func (f *PcIndex) SearchResult(at, name string) error {
|
|
|
return f.Redirect("/swordfish/searchinfolist.html")
|
|
|
}
|
|
|
if area != "" || stype != "" || industry != "" || city != "" || keywords != "" {
|
|
|
+ //
|
|
|
list := redis.Get("other", "classify_"+name)
|
|
|
query1 := `{"query": {"bool": {"must":[`
|
|
|
if area != "" {
|
|
@@ -386,7 +388,36 @@ func (f *PcIndex) SearchResult(at, name string) error {
|
|
|
query1 += `{"term":{"city":"` + city + `"}}`
|
|
|
}
|
|
|
query1 += `],"should": [],"minimum_should_match": 0}}}`
|
|
|
+ // Fetch yesterday's data (the tiered limit/date-window logic below is commented out)
|
|
|
+ // ct := SeoDateCount(area, stype, industry, city, keywords, key_industry)
|
|
|
+ // publishtime := ""
|
|
|
+ // if ct < 100 {
|
|
|
+ // limitcount = 20
|
|
|
+ // start, _ := time.ParseInLocation(util.Date_Short_Layout, time.Now().Format(util.Date_Short_Layout), time.Local)
|
|
|
+ // st := start.Unix()
|
|
|
+ // et := start.AddDate(0, 0, -7).Unix()
|
|
|
+ // publishtime = fmt.Sprintf("%v_%v", et, st)
|
|
|
+ // } else if ct >= 100 && ct < 500 {
|
|
|
+ // limitcount = 50
|
|
|
+ // start, _ := time.ParseInLocation(util.Date_Short_Layout, time.Now().Format(util.Date_Short_Layout), time.Local)
|
|
|
+ // st := start.Unix()
|
|
|
+ // et := start.AddDate(0, 0, -3).Unix()
|
|
|
+ // publishtime = fmt.Sprintf("%v_%v", et, st)
|
|
|
+ // } else if ct >= 500 && ct < 1500 {
|
|
|
+ // limitcount = 50
|
|
|
+ // start, _ := time.ParseInLocation(util.Date_Short_Layout, time.Now().Format(util.Date_Short_Layout), time.Local)
|
|
|
+ // st := start.Unix()
|
|
|
+ // et := start.AddDate(0, 0, 1).Unix()
|
|
|
+ // publishtime = fmt.Sprintf("%v_%v", st, et)
|
|
|
+ // } else if ct >= 1500 {
|
|
|
+ // limitcount = 100
|
|
|
+ // start, _ := time.ParseInLocation(util.Date_Short_Layout, time.Now().Format(util.Date_Short_Layout), time.Local)
|
|
|
+ // st := start.Unix()
|
|
|
+ // et := start.AddDate(0, 0, 1).Unix()
|
|
|
+ // publishtime = fmt.Sprintf("%v_%v", st, et)
|
|
|
+ // }
|
|
|
query := getLastNewsQuery(area, "", stype, industry, city)
|
|
|
+
|
|
|
var datas *[]map[string]interface{}
|
|
|
if list == nil || len(list.([]interface{})) == 0 {
|
|
|
if keywords == "" {
|
|
@@ -399,12 +430,23 @@ func (f *PcIndex) SearchResult(at, name string) error {
|
|
|
startPage = 0
|
|
|
}
|
|
|
datas = elastic.GetPage(INDEX, TYPE, query, bidSearch_sort, `"_id","title","publishtime","toptype","subtype","type","area","href","bidopentime","winner","buyer","bidamount","budget","s_subscopeclass","projectname"`, startPage, limitcount)
|
|
|
+ log.Println(len(*datas), "=====", limitcount)
|
|
|
+ // // log.Println("datas:", *datas)
|
|
|
+ // if datas == nil || len(*datas) == 0 {
|
|
|
+ //datas = elastic.GetPage(INDEX, TYPE, getLastNewsQuery(area, "", stype, industry, city), bidSearch_sort, `"_id","title","publishtime","toptype","subtype","type","area","href","bidopentime","winner","buyer","bidamount","budget","s_subscopeclass","projectname"`, startPage, limitcount)
|
|
|
+ // }
|
|
|
+
|
|
|
} else {
|
|
|
- //关键词
|
|
|
- _, _, datas = bidsearch.GetPcBidSearchData(keywords, "", "", "", key_industry, "", "", "", "", "", "", 0, bidsearch.SearchPageSize_PC, false, nil, bidSearch_field_1, "", false)
|
|
|
+ //Keywords: if no data is found within the given time window, fall back to the old rule // rule was cut mid-development, temporarily commented out
|
|
|
+ // _, _, datas = bidsearch.GetPcBidSearchData(keywords, "", publishtime, "", key_industry, "", "", "", "", "", "", 0, bidsearch.SearchPageSize_PC, true, nil, bidSearch_field_1, "", false)
|
|
|
+ // if datas != nil && len(*datas) > limitcount {
|
|
|
+ // *datas = (*datas)[0:limitcount]
|
|
|
+ // } else {
|
|
|
+ _, _, datas = bidsearch.GetPcBidSearchData(keywords, "", "", "", key_industry, "", "", "", "", "", "", 0, bidsearch.SearchPageSize_PC, true, nil, bidSearch_field_1, "", false)
|
|
|
if datas != nil && len(*datas) > limitcount {
|
|
|
*datas = (*datas)[0:limitcount]
|
|
|
}
|
|
|
+ // }
|
|
|
}
|
|
|
if datas != nil && len(*datas) > 0 {
|
|
|
for _, v := range *datas {
|
|
@@ -422,6 +464,7 @@ func (f *PcIndex) SearchResult(at, name string) error {
|
|
|
public.BidListConvert(industry, datas)
|
|
|
res = searchresulthtml(datas)
|
|
|
}
|
|
|
+ InSeoContent(datas)
|
|
|
f.T["res"] = res
|
|
|
f.T["area"] = area
|
|
|
f.T["stype"] = stype
|
|
@@ -573,3 +616,92 @@ func NumberFormat(i int) string {
|
|
|
}
|
|
|
return strings.Join(arr, ".") //将一系列字符串连接为一个字符串,之间用sep来分隔。
|
|
|
}
|
|
|
+
|
|
|
+/* InSeoContent determines, for each search-result record, whether it still needs to be stored.
|
|
|
+ A separate collection ("seobidding") records which bids have already had SEO data saved.
|
|
|
+*/
|
|
|
+func InSeoContent(arr *[]map[string]interface{}) {
|
|
|
+ for _, v := range *arr {
|
|
|
+ // Decode the article id ("bid") from the record's encoded _id (first element of the result).
|
|
|
+ bid := util.CommonDecodeArticle("indexcontent", util.ObjToString(v["_id"]))[0]
|
|
|
+ // Already recorded for this bid — nothing to do.
|
|
|
+ if data, ok := mongodb.FindOne("seobidding", map[string]interface{}{"bid": bid}); ok && len(*data) > 0 {
|
|
|
+ continue
|
|
|
+ }
|
|
|
+ // First time this bid is seen: record it with the current timestamp.
|
|
|
+ id := mongodb.Save("seobidding", map[string]interface{}{
|
|
|
+ "bid": bid,
|
|
|
+ "createtime": time.Now().Unix(),
|
|
|
+ })
|
|
|
+ // Save returns an empty id on failure; log it and continue with the remaining records.
|
|
|
+ if id == "" {
|
|
|
+ log.Printf("%v保存失败", bid)
|
|
|
+ }
|
|
|
+ }
|
|
|
+}
|
|
|
+
|
|
|
+/*
|
|
|
+//Tier: <100 results -> 20 items from the last 7 days
|
|
|
+//Tier: 100~500 -> 50 items from the last 3 days
|
|
|
+//Tier: 500~1500 -> 50 items from the current day
|
|
|
+// Tier: 1500+ -> 100 items from the current day
|
|
|
+
|
|
|
+func SeoDateCount(area, stype, industry, city, keywords, key_industry string) int {
|
|
|
+ query1 := `{"query": {"bool": {"must":[`
|
|
|
+ if area != "" {
|
|
|
+ query1 += `{"term":{"area":"` + area + `"}}`
|
|
|
+ } else if stype != "" {
|
|
|
+ stype = getstype(stype)
|
|
|
+ query1 += `{"terms":{"subtype":[`
|
|
|
+ for k, v := range strings.Split(stype, ",") {
|
|
|
+ if k > 0 {
|
|
|
+ query1 += `,`
|
|
|
+ }
|
|
|
+ query1 += `"` + v + `"`
|
|
|
+ }
|
|
|
+ query1 += `]}}`
|
|
|
+ // query1 += `{"term":{"subtype":"` + stype + `"}}`
|
|
|
+ } else if industry != "" {
|
|
|
+ query1 += `{"term":{"industry":"` + industry + `"}}`
|
|
|
+ } else if city != "" {
|
|
|
+ query1 += `{"term":{"city":"` + city + `"}}`
|
|
|
+ } else {
|
|
|
+ if keywords == "" {
|
|
|
+ return 0
|
|
|
+ }
|
|
|
+ }
|
|
|
+ //time
|
|
|
+ st, _ := time.ParseInLocation(util.Date_Short_Layout, util.NowFormat(util.Date_Short_Layout), time.Local)
|
|
|
+ st_unix := st.Unix()
|
|
|
+ et := time.Now()
|
|
|
+ et_unix := time.Date(et.Year(), et.Month(), et.Day()+1, 0, 0, 0, 0, time.Local).Unix()
|
|
|
+ query := getLastNewsQuery(area, fmt.Sprintf("%v_%v", st_unix, et_unix), stype, industry, city)
|
|
|
+ query1 += `,{"range": { "publishtime": { "from": "` + fmt.Sprint(st_unix) + `", "to": "` + fmt.Sprint(et_unix) + `" } }}`
|
|
|
+ query1 += `],"should": [],"minimum_should_match": 0}}}`
|
|
|
+
|
|
|
+ log.Println("query:", query)
|
|
|
+ log.Println("query1:", query1)
|
|
|
+
|
|
|
+ if keywords == "" {
|
|
|
+ count := elastic.Count(INDEX, TYPE, query1)
|
|
|
+ return int(count)
|
|
|
+ } else {
|
|
|
+ //关键词
|
|
|
+ publictimes := fmt.Sprintf("%v_%v", st.Unix(), et.Unix())
|
|
|
+ _, _, datas := bidsearch.GetPcBidSearchData(keywords, "", publictimes, "", key_industry, "", "", "", "", "", "", 0, bidsearch.SearchPageSize_PC, false, nil, bidSearch_field_1, "", false)
|
|
|
+ if datas != nil {
|
|
|
+ return len(*datas)
|
|
|
+ } else {
|
|
|
+ return 0
|
|
|
+ }
|
|
|
+ //count (searchvalue, area, publishtime, subtype, industry, minprice, maxprice, winner, buyerclass, hasBuyerTel, hasWinnerTel string, start, pageSize int, isGetCount bool, selectTypeArr []string, field, notkey string, ispayed bool)
|
|
|
+ qstr := bidsearch.GetSearchQuery(keywords, "", publictimes, "", "", "", bidSearch_field_1, bidsearch.GetBidSearchQuery("", publictimes, "", "", ""), "")
|
|
|
+ if qstr != "" {
|
|
|
+ count := elastic.Count(INDEX, TYPE, qstr)
|
|
|
+ return int(count)
|
|
|
+ }
|
|
|
+ }
|
|
|
+ return 0
|
|
|
+}
|
|
|
+
|
|
|
+func (f *PcIndex) Count() {
|
|
|
+
|
|
|
+}
|
|
|
+
|
|
|
+*/
|