@@ -14,11 +14,12 @@ import (
 	"sync"
 	"time"
 
+	. "app.yhyue.com/moapp/jybase/date"
+	. "app.yhyue.com/moapp/jybase/encrypt"
+
 	"github.com/gogf/gf/v2/util/gconv"
 
 	qutil "app.yhyue.com/moapp/jybase/common"
-	. "app.yhyue.com/moapp/jybase/date"
-	. "app.yhyue.com/moapp/jybase/encrypt"
 	elastic "app.yhyue.com/moapp/jybase/es"
 	mg "app.yhyue.com/moapp/jybase/mongodb"
 	"app.yhyue.com/moapp/jypkg/common/src/qfw/util/jy"
@@ -53,6 +54,7 @@ type SieveCondition struct {
 	Industry []string `json:"industry"` //industry
 	Keyword []KeyWord `json:"keywords"` //keywords
 	Buyer []string `json:"buyer"` //tendering unit (purchasing entity)
+	Agency []string `json:"agency"` //tendering agency
 	Buyerclass []string `json:"buyerclass"` //purchasing-entity type
 	HasBuyerTel string `json:"hasBuyertel"` //whether a purchasing-entity phone number is present
 	Winner []string `json:"winner"` //winning bidder
@@ -111,6 +113,23 @@ func DetailANDTitle(findfields string) bool {
 	return strings.Contains(findfields, "detail") && strings.Contains(findfields, "title")
 }
 
+var getMatchPhraseSql = func(field string, val ...string) (sql string) {
+	if len(val) == 0 {
+		return
+	}
+	var arr []string
+	for _, s := range val {
+		if s == "" {
+			continue
+		}
+		arr = append(arr, fmt.Sprintf(`{"match_phrase": {"%s": "%s"}}`, field, s))
+	}
+	if len(arr) == 0 {
+		return ""
+	}
+	return fmt.Sprintf(`{"bool": {"should": [%s],"minimum_should_match": 1}}`, strings.Join(arr, ","))
+}
+
 // Builds the query statement for data export.
 func getDataExportSql(scd *SieveCondition) string {
 	if len(scd.SelectIds) > 0 {
@@ -242,14 +261,25 @@ func getDataExportSql(scd *SieveCondition) string {
 	if len(scd.Industry) > 0 {
 		musts = append(musts, fmt.Sprintf(query_bool_must, "s_subscopeclass", `"`+strings.Join(scd.Industry, `","`)+`"`))
 	}
-	if len(scd.Buyer) > 0 {
-		musts = append(musts, fmt.Sprintf(query_bool_must, "buyer", `"`+strings.Join(scd.Buyer, `","`)+`"`))
-	}
 	if len(scd.Buyerclass) > 0 {
 		musts = append(musts, fmt.Sprintf(query_bool_must, "buyerclass", `"`+strings.Join(scd.Buyerclass, `","`)+`"`))
 	}
+	//P492 tender-search matching optimization (purchasing entity, etc.): buyer, agency and winner now use fuzzy search; data export is switched to fuzzy matching in step
+	//query_bool_should := `{"bool":{"should":[%s],"minimum_should_match": 1}}`
+	if len(scd.Buyer) > 0 {
+		if sql := getMatchPhraseSql("buyer.mbuyer", scd.Buyer...); sql != "" {
+			musts = append(musts, sql)
+		}
+	}
+	if len(scd.Agency) > 0 {
+		if sql := getMatchPhraseSql("agency.magency", scd.Agency...); sql != "" {
+			musts = append(musts, sql)
+		}
+	}
 	if len(scd.Winner) > 0 {
-		musts = append(musts, fmt.Sprintf(query_bool_must, "s_winner", `"`+strings.Join(scd.Winner, `","`)+`"`))
+		if sql := getMatchPhraseSql("s_winner.mwinner", scd.Winner...); sql != "" {
+			musts = append(musts, sql)
+		}
 	}
 	_minPrice := ""
 	_maxPrice := ""
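For reference (not part of the patch): getMatchPhraseSql builds one match_phrase clause per non-empty value and wraps them in a bool/should with "minimum_should_match": 1, so a document matches when any one of the supplied names matches as a phrase. A minimal standalone sketch of the same construction, using made-up values:

package main

import (
	"fmt"
	"strings"
)

// matchPhraseShould mirrors the getMatchPhraseSql helper added in the patch:
// one match_phrase per non-empty value, wrapped in a bool/should clause.
func matchPhraseShould(field string, vals ...string) string {
	var arr []string
	for _, v := range vals {
		if v == "" {
			continue
		}
		arr = append(arr, fmt.Sprintf(`{"match_phrase": {"%s": "%s"}}`, field, v))
	}
	if len(arr) == 0 {
		return ""
	}
	return fmt.Sprintf(`{"bool": {"should": [%s],"minimum_should_match": 1}}`, strings.Join(arr, ","))
}

func main() {
	// Prints:
	// {"bool": {"should": [{"match_phrase": {"buyer.mbuyer": "alpha"}},{"match_phrase": {"buyer.mbuyer": "beta"}}],"minimum_should_match": 1}}
	fmt.Println(matchPhraseShould("buyer.mbuyer", "alpha", "beta"))
}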
@@ -496,6 +526,7 @@ func GetSqlObjFromId(mongo mg.MongodbSim, _id string) *SieveCondition {
 		SelectType: qutil.ObjToString((*query)["selectType"]),
 		PublishTime: qutil.ObjToString((*query)["publishtime"]),
 		Buyer: getStringArrFromDbResult((*query)["buyer"]),
+		Agency: getStringArrFromDbResult((*query)["agency"]),
 		Buyerclass: getStringArrFromDbResult((*query)["buyerclass"]),
 		HasBuyerTel: qutil.ObjToString((*query)["hasBuyertel"]),
 		Winner: getStringArrFromDbResult((*query)["winner"]),
@@ -629,23 +660,30 @@ func GetDataExportSelectReallyCountFromEs(ids []string) int64 {
 	wait := &sync.WaitGroup{}
 	var total int64
 	var lock sync.Mutex
-	for _, v := range SplitArray(ids, 200) {
-		pool <- true
-		wait.Add(1)
-		go func(arr []string) {
-			defer func() {
-				wait.Done()
-				<-pool
-			}()
-			query := fmt.Sprintf(`{"query":{"bool":{"must":[{"terms":{"id":["%s"]}}]}}}`, strings.Join(arr, "\",\""))
-			tCount := elastic.Count(INDEX, TYPE, query)
-			if tCount > 0 {
-				lock.Lock()
-				total += tCount
-				lock.Unlock()
-			}
-			return
-		}(v)
+	var idArr []string
+	for i, id := range ids {
+		idArr = append(idArr, id)
+		if len(idArr) == 200 || i+1 == len(ids) {
+			pool <- true
+			wait.Add(1)
+			go func(arr []string) {
+				defer func() {
+					wait.Done()
+					<-pool
+				}()
+				log.Println("GetDataExportSelectReallyCountFromEs===", arr[0])
+				query := fmt.Sprintf(`{"query":{"bool":{"must":[{"terms":{"id":["%s"]}}]}}}`, strings.Join(arr, "\",\""))
+				tCount := elastic.Count(INDEX, TYPE, query)
+				if tCount > 0 {
+					lock.Lock()
+					total += tCount
+					lock.Unlock()
+				}
+				return
+			}(idArr)
+			idArr = []string{}
+		}
+
 	}
 	wait.Wait()
 	log.Printf("GetDataExportSelectReallyCount 选择数据共%d条记录,实际查询%d条\n", len(ids), total)
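For reference (not part of the patch): this change, repeated in the Mongo counter and the two result fetchers below, replaces the SplitArray fan-out with inline batching — ids are accumulated into a buffer that is flushed every 200 items or on the last element. In the two count functions each flushed batch still goes to a bounded goroutine pool; the two result fetchers below process batches sequentially. A minimal standalone sketch of the pooled variant, with batch size, pool size and handler chosen only for illustration:

package main

import (
	"fmt"
	"sync"
)

// forEachBatch collects ids into batches of up to 200 and hands each batch to
// handle on its own goroutine, with at most 10 batches in flight at a time.
func forEachBatch(ids []string, handle func(batch []string)) {
	pool := make(chan bool, 10)
	wait := &sync.WaitGroup{}
	var batch []string
	for i, id := range ids {
		batch = append(batch, id)
		if len(batch) == 200 || i+1 == len(ids) {
			pool <- true
			wait.Add(1)
			go func(arr []string) {
				defer func() {
					wait.Done()
					<-pool
				}()
				handle(arr)
			}(batch)
			batch = []string{} // start a fresh buffer; the goroutine keeps its own slice
		}
	}
	wait.Wait()
}

func main() {
	ids := make([]string, 450)
	for i := range ids {
		ids[i] = fmt.Sprintf("id-%d", i)
	}
	forEachBatch(ids, func(arr []string) {
		fmt.Println("batch of", len(arr)) // 200, 200 and 50, in some order
	})
}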
@@ -666,44 +704,50 @@ func GetDataExportSelectReallyCountFromMongo(bid mg.MongodbSim, biddingName stri
 	)
 	pool := make(chan bool, 10)
 	wait := &sync.WaitGroup{}
-
-	for _, i2 := range SplitArray(ids, 200) {
-		pool <- true
-		wait.Add(1)
-		go func(arr []string) {
-			defer func() {
-				wait.Done()
-				<-pool
-			}()
-			lenNum := int64(len(arr))
-			var (
-				queryIds   []interface{}
-				num1, num2 int64
-				err        error
-			)
-			for _, idStr := range arr {
-				queryIds = append(queryIds, mg.StringTOBsonId(idStr))
-			}
-			num1, err = sess.DB(biddingName).C("bidding").Find(map[string]interface{}{"_id": map[string]interface{}{
-				"$in": queryIds,
-			}}).Count()
-			if err == nil {
-				if num1 == lenNum {
-					lock.Lock()
-					count += num1
-					lock.Unlock()
-					return
+	var idArr []string
+	for i, id := range ids {
+		idArr = append(idArr, id)
+		if len(idArr) == 200 || i+1 == len(ids) {
+			pool <- true
+			wait.Add(1)
+			go func(arr []string) {
+				defer func() {
+					wait.Done()
+					<-pool
+				}()
+				log.Println("GetDataExportSelectReallyCountFromMongo===", arr[0])
+				lenNum := int64(len(arr))
+				var (
+					queryIds   []interface{}
+					num1, num2 int64
+					err        error
+				)
+				for _, idStr := range arr {
+					queryIds = append(queryIds, mg.StringTOBsonId(idStr))
 				}
-			}
-			num2, err = sess.DB(biddingName).C("bidding_back").Find(map[string]interface{}{"_id": map[string]interface{}{
+				num1, err = sess.DB(biddingName).C("bidding").Find(map[string]interface{}{"_id": map[string]interface{}{
 					"$in": queryIds,
 				}}).Count()
 				if err == nil {
-				lock.Lock()
-				count += qutil.If(num2+num1 >= lenNum, lenNum, num2+num1).(int64)
-				lock.Unlock()
+					if num1 == lenNum {
+						lock.Lock()
+						count += num1
+						lock.Unlock()
+						return
+					}
+					num2, err = sess.DB(biddingName).C("bidding_back").Find(map[string]interface{}{"_id": map[string]interface{}{
+						"$in": queryIds,
+					}}).Count()
+					if err == nil {
+						lock.Lock()
+						count += qutil.If(num2+num1 >= lenNum, lenNum, num2+num1).(int64)
+						lock.Unlock()
+					}
 				}
-			}
-		}(i2)
+			}(idArr)
+			idArr = []string{}
+		}
+
 	}
 	wait.Wait()
 	return qutil.If(count > 0, count, -2).(int64)
@@ -717,19 +761,15 @@ func GetDataExportSelectResultFromEs(bidding mg.MongodbSim, biddingName string,
 	if checkCount == -1 && len(scd.SelectIds) > 500 {
 		scd.SelectIds = scd.SelectIds[:500]
 	}
-	pool := make(chan bool, 10)
-	wait := &sync.WaitGroup{}
-	var lock sync.Mutex
+
 	returnLsit := make([]map[string]interface{}, 0, len(scd.SelectIds))
-	for _, v := range SplitArray(scd.SelectIds, 200) {
-		pool <- true
-		wait.Add(1)
-		go func(arr []string) error {
-			defer func() {
-				wait.Done()
-				<-pool
-			}()
-			query := fmt.Sprintf(`{"query":{"bool":{"must":[{"terms":{"id":["%s"]}}]}},"_source": [%s],"size":%d}`, strings.Join(arr, "\",\""), bidField, len(arr))
+	var idArr []string
+	for i, id := range scd.SelectIds {
+		idArr = append(idArr, id)
+		if len(idArr) == 200 || i+1 == len(scd.SelectIds) {
+
+			log.Println(scd.Id, "GetDataExportSelectResultFromEs===", idArr[0])
+			query := fmt.Sprintf(`{"query":{"bool":{"must":[{"terms":{"id":["%s"]}}]}},"_source": [%s],"size":%d}`, strings.Join(idArr, "\",\""), bidField, len(idArr))
 			log.Println("GetDataExportSelectResultFromEs 数据流量包 es count 信息查询:", query)
 			data := *elastic.Get(INDEX, TYPE, query)
 			if data != nil && len(data) > 0 {
@@ -739,15 +779,12 @@ func GetDataExportSelectResultFromEs(bidding mg.MongodbSim, biddingName string,
 				if detail != "" {
 					bv["detail"] = contentfilterReg.ReplaceAllString(detail, "")
 				}
-				lock.Lock()
 				returnLsit = append(returnLsit, bv)
-				lock.Unlock()
 			}
 		}
-			return nil
-		}(v)
+			idArr = []string{}
+		}
 	}
-	wait.Wait()
 	if len(returnLsit) == checkCount || checkCount == -1 || checkCount == -2 {
 		return &returnLsit, nil
 	} else {
@@ -769,20 +806,18 @@ func GetDataExportSelectResultFromMongoDb(bidding mg.MongodbSim, biddingName str
 	if checkCount == -1 && len(scd.SelectIds) > 500 {
 		scd.SelectIds = scd.SelectIds[:500]
 	}
-	pool := make(chan bool, 10)
-	wait := &sync.WaitGroup{}
-	var lock sync.Mutex
 	returnLsit := make([]map[string]interface{}, 0, len(scd.SelectIds))
-	for _, v := range SplitArray(scd.SelectIds, 200) {
-		pool <- true
-		wait.Add(1)
-		go func(arr []string) error {
-			defer func() {
-				wait.Done()
-				<-pool
-			}()
-			var queryIds []interface{}
-			for _, idStr := range arr {
+	var idArr []string
+	for i, id := range scd.SelectIds {
+		idArr = append(idArr, id)
+		if len(idArr) == 200 || i+1 == len(scd.SelectIds) {
+
+			log.Println(scd.Id, "GetDataExportSelectResultFromMongoDb===", idArr[0])
+			var (
+				queryIds []interface{}
+				count    int
+			)
+			for _, idStr := range idArr {
 				queryIds = append(queryIds, mg.StringTOBsonId(idStr))
 			}
 			iter := sess.DB(biddingName).C("bidding").Select(selectMap).Find(map[string]interface{}{"_id": map[string]interface{}{
@@ -794,66 +829,35 @@ func GetDataExportSelectResultFromMongoDb(bidding mg.MongodbSim, biddingName str
 			if detail != "" {
 				m["detail"] = contentfilterReg.ReplaceAllString(detail, "")
 			}
-			lock.Lock()
+			count++
 			returnLsit = append(returnLsit, m)
-			lock.Unlock()
 			m = make(map[string]interface{})
 		}
-		iter_back := sess.DB(biddingName).C("bidding_back").Select(selectMap).Find(map[string]interface{}{"_id": map[string]interface{}{
-			"$in": queryIds,
-		}}).Iter()
-		for m := make(map[string]interface{}); iter_back.Next(&m); {
-			m["_id"] = mg.BsonIdToSId(m["_id"])
-			detail, _ := m["detail"].(string)
-			if detail != "" {
-				m["detail"] = contentfilterReg.ReplaceAllString(detail, "")
+		if count != len(idArr) {
+			iter_back := sess.DB(biddingName).C("bidding_back").Select(selectMap).Find(map[string]interface{}{"_id": map[string]interface{}{
+				"$in": queryIds,
+			}}).Iter()
+			for m := make(map[string]interface{}); iter_back.Next(&m); {
+				m["_id"] = mg.BsonIdToSId(m["_id"])
+				detail, _ := m["detail"].(string)
+				if detail != "" {
+					m["detail"] = contentfilterReg.ReplaceAllString(detail, "")
+				}
+				returnLsit = append(returnLsit, m)
+				m = make(map[string]interface{})
 			}
 		}
-		lock.Lock()
-		returnLsit = append(returnLsit, m)
-		lock.Unlock()
-		m = make(map[string]interface{})
 		}
-		return nil
-	}(v)
+
+		idArr = []string{}
+		}
 	}
-	wait.Wait()
-	if len(returnLsit) == checkCount || checkCount == -1 {
+	if len(returnLsit) == checkCount || checkCount == -1 || checkCount == -2 {
 		return &returnLsit, nil
 	} else {
 		return nil, fmt.Errorf("GetDataExportSelectResultFromMongoDb 选择数据导出异常 数据量期望%d条,实际查询%d条", checkCount, len(returnLsit))
 	}
 }
 
-// SplitArray splits a slice of strings into chunks
-func SplitArray(arr []string, num int64) [][]string {
-	max := int64(len(arr))
-	//if the slice is no larger than the chunk size, return it as a single chunk
-	if max <= num {
-		return [][]string{arr}
-	}
-	//work out how many chunks are needed
-	var quantity int64
-	if max%num == 0 {
-		quantity = max / num
-	} else {
-		quantity = (max / num) + 1
-	}
-	//declare the resulting two-dimensional slice
-	var segments = make([][]string, 0)
-	//declare the start and end indices used for slicing
-	var start, end, i int64
-	for i = 1; i <= quantity; i++ {
-		end = i * num
-		if i != quantity {
-			segments = append(segments, arr[start:end])
-		} else {
-			segments = append(segments, arr[start:])
-		}
-		start = i * num
-	}
-	return segments
-}
-
 func GetDataExportIds(elasticAddress string, scd *SieveCondition, checkCount int) ([]string, error) {
 	defer qutil.Catch()
 	if scd == nil {
@@ -1046,6 +1050,11 @@ func FormatExportData(entmg mg.MongodbSim, data *[]map[string]interface{}, webdo
 	if len(encry) > 0 {
 		isEncry = true
 	}
+	sort.Slice(*data, func(i, j int) bool {
+		time1 := qutil.Int64All((*data)[i]["publishtime"])
+		time2 := qutil.Int64All((*data)[j]["publishtime"])
+		return time1 > time2
+	})
 	var entCacheMap = map[string]map[string]interface{}{}
 	for index := 0; index < len(*data); index++ {
 		v := (*data)[index]
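For reference (not part of the patch): the added sort.Slice call orders export rows newest-first by publishtime before formatting; qutil.Int64All is assumed here to coerce the stored value to an int64. A minimal standalone sketch of the same ordering, with a plain type assertion in its place:

package main

import (
	"fmt"
	"sort"
)

func main() {
	data := []map[string]interface{}{
		{"_id": "a", "publishtime": int64(1700000000)},
		{"_id": "b", "publishtime": int64(1710000000)},
		{"_id": "c", "publishtime": int64(1690000000)},
	}
	// Larger publishtime sorts first, so the newest records lead the export.
	sort.Slice(data, func(i, j int) bool {
		t1, _ := data[i]["publishtime"].(int64)
		t2, _ := data[j]["publishtime"].(int64)
		return t1 > t2
	})
	for _, m := range data {
		fmt.Println(m["_id"], m["publishtime"]) // b, a, c
	}
}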