util.go 16 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518
  1. package util
  2. import (
  3. "fmt"
  4. mgo "mongodb"
  5. qu "qfw/util"
  6. "regexp"
  7. "sort"
  8. sp "spiderutil"
  9. "strings"
  10. "time"
  11. "github.com/yuin/gopher-lua"
  12. )
// Role levels used for permission checks: administrator, reviewer, developer.
const Role_Admin, Role_Examine, Role_Dev = 3, 2, 1

var (
	//MgoE *mgo.MongodbSim // editor DB (87) — retired, kept for reference
	MgoEB *mgo.MongodbSim // editor DB (163)
	MgoS  *mgo.MongodbSim // spider/data DB
	// Province maps a province name to its city names; City maps a city name
	// to its district names. Both are populated by InitAreaCity.
	Province map[string][]string
	City     map[string][]string
	// DomainNameReg matches an "http"/"https" scheme followed by an ASCII or
	// full-width colon — used to detect https sites in spider code.
	DomainNameReg = regexp.MustCompile(`(http|https)[::]+`)
	// DownLoadReg captures a download(...) call so its arguments can be inspected.
	DownLoadReg = regexp.MustCompile(`download\(.*?\)`)
	// CodeTypeReg matches a charset argument (utf8 / utf-8 / gbk).
	CodeTypeReg = regexp.MustCompile(`(utf8|utf-8|gbk)`)
	// TitleFilterReg1 requires at least one Han character; TitleFilterReg2
	// spots pagination artifacts ("上/下一页") or "阅读次数" leaked into text.
	TitleFilterReg1 = regexp.MustCompile(`[\p{Han}]`)
	TitleFilterReg2 = regexp.MustCompile(`((上|下)一页|阅读次数)`)
	Area            []string // province-level area names ("全国" is added first)
	// DomainReg extracts the host between "//" and the next ":" or "/"
	// (ASCII or full-width); ReplaceReg strips those delimiter characters.
	DomainReg  = regexp.MustCompile(`(?://).+?(?:[::/])`)
	SymbolReg  = regexp.MustCompile("[,,\\s\u3000\u2003\u00a0]+") // commas and (full-width) whitespace
	ReplaceReg = regexp.MustCompile(`[]::/]+`)
	// CheckText is the expected spidercode/site/channel assignment snippet
	// that generated list-page code must contain (filled via fmt.Sprintf).
	CheckText = `item["spidercode"]="%s";item["site"]="%s";item["channel"]="%s"`
	// JsonDataMap whitelists the field names permitted inside jsondata.
	JsonDataMap = map[string]bool{
		"extweight":          true,
		"projecthref":        true,
		"sourcewebsite":      true,
		"sourcehref":         true,
		"area_city_district": true,
		"projectname":        true,
		"projectcode":        true,
		"approvalno":         true,
		"projectscope":       true,
		"item":               true,
		"buyer":              true,
		"agency":             true,
		"budget":             true,
		"buyer_info":         true,
		"buyerperson":        true,
		"buyertel":           true,
		"buyeraddr":          true,
		"projectaddr":        true,
		"publishdept":        true,
		"funds":              true,
		"paymenttype":        true,
		"projectscale":       true,
		"bidmethod":          true,
		"bidopentime":        true,
		"agency_info":        true,
		"agencyperson":       true,
		"agencytel":          true,
		"agencyaddr":         true,
		"isppp":              true,
		"winner":             true,
		"winneraddr":         true,
		"winnerperson":       true,
		"winnertel":          true,
		"bidamount":          true,
		"currency":           true,
		"experts":            true,
		"bidamounttype":      true,
		"contractname":       true,
		"countryprojectcode": true,
		"contractnumber":     true,
		"projectperiod":      true,
		"signaturedate":      true,
		"multipackage":       true,
		"package":            true,
		"supervisorrate":     true,
		"jsoncontent":        true,
		"purchasinglist":     true,
		"toptype":            true,
		"subtype":            true,
		"winnerorder":        true,
		"bidopendate":        true,
		"bidtype":            true,
	}
	// Bu is the spidercode suffix used when creating historical-collection spiders.
	Bu = "_bu"
)
  86. func InitMgo() {
  87. defer qu.Catch()
  88. //MgoE = &mgo.MongodbSim{
  89. // MongodbAddr: sp.Config.Dbaddr,
  90. // DbName: sp.Config.Dbname,
  91. // Size: 10,
  92. //}
  93. //MgoE.InitPool()
  94. MgoEB = &mgo.MongodbSim{
  95. MongodbAddr: sp.Config.BidEditor.Addr,
  96. DbName: sp.Config.BidEditor.Db,
  97. Size: sp.Config.BidEditor.Size,
  98. UserName: sp.Config.BidEditor.Username,
  99. Password: sp.Config.BidEditor.Password,
  100. }
  101. MgoEB.InitPool()
  102. MgoS = &mgo.MongodbSim{
  103. MongodbAddr: sp.Config.Dbaddr,
  104. DbName: sp.Config.Dbname2,
  105. Size: 10,
  106. }
  107. MgoS.InitPool()
  108. }
  109. //初始化省市行政区划信息
  110. func InitAreaCity() {
  111. //qu.ReadConfig("areacity.json", &Province)
  112. //Area = append(Area, "全国")
  113. //for area, _ := range Province {
  114. // if area == "全国" {
  115. // continue
  116. // }
  117. // Area = append(Area, area)
  118. //}
  119. Province = map[string][]string{}
  120. City = map[string][]string{}
  121. Area = append(Area, "全国")
  122. list, _ := MgoEB.Find("address", nil, nil, nil, false, -1, -1)
  123. for _, tmp := range *list {
  124. province := qu.ObjToString(tmp["province"])
  125. city := qu.ObjToString(tmp["city"])
  126. district := qu.ObjToString(tmp["district"])
  127. if province != "" && city == "" && district == "" { //area
  128. Area = append(Area, province)
  129. } else if province != "" && city != "" && district == "" { //city
  130. cityArr := Province[province]
  131. cityArr = append(cityArr, city)
  132. Province[province] = cityArr
  133. } else if province != "" && city != "" && district != "" { //district
  134. districtArr := City[city]
  135. districtArr = append(districtArr, district)
  136. City[city] = districtArr
  137. }
  138. }
  139. }
  140. //爬虫整体测试时校验爬虫代码
  141. func SpiderPassCheckLua(liststr, contentstr string, lua map[string]interface{}) (msg []string) {
  142. //校验含过滤方法stringFind但没有过滤注释“--关键词过滤”
  143. if strings.Contains(liststr, "stringFind") && !strings.Contains(liststr, "--关键词过滤") {
  144. msg = append(msg, "列表页代码有过滤方法stringFind但缺少注释:--关键词过滤")
  145. }
  146. if strings.Contains(contentstr, "--关键词过滤") && !strings.Contains(contentstr, "delete") {
  147. msg = append(msg, `三级页代码有过滤方法但缺少data["delete"]="true"`)
  148. }
  149. if !strings.Contains(contentstr, "s_title") {
  150. msg = append(msg, "三级页缺少s_title")
  151. }
  152. if !strings.Contains(contentstr, "getFileAttachmentsArrayWithTag") && !strings.Contains(contentstr, "downloadFile") {
  153. msg = append(msg, "三级页缺少下载附件方法")
  154. }
  155. //1.检测spidercode、site、channel
  156. if param, ok := lua["param_common"].([]interface{}); ok && len(param) >= 3 {
  157. spidercode := qu.ObjToString(param[0])
  158. site := qu.ObjToString(param[1])
  159. channel := qu.ObjToString(param[2])
  160. checkText := fmt.Sprintf(CheckText, spidercode, site, channel)
  161. if strings.Contains(liststr, `item["spidercode"]`) && !strings.Contains(liststr, checkText) {
  162. msg = append(msg, "检查代码spidercode、site、channel字段值")
  163. }
  164. }
  165. //2.检测https
  166. isHttps := false
  167. for _, text := range DomainNameReg.FindAllString(liststr, -1) {
  168. if strings.Contains(text, "https") {
  169. isHttps = true
  170. }
  171. }
  172. if isHttps {
  173. for tmpStr, tmpText := range map[string]string{"列表页": liststr, "三级页": contentstr} {
  174. downLoadText := DownLoadReg.FindString(tmpText)
  175. if downLoadText != "" {
  176. textArr := strings.Split(downLoadText, ",")
  177. if len(textArr) < 4 {
  178. msg = append(msg, tmpStr+"download方法添加下载参数")
  179. } else if len(textArr) == 4 {
  180. if !CodeTypeReg.MatchString(textArr[0]) || (textArr[1] != "true" && textArr[1] != "false") {
  181. msg = append(msg, tmpStr+"download方法添加下载参数")
  182. }
  183. }
  184. }
  185. }
  186. }
  187. //3.检测title
  188. if strings.Contains(liststr, `item["title"]="a"`) {
  189. if !strings.Contains(contentstr, `data["title"]`) {
  190. msg = append(msg, "检查代码title的完整性")
  191. }
  192. }
  193. // 4.检测sendListNum
  194. if !strings.Contains(liststr, "sendListNum") {
  195. msg = append(msg, "sendListNum方法缺失")
  196. }
  197. return
  198. }
  199. //爬虫整体测试时校验列表页和详情页内容
  200. func SpiderPassCheckListAndDetail(result map[int64][]map[string]interface{}, data map[string]interface{}) (msg []string) {
  201. msgMap := map[string]bool{}
  202. //校验列表页信息
  203. for _, list := range result {
  204. for _, l := range list {
  205. //校验title
  206. title := qu.ObjToString(l["title"])
  207. if !TitleFilterReg1.MatchString(title) {
  208. msgMap["列表页title中无汉字"] = true
  209. } else if TitleFilterReg2.MatchString(title) {
  210. msgMap["列表页title中含有上(下)一页"] = true
  211. }
  212. //校验发布时间
  213. publishtime := qu.ObjToString(l["publishtime"])
  214. if publishtime == "0" || publishtime == "" {
  215. msgMap["列表页publishtime取值异常"] = true
  216. } else {
  217. t, err := time.ParseInLocation(qu.Date_Full_Layout, publishtime, time.Local)
  218. if err != nil || t.Unix() <= 0 {
  219. msgMap["列表页publishtime取值异常"] = true
  220. }
  221. }
  222. }
  223. }
  224. if len(data) > 0 {
  225. //校验publishtime
  226. if l_np_publishtime, ok := data["l_np_publishtime"].(lua.LNumber); ok {
  227. if l_np_publishtime <= 0 || l_np_publishtime > 0 && l_np_publishtime < 1000000000 {
  228. msgMap["三级页publishtime取值异常"] = true
  229. }
  230. } else if l_np_publishtime, ok := data["l_np_publishtime"].(int64); ok {
  231. if l_np_publishtime <= 0 || l_np_publishtime > 0 && l_np_publishtime < 1000000000 {
  232. msgMap["三级页publishtime取值异常"] = true
  233. }
  234. } else {
  235. msgMap["三级页publishtime值类型异常"] = true
  236. }
  237. contenthtml := qu.ObjToString(data["contenthtml"])
  238. if strings.Contains(contenthtml, "img") {
  239. msgMap["contenthtml中含有img是否下载"] = true
  240. }
  241. if strings.Contains(contenthtml, "iframe") {
  242. msgMap["contenthtml中含有iframe是否下载"] = true
  243. }
  244. detail := qu.ObjToString(data["detail"])
  245. if TitleFilterReg2.MatchString(detail) {
  246. msgMap["三级页正文提取异常"] = true
  247. }
  248. //校验jsondata
  249. if jsondata, ok := data["jsondata"].(map[string]interface{}); ok && len(jsondata) > 0 {
  250. for field, _ := range jsondata {
  251. if !JsonDataMap[field] {
  252. msgMap["jsondata中"+field+"属性错误"] = true
  253. }
  254. }
  255. }
  256. }
  257. for text, _ := range msgMap {
  258. msg = append(msg, text)
  259. }
  260. return
  261. }
  262. //爬虫整体测试时校验列表页和详情页内容
  263. func SpiderPassCheckListAndDetail_back(list []map[string]interface{}, data map[string]interface{}) (msg []string) {
  264. if len(list) > 0 {
  265. p_zero := 0
  266. h_flag := true
  267. n_flag := true
  268. l_flag := true
  269. for _, l := range list {
  270. //校验title
  271. title := qu.ObjToString(l["title"])
  272. if !TitleFilterReg1.MatchString(title) && h_flag {
  273. msg = append(msg, "列表页title中无汉字")
  274. h_flag = false
  275. } else if TitleFilterReg2.MatchString(title) && n_flag {
  276. msg = append(msg, "列表页title中含有上(下)一页")
  277. n_flag = false
  278. }
  279. publishtime := qu.ObjToString(l["publishtime"])
  280. if publishtime == "0" {
  281. p_zero++
  282. } else if l_flag {
  283. t, _ := time.ParseInLocation(qu.Date_Full_Layout, publishtime, time.Local)
  284. if t.Unix() <= 0 {
  285. msg = append(msg, "列表页数据发布时间异常")
  286. l_flag = false
  287. }
  288. }
  289. }
  290. if len(data) > 0 {
  291. //校验publishtime
  292. if l_np_publishtime := data["l_np_publishtime"].(lua.LNumber); l_np_publishtime <= 0 {
  293. msg = append(msg, "三级页发布时间小于0")
  294. } else if p_zero == len(list) && l_np_publishtime == 0 {
  295. msg = append(msg, "三级页发布时间异常")
  296. }
  297. contenthtml := qu.ObjToString(data["contenthtml"])
  298. if strings.Contains(contenthtml, "img") {
  299. msg = append(msg, "contenthtml中含有img是否下载")
  300. }
  301. detail := qu.ObjToString(data["detail"])
  302. if TitleFilterReg2.MatchString(detail) {
  303. msg = append(msg, "三级页正文提取异常")
  304. }
  305. //校验jsondata
  306. if jsondata, ok := data["jsondata"].(map[string]interface{}); ok && len(jsondata) > 0 {
  307. for field, _ := range jsondata {
  308. if !JsonDataMap[field] {
  309. msg = append(msg, "jsondata中"+field+"属性错误")
  310. }
  311. }
  312. }
  313. }
  314. }
  315. return
  316. }
  317. func GetLuasInfoBySite(site, area, city, district string) (domain, status, event, platform, infotype, specialtype string, remarktime int64) {
  318. shelveUp := 0
  319. eventMap, platformMap := map[int]interface{}{}, map[string]interface{}{}
  320. infoformatMap := map[int]bool{}
  321. eventArr, platformArr, infoformatArr := []string{}, []string{}, []string{}
  322. //areaMap := map[string]int{}
  323. //areaCityMap := map[string]map[string]int{}
  324. //cityDistrictMap := map[string]map[string]int{}
  325. domainMap := map[string]bool{}
  326. domainArr := []string{}
  327. remarktime = time.Now().Unix()
  328. //luas, _ := MgoE.Find("luaconfig", `{"param_common.1":"`+site+`"}`, ``, `{"model":1,"event":1,"state":1,"platform":1,"param_common":1,"comeintime":1}`, false, -1, -1)
  329. luas, _ := MgoEB.Find("luaconfig", `{"site":"`+site+`"}`, ``, `{"projecthref":1,"model":1,"event":1,"state":1,"platform":1,"param_common":1,"comeintime":1,"infoformat":1}`, false, -1, -1)
  330. arr := [][]map[string]interface{}{}
  331. for _, l := range *luas {
  332. update := []map[string]interface{}{}
  333. set := map[string]interface{}{}
  334. if b, ok := l["projecthref"].(bool); ok && b { //爬虫采集的数据是流程性信息
  335. specialtype = "含流程数据"
  336. }
  337. //更新爬虫area、city、district
  338. if area != "" {
  339. set["model.area"] = area
  340. }
  341. if area == "全国" {
  342. set["model.city"] = ""
  343. set["model.district"] = ""
  344. } else if area != "" {
  345. if city != "" {
  346. set["model.city"] = city
  347. }
  348. if district != "" {
  349. set["model.district"] = district
  350. }
  351. }
  352. if len(set) > 0 {
  353. update = append(update, map[string]interface{}{"_id": l["_id"]})
  354. update = append(update, map[string]interface{}{"$set": set})
  355. arr = append(arr, update)
  356. }
  357. //remarktime
  358. if comeintime := qu.Int64All(l["comeintime"]); comeintime != int64(0) && comeintime < remarktime {
  359. remarktime = comeintime
  360. }
  361. //domain
  362. paramCommon := l["param_common"].([]interface{})
  363. if len(paramCommon) >= 12 {
  364. href := qu.ObjToString(paramCommon[11])
  365. domain := DomainReg.FindString(href)
  366. if domain != "" {
  367. domain = ReplaceReg.ReplaceAllString(domain, "")
  368. if !domainMap[domain] {
  369. domainArr = append(domainArr, domain)
  370. domainMap[domain] = true
  371. }
  372. }
  373. }
  374. //state、event、platform
  375. state := qu.IntAll(l["state"])
  376. event := qu.IntAll(l["event"])
  377. platform := qu.ObjToString(l["platform"])
  378. if state == 5 || state == 11 { //5:lua已上架;11:python已上线
  379. shelveUp++
  380. }
  381. eventMap[event] = true
  382. platformMap[platform] = true
  383. //infoformat
  384. infoformat := qu.IntAll(l["infoformat"])
  385. infoformatMap[infoformat] = true
  386. //area、city、district
  387. //if model, ok := l["model"].(map[string]interface{}); ok && model != nil {
  388. // a := qu.ObjToString(model["area"])
  389. // c := qu.ObjToString(model["city"])
  390. // d := qu.ObjToString(model["district"])
  391. // if a != "" {
  392. // areaMap[a] = areaMap[a] + 1
  393. // if c != "" {
  394. // if cityNum := areaCityMap[a]; cityNum != nil {
  395. // cityNum[c] = cityNum[c] + 1
  396. // } else {
  397. // areaCityMap[a] = map[string]int{c: 1}
  398. // }
  399. // if d != "" {
  400. // if distrctNum := cityDistrictMap[c]; distrctNum != nil {
  401. // distrctNum[d] = distrctNum[d] + 1
  402. // } else {
  403. // cityDistrictMap[c] = map[string]int{d: 1}
  404. // }
  405. // }
  406. // }
  407. //
  408. // }
  409. //}
  410. }
  411. //domain
  412. domain = strings.Join(domainArr, ";")
  413. for e, _ := range eventMap {
  414. eventArr = append(eventArr, fmt.Sprint(e))
  415. }
  416. event = strings.Join(eventArr, ",")
  417. for p, _ := range platformMap {
  418. platformArr = append(platformArr, p)
  419. }
  420. sort.Strings(platformArr)
  421. platform = strings.Join(platformArr, ",")
  422. for infoformat, _ := range infoformatMap {
  423. text := "招标"
  424. if infoformat == 2 {
  425. text = "拟建/审批"
  426. } else if infoformat == 3 {
  427. text = "产权"
  428. } else if infoformat == 4 {
  429. text = "舆情"
  430. }
  431. infoformatArr = append(infoformatArr, text)
  432. }
  433. sort.Strings(infoformatArr)
  434. infotype = strings.Join(infoformatArr, ",")
  435. //
  436. status = fmt.Sprintf("%d%s%d", shelveUp, "/", len(*luas))
  437. //批量更新
  438. if len(arr) > 0 {
  439. MgoEB.UpdateBulk("luaconfig", arr...)
  440. arr = [][]map[string]interface{}{}
  441. }
  442. //an, cn, dn := 0, 0, 0
  443. //for at, num := range areaMap {
  444. // if num > an {
  445. // area = at
  446. // an = num
  447. // }
  448. //}
  449. //if area != "" {
  450. // for ct, num := range areaCityMap[area] {
  451. // if num > cn {
  452. // city = ct
  453. // cn = num
  454. // }
  455. // }
  456. //}
  457. //if city != "" {
  458. // for dt, num := range cityDistrictMap[city] {
  459. // if num > dn {
  460. // district = dt
  461. // dn = num
  462. // }
  463. // }
  464. //}
  465. return
  466. }
  467. type StringValSorter struct {
  468. Keys []string
  469. Vals []string
  470. }
  471. func MapStringValueSort(m map[string]string) *StringValSorter {
  472. vs := NewStringValSorter(m)
  473. vs.Sort()
  474. return vs
  475. }
  476. func NewStringValSorter(m map[string]string) *StringValSorter {
  477. vs := &StringValSorter{
  478. Keys: make([]string, 0, len(m)),
  479. Vals: make([]string, 0, len(m)),
  480. }
  481. for k, v := range m {
  482. vs.Keys = append(vs.Keys, k)
  483. vs.Vals = append(vs.Vals, v)
  484. }
  485. return vs
  486. }
  487. func (vs *StringValSorter) Sort() {
  488. sort.Sort(vs)
  489. }
  490. func (vs *StringValSorter) Len() int {
  491. return len(vs.Vals)
  492. }
  493. func (vs *StringValSorter) Less(i, j int) bool {
  494. return vs.Vals[i] < vs.Vals[j]
  495. }
  496. func (vs *StringValSorter) Swap(i, j int) {
  497. vs.Vals[i], vs.Vals[j] = vs.Vals[j], vs.Vals[i]
  498. vs.Keys[i], vs.Keys[j] = vs.Keys[j], vs.Keys[i]
  499. }