@@ -2,6 +2,7 @@
 package front
 
 import (
+	"commutil"
 	"fmt"
 	"github.com/go-xweb/httpsession"
 	"github.com/go-xweb/xweb"
@@ -25,23 +26,24 @@ import (
 
 type Front struct {
 	*xweb.Action
-	login xweb.Mapper `xweb:"/"`
-	logout xweb.Mapper `xweb:"/center/logout"` //logout
-	loadIndex xweb.Mapper `xweb:"/center"` //control center
-	spidernew xweb.Mapper `xweb:"/center/spider"` //create a new spider
-	reg xweb.Mapper `xweb:"/center/reg"` //register a spider
-	assign xweb.Mapper `xweb:"/center/user/assign"` //assign spiders
-	auditExport xweb.Mapper `xweb:"/center/user/auditexport"` //export the audit log
-	loadSpider xweb.Mapper `xweb:"/center/spider/edit/(.*)"` //load a spider
-	viewSpider xweb.Mapper `xweb:"/center/spider/view/(.*)"` //view a spider
-	downSpider xweb.Mapper `xweb:"/center/spider/download/(.*)"` //download a spider
-	upState xweb.Mapper `xweb:"/center/spider/upstate"` //update spider state
-	assort xweb.Mapper `xweb:"/center/spider/assort"` //reviewer categories (no release, login required, cannot process, needs deletion)
-	batchShelves xweb.Mapper `xweb:"/center/spider/batchShelves"` //batch list/delist
-	checktime xweb.Mapper `xweb:"/center/spider/checktime"` //spider check
-	disables xweb.Mapper `xweb:"/center/spider/disable"` //batch invalidate
-	changeEvent xweb.Mapper `xweb:"/center/changeEvent"` //node update
-	getJson xweb.Mapper `xweb:"/center/spider/json"` //
+	login xweb.Mapper `xweb:"/"`
+	logout xweb.Mapper `xweb:"/center/logout"` //logout
+	loadIndex xweb.Mapper `xweb:"/center"` //script list
+	monitorCenter xweb.Mapper `xweb:"/center/luaMonitor"` //lua script management center
+	spidernew xweb.Mapper `xweb:"/center/spider"` //create a new spider
+	reg xweb.Mapper `xweb:"/center/reg"` //register a spider
+	assign xweb.Mapper `xweb:"/center/user/assign"` //assign spiders
+	auditExport xweb.Mapper `xweb:"/center/user/auditexport"` //export the audit log
+	loadSpider xweb.Mapper `xweb:"/center/spider/edit/(.*)"` //load a spider
+	viewSpider xweb.Mapper `xweb:"/center/spider/view/(.*)"` //view a spider
+	downSpider xweb.Mapper `xweb:"/center/spider/download/(.*)"` //download a spider
+	upState xweb.Mapper `xweb:"/center/spider/upstate"` //update spider state
+	assort xweb.Mapper `xweb:"/center/spider/assort"` //reviewer categories (no release, login required, cannot process, needs deletion)
+	batchShelves xweb.Mapper `xweb:"/center/spider/batchShelves"` //batch list/delist
+	checktime xweb.Mapper `xweb:"/center/spider/checktime"` //spider check
+	disables xweb.Mapper `xweb:"/center/spider/disable"` //batch invalidate
+	changeEvent xweb.Mapper `xweb:"/center/changeEvent"` //node update
+	getJson xweb.Mapper `xweb:"/center/spider/json"` //
 	//delRedis xweb.Mapper `xweb:"/center/spider/delRedis"` //clean up Redis
 	updateESP xweb.Mapper `xweb:"/center/spider/updateesp"` //update a spider's node/state/platform
 	updatePendState xweb.Mapper `xweb:"/center/spider/updatePendState"` //update a spider's pending state
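
Note on the routing table above: xweb dispatches each lowercase xweb.Mapper field to the exported method of the same name on the action struct, so the new monitorCenter field implies a MonitorCenter handler elsewhere in this package. A minimal sketch of that convention, with a template name that is assumed rather than taken from this diff:

// Sketch only, assuming the usual go-xweb/xweb convention: requests matching
// the `xweb:"/center/luaMonitor"` tag on the monitorCenter field are routed
// to this exported method on *Front.
func (f *Front) MonitorCenter() error {
	// The template name is illustrative; the real handler lives elsewhere
	// in the front package.
	return f.Render("luaMonitor.html")
}
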
@@ -75,7 +77,6 @@ type Front struct {
 	StepRe3 StepRe3
 	U U
 
-	luaList xweb.Mapper `xweb:"/center/lualist.html"` //script management
 	user xweb.Mapper `xweb:"/center/user.html"` //user management
 	delUser xweb.Mapper `xweb:"/center/user/del"` //delete a user
 	updateUser xweb.Mapper `xweb:"/center/user/updateUser"` //update user info
@@ -95,6 +96,10 @@ type Front struct {
 	//spider claiming
 	claimCode xweb.Mapper `xweb:"/center/claim/claimcode"` //claim a spider
 	returnCode xweb.Mapper `xweb:"/center/claim/returncode"` //return a spider
+
+	//common spiders
+	commCodesCenter xweb.Mapper `xweb:"/center/commMonitor"` //common spider management center
+	updateCodeBase xweb.Mapper `xweb:"/center/commspider/updatecodebase"` //
 }
 
 const Sp_state_0, Sp_state_1, Sp_state_2, Sp_state_3, Sp_state_4, Sp_state_5, Sp_state_6, Sp_state_7, Sp_state_8, Sp_state_9, Sp_state_10 = 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 //0 pending completion, 1 pending review, 2 rejected, 3 published, 4 invalidated, 5 listed, 6 delisted, 7 no release, 8 login required, 9 moved to python, 10 deleted
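
The single-line constant declaration above is easier to read in iota form; an equivalent sketch (not part of the change), with the state meanings carried over from the original comment:

// Equivalent to the Sp_state_* constants above.
const (
	Sp_state_0  = iota // 0 pending completion
	Sp_state_1         // 1 pending review
	Sp_state_2         // 2 rejected
	Sp_state_3         // 3 published
	Sp_state_4         // 4 invalidated
	Sp_state_5         // 5 listed
	Sp_state_6         // 6 delisted
	Sp_state_7         // 7 no release
	Sp_state_8         // 8 login required
	Sp_state_9         // 9 moved to python
	Sp_state_10        // 10 deleted
)
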
@@ -559,7 +564,9 @@ func saveLua(o map[string]interface{}) bool {
 
 	ok := spider.SaveSpider(o["code"].(string), param)
 	if ok { //save succeeded: check whether the newly imported spider's site exists, otherwise add a site record
-		site, _ := u.MgoEB.FindOneByField("site", map[string]interface{}{"site": o["name"]}, map[string]interface{}{"important": 1})
+		site, _ := u.MgoEB.FindOneByField("site", map[string]interface{}{"site": o["name"]}, map[string]interface{}{"important": 1, "site_toptype": 1, "site_subtype": 1})
+		var site_toptype string
+		var site_subtype string
 		if len(*site) == 0 {
 			qu.Debug("补充站点信息:", o["name"])
 			domain := u.DomainReg.FindString(qu.ObjToString(AutoTpl["Base.SpiderTargetChannelUrl"]))
@@ -605,21 +612,40 @@ func saveLua(o map[string]interface{}) bool {
 				"type_plate": "",
 			}
 			u.MgoEB.Save("site", siteInfo)
-		} else if qu.IntAll((*site)["important"]) == 1 { //key website
-			u.MgoEB.Update("luaconfig", map[string]interface{}{"code": o["code"]}, map[string]interface{}{"$set": map[string]interface{}{"spiderimportant": true}}, false, false)
-			u.MgoEB.Update("site_code_baseinfo", map[string]interface{}{"spidercode": o["code"]}, map[string]interface{}{"$set": map[string]interface{}{
-				"site": o["name"],
-				"channel": o["channel"],
-				"spidercode": o["code"],
-				"platform": o["platform"],
-				"modifyuser": "",
-				"state": 0,
-			}}, true, false)
+		} else {
+			site_toptype = qu.ObjToString((*site)["site_toptype"])
+			site_subtype = qu.ObjToString((*site)["site_subtype"])
+			if qu.IntAll((*site)["important"]) == 1 { //key website
+				u.MgoEB.Update("luaconfig", map[string]interface{}{"code": o["code"]}, map[string]interface{}{"$set": map[string]interface{}{"spiderimportant": true}}, false, false)
+				u.MgoEB.Update("site_code_baseinfo", map[string]interface{}{"spidercode": o["code"]}, map[string]interface{}{"$set": map[string]interface{}{
+					"site": o["name"],
+					"channel": o["channel"],
+					"spidercode": o["code"],
+					"platform": o["platform"],
+					"modifyuser": "",
+					"state": 0,
+				}}, true, false)
+			}
 		}
 		//generate claim logs
 		if len(claimLog) > 0 {
 			u.MgoEB.Save("lua_logs_claim", claimLog)
 		}
+		//basic spider attributes
+		u.MgoEB.Save("luaconfig_cfg", map[string]interface{}{
+			"site_toptype": site_toptype,
+			"site_subtype": site_subtype,
+			"infoformat": infoformat,
+			"site": o["name"],
+			"channel": o["channel"],
+			"code": o["code"],
+			"platform": o["platform"],
+			"priority": o["priority"],
+			"href": o["channeladdr"],
+			"property_top": "",
+			"property_sub": "",
+			"property_third": "",
+		})
 	}
 	return ok
 }
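
After this change, saveLua and saveLuaAi carry identical site-lookup logic. A hedged sketch of a helper both could share, built only from calls already used above (the name siteTypes is hypothetical and not part of this diff):

// Hypothetical helper: returns the site's top/sub type and whether it is
// flagged important; found is false when no "site" record exists yet (the
// branch above that inserts a new site document).
func siteTypes(name interface{}) (topType, subType string, important, found bool) {
	site, _ := u.MgoEB.FindOneByField("site",
		map[string]interface{}{"site": name},
		map[string]interface{}{"important": 1, "site_toptype": 1, "site_subtype": 1})
	if site == nil || len(*site) == 0 {
		return "", "", false, false
	}
	return qu.ObjToString((*site)["site_toptype"]),
		qu.ObjToString((*site)["site_subtype"]),
		qu.IntAll((*site)["important"]) == 1,
		true
}
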
@@ -705,11 +731,11 @@ func (f *Front) ImportAi() {
 		if infoformat == 5 {
 			stype = "news"
 		}
-		u.CommCodesCache <- map[string]string{
+		commutil.CommCodesCache <- map[string]interface{}{
 			"code": code,
 			"href": href,
-			"site": qu.ObjToString(o["name"]),
-			"channel": qu.ObjToString(o["channel"]),
+			"site": o["name"],
+			"channel": o["channel"],
 			"stype": stype,
 		}
 	}
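
The send above implies that commutil.CommCodesCache is a channel of map[string]interface{} drained by a background worker. A sketch of that assumed shape (buffer size, init placement, and the consumer body are illustrative only, not taken from this diff):

// Assumed declaration inside the commutil package, inferred from the send
// site in ImportAi.
var CommCodesCache = make(chan map[string]interface{}, 1024)

func init() {
	go func() {
		for rec := range CommCodesCache {
			// persist or index the common-spider record (project-specific);
			// rec carries code, href, site, channel and stype.
			_ = rec
		}
	}()
}
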
@@ -842,7 +868,9 @@ func saveLuaAi(o map[string]interface{}) bool {
 	}
 	ok := spider.SaveSpider(o["code"].(string), param) //save the spider
 	if ok { //save succeeded: check whether the newly imported spider's site exists, otherwise add a site record
-		site, _ := u.MgoEB.FindOneByField("site", map[string]interface{}{"site": o["name"]}, map[string]interface{}{"important": 1})
+		site, _ := u.MgoEB.FindOneByField("site", map[string]interface{}{"site": o["name"]}, map[string]interface{}{"important": 1, "site_toptype": 1, "site_subtype": 1})
+		var site_toptype string
+		var site_subtype string
 		if len(*site) == 0 {
 			qu.Debug("补充站点信息:", o["name"])
 			domain := u.DomainReg.FindString(qu.ObjToString(AutoTpl["Base.SpiderTargetChannelUrl"]))
@@ -888,21 +916,40 @@ func saveLuaAi(o map[string]interface{}) bool {
 				"type_plate": "",
 			}
 			u.MgoEB.Save("site", siteInfo)
-		} else if qu.IntAll((*site)["important"]) == 1 { //key website
-			u.MgoEB.Update("luaconfig", map[string]interface{}{"code": o["code"]}, map[string]interface{}{"$set": map[string]interface{}{"spiderimportant": true}}, false, false)
-			u.MgoEB.Update("site_code_baseinfo", map[string]interface{}{"spidercode": o["code"]}, map[string]interface{}{"$set": map[string]interface{}{
-				"site": o["name"],
-				"channel": o["channel"],
-				"spidercode": o["code"],
-				"platform": o["platform"],
-				"modifyuser": "",
-				"state": 0,
-			}}, true, false)
+		} else {
+			site_toptype = qu.ObjToString((*site)["site_toptype"])
+			site_subtype = qu.ObjToString((*site)["site_subtype"])
+			if qu.IntAll((*site)["important"]) == 1 { //key website
+				u.MgoEB.Update("luaconfig", map[string]interface{}{"code": o["code"]}, map[string]interface{}{"$set": map[string]interface{}{"spiderimportant": true}}, false, false)
+				u.MgoEB.Update("site_code_baseinfo", map[string]interface{}{"spidercode": o["code"]}, map[string]interface{}{"$set": map[string]interface{}{
+					"site": o["name"],
+					"channel": o["channel"],
+					"spidercode": o["code"],
+					"platform": o["platform"],
+					"modifyuser": "",
+					"state": 0,
+				}}, true, false)
+			}
 		}
 		//generate claim logs
 		if len(claimLog) > 0 {
 			u.MgoEB.Save("lua_logs_claim", claimLog)
 		}
+		//basic spider attributes
+		u.MgoEB.Save("luaconfig_cfg", map[string]interface{}{
+			"site_toptype": site_toptype,
+			"site_subtype": site_subtype,
+			"infoformat": infoformat,
+			"site": o["name"],
+			"channel": o["channel"],
+			"code": o["code"],
+			"platform": o["platform"],
+			"priority": o["priority"],
+			"href": o["channeladdr"],
+			"property_top": "",
+			"property_sub": "",
+			"property_third": "",
+		})
 	}
 	return ok
 }