Эх сурвалжийг харах

验证码服务新增代理ip

maxiaoshan 2 жил өмнө
parent
commit
4217f49cd9

+ 2 - 0
src/analysiscode/config.json

@@ -5,6 +5,8 @@
 	"servercodefreeaddress": "http://123.57.163.80:2119/v1/images/verify",
 	"username":"jianyu001",
 	"password": "123qwe!A",
+	"proxyaddr": "http://cc.spdata.jianyu360.com/crawl/proxy/socks5/fetch",
+	"proxyauthor": "Basic amlhbnl1MDAxOjEyM3F3ZSFB",
 	"timeout": 100
 }
 

+ 20 - 4
src/analysiscode/server.go

@@ -70,7 +70,10 @@ func getCodeByFree(path, stype, head, cookie string) (code string, respheader ht
 	defer util.Catch()
 	client := req.C().
 		SetTimeout(spiderutil.Config.TimeOut * time.Second).
-		SetTLSClientConfig(&tls.Config{InsecureSkipVerify: true}) //忽略证书验证
+		SetTLSClientConfig(&tls.Config{
+			Renegotiation:      tls.RenegotiateOnceAsClient,
+			InsecureSkipVerify: true,
+		}) //忽略证书验证
 	headers := map[string]string{}
 	if head != "" {
 		json.Unmarshal([]byte(head), &headers)
@@ -79,7 +82,12 @@ func getCodeByFree(path, stype, head, cookie string) (code string, respheader ht
 	if cookie != "" {
 		json.Unmarshal([]byte(cookie), &cookies)
 	}
-	for times := 1; times <= 3; times++ { //重试三次
+	for times := 1; times <= 6; times++ { //重试六次
+		if times > 4 { //重试第5次开始,使用代理ip
+			proxyIp := spiderutil.GetProxyAddr(spiderutil.Config.ProxyAddr, spiderutil.Config.ProxyAuthor) //获取代理地址
+			util.Debug("proxy:", proxyIp)
+			client.SetProxyURL(proxyIp) //设置代理IP
+		}
 		request := client.R()
 		if len(headers) > 0 {
 			request.SetHeaders(headers)
@@ -129,7 +137,10 @@ func getCodeByPay(path, stype, head, cookie string) (code string, respheader htt
 	defer util.Catch()
 	client := req.C().
 		SetTimeout(spiderutil.Config.TimeOut * time.Second).
-		SetTLSClientConfig(&tls.Config{InsecureSkipVerify: true}) //忽略证书验证
+		SetTLSClientConfig(&tls.Config{
+			Renegotiation:      tls.RenegotiateOnceAsClient,
+			InsecureSkipVerify: true,
+		}) //忽略证书验证
 	headers := map[string]string{}
 	if head != "" {
 		json.Unmarshal([]byte(head), &headers)
@@ -138,7 +149,12 @@ func getCodeByPay(path, stype, head, cookie string) (code string, respheader htt
 	if cookie != "" {
 		json.Unmarshal([]byte(cookie), &cookies)
 	}
-	for times := 1; times <= 3; times++ { //重试三次
+	for times := 1; times <= 6; times++ { //重试六次
+		if times > 4 { //重试第5次开始,使用代理ip
+			proxyIp := spiderutil.GetProxyAddr(spiderutil.Config.ProxyAddr, spiderutil.Config.ProxyAuthor) //获取代理地址
+			util.Debug("proxy:", proxyIp)
+			client.SetProxyURL(proxyIp) //设置代理IP
+		}
 		request := client.R()
 		if len(headers) > 0 {
 			request.SetHeaders(headers)

+ 1 - 41
src/spiderutil/renderdownload.go

@@ -3,8 +3,6 @@ package spiderutil
 import (
 	"encoding/json"
 	"github.com/imroc/req"
-	"io/ioutil"
-	"net/http"
 	qu "qfw/util"
 	"time"
 )
@@ -34,7 +32,7 @@ func DownloadByRender(href string) (contenthtml string) {
 	}
 	for i := 1; i <= 3; i++ {
 		if i != 1 {
-			param["proxy"] = GetProxyAddr() //获取代理地址
+			param["proxy"] = GetProxyAddr(Config.ProxyAddr, Config.ProxyAuthor) //获取代理地址
 		}
 		resp, err := client.R().SetQueryParams(param).Get(Config.RenderAddr)
 		if err != nil {
@@ -53,41 +51,3 @@ func DownloadByRender(href string) (contenthtml string) {
 	}
 	return
 }
-
-func GetProxyAddr() string {
-	defer qu.Catch()
-	//获取代理
-	req, err := http.NewRequest(http.MethodGet, Config.ProxyAddr, nil)
-	if err != nil {
-		qu.Debug(err)
-		return ""
-	}
-	//添加请求头
-	req.Header.Add("Authorization", Config.ProxyAuthor)
-	client := http.Client{}
-	//发送请求
-	resp, err := client.Do(req)
-	if err != nil {
-		qu.Debug(err)
-		return ""
-	}
-	defer resp.Body.Close()
-	bodyByte, err := ioutil.ReadAll(resp.Body)
-	if err != nil {
-		qu.Debug(err)
-		return ""
-	}
-	tmp := map[string]interface{}{}
-	if json.Unmarshal(bodyByte, &tmp) != nil {
-		qu.Debug(err)
-		return ""
-	}
-	if data, ok := tmp["data"].(map[string]interface{}); ok && len(data) > 0 {
-		if httpProxy, ok := data["http"].(string); ok {
-			return httpProxy
-		} else if httpsProxy, ok := data["https"].(string); ok {
-			return httpsProxy
-		}
-	}
-	return ""
-}

+ 1 - 0
src/spiderutil/template.go

@@ -7,6 +7,7 @@ const (
 spiderHistoryMaxPage=%d;
 spiderMoveEvent="%s";
 spiderIsCompete=%v;
+spiderInfoformat=%d;
 `
 
 	//通用配置

+ 1 - 1
src/spiderutil/upload.go

@@ -56,7 +56,7 @@ func Upload(code, fileName, url string, bt []byte) (string, string, string) {
 
 //返回下载链接,文件名称,文件大小,文件类型,文件fid
 func UploadFile(code, fileName, url string, bt []byte) (string, string, string, string, string) {
-	if bt == nil || len(bt) < 1024*5 {
+	if bt == nil || len(bt) < 1024*3 {
 		logger.Error("下载文件出错!", code, " upload file "+fileName, url)
 		return "", "", "", "", ""
 	}

+ 37 - 0
src/spiderutil/util.go

@@ -10,6 +10,7 @@ import (
 	"fmt"
 	"github.com/yuin/gopher-lua"
 	"io"
+	"io/ioutil"
 	"math/big"
 	"math/rand"
 	"net/http"
@@ -242,3 +243,39 @@ func GetMd5String(s string) string {
 }
 
 var Se = util.SimpleEncrypt{Key: "topnet#2017@editor"}
+
+func GetProxyAddr(proxyAddr, proxyAuthor string) string {
+	//获取代理
+	req, err := http.NewRequest(http.MethodGet, proxyAddr, nil)
+	if err != nil {
+		fmt.Println("get proxy request err:", err)
+		return ""
+	}
+	//添加请求头
+	req.Header.Add("Authorization", proxyAuthor)
+	client := http.Client{}
+	//发送请求
+	resp, err := client.Do(req)
+	if err != nil {
+		fmt.Println("get proxy client err:", err)
+		return ""
+	}
+	defer resp.Body.Close()
+	bodyByte, err := ioutil.ReadAll(resp.Body)
+	if err != nil {
+		fmt.Println("get proxy read body err:", err)
+		return ""
+	}
+	tmp := map[string]interface{}{}
+	if json.Unmarshal(bodyByte, &tmp) != nil {
+		return ""
+	}
+	if data, ok := tmp["data"].(map[string]interface{}); ok && len(data) > 0 {
+		if httpProxy, ok := data["http"].(string); ok {
+			return httpProxy
+		} else if httpsProxy, ok := data["https"].(string); ok {
+			return httpsProxy
+		}
+	}
+	return ""
+}