add more node
@@ -20,6 +20,15 @@ func CrawlTGChannel() {
 	node = append(node, getter.NewTGChannelGetter("https://t.me/s/FreeSSR666", 200).Get()...)
 	node = append(node, getter.NewTGChannelGetter("https://t.me/s/fanqiang666", 200).Get()...)
+
+	// Publicly shared on various websites
+	node = append(node, getter.WebFreessrXyz{}.Get()...)
+	node = append(node, getter.WebLucnOrg{}.Get()...)
+
+	// Fuzzy-matched from web pages
+	node = append(node, getter.NewWebFuzz("https://zfjvpn.gitbook.io/").Get()...)
+	node = append(node, getter.NewWebFuzz("https://www.freefq.com/d/file/free-ssr/20200811/1f3e9d0d0064f662457062712dcf1b66.txt").Get()...)
+	node = append(node, getter.NewWebFuzz("https://merlinblog.xyz/wiki/freess.html").Get()...)
 
 	node = append(node, GetProxies()...)
 	node = proxy.Deduplication(node)
 	SetProxies(node)
@@ -1,7 +1,43 @@
 package getter
 
-import "github.com/zu1k/proxypool/proxy"
+import (
+	"strings"
+
+	"github.com/zu1k/proxypool/proxy"
+)
 
 type Getter interface {
 	Get() []proxy.Proxy
 }
+
+func String2Proxy(link string) proxy.Proxy {
+	var err error
+	var data proxy.Proxy
+	if strings.HasPrefix(link, "ssr://") {
+		data, err = proxy.ParseSSRLink(link)
+	} else if strings.HasPrefix(link, "vmess://") {
+		data, err = proxy.ParseVmessLink(link)
+	}
+	if err != nil {
+		return nil
+	}
+	return data
+}
+
+func StringArray2ProxyArray(origin []string) []proxy.Proxy {
+	var err error
+	results := make([]proxy.Proxy, 0)
+	var data proxy.Proxy
+	for _, link := range origin {
+		if strings.HasPrefix(link, "ssr://") {
+			data, err = proxy.ParseSSRLink(link)
+		} else if strings.HasPrefix(link, "vmess://") {
+			data, err = proxy.ParseVmessLink(link)
+		}
+		if err != nil {
+			continue
+		}
+		results = append(results, data)
+	}
+	return results
+}
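The two helpers above centralize share-link parsing for every getter. A minimal usage sketch, assuming it sits in package getter with fmt imported; the links are placeholders, not nodes from this commit:

	// Hypothetical example, not part of the commit; the links are placeholders.
	func ExampleStringArray2ProxyArray() {
		links := []string{"ssr://...", "vmess://..."}
		proxies := StringArray2ProxyArray(links) // links that fail to parse are skipped
		fmt.Println(len(proxies))
	}

One quirk worth knowing: a link matching neither prefix leaves data and err holding values from the previous iteration, so the prior proxy can be appended a second time.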

13 getter/subscribe.go Normal file
@@ -0,0 +1,13 @@
+package getter
+
+import "github.com/zu1k/proxypool/proxy"
+
+type Subscribe struct {
+	NumNeeded int
+	Results   []string
+	Url       string
+}
+
+func (s Subscribe) Get() []proxy.Proxy {
+	return nil
+}
@@ -2,7 +2,6 @@ package getter
 
 import (
 	"fmt"
-	"strings"
 
 	"github.com/gocolly/colly"
 	"github.com/zu1k/proxypool/proxy"
@@ -47,18 +46,5 @@ func (g TGChannelGetter) Get() []proxy.Proxy {
 		_ = fmt.Errorf("%s", err.Error())
 	}
 
-	results := make([]proxy.Proxy, 0)
-	var data proxy.Proxy
-	for _, link := range g.Results {
-		if strings.HasPrefix(link, "ssr://") {
-			data, err = proxy.ParseSSRLink(link)
-		} else if strings.HasPrefix(link, "vmess://") {
-			data, err = proxy.ParseVmessLink(link)
-		}
-		if err != nil {
-			continue
-		}
-		results = append(results, data)
-	}
-	return results
+	return StringArray2ProxyArray(g.Results)
 }

51 getter/web_free_ssr_xyz.go Normal file
@@ -0,0 +1,51 @@
+package getter
+
+import (
+	"encoding/json"
+	"io/ioutil"
+	"net/http"
+
+	"github.com/zu1k/proxypool/proxy"
+)
+
+const (
+	freessrxyzSsrLink   = "https://api.free-ssr.xyz/ssr"
+	freessrxyzV2rayLink = "https://api.free-ssr.xyz/v2ray"
+)
+
+type WebFreessrXyz struct {
+}
+
+func (w WebFreessrXyz) Get() []proxy.Proxy {
+	results := freessrxyzFetch(freessrxyzSsrLink)
+	results = append(results, freessrxyzFetch(freessrxyzV2rayLink)...)
+	return results
+}
+
+func freessrxyzFetch(link string) []proxy.Proxy {
+	resp, err := http.Get(link)
+	if err != nil {
+		return nil
+	}
+	defer resp.Body.Close()
+	body, err := ioutil.ReadAll(resp.Body)
+	if err != nil {
+		return nil
+	}
+
+	type node struct {
+		Url string `json:"url"`
+	}
+	ssrs := make([]node, 0)
+	err = json.Unmarshal(body, &ssrs)
+	if err != nil {
+		return nil
+	}
+
+	result := make([]string, 0)
+	for _, node := range ssrs {
+		result = append(result, node.Url)
+	}
+
+	return StringArray2ProxyArray(result)
+}
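The unmarshal above implies the API returns a bare JSON array of objects carrying a url field. A hedged shape check with an illustrative payload that was not captured from the live API (assumes encoding/json and testing are imported):

	// Hypothetical test; the payload is made up to mirror the struct tag above.
	func TestFreessrxyzShape(t *testing.T) {
		body := []byte(`[{"url":"ssr://aaa"},{"url":"vmess://bbb"}]`)
		type node struct {
			Url string `json:"url"`
		}
		var ssrs []node
		if err := json.Unmarshal(body, &ssrs); err != nil || len(ssrs) != 2 {
			t.Fatal("unexpected payload shape")
		}
	}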

34 getter/web_fuzz.go Normal file
@@ -0,0 +1,34 @@
+package getter
+
+import (
+	"io/ioutil"
+	"net/http"
+
+	"github.com/zu1k/proxypool/proxy"
+)
+
+type WebFuzz struct {
+	Url string
+}
+
+func (w WebFuzz) Get() []proxy.Proxy {
+	resp, err := http.Get(w.Url)
+	if err != nil {
+		return nil
+	}
+	defer resp.Body.Close()
+	body, err := ioutil.ReadAll(resp.Body)
+	if err != nil {
+		return nil
+	}
+	text := string(body)
+
+	results := proxy.GrepSSRLinkFromString(text)
+	results = append(results, proxy.GrepVmessLinkFromString(text)...)
+
+	return StringArray2ProxyArray(results)
+}
+
+func NewWebFuzz(url string) *WebFuzz {
+	return &WebFuzz{Url: url}
+}

70 getter/web_lucn_org.go Normal file
@@ -0,0 +1,70 @@
+package getter
+
+import (
+	"encoding/base64"
+	"encoding/json"
+	"io/ioutil"
+	"net/http"
+
+	"github.com/zu1k/proxypool/tool"
+
+	"github.com/zu1k/proxypool/proxy"
+)
+
+const lucnorgSsrLink = "https://lncn.org/api/ssrList"
+
+type WebLucnOrg struct {
+}
+
+func (w WebLucnOrg) Get() []proxy.Proxy {
+	resp, err := http.Post(lucnorgSsrLink, "", nil)
+	if err != nil {
+		return nil
+	}
+	defer resp.Body.Close()
+	body, err := ioutil.ReadAll(resp.Body)
+	if err != nil {
+		return nil
+	}
+
+	response := struct {
+		Code string `json:"code"`
+		Ssrs string `json:"ssrs"`
+	}{}
+	err = json.Unmarshal(body, &response)
+	if err != nil {
+		return nil
+	}
+
+	dec := decryptAesForLucn(response.Code, response.Ssrs)
+	if dec == nil {
+		return nil
+	}
+
+	type node struct {
+		Url string `json:"url"`
+	}
+	ssrs := make([]node, 0)
+	err = json.Unmarshal(dec, &ssrs)
+	if err != nil {
+		return nil
+	}
+
+	result := make([]string, 0)
+	for _, node := range ssrs {
+		result = append(result, node.Url)
+	}
+	return StringArray2ProxyArray(result)
+}
+
+func decryptAesForLucn(code string, c string) []byte {
+	if code == "" {
+		code = "abclnv561cqqfg30"
+	}
+	cipher, err := base64.StdEncoding.DecodeString(c)
+	if err != nil {
+		return nil
+	}
+	result := tool.AesEcbDecryptWithPKCS7Unpadding(cipher, []byte(code))
+	return result
+}

18 getter/web_test.go Normal file
@@ -0,0 +1,18 @@
+package getter
+
+import (
+	"fmt"
+	"testing"
+)
+
+func TestWebLucnOrg_Get(t *testing.T) {
+	fmt.Println(WebLucnOrg{}.Get())
+}
+
+func TestWebFreessrXyz_Get(t *testing.T) {
+	fmt.Println(WebFreessrXyz{}.Get())
+}
+
+func TestWebFuzz_Get(t *testing.T) {
+	fmt.Println(NewWebFuzz("https://merlinblog.xyz/wiki/freess.html").Get())
+}

43 tool/aes.go Normal file
@@ -0,0 +1,43 @@
+package tool
+
+import (
+	"bytes"
+	"crypto/aes"
+)
+
+func PKCS7Padding(ciphertext []byte, blockSize int) []byte {
+	padding := blockSize - len(ciphertext)%blockSize
+	padtext := bytes.Repeat([]byte{byte(padding)}, padding)
+	return append(ciphertext, padtext...)
+}
+
+func PKCS7UnPadding(origData []byte) []byte {
+	length := len(origData)
+	unpadding := int(origData[length-1])
+	return origData[:(length - unpadding)]
+}
+
+func AesEcbDecryptWithPKCS7Unpadding(data, key []byte) []byte {
+	block, _ := aes.NewCipher(key)
+	decrypted := make([]byte, len(data))
+	size := block.BlockSize()
+
+	for bs, be := 0, size; bs < len(data); bs, be = bs+size, be+size {
+		block.Decrypt(decrypted[bs:be], data[bs:be])
+	}
+
+	return PKCS7UnPadding(decrypted)
+}
+
+func AesEcbEncryptWithPKCS7Padding(data, key []byte) []byte {
+	block, _ := aes.NewCipher(key)
+	data = PKCS7Padding(data, block.BlockSize())
+	decrypted := make([]byte, len(data))
+	size := block.BlockSize()
+
+	for bs, be := 0, size; bs < len(data); bs, be = bs+size, be+size {
+		block.Encrypt(decrypted[bs:be], data[bs:be])
+	}
+
+	return decrypted
+}
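These ECB helpers ignore the error from aes.NewCipher, so the key must already be 16, 24, or 32 bytes (the Lucn fallback key above is 16 bytes, hence AES-128). A round-trip sketch under that assumption, with made-up key and plaintext, assuming bytes and testing are imported:

	// Hypothetical test, not part of the commit; key and plaintext are made up.
	func TestAesEcbRoundTrip(t *testing.T) {
		key := []byte("0123456789abcdef") // 16 bytes -> AES-128
		plain := []byte("hello ecb")
		enc := AesEcbEncryptWithPKCS7Padding(plain, key)
		dec := AesEcbDecryptWithPKCS7Unpadding(enc, key) // decrypts and unpads
		if !bytes.Equal(dec, plain) {
			t.Fatal("ECB round trip failed")
		}
	}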

18 tool/httpclient.go Normal file
@@ -0,0 +1,18 @@
+package tool
+
+import (
+	"net/http"
+	"time"
+)
+
+var httpClient = http.DefaultClient
+
+func init() {
+	httpClient.Timeout = time.Second * 10
+	http.DefaultClient.Timeout = time.Second * 10
+}
+
+func GetHttpClient() *http.Client {
+	c := *httpClient
+	return &c
+}
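Since httpClient is the pointer http.DefaultClient itself, the two assignments in init set the same Timeout field twice; GetHttpClient then hands back a value copy, so callers can tweak their client without touching the shared one. A hedged usage sketch (the URL is illustrative):

	// Hypothetical caller, not part of the commit; the URL is illustrative.
	client := tool.GetHttpClient() // copy of the shared client, 10s timeout
	resp, err := client.Get("https://example.com")
	if err == nil {
		resp.Body.Close()
	}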