diff --git a/app/task.go b/app/task.go
index 178f292..fe993d1 100644
--- a/app/task.go
+++ b/app/task.go
@@ -20,6 +20,15 @@ func CrawlTGChannel() {
 	node = append(node, getter.NewTGChannelGetter("https://t.me/s/FreeSSR666", 200).Get()...)
 	node = append(node, getter.NewTGChannelGetter("https://t.me/s/fanqiang666", 200).Get()...)
 
+	// Nodes published openly on various websites
+	node = append(node, getter.WebFreessrXyz{}.Get()...)
+	node = append(node, getter.WebLucnOrg{}.Get()...)
+
+	// Fuzzy-fetched from web pages
+	node = append(node, getter.NewWebFuzz("https://zfjvpn.gitbook.io/").Get()...)
+	node = append(node, getter.NewWebFuzz("https://www.freefq.com/d/file/free-ssr/20200811/1f3e9d0d0064f662457062712dcf1b66.txt").Get()...)
+	node = append(node, getter.NewWebFuzz("https://merlinblog.xyz/wiki/freess.html").Get()...)
+
 	node = append(node, GetProxies()...)
 	node = proxy.Deduplication(node)
 	SetProxies(node)
diff --git a/getter/base.go b/getter/base.go
index 47cfbd4..bf28139 100644
--- a/getter/base.go
+++ b/getter/base.go
@@ -1,7 +1,43 @@
 package getter
 
-import "github.com/zu1k/proxypool/proxy"
+import (
+	"strings"
+
+	"github.com/zu1k/proxypool/proxy"
+)
 
 type Getter interface {
 	Get() []proxy.Proxy
 }
+
+func String2Proxy(link string) proxy.Proxy {
+	var err error
+	var data proxy.Proxy
+	if strings.HasPrefix(link, "ssr://") {
+		data, err = proxy.ParseSSRLink(link)
+	} else if strings.HasPrefix(link, "vmess://") {
+		data, err = proxy.ParseVmessLink(link)
+	}
+	if err != nil {
+		return nil
+	}
+	return data
+}
+
+func StringArray2ProxyArray(origin []string) []proxy.Proxy {
+	var err error
+	results := make([]proxy.Proxy, 0)
+	var data proxy.Proxy
+	for _, link := range origin {
+		if strings.HasPrefix(link, "ssr://") {
+			data, err = proxy.ParseSSRLink(link)
+		} else if strings.HasPrefix(link, "vmess://") {
+			data, err = proxy.ParseVmessLink(link)
+		}
+		if err != nil {
+			continue
+		}
+		results = append(results, data)
+	}
+	return results
+}
diff --git a/getter/subscribe.go b/getter/subscribe.go
new file mode 100644
index 0000000..69c6dc1
--- /dev/null
+++ b/getter/subscribe.go
@@ -0,0 +1,13 @@
+package getter
+
+import "github.com/zu1k/proxypool/proxy"
+
+type Subscribe struct {
+	NumNeeded int
+	Results   []string
+	Url       string
+}
+
+func (s Subscribe) Get() []proxy.Proxy {
+	return nil
+}
diff --git a/getter/tgchannel.go b/getter/tgchannel.go
index 9311d10..8d07c49 100644
--- a/getter/tgchannel.go
+++ b/getter/tgchannel.go
@@ -2,7 +2,6 @@ package getter
 
 import (
 	"fmt"
-	"strings"
 
 	"github.com/gocolly/colly"
 	"github.com/zu1k/proxypool/proxy"
@@ -47,18 +46,5 @@ func (g TGChannelGetter) Get() []proxy.Proxy {
 		_ = fmt.Errorf("%s", err.Error())
 	}
 
-	results := make([]proxy.Proxy, 0)
-	var data proxy.Proxy
-	for _, link := range g.Results {
-		if strings.HasPrefix(link, "ssr://") {
-			data, err = proxy.ParseSSRLink(link)
-		} else if strings.HasPrefix(link, "vmess://") {
-			data, err = proxy.ParseVmessLink(link)
-		}
-		if err != nil {
-			continue
-		}
-		results = append(results, data)
-	}
-	return results
+	return StringArray2ProxyArray(g.Results)
 }
diff --git a/getter/web_free_ssr_xyz.go b/getter/web_free_ssr_xyz.go
new file mode 100644
index 0000000..61778f9
--- /dev/null
+++ b/getter/web_free_ssr_xyz.go
@@ -0,0 +1,51 @@
+package getter
+
+import (
+	"encoding/json"
+	"io/ioutil"
+	"net/http"
+
+	"github.com/zu1k/proxypool/proxy"
+)
+
+const (
+	freessrxyzSsrLink   = "https://api.free-ssr.xyz/ssr"
+	freessrxyzV2rayLink = "https://api.free-ssr.xyz/v2ray"
+)
+
+type WebFreessrXyz struct {
+}
+
+func (w WebFreessrXyz) Get() []proxy.Proxy {
+	results := freessrxyzFetch(freessrxyzSsrLink)
+	results = append(results, freessrxyzFetch(freessrxyzV2rayLink)...)
+	return results
+}
+
+func freessrxyzFetch(link string) []proxy.Proxy {
+	resp, err := http.Get(link)
+	if err != nil {
+		return nil
+	}
+	defer resp.Body.Close()
+	body, err := ioutil.ReadAll(resp.Body)
+	if err != nil {
+		return nil
+	}
+
+	type node struct {
+		Url string `json:"url"`
+	}
+	ssrs := make([]node, 0)
+	err = json.Unmarshal(body, &ssrs)
+	if err != nil {
+		return nil
+	}
+
+	result := make([]string, 0)
+	for _, node := range ssrs {
+		result = append(result, node.Url)
+	}
+
+	return StringArray2ProxyArray(result)
+}
diff --git a/getter/web_fuzz.go b/getter/web_fuzz.go
new file mode 100644
index 0000000..f66c791
--- /dev/null
+++ b/getter/web_fuzz.go
@@ -0,0 +1,34 @@
+package getter
+
+import (
+	"io/ioutil"
+	"net/http"
+
+	"github.com/zu1k/proxypool/proxy"
+)
+
+type WebFuzz struct {
+	Url string
+}
+
+func (w WebFuzz) Get() []proxy.Proxy {
+	resp, err := http.Get(w.Url)
+	if err != nil {
+		return nil
+	}
+	defer resp.Body.Close()
+	body, err := ioutil.ReadAll(resp.Body)
+	if err != nil {
+		return nil
+	}
+	text := string(body)
+
+	results := proxy.GrepSSRLinkFromString(text)
+	results = append(results, proxy.GrepVmessLinkFromString(text)...)
+
+	return StringArray2ProxyArray(results)
+}
+
+func NewWebFuzz(url string) *WebFuzz {
+	return &WebFuzz{Url: url}
+}
diff --git a/getter/web_lucn_org.go b/getter/web_lucn_org.go
new file mode 100644
index 0000000..9af5f7f
--- /dev/null
+++ b/getter/web_lucn_org.go
@@ -0,0 +1,70 @@
+package getter
+
+import (
+	"encoding/base64"
+	"encoding/json"
+	"io/ioutil"
+	"net/http"
+
+	"github.com/zu1k/proxypool/tool"
+
+	"github.com/zu1k/proxypool/proxy"
+)
+
+const lucnorgSsrLink = "https://lncn.org/api/ssrList"
+
+type WebLucnOrg struct {
+}
+
+func (w WebLucnOrg) Get() []proxy.Proxy {
+	resp, err := http.Post(lucnorgSsrLink, "", nil)
+	if err != nil {
+		return nil
+	}
+	defer resp.Body.Close()
+	body, err := ioutil.ReadAll(resp.Body)
+	if err != nil {
+		return nil
+	}
+
+	response := struct {
+		Code string `json:"code"`
+		Ssrs string `json:"ssrs"`
+	}{}
+	err = json.Unmarshal(body, &response)
+	if err != nil {
+		return nil
+	}
+
+	dec := decryptAesForLucn(response.Code, response.Ssrs)
+	if dec == nil {
+		return nil
+	}
+
+	type node struct {
+		Url string `json:"url"`
+	}
+	ssrs := make([]node, 0)
+	err = json.Unmarshal(dec, &ssrs)
+	if err != nil {
+		return nil
+	}
+
+	result := make([]string, 0)
+	for _, node := range ssrs {
+		result = append(result, node.Url)
+	}
+	return StringArray2ProxyArray(result)
+}
+
+func decryptAesForLucn(code string, c string) []byte {
+	if code == "" {
+		code = "abclnv561cqqfg30"
+	}
+	cipher, err := base64.StdEncoding.DecodeString(c)
+	if err != nil {
+		return nil
+	}
+	result := tool.AesEcbDecryptWithPKCS7Unpadding(cipher, []byte(code))
+	return result
+}
diff --git a/getter/web_test.go b/getter/web_test.go
new file mode 100644
index 0000000..aaf4a50
--- /dev/null
+++ b/getter/web_test.go
@@ -0,0 +1,18 @@
+package getter
+
+import (
+	"fmt"
+	"testing"
+)
+
+func TestWebLucnOrg_Get(t *testing.T) {
+	fmt.Println(WebLucnOrg{}.Get())
+}
+
+func TestWebFreessrXyz_Get(t *testing.T) {
+	fmt.Println(WebFreessrXyz{}.Get())
+}
+
+func TestWebFuzz_Get(t *testing.T) {
+	fmt.Println(NewWebFuzz("https://merlinblog.xyz/wiki/freess.html").Get())
+}
diff --git a/tool/aes.go b/tool/aes.go
new file mode 100644
index 0000000..d10d6a5
--- /dev/null
+++ b/tool/aes.go
@@ -0,0 +1,43 @@
+package tool
+
+import (
+	"bytes"
+	"crypto/aes"
+)
+
+func PKCS7Padding(ciphertext []byte, blockSize int) []byte {
+	padding := blockSize - len(ciphertext)%blockSize
+	padtext := bytes.Repeat([]byte{byte(padding)}, padding)
+	return append(ciphertext, padtext...)
+}
+
+func PKCS7UnPadding(origData []byte) []byte {
+	length := len(origData)
+	unpadding := int(origData[length-1])
+	return origData[:(length - unpadding)]
+}
+
+func AesEcbDecryptWithPKCS7Unpadding(data, key []byte) []byte {
+	block, _ := aes.NewCipher(key)
+	decrypted := make([]byte, len(data))
+	size := block.BlockSize()
+
+	for bs, be := 0, size; bs < len(data); bs, be = bs+size, be+size {
+		block.Decrypt(decrypted[bs:be], data[bs:be])
+	}
+
+	return PKCS7UnPadding(decrypted)
+}
+
+func AesEcbEncryptWithPKCS7Padding(data, key []byte) []byte {
+	block, _ := aes.NewCipher(key)
+	data = PKCS7Padding(data, block.BlockSize())
+	decrypted := make([]byte, len(data))
+	size := block.BlockSize()
+
+	for bs, be := 0, size; bs < len(data); bs, be = bs+size, be+size {
+		block.Encrypt(decrypted[bs:be], data[bs:be])
+	}
+
+	return decrypted
+}
diff --git a/tool/httpclient.go b/tool/httpclient.go
new file mode 100644
index 0000000..39c3de1
--- /dev/null
+++ b/tool/httpclient.go
@@ -0,0 +1,18 @@
+package tool
+
+import (
+	"net/http"
+	"time"
+)
+
+var httpClient = http.DefaultClient
+
+func init() {
+	httpClient.Timeout = time.Second * 10
+	http.DefaultClient.Timeout = time.Second * 10
+}
+
+func GetHttpClient() *http.Client {
+	c := *httpClient
+	return &c
+}