Compare commits

...

4 Commits

Author SHA1 Message Date
zu1k 655d5facaa opti some func 2020-08-24 19:26:29 +08:00
zu1k 51f6f9f953 update readme 2020-08-24 13:28:40 +08:00
zu1k ffda71e641 docker add tzdata dep 2020-08-24 13:26:18 +08:00
zu1k 2b22846028 add getters count show 2020-08-24 13:20:55 +08:00
20 changed files with 199 additions and 53 deletions

Dockerfile

@@ -9,7 +9,7 @@ RUN go mod download && \
FROM alpine:latest
RUN apk add --no-cache ca-certificates
RUN apk add --no-cache ca-certificates tzdata
WORKDIR /proxypool-src
COPY ./assets /proxypool-src/assets
COPY --from=builder /proxypool /proxypool-src/
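
The added `tzdata` package matters because a bare alpine image ships no zoneinfo database, and the crawler formats timestamps with `time.Now().In(location)` (see the crawl hunk further down). A minimal sketch of the failure mode, assuming a zone name such as Asia/Shanghai is used:

```go
package main

import (
    "fmt"
    "time"
)

func main() {
    // On alpine without the tzdata package, LoadLocation fails because
    // /usr/share/zoneinfo does not exist in the image.
    loc, err := time.LoadLocation("Asia/Shanghai") // zone name is an assumption
    if err != nil {
        fmt.Println("zoneinfo missing:", err)
        return
    }
    fmt.Println(time.Now().In(loc).Format("2006-01-02 15:04:05"))
}
```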

README.md

@@ -32,7 +32,7 @@
Click the button to open the deployment page, fill in the basic information, then run it
`DOMAIN` should be set to the domain you want to bind and `CONFIG_FILE` to the path of your configuration file; a configuration template is provided in config/source.yaml
`DOMAIN` should be set to the domain you want to bind and `CONFIG_FILE` to the path of your configuration file; a configuration template is provided in config/config.yaml
Options starting with `CF` can be left empty for now; they do not affect the program
@@ -53,6 +53,8 @@ $ go get -u -v github.com/zu1k/proxypool
Download the precompiled binary from the [release](https://github.com/zu1k/proxypool/releases) page
Besides the compiled binary, you also need to download the repository's `assets` folder and place it in the same directory as the executable
### Using Docker
```sh
@@ -63,16 +65,16 @@ docker pull docker.pkg.github.com/zu1k/proxypool/proxypool:latest
### Editing the configuration file
First edit the required settings in source.yaml; options starting with cf can be left empty
First edit the required settings in config.yaml; options starting with cf can be left empty
The configuration file defines the crawl sources, which need to be maintained and updated manually on a regular basis
The source.yaml file defines the crawl sources, which need to be maintained and updated manually on a regular basis
### Starting the program
Use the `-c` flag to specify the configuration file path; HTTP links are supported
```shell
proxypool -c source.yaml
proxypool -c config.yaml
```
## Screenshots


@@ -12,17 +12,20 @@ import (
"github.com/zu1k/proxypool/pkg/provider"
)
const version = "v0.3.0"
const version = "v0.3.1"
var router *gin.Engine
func setupRouter() {
router = gin.Default()
gin.SetMode(gin.ReleaseMode)
router = gin.New()
router.Use(gin.Recovery())
router.LoadHTMLGlob("assets/html/*")
router.GET("/", func(c *gin.Context) {
c.HTML(http.StatusOK, "index.html", gin.H{
"domain": config.Config.Domain,
"getters_count": cache.GettersCount,
"all_proxies_count": cache.AllProxiesCount,
"ss_proxies_count": cache.SSProxiesCount,
"ssr_proxies_count": cache.SSRProxiesCount,


@@ -50,6 +50,19 @@ proxy-groups:
- provider
url: 'http://www.gstatic.com/generate_204'
interval: 300
- name: Website https://proxy.tgbot.co
type: select
proxies:
- DIRECT
- name: Please Star https://github.com/zu1k/proxypool
type: select
proxies:
- DIRECT
- name: TG channel @peekfun
type: select
proxies:
- DIRECT
proxy-providers:
provider:


@@ -106,7 +106,7 @@
<div class='section friendly'>
<h1><strong>Free Nodes</strong></h1>
<div class='article'>
<p>Automatically crawls ss, ssr, vmess and trojan node information from TG channels, subscription links and the public internet, aggregates and deduplicates it, and publishes the node list, updated every 15 minutes</p>
<p>Automatically crawls ss, ssr, vmess and trojan node information from TG channels, subscription links and the public internet, aggregates and deduplicates it, and publishes the node list, updated every 15 minutes; currently {{.getters_count}} crawl sources</p>
<p>Total nodes: {{ .all_proxies_count }}</p>
<p>ss nodes: {{ .ss_proxies_count }}</p>
<p>ssr nodes: {{ .ssr_proxies_count }}</p>
@@ -130,6 +130,7 @@
</p>
<br>
{{- /* Anyone providing a service with this code must not remove this line */}}
<p>Follow the TG channel: <a href="https://t.me/peekfun">@peekfun</a></p>
<p>The crawler is open source: <a href="https://github.com/zu1k/proxypool">https://github.com/zu1k/proxypool</a> {{ .version }}</p>
</div>
</div>

go.mod (1 line changed)

@@ -17,6 +17,7 @@ require (
github.com/gocolly/colly v1.2.0
github.com/golang/protobuf v1.4.2 // indirect
github.com/heroku/x v0.0.25
github.com/ivpusic/grpool v1.0.0
github.com/jasonlvhit/gocron v0.0.1
github.com/json-iterator/go v1.1.10 // indirect
github.com/kennygrant/sanitize v1.2.4 // indirect
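
The new dependency `github.com/ivpusic/grpool` is the worker pool behind `CleanBadProxiesWithGrpool` later in this diff. A minimal sketch of the pool pattern that function relies on (worker and job counts here are arbitrary):

```go
package main

import (
    "fmt"

    "github.com/ivpusic/grpool"
)

func main() {
    pool := grpool.NewPool(4, 10) // 4 workers, job queue buffer of 10
    defer pool.Release()

    const jobs = 10
    pool.WaitCount(jobs) // number of JobDone calls WaitAll will wait for
    for i := 0; i < jobs; i++ {
        n := i
        pool.JobQueue <- func() {
            defer pool.JobDone()
            fmt.Println("job", n)
        }
    }
    pool.WaitAll() // blocks until every job has reported JobDone
}
```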

go.sum (2 lines changed)

@@ -120,6 +120,8 @@ github.com/heroku/x v0.0.25/go.mod h1:qE/I0jp6rIeTBBosrPYV4ygRX3OMhqmC/A6x8ewodJ
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/hydrogen18/memlistener v0.0.0-20141126152155-54553eb933fb/go.mod h1:qEIFzExnS6016fRpRfxrExeVn2gbClQA99gQhnIcdhE=
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
github.com/ivpusic/grpool v1.0.0 h1:+FCiCo3GhfsvzfXuJWnpJUNb/VaqyYVgG8C+qvh07Rc=
github.com/ivpusic/grpool v1.0.0/go.mod h1:WPmiAI5ExAn06vg+0JzyPzXMQutJmpb7TrBtyLJkOHQ=
github.com/jasonlvhit/gocron v0.0.1 h1:qTt5qF3b3srDjeOIR4Le1LfeyvoYzJlYpqvG7tJX5YU=
github.com/jasonlvhit/gocron v0.0.1/go.mod h1:k9a3TV8VcU73XZxfVHCHWMWF9SOqgoku0/QlY2yvlA4=
github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=


@@ -4,6 +4,8 @@ import (
"errors"
"fmt"
"github.com/zu1k/proxypool/internal/cache"
"github.com/ghodss/yaml"
"github.com/zu1k/proxypool/config"
@@ -48,4 +50,5 @@ func initGetters(sourceFiles []string) {
}
}
fmt.Println("Getter count:", len(Getters))
cache.GettersCount = len(Getters)
}


@@ -33,19 +33,28 @@ func CrawlGo() {
proxies = proxies.Deduplication()
log.Println("CrawlGo node count:", len(proxies))
proxies = provider.Clash{Proxies: proxies}.CleanProxies()
proxies.NameAddCounrty().Sort().NameAddIndex()
log.Println("CrawlGo cleaned node count:", len(proxies))
proxies.NameAddCounrty().Sort().NameAddIndex().NameAddTG()
log.Println("Proxy rename DONE!")
cache.SetProxies("allproxies", proxies)
cache.AllProxiesCount = proxies.Len()
log.Println("AllProxiesCount:", cache.AllProxiesCount)
cache.SSProxiesCount = proxies.TypeLen("ss")
log.Println("SSProxiesCount:", cache.SSProxiesCount)
cache.SSRProxiesCount = proxies.TypeLen("ssr")
log.Println("SSRProxiesCount:", cache.SSRProxiesCount)
cache.VmessProxiesCount = proxies.TypeLen("vmess")
log.Println("VmessProxiesCount:", cache.VmessProxiesCount)
cache.TrojanProxiesCount = proxies.TypeLen("trojan")
log.Println("TrojanProxiesCount:", cache.TrojanProxiesCount)
cache.LastCrawlTime = time.Now().In(location).Format("2006-01-02 15:04:05")
// availability check
proxies = proxy.CleanBadProxies(proxies)
log.Println("CrawlGo clash useable node count:", len(proxies))
proxies.NameAddCounrty().Sort().NameAddIndex()
log.Println("Now check proxy health...")
proxies = proxy.CleanBadProxiesWithGrpool(proxies)
log.Println("CrawlGo clash usefull node count:", len(proxies))
proxies.NameReIndex()
cache.SetProxies("proxies", proxies)
cache.UsefullProxiesCount = proxies.Len()
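
The per-protocol counters call a `TypeLen` helper on `ProxyList` that is not part of this diff. A plausible sketch, assuming it simply counts by the `TypeName()` the `Proxy` interface exposes (the real implementation may differ):

```go
// Hypothetical sketch of TypeLen; the repository's version is not shown here.
func (ps ProxyList) TypeLen(t string) int {
    count := 0
    for _, p := range ps {
        if p.TypeName() == t { // TypeName is declared on the Proxy interface
            count++
        }
    }
    return count
}
```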


@@ -1,7 +1,6 @@
package cache
import (
"log"
"time"
"github.com/patrickmn/go-cache"
@@ -13,10 +12,8 @@ var c = cache.New(cache.NoExpiration, 10*time.Minute)
func GetProxies(key string) proxy.ProxyList {
result, found := c.Get(key)
if found {
log.Println("found cache for:", key, "length:", len(result.(proxy.ProxyList)))
return result.(proxy.ProxyList)
}
log.Println("cache not found:", key)
return nil
}
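
For context, `GetProxies` reads from the package-level go-cache instance `c` with a type assertion; its counterpart `SetProxies` (called from the crawl code but not shown in this hunk) presumably just stores the list under the key. A sketch of that assumed counterpart:

```go
// Assumed counterpart to GetProxies; the actual body is not part of this diff.
func SetProxies(key string, proxies proxy.ProxyList) {
    c.Set(key, proxies, cache.NoExpiration)
}
```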


@@ -1,6 +1,8 @@
package cache
var (
GettersCount = 0
AllProxiesCount = 0
SSRProxiesCount = 0
SSProxiesCount = 0


@@ -40,7 +40,7 @@ func NewTGChannelGetter(options tool.Options) (getter Getter, err error) {
return nil, err
}
return &TGChannelGetter{
c: colly.NewCollector(),
c: tool.GetColly(),
NumNeeded: t,
Url: "https://t.me/s/" + url,
}, nil


@@ -16,9 +16,9 @@ func init() {
}
type WebFanqiangdang struct {
c *colly.Collector
Url string
results proxy.ProxyList
c *colly.Collector
Url string
results proxy.ProxyList
}
func NewWebFanqiangdangGetter(options tool.Options) (getter Getter, err error) {
@@ -29,8 +29,8 @@ func NewWebFanqiangdangGetter(options tool.Options) (getter Getter, err error) {
return nil, err
}
return &WebFanqiangdang{
c: colly.NewCollector(),
Url: url,
c: colly.NewCollector(),
Url: url,
}, nil
}
return nil, ErrorUrlNotFound
@@ -70,11 +70,10 @@ func (w *WebFanqiangdang) Get2Chan(pc chan proxy.Proxy, wg *sync.WaitGroup) {
}
}
type WebFanqiangdangRSS struct {
c *colly.Collector
Url string
results []string
c *colly.Collector
Url string
results []string
}
func NewWebFanqiangdangRSSGetter(options tool.Options) (getter Getter, err error) {
@@ -85,8 +84,8 @@ func NewWebFanqiangdangRSSGetter(options tool.Options) (getter Getter, err error
return nil, err
}
return &WebFanqiangdangRSS{
c: colly.NewCollector(),
Url: url,
c: tool.GetColly(),
Url: url,
}, nil
}
return nil, ErrorUrlNotFound


@@ -19,6 +19,10 @@ func (b *Base) SetName(name string) {
b.Name = name
}
func (b *Base) SetIP(ip string) {
b.Server = ip
}
func (b *Base) BaseInfo() *Base {
return b
}
@@ -34,6 +38,7 @@ type Proxy interface {
ToSurge() string
Identifier() string
SetName(name string)
SetIP(ip string)
TypeName() string
BaseInfo() *Base
Clone() Proxy


@@ -4,8 +4,11 @@ import (
"context"
"encoding/json"
"fmt"
"sync"
"time"
"github.com/ivpusic/grpool"
"github.com/Dreamacro/clash/adapters/outbound"
)
@@ -35,21 +38,75 @@ func testDelay(p Proxy) (delay uint16, err error) {
return delay, err
}
func CleanBadProxiesWithGrpool(proxies []Proxy) (cproxies []Proxy) {
pool := grpool.NewPool(500, 200)
c := make(chan checkResult)
defer close(c)
pool.WaitCount(len(proxies))
go func() {
for _, p := range proxies {
pp := p
pool.JobQueue <- func() {
defer pool.JobDone()
delay, err := testDelay(pp)
if err == nil {
c <- checkResult{
name: pp.Identifier(),
delay: delay,
}
}
}
}
}()
done := make(chan struct{})
defer close(done)
go func() {
pool.WaitAll()
pool.Release()
done <- struct{}{}
}()
okMap := make(map[string]struct{})
for {
select {
case r := <-c:
if r.delay > 0 {
okMap[r.name] = struct{}{}
}
case <-done:
cproxies = make(ProxyList, 0, 500)
for _, p := range proxies {
if _, ok := okMap[p.Identifier()]; ok {
cproxies = append(cproxies, p.Clone())
}
}
return
}
}
}
func CleanBadProxies(proxies []Proxy) (cproxies []Proxy) {
c := make(chan checkResult, 40)
defer close(c)
wg := &sync.WaitGroup{}
wg.Add(len(proxies))
for _, p := range proxies {
go testProxyDelayToChan(p, c)
go testProxyDelayToChan(p, c, wg)
}
go func() {
wg.Wait()
close(c)
}()
okMap := make(map[string]struct{})
size := len(proxies)
for i := 0; i < size; i++ {
r := <-c
for r := range c {
if r.delay > 0 {
okMap[r.name] = struct{}{}
}
}
cproxies = make([]Proxy, 0)
cproxies = make(ProxyList, 0, 500)
for _, p := range proxies {
if _, ok := okMap[p.Identifier()]; ok {
cproxies = append(cproxies, p.Clone())
@@ -63,17 +120,13 @@ type checkResult struct {
delay uint16
}
func testProxyDelayToChan(p Proxy, c chan checkResult) {
func testProxyDelayToChan(p Proxy, c chan checkResult, wg *sync.WaitGroup) {
defer wg.Done()
delay, err := testDelay(p)
if err != nil {
if err == nil {
c <- checkResult{
name: p.Identifier(),
delay: 0,
delay: delay,
}
return
}
c <- checkResult{
name: p.Identifier(),
delay: delay,
}
}
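
The `CleanBadProxies` rewrite replaces the fixed-count receive loop with the standard fan-out/fan-in idiom: each worker sends only successful results, a separate goroutine closes the channel once the `WaitGroup` drains, and the consumer ranges over the channel until it is closed. A stripped-down sketch of that idiom with dummy data:

```go
package main

import (
    "fmt"
    "sync"
)

func main() {
    items := []int{1, 2, 3, 4, 5}
    results := make(chan int, len(items))

    wg := &sync.WaitGroup{}
    wg.Add(len(items))
    for _, it := range items {
        go func(n int) {
            defer wg.Done()
            if n%2 == 0 { // like testProxyDelayToChan, only successes are sent
                results <- n
            }
        }(it)
    }

    // Close the channel only after every worker has finished, so the
    // range below terminates instead of blocking on missing results.
    go func() {
        wg.Wait()
        close(results)
    }()

    for r := range results {
        fmt.Println("ok:", r)
    }
}
```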


@@ -37,15 +37,15 @@ func NewGeoIP(filePath string) (geoip GeoIP) {
}
// find ip info
func (g GeoIP) Find(ipORdomain string) (country string, err error) {
func (g GeoIP) Find(ipORdomain string) (ip, country string, err error) {
ips, err := net.LookupIP(ipORdomain)
if err != nil {
return "", err
return "", "", err
}
ipData := net.ParseIP(ips[0].String())
record, err := g.db.City(ipData)
if err != nil {
return "", err
return "", "", err
}
return record.Country.IsoCode, nil
return ips[0].String(), record.Country.IsoCode, nil
}
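
`Find` now returns the resolved IP alongside the country code; the renaming code below only uses the country (the `SetIP(ip)` call stays commented out). A hedged usage sketch in the same package, with `countryOf` being a made-up helper name:

```go
// Hypothetical helper illustrating the new three-value signature of Find.
func countryOf(g GeoIP, host string) string {
    _, country, err := g.Find(host)
    if err != nil || country == "" {
        return "ZZ" // same fallback NameAddCounrty uses
    }
    return country
}
```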


@@ -3,6 +3,7 @@ package proxy
import (
"fmt"
"sort"
"sync"
)
type ProxyList []Proxy
@@ -63,13 +64,21 @@ func (ps ProxyList) Sort() ProxyList {
func (ps ProxyList) NameAddCounrty() ProxyList {
num := len(ps)
wg := &sync.WaitGroup{}
wg.Add(num)
for i := 0; i < num; i++ {
country, err := geoIp.Find(ps[i].BaseInfo().Server)
if err != nil || country == "" {
country = "Earth"
}
ps[i].SetName(fmt.Sprintf("%s", country))
ii := i
go func() {
defer wg.Done()
_, country, err := geoIp.Find(ps[ii].BaseInfo().Server)
if err != nil || country == "" {
country = "ZZ"
}
ps[ii].SetName(fmt.Sprintf("%s", country))
//ps[ii].SetIP(ip)
}()
}
wg.Wait()
return ps
}
@@ -81,6 +90,24 @@ func (ps ProxyList) NameAddIndex() ProxyList {
return ps
}
func (ps ProxyList) NameReIndex() ProxyList {
num := len(ps)
for i := 0; i < num; i++ {
originName := ps[i].BaseInfo().Name
country := string([]rune(originName)[:2])
ps[i].SetName(fmt.Sprintf("%s_%d", country, i+1))
}
return ps
}
func (ps ProxyList) NameAddTG() ProxyList {
num := len(ps)
for i := 0; i < num; i++ {
ps[i].SetName(fmt.Sprintf("%s %s", ps[i].BaseInfo().Name, "@peekfun"))
}
return ps
}
func Deduplication(src ProxyList) ProxyList {
result := make(ProxyList, 0, len(src))
temp := map[string]struct{}{}
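
`NameReIndex` recovers the country prefix by slicing the first two runes of the existing name rather than the first two bytes, so multi-byte prefixes would not be split. A tiny sketch of that slicing; the sample names are made up but follow the country-prefixed shape produced earlier in the pipeline:

```go
package main

import "fmt"

func main() {
    names := []string{"HK_12 @peekfun", "US_3 @peekfun"} // made-up examples
    for i, n := range names {
        country := string([]rune(n)[:2]) // first two runes, not bytes
        fmt.Printf("%s_%d\n", country, i+1) // HK_1, US_2
    }
}
```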


@@ -34,7 +34,7 @@ type ShadowsocksR struct {
}
func (ssr ShadowsocksR) Identifier() string {
return net.JoinHostPort(ssr.Server, strconv.Itoa(ssr.Port)) + ssr.ProtocolParam
return net.JoinHostPort(ssr.Server, strconv.Itoa(ssr.Port)) + ssr.Password + ssr.ProtocolParam
}
func (ssr ShadowsocksR) String() string {

pkg/tool/colly.go (new file, 27 lines)

@@ -0,0 +1,27 @@
package tool
import (
"net"
"net/http"
"time"
"github.com/gocolly/colly"
)
func GetColly() *colly.Collector {
c := colly.NewCollector(
colly.UserAgent(UserAgent),
)
c.WithTransport(&http.Transport{
Proxy: http.ProxyFromEnvironment,
DialContext: (&net.Dialer{
Timeout: 10 * time.Second, // dial timeout
KeepAlive: 10 * time.Second, // keep-alive interval
}).DialContext,
MaxIdleConns: 100, // maximum number of idle connections
IdleConnTimeout: 20 * time.Second, // idle connection timeout
TLSHandshakeTimeout: 10 * time.Second, // TLS handshake timeout
ExpectContinueTimeout: 10 * time.Second,
})
return c
}
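
`GetColly` centralizes the collector setup (shared User-Agent, proxy from the environment, dial and TLS timeouts) so getters can reuse it instead of calling `colly.NewCollector()` directly, as the tgchannel and fanqiangdang changes above do. A hedged usage sketch; the selector and URL are placeholders:

```go
package main

import (
    "fmt"
    "log"

    "github.com/gocolly/colly"
    "github.com/zu1k/proxypool/pkg/tool"
)

func main() {
    c := tool.GetColly()
    c.OnHTML("a[href]", func(e *colly.HTMLElement) {
        fmt.Println(e.Attr("href")) // print every link found on the page
    })
    if err := c.Visit("https://example.com"); err != nil {
        log.Println("visit failed:", err)
    }
}
```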


@@ -6,6 +6,8 @@ import (
"time"
)
const UserAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.105 Safari/537.36"
type HttpClient struct {
*http.Client
}
@@ -28,7 +30,7 @@ func (c *HttpClient) Get(url string) (resp *http.Response, err error) {
return nil, err
}
req.Header.Set("Accept-Language", "zh-CN,zh;q=0.9,en;q=0.8")
req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.105 Safari/537.36")
req.Header.Set("User-Agent", UserAgent)
return c.Do(req)
}
@@ -38,6 +40,6 @@ func (c *HttpClient) Post(url string, body io.Reader) (resp *http.Response, err
return nil, err
}
req.Header.Set("Accept-Language", "zh-CN,zh;q=0.9,en;q=0.8")
req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.105 Safari/537.36")
req.Header.Set("User-Agent", UserAgent)
return c.Do(req)
}