update cache

This commit is contained in:
zu1k
2020-08-12 09:00:52 +08:00
parent db1a9f995e
commit 01afab646b
4 changed files with 52 additions and 26 deletions

View File

@@ -3,9 +3,10 @@ package api
import (
"os"
"github.com/zu1k/proxypool/app/cache"
"github.com/gin-gonic/gin"
_ "github.com/heroku/x/hmetrics/onload"
"github.com/zu1k/proxypool/app"
"github.com/zu1k/proxypool/provider"
)
@@ -16,9 +17,14 @@ func setupRouter() {
router.StaticFile("/clash/config", "example/clash-config.yaml")
router.GET("/clash/proxies", func(c *gin.Context) {
	// Serve the pre-rendered Clash provider text from the cache.
	// Rebuild it from the cached proxy list only on a cache miss, and
	// store the rendered text so subsequent requests skip the rebuild.
	text := cache.GetString("clashproxies")
	if text == "" {
		proxies := cache.GetProxies()
		clash := provider.Clash{Proxies: proxies}
		text = clash.Provide()
		cache.SetString("clashproxies", text)
	}
	c.String(200, text)
})
}

View File

@@ -1,4 +1,4 @@
package app
package cache
import (
"log"
@@ -23,3 +23,15 @@ func GetProxies() []proxy.Proxy {
// SetProxies stores the full proxy list in the in-memory cache under the
// fixed key "proxies". The entry never expires; it is only replaced by the
// next call to SetProxies.
func SetProxies(proxies []proxy.Proxy) {
c.Set("proxies", proxies, cache.NoExpiration)
}
// SetString caches a plain string value under key with no expiration,
// overwriting any previous value stored under the same key.
func SetString(key, value string) {
c.Set(key, value, cache.NoExpiration)
}
// GetString returns the string cached under key, or "" when the key is
// absent or holds a value of a different type.
func GetString(key string) string {
	result, found := c.Get(key)
	if !found {
		return ""
	}
	// The same cache stores non-string values (e.g. []proxy.Proxy under
	// "proxies"), so a bare result.(string) assertion would panic if this
	// function were called with such a key. Use the comma-ok form and
	// treat a type mismatch like a miss.
	s, ok := result.(string)
	if !ok {
		return ""
	}
	return s
}

View File

@@ -4,40 +4,45 @@ import (
"math/rand"
"strconv"
"github.com/zu1k/proxypool/provider"
"github.com/zu1k/proxypool/app/cache"
"github.com/zu1k/proxypool/getter"
"github.com/zu1k/proxypool/proxy"
)
// CrawlTGChannel collects proxies from Telegram share channels, public
// websites and fuzzy web-page scraping, merges them with the proxies already
// cached, deduplicates, renames every node, and finally stores both the
// proxy list and a pre-rendered Clash provider text in the cache.
//
// NOTE(review): the diff rendering had interleaved the pre-image ("node")
// and post-image ("proxies") statements; this is the post-commit version.
func CrawlTGChannel() {
	proxies := make([]proxy.Proxy, 0)
	// Various node-sharing channels on Telegram.
	proxies = append(proxies, getter.NewTGChannelGetter("https://t.me/s/ssrList", 200).Get()...)
	proxies = append(proxies, getter.NewTGChannelGetter("https://t.me/s/SSRSUB", 200).Get()...)
	proxies = append(proxies, getter.NewTGChannelGetter("https://t.me/s/FreeSSRNode", 200).Get()...)
	proxies = append(proxies, getter.NewTGChannelGetter("https://t.me/s/ssrlists", 200).Get()...)
	proxies = append(proxies, getter.NewTGChannelGetter("https://t.me/s/ssrshares", 200).Get()...)
	proxies = append(proxies, getter.NewTGChannelGetter("https://t.me/s/V2List", 200).Get()...)
	proxies = append(proxies, getter.NewTGChannelGetter("https://t.me/s/ssrtool", 200).Get()...)
	proxies = append(proxies, getter.NewTGChannelGetter("https://t.me/s/vmessr", 200).Get()...)
	proxies = append(proxies, getter.NewTGChannelGetter("https://t.me/s/FreeSSR666", 200).Get()...)
	proxies = append(proxies, getter.NewTGChannelGetter("https://t.me/s/fanqiang666", 200).Get()...)
	// Nodes published openly on various websites.
	proxies = append(proxies, getter.WebFreessrXyz{}.Get()...)
	proxies = append(proxies, getter.WebLucnOrg{}.Get()...)
	// Fuzzy extraction from arbitrary web pages.
	proxies = append(proxies, getter.NewWebFuzz("https://zfjvpn.gitbook.io/").Get()...)
	proxies = append(proxies, getter.NewWebFuzz("https://www.freefq.com/d/file/free-ssr/20200811/1f3e9d0d0064f662457062712dcf1b66.txt").Get()...)
	proxies = append(proxies, getter.NewWebFuzz("https://merlinblog.xyz/wiki/freess.html").Get()...)
	// Merge with previously cached proxies, then drop duplicates.
	proxies = append(proxies, cache.GetProxies()...)
	proxies = proxy.Deduplication(proxies)
	// Give every node a randomized display name.
	for i := range proxies {
		proxies[i].SetName("@tgbotlist_" + strconv.Itoa(rand.Int()))
	}
	// Cache the proxy list and the rendered Clash provider text so the API
	// can serve the latter without regenerating it per request.
	cache.SetProxies(proxies)
	cache.SetString("clashproxies", provider.Clash{Proxies: proxies}.Provide())
}

View File

@@ -1,11 +1,14 @@
package main
import (
"fmt"
"github.com/zu1k/proxypool/api"
"github.com/zu1k/proxypool/app"
)
// main runs one crawl up front so the cache is populated before the HTTP
// API starts, then hands control to the API server.
// NOTE(review): api.Run presumably blocks serving requests — confirm in
// the api package.
func main() {
fmt.Println("Do the first crawl...")
app.CrawlTGChannel()
api.Run()
}