Skip to content

Commit

Permalink
1. Fixed filefuzz: bug where the object is nil when the network is abnormal
Browse files Browse the repository at this point in the history

2. Fixed #44
3. Memory overhead optimization (2022-07-30)
  • Loading branch information
x51pwn committed Jul 30, 2022
1 parent 9cc16da commit 043a07b
Show file tree
Hide file tree
Showing 39 changed files with 145 additions and 90 deletions.
8 changes: 4 additions & 4 deletions brute/admin_brute.go
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ func getinput(inputurl string) (usernamekey string, passwordkey string, loginurl
if util.StrContains(req.Body, "md5.js") {
ismd5 = true
}
u, err := url.Parse(req.RequestUrl)
u, err := url.Parse(strings.TrimSpace(req.RequestUrl))
if err != nil {
return "", "", "", false
}
Expand All @@ -30,7 +30,7 @@ func getinput(inputurl string) (usernamekey string, passwordkey string, loginurl
}
hreflist := regexp.MustCompile(`location.href=['"](.*?)['"]`).FindStringSubmatch(req.Body)
if hreflist != nil {
href, _ := url.Parse(hreflist[len(hreflist)-1:][0])
href, _ := url.Parse(strings.TrimSpace(hreflist[len(hreflist)-1:][0]))
hrefurl := u.ResolveReference(href)
req, err = util.HttpRequset(hrefurl.String(), "GET", "", true, nil)
if err != nil {
Expand All @@ -57,13 +57,13 @@ func getinput(inputurl string) (usernamekey string, passwordkey string, loginurl
}
domainlist := regexp.MustCompile(`<form.*?action=['"](.*?)['"]`).FindStringSubmatch(req.Body)
if domainlist != nil {
if action, err := url.Parse(domainlist[len(domainlist)-1:][0]); err == nil {
if action, err := url.Parse(strings.TrimSpace(domainlist[len(domainlist)-1:][0])); err == nil {
loginurl = u.ResolveReference(action).String()
}
} else {
domainlist2 := regexp.MustCompile(`url.*?:.*?['"](.*?)['"],`).FindStringSubmatch(req.Body)
if domainlist2 != nil {
if ajax, err := url.Parse(domainlist2[len(domainlist2)-1:][0]); err == nil {
if ajax, err := url.Parse(strings.TrimSpace(domainlist2[len(domainlist2)-1:][0])); err == nil {
loginurl = u.ResolveReference(ajax).String()
}
}
Expand Down
4 changes: 2 additions & 2 deletions brute/check_loginpage.go
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,11 @@ func CheckLoginPage(inputurl string) bool {
cssurl := regexp.MustCompile(`<link[^>]*href=['"](.*?)['"]`).FindAllStringSubmatch(req.Body, -1)
for _, v := range cssurl {
if strings.Contains(v[1], ".css") {
u, err := url.Parse(inputurl)
u, err := url.Parse(strings.TrimSpace(inputurl))
if err != nil {
return false
}
href, err := url.Parse(v[1])
href, err := url.Parse(strings.TrimSpace(v[1]))
if err != nil {
return false
}
Expand Down
34 changes: 29 additions & 5 deletions brute/filefuzz.go
Original file line number Diff line number Diff line change
Expand Up @@ -157,15 +157,26 @@ var RandStr = file_not_support + "_scan4all"
// 随机10个字符串
var RandStr4Cookie = util.RandStringRunes(10)

// 避免重复
var noRpt sync.Map

// 不知道为什 FileFuzz 始终出现重复
var noRptLc = sync.RWMutex{}

// 重写了fuzz:优化流程、优化算法、修复线程安全bug、增加智能功能
func FileFuzz(u string, indexStatusCode int, indexContentLength int, indexbody string) ([]string, []string) {
if eableFileFuzz {
return []string{}, []string{}
}
u01, err := url.Parse(u)
u01, err := url.Parse(strings.TrimSpace(u))
if nil == err {
u = u01.Scheme + "://" + u01.Host + "/"
}
szKey001 := "FileFuzz" + u
szKey001Over := "FileFuzzOver" + u
//noRptLc.Lock()
if _, ok := noRpt.Load(szKey001); ok || eableFileFuzz {
return []string{}, []string{}
}
noRpt.Store(szKey001, true)
//noRptLc.Unlock()
var (
path404 = RandStr // 绝对404页面路径
errorTimes int32 = 0 // 错误计数器,> 20则退出fuzz
Expand All @@ -189,6 +200,8 @@ func FileFuzz(u string, indexStatusCode int, indexContentLength int, indexbody s
} else {
return []string{}, []string{}
}
} else {
return []string{}, []string{}
}
var wg sync.WaitGroup
// 中途控制关闭当前目标所有fuzz
Expand All @@ -199,7 +212,7 @@ func FileFuzz(u string, indexStatusCode int, indexContentLength int, indexbody s
var async_data = make(chan []string, 64)
var async_technologies = make(chan []string, 64)
defer func() {
util.CloseChan(ch)
close(ch)
close(async_data)
close(async_technologies)
}()
Expand Down Expand Up @@ -238,10 +251,19 @@ func FileFuzz(u string, indexStatusCode int, indexContentLength int, indexbody s
}()
for {
select {
case _, ok := <-ch:
if !ok {
stop()
return
}
case <-ctx.Done(): // 00-捕获所有线程关闭信号,并退出,close for all
atomic.AddInt32(&errorTimes, 21)
return
default:
if _, ok := noRpt.Load(szKey001Over); ok {
stop()
return
}
// 01-异常>20关闭所有fuzz
if atomic.LoadInt32(&errorTimes) >= 20 {
stop() //发停止指令
Expand Down Expand Up @@ -318,6 +340,8 @@ func FileFuzz(u string, indexStatusCode int, indexContentLength int, indexbody s
// 默认情况等待所有结束
wg.Wait()
stop() //发停止指令
noRpt.Store(szKey001Over, true)
log.Printf("fuzz is over: %s\n", u)
return path, technologies
}

Expand Down
28 changes: 15 additions & 13 deletions brute/fuzzAI.go
Original file line number Diff line number Diff line change
Expand Up @@ -44,18 +44,20 @@ var asz404UrlKey = "asz404Url"

// 初始化字典到库中,且防止重复
func init() {
fuzz404 = util.GetVal4File("fuzz404", fuzz404)
sz404Url = util.GetVal4File("404url", sz404Url)
page404Title = strings.Split(strings.TrimSpace(fuzz404), "\n")
asz404Url = strings.Split(strings.TrimSpace(sz404Url), "\n")
data, err := util.NewKvDbOp().Get(asz404UrlKey)
if nil == err && 0 < len(data) {
aT1 := asz404Url
if nil != json.Unmarshal(data, &asz404Url) {
asz404Url = aT1 // 容错
util.RegInitFunc(func() {
fuzz404 = util.GetVal4File("fuzz404", fuzz404)
sz404Url = util.GetVal4File("404url", sz404Url)
page404Title = strings.Split(strings.TrimSpace(fuzz404), "\n")
asz404Url = strings.Split(strings.TrimSpace(sz404Url), "\n")
data, err := util.NewKvDbOp().Get(asz404UrlKey)
if nil == err && 0 < len(data) {
aT1 := asz404Url
if nil != json.Unmarshal(data, &asz404Url) {
asz404Url = aT1 // 容错
}
}
}
db.GetDb(&ErrPage{})
db.GetDb(&ErrPage{})
})
}

// 智能学习: 非正常页面,并记录到库中永久使用,使用该方法到页面
Expand All @@ -69,7 +71,7 @@ func StudyErrPageAI(req *util.Response, page *Page, fingerprintsTag string) {
return
}
util.DoSyncFunc(func() {
var data *ErrPage
var data = &ErrPage{}
body := []byte(req.Body)
szHs, szMd5 := fingerprint.GetHahsMd5(body)
// 这里后期优化基于其他查询
Expand Down Expand Up @@ -127,7 +129,7 @@ func CheckIsErrPageAI(req *util.Response, page *Page) bool {
db.Create[ErrPage](data)
return true
}
u01, err := url.Parse(*page.Url)
u01, err := url.Parse(strings.TrimSpace(*page.Url))
if nil == err && 2 < len(u01.Path) {
// 加 404 url判断
if pkg.Contains4sub[string](asz404Url, u01.Path) {
Expand Down
4 changes: 2 additions & 2 deletions lib/Smuggling/CheckSmuggling.go
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ func checkSmuggling4Poc(ClTePayload *[]string, nTimes int, r1 *Smuggling, r *soc
szBody 是为了 相同url 相同payload 的情况下,只发一次请求,进行多次判断而设计,Smuggling 的场景通常不存在
做一次 http
util.PocCheck_pipe <- util.PocCheck{
util.PocCheck_pipe <- &util.PocCheck{
Wappalyzertechnologies: &[]string{"httpCheckSmuggling"},
URL: finalURL,
FinalURL: finalURL,
Expand Down Expand Up @@ -90,7 +90,7 @@ Content-Type: application/x-www-form-urlencoded
Content-Length: 10
x=`}
u, err := url.Parse(szUrl)
u, err := url.Parse(strings.TrimSpace(szUrl))
if nil != err {
log.Println("GenerateHttpSmugglingPay url.Parse err: ", err)
return ""
Expand Down
2 changes: 1 addition & 1 deletion lib/goby/goby_pocs/showDocGo.txt
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,7 @@ func init() {
select {
case webConsleID := <-waitSessionCh:
log.Println("[DEBUG] session created at:", webConsleID)
if u, err := url.Parse(webConsleID); err == nil {
if u, err := url.Parse(strings.TrimSpace(webConsleID)); err == nil {
expResult.Success = true
expResult.OutputType = "html"
sid := strings.Join(u.Query()["id"], "")
Expand Down
2 changes: 1 addition & 1 deletion lib/socket/ConnTarget.go
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ type CheckTarget struct {
// 准备要检测、链接带目标
// 需要考虑 ssl的情况
func NewCheckTarget(szUrl, SzType string, readWriteTimeout int) *CheckTarget {
u, err := url.Parse(szUrl)
u, err := url.Parse(strings.TrimSpace(szUrl))

if "" == SzType {
SzType = "tcp"
Expand Down
8 changes: 0 additions & 8 deletions lib/util/CheckUtil.go

This file was deleted.

6 changes: 4 additions & 2 deletions lib/util/Const.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ package util

import (
"context"
"fmt"
"net/http"
"os"
"regexp"
Expand Down Expand Up @@ -50,6 +51,7 @@ func DoSyncFunc(cbk func()) {
for {
select {
case <-Ctx_global.Done():
fmt.Println("接收到全局退出事件")
return
default:
cbk()
Expand Down Expand Up @@ -86,7 +88,7 @@ type PocCheck struct {
}

// go POC 检测管道,避免循环引用
var PocCheck_pipe = make(chan PocCheck, 64)
var PocCheck_pipe = make(chan *PocCheck, 64)

// 头信息同一检查,并调用合适到go poc进一步爆破、检测
// 1、需要认证
Expand All @@ -106,7 +108,7 @@ func CheckHeader(header *http.Header, szUrl string) {
a1 = append(a1, "shiro")
}
if 0 < len(a1) && os.Getenv("NoPOC") != "true" {
PocCheck_pipe <- PocCheck{Wappalyzertechnologies: &a1, URL: szUrl, FinalURL: szUrl, Checklog4j: false}
PocCheck_pipe <- &PocCheck{Wappalyzertechnologies: &a1, URL: szUrl, FinalURL: szUrl, Checklog4j: false}
}
}
}()
Expand Down
2 changes: 1 addition & 1 deletion lib/util/HoneypotDetection.go
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ func HoneyportDetection(host string) bool {
if "http" != strings.ToLower(host[0:4]) {
host = "http://" + host
}
oUrl, err := url.Parse(host)
oUrl, err := url.Parse(strings.TrimSpace(host))
if err != err {
return false
}
Expand Down
2 changes: 1 addition & 1 deletion lib/util/doPy3log4j.go
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ func DoLog4j(szUrl string) {
if "" == EsUrl {
EsUrl = GetValByDefault("esUrl", "http://127.0.0.1:9200/%s_index/_doc/%s")
}
oUrl, err := url.Parse(EsUrl)
oUrl, err := url.Parse(strings.TrimSpace(EsUrl))
if nil == err {
p1, err := os.Getwd()
if nil == err {
Expand Down
2 changes: 1 addition & 1 deletion lib/util/kvDb.go
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ func (r *KvDbOp) Close() {
func (r *KvDbOp) GetKeyForData(key string) (szRst []byte) {
data, err := r.Get(key)
if nil != err {
log.Println("GetKeyForData ", key, " is err ", err)
//log.Println("GetKeyForData ", key, " is err ", err)
return []byte{}
}
return data
Expand Down
4 changes: 2 additions & 2 deletions lib/util/util.go
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ func HttpRequsetBasic(username string, password string, urlstring string, method
var tr *http.Transport
var err error
if HttpProxy != "" {
uri, _ := url.Parse(HttpProxy)
uri, _ := url.Parse(strings.TrimSpace(HttpProxy))
tr = &http.Transport{
MaxIdleConnsPerHost: -1,
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
Expand Down Expand Up @@ -110,7 +110,7 @@ func HttpRequsetBasic(username string, password string, urlstring string, method
func HttpRequset(urlstring string, method string, postdata string, isredirect bool, headers map[string]string) (*Response, error) {
var tr *http.Transport
if HttpProxy != "" {
uri, _ := url.Parse(HttpProxy)
uri, _ := url.Parse(strings.TrimSpace(HttpProxy))
tr = &http.Transport{
MaxIdleConnsPerHost: -1,
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
Expand Down
4 changes: 1 addition & 3 deletions main.go
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@ var Wg sync.WaitGroup

func main() {
runtime.GOMAXPROCS(runtime.NumCPU())
util.DoInit(&config)
util.Wg = &Wg
util.DoInit(&config)
defer util.CloseAll()
szTip := ""
if util.GetValAsBool("enablDevDebug") {
Expand All @@ -33,8 +33,6 @@ func main() {
//////////////////////////////////////////*/
}
api.StartScan(nil)

log.Printf("wait for all threads to end\n%s", szTip)
util.Wg.Wait()

}
2 changes: 1 addition & 1 deletion pkg/fingerprint/fingerScan.go
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ func CaseMethod(szUrl, method, bodyString, favhash, md5Body, hexBody string, fin
log.Printf("%+v", finp)
return cms
}
u01, _ := url.Parse(szUrl)
u01, _ := url.Parse(strings.TrimSpace(szUrl))
if _, ok := Mfavhash.Load(u01.Host + favhash); ok {
return cms
}
Expand Down
4 changes: 2 additions & 2 deletions pkg/fingerprint/getFavicon.go
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ func xegexpjs(reg string, resp string) (reslut1 [][]string) {
func getfavicon(httpbody string, turl string) (hash string, md5 string) {
faviconpaths := xegexpjs(`href="(.*?favicon....)"`, httpbody)
var faviconpath string
u, err := url.Parse(turl)
u, err := url.Parse(strings.TrimSpace(turl))
if err != nil {
panic(err)
}
Expand Down Expand Up @@ -68,7 +68,7 @@ func Favicohash4key(host, key string) (hash string, md5R string) {
timeout := time.Duration(8 * time.Second)
var tr *http.Transport
if util.HttpProxy != "" {
uri, _ := url.Parse(util.HttpProxy)
uri, _ := url.Parse(strings.TrimSpace(util.HttpProxy))
tr = &http.Transport{
MaxIdleConnsPerHost: -1,
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
Expand Down
2 changes: 1 addition & 1 deletion pkg/httpx/common/hashes/jarmhash.go
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ func fingerprint(t target, duration int) string {
}
func Jarm(host string, duration int) string {
t := target{}
if u, err := url.Parse(host); err == nil {
if u, err := url.Parse(strings.TrimSpace(host)); err == nil {
if u.Scheme == "http" {
return ""
}
Expand Down
2 changes: 1 addition & 1 deletion pkg/httpx/common/httpx/httpx.go
Original file line number Diff line number Diff line change
Expand Up @@ -114,7 +114,7 @@ func New(options *Options) (*HTTPX, error) {
}

if httpx.Options.HTTPProxy != "" {
proxyURL, parseErr := url.Parse(httpx.Options.HTTPProxy)
proxyURL, parseErr := url.Parse(strings.TrimSpace(httpx.Options.HTTPProxy))
if parseErr != nil {
return nil, parseErr
}
Expand Down
2 changes: 1 addition & 1 deletion pkg/httpx/common/stringz/stringz.go
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ func RemoveURLDefaultPort(rawURL string) string {
}

func GetInvalidURI(rawURL string) (bool, string) {
if _, err := url.Parse(rawURL); err != nil {
if _, err := url.Parse(strings.TrimSpace(rawURL)); err != nil {
if u, err := urlutil.Parse(rawURL); err == nil {
return true, u.RequestURI
}
Expand Down
Loading

0 comments on commit 043a07b

Please sign in to comment.