
Merge pull request #274 from vzamanillo/improved-http-error-handling
Improved HTTP error handling
ehsandeep authored Jul 21, 2020
2 parents b0e9587 + c0cec04 commit 6941175
Showing 30 changed files with 122 additions and 170 deletions.
1 change: 0 additions & 1 deletion config.yaml
@@ -19,7 +19,6 @@ sources:
 - certspotterold
 - commoncrawl
 - crtsh
-- digicert
 - dnsdumpster
 - dnsdb
 - entrust

5 changes: 1 addition & 4 deletions pkg/passive/sources.go
@@ -11,7 +11,6 @@ import (
 	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/certspotterold"
 	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/commoncrawl"
 	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/crtsh"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/digicert"
 	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/dnsdb"
 	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/dnsdumpster"
 	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/entrust"
@@ -45,10 +44,10 @@ var DefaultSources = []string{
 	"certspotterold",
 	"commoncrawl",
 	"crtsh",
-	"digicert",
 	"dnsdumpster",
 	"dnsdb",
 	"entrust",
+	"github",
 	"hackertarget",
 	"ipv4info",
 	"intelx",
@@ -107,8 +106,6 @@ func (a *Agent) addSources(sources []string) {
 		a.sources[source] = &commoncrawl.Source{}
 	case "crtsh":
 		a.sources[source] = &crtsh.Source{}
-	case "digicert":
-		a.sources[source] = &digicert.Source{}
 	case "dnsdumpster":
 		a.sources[source] = &dnsdumpster.Source{}
 	case "dnsdb":

28 changes: 21 additions & 7 deletions pkg/subscraping/agent.go
@@ -3,7 +3,11 @@ package subscraping
 import (
 	"context"
 	"crypto/tls"
+	"fmt"
+	"io"
+	"io/ioutil"
 	"net/http"
+	"net/url"
 	"time"
 )
 
@@ -44,12 +48,7 @@ func (s *Session) NormalGetWithContext(ctx context.Context, url string) (*http.R
 	req.Header.Set("Accept", "*/*")
 	req.Header.Set("Accept-Language", "en")
 
-	resp, err := s.Client.Do(req)
-	if err != nil {
-		return nil, err
-	}
-
-	return resp, nil
+	return httpRequestWrapper(s.Client, req)
 }
 
 // Get makes a GET request to a URL
@@ -73,10 +72,25 @@ func (s *Session) Get(ctx context.Context, url string, cookies string, headers m
 		}
 	}
 
-	resp, err := s.Client.Do(req)
+	return httpRequestWrapper(s.Client, req)
+}
+
+func (s *Session) DiscardHttpResponse(response *http.Response) {
+	if response != nil {
+		io.Copy(ioutil.Discard, response.Body)
+		response.Body.Close()
+	}
+}
+
+func httpRequestWrapper(client *http.Client, request *http.Request) (*http.Response, error) {
+	resp, err := client.Do(request)
 	if err != nil {
 		return nil, err
 	}
 
+	if resp.StatusCode != http.StatusOK {
+		requestUrl, _ := url.QueryUnescape(request.URL.String())
+		return resp, fmt.Errorf("Unexpected status code %d received from %s", resp.StatusCode, requestUrl)
+	}
 	return resp, nil
 }

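Every per-source change that follows applies the same pattern on top of these two helpers: httpRequestWrapper turns any non-200 response into an error while still returning the response, and DiscardHttpResponse drains and closes the body (including the nil case) so the underlying connection can be reused. A source therefore no longer checks status codes itself; it reports the error and hands the possibly non-nil response to session.DiscardHttpResponse before returning. Below is a minimal sketch of a source written against this pattern, with a hypothetical source name and endpoint URL, assuming the Run/Name signatures used by the existing sources:

package example

import (
	"context"
	"fmt"

	"github.com/projectdiscovery/subfinder/pkg/subscraping"
)

// Source is a hypothetical passive source, used only to illustrate the new error handling.
type Source struct{}

// Run queries a made-up endpoint and streams results, mirroring the pattern adopted by the real sources below.
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)

	go func() {
		resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://api.example.com/subdomains/%s", domain))
		if err != nil {
			// Non-200 responses now surface here as errors with resp non-nil,
			// so drain and close the body before giving up.
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			session.DiscardHttpResponse(resp)
			close(results)
			return
		}

		// ... decode resp.Body and emit subscraping.Result values of type subscraping.Subdomain ...
		resp.Body.Close()
		close(results)
	}()

	return results
}

// Name returns the source identifier.
func (s *Source) Name() string {
	return "example"
}
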
11 changes: 2 additions & 9 deletions pkg/subscraping/sources/alienvault/alienvault.go
@@ -4,8 +4,6 @@ import (
 	"context"
 	"encoding/json"
 	"fmt"
-	"io"
-	"io/ioutil"
 
 	"github.com/projectdiscovery/subfinder/pkg/subscraping"
 )
@@ -27,16 +25,11 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 	resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://otx.alienvault.com/api/v1/indicators/domain/%s/passive_dns", domain))
 	if err != nil {
 		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+		session.DiscardHttpResponse(resp)
 		close(results)
 		return
 	}
-	if resp.StatusCode != 200 {
-		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("invalid status code received: %d", resp.StatusCode)}
-		io.Copy(ioutil.Discard, resp.Body)
-		resp.Body.Close()
-		close(results)
-		return
-	}
+
 	otxResp := &alienvaultResponse{}
 	// Get the response body and decode
 	err = json.NewDecoder(resp.Body).Decode(&otxResp)

1 change: 1 addition & 0 deletions pkg/subscraping/sources/archiveis/archiveis.go
@@ -27,6 +27,7 @@ func (a *ArchiveIs) enumerate(ctx context.Context, baseURL string) {
 	resp, err := a.Session.NormalGetWithContext(ctx, baseURL)
 	if err != nil {
 		a.Results <- subscraping.Result{Source: "archiveis", Type: subscraping.Error, Error: err}
+		a.Session.DiscardHttpResponse(resp)
 		return
 	}
 

1 change: 1 addition & 0 deletions pkg/subscraping/sources/binaryedge/binaryedge.go
@@ -29,6 +29,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 	resp, err := session.Get(ctx, fmt.Sprintf("https://api.binaryedge.io/v2/query/domains/subdomain/%s", domain), "", map[string]string{"X-Key": session.Keys.Binaryedge})
 	if err != nil {
 		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+		session.DiscardHttpResponse(resp)
 		close(results)
 		return
 	}

1 change: 1 addition & 0 deletions pkg/subscraping/sources/bufferover/bufferover.go
@@ -31,6 +31,7 @@ func (s *Source) getData(ctx context.Context, URL string, session *subscraping.S
 	resp, err := session.NormalGetWithContext(ctx, URL)
 	if err != nil {
 		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+		session.DiscardHttpResponse(resp)
 		return
 	}
 

1 change: 1 addition & 0 deletions pkg/subscraping/sources/certspotter/certspotter.go
@@ -29,6 +29,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 	resp, err := session.Get(ctx, fmt.Sprintf("https://api.certspotter.com/v1/issuances?domain=%s&include_subdomains=true&expand=dns_names", domain), "", map[string]string{"Authorization": "Bearer " + session.Keys.Certspotter})
 	if err != nil {
 		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+		session.DiscardHttpResponse(resp)
 		close(results)
 		return
 	}

1 change: 1 addition & 0 deletions pkg/subscraping/sources/certspotterold/certspotterold.go
@@ -19,6 +19,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 	resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://certspotter.com/api/v0/certs?domain=%s", domain))
 	if err != nil {
 		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+		session.DiscardHttpResponse(resp)
 		close(results)
 		return
 	}

11 changes: 1 addition & 10 deletions pkg/subscraping/sources/commoncrawl/commoncrawl.go
@@ -2,9 +2,7 @@ package commoncrawl
 
 import (
 	"context"
-	"errors"
 	"fmt"
-	"io"
 	"io/ioutil"
 	"net/url"
 	"strings"
@@ -33,14 +31,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 	resp, err := session.NormalGetWithContext(ctx, indexURL)
 	if err != nil {
 		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-		close(results)
-		return
-	}
-
-	if resp.StatusCode == 500 {
-		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: errors.New("internal server error")}
-		io.Copy(ioutil.Discard, resp.Body)
-		resp.Body.Close()
+		session.DiscardHttpResponse(resp)
 		close(results)
 		return
 	}

1 change: 1 addition & 0 deletions pkg/subscraping/sources/crtsh/crtsh.go
@@ -66,6 +66,7 @@ func (s *Source) getSubdomainsFromHTTP(ctx context.Context, domain string, sessi
 	resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://crt.sh/?q=%%25.%s&output=json", domain))
 	if err != nil {
 		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+		session.DiscardHttpResponse(resp)
 		return false
 	}
 

50 changes: 0 additions & 50 deletions pkg/subscraping/sources/digicert/digicert.go

This file was deleted.

12 changes: 1 addition & 11 deletions pkg/subscraping/sources/dnsdb/dnsdb.go
@@ -5,8 +5,6 @@ import (
 	"context"
 	"encoding/json"
 	"fmt"
-	"io"
-	"io/ioutil"
 	"strings"
 
 	"github.com/projectdiscovery/subfinder/pkg/subscraping"
@@ -36,15 +34,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 	resp, err := session.Get(ctx, fmt.Sprintf("https://api.dnsdb.info/lookup/rrset/name/*.%s?limit=1000000000000", domain), "", headers)
 	if err != nil {
 		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-		close(results)
-		return
-	}
-
-	// Check status code
-	if resp.StatusCode != 200 {
-		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("invalid status code received: %d", resp.StatusCode)}
-		io.Copy(ioutil.Discard, resp.Body)
-		resp.Body.Close()
+		session.DiscardHttpResponse(resp)
 		close(results)
 		return
 	}

1 change: 1 addition & 0 deletions pkg/subscraping/sources/dnsdumpster/dnsdumpster.go
@@ -76,6 +76,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 	resp, err := session.NormalGetWithContext(ctx, "https://dnsdumpster.com/")
 	if err != nil {
 		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+		session.DiscardHttpResponse(resp)
 		close(results)
 		return
 	}

1 change: 1 addition & 0 deletions pkg/subscraping/sources/entrust/entrust.go
@@ -20,6 +20,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 	resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://ctsearch.entrust.com/api/v1/certificates?fields=issuerCN,subjectO,issuerDN,issuerO,subjectDN,signAlg,san,publicKeyType,publicKeySize,validFrom,validTo,sn,ev,logEntries.logName,subjectCNReversed,cert&domain=%s&includeExpired=true&exactMatch=false&limit=5000", domain))
 	if err != nil {
 		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+		session.DiscardHttpResponse(resp)
 		close(results)
 		return
 	}