2 changes: 1 addition & 1 deletion v2/go.mod
@@ -76,7 +76,7 @@ require (
require (
github.com/cnf/structhash v0.0.0-20201127153200-e1b16c1ebc08 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/logrusorgru/aurora v2.0.3+incompatible
github.com/logrusorgru/aurora v2.0.3+incompatible // indirect
github.com/miekg/dns v1.1.54 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
2 changes: 2 additions & 0 deletions v2/pkg/passive/sources.go
@@ -31,6 +31,7 @@ import (
"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/hunter"
"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/intelx"
"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/leakix"
"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/netlas"
"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/passivetotal"
"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/quake"
"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/rapiddns"
@@ -70,6 +71,7 @@ var AllSources = [...]subscraping.Source{
&hackertarget.Source{},
&hunter.Source{},
&intelx.Source{},
&netlas.Source{},
&leakix.Source{},
&passivetotal.Source{},
&quake.Source{},
1 change: 1 addition & 0 deletions v2/pkg/passive/sources_test.go
@@ -32,6 +32,7 @@ var (
"github",
"hackertarget",
"intelx",
"netlas",
"passivetotal",
"quake",
"rapiddns",
6 changes: 3 additions & 3 deletions v2/pkg/runner/options.go
@@ -121,7 +121,7 @@ func ParseOptions() *Options {
flagSet.CreateGroup("update", "Update",
flagSet.CallbackVarP(GetUpdateCallback(), "update", "up", "update subfinder to latest version"),
flagSet.BoolVarP(&options.DisableUpdateCheck, "disable-update-check", "duc", false, "disable automatic subfinder update check"),
)
)

createGroup(flagSet, "output", "Output",
flagSet.StringVarP(&options.OutputFile, "output", "o", "", "file to write output to"),
@@ -231,8 +231,8 @@ func (options *Options) loadProvidersFrom(location string) {

// We skip bailing out if file doesn't exist because we'll create it
// at the end of options parsing from default via goflags.
if err := UnmarshalFrom(location); isFatalErr(err) && !errors.Is(err, os.ErrNotExist) {
gologger.Fatal().Msgf("Could not read providers from %s: %s\n", location, err)
if err := UnmarshalFrom(location); err != nil && !strings.Contains(err.Error(), "file doesn't exist") && !errors.Is(err, os.ErrNotExist) {
gologger.Error().Msgf("Could not read providers from %s: %s\n", location, err)
}
}

194 changes: 194 additions & 0 deletions v2/pkg/subscraping/sources/netlas/netlas.go
@@ -0,0 +1,194 @@
// Package netlas implements a subdomain source backed by the Netlas API.
package netlas

import (
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"strconv"
"time"

"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
)

type Item struct {
Data struct {
A []string `json:"a,omitempty"`
Txt []string `json:"txt,omitempty"`
LastUpdated string `json:"last_updated,omitempty"`
Timestamp string `json:"@timestamp,omitempty"`
Ns []string `json:"ns,omitempty"`
Level int `json:"level,omitempty"`
Zone string `json:"zone,omitempty"`
Domain string `json:"domain,omitempty"`
Cname []string `json:"cname,omitempty"`
Mx []string `json:"mx,omitempty"`
} `json:"data"`
}

type DomainsResponse struct {
Items []Item `json:"items"`
Took int `json:"took"`
}

type DomainsCountResponse struct {
Count int `json:"count"`
}

// Source is the passive scraping agent
type Source struct {
apiKeys []string
timeTaken time.Duration
errors int
results int
skipped bool
}

func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
results := make(chan subscraping.Result)
s.errors = 0
s.results = 0

go func() {
defer func(startTime time.Time) {
s.timeTaken = time.Since(startTime)
close(results)
}(time.Now())

// First, request the total number of matching domains so the results can be paged
endpoint := "https://app.netlas.io/api/domains_count/"
params := url.Values{}
countQuery := fmt.Sprintf("domain:*.%s AND NOT domain:%s", domain, domain)
params.Set("q", countQuery)
countUrl := endpoint + "?" + params.Encode()

// Pick an API key
randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())
resp, err := session.HTTPRequest(ctx, http.MethodGet, countUrl, "", map[string]string{
"accept": "application/json",
"X-API-Key": randomApiKey,
}, nil, subscraping.BasicAuth{})

if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
s.errors++
return
} else if resp.StatusCode != 200 {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("unexpected status code %d", resp.StatusCode)}
s.errors++
return
}
defer resp.Body.Close()

body, err := io.ReadAll(resp.Body)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("error reading response body")}
s.errors++
return
}

// Parse the JSON response
var domainsCount DomainsCountResponse
err = json.Unmarshal(body, &domainsCount)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
s.errors++
return
}

// Define the API endpoint URL and query parameters, then page through
// the results 20 at a time using the count obtained above.

for i := 0; i < domainsCount.Count; i += 20 {

// Throttle paging: wait one second between requests.
time.Sleep(1000 * time.Millisecond)
offset := strconv.Itoa(i)

endpoint := "https://app.netlas.io/api/domains/"
params := url.Values{}
query := fmt.Sprintf("domain:(domain:*.%s AND NOT domain:%s)", domain, domain)
params.Set("q", query)
params.Set("source_type", "include")
params.Set("start", offset)
params.Set("fields", "*")
apiUrl := endpoint + "?" + params.Encode()

// Pick an API key
randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())

resp, err := session.HTTPRequest(ctx, http.MethodGet, apiUrl, "", map[string]string{
"accept": "application/json",
"X-API-Key": randomApiKey}, nil, subscraping.BasicAuth{})
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
s.errors++
return
}
body, err := io.ReadAll(resp.Body)
// Close the body before the next iteration; a deferred close inside the
// loop would not run until the goroutine exits.
resp.Body.Close()
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("error reading response body")}
s.errors++
return
}

if resp.StatusCode == 429 {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("request rate limited with status code %d", resp.StatusCode)}
s.errors++
break
}

// Parse the response body and extract the domain values
var data DomainsResponse
err = json.Unmarshal(body, &data)
if err != nil {
results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
s.errors++
return
}

for _, item := range data.Items {
results <- subscraping.Result{
Source: s.Name(), Type: subscraping.Subdomain, Value: item.Data.Domain,
}
s.results++
}
}

}()

return results
}

// Name returns the name of the source
func (s *Source) Name() string {
return "netlas"
}

func (s *Source) IsDefault() bool {
return false
}

func (s *Source) HasRecursiveSupport() bool {
return false
}

func (s *Source) NeedsKey() bool {
return true
}

func (s *Source) AddApiKeys(keys []string) {
s.apiKeys = keys
}

func (s *Source) Statistics() subscraping.Statistics {
return subscraping.Statistics{
Errors: s.errors,
Results: s.results,
TimeTaken: s.timeTaken,
Skipped: s.skipped,
}
}
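
For reference, the DomainsResponse/Item structs above imply a payload shaped roughly like the one in this standalone sketch. The JSON values are illustrative only, not actual Netlas output, and the types are trimmed to the single field the source forwards (data.domain).

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed mirror of the DomainsResponse/Item types from netlas.go,
// keeping only the field the source actually emits.
type item struct {
	Data struct {
		Domain string `json:"domain,omitempty"`
	} `json:"data"`
}

type domainsResponse struct {
	Items []item `json:"items"`
	Took  int    `json:"took"`
}

func main() {
	// Illustrative payload; a real response also carries a, txt, ns, cname,
	// mx and timestamp fields, as declared in the full Item struct above.
	payload := []byte(`{"took": 3, "items": [
		{"data": {"domain": "dev.example.com"}},
		{"data": {"domain": "mail.example.com"}}
	]}`)

	var resp domainsResponse
	if err := json.Unmarshal(payload, &resp); err != nil {
		panic(err)
	}
	for _, it := range resp.Items {
		fmt.Println(it.Data.Domain) // the value emitted as a Subdomain result
	}
}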
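And a minimal sketch of how a caller might drive the new source, assuming it already holds a *subscraping.Session. The collectNetlas helper is hypothetical and not part of this change; the Run/AddApiKeys calls and the Result fields match the interface used in netlas.go above.

package example

import (
	"context"

	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/netlas"
)

// collectNetlas (hypothetical helper) wires API keys into the source, runs it
// against a domain and drains the results channel until the source closes it.
func collectNetlas(ctx context.Context, domain string, session *subscraping.Session, keys []string) ([]string, []error) {
	src := &netlas.Source{}
	src.AddApiKeys(keys)

	var subdomains []string
	var errs []error
	for result := range src.Run(ctx, domain, session) {
		switch result.Type {
		case subscraping.Subdomain:
			subdomains = append(subdomains, result.Value)
		case subscraping.Error:
			errs = append(errs, result.Error)
		}
	}
	return subdomains, errs
}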