diff --git a/scraper.go b/scraper.go
index 8885bec..badcf8d 100644
--- a/scraper.go
+++ b/scraper.go
@@ -8,7 +8,7 @@ import (
 
 type crawlFunc func(*html.Node)
 
-func searchElem(n *html.Node, data string) chan *html.Node {
+func SearchElem(n *html.Node, data string) chan *html.Node {
 	ch := make(chan *html.Node)
 	var crawl crawlFunc
 	crawl = func(n *html.Node) {
@@ -29,7 +29,7 @@ func searchElem(n *html.Node, data string) chan *html.Node {
 	return ch
 }
 
-func searchAttr(n *html.Node, key, contains string) chan *html.Node {
+func SearchAttr(n *html.Node, key, contains string) chan *html.Node {
 	ch := make(chan *html.Node)
 	var crawl crawlFunc
 	crawl = func(n *html.Node) {
@@ -52,14 +52,14 @@ func searchAttr(n *html.Node, key, contains string) chan *html.Node {
 	return ch
 }
 
-func searchElemAttr(n *html.Node, elem, key, value string) chan *html.Node {
+func SearchElemAttr(n *html.Node, elem, key, value string) chan *html.Node {
 	ch := make(chan *html.Node)
 	go func() {
 		defer close(ch)
-		for e := range searchElem(n, elem) {
+		for e := range SearchElem(n, elem) {
 			// If document is too large there are
 			// would be a hundreds of goroutines :((
-			for attr := range searchAttr(e, key, value) {
+			for attr := range SearchAttr(e, key, value) {
 				ch <- attr
 			}
 		}
@@ -67,7 +67,7 @@ func searchElemAttr(n *html.Node, elem, key, value string) chan *html.Node {
 	return ch
}
 
-func crawlText(n *html.Node) string {
+func CrawlText(n *html.Node) string {
 	var s = new(strings.Builder)
 	var crawl crawlFunc
 	crawl = func(n *html.Node) {
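
For reference, a minimal usage sketch of the now-exported API from another package. This is not part of the diff; the package name `scraper`, the import path `example.com/scraper`, and the example URL are assumptions.

```go
package main

import (
	"fmt"
	"log"
	"net/http"

	"golang.org/x/net/html"

	"example.com/scraper" // hypothetical import path for the package above
)

func main() {
	resp, err := http.Get("https://example.com")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	doc, err := html.Parse(resp.Body)
	if err != nil {
		log.Fatal(err)
	}

	// Stream every <a> element whose class attribute contains "nav"
	// and print its concatenated text content.
	for n := range scraper.SearchElemAttr(doc, "a", "class", "nav") {
		fmt.Println(scraper.CrawlText(n))
	}
}
```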