
Commit 9e8eaae

use the same io.Writer as the cobra.Commands for writing crawl logs
1 parent 2f33c68 commit 9e8eaae

6 files changed: +9, -5 lines
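Context: a cobra.Command writes its own output through cmd.OutOrStdout(), which returns the writer configured via SetOut and falls back to os.Stdout. Routing the crawl logs through that same writer means anything that redirects the command's output (tests in particular) also captures the crawl output. A minimal standalone sketch of that cobra mechanism, not taken from this repository:

package main

import (
	"bytes"
	"fmt"

	"github.com/spf13/cobra"
)

func main() {
	cmd := &cobra.Command{
		Use: "demo",
		Run: func(cmd *cobra.Command, args []string) {
			// OutOrStdout returns the writer set via SetOut, or os.Stdout by default.
			fmt.Fprintln(cmd.OutOrStdout(), "hello from the command")
		},
	}

	var buf bytes.Buffer
	cmd.SetOut(&buf) // e.g. in a test: capture output instead of printing it
	_ = cmd.Execute()

	fmt.Print(buf.String()) // prints "hello from the command"
}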

pkg/cli/crawl/cmd.go (+1, -1)

@@ -27,7 +27,7 @@ var Command = &cobra.Command{
 		modifier := crawler.RequestModifier{}
 		registerStandardCrawlCommandFlagModifiers(&modifier, crawlCommandFlags)
 
-		err := crawlUrls(args, modifier, crawlCommandFlags)
+		err := crawlUrls(args, modifier, crawlCommandFlags, cmd.OutOrStdout())
 		if err != nil {
 			fmt.Printf("Could not create crawlable URLs:\n\t%s\n", err)
 			os.Exit(1)

pkg/cli/crawl/cmd_list.go (+1, -1)

@@ -37,7 +37,7 @@ var ListCommand = &cobra.Command{
 			os.Exit(1)
 		}
 
-		err = crawlUrls(urls, modifier, listCommandFlags)
+		err = crawlUrls(urls, modifier, listCommandFlags, cmd.OutOrStdout())
 		if err != nil {
 			fmt.Printf("Could not create crawlable URLs:\n\t%s\n", err)
 			os.Exit(1)

pkg/cli/crawl/cmd_sitemap.go (+1, -1)

@@ -38,7 +38,7 @@ var SitemapCommand = &cobra.Command{
 			os.Exit(1)
 		}
 
-		err = crawlUrls(urls, modifier, sitemapCommandFlags)
+		err = crawlUrls(urls, modifier, sitemapCommandFlags, cmd.OutOrStdout())
 		if err != nil {
 			fmt.Printf("Could not create crawlable URLs:\n\t%s\n", err)
 			os.Exit(1)

pkg/cli/crawl/commons.go (+3, -1)

@@ -2,6 +2,7 @@ package crawl
 
 import (
 	"fmt"
+	"io"
 	"net/http"
 	"net/url"
 	"strings"

@@ -71,7 +72,7 @@ func registerStandardCrawlCommandFlagModifiers(modifier *crawler.RequestModifier
 	}
 }
 
-func crawlUrls(urls []string, modifier crawler.RequestModifier, flagOptions crawlerFlagOptions) error {
+func crawlUrls(urls []string, modifier crawler.RequestModifier, flagOptions crawlerFlagOptions, outWriter io.Writer) error {
 	requests, err := crawler.CreateRequestsFromUrls(urls, modifier)
 	if err != nil {
 		return err

@@ -90,6 +91,7 @@ func crawlUrls(urls []string, modifier crawler.RequestModifier, flagOptions craw
 			Timeout: flagOptions.HttpTimeout,
 		},
 		FilterStatusQuery: flagOptions.FilterStatusQuery,
+		OutWriter:         outWriter,
 	}
 	crawl.Crawl(requests)
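Because crawlUrls now accepts an io.Writer instead of printing implicitly, callers can route crawl logs anywhere. A hypothetical helper inside package crawl, not part of this commit (it assumes the file's existing imports plus "os", and the log file name is illustrative only):

// crawlUrlsToFileAndStdout is a hypothetical caller: io.MultiWriter fans each
// crawl log line out to both stdout and a file.
func crawlUrlsToFileAndStdout(urls []string, modifier crawler.RequestModifier, flagOptions crawlerFlagOptions) error {
	f, err := os.Create("crawl.log") // illustrative destination
	if err != nil {
		return err
	}
	defer f.Close()

	return crawlUrls(urls, modifier, flagOptions, io.MultiWriter(os.Stdout, f))
}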

pkg/crawler/crawler.go (+2)

@@ -1,6 +1,7 @@
 package crawler
 
 import (
+	"io"
 	"net/http"
 	"sync"
 	"time"

@@ -14,6 +15,7 @@ type Crawler struct {
 	HttpClient        http.Client
 	NumberOfWorkers   int
 	FilterStatusQuery string
+	OutWriter         io.Writer
 	statusFilter      *filter.Filter
 	printMutex        sync.Mutex
 }
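With OutWriter exported on the struct, library consumers can capture crawl output directly. A minimal sketch, assuming only the Crawler API visible in these diffs (the import path is a placeholder, and the worker count is an assumed sensible value):

package main

import (
	"bytes"
	"fmt"
	"net/http"
	"time"

	"github.com/owner/repo/pkg/crawler" // placeholder import path
)

func main() {
	// CreateRequestsFromUrls and RequestModifier appear in commons.go above.
	requests, err := crawler.CreateRequestsFromUrls([]string{"https://example.com"}, crawler.RequestModifier{})
	if err != nil {
		panic(err)
	}

	var buf bytes.Buffer
	c := crawler.Crawler{
		HttpClient:      http.Client{Timeout: 10 * time.Second},
		NumberOfWorkers: 1,    // assumption: a single worker is a usable default
		OutWriter:       &buf, // crawl logs land here instead of stdout
	}
	c.Crawl(requests)

	fmt.Print(buf.String())
}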

pkg/crawler/log.go (+1, -1)

@@ -17,7 +17,7 @@ func (c *Crawler) safePrintln(statusCode int, message string) {
 
 	if c.statusFilter.IsValid(c.FilterStatusQuery, int64(statusCode)) {
 		c.printMutex.Lock()
-		fmt.Println(message)
+		_, _ = fmt.Fprintln(c.OutWriter, message)
 		c.printMutex.Unlock()
 	}
 }
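One caveat worth noting: calling Write on a nil io.Writer panics, so a Crawler constructed without OutWriter would crash on its first log line. A defensive accessor like the following would preserve the old stdout behavior; this is a suggestion, not part of the commit:

// out is a hypothetical helper: it falls back to os.Stdout when OutWriter was
// never set, so fmt.Fprintln never receives a nil writer.
func (c *Crawler) out() io.Writer {
	if c.OutWriter == nil {
		return os.Stdout
	}
	return c.OutWriter
}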
