|
| 1 | +package main |
| 2 | + |
| 3 | +import ( |
| 4 | + "encoding/xml" |
| 5 | + "fmt" |
| 6 | + "log" |
| 7 | + "net/http" |
| 8 | + "os" |
| 9 | +) |
| 10 | + |
// URLSet is the root element of a sitemap document: it maps the
// <urlset xmlns="..."> element and holds one <url> entry per page.
type URLSet struct {
	XMLName xml.Name `xml:"urlset"`
	XMLNs   string   `xml:"xmlns,attr"` // xmlns attribute, copied into the output sitemap
	URL     []URL    `xml:"url"`        // one entry per <url> child element
}
| 17 | + |
// URL is a single <url> entry of a sitemap; Loc carries the page
// address from the <loc> child element.
type URL struct {
	Loc string `xml:"loc"`
}
| 22 | + |
| 23 | +func main() { |
| 24 | + if len(os.Args) < 3 { |
| 25 | + help() |
| 26 | + } |
| 27 | + sitemapURL := os.Args[1] |
| 28 | + outputFileName := os.Args[2] |
| 29 | + resp, err := http.Get(sitemapURL) |
| 30 | + if err != nil { |
| 31 | + log.Printf("Urls cannot fetched: %s\n", sitemapURL) |
| 32 | + log.Println(err) |
| 33 | + os.Exit(1) |
| 34 | + } |
| 35 | + rawXMLData := readXMLFromResponse(resp) |
| 36 | + urlSet := URLSet{} |
| 37 | + |
| 38 | + err = xml.Unmarshal(rawXMLData, &urlSet) |
| 39 | + if err != nil { |
| 40 | + log.Printf("Sitemap cannot parsed. Because: %s", err) |
| 41 | + os.Exit(1) |
| 42 | + } |
| 43 | + c := make(chan string) |
| 44 | + validURLs := []string{} |
| 45 | + for _, url := range urlSet.URL { |
| 46 | + go checkURL(url.Loc, c, &validURLs) |
| 47 | + } |
| 48 | + |
| 49 | + for range urlSet.URL { |
| 50 | + fmt.Println(<-c) |
| 51 | + } |
| 52 | + |
| 53 | + newURLSet := URLSet{ |
| 54 | + XMLNs: urlSet.XMLNs, |
| 55 | + } |
| 56 | + for _, url := range validURLs { |
| 57 | + newURL := URL{ |
| 58 | + Loc: url, |
| 59 | + } |
| 60 | + newURLSet.URL = append(newURLSet.URL, newURL) |
| 61 | + } |
| 62 | + newRawXML, err := xml.Marshal(newURLSet) |
| 63 | + if err != nil { |
| 64 | + fmt.Println(err) |
| 65 | + os.Exit(1) |
| 66 | + } |
| 67 | + |
| 68 | + err = saveValidSiteMap(outputFileName, newRawXML) |
| 69 | + if err != nil { |
| 70 | + fmt.Println("I can`₺ write valid sitemap. Error: ", err) |
| 71 | + os.Exit(1) |
| 72 | + } |
| 73 | + fmt.Println("File writed to ", outputFileName, "and closed") |
| 74 | +} |
| 75 | + |
// readXMLFromResponse drains resp.Body and returns exactly the bytes it
// contained. A read error (including io.EOF) ends the loop; whatever was
// read up to that point is returned.
func readXMLFromResponse(resp *http.Response) []byte {
	var rawXMLData []byte
	buf := make([]byte, 1024)
	for {
		n, err := resp.Body.Read(buf)
		// Append only the n bytes actually read. The original appended the
		// whole 1024-byte buffer on every iteration, padding the result
		// with stale/zero bytes that corrupt the XML.
		rawXMLData = append(rawXMLData, buf[:n]...)
		if err != nil || n == 0 {
			break
		}
	}
	return rawXMLData
}
// checkURL fetches url and sends exactly one status line on c. URLs that
// answer HTTP 200 are appended to *validURLs.
//
// NOTE(review): the append below is unsynchronized; when checkURL runs in
// multiple goroutines sharing one slice (as main does) this is a data
// race — guard it with a mutex or collect the valid URLs over a channel.
func checkURL(url string, c chan string, validURLs *[]string) {
	resp, err := http.Get(url)
	if err != nil {
		// resp is nil on error: report and return. The original fell
		// through and dereferenced the nil resp (panic); its extra send
		// also left the goroutine blocked forever, since the receiver
		// reads only one message per URL.
		c <- err.Error()
		return
	}
	defer resp.Body.Close()
	c <- fmt.Sprintf("Response code is %d for %s", resp.StatusCode, url)
	if resp.StatusCode == http.StatusOK {
		(*validURLs) = append((*validURLs), url)
	}
}
// saveValidSiteMap writes data to filename as an XML document prefixed
// with the standard XML header, truncating any existing file. It returns
// the first error from creating or writing the file, or nil on success.
func saveValidSiteMap(filename string, data []byte) error {
	// 0644: owner read/write, world read. The original used 0777, which
	// made the sitemap world-writable.
	file, err := os.OpenFile(filename, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644)
	if err != nil {
		// The original ignored this error and wrote through a nil *os.File,
		// panicking whenever the file could not be opened.
		return err
	}
	defer file.Close()
	if _, err := file.Write([]byte(xml.Header)); err != nil {
		return err
	}
	_, err = file.Write(data)
	return err
}
| 107 | + |
// help prints usage instructions to stdout and terminates the process
// with exit code 1.
func help() {
	const usage = `You have to type sitemap url and output file name
Usage: checker http://sitename.com/sitemap.xml sitemap.xml
`
	fmt.Print(usage)
	os.Exit(1)
}
0 commit comments