您最多选择25个主题 主题必须以字母或数字开头,可以包含连字符 (-),并且长度不得超过35个字符

787 行
20KB

  1. package main
  2. import (
  3. "bufio"
  4. "context"
  5. "encoding/csv"
  6. "encoding/json"
  7. "flag"
  8. "fmt"
  9. "io"
  10. "net"
  11. "net/http"
  12. "net/url"
  13. "os"
  14. "strings"
  15. "sync"
  16. "time"
  17. "radio-stream-extractor/internal/extractor"
  18. )
// scanResult captures the outcome of scanning one input URL: the candidate
// stream URLs discovered on the page (and in its playlists, embeds, and
// scripts), the playlists that were followed, optional probe results, and
// any fetch error. It is serialized both to the /scan API and the history log.
type scanResult struct {
	// URL is the original input URL that was scanned.
	URL string `json:"url"`
	// Streams holds the deduplicated candidate stream URLs.
	Streams []string `json:"streams"`
	// Playlists holds the deduplicated, resolved playlist URLs that were fetched.
	Playlists []string `json:"playlists,omitempty"`
	// Probes holds per-stream HTTP probe results (only when probing is enabled).
	Probes []probeResult `json:"probes,omitempty"`
	// Error is the fetch error message when the page itself could not be loaded.
	Error string `json:"error,omitempty"`
	// FetchedAt records when this scan started.
	FetchedAt time.Time `json:"fetchedAt"`
	// FromPlaylist is true when at least one stream came out of a parsed playlist.
	FromPlaylist bool `json:"fromPlaylist"`
}
// probeResult describes the HTTP response to probing a single stream URL
// (HEAD first, with a ranged-GET fallback).
type probeResult struct {
	// URL is the probed stream URL.
	URL string `json:"url"`
	// Status is the HTTP status line, a transport error message,
	// or "blocked: <reason>" when the target address was refused.
	Status string `json:"status"`
	// ContentType is the Content-Type response header, if the server sent one.
	ContentType string `json:"contentType,omitempty"`
}
// config holds all flag-configurable settings, shared by CLI and web mode.
// Web handlers copy it per request before applying query overrides.
type config struct {
	Format         string        // output format: text|json|csv|pls
	Probe          bool          // probe discovered stream URLs over HTTP
	Headers        headerList    // extra request headers ("Name: value"), repeatable
	Proxy          string        // optional HTTP proxy URL (overrides environment proxy)
	HistoryPath    string        // JSONL history log path; empty disables logging
	Watch          time.Duration // CLI repeat interval; 0 means scan once
	Concurrency    int           // number of concurrent fetch workers (clamped to >= 1)
	RequestTimeout time.Duration // timeout per page/playlist HTTP request
	ProbeTimeout   time.Duration // timeout for probing stream URLs
	AllowPrivate   bool          // allow requests to private/localhost addresses
}
  45. type headerList []string
  46. func (h *headerList) String() string { return strings.Join(*h, ", ") }
  47. func (h *headerList) Set(v string) error {
  48. *h = append(*h, v)
  49. return nil
  50. }
  51. func main() {
  52. port := flag.String("port", ":8080", "listen address for the web server (default :8080)")
  53. web := flag.Bool("web", false, "force web-server mode even when URLs are provided")
  54. cfg := config{}
  55. flag.StringVar(&cfg.Format, "format", "text", "output format: text|json|csv|pls")
  56. flag.BoolVar(&cfg.Probe, "probe", true, "probe discovered stream URLs with HTTP HEAD")
  57. flag.Var(&cfg.Headers, "header", "custom HTTP header (repeatable), e.g. -header 'Referer: https://example.com'")
  58. flag.StringVar(&cfg.Proxy, "proxy", "", "HTTP proxy URL (optional)")
  59. flag.StringVar(&cfg.HistoryPath, "history", "history.jsonl", "path to JSONL history log (empty to disable)")
  60. flag.DurationVar(&cfg.Watch, "watch", 0, "repeat scan in CLI mode at interval (e.g. 30s, 2m)")
  61. flag.IntVar(&cfg.Concurrency, "concurrency", 4, "number of concurrent fetch workers")
  62. flag.DurationVar(&cfg.RequestTimeout, "timeout", 15*time.Second, "timeout per HTTP request (e.g. 10s, 2m)")
  63. flag.DurationVar(&cfg.ProbeTimeout, "probe-timeout", 8*time.Second, "timeout for probing stream URLs")
  64. flag.BoolVar(&cfg.AllowPrivate, "allow-private", false, "allow requests to private/localhost addresses")
  65. flag.Usage = func() {
  66. fmt.Fprintf(flag.CommandLine.Output(), "Usage: %s [flags] <url> [url...]\n", os.Args[0])
  67. flag.PrintDefaults()
  68. }
  69. flag.Parse()
  70. urls := flag.Args()
  71. client := newHTTPClient(cfg.Proxy, cfg.RequestTimeout)
  72. history := newHistoryWriter(cfg.HistoryPath)
  73. if *web || len(urls) == 0 {
  74. if err := runWebMode(*port, client, &cfg, history); err != nil {
  75. fmt.Fprintf(os.Stderr, "web mode failed: %v\n", err)
  76. os.Exit(1)
  77. }
  78. return
  79. }
  80. runCLIMode(urls, client, &cfg, history)
  81. }
  82. func runCLIMode(urls []string, client *http.Client, cfg *config, history *historyWriter) {
  83. for {
  84. results := scanURLs(urls, client, cfg)
  85. if err := outputResults(results, cfg.Format, os.Stdout); err != nil {
  86. fmt.Fprintf(os.Stderr, "output failed: %v\n", err)
  87. return
  88. }
  89. history.Write(results)
  90. if cfg.Watch == 0 {
  91. return
  92. }
  93. time.Sleep(cfg.Watch)
  94. }
  95. }
  96. func runWebMode(addr string, client *http.Client, cfg *config, history *historyWriter) error {
  97. mux := http.NewServeMux()
  98. mux.HandleFunc("/", indexHandler)
  99. mux.HandleFunc("/scan", makeScanHandler(client, cfg, history))
  100. mux.HandleFunc("/watch", watchHandler)
  101. fmt.Printf("radiostreamscan listening on %s (GET /scan?url=... or POST url=...)\n", addr)
  102. return http.ListenAndServe(addr, mux)
  103. }
  104. func indexHandler(w http.ResponseWriter, r *http.Request) {
  105. fmt.Fprintf(w, `<!doctype html>
  106. <html>
  107. <head><meta charset="utf-8"><title>radiostreamscan</title></head>
  108. <body>
  109. <h1>radiostreamscan</h1>
  110. <form method="get" action="/watch">
  111. <label>Stream-URLs (eine pro Zeile)</label><br/>
  112. <textarea name="url" rows="6" cols="80" required></textarea><br/>
  113. <label>Format
  114. <select name="format">
  115. <option value="json">json</option>
  116. <option value="text">text</option>
  117. <option value="csv">csv</option>
  118. <option value="pls">pls</option>
  119. </select>
  120. </label>
  121. <label>Auto-Refresh (Sekunden)
  122. <input type="number" name="interval" value="0" min="0" />
  123. </label>
  124. <label><input type="checkbox" name="probe" value="1" checked> Probing</label>
  125. <button type="submit">Scan</button>
  126. </form>
  127. <p>Mehrere URLs: /scan?url=a&url=b&url=c</p>
  128. </body>
  129. </html>`)
  130. }
  131. func watchHandler(w http.ResponseWriter, r *http.Request) {
  132. urls := normalizeURLInputs(r.URL.Query()["url"])
  133. interval := r.URL.Query().Get("interval")
  134. probe := r.URL.Query().Get("probe")
  135. fmt.Fprintf(w, `<!doctype html>
  136. <html>
  137. <head><meta charset="utf-8"><title>radiostreamscan results</title>
  138. <style>
  139. body { font-family: Arial, sans-serif; }
  140. .url-block { margin: 10px 0; padding: 10px; border: 1px solid #ccc; }
  141. .error { color: #b00020; }
  142. button { margin: 8px 0; }
  143. </style>
  144. </head>
  145. <body>
  146. <h1>radiostreamscan results</h1>
  147. <button id="copy">Alle Streams kopieren</button>
  148. <div id="output">Loading...</div>
  149. <textarea id="clipboard" style="position:absolute; left:-9999px; top:-9999px;"></textarea>
  150. <script>
  151. const urls = %q.split("\n").filter(Boolean);
  152. const interval = %q;
  153. const probe = %q;
  154. async function fetchData() {
  155. const params = new URLSearchParams();
  156. urls.forEach(u => params.append("url", u));
  157. params.set("format", "json");
  158. if (probe) params.set("probe", "1");
  159. const res = await fetch("/scan?" + params.toString());
  160. const data = await res.json();
  161. const container = document.getElementById("output");
  162. container.innerHTML = "";
  163. const allStreams = [];
  164. data.forEach(item => {
  165. const block = document.createElement("div");
  166. block.className = "url-block";
  167. const title = document.createElement("h3");
  168. title.textContent = item.url;
  169. block.appendChild(title);
  170. if (item.error) {
  171. const err = document.createElement("div");
  172. err.className = "error";
  173. err.textContent = item.error;
  174. block.appendChild(err);
  175. container.appendChild(block);
  176. return;
  177. }
  178. const list = document.createElement("ul");
  179. (item.streams || []).forEach(s => {
  180. const li = document.createElement("li");
  181. li.textContent = s;
  182. list.appendChild(li);
  183. allStreams.push(s);
  184. });
  185. block.appendChild(list);
  186. container.appendChild(block);
  187. });
  188. document.getElementById("clipboard").value = allStreams.join("\n");
  189. }
  190. document.getElementById("copy").addEventListener("click", () => {
  191. const text = document.getElementById("clipboard").value;
  192. if (navigator.clipboard && navigator.clipboard.writeText) {
  193. navigator.clipboard.writeText(text);
  194. } else {
  195. const el = document.getElementById("clipboard");
  196. el.select();
  197. document.execCommand("copy");
  198. }
  199. });
  200. fetchData();
  201. if (interval && Number(interval) > 0) {
  202. setInterval(fetchData, Number(interval) * 1000);
  203. }
  204. </script>
  205. </body>
  206. </html>`, strings.Join(urls, "\n"), interval, probe)
  207. }
  208. func makeScanHandler(client *http.Client, cfg *config, history *historyWriter) http.HandlerFunc {
  209. return func(w http.ResponseWriter, r *http.Request) {
  210. var urls []string
  211. switch r.Method {
  212. case http.MethodGet:
  213. urls = r.URL.Query()["url"]
  214. case http.MethodPost:
  215. if err := r.ParseForm(); err != nil {
  216. http.Error(w, err.Error(), http.StatusBadRequest)
  217. return
  218. }
  219. urls = r.Form["url"]
  220. default:
  221. http.Error(w, "only GET and POST supported", http.StatusMethodNotAllowed)
  222. return
  223. }
  224. urls = normalizeURLInputs(urls)
  225. if len(urls) == 0 {
  226. http.Error(w, "provide at least one url parameter", http.StatusBadRequest)
  227. return
  228. }
  229. localCfg := *cfg
  230. if r.URL.Query().Get("probe") == "1" {
  231. localCfg.Probe = true
  232. } else if r.URL.Query().Get("probe") == "0" {
  233. localCfg.Probe = false
  234. }
  235. if f := r.URL.Query().Get("format"); f != "" {
  236. localCfg.Format = f
  237. }
  238. results := scanURLs(urls, client, &localCfg)
  239. history.Write(results)
  240. if err := outputResults(results, localCfg.Format, w); err != nil {
  241. http.Error(w, err.Error(), http.StatusInternalServerError)
  242. }
  243. }
  244. }
  245. func normalizeURLInputs(inputs []string) []string {
  246. var urls []string
  247. for _, item := range inputs {
  248. for _, line := range strings.Split(item, "\n") {
  249. line = strings.TrimSpace(line)
  250. if line == "" {
  251. continue
  252. }
  253. urls = append(urls, line)
  254. }
  255. }
  256. return urls
  257. }
  258. func scanURLs(urls []string, client *http.Client, cfg *config) []scanResult {
  259. results := make([]scanResult, len(urls))
  260. type job struct {
  261. index int
  262. url string
  263. }
  264. jobs := make(chan job)
  265. var wg sync.WaitGroup
  266. workers := cfg.Concurrency
  267. if workers < 1 {
  268. workers = 1
  269. }
  270. for i := 0; i < workers; i++ {
  271. wg.Add(1)
  272. go func() {
  273. defer wg.Done()
  274. for j := range jobs {
  275. res := scanOneURL(client, cfg, j.url)
  276. results[j.index] = res
  277. }
  278. }()
  279. }
  280. for i, u := range urls {
  281. jobs <- job{index: i, url: u}
  282. }
  283. close(jobs)
  284. wg.Wait()
  285. return results
  286. }
  287. func scanOneURL(client *http.Client, cfg *config, raw string) scanResult {
  288. res := scanResult{URL: raw, FetchedAt: time.Now()}
  289. html, contentType, err := fetchContent(client, cfg, raw)
  290. if err != nil {
  291. res.Error = err.Error()
  292. return res
  293. }
  294. parsedBase, _ := url.Parse(raw)
  295. baseHost := ""
  296. if parsedBase != nil {
  297. baseHost = parsedBase.Hostname()
  298. }
  299. streams := extractor.ExtractStreams(html)
  300. playlists := extractor.ExtractPlaylistLinks(html)
  301. resolvedPlaylists := make([]string, 0, len(playlists))
  302. for _, pl := range playlists {
  303. plURL := resolveURL(raw, pl)
  304. if plURL == "" {
  305. continue
  306. }
  307. resolvedPlaylists = append(resolvedPlaylists, plURL)
  308. plContent, plType, err := fetchContent(client, cfg, plURL)
  309. if err != nil {
  310. continue
  311. }
  312. parsed := extractor.ParsePlaylist(plContent, plType, plURL)
  313. if len(parsed) > 0 {
  314. streams = append(streams, parsed...)
  315. res.FromPlaylist = true
  316. }
  317. }
  318. embedURLs := extractor.ExtractEmbedURLs(html)
  319. seenEmbeds := make(map[string]struct{})
  320. for _, embed := range embedURLs {
  321. embedURL := resolveURL(raw, embed)
  322. if embedURL == "" || embedURL == raw {
  323. continue
  324. }
  325. if _, ok := seenEmbeds[embedURL]; ok {
  326. continue
  327. }
  328. seenEmbeds[embedURL] = struct{}{}
  329. embedHTML, _, err := fetchContent(client, cfg, embedURL)
  330. if err != nil {
  331. continue
  332. }
  333. streams = append(streams, extractor.ExtractStreams(embedHTML)...)
  334. embedPlaylists := extractor.ExtractPlaylistLinks(embedHTML)
  335. playlists = append(playlists, embedPlaylists...)
  336. for _, pl := range embedPlaylists {
  337. plURL := resolveURL(embedURL, pl)
  338. if plURL == "" {
  339. continue
  340. }
  341. resolvedPlaylists = append(resolvedPlaylists, plURL)
  342. plContent, plType, err := fetchContent(client, cfg, plURL)
  343. if err != nil {
  344. continue
  345. }
  346. parsed := extractor.ParsePlaylist(plContent, plType, plURL)
  347. if len(parsed) > 0 {
  348. streams = append(streams, parsed...)
  349. res.FromPlaylist = true
  350. }
  351. }
  352. }
  353. scriptURLs := extractor.ExtractScriptURLs(html)
  354. seenScripts := make(map[string]struct{})
  355. for _, script := range scriptURLs {
  356. scriptURL := resolveURL(raw, script)
  357. if scriptURL == "" || scriptURL == raw {
  358. continue
  359. }
  360. if baseHost != "" {
  361. parsedScript, err := url.Parse(scriptURL)
  362. if err != nil {
  363. continue
  364. }
  365. if parsedScript.Hostname() != "" && parsedScript.Hostname() != baseHost {
  366. continue
  367. }
  368. }
  369. if _, ok := seenScripts[scriptURL]; ok {
  370. continue
  371. }
  372. seenScripts[scriptURL] = struct{}{}
  373. scriptHTML, _, err := fetchContent(client, cfg, scriptURL)
  374. if err != nil {
  375. continue
  376. }
  377. streams = append(streams, extractor.ExtractStreams(scriptHTML)...)
  378. scriptPlaylists := extractor.ExtractPlaylistLinks(scriptHTML)
  379. playlists = append(playlists, scriptPlaylists...)
  380. for _, pl := range scriptPlaylists {
  381. plURL := resolveURL(scriptURL, pl)
  382. if plURL == "" {
  383. continue
  384. }
  385. resolvedPlaylists = append(resolvedPlaylists, plURL)
  386. plContent, plType, err := fetchContent(client, cfg, plURL)
  387. if err != nil {
  388. continue
  389. }
  390. parsed := extractor.ParsePlaylist(plContent, plType, plURL)
  391. if len(parsed) > 0 {
  392. streams = append(streams, parsed...)
  393. res.FromPlaylist = true
  394. }
  395. }
  396. }
  397. res.Playlists = uniqueStrings(resolvedPlaylists)
  398. res.Streams = uniqueStrings(streams)
  399. if cfg.Probe {
  400. res.Probes = probeStreams(client, cfg, res.Streams)
  401. }
  402. _ = contentType
  403. return res
  404. }
  405. func fetchContent(client *http.Client, cfg *config, raw string) (string, string, error) {
  406. if !cfg.AllowPrivate {
  407. if blocked, reason := isPrivateURL(raw); blocked {
  408. return "", "", fmt.Errorf("blocked private address (%s)", reason)
  409. }
  410. }
  411. timeout := cfg.RequestTimeout
  412. if timeout <= 0 {
  413. timeout = 15 * time.Second
  414. }
  415. ctx, cancel := context.WithTimeout(context.Background(), timeout)
  416. defer cancel()
  417. req, err := http.NewRequestWithContext(ctx, http.MethodGet, raw, nil)
  418. if err != nil {
  419. return "", "", err
  420. }
  421. req.Header.Set("User-Agent", "radiostreamscan/0.2")
  422. for _, h := range cfg.Headers {
  423. parts := strings.SplitN(h, ":", 2)
  424. if len(parts) == 2 {
  425. req.Header.Set(strings.TrimSpace(parts[0]), strings.TrimSpace(parts[1]))
  426. }
  427. }
  428. resp, err := client.Do(req)
  429. if err != nil {
  430. return "", "", err
  431. }
  432. defer resp.Body.Close()
  433. if resp.StatusCode < 200 || resp.StatusCode >= 300 {
  434. snippet, _ := io.ReadAll(io.LimitReader(resp.Body, 1024))
  435. return "", "", fmt.Errorf("unexpected status %s: %s", resp.Status, strings.TrimSpace(string(snippet)))
  436. }
  437. body, err := io.ReadAll(io.LimitReader(resp.Body, 2<<20))
  438. if err != nil {
  439. return "", "", err
  440. }
  441. return string(body), resp.Header.Get("Content-Type"), nil
  442. }
  443. func probeStreams(client *http.Client, cfg *config, streams []string) []probeResult {
  444. var results []probeResult
  445. for _, s := range streams {
  446. if !cfg.AllowPrivate {
  447. if blocked, reason := isPrivateURL(s); blocked {
  448. results = append(results, probeResult{URL: s, Status: "blocked: " + reason})
  449. continue
  450. }
  451. }
  452. timeout := cfg.ProbeTimeout
  453. if timeout <= 0 {
  454. timeout = cfg.RequestTimeout
  455. }
  456. if timeout <= 0 {
  457. timeout = 8 * time.Second
  458. }
  459. ctx, cancel := context.WithTimeout(context.Background(), timeout)
  460. req, err := http.NewRequestWithContext(ctx, http.MethodHead, s, nil)
  461. if err != nil {
  462. cancel()
  463. continue
  464. }
  465. for _, h := range cfg.Headers {
  466. parts := strings.SplitN(h, ":", 2)
  467. if len(parts) == 2 {
  468. req.Header.Set(strings.TrimSpace(parts[0]), strings.TrimSpace(parts[1]))
  469. }
  470. }
  471. resp, err := client.Do(req)
  472. if err != nil {
  473. cancel()
  474. // Fallback to GET with range.
  475. results = append(results, probeWithGet(client, cfg, s, timeout))
  476. continue
  477. }
  478. resp.Body.Close()
  479. cancel()
  480. if resp.StatusCode == http.StatusMethodNotAllowed || resp.StatusCode == http.StatusNotImplemented {
  481. results = append(results, probeWithGet(client, cfg, s, timeout))
  482. continue
  483. }
  484. results = append(results, probeResult{URL: s, Status: resp.Status, ContentType: resp.Header.Get("Content-Type")})
  485. }
  486. return results
  487. }
  488. func probeWithGet(client *http.Client, cfg *config, urlStr string, timeout time.Duration) probeResult {
  489. ctx, cancel := context.WithTimeout(context.Background(), timeout)
  490. defer cancel()
  491. req, err := http.NewRequestWithContext(ctx, http.MethodGet, urlStr, nil)
  492. if err != nil {
  493. return probeResult{URL: urlStr, Status: err.Error()}
  494. }
  495. req.Header.Set("Range", "bytes=0-1023")
  496. for _, h := range cfg.Headers {
  497. parts := strings.SplitN(h, ":", 2)
  498. if len(parts) == 2 {
  499. req.Header.Set(strings.TrimSpace(parts[0]), strings.TrimSpace(parts[1]))
  500. }
  501. }
  502. resp, err := client.Do(req)
  503. if err != nil {
  504. return probeResult{URL: urlStr, Status: err.Error()}
  505. }
  506. defer resp.Body.Close()
  507. _, _ = io.Copy(io.Discard, io.LimitReader(resp.Body, 1024))
  508. return probeResult{URL: urlStr, Status: resp.Status, ContentType: resp.Header.Get("Content-Type")}
  509. }
  510. func outputResults(results []scanResult, format string, w io.Writer) error {
  511. if rw, ok := w.(http.ResponseWriter); ok {
  512. if strings.ToLower(format) == "json" {
  513. rw.Header().Set("Content-Type", "application/json")
  514. } else if strings.ToLower(format) == "csv" {
  515. rw.Header().Set("Content-Type", "text/csv")
  516. }
  517. }
  518. switch strings.ToLower(format) {
  519. case "json":
  520. if err := json.NewEncoder(w).Encode(results); err != nil {
  521. return err
  522. }
  523. case "csv":
  524. cw := csv.NewWriter(w)
  525. if err := cw.Write([]string{"input_url", "stream_url"}); err != nil {
  526. return err
  527. }
  528. for _, res := range results {
  529. for _, s := range res.Streams {
  530. if err := cw.Write([]string{res.URL, s}); err != nil {
  531. return err
  532. }
  533. }
  534. }
  535. cw.Flush()
  536. if err := cw.Error(); err != nil {
  537. return err
  538. }
  539. case "pls":
  540. if _, err := fmt.Fprintln(w, "[playlist]"); err != nil {
  541. return err
  542. }
  543. i := 1
  544. for _, res := range results {
  545. for _, s := range res.Streams {
  546. if _, err := fmt.Fprintf(w, "File%d=%s\n", i, s); err != nil {
  547. return err
  548. }
  549. i++
  550. }
  551. }
  552. if _, err := fmt.Fprintf(w, "NumberOfEntries=%d\nVersion=2\n", i-1); err != nil {
  553. return err
  554. }
  555. default:
  556. for _, res := range results {
  557. if _, err := fmt.Fprintf(w, "URL: %s\n", res.URL); err != nil {
  558. return err
  559. }
  560. if res.Error != "" {
  561. if _, err := fmt.Fprintf(w, " error: %s\n", res.Error); err != nil {
  562. return err
  563. }
  564. continue
  565. }
  566. if len(res.Streams) == 0 {
  567. if _, err := fmt.Fprintln(w, " (no candidate streams found)"); err != nil {
  568. return err
  569. }
  570. continue
  571. }
  572. for _, s := range res.Streams {
  573. if _, err := fmt.Fprintf(w, " - %s\n", s); err != nil {
  574. return err
  575. }
  576. }
  577. }
  578. }
  579. return nil
  580. }
  581. func newHTTPClient(proxyURL string, timeout time.Duration) *http.Client {
  582. transport := &http.Transport{
  583. Proxy: http.ProxyFromEnvironment,
  584. ResponseHeaderTimeout: 8 * time.Second,
  585. TLSHandshakeTimeout: 6 * time.Second,
  586. IdleConnTimeout: 30 * time.Second,
  587. ExpectContinueTimeout: 1 * time.Second,
  588. MaxIdleConns: 100,
  589. MaxIdleConnsPerHost: 10,
  590. }
  591. if proxyURL != "" {
  592. if parsed, err := url.Parse(proxyURL); err == nil {
  593. transport.Proxy = http.ProxyURL(parsed)
  594. }
  595. }
  596. if timeout <= 0 {
  597. timeout = 15 * time.Second
  598. }
  599. return &http.Client{Timeout: timeout, Transport: transport}
  600. }
  601. func uniqueStrings(values []string) []string {
  602. set := make(map[string]struct{}, len(values))
  603. out := make([]string, 0, len(values))
  604. for _, v := range values {
  605. if _, ok := set[v]; ok {
  606. continue
  607. }
  608. set[v] = struct{}{}
  609. out = append(out, v)
  610. }
  611. return out
  612. }
  613. func resolveURL(base, href string) string {
  614. href = strings.TrimSpace(href)
  615. if href == "" {
  616. return ""
  617. }
  618. if strings.HasPrefix(href, "//") {
  619. return "https:" + href
  620. }
  621. parsed, err := url.Parse(href)
  622. if err != nil {
  623. return ""
  624. }
  625. if parsed.IsAbs() {
  626. return parsed.String()
  627. }
  628. baseURL, err := url.Parse(base)
  629. if err != nil {
  630. return parsed.String()
  631. }
  632. return baseURL.ResolveReference(parsed).String()
  633. }
  634. func isPrivateURL(raw string) (bool, string) {
  635. parsed, err := url.Parse(raw)
  636. if err != nil {
  637. return false, ""
  638. }
  639. host := parsed.Hostname()
  640. if host == "" {
  641. return false, ""
  642. }
  643. lower := strings.ToLower(host)
  644. if lower == "localhost" || strings.HasSuffix(lower, ".local") || strings.HasSuffix(lower, ".internal") {
  645. return true, "hostname"
  646. }
  647. ip := net.ParseIP(host)
  648. if ip == nil {
  649. return false, ""
  650. }
  651. if ip.IsLoopback() || ip.IsLinkLocalUnicast() || ip.IsLinkLocalMulticast() {
  652. return true, "loopback/link-local"
  653. }
  654. if isPrivateIP(ip) {
  655. return true, "private range"
  656. }
  657. return false, ""
  658. }
  659. func isPrivateIP(ip net.IP) bool {
  660. if ip4 := ip.To4(); ip4 != nil {
  661. switch {
  662. case ip4[0] == 10:
  663. return true
  664. case ip4[0] == 172 && ip4[1] >= 16 && ip4[1] <= 31:
  665. return true
  666. case ip4[0] == 192 && ip4[1] == 168:
  667. return true
  668. case ip4[0] == 169 && ip4[1] == 254:
  669. return true
  670. case ip4[0] == 127:
  671. return true
  672. }
  673. }
  674. // IPv6 unique local fc00::/7
  675. if ip.To16() != nil {
  676. return ip[0]&0xfe == 0xfc
  677. }
  678. return false
  679. }
// historyWriter appends scan results as JSON Lines to a file.
// The mutex serializes writes coming from concurrent HTTP handlers.
type historyWriter struct {
	path string     // target file; empty disables logging entirely
	mu   sync.Mutex // guards open/append so concurrent scans don't interleave lines
}

// newHistoryWriter returns a writer that logs to path; an empty path
// yields a writer whose Write is a no-op.
func newHistoryWriter(path string) *historyWriter {
	return &historyWriter{path: path}
}
  687. func (h *historyWriter) Write(results []scanResult) {
  688. if h == nil || h.path == "" {
  689. return
  690. }
  691. h.mu.Lock()
  692. defer h.mu.Unlock()
  693. f, err := os.OpenFile(h.path, os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0644)
  694. if err != nil {
  695. return
  696. }
  697. defer f.Close()
  698. writer := bufio.NewWriter(f)
  699. for _, res := range results {
  700. data, err := json.Marshal(res)
  701. if err != nil {
  702. continue
  703. }
  704. writer.Write(data)
  705. writer.WriteString("\n")
  706. }
  707. writer.Flush()
  708. }