95 lines
2.2 KiB
Go
95 lines
2.2 KiB
Go
package bloomberg
|
|
|
|
import (
|
|
"bytes"
|
|
"context"
|
|
"encoding/json"
|
|
"fmt"
|
|
"io"
|
|
"net/http"
|
|
"strings"
|
|
"time"
|
|
|
|
"github.com/tradarr/backend/internal/scraper"
|
|
)
|
|
|
|
// Bloomberg fetches bloomberg.com headlines indirectly, by calling an
// external headless scraper service (see ScrapeWithCredentials) instead of
// hitting the site itself.
type Bloomberg struct {
	scraperURL string       // base URL of the scraper service, e.g. "http://scraper:3001"
	client     *http.Client // HTTP client used for calls to the scraper service
}
|
|
|
|
func New(scraperURL string) *Bloomberg {
|
|
if scraperURL == "" {
|
|
scraperURL = "http://scraper:3001"
|
|
}
|
|
return &Bloomberg{
|
|
scraperURL: scraperURL,
|
|
client: &http.Client{Timeout: 10 * time.Minute},
|
|
}
|
|
}
|
|
|
|
func (b *Bloomberg) Name() string { return "bloomberg" }
|
|
|
|
// scraperRequest is the JSON body POSTed to the scraper service's
// /bloomberg/scrape endpoint; the credentials are forwarded so the service
// can perform the Bloomberg login.
type scraperRequest struct {
	Username string `json:"username"`
	Password string `json:"password"`
}
|
|
|
|
// scraperArticle is a single headline entry in the scraper service's
// response. Only title and URL are provided — no body or timestamp.
type scraperArticle struct {
	Title string `json:"title"`
	URL   string `json:"url"`
}
|
|
|
|
// scraperResponse is the JSON payload returned by the scraper service.
// Error, when non-empty, carries a service-side failure description and is
// treated as fatal even when the HTTP status is 200.
type scraperResponse struct {
	Articles []scraperArticle `json:"articles"`
	Error    string           `json:"error,omitempty"`
}
|
|
|
|
func (b *Bloomberg) ScrapeWithCredentials(ctx context.Context, username, password string, symbols []string) ([]scraper.Article, error) {
|
|
payload, _ := json.Marshal(scraperRequest{Username: username, Password: password})
|
|
req, err := http.NewRequestWithContext(ctx, http.MethodPost, b.scraperURL+"/bloomberg/scrape", bytes.NewReader(payload))
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
req.Header.Set("Content-Type", "application/json")
|
|
|
|
resp, err := b.client.Do(req)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("scraper service unreachable: %w", err)
|
|
}
|
|
defer resp.Body.Close()
|
|
|
|
body, _ := io.ReadAll(resp.Body)
|
|
if resp.StatusCode != http.StatusOK {
|
|
return nil, fmt.Errorf("scraper service HTTP %d: %s", resp.StatusCode, body)
|
|
}
|
|
|
|
var result scraperResponse
|
|
if err := json.Unmarshal(body, &result); err != nil {
|
|
return nil, fmt.Errorf("parse scraper response: %w", err)
|
|
}
|
|
if result.Error != "" {
|
|
return nil, fmt.Errorf("bloomberg: %s", result.Error)
|
|
}
|
|
|
|
now := time.Now()
|
|
var articles []scraper.Article
|
|
for _, a := range result.Articles {
|
|
title := strings.TrimSpace(a.Title)
|
|
url := a.URL
|
|
if title == "" || url == "" {
|
|
continue
|
|
}
|
|
syms := scraper.DetectSymbols(title, symbols)
|
|
articles = append(articles, scraper.Article{
|
|
Title: title,
|
|
Content: title,
|
|
URL: url,
|
|
PublishedAt: &now,
|
|
Symbols: syms,
|
|
})
|
|
}
|
|
fmt.Printf("bloomberg: %d articles fetched\n", len(articles))
|
|
return articles, nil
|
|
}
|