feat: add sources to retrieve news and divide the AI reflection into 2 steps to limit the number of news items

This commit is contained in:
2026-04-19 10:43:15 +02:00
parent 93668273ff
commit eb1fb5ca78
28 changed files with 1086 additions and 249 deletions

View File

@ -15,6 +15,8 @@ import (
"github.com/tradarr/backend/internal/scheduler"
"github.com/tradarr/backend/internal/scraper"
"github.com/tradarr/backend/internal/scraper/bloomberg"
"github.com/tradarr/backend/internal/scraper/reuters"
"github.com/tradarr/backend/internal/scraper/watcherguru"
"github.com/tradarr/backend/internal/scraper/yahoofinance"
)
@ -38,30 +40,23 @@ func main() {
enc := crypto.New(cfg.EncryptionKey)
pipeline := ai.NewPipeline(repo, enc)
// Créer le compte admin initial si nécessaire
if err := ensureAdmin(repo, cfg); err != nil {
log.Printf("ensure admin: %v", err)
}
// Configurer les scrapers
registry := scraper.NewRegistry(repo)
registry.Register(bloomberg.NewDynamic(repo, enc, cfg.ScraperURL))
registry.Register(yahoofinance.New())
registry.Register(reuters.New())
registry.Register(watcherguru.New())
// Bloomberg (credentials chargés depuis la DB à chaque run)
bbScraper := bloomberg.NewDynamic(repo, enc, cfg.ChromePath)
registry.Register(bbScraper)
stScraper := yahoofinance.New()
registry.Register(stScraper)
// Scheduler
sched := scheduler.New(registry, pipeline, repo)
if err := sched.Start(); err != nil {
log.Printf("scheduler: %v", err)
}
defer sched.Stop()
// API
h := handlers.New(repo, cfg, enc, registry, pipeline)
h := handlers.New(repo, cfg, enc, registry, pipeline, sched)
r := api.SetupRouter(h, cfg.JWTSecret)
addr := fmt.Sprintf(":%s", cfg.Port)