feat: add frontend + backend + database to retrieve and process news from Yahoo

This commit is contained in:
2026-04-18 23:53:57 +02:00
parent f9b6d35c49
commit 93668273ff
84 changed files with 15431 additions and 0 deletions

View File

@ -0,0 +1,79 @@
package ai
import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"time"
)
// anthropicProvider talks to the Anthropic Messages API.
// It implements the Provider interface.
type anthropicProvider struct {
	apiKey string       // API key, sent via the x-api-key header
	model  string       // model identifier, e.g. "claude-sonnet-4-6"
	client *http.Client // HTTP client shared by all requests
}
// newAnthropic builds an Anthropic provider. An empty model falls back to
// a default. The HTTP client carries an explicit timeout so a request
// cannot hang forever when the caller's context has no deadline.
func newAnthropic(apiKey, model string) *anthropicProvider {
	if model == "" {
		model = "claude-sonnet-4-6"
	}
	return &anthropicProvider{
		apiKey: apiKey,
		model:  model,
		// 2 minutes: large max_tokens completions can take well over a minute.
		client: &http.Client{Timeout: 2 * time.Minute},
	}
}
// Name returns the provider identifier used in configuration.
func (p *anthropicProvider) Name() string { return "anthropic" }
// Summarize sends prompt to the Anthropic Messages API and returns the
// first text block of the response. It returns "" with a nil error when
// the API answers with no content blocks. Non-200 responses are surfaced
// as errors including the raw body.
func (p *anthropicProvider) Summarize(ctx context.Context, prompt string) (string, error) {
	body := map[string]interface{}{
		"model":      p.model,
		"max_tokens": 4096,
		"messages": []map[string]string{
			{"role": "user", "content": prompt},
		},
	}
	b, err := json.Marshal(body)
	if err != nil {
		return "", fmt.Errorf("marshal request: %w", err)
	}
	req, err := http.NewRequestWithContext(ctx, http.MethodPost, "https://api.anthropic.com/v1/messages", bytes.NewReader(b))
	if err != nil {
		return "", err
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("x-api-key", p.apiKey)
	req.Header.Set("anthropic-version", "2023-06-01")
	resp, err := p.client.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	raw, err := io.ReadAll(resp.Body)
	if err != nil {
		return "", fmt.Errorf("read response: %w", err)
	}
	if resp.StatusCode != http.StatusOK {
		return "", fmt.Errorf("anthropic API error %d: %s", resp.StatusCode, raw)
	}
	var result struct {
		Content []struct {
			Text string `json:"text"`
		} `json:"content"`
	}
	if err := json.Unmarshal(raw, &result); err != nil {
		return "", err
	}
	if len(result.Content) == 0 {
		return "", nil
	}
	return result.Content[0].Text, nil
}
// ListModels returns a fixed, hardcoded list of Anthropic model names;
// no network call is performed.
func (p *anthropicProvider) ListModels(_ context.Context) ([]string, error) {
	models := []string{
		"claude-opus-4-7",
		"claude-sonnet-4-6",
		"claude-haiku-4-5-20251001",
	}
	return models, nil
}

View File

@ -0,0 +1,84 @@
package ai
import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"time"
)
// geminiProvider talks to the Google Generative Language (Gemini) API.
// It implements the Provider interface.
type geminiProvider struct {
	apiKey string       // Google AI API key
	model  string       // model identifier, e.g. "gemini-2.0-flash"
	client *http.Client // HTTP client shared by all requests
}
// newGemini builds a Gemini provider. An empty model falls back to
// "gemini-2.0-flash". The HTTP client carries an explicit timeout so a
// request cannot hang forever when the caller's context has no deadline.
func newGemini(apiKey, model string) *geminiProvider {
	if model == "" {
		model = "gemini-2.0-flash"
	}
	return &geminiProvider{
		apiKey: apiKey,
		model:  model,
		// 2 minutes: long generations can exceed a minute.
		client: &http.Client{Timeout: 2 * time.Minute},
	}
}
// Name returns the provider identifier used in configuration.
func (p *geminiProvider) Name() string { return "gemini" }
// Summarize sends prompt to the Gemini generateContent endpoint and returns
// the first candidate's first text part ("" with nil error when the response
// carries no candidates or parts).
func (p *geminiProvider) Summarize(ctx context.Context, prompt string) (string, error) {
	url := fmt.Sprintf(
		"https://generativelanguage.googleapis.com/v1beta/models/%s:generateContent",
		p.model,
	)
	body := map[string]interface{}{
		"contents": []map[string]interface{}{
			{"parts": []map[string]string{{"text": prompt}}},
		},
	}
	b, err := json.Marshal(body)
	if err != nil {
		return "", fmt.Errorf("marshal request: %w", err)
	}
	req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(b))
	if err != nil {
		return "", err
	}
	req.Header.Set("Content-Type", "application/json")
	// Send the key as a header (supported by the API) rather than a URL query
	// parameter, so it does not leak into access logs or proxies.
	req.Header.Set("x-goog-api-key", p.apiKey)
	resp, err := p.client.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	raw, err := io.ReadAll(resp.Body)
	if err != nil {
		return "", fmt.Errorf("read response: %w", err)
	}
	if resp.StatusCode != http.StatusOK {
		return "", fmt.Errorf("gemini API error %d: %s", resp.StatusCode, raw)
	}
	var result struct {
		Candidates []struct {
			Content struct {
				Parts []struct {
					Text string `json:"text"`
				} `json:"parts"`
			} `json:"content"`
		} `json:"candidates"`
	}
	if err := json.Unmarshal(raw, &result); err != nil {
		return "", err
	}
	if len(result.Candidates) == 0 || len(result.Candidates[0].Content.Parts) == 0 {
		return "", nil
	}
	return result.Candidates[0].Content.Parts[0].Text, nil
}
// ListModels returns a fixed, hardcoded list of Gemini model names;
// no network call is performed.
func (p *geminiProvider) ListModels(_ context.Context) ([]string, error) {
	models := []string{
		"gemini-2.0-flash",
		"gemini-2.0-flash-lite",
		"gemini-1.5-pro",
		"gemini-1.5-flash",
	}
	return models, nil
}

View File

@ -0,0 +1,95 @@
package ai
import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"time"
)
// ollamaProvider talks to a local/self-hosted Ollama server.
// It implements the Provider interface.
type ollamaProvider struct {
	endpoint string       // base URL of the Ollama server, e.g. "http://ollama:11434"
	model    string       // model name, e.g. "llama3"
	client   *http.Client // HTTP client shared by all requests
}
// newOllama builds an Ollama provider. Empty endpoint/model fall back to the
// docker-compose service URL and "llama3". The HTTP client carries an
// explicit timeout so a request cannot hang forever when the caller's
// context has no deadline.
func newOllama(endpoint, model string) *ollamaProvider {
	if endpoint == "" {
		endpoint = "http://ollama:11434"
	}
	if model == "" {
		model = "llama3"
	}
	return &ollamaProvider{
		endpoint: endpoint,
		model:    model,
		// Generous timeout: local CPU inference can be very slow.
		client: &http.Client{Timeout: 5 * time.Minute},
	}
}
// Name returns the provider identifier used in configuration.
func (p *ollamaProvider) Name() string { return "ollama" }
// Summarize sends prompt to the Ollama /api/generate endpoint (non-streaming)
// and returns the generated text. The context window is raised to 32k tokens
// because prompts bundle many articles.
func (p *ollamaProvider) Summarize(ctx context.Context, prompt string) (string, error) {
	body := map[string]interface{}{
		"model":  p.model,
		"prompt": prompt,
		"stream": false,
		"options": map[string]interface{}{
			"num_ctx": 32768,
		},
	}
	b, err := json.Marshal(body)
	if err != nil {
		return "", fmt.Errorf("marshal request: %w", err)
	}
	req, err := http.NewRequestWithContext(ctx, http.MethodPost, p.endpoint+"/api/generate", bytes.NewReader(b))
	if err != nil {
		return "", err
	}
	req.Header.Set("Content-Type", "application/json")
	resp, err := p.client.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	raw, err := io.ReadAll(resp.Body)
	if err != nil {
		return "", fmt.Errorf("read response: %w", err)
	}
	if resp.StatusCode != http.StatusOK {
		return "", fmt.Errorf("ollama API error %d: %s", resp.StatusCode, raw)
	}
	var result struct {
		Response string `json:"response"`
	}
	if err := json.Unmarshal(raw, &result); err != nil {
		return "", err
	}
	return result.Response, nil
}
// ListModels queries the Ollama /api/tags endpoint and returns the names of
// the locally installed models. Unlike the original, it checks the HTTP
// status and the body read error instead of parsing an error body as an
// (empty) success.
func (p *ollamaProvider) ListModels(ctx context.Context) ([]string, error) {
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, p.endpoint+"/api/tags", nil)
	if err != nil {
		return nil, err
	}
	resp, err := p.client.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	raw, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, fmt.Errorf("read response: %w", err)
	}
	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("ollama API error %d: %s", resp.StatusCode, raw)
	}
	var result struct {
		Models []struct {
			Name string `json:"name"`
		} `json:"models"`
	}
	if err := json.Unmarshal(raw, &result); err != nil {
		return nil, err
	}
	// Keep the original nil-when-empty semantics.
	var models []string
	for _, m := range result.Models {
		models = append(models, m.Name)
	}
	return models, nil
}

View File

@ -0,0 +1,52 @@
package ai
import (
"context"
openai "github.com/sashabaranov/go-openai"
)
// openAIProvider wraps the go-openai client.
// It implements the Provider interface.
type openAIProvider struct {
	client *openai.Client // authenticated OpenAI client
	model  string         // model identifier, e.g. openai.GPT4oMini
}
// newOpenAI builds an OpenAI provider backed by the go-openai client.
// An empty model falls back to GPT-4o mini.
func newOpenAI(apiKey, model string) *openAIProvider {
	chosen := model
	if chosen == "" {
		chosen = openai.GPT4oMini
	}
	return &openAIProvider{client: openai.NewClient(apiKey), model: chosen}
}
// Name returns the provider identifier used in configuration.
func (p *openAIProvider) Name() string { return "openai" }
// Summarize asks the configured chat model to complete prompt and returns
// the content of the first choice, or "" with a nil error when the API
// returns no choices.
func (p *openAIProvider) Summarize(ctx context.Context, prompt string) (string, error) {
	request := openai.ChatCompletionRequest{
		Model: p.model,
		Messages: []openai.ChatCompletionMessage{
			{Role: openai.ChatMessageRoleUser, Content: prompt},
		},
	}
	resp, err := p.client.CreateChatCompletion(ctx, request)
	if err != nil {
		return "", err
	}
	if len(resp.Choices) == 0 {
		return "", nil
	}
	return resp.Choices[0].Message.Content, nil
}
// ListModels queries the OpenAI models endpoint and returns the model IDs.
func (p *openAIProvider) ListModels(ctx context.Context) ([]string, error) {
	resp, err := p.client.ListModels(ctx)
	if err != nil {
		return nil, err
	}
	var ids []string
	for _, model := range resp.Models {
		ids = append(ids, model.ID)
	}
	return ids, nil
}

View File

@ -0,0 +1,160 @@
package ai
import (
	"context"
	"fmt"
	"strconv"
	"strings"
	"time"
	"unicode/utf8"

	"github.com/tradarr/backend/internal/crypto"
	"github.com/tradarr/backend/internal/models"
)
// DefaultSystemPrompt is the system prompt used when the "ai_system_prompt"
// setting is empty. It instructs the model to produce a structured,
// trading-oriented market summary. The text is a runtime string and is
// intentionally in French (the product's output language) — do not translate.
const DefaultSystemPrompt = `Tu es un assistant spécialisé en trading financier. Analyse l'ensemble des actualités suivantes, toutes sources confondues, et crée un résumé global structuré en français, orienté trading.
Structure ton résumé ainsi :
1. **Vue macro** : tendances globales du marché (économie, géopolitique, secteurs)
2. **Actifs surveillés** : pour chaque actif de la watchlist mentionné dans les news :
- Sentiment (haussier/baissier/neutre)
- Faits clés et catalyseurs
- Risques et opportunités
3. **Autres mouvements notables** : actifs hors watchlist à surveiller
4. **Synthèse** : points d'attention prioritaires pour la journée`
// Pipeline orchestrates AI summary generation: it reads settings, watchlists
// and articles from the repository, decrypts stored provider API keys, and
// stores the produced summaries.
type Pipeline struct {
	repo *models.Repository // data access (settings, articles, users, summaries)
	enc  *crypto.Encryptor  // decrypts stored provider API keys
}
// NewPipeline returns a Pipeline backed by the given repository and encryptor.
func NewPipeline(repo *models.Repository, enc *crypto.Encryptor) *Pipeline {
	return &Pipeline{repo: repo, enc: enc}
}
// BuildProvider instantiates a provider from the given name, API key and
// endpoint. The model is taken from the currently active provider
// configuration when one exists, otherwise the provider's default is used.
// NOTE(review): the active provider's model is applied even if name differs
// from the active provider — confirm this coupling is intended.
func (p *Pipeline) BuildProvider(name, apiKey, endpoint string) (Provider, error) {
	active, err := p.repo.GetActiveAIProvider()
	if err != nil {
		return nil, err
	}
	var model string
	if active != nil {
		model = active.Model
	}
	return NewProvider(name, apiKey, model, endpoint)
}
// GenerateForUser generates a personalized summary for one user: it resolves
// the active AI provider (decrypting its stored API key), gathers the user's
// watchlist symbols and the recent articles, builds the prompt, calls the
// provider, and persists the resulting summary. It returns an error when no
// provider is active or no recent article is available.
func (p *Pipeline) GenerateForUser(ctx context.Context, userID string) (*models.Summary, error) {
	// Resolve the active provider configuration.
	providerCfg, err := p.repo.GetActiveAIProvider()
	if err != nil {
		return nil, fmt.Errorf("get active provider: %w", err)
	}
	if providerCfg == nil {
		return nil, fmt.Errorf("no active AI provider configured")
	}
	apiKey := ""
	if providerCfg.APIKeyEncrypted != "" {
		apiKey, err = p.enc.Decrypt(providerCfg.APIKeyEncrypted)
		if err != nil {
			return nil, fmt.Errorf("decrypt API key: %w", err)
		}
	}
	provider, err := NewProvider(providerCfg.Name, apiKey, providerCfg.Model, providerCfg.Endpoint)
	if err != nil {
		return nil, fmt.Errorf("build provider: %w", err)
	}

	// The user's watchlist is used only as context for the AI prompt.
	assets, err := p.repo.GetUserAssets(userID)
	if err != nil {
		return nil, fmt.Errorf("get user assets: %w", err)
	}
	symbols := make([]string, len(assets))
	for i, a := range assets {
		symbols[i] = a.Symbol
	}

	// All recent articles, every source combined.
	articles, err := p.recentArticles()
	if err != nil {
		return nil, err
	}

	systemPrompt, _ := p.repo.GetSetting("ai_system_prompt")
	if systemPrompt == "" {
		systemPrompt = DefaultSystemPrompt
	}
	prompt := buildPrompt(systemPrompt, symbols, articles)
	summary, err := provider.Summarize(ctx, prompt)
	if err != nil {
		return nil, fmt.Errorf("AI summarize: %w", err)
	}
	return p.repo.CreateSummary(userID, summary, &providerCfg.ID)
}

// intSetting reads an integer setting from the repository, falling back to
// def when the setting is missing, empty, zero, or not a number.
func (p *Pipeline) intSetting(key string, def int) int {
	raw, _ := p.repo.GetSetting(key)
	n, _ := strconv.Atoi(raw)
	if n == 0 {
		return def
	}
	return n
}

// recentArticles loads recent articles bounded by the "articles_lookback_hours"
// and "summary_max_articles" settings (defaults: 24h, 50 articles). It errors
// when no recent article exists.
func (p *Pipeline) recentArticles() ([]models.Article, error) {
	hours := p.intSetting("articles_lookback_hours", 24)
	articles, err := p.repo.GetRecentArticles(hours)
	if err != nil {
		return nil, fmt.Errorf("get articles: %w", err)
	}
	if len(articles) == 0 {
		return nil, fmt.Errorf("no recent articles found")
	}
	if limit := p.intSetting("summary_max_articles", 50); len(articles) > limit {
		articles = articles[:limit]
	}
	return articles, nil
}
// GenerateForAll runs summary generation for every known user, best-effort:
// a failure for one user is reported on stdout and does not stop the loop.
// The function only fails when the user list itself cannot be loaded.
func (p *Pipeline) GenerateForAll(ctx context.Context) error {
	users, err := p.repo.ListUsers()
	if err != nil {
		return err
	}
	for _, u := range users {
		_, genErr := p.GenerateForUser(ctx, u.ID)
		if genErr != nil {
			// Keep going with the remaining users.
			fmt.Printf("summary for user %s: %v\n", u.Email, genErr)
		}
	}
	return nil
}
// buildPrompt assembles the final prompt: system instructions, the watched
// symbols, the analysis date, and the recent articles. Article bodies are
// truncated to about 1000 bytes on a rune boundary — the original byte slice
// content[:1000] could split a multi-byte UTF-8 sequence (accented French
// text is the norm here), producing an invalid character in the prompt.
func buildPrompt(systemPrompt string, symbols []string, articles []models.Article) string {
	var sb strings.Builder
	sb.WriteString(systemPrompt)
	sb.WriteString("\n\n")
	if len(symbols) > 0 {
		sb.WriteString("Le trader surveille particulièrement ces actifs (sois attentif à toute mention) : ")
		sb.WriteString(strings.Join(symbols, ", "))
		sb.WriteString(".\n\n")
	}
	sb.WriteString(fmt.Sprintf("Date d'analyse : %s\n\n", time.Now().Format("02/01/2006 15:04")))
	sb.WriteString("## Actualités\n\n")
	for i, a := range articles {
		sb.WriteString(fmt.Sprintf("### [%d] %s\n", i+1, a.Title))
		sb.WriteString(fmt.Sprintf("Source : %s\n", a.SourceName))
		if a.PublishedAt.Valid {
			sb.WriteString(fmt.Sprintf("Date : %s\n", a.PublishedAt.Time.Format("02/01/2006 15:04")))
		}
		sb.WriteString(truncateUTF8(a.Content, 1000))
		sb.WriteString("\n\n")
	}
	return sb.String()
}

// truncateUTF8 shortens s to at most max bytes plus an ellipsis, backing the
// cut up so it never lands inside a multi-byte UTF-8 sequence.
func truncateUTF8(s string, max int) string {
	if len(s) <= max {
		return s
	}
	cut := max
	for cut > 0 && !utf8.RuneStart(s[cut]) {
		cut--
	}
	return s[:cut] + "..."
}

View File

@ -0,0 +1,27 @@
package ai
import (
"context"
"fmt"
)
// Provider abstracts an AI text-generation backend (OpenAI, Anthropic,
// Gemini, Ollama).
type Provider interface {
	// Name returns the provider identifier used in configuration.
	Name() string
	// Summarize sends prompt to the backend and returns the generated text.
	Summarize(ctx context.Context, prompt string) (string, error)
	// ListModels returns the model names the backend offers.
	ListModels(ctx context.Context) ([]string, error)
}
// NewProvider builds the Provider implementation matching name. The endpoint
// argument is used only by "ollama" (which ignores apiKey); an unknown name
// yields an error.
func NewProvider(name, apiKey, model, endpoint string) (Provider, error) {
	switch name {
	case "openai":
		return newOpenAI(apiKey, model), nil
	case "anthropic":
		return newAnthropic(apiKey, model), nil
	case "gemini":
		return newGemini(apiKey, model), nil
	case "ollama":
		return newOllama(endpoint, model), nil
	}
	return nil, fmt.Errorf("unknown provider: %s", name)
}