feat: Sentinela v0.2.0 — Brazilian Financial Data API in Go
- 20 Go source files, single 16MB binary
- SQLite + FTS5 full-text search (pure Go, no CGO)
- BCB integration: Selic, CDI, IPCA, USD/BRL, EUR/BRL
- CVM integration: 2,524 companies from registry
- Fiber v2 REST API with 42 handlers
- Auto-seeds on first run (~5s for BCB + CVM)
- Token bucket rate limiter, optional API key auth
- Periodic sync scheduler (configurable)
- Graceful shutdown, structured logging (slog)
- All endpoints tested with real data
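The rate-limiting and auth middleware are not part of this diff. As a rough illustration of the token-bucket limiter mentioned above, here is a minimal Fiber v2 middleware sketch built on golang.org/x/time/rate; the function name, limits, and wiring are assumptions for illustration, not the code shipped in this commit.

package middleware

import (
	"github.com/gofiber/fiber/v2"
	"golang.org/x/time/rate"
)

// NewRateLimiter returns a Fiber middleware that admits roughly rps requests
// per second with a burst of `burst`, and answers 429 otherwise.
// Illustrative sketch only; not the limiter in this commit.
func NewRateLimiter(rps float64, burst int) fiber.Handler {
	limiter := rate.NewLimiter(rate.Limit(rps), burst)
	return func(c *fiber.Ctx) error {
		if !limiter.Allow() {
			return c.SendStatus(fiber.StatusTooManyRequests)
		}
		return c.Next()
	}
}

A production limiter would normally keep one bucket per client IP or API key; a single global bucket is the simplest possible illustration of the technique.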
internal/fetcher/bcb.go (new file, 216 lines)
@@ -0,0 +1,216 @@
package fetcher

import (
	"encoding/json"
	"fmt"
	"io"
	"log/slog"
	"net/http"
	"strconv"
	"strings"
	"time"

	"github.com/sentinela-go/internal/db"
)

type bcbRecord struct {
	Data  string `json:"data"`
	Valor string `json:"valor"`
}

func parseBCBDate(d string) string {
	// dd/mm/yyyy -> yyyy-mm-dd
	parts := strings.Split(d, "/")
	if len(parts) != 3 {
		return d
	}
	return parts[2] + "-" + parts[1] + "-" + parts[0]
}

func fetchBCBSeries(seriesID int, lastN int) ([]bcbRecord, error) {
	// BCB "ultimos" endpoint caps at 20 records. Use date range instead.
	now := time.Now()
	// Estimate the calendar window: lastN business days fit comfortably in lastN*2
	// calendar days, with a 60-day floor. Note this heuristic assumes a daily series;
	// monthly series such as IPCA only get the most recent few observations.
	daysBack := lastN * 2
	if daysBack < 60 {
		daysBack = 60
	}
	from := now.AddDate(0, 0, -daysBack).Format("02/01/2006")
	to := now.Format("02/01/2006")
	url := fmt.Sprintf("https://api.bcb.gov.br/dados/serie/bcdata.sgs.%d/dados?formato=json&dataInicial=%s&dataFinal=%s", seriesID, from, to)

	resp, err := http.Get(url)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}

	// BCB returns an error object (not array) on failure
	if len(body) > 0 && body[0] == '{' {
		return nil, fmt.Errorf("BCB API error for series %d: %s", seriesID, string(body[:min(300, len(body))]))
	}

	var records []bcbRecord
	if err := json.Unmarshal(body, &records); err != nil {
		return nil, fmt.Errorf("parse BCB series %d: %w (body: %s)", seriesID, err, string(body[:min(200, len(body))]))
	}
	return records, nil
}

func FetchSelic(database *db.DB) error {
	slog.Info("fetching Selic data from BCB")
	// Daily rate (series 432)
	daily, err := fetchBCBSeries(432, 750)
	if err != nil {
		return fmt.Errorf("selic daily: %w", err)
	}
	// Target rate (series 11)
	target, err := fetchBCBSeries(11, 750)
	if err != nil {
		slog.Warn("failed to fetch selic target", "error", err)
		target = nil
	}

	targetMap := make(map[string]float64)
	for _, r := range target {
		date := parseBCBDate(r.Data)
		v, _ := strconv.ParseFloat(strings.Replace(r.Valor, ",", ".", 1), 64)
		targetMap[date] = v
	}

	count := 0
	for _, r := range daily {
		date := parseBCBDate(r.Data)
		v, _ := strconv.ParseFloat(strings.Replace(r.Valor, ",", ".", 1), 64)
		var tp *float64
		if t, ok := targetMap[date]; ok {
			tp = &t
		}
		if err := database.InsertSelic(date, v, nil, tp); err == nil {
			count++
		}
	}
	slog.Info("selic data loaded", "records", count)
	return nil
}

func FetchCDI(database *db.DB) error {
	slog.Info("fetching CDI data from BCB")
	daily, err := fetchBCBSeries(12, 750)
	if err != nil {
		return fmt.Errorf("cdi daily: %w", err)
	}
	annual, err := fetchBCBSeries(4389, 750)
	if err != nil {
		slog.Warn("failed to fetch cdi annual", "error", err)
		annual = nil
	}

	annualMap := make(map[string]float64)
	for _, r := range annual {
		date := parseBCBDate(r.Data)
		v, _ := strconv.ParseFloat(strings.Replace(r.Valor, ",", ".", 1), 64)
		annualMap[date] = v
	}

	count := 0
	for _, r := range daily {
		date := parseBCBDate(r.Data)
		v, _ := strconv.ParseFloat(strings.Replace(r.Valor, ",", ".", 1), 64)
		var ap *float64
		if a, ok := annualMap[date]; ok {
			ap = &a
		}
		if err := database.InsertCDI(date, v, ap); err == nil {
			count++
		}
	}
	slog.Info("cdi data loaded", "records", count)
	return nil
}

func FetchIPCA(database *db.DB) error {
	slog.Info("fetching IPCA data from BCB")
	monthly, err := fetchBCBSeries(433, 36)
	if err != nil {
		return fmt.Errorf("ipca monthly: %w", err)
	}
	acc, err := fetchBCBSeries(13522, 36)
	if err != nil {
		slog.Warn("failed to fetch ipca acc 12m", "error", err)
		acc = nil
	}

	accMap := make(map[string]float64)
	for _, r := range acc {
		date := parseBCBDate(r.Data)
		v, _ := strconv.ParseFloat(strings.Replace(r.Valor, ",", ".", 1), 64)
		accMap[date] = v
	}

	count := 0
	for _, r := range monthly {
		date := parseBCBDate(r.Data)
		v, _ := strconv.ParseFloat(strings.Replace(r.Valor, ",", ".", 1), 64)
		var ap *float64
		if a, ok := accMap[date]; ok {
			ap = &a
		}
		if err := database.InsertIPCA(date, v, ap); err == nil {
			count++
		}
	}
	slog.Info("ipca data loaded", "records", count)
	return nil
}

func FetchFX(database *db.DB) error {
	slog.Info("fetching FX data from BCB")
	pairs := map[string]int{
		"USD/BRL": 1,
		"EUR/BRL": 21619,
	}
	for pair, series := range pairs {
		records, err := fetchBCBSeries(series, 750)
		if err != nil {
			slog.Warn("failed to fetch fx", "pair", pair, "error", err)
			continue
		}
		count := 0
		for _, r := range records {
			date := parseBCBDate(r.Data)
			v, _ := strconv.ParseFloat(strings.Replace(r.Valor, ",", ".", 1), 64)
			if err := database.InsertFX(date, pair, v); err == nil {
				count++
			}
		}
		slog.Info("fx data loaded", "pair", pair, "records", count)
	}
	return nil
}

func FetchAllBCB(database *db.DB) error {
	start := time.Now()
	var errs []string
	if err := FetchSelic(database); err != nil {
		errs = append(errs, err.Error())
	}
	if err := FetchCDI(database); err != nil {
		errs = append(errs, err.Error())
	}
	if err := FetchIPCA(database); err != nil {
		errs = append(errs, err.Error())
	}
	if err := FetchFX(database); err != nil {
		errs = append(errs, err.Error())
	}
	slog.Info("BCB sync complete", "duration", time.Since(start))
	if len(errs) > 0 {
		return fmt.Errorf("bcb errors: %s", strings.Join(errs, "; "))
	}
	return nil
}
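For reference, the SGS endpoints used above return a JSON array of {"data","valor"} pairs with dd/mm/yyyy dates and decimal commas, which is why parseBCBDate and the comma-to-dot replacement exist. A small self-contained sketch of that decoding step follows; the sample values are made up.

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
	"strings"
)

// Mirrors the bcbRecord type in internal/fetcher/bcb.go.
type bcbRecord struct {
	Data  string `json:"data"`
	Valor string `json:"valor"`
}

func main() {
	// Shape of a typical SGS response body (values illustrative only).
	body := []byte(`[{"data":"02/01/2025","valor":"12,25"},{"data":"03/01/2025","valor":"12,25"}]`)

	var records []bcbRecord
	if err := json.Unmarshal(body, &records); err != nil {
		panic(err)
	}
	for _, r := range records {
		// dd/mm/yyyy -> yyyy-mm-dd, the same transformation parseBCBDate performs.
		p := strings.Split(r.Data, "/")
		date := p[2] + "-" + p[1] + "-" + p[0]
		// SGS uses a decimal comma; swap it before parsing.
		v, _ := strconv.ParseFloat(strings.Replace(r.Valor, ",", ".", 1), 64)
		fmt.Println(date, v)
	}
}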
internal/fetcher/cvm.go (new file, 197 lines)
@@ -0,0 +1,197 @@
package fetcher

import (
	"archive/zip"
	"bytes"
	"encoding/csv"
	"fmt"
	"io"
	"log/slog"
	"net/http"
	"strings"
	"time"

	"golang.org/x/text/encoding/charmap"
	"golang.org/x/text/transform"

	"github.com/sentinela-go/internal/db"
)

func FetchCVMCompanies(database *db.DB) error {
	slog.Info("fetching CVM company registry")
	resp, err := http.Get("https://dados.cvm.gov.br/dados/CIA_ABERTA/CAD/DADOS/cad_cia_aberta.csv")
	if err != nil {
		return fmt.Errorf("fetch cvm companies: %w", err)
	}
	defer resp.Body.Close()

	reader := transform.NewReader(resp.Body, charmap.ISO8859_1.NewDecoder())
	csvReader := csv.NewReader(reader)
	csvReader.Comma = ';'
	csvReader.LazyQuotes = true

	header, err := csvReader.Read()
	if err != nil {
		return fmt.Errorf("read header: %w", err)
	}

	colIdx := make(map[string]int)
	for i, h := range header {
		colIdx[strings.TrimSpace(h)] = i
	}

	count := 0
	for {
		record, err := csvReader.Read()
		if err == io.EOF {
			break
		}
		if err != nil {
			continue
		}

		getCol := func(name string) string {
			if idx, ok := colIdx[name]; ok && idx < len(record) {
				return strings.TrimSpace(record[idx])
			}
			return ""
		}

		c := &db.Company{
			Name:    getCol("DENOM_SOCIAL"),
			CNPJ:    getCol("CNPJ_CIA"),
			CVMCode: getCol("CD_CVM"),
			Status:  getCol("SIT"),
			Sector:  getCol("SETOR_ATIV"),
		}
		if c.CNPJ == "" || c.Name == "" {
			continue
		}

		if err := database.UpsertCompany(c); err != nil {
			continue
		}
		count++
	}

	database.RebuildCompaniesFTS()
	slog.Info("CVM companies loaded", "count", count)
	return nil
}

func FetchCVMFilings(database *db.DB, year int) error {
	slog.Info("fetching CVM IPE filings", "year", year)
	url := fmt.Sprintf("https://dados.cvm.gov.br/dados/CIA_ABERTA/DOC/IPE/DADOS/ipe_cia_aberta_%d.zip", year)
	resp, err := http.Get(url)
	if err != nil {
		return fmt.Errorf("fetch ipe %d: %w", year, err)
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return err
	}

	zipReader, err := zip.NewReader(bytes.NewReader(body), int64(len(body)))
	if err != nil {
		return fmt.Errorf("open zip: %w", err)
	}

	count := 0
	for _, f := range zipReader.File {
		if !strings.HasSuffix(f.Name, ".csv") {
			continue
		}
		rc, err := f.Open()
		if err != nil {
			continue
		}

		reader := transform.NewReader(rc, charmap.ISO8859_1.NewDecoder())
		csvReader := csv.NewReader(reader)
		csvReader.Comma = ';'
		csvReader.LazyQuotes = true

		header, err := csvReader.Read()
		if err != nil {
			rc.Close()
			continue
		}

		colIdx := make(map[string]int)
		for i, h := range header {
			colIdx[strings.TrimSpace(h)] = i
		}

		for {
			record, err := csvReader.Read()
			if err != nil {
				break
			}

			getCol := func(name string) string {
				if idx, ok := colIdx[name]; ok && idx < len(record) {
					return strings.TrimSpace(record[idx])
				}
				return ""
			}

			cnpj := getCol("CNPJ_CIA")
			extID := getCol("NUM_SEQ")
			if extID == "" {
				extID = fmt.Sprintf("%s-%s-%s", cnpj, getCol("DT_ENTREGA"), getCol("NUM_PROTOCOLO"))
			}

			// Try to find company
			var companyID *int64
			if cnpj != "" {
				if c, err := database.GetCompanyByCNPJ(cnpj); err == nil && c != nil {
					companyID = &c.ID
				}
			}

			filing := &db.Filing{
				ExternalID:    extID,
				CompanyID:     companyID,
				CNPJ:          cnpj,
				Category:      getCol("CATEG_DOC"),
				Type:          getCol("TP_DOC"),
				Species:       getCol("ESPECIE"),
				Subject:       getCol("ASSUNTO"),
				ReferenceDate: getCol("DT_REFER"),
				DeliveryDate:  getCol("DT_ENTREGA"),
				Protocol:      getCol("NUM_PROTOCOLO"),
				Version:       getCol("VERSAO"),
				DownloadURL:   getCol("LINK_DOC"),
			}

			if filing.DeliveryDate == "" {
				continue
			}

			if err := database.UpsertFiling(filing); err != nil {
				continue
			}
			count++
		}
		rc.Close()
	}

	database.RebuildFilingsFTS()
	slog.Info("CVM filings loaded", "year", year, "count", count)
	return nil
}

func FetchAllCVM(database *db.DB) error {
	start := time.Now()
	if err := FetchCVMCompanies(database); err != nil {
		return err
	}
	// Fetch filings for the current and previous year (errors here are non-fatal)
	currentYear := time.Now().Year()
	FetchCVMFilings(database, currentYear)
	FetchCVMFilings(database, currentYear-1)
	slog.Info("CVM sync complete", "duration", time.Since(start))
	return nil
}
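The CVM CSVs are served in ISO-8859-1 (Latin-1), which is why both loaders wrap the response body in a golang.org/x/text transform reader before handing it to encoding/csv. A standalone sketch of that decoding step, using a made-up row:

package main

import (
	"encoding/csv"
	"fmt"
	"strings"

	"golang.org/x/text/encoding/charmap"
	"golang.org/x/text/transform"
)

func main() {
	// A fabricated registry row with "SÃO" encoded as ISO-8859-1 bytes (0xC3 is Ã in Latin-1).
	raw := "CNPJ_CIA;DENOM_SOCIAL\n00.000.000/0001-00;BANCO S\xc3O PAULO S.A.\n"

	// Decode Latin-1 to UTF-8 on the fly, then parse with the same settings as cvm.go.
	r := csv.NewReader(transform.NewReader(strings.NewReader(raw), charmap.ISO8859_1.NewDecoder()))
	r.Comma = ';'
	r.LazyQuotes = true

	rows, err := r.ReadAll()
	if err != nil {
		panic(err)
	}
	fmt.Println(rows[1][1]) // BANCO SÃO PAULO S.A.
}

Without the decoder, accented company names would arrive as mojibake and break both the FTS index and CNPJ/name deduplication.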
internal/fetcher/scheduler.go (new file, 28 lines)
@@ -0,0 +1,28 @@
package fetcher

import (
	"log/slog"
	"time"

	"github.com/sentinela-go/internal/db"
)

func StartScheduler(database *db.DB, interval time.Duration, stop <-chan struct{}) {
	ticker := time.NewTicker(interval)
	defer ticker.Stop()

	for {
		select {
		case <-ticker.C:
			slog.Info("scheduled sync starting")
			if err := FetchAllBCB(database); err != nil {
				slog.Error("scheduled BCB sync failed", "error", err)
			}
			if err := FetchAllCVM(database); err != nil {
				slog.Error("scheduled CVM sync failed", "error", err)
			}
		case <-stop:
			return
		}
	}
}
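StartScheduler blocks until the stop channel closes, so the caller is expected to run it in a goroutine and close the channel on shutdown. A minimal sketch of that wiring is below; the db.Open constructor, database path, and 6-hour interval are assumptions for illustration, and the real main.go is not part of this excerpt.

package main

import (
	"log/slog"
	"os"
	"os/signal"
	"syscall"
	"time"

	"github.com/sentinela-go/internal/db"
	"github.com/sentinela-go/internal/fetcher"
)

func main() {
	// db.Open is assumed here; the actual constructor lives in internal/db.
	database, err := db.Open("sentinela.db")
	if err != nil {
		slog.Error("open database", "error", err)
		os.Exit(1)
	}

	// Run the periodic sync in the background; 6h is an arbitrary example interval.
	stop := make(chan struct{})
	go fetcher.StartScheduler(database, 6*time.Hour, stop)

	// Block until SIGINT/SIGTERM, then signal the scheduler to exit.
	sig := make(chan os.Signal, 1)
	signal.Notify(sig, syscall.SIGINT, syscall.SIGTERM)
	<-sig
	close(stop)
	slog.Info("shutdown complete")
}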