package main

import (
	"context"
	"embed"
	"errors"
	"flag"
	"fmt"
	"io"
	"log/slog"
	"net/http"
	"os"
	"regexp"
	"strconv"
	"strings"
	"time"

	"github.com/beevik/etree"
	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promauto"
	"github.com/prometheus/client_golang/prometheus/promhttp"
)

const (
	// feedUrl is the upstream NerdCast RSS feed.
	feedUrl = "https://api.jovemnerd.com.br/feed-nerdcast/"
)

type (
	// errorRequestHandler is an http.HandlerFunc that may return an error,
	// which handleError logs and turns into a 500 response.
	errorRequestHandler func(w http.ResponseWriter, r *http.Request) error
)

var (
	//go:embed static/*
	assets embed.FS

	// serieRegex splits an episode title into series name, episode number, and title.
	serieRegex = regexp.MustCompile(`(?P<serie>.+) (?P<number>[0-9abc]+) \- (?P<title>.+)`)
)

var (
	// regexCollection maps query keys to the title patterns that identify each series.
	regexCollection = map[string]string{
		"nerdcast": "NerdCast [0-9]+[a-c]*",
		"empreendedor": "Empreendedor [0-9]+",
		"mamicas": "Caneca de Mamicas [0-9]+",
		"english": "Speak English [0-9]+",
		"nerdcash": "NerdCash [0-9]+",
		"bunker": "Lá do Bunker( LDB especial Oscar|) [0-9]+",
		"tech": "NerdTech [0-9]+",
		"genera": "Generacast [0-9]+",
		"rpg": "NerdCast RPG [0-9]+[a-c]*",
		"catar": "Vai te Catar [0-9]+",
		"cloud": "Nerd na Cloud [0-9]+",
		"contar": "Vou (T|t)e Contar [0-9]+",
		"parceiro": "Papo de Parceiro [0-9]+",
	}

	feedRequest = promauto.NewHistogramVec(prometheus.HistogramOpts{
		Name: "feed_request",
		Help: "How long the jovemnerd feed takes to respond",
		Buckets: []float64{.01, .025, .05, .1, .25, .5, 1, 2.5, 5, 10},
	}, []string{"status_code"})

	httpRequest = promauto.NewHistogramVec(prometheus.HistogramOpts{
		Name: "http_request",
		Help: "How long the application takes to complete the request",
		Buckets: []float64{.01, .025, .05, .1, .25, .5, 1, 2.5, 5, 10},
	}, []string{"status_code", "user_agent"})

	seriesCount = promauto.NewCounterVec(prometheus.CounterOpts{
		Name: "serie_count",
		Help: "How often a series is requested",
	}, []string{"serie"})
)

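// getSeries reads the comma-separated "q" query parameter and returns the
// known series keys it contains, falling back to "nerdcast" when none match.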
func getSeries(r *http.Request) []string {
	query := r.URL.Query().Get("q")

	var series []string

	for _, q := range strings.Split(query, ",") {
		if _, ok := regexCollection[q]; ok {
			series = append(series, q)
		}
	}

	if len(series) > 0 {
		return series
	}

	return []string{"nerdcast"}
}

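// match reports whether the episode title matches any of the given series patterns.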
func match(title string, series []string) bool {
	for _, s := range series {
		if ok, err := regexp.MatchString(regexCollection[s], title); err == nil && ok {
			return true
		}
	}

	return false
}

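// fetchXML downloads the upstream feed, recording the request duration and
// status code in the feedRequest histogram.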
func fetchXML(ctx context.Context) ([]byte, error) {
	t := time.Now()
	c := http.StatusInternalServerError

	defer func() {
		since := time.Since(t).Seconds()
		code := strconv.Itoa(c)
		feedRequest.WithLabelValues(code).Observe(since)
	}()

	req, err := http.NewRequestWithContext(ctx, http.MethodGet, feedUrl, nil)
	if err != nil {
		return nil, err
	}

	res, err := http.DefaultClient.Do(req)
	if err != nil {
		return nil, err
	}
	defer res.Body.Close()

	c = res.StatusCode

	if c == http.StatusOK {
		return io.ReadAll(res.Body)
	}

	return nil, fmt.Errorf("unexpected status code %d from feed", c)
}

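// appendTag appends the given suffix to the element's text content.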
func appendTag(tag *etree.Element, ap string) {
	if tag == nil {
		return
	}

	text := tag.Text()
	tag.SetText(text + ap)
}

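// filterBySeries removes every feed item whose title does not match one of the
// selected series. When temper is true, the channel title, description, link,
// and author/subtitle/summary tags are annotated with the selected series.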
func filterBySeries(series []string, xml []byte, temper bool) ([]byte, error) {
	doc := etree.NewDocument()
	err := doc.ReadFromBytes(xml)
	if err != nil {
		return nil, err
	}

	channel := doc.FindElement("//channel")
	if channel == nil {
		return nil, errors.New("channel element not found in feed")
	}

	if temper {
		tmp := strings.ToUpper(strings.Join(series, ","))
		tmp = fmt.Sprintf(" [%s]", tmp)
		appendTag(channel.FindElement("title"), tmp)
		appendTag(channel.FindElement("description"), tmp)
		appendTag(channel.FindElement("link"), "?"+tmp)
		appendTag(channel.FindElement("author[namespace-prefix()='itunes']"), tmp)
		appendTag(channel.FindElement("subtitle[namespace-prefix()='itunes']"), tmp)
		appendTag(channel.FindElement("summary[namespace-prefix()='itunes']"), tmp)
		appendTag(channel.FindElement("author[namespace-prefix()='googleplay']"), tmp)
	}

	for _, tag := range channel.FindElements("item") {
		title := tag.FindElement("title").Text()
		if !match(title, series) {
			channel.RemoveChild(tag)
		}
	}

	return doc.WriteToBytes()
}

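// handleError adapts an errorRequestHandler into an http.HandlerFunc, logging
// any returned error and answering with a 500 status.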
func handleError(next errorRequestHandler) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		if err := next(w, r); err != nil {
			slog.ErrorContext(r.Context(), "Error", "error", err.Error())
			w.WriteHeader(http.StatusInternalServerError)
		}
	}
}

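// observe records duration, status code, and user agent for each request, and
// counts how often every requested series is asked for. It expects the
// ResponseWriter to be the wrapper created by wrap.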
func observe(next http.HandlerFunc) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		t := time.Now()

		next(w, r)

		rw := w.(*responseWriter)
		since := time.Since(t).Seconds()
		code := strconv.Itoa(rw.Status())
		userAgent := r.Header.Get("user-agent")
		httpRequest.WithLabelValues(code, userAgent).Observe(since)

		for _, s := range getSeries(r) {
			seriesCount.WithLabelValues(s).Inc()
		}
	}
}

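// wrap replaces the ResponseWriter with the status-recording wrapper so that
// observe can read the response code afterwards.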
func wrap(next http.HandlerFunc) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		next(NewResponseWriter(w), r)
	}
}

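// titles writes the titles of all episodes that belong to the requested series,
// one per line.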
func titles(w http.ResponseWriter, r *http.Request) error {
	xml, err := fetchXML(r.Context())
	if err != nil {
		return err
	}

	doc := etree.NewDocument()
	err = doc.ReadFromBytes(xml)
	if err != nil {
		return err
	}

	series := getSeries(r)

	for _, e := range doc.FindElements("//channel/item") {
		title := e.FindElement("title").Text()
		if match(title, series) {
			if _, err := w.Write([]byte(title + "\n")); err != nil {
				return err
			}
		}
	}

	return nil
}

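// view serves the embedded static index page.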
func view(w http.ResponseWriter, r *http.Request) error {
	data, err := assets.ReadFile("static/index.html")
	if err != nil {
		return err
	}

	_, err = w.Write(data)
	if err != nil {
		return err
	}

	return nil
}

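// podcast serves the upstream feed filtered down to the requested series.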
func podcast(w http.ResponseWriter, r *http.Request) error {
	xml, err := fetchXML(r.Context())
	if err != nil {
		return err
	}

	series := getSeries(r)
	filteredXML, err := filterBySeries(series, xml, true)
	if err != nil {
		return err
	}

	_, err = w.Write(filteredXML)
	if err != nil {
		return err
	}

	return nil
}

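// genSeries downloads the feed and prints the distinct series names found in
// the episode titles (used by the "series" subcommand).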
func genSeries() error {
	xml, err := fetchXML(context.Background())
	if err != nil {
		return err
	}

	doc := etree.NewDocument()
	err = doc.ReadFromBytes(xml)
	if err != nil {
		return err
	}

	unique := make(map[string]any)
	els := doc.FindElements("//channel/item")
	for _, e := range els {
		txt := e.FindElement("title").Text()
		res := serieRegex.FindStringSubmatch(txt)
		if len(res) > 1 {
			unique[res[1]] = nil
		}
	}

	for k := range unique {
		fmt.Println(k)
	}

	return nil
}

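// main either prints the known series names (when invoked with the "series"
// argument) or starts the HTTP server.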
func main() {
	if len(os.Args) > 1 && os.Args[1] == "series" {
		err := genSeries()
		if err != nil {
			panic(err.Error())
		}
		return
	}

	var (
		addr = flag.String("addr", ":8080", "Server address")
	)

	flag.Parse()

	mux := http.NewServeMux()
	mux.Handle("/metrics", promhttp.Handler())
	mux.HandleFunc("/titles", wrap(handleError(titles)))
	mux.HandleFunc("/view", wrap(handleError(view)))
	mux.HandleFunc("/", wrap(observe(handleError(podcast))))

	server := http.Server{
		Handler: mux,
		Addr: *addr,
	}

	err := server.ListenAndServe()
	if err != nil {
		fmt.Printf("Server error: %s\n", err.Error())
	}
}