package main

import (
	"context"
	"errors"
	"flag"
	"fmt"
	"io"
	"net/http"
	"os"
	"regexp"
	"strconv"
	"strings"
	"time"

	"github.com/beevik/etree"
	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promauto"
	"github.com/prometheus/client_golang/prometheus/promhttp"
)

const (
	// FeedUrl is the upstream Jovem Nerd podcast feed that this service filters.
	FeedUrl = "https://api.jovemnerd.com.br/feed-nerdcast/"
)

type (
	// ErrorRequestHandler is an http handler that may return an error, which
	// handleError translates into a 500 response.
	ErrorRequestHandler func(w http.ResponseWriter, r *http.Request) error
)

var (
	// SerieRegex splits an episode title into its series name, episode number and title.
	SerieRegex = regexp.MustCompile(`(?P<serie>.+) (?P<number>[0-9abc]+) \- (?P<title>.+)`)
)

var (
	// regexCollection maps the short series names accepted in the ?q= query
	// parameter to the title patterns used to match feed items.
	regexCollection = map[string]string{
		"nerdcast":     "NerdCast [0-9]+[a-c]*",
		"empreendedor": "Empreendedor [0-9]+",
		"mamicas":      "Caneca de Mamicas [0-9]+",
		"english":      "Speak English [0-9]+",
		"nerdcash":     "NerdCash [0-9]+",
		"bunker":       "Lá do Bunker( LDB especial Oscar|) [0-9]+",
		"tech":         "NerdTech [0-9]+",
		"genera":       "Generacast [0-9]+",
		"rpg":          "NerdCast RPG [0-9]+[a-c]*",
		"catar":        "Vai te Catar [0-9]+",
		"cloud":        "Nerd na Cloud [0-9]+",
		"contar":       "Vou (T|t)e Contar [0-9]+",
		"parceiro":     "Papo de Parceiro [0-9]+",
		"cash":         "NerdCash [0-9]+",
	}

	feedRequest = promauto.NewHistogramVec(prometheus.HistogramOpts{
		Name:    "feed_request",
		Help:    "How long jovemnerd takes to answer",
		Buckets: []float64{.01, .025, .05, .1, .25, .5, 1, 2.5, 5, 10},
	}, []string{"status_code"})

	httpRequest = promauto.NewHistogramVec(prometheus.HistogramOpts{
		Name:    "http_request",
		Help:    "How long the application takes to complete the request",
		Buckets: []float64{.01, .025, .05, .1, .25, .5, 1, 2.5, 5, 10},
	}, []string{"status_code", "user_agent"})

	seriesCount = promauto.NewCounterVec(prometheus.CounterOpts{
		Name: "serie_count",
		Help: "How often a series is requested",
	}, []string{"serie"})
)

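// getSeries parses the comma-separated "q" query parameter and returns the
// series names that are known in regexCollection. When nothing valid is
// requested it falls back to "nerdcast".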
func getSeries(r *http.Request) []string {
	query := r.URL.Query().Get("q")

	var series []string

	for _, q := range strings.Split(query, ",") {
		if _, ok := regexCollection[q]; ok {
			series = append(series, q)
		}
	}

	if len(series) > 0 {
		return series
	}

	return []string{"nerdcast"}
}

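// match reports whether the episode title matches any of the requested
// series patterns from regexCollection.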
func match(title string, series []string) bool {
	for _, s := range series {
		if ok, err := regexp.MatchString(regexCollection[s], title); err == nil && ok {
			return true
		}
	}

	return false
}

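// fetchXML downloads the upstream feed and records the request duration,
// labelled by status code, in the feedRequest histogram.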
func fetchXML(ctx context.Context) ([]byte, error) {
	t := time.Now()
	c := http.StatusInternalServerError

	defer func() {
		since := time.Since(t).Seconds()
		code := strconv.Itoa(c)
		feedRequest.WithLabelValues(code).Observe(since)
	}()

	req, err := http.NewRequestWithContext(ctx, http.MethodGet, FeedUrl, nil)
	if err != nil {
		return nil, err
	}

	res, err := http.DefaultClient.Do(req)
	if err != nil {
		return nil, err
	}
	defer res.Body.Close()

	c = res.StatusCode

	if c == http.StatusOK {
		return io.ReadAll(res.Body)
	}

	return nil, errors.New("unexpected http status from feed")
}

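// appendTag appends the given suffix to the element's text content.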
func appendTag(tag *etree.Element, ap string) {
	if tag == nil {
		return
	}
	text := tag.Text()
	tag.SetText(text + ap)
}

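// filterBySeries parses the feed XML and removes every item whose title does
// not match one of the requested series. When temper is true, the channel
// title, description and related tags are annotated with the selected series
// names.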
func filterBySeries(series []string, xml []byte, temper bool) ([]byte, error) {
	doc := etree.NewDocument()
	err := doc.ReadFromBytes(xml)
	if err != nil {
		return nil, err
	}

	channel := doc.FindElement("//channel")
	if channel == nil {
		return nil, errors.New("feed has no channel element")
	}

	if temper {
		tmp := strings.ToUpper(strings.Join(series, ","))
		tmp = fmt.Sprintf(" [%s]", tmp)
		appendTag(channel.FindElement("title"), tmp)
		appendTag(channel.FindElement("description"), tmp)
		appendTag(channel.FindElement("link"), "?"+tmp)
		appendTag(channel.FindElement("author[namespace-prefix()='itunes']"), tmp)
		appendTag(channel.FindElement("subtitle[namespace-prefix()='itunes']"), tmp)
		appendTag(channel.FindElement("summary[namespace-prefix()='itunes']"), tmp)
		appendTag(channel.FindElement("author[namespace-prefix()='googleplay']"), tmp)
	}

	for _, tag := range channel.FindElements("item") {
		title := tag.FindElement("title")
		if title == nil || !match(title.Text(), series) {
			channel.RemoveChild(tag)
		}
	}

	return doc.WriteToBytes()
}

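// handleError adapts an ErrorRequestHandler to an http.HandlerFunc, answering
// with 500 Internal Server Error whenever the handler returns an error.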
func handleError(next ErrorRequestHandler) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		if err := next(w, r); err != nil {
			w.WriteHeader(http.StatusInternalServerError)
		}
	}
}

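// observe is a middleware that measures how long the wrapped handler takes,
// labelled by status code and user agent, and counts how often each series is
// requested. It expects the ResponseWriter to be the *responseWriter created
// by wrap.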
func observe(next http.HandlerFunc) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		t := time.Now()

		next(w, r)

		rw := w.(*responseWriter)
		since := time.Since(t).Seconds()
		code := strconv.Itoa(rw.Status())
		userAgent := r.Header.Get("user-agent")
		httpRequest.WithLabelValues(code, userAgent).Observe(since)

		for _, s := range getSeries(r) {
			seriesCount.WithLabelValues(s).Inc()
		}
	}
}

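// wrap replaces the ResponseWriter with a responseWriter so downstream
// middleware can read the status code after the handler has run.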
func wrap(next http.HandlerFunc) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		next(NewResponseWriter(w), r)
	}
}

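// titles writes the matching episode titles as plain text, one per line.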
func titles(w http.ResponseWriter, r *http.Request) error {
	xml, err := fetchXML(r.Context())
	if err != nil {
		return err
	}

	doc := etree.NewDocument()
	err = doc.ReadFromBytes(xml)
	if err != nil {
		return err
	}

	series := getSeries(r)

	els := doc.FindElements("//channel/item")
	for _, e := range els {
		title := e.FindElement("title")
		if title == nil {
			continue
		}
		txt := title.Text()
		if match(txt, series) {
			_, err = w.Write([]byte(txt + "\n"))
			if err != nil {
				return err
			}
		}
	}

	return nil
}

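// podcast serves the upstream feed filtered down to the requested series.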
func podcast(w http.ResponseWriter, r *http.Request) error {
	xml, err := fetchXML(r.Context())
	if err != nil {
		return err
	}

	series := getSeries(r)
	filteredXML, err := filterBySeries(series, xml, true)
	if err != nil {
		return err
	}

	_, err = w.Write(filteredXML)
	if err != nil {
		return err
	}

	return nil
}

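// genSeries fetches the feed and prints the distinct series names found in
// the episode titles; it backs the "series" command-line mode.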
func genSeries() error {
	xml, err := fetchXML(context.Background())
	if err != nil {
		return err
	}

	doc := etree.NewDocument()
	err = doc.ReadFromBytes(xml)
	if err != nil {
		return err
	}

	unique := make(map[string]any)
	els := doc.FindElements("//channel/item")
	for _, e := range els {
		title := e.FindElement("title")
		if title == nil {
			continue
		}
		res := SerieRegex.FindStringSubmatch(title.Text())
		if len(res) > 1 {
			unique[res[1]] = nil
		}
	}

	for k := range unique {
		fmt.Println(k)
	}

	return nil
}

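// main either prints the available series (when invoked with the "series"
// argument) or starts the HTTP server exposing /, /titles and /metrics.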
func main() {
	if len(os.Args) > 1 && os.Args[1] == "series" {
		err := genSeries()
		if err != nil {
			panic(err.Error())
		}
		return
	}

	var (
		addr = flag.String("addr", ":8080", "Server address")
	)

	flag.Parse()

	mux := http.NewServeMux()
	mux.Handle("/metrics", promhttp.Handler())
	mux.HandleFunc("/titles", wrap(handleError(titles)))
	mux.HandleFunc("/", wrap(observe(handleError(podcast))))

	server := http.Server{
		Handler: mux,
		Addr:    *addr,
	}

	err := server.ListenAndServe()
	if err != nil {
		fmt.Fprintf(os.Stderr, "Server error: %s\n", err)
	}
}
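
// The middleware above relies on a responseWriter type that is not defined in
// this file. The definition below is a minimal sketch (an assumption, not
// necessarily the project's own implementation); if the type already exists
// elsewhere in the package, drop this block.

// responseWriter wraps http.ResponseWriter and remembers the status code so
// observe can label its metrics.
type responseWriter struct {
	http.ResponseWriter
	status int
}

// NewResponseWriter returns a responseWriter that defaults to 200 OK, which is
// what net/http reports when a handler never calls WriteHeader.
func NewResponseWriter(w http.ResponseWriter) *responseWriter {
	return &responseWriter{ResponseWriter: w, status: http.StatusOK}
}

// WriteHeader records the status code before delegating to the wrapped writer.
func (rw *responseWriter) WriteHeader(code int) {
	rw.status = code
	rw.ResponseWriter.WriteHeader(code)
}

// Status returns the recorded status code.
func (rw *responseWriter) Status() int {
	return rw.status
}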