1package main
2
3import (
4 "context"
5 "errors"
6 "flag"
7 "fmt"
8 "io"
9 "net/http"
10 "regexp"
11 "strconv"
12 "strings"
13 "time"
14
15 "github.com/beevik/etree"
16 "github.com/prometheus/client_golang/prometheus"
17 "github.com/prometheus/client_golang/prometheus/promauto"
18 "github.com/prometheus/client_golang/prometheus/promhttp"
19)
20
// ErrorRequestHandler is an http.HandlerFunc variant that may return an
// error; handleError adapts it back to a plain http.HandlerFunc.
type ErrorRequestHandler func(w http.ResponseWriter, r *http.Request) error

const (
	// FeedUrl is the upstream Jovem Nerd podcast RSS feed endpoint.
	FeedUrl = "https://api.jovemnerd.com.br/feed-nerdcast/"
)
26
var (
	// RegexCollection maps a series slug (the values accepted in the "q"
	// query parameter) to the regular expression that matches that
	// series' episode titles in the feed.
	RegexCollection = map[string]string{
		"nerdcast":     "NerdCast [0-9]+[a-c]* -",
		"empreendedor": "Empreendedor [0-9]+ -",
		"mamicas":      "Caneca de Mamicas [0-9]+ -",
		"english":      "Speak English [0-9]+ -",
		"nerdcash":     "NerdCash [0-9]+ -",
		"bunker":       "Lá do Bunker [0-9]+ -",
		"tech":         "NerdTech [0-9]+ -",
		"genera":       "Generacast [0-9]+ -",
	}

	// feedRequest records upstream feed fetch latency, labeled by the
	// HTTP status code returned by the feed server.
	feedRequest = promauto.NewHistogramVec(prometheus.HistogramOpts{
		Name: "feed_request",
		Help: "How long jovemnerd takes to answer",
		Buckets: []float64{.01, .025, .05, .1, .25, .5, 1, 2.5, 5, 10},
	}, []string{"status_code"})

	// httpRequest records end-to-end request handling latency, labeled by
	// response status code and client user agent.
	httpRequest = promauto.NewHistogramVec(prometheus.HistogramOpts{
		Name: "http_request",
		Help: "How long the application takes to complete the request",
		Buckets: []float64{.01, .025, .05, .1, .25, .5, 1, 2.5, 5, 10},
	}, []string{"status_code", "user_agent"})

	// seriesCount counts how many times each series has been requested.
	seriesCount = promauto.NewCounterVec(prometheus.CounterOpts{
		Name: "serie_count",
		Help: "How often a serie is called",
	}, []string{"serie"})
)
56
57func getSeries(r *http.Request) []string {
58 query := r.URL.Query().Get("q")
59
60 var series []string
61
62 for _, q := range strings.Split(query, ",") {
63 if _, ok := RegexCollection[q]; ok {
64 series = append(series, q)
65 }
66 }
67
68 if len(series) > 0 {
69 return series
70 }
71
72 return []string{"nerdcast"}
73}
74
75func match(title string, series []string) bool {
76 for _, s := range series {
77 if ok, err := regexp.MatchString(RegexCollection[s], title); err == nil && ok {
78 return true
79 }
80 }
81
82 return false
83}
84
85func fetchXML(_ context.Context) ([]byte, error) {
86 t := time.Now()
87 c := http.StatusInternalServerError
88
89 defer func() {
90 since := time.Since(t).Seconds()
91 code := strconv.Itoa(c)
92 feedRequest.WithLabelValues(code).Observe(since)
93 }()
94
95 res, err := http.Get(FeedUrl)
96 if err != nil {
97 return nil, err
98 }
99 defer res.Body.Close()
100
101 c = res.StatusCode
102
103 if c == http.StatusOK {
104 return io.ReadAll(res.Body)
105 }
106
107 return nil, errors.New("Invalid http code")
108}
109
110func appendTag(tag *etree.Element, ap string) {
111 text := tag.Text()
112 tag.SetText(text + ap)
113}
114
115func filterBySeries(series []string, xml []byte, temper bool) ([]byte, error) {
116 doc := etree.NewDocument()
117 err := doc.ReadFromBytes(xml)
118 if err != nil {
119 return nil, err
120 }
121
122 channel := doc.FindElement("//channel")
123
124 if temper {
125 tmp := strings.ToUpper(strings.Join(series, ","))
126 tmp = fmt.Sprintf(" [%s]", tmp)
127 appendTag(channel.FindElement("title"), tmp)
128 appendTag(channel.FindElement("description"), tmp)
129 appendTag(channel.FindElement("link"), "?"+tmp)
130 appendTag(channel.FindElement("author[namespace-prefix()='itunes']"), tmp)
131 appendTag(channel.FindElement("subtitle[namespace-prefix()='itunes']"), tmp)
132 appendTag(channel.FindElement("summary[namespace-prefix()='itunes']"), tmp)
133 appendTag(channel.FindElement("author[namespace-prefix()='googleplay']"), tmp)
134
135 }
136
137 for _, tag := range channel.FindElements("item") {
138 title := tag.FindElement("title").Text()
139 if !match(title, series) {
140 channel.RemoveChild(tag)
141 }
142 }
143
144 return doc.WriteToBytes()
145}
146
147func handleError(next ErrorRequestHandler) http.HandlerFunc {
148 return func(w http.ResponseWriter, r *http.Request) {
149 if err := next(w, r); err != nil {
150 w.WriteHeader(http.StatusInternalServerError)
151 }
152 }
153}
154
155func observe(next http.HandlerFunc) http.HandlerFunc {
156 return func(w http.ResponseWriter, r *http.Request) {
157 t := time.Now()
158
159 next(w, r)
160
161 rw := w.(*responseWriter)
162 since := time.Since(t).Seconds()
163 code := strconv.Itoa(rw.Status())
164 userAgent := r.Header.Get("user-agent")
165 httpRequest.WithLabelValues(code, userAgent).Observe(float64(since))
166
167 for _, s := range getSeries(r) {
168 seriesCount.WithLabelValues(s).Inc()
169 }
170 }
171}
172
173func wrap(next http.HandlerFunc) http.HandlerFunc {
174 return func(w http.ResponseWriter, r *http.Request) {
175 next(NewResponseWriter(w), r)
176 }
177}
178
179func titles(w http.ResponseWriter, r *http.Request) error {
180 xml, err := fetchXML(r.Context())
181 if err != nil {
182 return err
183 }
184
185 doc := etree.NewDocument()
186 err = doc.ReadFromBytes(xml)
187 if err != nil {
188 return err
189 }
190
191 series := getSeries(r)
192
193 els := doc.FindElements("//channel/item")
194 for _, e := range els {
195 txt := e.FindElement("title").Text() + "\n"
196 if match(txt, series) {
197 _, err = w.Write([]byte(txt))
198 if err != nil {
199 return err
200 }
201 }
202 }
203
204 return nil
205}
206
207func podcast(w http.ResponseWriter, r *http.Request) error {
208 xml, err := fetchXML(r.Context())
209 if err != nil {
210 return err
211 }
212
213 series := getSeries(r)
214 filterdXML, err := filterBySeries(series, xml, true)
215 if err != nil {
216 return err
217 }
218
219 _, err = w.Write(filterdXML)
220 if err != nil {
221 return err
222 }
223
224 return nil
225}
226
227func main() {
228 var (
229 addr = flag.String("addr", ":8080", "Server address")
230 )
231
232 flag.Parse()
233
234 mux := http.NewServeMux()
235 mux.Handle("/metrics", promhttp.Handler())
236 mux.HandleFunc("/titles", wrap(handleError(titles)))
237 mux.HandleFunc("/", wrap(observe(handleError(podcast))))
238
239 server := http.Server{
240 Handler: mux,
241 Addr: *addr,
242 }
243
244 err := server.ListenAndServe()
245 if err != nil {
246 fmt.Printf("Server error: %s", err.Error())
247 }
248}