mirror of https://github.com/Xevion/glance.git (synced 2025-12-15 08:11:59 -06:00)
Restructure & refactor codebase
@@ -1,120 +0,0 @@
package feed

import (
	"net/http"
	"strings"
)

type adguardStatsResponse struct {
	TotalQueries      int              `json:"num_dns_queries"`
	QueriesSeries     []int            `json:"dns_queries"`
	BlockedQueries    int              `json:"num_blocked_filtering"`
	BlockedSeries     []int            `json:"blocked_filtering"`
	ResponseTime      float64          `json:"avg_processing_time"`
	TopBlockedDomains []map[string]int `json:"top_blocked_domains"`
}

func FetchAdguardStats(instanceURL, username, password string) (*DNSStats, error) {
	requestURL := strings.TrimRight(instanceURL, "/") + "/control/stats"

	request, err := http.NewRequest("GET", requestURL, nil)

	if err != nil {
		return nil, err
	}

	request.SetBasicAuth(username, password)

	responseJson, err := decodeJsonFromRequest[adguardStatsResponse](defaultClient, request)

	if err != nil {
		return nil, err
	}

	var topBlockedDomainsCount = min(len(responseJson.TopBlockedDomains), 5)

	stats := &DNSStats{
		TotalQueries:      responseJson.TotalQueries,
		BlockedQueries:    responseJson.BlockedQueries,
		ResponseTime:      int(responseJson.ResponseTime * 1000),
		TopBlockedDomains: make([]DNSStatsBlockedDomain, 0, topBlockedDomainsCount),
	}

	if stats.TotalQueries <= 0 {
		return stats, nil
	}

	stats.BlockedPercent = int(float64(responseJson.BlockedQueries) / float64(responseJson.TotalQueries) * 100)

	for i := 0; i < topBlockedDomainsCount; i++ {
		domain := responseJson.TopBlockedDomains[i]
		var firstDomain string

		for k := range domain {
			firstDomain = k
			break
		}

		if firstDomain == "" {
			continue
		}

		stats.TopBlockedDomains = append(stats.TopBlockedDomains, DNSStatsBlockedDomain{
			Domain: firstDomain,
		})

		if stats.BlockedQueries > 0 {
			// Index the element that was just appended rather than i: when an
			// empty map is skipped by the continue above, i no longer matches
			// the slice index and indexing by i can panic.
			last := len(stats.TopBlockedDomains) - 1
			stats.TopBlockedDomains[last].PercentBlocked = int(float64(domain[firstDomain]) / float64(responseJson.BlockedQueries) * 100)
		}
	}

	queriesSeries := responseJson.QueriesSeries
	blockedSeries := responseJson.BlockedSeries

	const bars = 8
	const hoursSpan = 24
	const hoursPerBar int = hoursSpan / bars

	if len(queriesSeries) > hoursSpan {
		queriesSeries = queriesSeries[len(queriesSeries)-hoursSpan:]
	} else if len(queriesSeries) < hoursSpan {
		queriesSeries = append(make([]int, hoursSpan-len(queriesSeries)), queriesSeries...)
	}

	if len(blockedSeries) > hoursSpan {
		blockedSeries = blockedSeries[len(blockedSeries)-hoursSpan:]
	} else if len(blockedSeries) < hoursSpan {
		blockedSeries = append(make([]int, hoursSpan-len(blockedSeries)), blockedSeries...)
	}

	maxQueriesInSeries := 0

	for i := 0; i < bars; i++ {
		queries := 0
		blocked := 0

		for j := 0; j < hoursPerBar; j++ {
			queries += queriesSeries[i*hoursPerBar+j]
			blocked += blockedSeries[i*hoursPerBar+j]
		}

		stats.Series[i] = DNSStatsSeries{
			Queries: queries,
			Blocked: blocked,
		}

		if queries > 0 {
			stats.Series[i].PercentBlocked = int(float64(blocked) / float64(queries) * 100)
		}

		if queries > maxQueriesInSeries {
			maxQueriesInSeries = queries
		}
	}

	for i := 0; i < bars; i++ {
		stats.Series[i].PercentTotal = int(float64(stats.Series[i].Queries) / float64(maxQueriesInSeries) * 100)
	}

	return stats, nil
}
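A minimal usage sketch of the fetcher above, assuming an AdGuard Home instance at a placeholder address and credentials, with fmt and log imports:

func exampleFetchAdguardStats() {
	// Address, username and password are placeholders.
	stats, err := FetchAdguardStats("http://192.168.1.2:3000", "admin", "password")
	if err != nil {
		log.Fatal(err)
	}

	fmt.Printf("%d queries, %d%% blocked\n", stats.TotalQueries, stats.BlockedPercent)
}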
@@ -1,51 +0,0 @@
package feed

import "time"

// TODO: very inflexible, refactor to allow more customizability
// TODO: allow changing between showing the previous and next week and the entire month
func NewCalendar(now time.Time, startSunday bool) *Calendar {
	year, week := now.ISOWeek()
	weekday := now.Weekday()
	if !startSunday {
		weekday = (weekday + 6) % 7 // Shift Monday to 0
	}

	currentMonthDays := daysInMonth(now.Month(), year)

	var previousMonthDays int

	if previousMonthNumber := now.Month() - 1; previousMonthNumber < 1 {
		previousMonthDays = daysInMonth(12, year-1)
	} else {
		previousMonthDays = daysInMonth(previousMonthNumber, year)
	}

	startDaysFrom := now.Day() - int(weekday) - 7

	days := make([]int, 21)

	for i := 0; i < 21; i++ {
		day := startDaysFrom + i

		if day < 1 {
			day = previousMonthDays + day
		} else if day > currentMonthDays {
			day = day - currentMonthDays
		}

		days[i] = day
	}

	return &Calendar{
		CurrentDay:        now.Day(),
		CurrentWeekNumber: week,
		CurrentMonthName:  now.Month().String(),
		CurrentYear:       year,
		Days:              days,
	}
}

func daysInMonth(m time.Month, year int) int {
	return time.Date(year, m+1, 0, 0, 0, 0, 0, time.UTC).Day()
}
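A worked sketch of the 21-day window above, with a Monday-start week and an fmt import assumed:

func exampleNewCalendar() {
	now := time.Date(2024, time.May, 15, 0, 0, 0, 0, time.UTC) // a Wednesday, so weekday shifts to 2
	cal := NewCalendar(now, false)

	// startDaysFrom = 15 - 2 - 7 = 6, covering the previous, current and next week.
	fmt.Println(cal.Days[0], cal.Days[20]) // 6 26
}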
@@ -1,139 +0,0 @@
package feed

import (
	"fmt"
	"log/slog"
	"net/http"
	"sort"
	"strings"
	"time"
)

type ChangeDetectionWatch struct {
	Title        string
	URL          string
	LastChanged  time.Time
	DiffURL      string
	PreviousHash string
}

type ChangeDetectionWatches []ChangeDetectionWatch

func (r ChangeDetectionWatches) SortByNewest() ChangeDetectionWatches {
	sort.Slice(r, func(i, j int) bool {
		return r[i].LastChanged.After(r[j].LastChanged)
	})

	return r
}

type changeDetectionResponseJson struct {
	Title        string `json:"title"`
	URL          string `json:"url"`
	LastChanged  int64  `json:"last_changed"`
	DateCreated  int64  `json:"date_created"`
	PreviousHash string `json:"previous_md5"`
}

func FetchWatchUUIDsFromChangeDetection(instanceURL string, token string) ([]string, error) {
	request, _ := http.NewRequest("GET", fmt.Sprintf("%s/api/v1/watch", instanceURL), nil)

	if token != "" {
		request.Header.Add("x-api-key", token)
	}

	uuidsMap, err := decodeJsonFromRequest[map[string]struct{}](defaultClient, request)

	if err != nil {
		return nil, fmt.Errorf("could not fetch list of watch UUIDs: %v", err)
	}

	uuids := make([]string, 0, len(uuidsMap))

	for uuid := range uuidsMap {
		uuids = append(uuids, uuid)
	}

	return uuids, nil
}

func FetchWatchesFromChangeDetection(instanceURL string, requestedWatchIDs []string, token string) (ChangeDetectionWatches, error) {
	watches := make(ChangeDetectionWatches, 0, len(requestedWatchIDs))

	if len(requestedWatchIDs) == 0 {
		return watches, nil
	}

	requests := make([]*http.Request, len(requestedWatchIDs))

	for i, repository := range requestedWatchIDs {
		request, _ := http.NewRequest("GET", fmt.Sprintf("%s/api/v1/watch/%s", instanceURL, repository), nil)

		if token != "" {
			request.Header.Add("x-api-key", token)
		}

		requests[i] = request
	}

	task := decodeJsonFromRequestTask[changeDetectionResponseJson](defaultClient)
	job := newJob(task, requests).withWorkers(15)
	responses, errs, err := workerPoolDo(job)

	if err != nil {
		return nil, err
	}

	var failed int

	for i := range responses {
		if errs[i] != nil {
			failed++
			slog.Error("Failed to fetch or parse change detection watch", "error", errs[i], "url", requests[i].URL)
			continue
		}

		watchJson := responses[i]

		watch := ChangeDetectionWatch{
			URL:     watchJson.URL,
			DiffURL: fmt.Sprintf("%s/diff/%s?from_version=%d", instanceURL, requestedWatchIDs[i], watchJson.LastChanged-1),
		}

		if watchJson.LastChanged == 0 {
			watch.LastChanged = time.Unix(watchJson.DateCreated, 0)
		} else {
			watch.LastChanged = time.Unix(watchJson.LastChanged, 0)
		}

		if watchJson.Title != "" {
			watch.Title = watchJson.Title
		} else {
			watch.Title = strings.TrimPrefix(strings.Trim(stripURLScheme(watchJson.URL), "/"), "www.")
		}

		if watchJson.PreviousHash != "" {
			var hashLength = 8

			if len(watchJson.PreviousHash) < hashLength {
				hashLength = len(watchJson.PreviousHash)
			}

			watch.PreviousHash = watchJson.PreviousHash[0:hashLength]
		}

		watches = append(watches, watch)
	}

	if len(watches) == 0 {
		return nil, ErrNoContent
	}

	watches.SortByNewest()

	if failed > 0 {
		return watches, fmt.Errorf("%w: could not get %d watches", ErrPartialContent, failed)
	}

	return watches, nil
}
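A usage sketch chaining the two fetchers above, assuming a placeholder instance URL and API key; errors, fmt and log imports are assumed, and ErrPartialContent (defined elsewhere in the package) is tolerated:

func exampleChangeDetection() {
	instance := "http://localhost:5000" // placeholder
	uuids, err := FetchWatchUUIDsFromChangeDetection(instance, "api-key")
	if err != nil {
		log.Fatal(err)
	}

	watches, err := FetchWatchesFromChangeDetection(instance, uuids, "api-key")
	if err != nil && !errors.Is(err, ErrPartialContent) {
		log.Fatal(err)
	}

	for i := range watches {
		fmt.Println(watches[i].Title, watches[i].LastChanged.Format(time.RFC3339))
	}
}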
@@ -1,39 +0,0 @@
package feed

import (
	"fmt"
	"net/http"
)

type codebergReleaseResponseJson struct {
	TagName     string `json:"tag_name"`
	PublishedAt string `json:"published_at"`
	HtmlUrl     string `json:"html_url"`
}

func fetchLatestCodebergRelease(request *ReleaseRequest) (*AppRelease, error) {
	httpRequest, err := http.NewRequest(
		"GET",
		fmt.Sprintf(
			"https://codeberg.org/api/v1/repos/%s/releases/latest",
			request.Repository,
		),
		nil,
	)
	if err != nil {
		return nil, err
	}

	response, err := decodeJsonFromRequest[codebergReleaseResponseJson](defaultClient, httpRequest)

	if err != nil {
		return nil, err
	}
	return &AppRelease{
		Source:       ReleaseSourceCodeberg,
		Name:         request.Repository,
		Version:      normalizeVersionFormat(response.TagName),
		NotesUrl:     response.HtmlUrl,
		TimeReleased: parseRFC3339Time(response.PublishedAt),
	}, nil
}
@@ -1,148 +0,0 @@
package feed

import (
	"bytes"
	"errors"
	"html/template"
	"io"
	"log/slog"
	"net/http"

	"github.com/glanceapp/glance/internal/assets"
	"github.com/tidwall/gjson"
)

func FetchAndParseCustomAPI(req *http.Request, tmpl *template.Template) (template.HTML, error) {
	emptyBody := template.HTML("")

	resp, err := defaultClient.Do(req)
	if err != nil {
		return emptyBody, err
	}
	defer resp.Body.Close()

	bodyBytes, err := io.ReadAll(resp.Body)
	if err != nil {
		return emptyBody, err
	}

	body := string(bodyBytes)

	if !gjson.Valid(body) {
		truncatedBody, isTruncated := limitStringLength(body, 100)
		if isTruncated {
			truncatedBody += "... <truncated>"
		}

		slog.Error("invalid response JSON in custom API widget", "URL", req.URL.String(), "body", truncatedBody)
		return emptyBody, errors.New("invalid response JSON")
	}

	var templateBuffer bytes.Buffer

	data := CustomAPITemplateData{
		JSON:     DecoratedGJSONResult{gjson.Parse(body)},
		Response: resp,
	}

	err = tmpl.Execute(&templateBuffer, &data)
	if err != nil {
		return emptyBody, err
	}

	return template.HTML(templateBuffer.String()), nil
}

type DecoratedGJSONResult struct {
	gjson.Result
}

type CustomAPITemplateData struct {
	JSON     DecoratedGJSONResult
	Response *http.Response
}

func GJsonResultArrayToDecoratedResultArray(results []gjson.Result) []DecoratedGJSONResult {
	decoratedResults := make([]DecoratedGJSONResult, len(results))

	for i, result := range results {
		decoratedResults[i] = DecoratedGJSONResult{result}
	}

	return decoratedResults
}

func (r *DecoratedGJSONResult) Array(key string) []DecoratedGJSONResult {
	if key == "" {
		return GJsonResultArrayToDecoratedResultArray(r.Result.Array())
	}

	return GJsonResultArrayToDecoratedResultArray(r.Get(key).Array())
}

func (r *DecoratedGJSONResult) String(key string) string {
	if key == "" {
		return r.Result.String()
	}

	return r.Get(key).String()
}

func (r *DecoratedGJSONResult) Int(key string) int64 {
	if key == "" {
		return r.Result.Int()
	}

	return r.Get(key).Int()
}

func (r *DecoratedGJSONResult) Float(key string) float64 {
	if key == "" {
		return r.Result.Float()
	}

	return r.Get(key).Float()
}

func (r *DecoratedGJSONResult) Bool(key string) bool {
	if key == "" {
		return r.Result.Bool()
	}

	return r.Get(key).Bool()
}

var CustomAPITemplateFuncs = func() template.FuncMap {
	funcs := template.FuncMap{
		"toFloat": func(a int64) float64 {
			return float64(a)
		},
		"toInt": func(a float64) int64 {
			return int64(a)
		},
		"mathexpr": func(left float64, op string, right float64) float64 {
			// NOTE: this zero guard short-circuits every operator, not just
			// division, so e.g. adding 0 also yields 0.
			if right == 0 {
				return 0
			}

			switch op {
			case "+":
				return left + right
			case "-":
				return left - right
			case "*":
				return left * right
			case "/":
				return left / right
			default:
				return 0
			}
		},
	}

	for key, value := range assets.GlobalTemplateFunctions {
		funcs[key] = value
	}

	return funcs
}()
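A hypothetical widget template for the helpers above, assuming a response body like {"title":"Stats","items":[{"name":"a","count":2}]}; the endpoint and field names are made up, and the template must be parsed with CustomAPITemplateFuncs for the pipeline functions to resolve:

func exampleCustomAPIWidget() {
	tmpl := template.Must(template.New("widget").Funcs(CustomAPITemplateFuncs).Parse(
		`{{ .JSON.String "title" }}: {{ range .JSON.Array "items" }}{{ .String "name" }}={{ .Int "count" }} {{ end }}`,
	))

	req, _ := http.NewRequest("GET", "https://api.example.com/stats", nil) // placeholder endpoint
	result, err := FetchAndParseCustomAPI(req, tmpl)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(result)
}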
@@ -1,102 +0,0 @@
package feed

import (
	"fmt"
	"net/http"
	"strings"
)

type dockerHubRepositoryTagsResponse struct {
	Results []dockerHubRepositoryTagResponse `json:"results"`
}

type dockerHubRepositoryTagResponse struct {
	Name       string `json:"name"`
	LastPushed string `json:"tag_last_pushed"`
}

const dockerHubOfficialRepoTagURLFormat = "https://hub.docker.com/_/%s/tags?name=%s"
const dockerHubRepoTagURLFormat = "https://hub.docker.com/r/%s/tags?name=%s"
const dockerHubTagsURLFormat = "https://hub.docker.com/v2/namespaces/%s/repositories/%s/tags"
const dockerHubSpecificTagURLFormat = "https://hub.docker.com/v2/namespaces/%s/repositories/%s/tags/%s"

func fetchLatestDockerHubRelease(request *ReleaseRequest) (*AppRelease, error) {
	nameParts := strings.Split(request.Repository, "/")

	if len(nameParts) > 2 {
		return nil, fmt.Errorf("invalid repository name: %s", request.Repository)
	} else if len(nameParts) == 1 {
		nameParts = []string{"library", nameParts[0]}
	}

	tagParts := strings.SplitN(nameParts[1], ":", 2)

	var requestURL string

	if len(tagParts) == 2 {
		requestURL = fmt.Sprintf(dockerHubSpecificTagURLFormat, nameParts[0], tagParts[0], tagParts[1])
	} else {
		requestURL = fmt.Sprintf(dockerHubTagsURLFormat, nameParts[0], nameParts[1])
	}

	httpRequest, err := http.NewRequest("GET", requestURL, nil)

	if err != nil {
		return nil, err
	}

	if request.Token != nil {
		httpRequest.Header.Add("Authorization", "Bearer "+(*request.Token))
	}

	var tag *dockerHubRepositoryTagResponse

	if len(tagParts) == 1 {
		response, err := decodeJsonFromRequest[dockerHubRepositoryTagsResponse](defaultClient, httpRequest)

		if err != nil {
			return nil, err
		}

		if len(response.Results) == 0 {
			return nil, fmt.Errorf("no tags found for repository: %s", request.Repository)
		}

		tag = &response.Results[0]
	} else {
		response, err := decodeJsonFromRequest[dockerHubRepositoryTagResponse](defaultClient, httpRequest)

		if err != nil {
			return nil, err
		}

		tag = &response
	}

	var repo string
	var displayName string
	var notesURL string

	if len(tagParts) == 1 {
		repo = nameParts[1]
	} else {
		repo = tagParts[0]
	}

	if nameParts[0] == "library" {
		displayName = repo
		notesURL = fmt.Sprintf(dockerHubOfficialRepoTagURLFormat, repo, tag.Name)
	} else {
		displayName = nameParts[0] + "/" + repo
		notesURL = fmt.Sprintf(dockerHubRepoTagURLFormat, displayName, tag.Name)
	}

	return &AppRelease{
		Source:       ReleaseSourceDockerHub,
		NotesUrl:     notesURL,
		Name:         displayName,
		Version:      tag.Name,
		TimeReleased: parseRFC3339Time(tag.LastPushed),
	}, nil
}
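A sketch of the three repository formats the parsing above accepts (official image, namespaced image, pinned tag); the image names are placeholders, and fmt and log imports are assumed:

func exampleDockerHubFormats() {
	for _, repo := range []string{"postgres", "grafana/grafana", "library/postgres:16"} {
		release, err := fetchLatestDockerHubRelease(&ReleaseRequest{Source: ReleaseSourceDockerHub, Repository: repo})
		if err != nil {
			log.Println(err)
			continue
		}

		fmt.Println(release.Name, release.Version)
	}
}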
@@ -1,102 +0,0 @@
package feed

import (
	"fmt"
	"html"
	"html/template"
	"io"
	"log/slog"
	"net/http"
	"net/url"
)

type ExtensionType int

const (
	ExtensionContentHTML    ExtensionType = iota
	ExtensionContentUnknown               = iota
)

var ExtensionStringToType = map[string]ExtensionType{
	"html": ExtensionContentHTML,
}

const (
	ExtensionHeaderTitle       = "Widget-Title"
	ExtensionHeaderContentType = "Widget-Content-Type"
)

type ExtensionRequestOptions struct {
	URL                 string            `yaml:"url"`
	FallbackContentType string            `yaml:"fallback-content-type"`
	Parameters          map[string]string `yaml:"parameters"`
	AllowHtml           bool              `yaml:"allow-potentially-dangerous-html"`
}

type Extension struct {
	Title   string
	Content template.HTML
}

func convertExtensionContent(options ExtensionRequestOptions, content []byte, contentType ExtensionType) template.HTML {
	switch contentType {
	case ExtensionContentHTML:
		if options.AllowHtml {
			return template.HTML(content)
		}

		fallthrough
	default:
		return template.HTML(html.EscapeString(string(content)))
	}
}

func FetchExtension(options ExtensionRequestOptions) (Extension, error) {
	request, _ := http.NewRequest("GET", options.URL, nil)

	query := url.Values{}

	for key, value := range options.Parameters {
		query.Set(key, value)
	}

	request.URL.RawQuery = query.Encode()

	response, err := http.DefaultClient.Do(request)

	if err != nil {
		slog.Error("failed fetching extension", "error", err, "url", options.URL)
		return Extension{}, fmt.Errorf("%w: request failed: %w", ErrNoContent, err)
	}

	defer response.Body.Close()

	body, err := io.ReadAll(response.Body)

	if err != nil {
		slog.Error("failed reading response body of extension", "error", err, "url", options.URL)
		return Extension{}, fmt.Errorf("%w: could not read body: %w", ErrNoContent, err)
	}

	extension := Extension{}

	if response.Header.Get(ExtensionHeaderTitle) == "" {
		extension.Title = "Extension"
	} else {
		extension.Title = response.Header.Get(ExtensionHeaderTitle)
	}

	contentType, ok := ExtensionStringToType[response.Header.Get(ExtensionHeaderContentType)]

	if !ok {
		contentType, ok = ExtensionStringToType[options.FallbackContentType]

		if !ok {
			contentType = ExtensionContentUnknown
		}
	}

	extension.Content = convertExtensionContent(options, body, contentType)

	return extension, nil
}
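A throwaway server sketch for the extension protocol above: the two Widget-* headers drive the title and content type, and "html" only renders unescaped when the widget sets allow-potentially-dangerous-html. Port and content are placeholders; a log import is assumed:

func exampleExtensionServer() {
	http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set(ExtensionHeaderTitle, "My Extension")
		w.Header().Set(ExtensionHeaderContentType, "html")
		fmt.Fprint(w, "<p>Hello from an extension</p>")
	})

	log.Fatal(http.ListenAndServe(":8080", nil))
}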
@@ -1,226 +0,0 @@
package feed

import (
	"fmt"
	"net/http"
	"strings"
	"sync"
	"time"
)

type githubReleaseLatestResponseJson struct {
	TagName     string `json:"tag_name"`
	PublishedAt string `json:"published_at"`
	HtmlUrl     string `json:"html_url"`
	Reactions   struct {
		Downvotes int `json:"-1"`
	} `json:"reactions"`
}

func fetchLatestGithubRelease(request *ReleaseRequest) (*AppRelease, error) {
	httpRequest, err := http.NewRequest(
		"GET",
		fmt.Sprintf("https://api.github.com/repos/%s/releases/latest", request.Repository),
		nil,
	)

	if err != nil {
		return nil, err
	}

	if request.Token != nil {
		httpRequest.Header.Add("Authorization", "Bearer "+(*request.Token))
	}

	response, err := decodeJsonFromRequest[githubReleaseLatestResponseJson](defaultClient, httpRequest)

	if err != nil {
		return nil, err
	}

	return &AppRelease{
		Source:       ReleaseSourceGithub,
		Name:         request.Repository,
		Version:      normalizeVersionFormat(response.TagName),
		NotesUrl:     response.HtmlUrl,
		TimeReleased: parseRFC3339Time(response.PublishedAt),
		Downvotes:    response.Reactions.Downvotes,
	}, nil
}

type GithubTicket struct {
	Number    int
	CreatedAt time.Time
	Title     string
}

type RepositoryDetails struct {
	Name             string
	Stars            int
	Forks            int
	OpenPullRequests int
	PullRequests     []GithubTicket
	OpenIssues       int
	Issues           []GithubTicket
	LastCommits      int
	Commits          []CommitDetails
}

type githubRepositoryDetailsResponseJson struct {
	Name  string `json:"full_name"`
	Stars int    `json:"stargazers_count"`
	Forks int    `json:"forks_count"`
}

type githubTicketResponseJson struct {
	Count   int `json:"total_count"`
	Tickets []struct {
		Number    int    `json:"number"`
		CreatedAt string `json:"created_at"`
		Title     string `json:"title"`
	} `json:"items"`
}

type CommitDetails struct {
	Sha       string
	Author    string
	CreatedAt time.Time
	Message   string
}

type gitHubCommitResponseJson struct {
	Sha    string `json:"sha"`
	Commit struct {
		Author struct {
			Name string `json:"name"`
			Date string `json:"date"`
		} `json:"author"`
		Message string `json:"message"`
	} `json:"commit"`
}

func FetchRepositoryDetailsFromGithub(repository string, token string, maxPRs int, maxIssues int, maxCommits int) (RepositoryDetails, error) {
	repositoryRequest, err := http.NewRequest("GET", fmt.Sprintf("https://api.github.com/repos/%s", repository), nil)
	if err != nil {
		return RepositoryDetails{}, fmt.Errorf("%w: could not create request with repository: %v", ErrNoContent, err)
	}

	PRsRequest, _ := http.NewRequest("GET", fmt.Sprintf("https://api.github.com/search/issues?q=is:pr+is:open+repo:%s&per_page=%d", repository, maxPRs), nil)
	issuesRequest, _ := http.NewRequest("GET", fmt.Sprintf("https://api.github.com/search/issues?q=is:issue+is:open+repo:%s&per_page=%d", repository, maxIssues), nil)
	CommitsRequest, _ := http.NewRequest("GET", fmt.Sprintf("https://api.github.com/repos/%s/commits?per_page=%d", repository, maxCommits), nil)

	if token != "" {
		token = fmt.Sprintf("Bearer %s", token)
		repositoryRequest.Header.Add("Authorization", token)
		PRsRequest.Header.Add("Authorization", token)
		issuesRequest.Header.Add("Authorization", token)
		CommitsRequest.Header.Add("Authorization", token)
	}

	var detailsResponse githubRepositoryDetailsResponseJson
	var detailsErr error
	var PRsResponse githubTicketResponseJson
	var PRsErr error
	var issuesResponse githubTicketResponseJson
	var issuesErr error
	var commitsResponse []gitHubCommitResponseJson
	var CommitsErr error
	var wg sync.WaitGroup

	wg.Add(1)
	go (func() {
		defer wg.Done()
		detailsResponse, detailsErr = decodeJsonFromRequest[githubRepositoryDetailsResponseJson](defaultClient, repositoryRequest)
	})()

	if maxPRs > 0 {
		wg.Add(1)
		go (func() {
			defer wg.Done()
			PRsResponse, PRsErr = decodeJsonFromRequest[githubTicketResponseJson](defaultClient, PRsRequest)
		})()
	}

	if maxIssues > 0 {
		wg.Add(1)
		go (func() {
			defer wg.Done()
			issuesResponse, issuesErr = decodeJsonFromRequest[githubTicketResponseJson](defaultClient, issuesRequest)
		})()
	}

	if maxCommits > 0 {
		wg.Add(1)
		go (func() {
			defer wg.Done()
			commitsResponse, CommitsErr = decodeJsonFromRequest[[]gitHubCommitResponseJson](defaultClient, CommitsRequest)
		})()
	}

	wg.Wait()

	if detailsErr != nil {
		return RepositoryDetails{}, fmt.Errorf("%w: could not get repository details: %s", ErrNoContent, detailsErr)
	}

	details := RepositoryDetails{
		Name:         detailsResponse.Name,
		Stars:        detailsResponse.Stars,
		Forks:        detailsResponse.Forks,
		PullRequests: make([]GithubTicket, 0, len(PRsResponse.Tickets)),
		Issues:       make([]GithubTicket, 0, len(issuesResponse.Tickets)),
		Commits:      make([]CommitDetails, 0, len(commitsResponse)),
	}

	err = nil

	if maxPRs > 0 {
		if PRsErr != nil {
			err = fmt.Errorf("%w: could not get PRs: %s", ErrPartialContent, PRsErr)
		} else {
			details.OpenPullRequests = PRsResponse.Count

			for i := range PRsResponse.Tickets {
				details.PullRequests = append(details.PullRequests, GithubTicket{
					Number:    PRsResponse.Tickets[i].Number,
					CreatedAt: parseRFC3339Time(PRsResponse.Tickets[i].CreatedAt),
					Title:     PRsResponse.Tickets[i].Title,
				})
			}
		}
	}

	if maxIssues > 0 {
		if issuesErr != nil {
			// TODO: fix, overwriting the previous error
			err = fmt.Errorf("%w: could not get issues: %s", ErrPartialContent, issuesErr)
		} else {
			details.OpenIssues = issuesResponse.Count

			for i := range issuesResponse.Tickets {
				details.Issues = append(details.Issues, GithubTicket{
					Number:    issuesResponse.Tickets[i].Number,
					CreatedAt: parseRFC3339Time(issuesResponse.Tickets[i].CreatedAt),
					Title:     issuesResponse.Tickets[i].Title,
				})
			}
		}
	}

	if maxCommits > 0 {
		if CommitsErr != nil {
			err = fmt.Errorf("%w: could not get commits: %s", ErrPartialContent, CommitsErr)
		} else {
			for i := range commitsResponse {
				details.Commits = append(details.Commits, CommitDetails{
					Sha:       commitsResponse[i].Sha,
					Author:    commitsResponse[i].Commit.Author.Name,
					CreatedAt: parseRFC3339Time(commitsResponse[i].Commit.Author.Date),
					Message:   strings.SplitN(commitsResponse[i].Commit.Message, "\n\n", 2)[0],
				})
			}
		}
	}

	return details, err
}
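A call-site sketch for the concurrent fetch above, assuming partial failures are acceptable; the repository name and limits are placeholders, and errors, fmt and log imports are assumed:

func exampleRepositoryDetails() {
	details, err := FetchRepositoryDetailsFromGithub("glanceapp/glance", "", 3, 3, 5)
	if err != nil && !errors.Is(err, ErrPartialContent) {
		log.Fatal(err)
	}

	fmt.Printf("%s: %d stars, %d open PRs, %d open issues\n",
		details.Name, details.Stars, details.OpenPullRequests, details.OpenIssues)
}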
@@ -1,48 +0,0 @@
package feed

import (
	"fmt"
	"net/http"
	"net/url"
)

type gitlabReleaseResponseJson struct {
	TagName    string `json:"tag_name"`
	ReleasedAt string `json:"released_at"`
	Links      struct {
		Self string `json:"self"`
	} `json:"_links"`
}

func fetchLatestGitLabRelease(request *ReleaseRequest) (*AppRelease, error) {
	httpRequest, err := http.NewRequest(
		"GET",
		fmt.Sprintf(
			"https://gitlab.com/api/v4/projects/%s/releases/permalink/latest",
			url.QueryEscape(request.Repository),
		),
		nil,
	)

	if err != nil {
		return nil, err
	}

	if request.Token != nil {
		httpRequest.Header.Add("PRIVATE-TOKEN", *request.Token)
	}

	response, err := decodeJsonFromRequest[gitlabReleaseResponseJson](defaultClient, httpRequest)

	if err != nil {
		return nil, err
	}

	return &AppRelease{
		Source:       ReleaseSourceGitlab,
		Name:         request.Repository,
		Version:      normalizeVersionFormat(response.TagName),
		NotesUrl:     response.Links.Self,
		TimeReleased: parseRFC3339Time(response.ReleasedAt),
	}, nil
}
@@ -1,98 +0,0 @@
package feed

import (
	"fmt"
	"log/slog"
	"net/http"
	"strconv"
	"strings"
	"time"
)

type hackerNewsPostResponseJson struct {
	Id           int    `json:"id"`
	Score        int    `json:"score"`
	Title        string `json:"title"`
	TargetUrl    string `json:"url,omitempty"`
	CommentCount int    `json:"descendants"`
	TimePosted   int64  `json:"time"`
}

func getHackerNewsPostIds(sort string) ([]int, error) {
	request, _ := http.NewRequest("GET", fmt.Sprintf("https://hacker-news.firebaseio.com/v0/%sstories.json", sort), nil)
	response, err := decodeJsonFromRequest[[]int](defaultClient, request)

	if err != nil {
		return nil, fmt.Errorf("%w: could not fetch list of post IDs", ErrNoContent)
	}

	return response, nil
}

func getHackerNewsPostsFromIds(postIds []int, commentsUrlTemplate string) (ForumPosts, error) {
	requests := make([]*http.Request, len(postIds))

	for i, id := range postIds {
		request, _ := http.NewRequest("GET", fmt.Sprintf("https://hacker-news.firebaseio.com/v0/item/%d.json", id), nil)
		requests[i] = request
	}

	task := decodeJsonFromRequestTask[hackerNewsPostResponseJson](defaultClient)
	job := newJob(task, requests).withWorkers(30)
	results, errs, err := workerPoolDo(job)

	if err != nil {
		return nil, err
	}

	posts := make(ForumPosts, 0, len(postIds))

	for i := range results {
		if errs[i] != nil {
			slog.Error("Failed to fetch or parse hacker news post", "error", errs[i], "url", requests[i].URL)
			continue
		}

		var commentsUrl string

		if commentsUrlTemplate == "" {
			commentsUrl = "https://news.ycombinator.com/item?id=" + strconv.Itoa(results[i].Id)
		} else {
			commentsUrl = strings.ReplaceAll(commentsUrlTemplate, "{POST-ID}", strconv.Itoa(results[i].Id))
		}

		posts = append(posts, ForumPost{
			Title:           results[i].Title,
			DiscussionUrl:   commentsUrl,
			TargetUrl:       results[i].TargetUrl,
			TargetUrlDomain: extractDomainFromUrl(results[i].TargetUrl),
			CommentCount:    results[i].CommentCount,
			Score:           results[i].Score,
			TimePosted:      time.Unix(results[i].TimePosted, 0),
		})
	}

	if len(posts) == 0 {
		return nil, ErrNoContent
	}

	if len(posts) != len(postIds) {
		return posts, fmt.Errorf("%w: could not fetch some hacker news posts", ErrPartialContent)
	}

	return posts, nil
}

func FetchHackerNewsPosts(sort string, limit int, commentsUrlTemplate string) (ForumPosts, error) {
	postIds, err := getHackerNewsPostIds(sort)

	if err != nil {
		return nil, err
	}

	if len(postIds) > limit {
		postIds = postIds[:limit]
	}

	return getHackerNewsPostsFromIds(postIds, commentsUrlTemplate)
}
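A usage sketch; since the sort value is interpolated into {sort}stories.json, values like "top", "new" and "best" are what the Firebase API likely accepts. errors, fmt and log imports are assumed:

func exampleHackerNews() {
	posts, err := FetchHackerNewsPosts("top", 15, "")
	if err != nil && !errors.Is(err, ErrPartialContent) {
		log.Fatal(err)
	}

	for i := range posts {
		fmt.Println(posts[i].Score, posts[i].Title)
	}
}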
@@ -1,91 +0,0 @@
package feed

import (
	"net/http"
	"strings"
	"time"
)

type lobstersPostResponseJson struct {
	CreatedAt    string   `json:"created_at"`
	Title        string   `json:"title"`
	URL          string   `json:"url"`
	Score        int      `json:"score"`
	CommentCount int      `json:"comment_count"`
	CommentsURL  string   `json:"comments_url"`
	Tags         []string `json:"tags"`
}

type lobstersFeedResponseJson []lobstersPostResponseJson

func getLobstersPostsFromFeed(feedUrl string) (ForumPosts, error) {
	request, err := http.NewRequest("GET", feedUrl, nil)

	if err != nil {
		return nil, err
	}

	feed, err := decodeJsonFromRequest[lobstersFeedResponseJson](defaultClient, request)

	if err != nil {
		return nil, err
	}

	posts := make(ForumPosts, 0, len(feed))

	for i := range feed {
		createdAt, _ := time.Parse(time.RFC3339, feed[i].CreatedAt)

		posts = append(posts, ForumPost{
			Title:           feed[i].Title,
			DiscussionUrl:   feed[i].CommentsURL,
			TargetUrl:       feed[i].URL,
			TargetUrlDomain: extractDomainFromUrl(feed[i].URL),
			CommentCount:    feed[i].CommentCount,
			Score:           feed[i].Score,
			TimePosted:      createdAt,
			Tags:            feed[i].Tags,
		})
	}

	if len(posts) == 0 {
		return nil, ErrNoContent
	}

	return posts, nil
}

func FetchLobstersPosts(customURL string, instanceURL string, sortBy string, tags []string) (ForumPosts, error) {
	var feedUrl string

	if customURL != "" {
		feedUrl = customURL
	} else {
		if instanceURL != "" {
			instanceURL = strings.TrimRight(instanceURL, "/") + "/"
		} else {
			instanceURL = "https://lobste.rs/"
		}

		if sortBy == "hot" {
			sortBy = "hottest"
		} else if sortBy == "new" {
			sortBy = "newest"
		}

		if len(tags) == 0 {
			feedUrl = instanceURL + sortBy + ".json"
		} else {
			tags := strings.Join(tags, ",")
			feedUrl = instanceURL + "t/" + tags + ".json"
		}
	}

	posts, err := getLobstersPostsFromFeed(feedUrl)

	if err != nil {
		return nil, err
	}

	return posts, nil
}
@@ -1,77 +0,0 @@
package feed

import (
	"context"
	"errors"
	"net/http"
	"time"
)

type SiteStatusRequest struct {
	URL           string `yaml:"url"`
	CheckURL      string `yaml:"check-url"`
	AllowInsecure bool   `yaml:"allow-insecure"`
}

type SiteStatus struct {
	Code         int
	TimedOut     bool
	ResponseTime time.Duration
	Error        error
}

func getSiteStatusTask(statusRequest *SiteStatusRequest) (SiteStatus, error) {
	var url string
	if statusRequest.CheckURL != "" {
		url = statusRequest.CheckURL
	} else {
		url = statusRequest.URL
	}
	request, err := http.NewRequest(http.MethodGet, url, nil)

	if err != nil {
		return SiteStatus{
			Error: err,
		}, nil
	}

	ctx, cancel := context.WithTimeout(context.Background(), time.Second*3)
	defer cancel()
	request = request.WithContext(ctx)
	requestSentAt := time.Now()
	var response *http.Response

	if !statusRequest.AllowInsecure {
		response, err = defaultClient.Do(request)
	} else {
		response, err = defaultInsecureClient.Do(request)
	}

	status := SiteStatus{ResponseTime: time.Since(requestSentAt)}

	if err != nil {
		if errors.Is(err, context.DeadlineExceeded) {
			status.TimedOut = true
		}

		status.Error = err
		return status, nil
	}

	defer response.Body.Close()

	status.Code = response.StatusCode

	return status, nil
}

func FetchStatusForSites(requests []*SiteStatusRequest) ([]SiteStatus, error) {
	job := newJob(getSiteStatusTask, requests).withWorkers(20)
	results, _, err := workerPoolDo(job)

	if err != nil {
		return nil, err
	}

	return results, nil
}
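A minimal sketch of driving the monitor above; the URLs are placeholders, and fmt and log imports are assumed:

func exampleSiteStatus() {
	requests := []*SiteStatusRequest{
		{URL: "https://example.com"},
		{URL: "https://self-signed.internal", AllowInsecure: true},
	}

	statuses, err := FetchStatusForSites(requests)
	if err != nil {
		log.Fatal(err)
	}

	for i, s := range statuses {
		fmt.Printf("%s -> code=%d timedOut=%v in %s\n", requests[i].URL, s.Code, s.TimedOut, s.ResponseTime)
	}
}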
@@ -1,217 +0,0 @@
package feed

import (
	"fmt"
	"math"
	"net/http"
	"net/url"
	"slices"
	"strings"
	"time"

	_ "time/tzdata"
)

type PlacesResponseJson struct {
	Results []PlaceJson
}

type PlaceJson struct {
	Name      string
	Area      string `json:"admin1"`
	Latitude  float64
	Longitude float64
	Timezone  string
	Country   string
	location  *time.Location
}

type WeatherResponseJson struct {
	Daily struct {
		Sunrise []int64 `json:"sunrise"`
		Sunset  []int64 `json:"sunset"`
	} `json:"daily"`

	Hourly struct {
		Temperature              []float64 `json:"temperature_2m"`
		PrecipitationProbability []int     `json:"precipitation_probability"`
	} `json:"hourly"`

	Current struct {
		Temperature         float64 `json:"temperature_2m"`
		ApparentTemperature float64 `json:"apparent_temperature"`
		WeatherCode         int     `json:"weather_code"`
	} `json:"current"`
}

type weatherColumn struct {
	Temperature      int
	Scale            float64
	HasPrecipitation bool
}

var commonCountryAbbreviations = map[string]string{
	"US":  "United States",
	"USA": "United States",
	"UK":  "United Kingdom",
}

func expandCountryAbbreviations(name string) string {
	if expanded, ok := commonCountryAbbreviations[strings.TrimSpace(name)]; ok {
		return expanded
	}

	return name
}

// Separates the location that Open Meteo accepts from the administrative area
// which can then be used to filter to the correct place after the list of places
// has been retrieved. Also expands abbreviations since Open Meteo does not accept
// country names like "US", "USA" and "UK"
func parsePlaceName(name string) (string, string) {
	parts := strings.Split(name, ",")

	if len(parts) == 1 {
		return name, ""
	}

	if len(parts) == 2 {
		return parts[0] + ", " + expandCountryAbbreviations(parts[1]), ""
	}

	return parts[0] + ", " + expandCountryAbbreviations(parts[2]), strings.TrimSpace(parts[1])
}

func FetchPlaceFromName(location string) (*PlaceJson, error) {
	location, area := parsePlaceName(location)
	requestUrl := fmt.Sprintf("https://geocoding-api.open-meteo.com/v1/search?name=%s&count=10&language=en&format=json", url.QueryEscape(location))
	request, _ := http.NewRequest("GET", requestUrl, nil)
	responseJson, err := decodeJsonFromRequest[PlacesResponseJson](defaultClient, request)

	if err != nil {
		return nil, fmt.Errorf("could not fetch places data: %v", err)
	}

	if len(responseJson.Results) == 0 {
		return nil, fmt.Errorf("no places found for %s", location)
	}

	var place *PlaceJson

	if area != "" {
		area = strings.ToLower(area)

		for i := range responseJson.Results {
			if strings.ToLower(responseJson.Results[i].Area) == area {
				place = &responseJson.Results[i]
				break
			}
		}

		if place == nil {
			return nil, fmt.Errorf("no place found for %s in %s", location, area)
		}
	} else {
		place = &responseJson.Results[0]
	}

	loc, err := time.LoadLocation(place.Timezone)

	if err != nil {
		return nil, fmt.Errorf("could not load location: %v", err)
	}

	place.location = loc

	return place, nil
}

func barIndexFromHour(h int) int {
	return h / 2
}

// TODO: bunch of spaget, refactor
func FetchWeatherForPlace(place *PlaceJson, units string) (*Weather, error) {
	query := url.Values{}
	var temperatureUnit string

	if units == "imperial" {
		temperatureUnit = "fahrenheit"
	} else {
		temperatureUnit = "celsius"
	}

	query.Add("latitude", fmt.Sprintf("%f", place.Latitude))
	query.Add("longitude", fmt.Sprintf("%f", place.Longitude))
	query.Add("timeformat", "unixtime")
	query.Add("timezone", place.Timezone)
	query.Add("forecast_days", "1")
	query.Add("current", "temperature_2m,apparent_temperature,weather_code")
	query.Add("hourly", "temperature_2m,precipitation_probability")
	query.Add("daily", "sunrise,sunset")
	query.Add("temperature_unit", temperatureUnit)

	requestUrl := "https://api.open-meteo.com/v1/forecast?" + query.Encode()
	request, _ := http.NewRequest("GET", requestUrl, nil)
	responseJson, err := decodeJsonFromRequest[WeatherResponseJson](defaultClient, request)

	if err != nil {
		return nil, fmt.Errorf("%w: %v", ErrNoContent, err)
	}

	now := time.Now().In(place.location)
	bars := make([]weatherColumn, 0, 24)
	currentBar := barIndexFromHour(now.Hour())
	sunriseBar := barIndexFromHour(time.Unix(int64(responseJson.Daily.Sunrise[0]), 0).In(place.location).Hour())
	sunsetBar := barIndexFromHour(time.Unix(int64(responseJson.Daily.Sunset[0]), 0).In(place.location).Hour()) - 1

	if sunsetBar < 0 {
		sunsetBar = 0
	}

	if len(responseJson.Hourly.Temperature) == 24 {
		temperatures := make([]int, 12)
		precipitations := make([]bool, 12)

		t := responseJson.Hourly.Temperature
		p := responseJson.Hourly.PrecipitationProbability

		for i := 0; i < 24; i += 2 {
			if i/2 == currentBar {
				temperatures[i/2] = int(responseJson.Current.Temperature)
			} else {
				temperatures[i/2] = int(math.Round((t[i] + t[i+1]) / 2))
			}

			precipitations[i/2] = (p[i]+p[i+1])/2 > 75
		}

		minT := slices.Min(temperatures)
		maxT := slices.Max(temperatures)

		temperaturesRange := float64(maxT - minT)

		for i := 0; i < 12; i++ {
			bars = append(bars, weatherColumn{
				Temperature:      temperatures[i],
				HasPrecipitation: precipitations[i],
			})

			if temperaturesRange > 0 {
				bars[i].Scale = float64(temperatures[i]-minT) / temperaturesRange
			} else {
				bars[i].Scale = 1
			}
		}
	}

	return &Weather{
		Temperature:         int(responseJson.Current.Temperature),
		ApparentTemperature: int(responseJson.Current.ApparentTemperature),
		WeatherCode:         responseJson.Current.WeatherCode,
		CurrentColumn:       currentBar,
		SunriseColumn:       sunriseBar,
		SunsetColumn:        sunsetBar,
		Columns:             bars,
	}, nil
}
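A sketch of the two-step flow above, using the "City, Area, Country" form handled by parsePlaceName; the place is a placeholder, and fmt and log imports are assumed:

func exampleWeather() {
	place, err := FetchPlaceFromName("San Antonio, Texas, US") // "US" is expanded to "United States"
	if err != nil {
		log.Fatal(err)
	}

	weather, err := FetchWeatherForPlace(place, "imperial")
	if err != nil {
		log.Fatal(err)
	}

	fmt.Printf("%d degrees, %s\n", weather.Temperature, weather.WeatherCodeAsString())
}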
@@ -1,136 +0,0 @@
package feed

import (
	"encoding/json"
	"errors"
	"log/slog"
	"net/http"
	"sort"
	"strings"
)

type piholeStatsResponse struct {
	TotalQueries      int                     `json:"dns_queries_today"`
	QueriesSeries     map[int64]int           `json:"domains_over_time"`
	BlockedQueries    int                     `json:"ads_blocked_today"`
	BlockedSeries     map[int64]int           `json:"ads_over_time"`
	BlockedPercentage float64                 `json:"ads_percentage_today"`
	TopBlockedDomains piholeTopBlockedDomains `json:"top_ads"`
	DomainsBlocked    int                     `json:"domains_being_blocked"`
}

// If the user has some level of privacy enabled on Pihole, `json:"top_ads"` is an empty array
// rather than a map. Use custom unmarshal behavior so the rest of the valid data still gets decoded.
type piholeTopBlockedDomains map[string]int

func (p *piholeTopBlockedDomains) UnmarshalJSON(data []byte) error {
	// NOTE: do not change to piholeTopBlockedDomains type here or it will cause a stack overflow
	// because of the UnmarshalJSON method getting called recursively
	temp := make(map[string]int)

	err := json.Unmarshal(data, &temp)

	if err != nil {
		*p = make(piholeTopBlockedDomains)
	} else {
		*p = temp
	}

	return nil
}

func FetchPiholeStats(instanceURL, token string) (*DNSStats, error) {
	if token == "" {
		return nil, errors.New("missing API token")
	}

	requestURL := strings.TrimRight(instanceURL, "/") +
		"/admin/api.php?summaryRaw&topItems&overTimeData10mins&auth=" + token

	request, err := http.NewRequest("GET", requestURL, nil)

	if err != nil {
		return nil, err
	}

	responseJson, err := decodeJsonFromRequest[piholeStatsResponse](defaultClient, request)

	if err != nil {
		return nil, err
	}

	stats := &DNSStats{
		TotalQueries:   responseJson.TotalQueries,
		BlockedQueries: responseJson.BlockedQueries,
		BlockedPercent: int(responseJson.BlockedPercentage),
		DomainsBlocked: responseJson.DomainsBlocked,
	}

	if len(responseJson.TopBlockedDomains) > 0 {
		domains := make([]DNSStatsBlockedDomain, 0, len(responseJson.TopBlockedDomains))

		for domain, count := range responseJson.TopBlockedDomains {
			domains = append(domains, DNSStatsBlockedDomain{
				Domain:         domain,
				PercentBlocked: int(float64(count) / float64(responseJson.BlockedQueries) * 100),
			})
		}

		sort.Slice(domains, func(a, b int) bool {
			return domains[a].PercentBlocked > domains[b].PercentBlocked
		})

		stats.TopBlockedDomains = domains[:min(len(domains), 5)]
	}

	// Pihole _should_ return data for the last 24 hours in a 10 minute interval, 6*24 = 144
	if len(responseJson.QueriesSeries) != 144 || len(responseJson.BlockedSeries) != 144 {
		slog.Warn(
			"DNS stats for pihole: did not get expected 144 data points",
			"len(queries)", len(responseJson.QueriesSeries),
			"len(blocked)", len(responseJson.BlockedSeries),
		)
		return stats, nil
	}

	var lowestTimestamp int64 = 0

	for timestamp := range responseJson.QueriesSeries {
		if lowestTimestamp == 0 || timestamp < lowestTimestamp {
			lowestTimestamp = timestamp
		}
	}

	maxQueriesInSeries := 0

	for i := 0; i < 8; i++ {
		queries := 0
		blocked := 0

		// Each of the 8 bars covers 3 hours (10800s), i.e. 18 ten-minute (600s) buckets.
		for j := 0; j < 18; j++ {
			index := lowestTimestamp + int64(i*10800+j*600)

			queries += responseJson.QueriesSeries[index]
			blocked += responseJson.BlockedSeries[index]
		}

		if queries > maxQueriesInSeries {
			maxQueriesInSeries = queries
		}

		stats.Series[i] = DNSStatsSeries{
			Queries: queries,
			Blocked: blocked,
		}

		if queries > 0 {
			stats.Series[i].PercentBlocked = int(float64(blocked) / float64(queries) * 100)
		}
	}

	for i := 0; i < 8; i++ {
		stats.Series[i].PercentTotal = int(float64(stats.Series[i].Queries) / float64(maxQueriesInSeries) * 100)
	}

	return stats, nil
}
@@ -1,247 +0,0 @@
package feed

import (
	"math"
	"sort"
	"time"
)

type ForumPost struct {
	Title           string
	DiscussionUrl   string
	TargetUrl       string
	TargetUrlDomain string
	ThumbnailUrl    string
	CommentCount    int
	Score           int
	Engagement      float64
	TimePosted      time.Time
	Tags            []string
	IsCrosspost     bool
}

type ForumPosts []ForumPost

type Calendar struct {
	CurrentDay        int
	CurrentWeekNumber int
	CurrentMonthName  string
	CurrentYear       int
	Days              []int
}

type Weather struct {
	Temperature         int
	ApparentTemperature int
	WeatherCode         int
	CurrentColumn       int
	SunriseColumn       int
	SunsetColumn        int
	Columns             []weatherColumn
}

type AppRelease struct {
	Source        ReleaseSource
	SourceIconURL string
	Name          string
	Version       string
	NotesUrl      string
	TimeReleased  time.Time
	Downvotes     int
}

type AppReleases []AppRelease

type Video struct {
	ThumbnailUrl string
	Title        string
	Url          string
	Author       string
	AuthorUrl    string
	TimePosted   time.Time
}

type Videos []Video

var currencyToSymbol = map[string]string{
	"USD": "$",
	"EUR": "€",
	"JPY": "¥",
	"CAD": "C$",
	"AUD": "A$",
	"GBP": "£",
	"CHF": "Fr",
	"NZD": "N$",
	"INR": "₹",
	"BRL": "R$",
	"RUB": "₽",
	"TRY": "₺",
	"ZAR": "R",
	"CNY": "¥",
	"KRW": "₩",
	"HKD": "HK$",
	"SGD": "S$",
	"SEK": "kr",
	"NOK": "kr",
	"DKK": "kr",
	"PLN": "zł",
	"PHP": "₱",
}

type DNSStats struct {
	TotalQueries      int
	BlockedQueries    int
	BlockedPercent    int
	ResponseTime      int
	DomainsBlocked    int
	Series            [8]DNSStatsSeries
	TopBlockedDomains []DNSStatsBlockedDomain
}

type DNSStatsSeries struct {
	Queries        int
	Blocked        int
	PercentTotal   int
	PercentBlocked int
}

type DNSStatsBlockedDomain struct {
	Domain         string
	PercentBlocked int
}

type MarketRequest struct {
	Name       string `yaml:"name"`
	Symbol     string `yaml:"symbol"`
	ChartLink  string `yaml:"chart-link"`
	SymbolLink string `yaml:"symbol-link"`
}

type Market struct {
	MarketRequest
	Currency       string  `yaml:"-"`
	Price          float64 `yaml:"-"`
	PercentChange  float64 `yaml:"-"`
	SvgChartPoints string  `yaml:"-"`
}

type Markets []Market

func (t Markets) SortByAbsChange() {
	sort.Slice(t, func(i, j int) bool {
		return math.Abs(t[i].PercentChange) > math.Abs(t[j].PercentChange)
	})
}

func (t Markets) SortByChange() {
	sort.Slice(t, func(i, j int) bool {
		return t[i].PercentChange > t[j].PercentChange
	})
}

var weatherCodeTable = map[int]string{
	0:  "Clear Sky",
	1:  "Mainly Clear",
	2:  "Partly Cloudy",
	3:  "Overcast",
	45: "Fog",
	48: "Rime Fog",
	51: "Drizzle",
	53: "Drizzle",
	55: "Drizzle",
	56: "Drizzle",
	57: "Drizzle",
	61: "Rain",
	63: "Moderate Rain",
	65: "Heavy Rain",
	66: "Freezing Rain",
	67: "Freezing Rain",
	71: "Snow",
	73: "Moderate Snow",
	75: "Heavy Snow",
	77: "Snow Grains",
	80: "Rain",
	81: "Moderate Rain",
	82: "Heavy Rain",
	85: "Snow",
	86: "Snow",
	95: "Thunderstorm",
	96: "Thunderstorm",
	99: "Thunderstorm",
}

func (w *Weather) WeatherCodeAsString() string {
	if weatherCode, ok := weatherCodeTable[w.WeatherCode]; ok {
		return weatherCode
	}

	return ""
}

const depreciatePostsOlderThanHours = 7
const maxDepreciation = 0.9
const maxDepreciationAfterHours = 24

func (p ForumPosts) CalculateEngagement() {
	var totalComments int
	var totalScore int

	for i := range p {
		totalComments += p[i].CommentCount
		totalScore += p[i].Score
	}

	numberOfPosts := float64(len(p))
	averageComments := float64(totalComments) / numberOfPosts
	averageScore := float64(totalScore) / numberOfPosts

	for i := range p {
		p[i].Engagement = (float64(p[i].CommentCount)/averageComments + float64(p[i].Score)/averageScore) / 2

		elapsed := time.Since(p[i].TimePosted)

		if elapsed < time.Hour*depreciatePostsOlderThanHours {
			continue
		}

		// Ramp the age penalty up with Min so it is capped at maxDepreciation
		// once a post is maxDepreciationAfterHours past the threshold.
		p[i].Engagement *= 1.0 - (math.Min(elapsed.Hours()-depreciatePostsOlderThanHours, maxDepreciationAfterHours)/maxDepreciationAfterHours)*maxDepreciation
	}
}

func (p ForumPosts) SortByEngagement() {
	sort.Slice(p, func(i, j int) bool {
		return p[i].Engagement > p[j].Engagement
	})
}

func (s *ForumPost) HasTargetUrl() bool {
	return s.TargetUrl != ""
}

func (p ForumPosts) FilterPostedBefore(postedBefore time.Duration) []ForumPost {
	recent := make([]ForumPost, 0, len(p))

	for i := range p {
		if time.Since(p[i].TimePosted) < postedBefore {
			recent = append(recent, p[i])
		}
	}

	return recent
}

func (r AppReleases) SortByNewest() AppReleases {
	sort.Slice(r, func(i, j int) bool {
		return r[i].TimeReleased.After(r[j].TimeReleased)
	})

	return r
}

func (v Videos) SortByNewest() Videos {
	sort.Slice(v, func(i, j int) bool {
		return v[i].TimePosted.After(v[j].TimePosted)
	})

	return v
}
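A worked sketch of the engagement math, assuming the capped (Min) reading of the depreciation formula above and fmt/time imports:

func exampleEngagement() {
	posts := ForumPosts{
		{CommentCount: 20, Score: 50, TimePosted: time.Now()},
		{CommentCount: 10, Score: 100, TimePosted: time.Now().Add(-40 * time.Hour)},
	}

	posts.CalculateEngagement()
	posts.SortByEngagement()

	// Averages are 15 comments and 75 points, so both posts score
	// (comments/15 + score/75) / 2 = 1.0 raw; the 40-hour-old post is then
	// multiplied by 1 - (24/24)*0.9 = 0.1.
	fmt.Println(posts[0].Engagement, posts[1].Engagement) // 1 0.1
}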
@@ -1,146 +0,0 @@
package feed

import (
	"fmt"
	"html"
	"net/http"
	"net/url"
	"strings"
	"time"
)

type subredditResponseJson struct {
	Data struct {
		Children []struct {
			Data struct {
				Id            string  `json:"id"`
				Title         string  `json:"title"`
				Upvotes       int     `json:"ups"`
				Url           string  `json:"url"`
				Time          float64 `json:"created"`
				CommentsCount int     `json:"num_comments"`
				Domain        string  `json:"domain"`
				Permalink     string  `json:"permalink"`
				Stickied      bool    `json:"stickied"`
				Pinned        bool    `json:"pinned"`
				IsSelf        bool    `json:"is_self"`
				Thumbnail     string  `json:"thumbnail"`
				Flair         string  `json:"link_flair_text"`
				ParentList    []struct {
					Id        string `json:"id"`
					Subreddit string `json:"subreddit"`
					Permalink string `json:"permalink"`
				} `json:"crosspost_parent_list"`
			} `json:"data"`
		} `json:"children"`
	} `json:"data"`
}

func templateRedditCommentsURL(template, subreddit, postId, postPath string) string {
	template = strings.ReplaceAll(template, "{SUBREDDIT}", subreddit)
	template = strings.ReplaceAll(template, "{POST-ID}", postId)
	template = strings.ReplaceAll(template, "{POST-PATH}", strings.TrimLeft(postPath, "/"))

	return template
}

func FetchSubredditPosts(subreddit, sort, topPeriod, search, commentsUrlTemplate, requestUrlTemplate string, showFlairs bool) (ForumPosts, error) {
	query := url.Values{}
	var requestUrl string

	if search != "" {
		query.Set("q", search+" subreddit:"+subreddit)
		query.Set("sort", sort)
	}

	if sort == "top" {
		query.Set("t", topPeriod)
	}

	if search != "" {
		requestUrl = fmt.Sprintf("https://www.reddit.com/search.json?%s", query.Encode())
	} else {
		requestUrl = fmt.Sprintf("https://www.reddit.com/r/%s/%s.json?%s", subreddit, sort, query.Encode())
	}

	if requestUrlTemplate != "" {
		requestUrl = strings.ReplaceAll(requestUrlTemplate, "{REQUEST-URL}", requestUrl)
	}

	request, err := http.NewRequest("GET", requestUrl, nil)

	if err != nil {
		return nil, err
	}

	// Required to increase rate limit, otherwise Reddit randomly returns 429 even after just 2 requests
	addBrowserUserAgentHeader(request)
	responseJson, err := decodeJsonFromRequest[subredditResponseJson](defaultClient, request)

	if err != nil {
		return nil, err
	}

	if len(responseJson.Data.Children) == 0 {
		return nil, fmt.Errorf("no posts found")
	}

	posts := make(ForumPosts, 0, len(responseJson.Data.Children))

	for i := range responseJson.Data.Children {
		post := &responseJson.Data.Children[i].Data

		if post.Stickied || post.Pinned {
			continue
		}

		var commentsUrl string

		if commentsUrlTemplate == "" {
			commentsUrl = "https://www.reddit.com" + post.Permalink
		} else {
			commentsUrl = templateRedditCommentsURL(commentsUrlTemplate, subreddit, post.Id, post.Permalink)
		}

		forumPost := ForumPost{
			Title:           html.UnescapeString(post.Title),
			DiscussionUrl:   commentsUrl,
			TargetUrlDomain: post.Domain,
			CommentCount:    post.CommentsCount,
			Score:           post.Upvotes,
			TimePosted:      time.Unix(int64(post.Time), 0),
		}

		if post.Thumbnail != "" && post.Thumbnail != "self" && post.Thumbnail != "default" {
			forumPost.ThumbnailUrl = post.Thumbnail
		}

		if !post.IsSelf {
			forumPost.TargetUrl = post.Url
		}

		if showFlairs && post.Flair != "" {
			forumPost.Tags = append(forumPost.Tags, post.Flair)
		}

		if len(post.ParentList) > 0 {
			forumPost.IsCrosspost = true
			forumPost.TargetUrlDomain = "r/" + post.ParentList[0].Subreddit

			if commentsUrlTemplate == "" {
				forumPost.TargetUrl = "https://www.reddit.com" + post.ParentList[0].Permalink
			} else {
				forumPost.TargetUrl = templateRedditCommentsURL(
					commentsUrlTemplate,
					post.ParentList[0].Subreddit,
					post.ParentList[0].Id,
					post.ParentList[0].Permalink,
				)
			}
		}

		posts = append(posts, forumPost)
	}

	return posts, nil
}
@@ -1,72 +0,0 @@
package feed

import (
    "errors"
    "fmt"
    "log/slog"
)

type ReleaseSource string

const (
    ReleaseSourceCodeberg  ReleaseSource = "codeberg"
    ReleaseSourceGithub    ReleaseSource = "github"
    ReleaseSourceGitlab    ReleaseSource = "gitlab"
    ReleaseSourceDockerHub ReleaseSource = "dockerhub"
)

type ReleaseRequest struct {
    Source     ReleaseSource
    Repository string
    Token      *string
}

func FetchLatestReleases(requests []*ReleaseRequest) (AppReleases, error) {
    job := newJob(fetchLatestReleaseTask, requests).withWorkers(20)
    results, errs, err := workerPoolDo(job)

    if err != nil {
        return nil, err
    }

    var failed int

    releases := make(AppReleases, 0, len(requests))

    for i := range results {
        if errs[i] != nil {
            failed++
            slog.Error("Failed to fetch release", "source", requests[i].Source, "repository", requests[i].Repository, "error", errs[i])
            continue
        }

        releases = append(releases, *results[i])
    }

    if failed == len(requests) {
        return nil, ErrNoContent
    }

    releases.SortByNewest()

    if failed > 0 {
        return releases, fmt.Errorf("%w: could not get %d releases", ErrPartialContent, failed)
    }

    return releases, nil
}

func fetchLatestReleaseTask(request *ReleaseRequest) (*AppRelease, error) {
    switch request.Source {
    case ReleaseSourceCodeberg:
        return fetchLatestCodebergRelease(request)
    case ReleaseSourceGithub:
        return fetchLatestGithubRelease(request)
    case ReleaseSourceGitlab:
        return fetchLatestGitLabRelease(request)
    case ReleaseSourceDockerHub:
        return fetchLatestDockerHubRelease(request)
    }

    return nil, errors.New("unsupported source")
}
@@ -1,227 +0,0 @@
package feed

import (
    "context"
    "crypto/tls"
    "encoding/json"
    "encoding/xml"
    "fmt"
    "io"
    "net/http"
    "sync"
    "time"
)

const defaultClientTimeout = 5 * time.Second

var defaultClient = &http.Client{
    Timeout: defaultClientTimeout,
}

var insecureClientTransport = &http.Transport{
    TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
}

var defaultInsecureClient = &http.Client{
    Timeout:   defaultClientTimeout,
    Transport: insecureClientTransport,
}

type RequestDoer interface {
    Do(*http.Request) (*http.Response, error)
}

func addBrowserUserAgentHeader(request *http.Request) {
    request.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:123.0) Gecko/20100101 Firefox/123.0")
}

func truncateString(s string, maxLen int) string {
    asRunes := []rune(s)

    if len(asRunes) > maxLen {
        return string(asRunes[:maxLen])
    }

    return s
}

func decodeJsonFromRequest[T any](client RequestDoer, request *http.Request) (T, error) {
    response, err := client.Do(request)
    var result T

    if err != nil {
        return result, err
    }

    defer response.Body.Close()

    body, err := io.ReadAll(response.Body)

    if err != nil {
        return result, err
    }

    if response.StatusCode != http.StatusOK {
        return result, fmt.Errorf(
            "unexpected status code %d for %s, response: %s",
            response.StatusCode,
            request.URL,
            truncateString(string(body), 256),
        )
    }

    err = json.Unmarshal(body, &result)

    if err != nil {
        return result, err
    }

    return result, nil
}

func decodeJsonFromRequestTask[T any](client RequestDoer) func(*http.Request) (T, error) {
    return func(request *http.Request) (T, error) {
        return decodeJsonFromRequest[T](client, request)
    }
}

// TODO: tidy up, these are a copy of the above but with a line changed
func decodeXmlFromRequest[T any](client RequestDoer, request *http.Request) (T, error) {
    response, err := client.Do(request)
    var result T

    if err != nil {
        return result, err
    }

    defer response.Body.Close()

    body, err := io.ReadAll(response.Body)

    if err != nil {
        return result, err
    }

    if response.StatusCode != http.StatusOK {
        return result, fmt.Errorf(
            "unexpected status code %d for %s, response: %s",
            response.StatusCode,
            request.URL,
            truncateString(string(body), 256),
        )
    }

    err = xml.Unmarshal(body, &result)

    if err != nil {
        return result, err
    }

    return result, nil
}

func decodeXmlFromRequestTask[T any](client RequestDoer) func(*http.Request) (T, error) {
    return func(request *http.Request) (T, error) {
        return decodeXmlFromRequest[T](client, request)
    }
}
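// Illustrative sketch, not from the original source: the TODO above could be
// resolved by sharing the response handling and injecting only the unmarshal
// step. A minimal sketch, assuming only the helpers already in this file;
// decodeBodyFromRequest is a hypothetical name.
func decodeBodyFromRequest[T any](client RequestDoer, request *http.Request, unmarshal func([]byte, any) error) (T, error) {
    response, err := client.Do(request)
    var result T

    if err != nil {
        return result, err
    }

    defer response.Body.Close()

    body, err := io.ReadAll(response.Body)

    if err != nil {
        return result, err
    }

    if response.StatusCode != http.StatusOK {
        return result, fmt.Errorf(
            "unexpected status code %d for %s, response: %s",
            response.StatusCode,
            request.URL,
            truncateString(string(body), 256),
        )
    }

    // json.Unmarshal and xml.Unmarshal both match this signature, so the two
    // decode functions above would reduce to:
    //   decodeBodyFromRequest[T](client, request, json.Unmarshal)
    //   decodeBodyFromRequest[T](client, request, xml.Unmarshal)
    if err := unmarshal(body, &result); err != nil {
        return result, err
    }

    return result, nil
}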
type workerPoolTask[I any, O any] struct {
    index  int
    input  I
    output O
    err    error
}

type workerPoolJob[I any, O any] struct {
    data    []I
    workers int
    task    func(I) (O, error)
    ctx     context.Context
}

const defaultNumWorkers = 10

func (job *workerPoolJob[I, O]) withWorkers(workers int) *workerPoolJob[I, O] {
    if workers == 0 {
        job.workers = defaultNumWorkers
    } else if workers > len(job.data) {
        job.workers = len(job.data)
    } else {
        job.workers = workers
    }

    return job
}

// func (job *workerPoolJob[I, O]) withContext(ctx context.Context) *workerPoolJob[I, O] {
//     if ctx != nil {
//         job.ctx = ctx
//     }

//     return job
// }

func newJob[I any, O any](task func(I) (O, error), data []I) *workerPoolJob[I, O] {
    return &workerPoolJob[I, O]{
        workers: defaultNumWorkers,
        task:    task,
        data:    data,
        ctx:     context.Background(),
    }
}

func workerPoolDo[I any, O any](job *workerPoolJob[I, O]) ([]O, []error, error) {
    results := make([]O, len(job.data))
    errs := make([]error, len(job.data))

    if len(job.data) == 0 {
        return results, errs, nil
    }

    tasksQueue := make(chan *workerPoolTask[I, O])
    resultsQueue := make(chan *workerPoolTask[I, O])

    var wg sync.WaitGroup

    for range job.workers {
        wg.Add(1)
        go func() {
            defer wg.Done()

            for t := range tasksQueue {
                t.output, t.err = job.task(t.input)
                resultsQueue <- t
            }
        }()
    }

    var err error

    go func() {
    loop:
        for i := range job.data {
            select {
            default:
                tasksQueue <- &workerPoolTask[I, O]{
                    index: i,
                    input: job.data[i],
                }
            case <-job.ctx.Done():
                err = job.ctx.Err()
                break loop
            }
        }

        close(tasksQueue)
        wg.Wait()
        close(resultsQueue)
    }()

    for task := range resultsQueue {
        errs[task.index] = task.err
        results[task.index] = task.output
    }

    return results, errs, err
}
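// Illustrative sketch, not from the original source: the calling convention
// the fetchers in this package use to drive the pool above. The task function
// and inputs here are hypothetical.
func exampleWorkerPoolUsage() {
    // A task maps one input to one output (or an error).
    task := func(u string) (int, error) { return len(u), nil }

    job := newJob(task, []string{"a", "bb", "ccc"}).withWorkers(2)
    results, errs, err := workerPoolDo(job)

    // results[i] and errs[i] line up with the i-th input; err is non-nil only
    // when the job's context is cancelled before all inputs were queued.
    _, _, _ = results, errs, err
}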
@@ -1,269 +0,0 @@
package feed

import (
    "fmt"
    "html"
    "io"
    "log/slog"
    "net/http"
    "net/url"
    "regexp"
    "sort"
    "strings"
    "time"

    "github.com/mmcdole/gofeed"
    gofeedext "github.com/mmcdole/gofeed/extensions"
)

type RSSFeedItem struct {
    ChannelName string
    ChannelURL  string
    Title       string
    Link        string
    ImageURL    string
    Categories  []string
    Description string
    PublishedAt time.Time
}

// doesn't cover all cases but works the vast majority of the time
var htmlTagsWithAttributesPattern = regexp.MustCompile(`<\/?[a-zA-Z0-9-]+ *(?:[a-zA-Z-]+=(?:"|').*?(?:"|') ?)* *\/?>`)
var sequentialWhitespacePattern = regexp.MustCompile(`\s+`)

func sanitizeFeedDescription(description string) string {
    if description == "" {
        return ""
    }

    description = strings.ReplaceAll(description, "\n", " ")
    description = htmlTagsWithAttributesPattern.ReplaceAllString(description, "")
    description = sequentialWhitespacePattern.ReplaceAllString(description, " ")
    description = strings.TrimSpace(description)
    description = html.UnescapeString(description)

    return description
}
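// Illustrative sketch, not from the original source: what the regex-based
// sanitizer above does to a typical feed description. The input HTML here is
// made up for the example.
//
//  sanitizeFeedDescription(`<p class="lead">Hello &amp;
//    <a href="https://example.com">world</a></p>`)
//  // newlines become spaces, tags are stripped, whitespace is collapsed,
//  // and entities are unescaped last:
//  // => `Hello & world`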
func shortenFeedDescriptionLen(description string, maxLen int) string {
    description, _ = limitStringLength(description, 1000)
    description = sanitizeFeedDescription(description)
    description, limited := limitStringLength(description, maxLen)

    if limited {
        description += "…"
    }

    return description
}

type RSSFeedRequest struct {
    Url             string            `yaml:"url"`
    Title           string            `yaml:"title"`
    HideCategories  bool              `yaml:"hide-categories"`
    HideDescription bool              `yaml:"hide-description"`
    ItemLinkPrefix  string            `yaml:"item-link-prefix"`
    Headers         map[string]string `yaml:"headers"`
    IsDetailed      bool              `yaml:"-"`
}

type RSSFeedItems []RSSFeedItem

func (f RSSFeedItems) SortByNewest() RSSFeedItems {
    sort.Slice(f, func(i, j int) bool {
        return f[i].PublishedAt.After(f[j].PublishedAt)
    })

    return f
}

var feedParser = gofeed.NewParser()

func getItemsFromRSSFeedTask(request RSSFeedRequest) ([]RSSFeedItem, error) {
    req, err := http.NewRequest("GET", request.Url, nil)
    if err != nil {
        return nil, err
    }

    for key, value := range request.Headers {
        req.Header.Add(key, value)
    }

    resp, err := defaultClient.Do(req)
    if err != nil {
        return nil, err
    }
    defer resp.Body.Close()

    if resp.StatusCode != http.StatusOK {
        return nil, fmt.Errorf("unexpected status code %d from %s", resp.StatusCode, request.Url)
    }

    body, err := io.ReadAll(resp.Body)
    if err != nil {
        return nil, err
    }

    feed, err := feedParser.ParseString(string(body))

    if err != nil {
        return nil, err
    }

    items := make(RSSFeedItems, 0, len(feed.Items))

    for i := range feed.Items {
        item := feed.Items[i]

        rssItem := RSSFeedItem{
            ChannelURL: feed.Link,
        }

        if request.ItemLinkPrefix != "" {
            rssItem.Link = request.ItemLinkPrefix + item.Link
        } else if strings.HasPrefix(item.Link, "http://") || strings.HasPrefix(item.Link, "https://") {
            rssItem.Link = item.Link
        } else {
            parsedUrl, err := url.Parse(feed.Link)

            if err != nil {
                parsedUrl, err = url.Parse(request.Url)
            }

            if err == nil {
                var link string

                if len(item.Link) > 0 && item.Link[0] == '/' {
                    link = item.Link
                } else {
                    link = "/" + item.Link
                }

                rssItem.Link = parsedUrl.Scheme + "://" + parsedUrl.Host + link
            }
        }

        if item.Title != "" {
            rssItem.Title = item.Title
        } else {
            rssItem.Title = shortenFeedDescriptionLen(item.Description, 100)
        }

        if request.IsDetailed {
            if !request.HideDescription && item.Description != "" && item.Title != "" {
                rssItem.Description = shortenFeedDescriptionLen(item.Description, 200)
            }

            if !request.HideCategories {
                var categories = make([]string, 0, 6)

                for _, category := range item.Categories {
                    if len(categories) == 6 {
                        break
                    }

                    if len(category) == 0 || len(category) > 30 {
                        continue
                    }

                    categories = append(categories, category)
                }

                rssItem.Categories = categories
            }
        }

        if request.Title != "" {
            rssItem.ChannelName = request.Title
        } else {
            rssItem.ChannelName = feed.Title
        }

        if item.Image != nil {
            rssItem.ImageURL = item.Image.URL
        } else if url := findThumbnailInItemExtensions(item); url != "" {
            rssItem.ImageURL = url
        } else if feed.Image != nil {
            if len(feed.Image.URL) > 0 && feed.Image.URL[0] == '/' {
                rssItem.ImageURL = strings.TrimRight(feed.Link, "/") + feed.Image.URL
            } else {
                rssItem.ImageURL = feed.Image.URL
            }
        }

        if item.PublishedParsed != nil {
            rssItem.PublishedAt = *item.PublishedParsed
        } else {
            rssItem.PublishedAt = time.Now()
        }

        items = append(items, rssItem)
    }

    return items, nil
}

func recursiveFindThumbnailInExtensions(extensions map[string][]gofeedext.Extension) string {
    for _, exts := range extensions {
        for _, ext := range exts {
            if ext.Name == "thumbnail" || ext.Name == "image" {
                if url, ok := ext.Attrs["url"]; ok {
                    return url
                }
            }

            if ext.Children != nil {
                if url := recursiveFindThumbnailInExtensions(ext.Children); url != "" {
                    return url
                }
            }
        }
    }

    return ""
}

func findThumbnailInItemExtensions(item *gofeed.Item) string {
    media, ok := item.Extensions["media"]

    if !ok {
        return ""
    }

    return recursiveFindThumbnailInExtensions(media)
}

func GetItemsFromRSSFeeds(requests []RSSFeedRequest) (RSSFeedItems, error) {
    job := newJob(getItemsFromRSSFeedTask, requests).withWorkers(10)
    feeds, errs, err := workerPoolDo(job)

    if err != nil {
        return nil, fmt.Errorf("%w: %v", ErrNoContent, err)
    }

    failed := 0

    entries := make(RSSFeedItems, 0, len(feeds)*10)

    for i := range feeds {
        if errs[i] != nil {
            failed++
            slog.Error("failed to get rss feed", "error", errs[i], "url", requests[i].Url)
            continue
        }

        entries = append(entries, feeds[i]...)
    }

    if failed == len(requests) {
        return nil, ErrNoContent
    }

    entries.SortByNewest()

    if failed > 0 {
        return entries, fmt.Errorf("%w: missing %d RSS feeds", ErrPartialContent, failed)
    }

    return entries, nil
}
@@ -1,267 +0,0 @@
package feed

import (
    "encoding/json"
    "errors"
    "fmt"
    "log/slog"
    "net/http"
    "slices"
    "sort"
    "strings"
    "time"
)

type TwitchCategory struct {
    Slug         string `json:"slug"`
    Name         string `json:"name"`
    AvatarUrl    string `json:"avatarURL"`
    ViewersCount int    `json:"viewersCount"`
    Tags         []struct {
        Name string `json:"tagName"`
    } `json:"tags"`
    GameReleaseDate string `json:"originalReleaseDate"`
    IsNew           bool   `json:"-"`
}

type TwitchChannel struct {
    Login        string
    Exists       bool
    Name         string
    StreamTitle  string
    AvatarUrl    string
    IsLive       bool
    LiveSince    time.Time
    Category     string
    CategorySlug string
    ViewersCount int
}

type TwitchChannels []TwitchChannel

func (channels TwitchChannels) SortByViewers() {
    sort.Slice(channels, func(i, j int) bool {
        return channels[i].ViewersCount > channels[j].ViewersCount
    })
}

func (channels TwitchChannels) SortByLive() {
    sort.SliceStable(channels, func(i, j int) bool {
        return channels[i].IsLive && !channels[j].IsLive
    })
}

type twitchOperationResponse struct {
    Data       json.RawMessage
    Extensions struct {
        OperationName string `json:"operationName"`
    }
}

type twitchChannelShellOperationResponse struct {
    UserOrError struct {
        Type            string `json:"__typename"`
        DisplayName     string `json:"displayName"`
        ProfileImageUrl string `json:"profileImageURL"`
        Stream          *struct {
            ViewersCount int `json:"viewersCount"`
        }
    } `json:"userOrError"`
}

type twitchStreamMetadataOperationResponse struct {
    UserOrNull *struct {
        Stream *struct {
            StartedAt string `json:"createdAt"`
            Game      *struct {
                Slug string `json:"slug"`
                Name string `json:"name"`
            } `json:"game"`
        } `json:"stream"`
        LastBroadcast *struct {
            Title string `json:"title"`
        }
    } `json:"user"`
}

type twitchDirectoriesOperationResponse struct {
    Data struct {
        DirectoriesWithTags struct {
            Edges []struct {
                Node TwitchCategory `json:"node"`
            } `json:"edges"`
        } `json:"directoriesWithTags"`
    } `json:"data"`
}

const twitchGqlEndpoint = "https://gql.twitch.tv/gql"
const twitchGqlClientId = "kimne78kx3ncx6brgo4mv6wki5h1ko"

const twitchDirectoriesOperationRequestBody = `[{"operationName": "BrowsePage_AllDirectories","variables": {"limit": %d,"options": {"sort": "VIEWER_COUNT","tags": []}},"extensions": {"persistedQuery": {"version": 1,"sha256Hash": "2f67f71ba89f3c0ed26a141ec00da1defecb2303595f5cda4298169549783d9e"}}}]`

func FetchTopGamesFromTwitch(exclude []string, limit int) ([]TwitchCategory, error) {
    reader := strings.NewReader(fmt.Sprintf(twitchDirectoriesOperationRequestBody, len(exclude)+limit))
    request, _ := http.NewRequest("POST", twitchGqlEndpoint, reader)
    request.Header.Add("Client-ID", twitchGqlClientId)
    response, err := decodeJsonFromRequest[[]twitchDirectoriesOperationResponse](defaultClient, request)

    if err != nil {
        return nil, err
    }

    if len(response) == 0 {
        return nil, errors.New("no categories could be retrieved")
    }

    edges := response[0].Data.DirectoriesWithTags.Edges
    categories := make([]TwitchCategory, 0, len(edges))

    for i := range edges {
        if slices.Contains(exclude, edges[i].Node.Slug) {
            continue
        }

        category := &edges[i].Node
        category.AvatarUrl = strings.Replace(category.AvatarUrl, "285x380", "144x192", 1)

        if len(category.Tags) > 2 {
            category.Tags = category.Tags[:2]
        }

        gameReleasedDate, err := time.Parse("2006-01-02T15:04:05Z", category.GameReleaseDate)

        if err == nil {
            if time.Since(gameReleasedDate) < 14*24*time.Hour {
                category.IsNew = true
            }
        }

        categories = append(categories, *category)
    }

    if len(categories) > limit {
        categories = categories[:limit]
    }

    return categories, nil
}

const twitchChannelStatusOperationRequestBody = `[
{"operationName":"ChannelShell","variables":{"login":"%s"},"extensions":{"persistedQuery":{"version":1,"sha256Hash":"580ab410bcd0c1ad194224957ae2241e5d252b2c5173d8e0cce9d32d5bb14efe"}}},
{"operationName":"StreamMetadata","variables":{"channelLogin":"%s"},"extensions":{"persistedQuery":{"version":1,"sha256Hash":"676ee2f834ede42eb4514cdb432b3134fefc12590080c9a2c9bb44a2a4a63266"}}}
]`

// TODO: rework
// The operations for multiple channels can all be sent in a single request
// rather than sending a separate request for each channel. Need to figure out
// what the limit is for max operations per request and batch operations into
// multiple requests if the number of channels exceeds the allowed limit.
// A sketch of that batching approach follows this function.

func fetchChannelFromTwitchTask(channel string) (TwitchChannel, error) {
    result := TwitchChannel{
        Login: strings.ToLower(channel),
    }

    reader := strings.NewReader(fmt.Sprintf(twitchChannelStatusOperationRequestBody, channel, channel))
    request, _ := http.NewRequest("POST", twitchGqlEndpoint, reader)
    request.Header.Add("Client-ID", twitchGqlClientId)

    response, err := decodeJsonFromRequest[[]twitchOperationResponse](defaultClient, request)

    if err != nil {
        return result, err
    }

    if len(response) != 2 {
        return result, fmt.Errorf("expected 2 operation responses, got %d", len(response))
    }

    var channelShell twitchChannelShellOperationResponse
    var streamMetadata twitchStreamMetadataOperationResponse

    for i := range response {
        switch response[i].Extensions.OperationName {
        case "ChannelShell":
            err = json.Unmarshal(response[i].Data, &channelShell)

            if err != nil {
                return result, fmt.Errorf("failed to unmarshal channel shell: %w", err)
            }
        case "StreamMetadata":
            err = json.Unmarshal(response[i].Data, &streamMetadata)

            if err != nil {
                return result, fmt.Errorf("failed to unmarshal stream metadata: %w", err)
            }
        default:
            return result, fmt.Errorf("unknown operation name: %s", response[i].Extensions.OperationName)
        }
    }

    if channelShell.UserOrError.Type != "User" {
        result.Name = result.Login
        return result, nil
    }

    result.Exists = true
    result.Name = channelShell.UserOrError.DisplayName
    result.AvatarUrl = channelShell.UserOrError.ProfileImageUrl

    if channelShell.UserOrError.Stream != nil {
        result.IsLive = true
        result.ViewersCount = channelShell.UserOrError.Stream.ViewersCount

        if streamMetadata.UserOrNull != nil && streamMetadata.UserOrNull.Stream != nil {
            if streamMetadata.UserOrNull.LastBroadcast != nil {
                result.StreamTitle = streamMetadata.UserOrNull.LastBroadcast.Title
            }

            if streamMetadata.UserOrNull.Stream.Game != nil {
                result.Category = streamMetadata.UserOrNull.Stream.Game.Name
                result.CategorySlug = streamMetadata.UserOrNull.Stream.Game.Slug
            }
            startedAt, err := time.Parse("2006-01-02T15:04:05Z", streamMetadata.UserOrNull.Stream.StartedAt)

            if err == nil {
                result.LiveSince = startedAt
            } else {
                slog.Warn("failed to parse twitch stream started at", "error", err, "started_at", streamMetadata.UserOrNull.Stream.StartedAt)
            }
        }
    }

    return result, nil
}
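// Illustrative sketch, not from the original source: the batching described in
// the TODO above. It assumes the GQL endpoint returns responses in the same
// order the operations were sent, and uses a hypothetical per-request limit
// (the real limit is exactly the open question in the TODO).
func fetchChannelsFromTwitchBatched(channels []string) ([]twitchOperationResponse, error) {
    const maxChannelsPerRequest = 35 // hypothetical, needs verification

    responses := make([]twitchOperationResponse, 0, len(channels)*2)

    for start := 0; start < len(channels); start += maxChannelsPerRequest {
        end := min(start+maxChannelsPerRequest, len(channels))

        // Concatenate the two operations for every channel in this batch into
        // a single JSON array.
        operations := make([]string, 0, (end-start)*2)
        for _, channel := range channels[start:end] {
            operations = append(operations,
                fmt.Sprintf(`{"operationName":"ChannelShell","variables":{"login":"%s"},"extensions":{"persistedQuery":{"version":1,"sha256Hash":"580ab410bcd0c1ad194224957ae2241e5d252b2c5173d8e0cce9d32d5bb14efe"}}}`, channel),
                fmt.Sprintf(`{"operationName":"StreamMetadata","variables":{"channelLogin":"%s"},"extensions":{"persistedQuery":{"version":1,"sha256Hash":"676ee2f834ede42eb4514cdb432b3134fefc12590080c9a2c9bb44a2a4a63266"}}}`, channel),
            )
        }

        body := "[" + strings.Join(operations, ",") + "]"
        request, _ := http.NewRequest("POST", twitchGqlEndpoint, strings.NewReader(body))
        request.Header.Add("Client-ID", twitchGqlClientId)

        batch, err := decodeJsonFromRequest[[]twitchOperationResponse](defaultClient, request)
        if err != nil {
            return nil, err
        }

        responses = append(responses, batch...)
    }

    // Under the ordering assumption, responses[2*i] and responses[2*i+1] are
    // the ChannelShell and StreamMetadata results for channels[i].
    return responses, nil
}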
func FetchChannelsFromTwitch(channelLogins []string) (TwitchChannels, error) {
    result := make(TwitchChannels, 0, len(channelLogins))

    job := newJob(fetchChannelFromTwitchTask, channelLogins).withWorkers(10)
    channels, errs, err := workerPoolDo(job)

    if err != nil {
        return result, err
    }

    var failed int

    for i := range channels {
        if errs[i] != nil {
            failed++
            slog.Warn("failed to fetch twitch channel", "channel", channelLogins[i], "error", errs[i])
            continue
        }

        result = append(result, channels[i])
    }

    if failed == len(channelLogins) {
        return result, ErrNoContent
    }

    if failed > 0 {
        return result, fmt.Errorf("%w: failed to fetch %d channels", ErrPartialContent, failed)
    }

    return result, nil
}
@@ -1,117 +0,0 @@
package feed

import (
    "errors"
    "fmt"
    "net/url"
    "regexp"
    "slices"
    "strings"
    "time"
)

var (
    ErrNoContent      = errors.New("failed to retrieve any content")
    ErrPartialContent = errors.New("failed to retrieve some of the content")
)

func percentChange(current, previous float64) float64 {
    return (current/previous - 1) * 100
}

func extractDomainFromUrl(u string) string {
    if u == "" {
        return ""
    }

    parsed, err := url.Parse(u)

    if err != nil {
        return ""
    }

    return strings.TrimPrefix(strings.ToLower(parsed.Host), "www.")
}

func SvgPolylineCoordsFromYValues(width float64, height float64, values []float64) string {
    if len(values) < 2 {
        return ""
    }

    verticalPadding := height * 0.02
    height -= verticalPadding * 2
    coordinates := make([]string, len(values))
    distanceBetweenPoints := width / float64(len(values)-1)
    min := slices.Min(values)
    max := slices.Max(values)

    for i := range values {
        // SVG's y axis points down, so larger values map to smaller y coordinates.
        coordinates[i] = fmt.Sprintf(
            "%.2f,%.2f",
            float64(i)*distanceBetweenPoints,
            ((max-values[i])/(max-min))*height+verticalPadding,
        )
    }

    return strings.Join(coordinates, " ")
}
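// Illustrative worked example, not from the original source, of the coordinate
// mapping above using a 100x50 viewport:
//
//  SvgPolylineCoordsFromYValues(100, 50, []float64{1, 2, 3})
//  // vertical padding is 50*0.02 = 1, leaving 48 units of usable height, so:
//  // => "0.00,49.00 50.00,25.00 100.00,1.00"
//  // (the lowest value sits near the bottom, the highest near the top)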
// maybeCopySliceWithoutZeroValues returns the slice unchanged when it contains
// no zeroes, and otherwise a copy with the zeroes filtered out, avoiding an
// allocation in the common case.
func maybeCopySliceWithoutZeroValues[T int | float64](values []T) []T {
    if len(values) == 0 {
        return values
    }

    for i := range values {
        if values[i] != 0 {
            continue
        }

        c := make([]T, 0, len(values)-1)

        for i := range values {
            if values[i] != 0 {
                c = append(c, values[i])
            }
        }

        return c
    }

    return values
}

var urlSchemePattern = regexp.MustCompile(`^[a-z]+:\/\/`)

func stripURLScheme(url string) string {
    return urlSchemePattern.ReplaceAllString(url, "")
}

func limitStringLength(s string, max int) (string, bool) {
    asRunes := []rune(s)

    if len(asRunes) > max {
        return string(asRunes[:max]), true
    }

    return s, false
}

func parseRFC3339Time(t string) time.Time {
    parsed, err := time.Parse(time.RFC3339, t)

    if err != nil {
        return time.Now()
    }

    return parsed
}

func normalizeVersionFormat(version string) string {
    version = strings.ToLower(strings.TrimSpace(version))

    if len(version) > 0 && version[0] != 'v' {
        return "v" + version
    }

    return version
}
@@ -1,104 +0,0 @@
package feed

import (
    "fmt"
    "log/slog"
    "net/http"
)

type marketResponseJson struct {
    Chart struct {
        Result []struct {
            Meta struct {
                Currency           string  `json:"currency"`
                Symbol             string  `json:"symbol"`
                RegularMarketPrice float64 `json:"regularMarketPrice"`
                ChartPreviousClose float64 `json:"chartPreviousClose"`
            } `json:"meta"`
            Indicators struct {
                Quote []struct {
                    Close []float64 `json:"close,omitempty"`
                } `json:"quote"`
            } `json:"indicators"`
        } `json:"result"`
    } `json:"chart"`
}

// TODO: allow changing the chart time frame (a parameterized sketch follows below)
const marketChartDays = 21

func FetchMarketsDataFromYahoo(marketRequests []MarketRequest) (Markets, error) {
    requests := make([]*http.Request, 0, len(marketRequests))

    for i := range marketRequests {
        request, _ := http.NewRequest("GET", fmt.Sprintf("https://query1.finance.yahoo.com/v8/finance/chart/%s?range=1mo&interval=1d", marketRequests[i].Symbol), nil)
        requests = append(requests, request)
    }

    job := newJob(decodeJsonFromRequestTask[marketResponseJson](defaultClient), requests)
    responses, errs, err := workerPoolDo(job)

    if err != nil {
        return nil, fmt.Errorf("%w: %v", ErrNoContent, err)
    }

    markets := make(Markets, 0, len(responses))
    var failed int

    for i := range responses {
        if errs[i] != nil {
            failed++
            slog.Error("Failed to fetch market data", "symbol", marketRequests[i].Symbol, "error", errs[i])
            continue
        }

        response := responses[i]

        if len(response.Chart.Result) == 0 {
            failed++
            slog.Error("Market response contains no data", "symbol", marketRequests[i].Symbol)
            continue
        }

        prices := response.Chart.Result[0].Indicators.Quote[0].Close

        if len(prices) > marketChartDays {
            prices = prices[len(prices)-marketChartDays:]
        }

        previous := response.Chart.Result[0].Meta.RegularMarketPrice

        if len(prices) >= 2 && prices[len(prices)-2] != 0 {
            previous = prices[len(prices)-2]
        }

        points := SvgPolylineCoordsFromYValues(100, 50, maybeCopySliceWithoutZeroValues(prices))

        currency, exists := currencyToSymbol[response.Chart.Result[0].Meta.Currency]

        if !exists {
            currency = response.Chart.Result[0].Meta.Currency
        }

        markets = append(markets, Market{
            MarketRequest: marketRequests[i],
            Price:         response.Chart.Result[0].Meta.RegularMarketPrice,
            Currency:      currency,
            PercentChange: percentChange(
                response.Chart.Result[0].Meta.RegularMarketPrice,
                previous,
            ),
            SvgChartPoints: points,
        })
    }

    if len(markets) == 0 {
        return nil, ErrNoContent
    }

    if failed > 0 {
        return markets, fmt.Errorf("%w: could not fetch data for %d market(s)", ErrPartialContent, failed)
    }

    return markets, nil
}
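// Illustrative sketch, not from the original source: one way to approach the
// time-frame TODO above is to thread a range value through to the URL.
// marketChartURL is a hypothetical helper, and the accepted range values are
// assumptions about the Yahoo chart API, not verified ones.
func marketChartURL(symbol, timeFrame string) string {
    // e.g. timeFrame = "1mo" (the currently hardcoded value), or "3mo", "1y", ...
    return fmt.Sprintf(
        "https://query1.finance.yahoo.com/v8/finance/chart/%s?range=%s&interval=1d",
        symbol,
        timeFrame,
    )
}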
@@ -1,115 +0,0 @@
package feed

import (
    "fmt"
    "log/slog"
    "net/http"
    "net/url"
    "strings"
    "time"
)

type youtubeFeedResponseXml struct {
    Channel     string `xml:"author>name"`
    ChannelLink string `xml:"author>uri"`
    Videos      []struct {
        Title     string `xml:"title"`
        Published string `xml:"published"`
        Link      struct {
            Href string `xml:"href,attr"`
        } `xml:"link"`

        Group struct {
            Thumbnail struct {
                Url string `xml:"url,attr"`
            } `xml:"http://search.yahoo.com/mrss/ thumbnail"`
        } `xml:"http://search.yahoo.com/mrss/ group"`
    } `xml:"entry"`
}

func parseYoutubeFeedTime(t string) time.Time {
    parsedTime, err := time.Parse("2006-01-02T15:04:05-07:00", t)

    if err != nil {
        return time.Now()
    }

    return parsedTime
}

func FetchYoutubeChannelUploads(channelIds []string, videoUrlTemplate string, includeShorts bool) (Videos, error) {
    requests := make([]*http.Request, 0, len(channelIds))

    for i := range channelIds {
        var feedUrl string
        if !includeShorts && strings.HasPrefix(channelIds[i], "UC") {
            // Swapping the "UC" channel prefix for "UULF" yields the channel's
            // long-form uploads playlist, which excludes shorts.
            playlistId := strings.Replace(channelIds[i], "UC", "UULF", 1)
            feedUrl = "https://www.youtube.com/feeds/videos.xml?playlist_id=" + playlistId
        } else {
            feedUrl = "https://www.youtube.com/feeds/videos.xml?channel_id=" + channelIds[i]
        }

        request, _ := http.NewRequest("GET", feedUrl, nil)
        requests = append(requests, request)
    }

    job := newJob(decodeXmlFromRequestTask[youtubeFeedResponseXml](defaultClient), requests).withWorkers(30)

    responses, errs, err := workerPoolDo(job)

    if err != nil {
        return nil, fmt.Errorf("%w: %v", ErrNoContent, err)
    }

    videos := make(Videos, 0, len(channelIds)*15)

    var failed int

    for i := range responses {
        if errs[i] != nil {
            failed++
            slog.Error("Failed to fetch youtube feed", "channel", channelIds[i], "error", errs[i])
            continue
        }

        response := responses[i]

        for j := range response.Videos {
            video := &response.Videos[j]
            var videoUrl string

            if videoUrlTemplate == "" {
                videoUrl = video.Link.Href
            } else {
                parsedUrl, err := url.Parse(video.Link.Href)

                if err == nil {
                    videoUrl = strings.ReplaceAll(videoUrlTemplate, "{VIDEO-ID}", parsedUrl.Query().Get("v"))
                } else {
                    videoUrl = "#"
                }
            }

            videos = append(videos, Video{
                ThumbnailUrl: video.Group.Thumbnail.Url,
                Title:        video.Title,
                Url:          videoUrl,
                Author:       response.Channel,
                AuthorUrl:    response.ChannelLink + "/videos",
                TimePosted:   parseYoutubeFeedTime(video.Published),
            })
        }
    }

    if len(videos) == 0 {
        return nil, ErrNoContent
    }

    videos.SortByNewest()

    if failed > 0 {
        return videos, fmt.Errorf("%w: missing videos from %d channels", ErrPartialContent, failed)
    }

    return videos, nil
}