Mirror of https://github.com/Xevion/glance.git (synced 2025-12-07 18:07:07 -06:00)
Initial commit

internal/feed/calendar.go (new file, 53 lines)

package feed

import "time"

// TODO: very inflexible, refactor to allow more customizability
// TODO: allow changing first day of week
// TODO: allow changing between showing the previous and next week and the entire month
func NewCalendar(now time.Time) *Calendar {
    year, week := now.ISOWeek()
    weekday := now.Weekday()

    if weekday == 0 {
        weekday = 7
    }

    currentMonthDays := daysInMonth(now.Month(), year)

    var previousMonthDays int

    if previousMonthNumber := now.Month() - 1; previousMonthNumber < 1 {
        previousMonthDays = daysInMonth(12, year-1)
    } else {
        previousMonthDays = daysInMonth(previousMonthNumber, year)
    }

    startDaysFrom := now.Day() - int(weekday+6)

    days := make([]int, 21)

    for i := 0; i < 21; i++ {
        day := startDaysFrom + i

        if day < 1 {
            day = previousMonthDays + day
        } else if day > currentMonthDays {
            day = day - currentMonthDays
        }

        days[i] = day
    }

    return &Calendar{
        CurrentDay:        now.Day(),
        CurrentWeekNumber: week,
        CurrentMonthName:  now.Month().String(),
        CurrentYear:       year,
        Days:              days,
    }
}

func daysInMonth(m time.Month, year int) int {
    return time.Date(year, m+1, 0, 0, 0, 0, 0, time.UTC).Day()
}
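
A small usage sketch, not part of the commit: calling NewCalendar from inside the feed package with a fixed date. The helper name and the date are arbitrary, and fmt is assumed to be imported.

// Hypothetical helper illustrating NewCalendar. 2024-02-18 is a Sunday,
// so Weekday() == 0 is remapped to 7 and the grid starts on Monday the 5th.
func printCalendar() {
    now := time.Date(2024, time.February, 18, 12, 0, 0, 0, time.UTC)
    c := NewCalendar(now)
    fmt.Println(c.CurrentMonthName, c.CurrentYear, "week", c.CurrentWeekNumber)
    fmt.Println(c.Days) // 21 day numbers spanning the previous, current and next week
}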

internal/feed/github.go (new file, 117 lines)

package feed

import (
    "fmt"
    "log/slog"
    "net/http"
    "time"
)

type githubReleaseResponseJson struct {
    TagName     string `json:"tag_name"`
    PublishedAt string `json:"published_at"`
    HtmlUrl     string `json:"html_url"`
    Draft       bool   `json:"draft"`
    PreRelease  bool   `json:"prerelease"`
    Reactions   struct {
        Downvotes int `json:"-1"`
    } `json:"reactions"`
}

func parseGithubTime(t string) time.Time {
    parsedTime, err := time.Parse("2006-01-02T15:04:05Z", t)

    if err != nil {
        return time.Now()
    }

    return parsedTime
}

func FetchLatestReleasesFromGithub(repositories []string, token string) (AppReleases, error) {
    appReleases := make(AppReleases, 0, len(repositories))

    if len(repositories) == 0 {
        return appReleases, nil
    }

    requests := make([]*http.Request, len(repositories))

    for i, repository := range repositories {
        request, _ := http.NewRequest("GET", fmt.Sprintf("https://api.github.com/repos/%s/releases?per_page=10", repository), nil)

        if token != "" {
            request.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token))
        }

        requests[i] = request
    }

    task := decodeJsonFromRequestTask[[]githubReleaseResponseJson](defaultClient)
    job := newJob(task, requests).withWorkers(15)
    responses, errs, err := workerPoolDo(job)

    if err != nil {
        return nil, err
    }

    var failed int

    for i := range responses {
        if errs[i] != nil {
            failed++
            slog.Error("Failed to fetch or parse github release", "error", errs[i], "url", requests[i].URL)
            continue
        }

        releases := responses[i]

        if len(releases) < 1 {
            failed++
            slog.Error("No releases found", "repository", repositories[i], "url", requests[i].URL)
            continue
        }

        var liveRelease *githubReleaseResponseJson

        for i := range releases {
            release := &releases[i]

            if !release.Draft && !release.PreRelease {
                liveRelease = release
                break
            }
        }

        if liveRelease == nil {
            slog.Error("No live release found", "repository", repositories[i], "url", requests[i].URL)
            continue
        }

        version := liveRelease.TagName

        // guard against an empty tag name before indexing into it
        if len(version) > 0 && version[0] != 'v' {
            version = "v" + version
        }

        appReleases = append(appReleases, AppRelease{
            Name:         repositories[i],
            Version:      version,
            NotesUrl:     liveRelease.HtmlUrl,
            TimeReleased: parseGithubTime(liveRelease.PublishedAt),
            Downvotes:    liveRelease.Reactions.Downvotes,
        })
    }

    if len(appReleases) == 0 {
        return nil, ErrNoContent
    }

    appReleases.SortByNewest()

    if failed > 0 {
        return appReleases, fmt.Errorf("%w: could not get %d releases", ErrPartialContent, failed)
    }

    return appReleases, nil
}
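
A hypothetical caller sketch, not part of the commit; the repository names are arbitrary and errors/fmt are assumed to be imported. Note that ErrPartialContent is returned alongside usable data, so it is worth distinguishing via errors.Is rather than discarding the result.

func printLatestReleases() {
    releases, err := FetchLatestReleasesFromGithub([]string{"golang/go", "go-gitea/gitea"}, "")

    if err != nil && !errors.Is(err, ErrPartialContent) {
        fmt.Println("fetch failed:", err)
        return
    }

    for _, release := range releases {
        fmt.Println(release.TimeReleased.Format(time.DateOnly), release.Name, release.Version)
    }
}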

internal/feed/hacker-news.go (new file, 89 lines)

package feed

import (
    "fmt"
    "log/slog"
    "net/http"
    "strconv"
    "time"
)

type hackerNewsPostResponseJson struct {
    Id           int    `json:"id"`
    Score        int    `json:"score"`
    Title        string `json:"title"`
    TargetUrl    string `json:"url,omitempty"`
    CommentCount int    `json:"descendants"`
    TimePosted   int64  `json:"time"`
}

func getHackerNewsTopPostIds() ([]int, error) {
    request, _ := http.NewRequest("GET", "https://hacker-news.firebaseio.com/v0/topstories.json", nil)
    response, err := decodeJsonFromRequest[[]int](defaultClient, request)

    if err != nil {
        return nil, fmt.Errorf("%w: could not fetch list of post IDs", ErrNoContent)
    }

    return response, nil
}

func getHackerNewsPostsFromIds(postIds []int) (ForumPosts, error) {
    requests := make([]*http.Request, len(postIds))

    for i, id := range postIds {
        request, _ := http.NewRequest("GET", fmt.Sprintf("https://hacker-news.firebaseio.com/v0/item/%d.json", id), nil)
        requests[i] = request
    }

    task := decodeJsonFromRequestTask[hackerNewsPostResponseJson](defaultClient)
    job := newJob(task, requests).withWorkers(30)
    results, errs, err := workerPoolDo(job)

    if err != nil {
        return nil, err
    }

    posts := make(ForumPosts, 0, len(postIds))

    for i := range results {
        if errs[i] != nil {
            slog.Error("Failed to fetch or parse hacker news post", "error", errs[i], "url", requests[i].URL)
            continue
        }

        posts = append(posts, ForumPost{
            Title:           results[i].Title,
            DiscussionUrl:   "https://news.ycombinator.com/item?id=" + strconv.Itoa(results[i].Id),
            TargetUrl:       results[i].TargetUrl,
            TargetUrlDomain: extractDomainFromUrl(results[i].TargetUrl),
            CommentCount:    results[i].CommentCount,
            Score:           results[i].Score,
            TimePosted:      time.Unix(results[i].TimePosted, 0),
        })
    }

    if len(posts) == 0 {
        return nil, ErrNoContent
    }

    if len(posts) != len(postIds) {
        return posts, fmt.Errorf("%w: could not fetch some hacker news posts", ErrPartialContent)
    }

    return posts, nil
}

func FetchHackerNewsTopPosts(limit int) (ForumPosts, error) {
    postIds, err := getHackerNewsTopPostIds()

    if err != nil {
        return nil, err
    }

    if len(postIds) > limit {
        postIds = postIds[:limit]
    }

    return getHackerNewsPostsFromIds(postIds)
}
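
A minimal sketch of how this might be exercised, not part of the commit (helper name arbitrary; errors/fmt assumed imported). It combines the fetcher with the engagement helpers defined in primitives.go:

func printTopHackerNewsPosts() {
    posts, err := FetchHackerNewsTopPosts(15)

    if err != nil && !errors.Is(err, ErrPartialContent) {
        return
    }

    posts.CalculateEngagement()
    posts.SortByEngagement()

    for _, post := range posts {
        fmt.Printf("%4d points, %4d comments: %s\n", post.Score, post.CommentCount, post.Title)
    }
}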

internal/feed/monitor.go (new file, 51 lines)

package feed

import (
    "context"
    "errors"
    "net/http"
    "time"
)

type SiteStatus struct {
    Code         int
    TimedOut     bool
    ResponseTime time.Duration
    Error        error
}

func getSiteStatusTask(request *http.Request) (SiteStatus, error) {
    ctx, cancel := context.WithTimeout(context.Background(), time.Second*3)
    defer cancel()
    request = request.WithContext(ctx)
    start := time.Now()
    response, err := http.DefaultClient.Do(request)
    took := time.Since(start)
    status := SiteStatus{ResponseTime: took}

    if err != nil {
        if errors.Is(err, context.DeadlineExceeded) {
            status.TimedOut = true
        }

        status.Error = err
        return status, err
    }

    defer response.Body.Close()

    status.Code = response.StatusCode

    return status, nil
}

func FetchStatusesForRequests(requests []*http.Request) ([]SiteStatus, error) {
    job := newJob(getSiteStatusTask, requests).withWorkers(20)
    results, _, err := workerPoolDo(job)

    if err != nil {
        return nil, err
    }

    return results, nil
}
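
A usage sketch, not part of the commit (URLs arbitrary, fmt assumed imported). Per-site failures land in SiteStatus.Error rather than the returned error, so all statuses come back even when some sites are down:

func printSiteStatuses() {
    urls := []string{"https://example.com", "https://example.org"}
    requests := make([]*http.Request, len(urls))

    for i := range urls {
        requests[i], _ = http.NewRequest("GET", urls[i], nil)
    }

    statuses, err := FetchStatusesForRequests(requests)

    if err != nil {
        return
    }

    for i := range statuses {
        fmt.Println(urls[i], statuses[i].Code, statuses[i].ResponseTime, "timed out:", statuses[i].TimedOut)
    }
}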

internal/feed/openmeteo.go (new file, 151 lines)

package feed

import (
    "fmt"
    "math"
    "net/http"
    "net/url"
    "slices"
    "time"

    _ "time/tzdata"
)

type PlacesResponseJson struct {
    Results []PlaceJson
}

type PlaceJson struct {
    Name      string
    Latitude  float64
    Longitude float64
    Timezone  string
    Country   string
    location  *time.Location
}

type WeatherResponseJson struct {
    Daily struct {
        Sunrise []int64 `json:"sunrise"`
        Sunset  []int64 `json:"sunset"`
    } `json:"daily"`

    Hourly struct {
        Temperature              []float64 `json:"temperature_2m"`
        PrecipitationProbability []int     `json:"precipitation_probability"`
    } `json:"hourly"`

    Current struct {
        Temperature         float64 `json:"temperature_2m"`
        ApparentTemperature float64 `json:"apparent_temperature"`
        WeatherCode         int     `json:"weather_code"`
    } `json:"current"`
}

type weatherColumn struct {
    Temperature      int
    Scale            float64
    HasPrecipitation bool
}

func FetchPlaceFromName(location string) (*PlaceJson, error) {
    requestUrl := fmt.Sprintf("https://geocoding-api.open-meteo.com/v1/search?name=%s&count=1&language=en&format=json", url.QueryEscape(location))
    request, _ := http.NewRequest("GET", requestUrl, nil)
    responseJson, err := decodeJsonFromRequest[PlacesResponseJson](defaultClient, request)

    if err != nil {
        return nil, fmt.Errorf("could not fetch places data: %v", err)
    }

    if len(responseJson.Results) == 0 {
        return nil, fmt.Errorf("no places found for %s", location)
    }

    place := &responseJson.Results[0]

    loc, err := time.LoadLocation(place.Timezone)

    if err != nil {
        return nil, fmt.Errorf("could not load location: %v", err)
    }

    place.location = loc

    return place, nil
}

func barIndexFromHour(h int) int {
    return h / 2
}

// TODO: bunch of spaget, refactor
// TODO: allow changing between C and F
func FetchWeatherForPlace(place *PlaceJson) (*Weather, error) {
    query := url.Values{}

    query.Add("latitude", fmt.Sprintf("%f", place.Latitude))
    query.Add("longitude", fmt.Sprintf("%f", place.Longitude))
    query.Add("timeformat", "unixtime")
    query.Add("timezone", place.Timezone)
    query.Add("forecast_days", "1")
    query.Add("current", "temperature_2m,apparent_temperature,weather_code,wind_speed_10m")
    query.Add("hourly", "temperature_2m,precipitation_probability")
    query.Add("daily", "sunrise,sunset")

    requestUrl := "https://api.open-meteo.com/v1/forecast?" + query.Encode()
    request, _ := http.NewRequest("GET", requestUrl, nil)
    responseJson, err := decodeJsonFromRequest[WeatherResponseJson](defaultClient, request)

    if err != nil {
        return nil, fmt.Errorf("%w: %v", ErrNoContent, err)
    }

    now := time.Now().In(place.location)
    bars := make([]weatherColumn, 0, 24)
    currentBar := barIndexFromHour(now.Hour())
    sunriseBar := barIndexFromHour(time.Unix(responseJson.Daily.Sunrise[0], 0).In(place.location).Hour())
    sunsetBar := barIndexFromHour(time.Unix(responseJson.Daily.Sunset[0], 0).In(place.location).Hour()) - 1

    if sunsetBar < 0 {
        sunsetBar = 0
    }

    if len(responseJson.Hourly.Temperature) == 24 {
        temperatures := make([]int, 12)
        precipitations := make([]bool, 12)

        t := responseJson.Hourly.Temperature
        p := responseJson.Hourly.PrecipitationProbability

        for i := 0; i < 24; i += 2 {
            if i/2 == currentBar {
                temperatures[i/2] = int(responseJson.Current.Temperature)
            } else {
                temperatures[i/2] = int(math.Round((t[i] + t[i+1]) / 2))
            }

            precipitations[i/2] = (p[i]+p[i+1])/2 > 75
        }

        minT := slices.Min(temperatures)
        maxT := slices.Max(temperatures)

        for i := 0; i < 12; i++ {
            bars = append(bars, weatherColumn{
                Temperature:      temperatures[i],
                Scale:            float64(temperatures[i]-minT) / float64(maxT-minT),
                HasPrecipitation: precipitations[i],
            })
        }
    }

    return &Weather{
        Temperature:         int(responseJson.Current.Temperature),
        ApparentTemperature: int(responseJson.Current.ApparentTemperature),
        WeatherCode:         responseJson.Current.WeatherCode,
        CurrentColumn:       currentBar,
        SunriseColumn:       sunriseBar,
        SunsetColumn:        sunsetBar,
        Columns:             bars,
    }, nil
}
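
The two calls chain naturally: geocode a name once, then fetch weather for the resolved place. A sketch, not part of the commit ("London" and the helper name are arbitrary; fmt assumed imported):

func printWeather() {
    place, err := FetchPlaceFromName("London")

    if err != nil {
        return
    }

    weather, err := FetchWeatherForPlace(place)

    if err != nil {
        return
    }

    fmt.Printf("%s: %d° (feels like %d°), %s\n", place.Name, weather.Temperature, weather.ApparentTemperature, weather.WeatherCodeAsString())
}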

internal/feed/primitives.go (new file, 183 lines)

package feed

import (
    "math"
    "sort"
    "time"
)

type ForumPost struct {
    Title           string
    DiscussionUrl   string
    TargetUrl       string
    TargetUrlDomain string
    ThumbnailUrl    string
    CommentCount    int
    Score           int
    Engagement      float64
    TimePosted      time.Time
}

type ForumPosts []ForumPost

type Calendar struct {
    CurrentDay        int
    CurrentWeekNumber int
    CurrentMonthName  string
    CurrentYear       int
    Days              []int
}

type Weather struct {
    Temperature         int
    ApparentTemperature int
    WeatherCode         int
    CurrentColumn       int
    SunriseColumn       int
    SunsetColumn        int
    Columns             []weatherColumn
}

type AppRelease struct {
    Name         string
    Version      string
    NotesUrl     string
    TimeReleased time.Time
    Downvotes    int
}

type AppReleases []AppRelease

type Video struct {
    ThumbnailUrl string
    Title        string
    Url          string
    Author       string
    AuthorUrl    string
    TimePosted   time.Time
}

type Videos []Video

type Stock struct {
    Name           string
    Symbol         string
    Price          float64
    PercentChange  float64
    SvgChartPoints string
}

type Stocks []Stock

func (t Stocks) SortByAbsChange() {
    sort.Slice(t, func(i, j int) bool {
        return math.Abs(t[i].PercentChange) > math.Abs(t[j].PercentChange)
    })
}

var weatherCodeTable = map[int]string{
    0:  "Clear Sky",
    1:  "Mainly Clear",
    2:  "Partly Cloudy",
    3:  "Overcast",
    45: "Fog",
    48: "Rime Fog",
    51: "Drizzle",
    53: "Drizzle",
    55: "Drizzle",
    56: "Drizzle",
    57: "Drizzle",
    61: "Rain",
    63: "Moderate Rain",
    65: "Heavy Rain",
    66: "Freezing Rain",
    67: "Freezing Rain",
    71: "Snow",
    73: "Moderate Snow",
    75: "Heavy Snow",
    77: "Snow Grains",
    80: "Rain",
    81: "Moderate Rain",
    82: "Heavy Rain",
    85: "Snow",
    86: "Snow",
    95: "Thunderstorm",
    96: "Thunderstorm",
    99: "Thunderstorm",
}

func (w *Weather) WeatherCodeAsString() string {
    if weatherCode, ok := weatherCodeTable[w.WeatherCode]; ok {
        return weatherCode
    }

    return ""
}

const depreciatePostsOlderThanHours = 7
const maxDepreciation = 0.9
const maxDepreciationAfterHours = 24

func (p ForumPosts) CalculateEngagement() {
    var totalComments int
    var totalScore int

    for i := range p {
        totalComments += p[i].CommentCount
        totalScore += p[i].Score
    }

    numberOfPosts := float64(len(p))
    averageComments := float64(totalComments) / numberOfPosts
    averageScore := float64(totalScore) / numberOfPosts

    for i := range p {
        p[i].Engagement = (float64(p[i].CommentCount)/averageComments + float64(p[i].Score)/averageScore) / 2

        elapsed := time.Since(p[i].TimePosted)

        if elapsed < time.Hour*depreciatePostsOlderThanHours {
            continue
        }

        // math.Min caps the elapsed time at maxDepreciationAfterHours so the
        // multiplier ramps from 1.0 down to 1-maxDepreciation and stops there,
        // rather than going negative for very old posts
        p[i].Engagement *= 1.0 - (math.Min(elapsed.Hours()-depreciatePostsOlderThanHours, maxDepreciationAfterHours)/maxDepreciationAfterHours)*maxDepreciation
    }
}

func (p ForumPosts) SortByEngagement() {
    sort.Slice(p, func(i, j int) bool {
        return p[i].Engagement > p[j].Engagement
    })
}

func (s *ForumPost) HasTargetUrl() bool {
    return s.TargetUrl != ""
}

func (p ForumPosts) FilterPostedBefore(postedBefore time.Duration) []ForumPost {
    recent := make([]ForumPost, 0, len(p))

    for i := range p {
        if time.Since(p[i].TimePosted) < postedBefore {
            recent = append(recent, p[i])
        }
    }

    return recent
}

func (r AppReleases) SortByNewest() AppReleases {
    sort.Slice(r, func(i, j int) bool {
        return r[i].TimeReleased.After(r[j].TimeReleased)
    })

    return r
}

func (v Videos) SortByNewest() Videos {
    sort.Slice(v, func(i, j int) bool {
        return v[i].TimePosted.After(v[j].TimePosted)
    })

    return v
}
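
To make the depreciation curve in CalculateEngagement concrete, here is the age multiplier factored out as a standalone function — a sketch, not part of the commit, with a couple of sample values worked out in the comments:

// Mirrors the age-depreciation step of CalculateEngagement
func depreciationMultiplier(elapsed time.Duration) float64 {
    if elapsed < time.Hour*depreciatePostsOlderThanHours {
        return 1.0
    }

    return 1.0 - (math.Min(elapsed.Hours()-depreciatePostsOlderThanHours, maxDepreciationAfterHours)/maxDepreciationAfterHours)*maxDepreciation
}

// depreciationMultiplier(8 * time.Hour)  = 1 - (1/24)*0.9  ≈ 0.9625 (1h past the 7h threshold)
// depreciationMultiplier(31 * time.Hour) = 1 - (24/24)*0.9 = 0.1    (fully depreciated)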

internal/feed/reddit.go (new file, 83 lines)

package feed

import (
    "fmt"
    "html"
    "net/http"
    "net/url"
    "time"
)

type subredditResponseJson struct {
    Data struct {
        Children []struct {
            Data struct {
                Title         string  `json:"title"`
                Upvotes       int     `json:"ups"`
                Url           string  `json:"url"`
                Time          float64 `json:"created"`
                CommentsCount int     `json:"num_comments"`
                Domain        string  `json:"domain"`
                Permalink     string  `json:"permalink"`
                Stickied      bool    `json:"stickied"`
                Pinned        bool    `json:"pinned"`
                IsSelf        bool    `json:"is_self"`
                Thumbnail     string  `json:"thumbnail"`
            } `json:"data"`
        } `json:"children"`
    } `json:"data"`
}

func FetchSubredditPosts(subreddit string) (ForumPosts, error) {
    requestUrl := fmt.Sprintf("https://www.reddit.com/r/%s/hot.json", url.QueryEscape(subreddit))
    request, err := http.NewRequest("GET", requestUrl, nil)

    if err != nil {
        return nil, err
    }

    // Required to increase rate limit, otherwise Reddit randomly returns 429 even after just 2 requests
    addBrowserUserAgentHeader(request)
    responseJson, err := decodeJsonFromRequest[subredditResponseJson](defaultClient, request)

    if err != nil {
        return nil, err
    }

    if len(responseJson.Data.Children) == 0 {
        return nil, fmt.Errorf("no posts found")
    }

    posts := make(ForumPosts, 0, len(responseJson.Data.Children))

    for i := range responseJson.Data.Children {
        post := &responseJson.Data.Children[i].Data

        if post.Stickied || post.Pinned {
            continue
        }

        forumPost := ForumPost{
            Title:           html.UnescapeString(post.Title),
            DiscussionUrl:   "https://www.reddit.com" + post.Permalink,
            TargetUrlDomain: post.Domain,
            CommentCount:    post.CommentsCount,
            Score:           post.Upvotes,
            TimePosted:      time.Unix(int64(post.Time), 0),
        }

        if post.Thumbnail != "" && post.Thumbnail != "self" && post.Thumbnail != "default" {
            forumPost.ThumbnailUrl = post.Thumbnail
        }

        if !post.IsSelf {
            forumPost.TargetUrl = post.Url
        }

        posts = append(posts, forumPost)
    }

    posts.CalculateEngagement()

    return posts, nil
}
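
A usage sketch, not part of the commit (subreddit name and helper arbitrary; fmt assumed imported). Since FetchSubredditPosts already calls CalculateEngagement, the result can be sorted and filtered directly:

func printSubredditPosts() {
    posts, err := FetchSubredditPosts("selfhosted")

    if err != nil {
        return
    }

    posts.SortByEngagement()

    for _, post := range posts.FilterPostedBefore(24 * time.Hour) {
        fmt.Println(post.Title, "-", post.DiscussionUrl)
    }
}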

internal/feed/requests.go (new file, 195 lines)

package feed

import (
    "context"
    "encoding/json"
    "encoding/xml"
    "fmt"
    "io"
    "net/http"
    "sync"
    "time"
)

var defaultClient = &http.Client{
    Timeout: 5 * time.Second,
}

type RequestDoer interface {
    Do(*http.Request) (*http.Response, error)
}

func addBrowserUserAgentHeader(request *http.Request) {
    request.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:123.0) Gecko/20100101 Firefox/123.0")
}

func decodeJsonFromRequest[T any](client RequestDoer, request *http.Request) (T, error) {
    response, err := client.Do(request)
    var result T

    if err != nil {
        return result, err
    }

    defer response.Body.Close()

    body, err := io.ReadAll(response.Body)

    if err != nil {
        return result, err
    }

    if response.StatusCode != http.StatusOK {
        return result, fmt.Errorf("unexpected status code %d for %s, response: %s", response.StatusCode, request.URL, string(body))
    }

    err = json.Unmarshal(body, &result)

    if err != nil {
        return result, err
    }

    return result, nil
}

func decodeJsonFromRequestTask[T any](client RequestDoer) func(*http.Request) (T, error) {
    return func(request *http.Request) (T, error) {
        return decodeJsonFromRequest[T](client, request)
    }
}

// TODO: tidy up, these are a copy of the above but with a line changed
func decodeXmlFromRequest[T any](client RequestDoer, request *http.Request) (T, error) {
    response, err := client.Do(request)
    var result T

    if err != nil {
        return result, err
    }

    defer response.Body.Close()

    body, err := io.ReadAll(response.Body)

    if err != nil {
        return result, err
    }

    if response.StatusCode != http.StatusOK {
        return result, fmt.Errorf("unexpected status code %d for %s, response: %s", response.StatusCode, request.URL, string(body))
    }

    err = xml.Unmarshal(body, &result)

    if err != nil {
        return result, err
    }

    return result, nil
}

func decodeXmlFromRequestTask[T any](client RequestDoer) func(*http.Request) (T, error) {
    return func(request *http.Request) (T, error) {
        return decodeXmlFromRequest[T](client, request)
    }
}

type workerPoolTask[I any, O any] struct {
    index  int
    input  I
    output O
    err    error
}

type workerPoolJob[I any, O any] struct {
    data    []I
    workers int
    task    func(I) (O, error)
    ctx     context.Context
}

const defaultNumWorkers = 10

func (job *workerPoolJob[I, O]) withWorkers(workers int) *workerPoolJob[I, O] {
    if workers == 0 {
        job.workers = defaultNumWorkers
    } else if workers > len(job.data) {
        job.workers = len(job.data)
    } else {
        job.workers = workers
    }

    return job
}

// func (job *workerPoolJob[I, O]) withContext(ctx context.Context) *workerPoolJob[I, O] {
// 	if ctx != nil {
// 		job.ctx = ctx
// 	}

// 	return job
// }

func newJob[I any, O any](task func(I) (O, error), data []I) *workerPoolJob[I, O] {
    return &workerPoolJob[I, O]{
        workers: defaultNumWorkers,
        task:    task,
        data:    data,
        ctx:     context.Background(),
    }
}

func workerPoolDo[I any, O any](job *workerPoolJob[I, O]) ([]O, []error, error) {
    results := make([]O, len(job.data))
    errs := make([]error, len(job.data))

    if len(job.data) == 0 {
        return results, errs, nil
    }

    tasksQueue := make(chan *workerPoolTask[I, O])
    resultsQueue := make(chan *workerPoolTask[I, O])

    var wg sync.WaitGroup

    for range job.workers {
        wg.Add(1)
        go func() {
            defer wg.Done()

            for t := range tasksQueue {
                t.output, t.err = job.task(t.input)
                resultsQueue <- t
            }
        }()
    }

    var err error

    go func() {
    loop:
        for i := range job.data {
            select {
            default:
                tasksQueue <- &workerPoolTask[I, O]{
                    index: i,
                    input: job.data[i],
                }
            case <-job.ctx.Done():
                err = job.ctx.Err()
                break loop
            }
        }

        close(tasksQueue)
        wg.Wait()
        close(resultsQueue)
    }()

    for task := range resultsQueue {
        errs[task.index] = task.err
        results[task.index] = task.output
    }

    return results, errs, err
}
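
The worker pool is generic over any task type, not just HTTP requests. A minimal sketch, not part of the commit, showing the newJob / withWorkers / workerPoolDo flow with a trivial task (fmt assumed imported). Because each task carries its index, results and errors come back positionally aligned with the input slice regardless of completion order:

func doubleAll() {
    task := func(n int) (int, error) { return n * 2, nil }
    job := newJob(task, []int{1, 2, 3, 4}).withWorkers(2)
    results, errs, err := workerPoolDo(job)
    fmt.Println(results, errs, err) // [2 4 6 8] [<nil> <nil> <nil> <nil>] <nil>
}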

internal/feed/rss.go (new file, 117 lines)

package feed

import (
    "context"
    "fmt"
    "log/slog"
    "sort"
    "time"

    "github.com/mmcdole/gofeed"
)

type RSSFeedItem struct {
    ChannelName string
    ChannelURL  string
    Title       string
    Link        string
    ImageURL    string
    PublishedAt time.Time
}

type RSSFeedRequest struct {
    Url   string `yaml:"url"`
    Title string `yaml:"title"`
}

type RSSFeedItems []RSSFeedItem

func (f RSSFeedItems) SortByNewest() RSSFeedItems {
    sort.Slice(f, func(i, j int) bool {
        return f[i].PublishedAt.After(f[j].PublishedAt)
    })

    return f
}

var feedParser = gofeed.NewParser()

func getItemsFromRSSFeedTask(request RSSFeedRequest) ([]RSSFeedItem, error) {
    ctx, cancel := context.WithTimeout(context.Background(), time.Second*5)
    defer cancel()

    feed, err := feedParser.ParseURLWithContext(request.Url, ctx)

    if err != nil {
        return nil, err
    }

    items := make(RSSFeedItems, 0, len(feed.Items))

    for i := range feed.Items {
        item := feed.Items[i]

        rssItem := RSSFeedItem{
            ChannelURL: feed.Link,
            Title:      item.Title,
            Link:       item.Link,
        }

        if request.Title != "" {
            rssItem.ChannelName = request.Title
        } else {
            rssItem.ChannelName = feed.Title
        }

        if item.Image != nil {
            rssItem.ImageURL = item.Image.URL
        } else if feed.Image != nil {
            rssItem.ImageURL = feed.Image.URL
        }

        if item.PublishedParsed != nil {
            rssItem.PublishedAt = *item.PublishedParsed
        } else {
            rssItem.PublishedAt = time.Now()
        }

        items = append(items, rssItem)
    }

    return items, nil
}

func GetItemsFromRSSFeeds(requests []RSSFeedRequest) (RSSFeedItems, error) {
    job := newJob(getItemsFromRSSFeedTask, requests).withWorkers(10)
    feeds, errs, err := workerPoolDo(job)

    if err != nil {
        return nil, fmt.Errorf("%w: %v", ErrNoContent, err)
    }

    failed := 0

    entries := make(RSSFeedItems, 0, len(feeds)*10)

    for i := range feeds {
        if errs[i] != nil {
            failed++
            slog.Error("failed to get rss feed", "error", errs[i], "url", requests[i].Url)
            continue
        }

        entries = append(entries, feeds[i]...)
    }

    if len(entries) == 0 {
        return nil, ErrNoContent
    }

    entries.SortByNewest()

    if failed > 0 {
        return entries, fmt.Errorf("%w: missing %d RSS feeds", ErrPartialContent, failed)
    }

    return entries, nil
}
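
A caller sketch, not part of the commit; the feed URL and title are arbitrary examples, and errors/fmt are assumed imported. The optional Title override replaces whatever channel name the feed itself reports:

func printFeedItems() {
    requests := []RSSFeedRequest{
        {Url: "https://go.dev/blog/feed.atom", Title: "Go Blog"},
    }

    items, err := GetItemsFromRSSFeeds(requests)

    if err != nil && !errors.Is(err, ErrPartialContent) {
        return
    }

    for _, item := range items {
        fmt.Println(item.PublishedAt.Format(time.DateOnly), item.ChannelName, "-", item.Title)
    }
}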

internal/feed/twitch.go (new file, 248 lines)

package feed

import (
    "encoding/json"
    "errors"
    "fmt"
    "log/slog"
    "net/http"
    "slices"
    "sort"
    "strings"
    "time"
)

type TwitchCategory struct {
    Slug         string `json:"slug"`
    Name         string `json:"name"`
    AvatarUrl    string `json:"avatarURL"`
    ViewersCount int    `json:"viewersCount"`
    Tags         []struct {
        Name string `json:"tagName"`
    } `json:"tags"`
    GameReleaseDate string `json:"originalReleaseDate"`
    IsNew           bool   `json:"-"`
}

type TwitchChannel struct {
    Login        string
    Exists       bool
    Name         string
    AvatarUrl    string
    IsLive       bool
    LiveSince    time.Time
    Category     string
    CategorySlug string
    ViewersCount int
}

type TwitchChannels []TwitchChannel

func (channels TwitchChannels) SortByViewers() {
    sort.Slice(channels, func(i, j int) bool {
        return channels[i].ViewersCount > channels[j].ViewersCount
    })
}

type twitchOperationResponse struct {
    Data       json.RawMessage
    Extensions struct {
        OperationName string `json:"operationName"`
    }
}

type twitchChannelShellOperationResponse struct {
    UserOrError struct {
        Type            string `json:"__typename"`
        DisplayName     string `json:"displayName"`
        ProfileImageUrl string `json:"profileImageURL"`
        Stream          *struct {
            ViewersCount int `json:"viewersCount"`
        }
    } `json:"userOrError"`
}

type twitchStreamMetadataOperationResponse struct {
    UserOrNull *struct {
        Stream *struct {
            StartedAt string `json:"createdAt"`
            Game      *struct {
                Slug string `json:"slug"`
                Name string `json:"name"`
            } `json:"game"`
        } `json:"stream"`
    } `json:"user"`
}

type twitchDirectoriesOperationResponse struct {
    Data struct {
        DirectoriesWithTags struct {
            Edges []struct {
                Node TwitchCategory `json:"node"`
            } `json:"edges"`
        } `json:"directoriesWithTags"`
    } `json:"data"`
}

const twitchGqlEndpoint = "https://gql.twitch.tv/gql"
const twitchGqlClientId = "kimne78kx3ncx6brgo4mv6wki5h1ko"

const twitchDirectoriesOperationRequestBody = `[{"operationName": "BrowsePage_AllDirectories","variables": {"limit": %d,"options": {"sort": "VIEWER_COUNT","tags": []}},"extensions": {"persistedQuery": {"version": 1,"sha256Hash": "2f67f71ba89f3c0ed26a141ec00da1defecb2303595f5cda4298169549783d9e"}}}]`

func FetchTopGamesFromTwitch(exclude []string, limit int) ([]TwitchCategory, error) {
    reader := strings.NewReader(fmt.Sprintf(twitchDirectoriesOperationRequestBody, len(exclude)+limit))
    request, _ := http.NewRequest("POST", twitchGqlEndpoint, reader)
    request.Header.Add("Client-ID", twitchGqlClientId)
    response, err := decodeJsonFromRequest[[]twitchDirectoriesOperationResponse](defaultClient, request)

    if err != nil {
        return nil, err
    }

    if len(response) == 0 {
        return nil, errors.New("no categories could be retrieved")
    }

    edges := response[0].Data.DirectoriesWithTags.Edges
    categories := make([]TwitchCategory, 0, len(edges))

    for i := range edges {
        if slices.Contains(exclude, edges[i].Node.Slug) {
            continue
        }

        category := &edges[i].Node
        category.AvatarUrl = strings.Replace(category.AvatarUrl, "285x380", "144x192", 1)

        if len(category.Tags) > 2 {
            category.Tags = category.Tags[:2]
        }

        gameReleasedDate, err := time.Parse("2006-01-02T15:04:05Z", category.GameReleaseDate)

        if err == nil {
            if time.Since(gameReleasedDate) < 14*24*time.Hour {
                category.IsNew = true
            }
        }

        categories = append(categories, *category)
    }

    if len(categories) > limit {
        categories = categories[:limit]
    }

    return categories, nil
}

const twitchChannelStatusOperationRequestBody = `[{"operationName":"ChannelShell","variables":{"login":"%s"},"extensions":{"persistedQuery":{"version":1,"sha256Hash":"580ab410bcd0c1ad194224957ae2241e5d252b2c5173d8e0cce9d32d5bb14efe"}}},{"operationName":"StreamMetadata","variables":{"channelLogin":"%s"},"extensions":{"persistedQuery":{"version":1,"sha256Hash":"676ee2f834ede42eb4514cdb432b3134fefc12590080c9a2c9bb44a2a4a63266"}}}]`

// TODO: rework
// The operations for multiple channels can all be sent in a single request
// rather than sending a separate request for each channel. Need to figure out
// what the limit is for max operations per request and batch operations in
// multiple requests if number of channels exceeds allowed limit.

func fetchChannelFromTwitchTask(channel string) (TwitchChannel, error) {
    result := TwitchChannel{
        Login: strings.ToLower(channel),
    }

    reader := strings.NewReader(fmt.Sprintf(twitchChannelStatusOperationRequestBody, channel, channel))
    request, _ := http.NewRequest("POST", twitchGqlEndpoint, reader)
    request.Header.Add("Client-ID", twitchGqlClientId)

    response, err := decodeJsonFromRequest[[]twitchOperationResponse](defaultClient, request)

    if err != nil {
        return result, err
    }

    if len(response) != 2 {
        return result, fmt.Errorf("expected 2 operation responses, got %d", len(response))
    }

    var channelShell twitchChannelShellOperationResponse
    var streamMetadata twitchStreamMetadataOperationResponse

    for i := range response {
        switch response[i].Extensions.OperationName {
        case "ChannelShell":
            err = json.Unmarshal(response[i].Data, &channelShell)

            if err != nil {
                return result, fmt.Errorf("failed to unmarshal channel shell: %w", err)
            }
        case "StreamMetadata":
            err = json.Unmarshal(response[i].Data, &streamMetadata)

            if err != nil {
                return result, fmt.Errorf("failed to unmarshal stream metadata: %w", err)
            }
        default:
            return result, fmt.Errorf("unknown operation name: %s", response[i].Extensions.OperationName)
        }
    }

    if channelShell.UserOrError.Type != "User" {
        result.Name = result.Login
        return result, nil
    }

    result.Exists = true
    result.Name = channelShell.UserOrError.DisplayName
    result.AvatarUrl = channelShell.UserOrError.ProfileImageUrl

    if channelShell.UserOrError.Stream != nil {
        result.IsLive = true
        result.ViewersCount = channelShell.UserOrError.Stream.ViewersCount

        if streamMetadata.UserOrNull != nil && streamMetadata.UserOrNull.Stream != nil && streamMetadata.UserOrNull.Stream.Game != nil {
            result.Category = streamMetadata.UserOrNull.Stream.Game.Name
            result.CategorySlug = streamMetadata.UserOrNull.Stream.Game.Slug
            startedAt, err := time.Parse("2006-01-02T15:04:05Z", streamMetadata.UserOrNull.Stream.StartedAt)

            if err == nil {
                result.LiveSince = startedAt
            } else {
                slog.Warn("failed to parse twitch stream started at", "error", err, "started_at", streamMetadata.UserOrNull.Stream.StartedAt)
            }
        }
    }

    return result, nil
}

func FetchChannelsFromTwitch(channelLogins []string) (TwitchChannels, error) {
    result := make(TwitchChannels, 0, len(channelLogins))

    job := newJob(fetchChannelFromTwitchTask, channelLogins).withWorkers(10)
    channels, errs, err := workerPoolDo(job)

    if err != nil {
        return result, err
    }

    var failed int

    for i := range channels {
        if errs[i] != nil {
            failed++
            slog.Warn("failed to fetch twitch channel", "channel", channelLogins[i], "error", errs[i])
            continue
        }

        result = append(result, channels[i])
    }

    if failed == len(channelLogins) {
        return result, ErrNoContent
    }

    if failed > 0 {
        return result, fmt.Errorf("%w: failed to fetch %d channels", ErrPartialContent, failed)
    }

    return result, nil
}
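
A caller sketch, not part of the commit; the channel logins are arbitrary examples and fmt is assumed imported. Channels that do not exist still come back (Exists == false), so the caller can distinguish "offline" from "not found":

func printTwitchChannels() {
    channels, err := FetchChannelsFromTwitch([]string{"sodapoppin", "lirik"})

    if err != nil && !errors.Is(err, ErrPartialContent) {
        return
    }

    channels.SortByViewers()

    for _, channel := range channels {
        if channel.IsLive {
            fmt.Printf("%s is live (%s) with %d viewers\n", channel.Name, channel.Category, channel.ViewersCount)
        } else {
            fmt.Printf("%s is offline\n", channel.Name)
        }
    }
}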

internal/feed/utils.go (new file, 79 lines)

package feed

import (
    "errors"
    "fmt"
    "net/url"
    "slices"
    "strings"
)

var (
    ErrNoContent      = errors.New("failed to retrieve any content")
    ErrPartialContent = errors.New("failed to retrieve some of the content")
)

func percentChange(current, previous float64) float64 {
    return (current/previous - 1) * 100
}

func extractDomainFromUrl(u string) string {
    if u == "" {
        return ""
    }

    parsed, err := url.Parse(u)

    if err != nil {
        return ""
    }

    return strings.TrimPrefix(parsed.Host, "www.")
}

func SvgPolylineCoordsFromYValues(width float64, height float64, values []float64) string {
    if len(values) < 2 {
        return ""
    }

    verticalPadding := height * 0.02
    height -= verticalPadding * 2
    coordinates := make([]string, len(values))
    distanceBetweenPoints := width / float64(len(values)-1)
    min := slices.Min(values)
    max := slices.Max(values)

    for i := range values {
        coordinates[i] = fmt.Sprintf(
            "%.2f,%.2f",
            float64(i)*distanceBetweenPoints,
            ((max-values[i])/(max-min))*height+verticalPadding,
        )
    }

    return strings.Join(coordinates, " ")
}

func maybeCopySliceWithoutZeroValues[T int | float64](values []T) []T {
    if len(values) == 0 {
        return values
    }

    for i := range values {
        if values[i] != 0 {
            continue
        }

        c := make([]T, 0, len(values)-1)

        for i := range values {
            if values[i] != 0 {
                c = append(c, values[i])
            }
        }

        return c
    }

    return values
}
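
A sketch, not part of the commit, of what SvgPolylineCoordsFromYValues produces for a 100x50 chart (helper name arbitrary; fmt assumed imported). The output is ready to drop into an SVG <polyline points="..."> attribute:

func printChartPoints() {
    points := SvgPolylineCoordsFromYValues(100, 50, []float64{3, 1, 4, 1, 5})
    fmt.Println(points)
    // 0.00,25.00 25.00,49.00 50.00,13.00 75.00,49.00 100.00,1.00
    // (y is inverted for SVG: the max value 5 sits near the top at y = 1)
}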

internal/feed/yahoo.go (new file, 102 lines)

package feed

import (
    "fmt"
    "log/slog"
    "net/http"
)

type stockResponseJson struct {
    Chart struct {
        Result []struct {
            Meta struct {
                Symbol             string  `json:"symbol"`
                RegularMarketPrice float64 `json:"regularMarketPrice"`
                ChartPreviousClose float64 `json:"chartPreviousClose"`
            } `json:"meta"`
            Indicators struct {
                Quote []struct {
                    Close []float64 `json:"close,omitempty"`
                } `json:"quote"`
            } `json:"indicators"`
        } `json:"result"`
    } `json:"chart"`
}

type StockRequest struct {
    Symbol string
    Name   string
}

// TODO: allow changing chart time frame
const stockChartDays = 21

func FetchStocksDataFromYahoo(stockRequests []StockRequest) (Stocks, error) {
    requests := make([]*http.Request, 0, len(stockRequests))

    for i := range stockRequests {
        request, _ := http.NewRequest("GET", fmt.Sprintf("https://query1.finance.yahoo.com/v8/finance/chart/%s?range=1mo&interval=1d", stockRequests[i].Symbol), nil)
        requests = append(requests, request)
    }

    job := newJob(decodeJsonFromRequestTask[stockResponseJson](defaultClient), requests)
    responses, errs, err := workerPoolDo(job)

    if err != nil {
        return nil, fmt.Errorf("%w: %v", ErrNoContent, err)
    }

    stocks := make(Stocks, 0, len(responses))
    var failed int

    for i := range responses {
        if errs[i] != nil {
            failed++
            slog.Error("Failed to fetch stock data", "symbol", stockRequests[i].Symbol, "error", errs[i])
            continue
        }

        response := responses[i]

        if len(response.Chart.Result) == 0 {
            failed++
            slog.Error("Stock response contains no data", "symbol", stockRequests[i].Symbol)
            continue
        }

        prices := response.Chart.Result[0].Indicators.Quote[0].Close

        if len(prices) > stockChartDays {
            prices = prices[len(prices)-stockChartDays:]
        }

        previous := response.Chart.Result[0].Meta.RegularMarketPrice

        if len(prices) >= 2 && prices[len(prices)-2] != 0 {
            previous = prices[len(prices)-2]
        }

        points := SvgPolylineCoordsFromYValues(100, 50, maybeCopySliceWithoutZeroValues(prices))

        stocks = append(stocks, Stock{
            Name:   stockRequests[i].Name,
            Symbol: response.Chart.Result[0].Meta.Symbol,
            Price:  response.Chart.Result[0].Meta.RegularMarketPrice,
            PercentChange: percentChange(
                response.Chart.Result[0].Meta.RegularMarketPrice,
                previous,
            ),
            SvgChartPoints: points,
        })
    }

    if len(stocks) == 0 {
        return nil, ErrNoContent
    }

    if failed > 0 {
        return stocks, fmt.Errorf("%w: could not fetch data for %d stock(s)", ErrPartialContent, failed)
    }

    return stocks, nil
}
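
A caller sketch, not part of the commit; the symbols are arbitrary examples and errors/fmt are assumed imported:

func printStocks() {
    stocks, err := FetchStocksDataFromYahoo([]StockRequest{
        {Symbol: "^GSPC", Name: "S&P 500"},
        {Symbol: "AAPL", Name: "Apple"},
    })

    if err != nil && !errors.Is(err, ErrPartialContent) {
        return
    }

    stocks.SortByAbsChange()

    for _, stock := range stocks {
        fmt.Printf("%-8s %10.2f %+6.2f%%\n", stock.Symbol, stock.Price, stock.PercentChange)
    }
}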

internal/feed/youtube.go (new file, 100 lines)

package feed

import (
    "fmt"
    "log/slog"
    "net/http"
    "strings"
    "time"
)

type youtubeFeedResponseXml struct {
    Channel     string `xml:"title"`
    ChannelLink struct {
        Href string `xml:"href,attr"`
    } `xml:"link"`
    Videos []struct {
        Title     string `xml:"title"`
        Published string `xml:"published"`
        Link      struct {
            Href string `xml:"href,attr"`
        } `xml:"link"`

        Group struct {
            Thumbnail struct {
                Url string `xml:"url,attr"`
            } `xml:"http://search.yahoo.com/mrss/ thumbnail"`
        } `xml:"http://search.yahoo.com/mrss/ group"`
    } `xml:"entry"`
}

func parseYoutubeFeedTime(t string) time.Time {
    parsedTime, err := time.Parse("2006-01-02T15:04:05-07:00", t)

    if err != nil {
        return time.Now()
    }

    return parsedTime
}

func FetchYoutubeChannelUploads(channelIds []string) (Videos, error) {
    requests := make([]*http.Request, 0, len(channelIds))

    for i := range channelIds {
        request, _ := http.NewRequest("GET", "https://www.youtube.com/feeds/videos.xml?channel_id="+channelIds[i], nil)
        requests = append(requests, request)
    }

    job := newJob(decodeXmlFromRequestTask[youtubeFeedResponseXml](defaultClient), requests).withWorkers(30)

    responses, errs, err := workerPoolDo(job)

    if err != nil {
        return nil, fmt.Errorf("%w: %v", ErrNoContent, err)
    }

    videos := make(Videos, 0, len(channelIds)*15)

    var failed int

    for i := range responses {
        if errs[i] != nil {
            failed++
            slog.Error("Failed to fetch youtube feed", "channel", channelIds[i], "error", errs[i])
            continue
        }

        response := responses[i]

        for j := range response.Videos {
            video := &response.Videos[j]

            // TODO: figure out a better way of skipping shorts
            if strings.Contains(video.Title, "#shorts") {
                continue
            }

            videos = append(videos, Video{
                ThumbnailUrl: video.Group.Thumbnail.Url,
                Title:        video.Title,
                Url:          video.Link.Href,
                Author:       response.Channel,
                AuthorUrl:    response.ChannelLink.Href + "/videos",
                TimePosted:   parseYoutubeFeedTime(video.Published),
            })
        }
    }

    if len(videos) == 0 {
        return nil, ErrNoContent
    }

    videos.SortByNewest()

    if failed > 0 {
        return videos, fmt.Errorf("%w: missing videos from %d channels", ErrPartialContent, failed)
    }

    return videos, nil
}
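
A caller sketch, not part of the commit; the channel ID is an arbitrary example value and errors/fmt are assumed imported:

func printChannelUploads() {
    // arbitrary example channel ID
    videos, err := FetchYoutubeChannelUploads([]string{"UCXuqSBlHAE6Xw-yeJA0Tunw"})

    if err != nil && !errors.Is(err, ErrPartialContent) {
        return
    }

    for _, video := range videos {
        fmt.Println(video.TimePosted.Format(time.DateOnly), video.Author, "-", video.Title)
    }
}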