Mirror of https://github.com/Xevion/glance.git (synced 2025-12-10 06:07:20 -06:00)

Merge branch 'release/v0.6.0' into github-commit
139  internal/feed/changedetection.go  Normal file
@@ -0,0 +1,139 @@
package feed

import (
    "fmt"
    "log/slog"
    "net/http"
    "sort"
    "strings"
    "time"
)

type ChangeDetectionWatch struct {
    Title        string
    URL          string
    LastChanged  time.Time
    DiffURL      string
    PreviousHash string
}

type ChangeDetectionWatches []ChangeDetectionWatch

func (r ChangeDetectionWatches) SortByNewest() ChangeDetectionWatches {
    sort.Slice(r, func(i, j int) bool {
        return r[i].LastChanged.After(r[j].LastChanged)
    })

    return r
}

type changeDetectionResponseJson struct {
    Title        string `json:"title"`
    URL          string `json:"url"`
    LastChanged  int64  `json:"last_changed"`
    DateCreated  int64  `json:"date_created"`
    PreviousHash string `json:"previous_md5"`
}

func FetchWatchUUIDsFromChangeDetection(instanceURL string, token string) ([]string, error) {
    request, _ := http.NewRequest("GET", fmt.Sprintf("%s/api/v1/watch", instanceURL), nil)

    if token != "" {
        request.Header.Add("x-api-key", token)
    }

    uuidsMap, err := decodeJsonFromRequest[map[string]struct{}](defaultClient, request)

    if err != nil {
        return nil, fmt.Errorf("could not fetch list of watch UUIDs: %v", err)
    }

    uuids := make([]string, 0, len(uuidsMap))

    for uuid := range uuidsMap {
        uuids = append(uuids, uuid)
    }

    return uuids, nil
}

func FetchWatchesFromChangeDetection(instanceURL string, requestedWatchIDs []string, token string) (ChangeDetectionWatches, error) {
    watches := make(ChangeDetectionWatches, 0, len(requestedWatchIDs))

    if len(requestedWatchIDs) == 0 {
        return watches, nil
    }

    requests := make([]*http.Request, len(requestedWatchIDs))

    for i, watchID := range requestedWatchIDs {
        request, _ := http.NewRequest("GET", fmt.Sprintf("%s/api/v1/watch/%s", instanceURL, watchID), nil)

        if token != "" {
            request.Header.Add("x-api-key", token)
        }

        requests[i] = request
    }

    task := decodeJsonFromRequestTask[changeDetectionResponseJson](defaultClient)
    job := newJob(task, requests).withWorkers(15)
    responses, errs, err := workerPoolDo(job)

    if err != nil {
        return nil, err
    }

    var failed int

    for i := range responses {
        if errs[i] != nil {
            failed++
            slog.Error("Failed to fetch or parse change detection watch", "error", errs[i], "url", requests[i].URL)
            continue
        }

        watchJson := responses[i]

        watch := ChangeDetectionWatch{
            URL:     watchJson.URL,
            DiffURL: fmt.Sprintf("%s/diff/%s?from_version=%d", instanceURL, requestedWatchIDs[i], watchJson.LastChanged-1),
        }

        if watchJson.LastChanged == 0 {
            watch.LastChanged = time.Unix(watchJson.DateCreated, 0)
        } else {
            watch.LastChanged = time.Unix(watchJson.LastChanged, 0)
        }

        if watchJson.Title != "" {
            watch.Title = watchJson.Title
        } else {
            watch.Title = strings.TrimPrefix(strings.Trim(stripURLScheme(watchJson.URL), "/"), "www.")
        }

        if watchJson.PreviousHash != "" {
            hashLength := 8

            if len(watchJson.PreviousHash) < hashLength {
                hashLength = len(watchJson.PreviousHash)
            }

            watch.PreviousHash = watchJson.PreviousHash[0:hashLength]
        }

        watches = append(watches, watch)
    }

    if len(watches) == 0 {
        return nil, ErrNoContent
    }

    watches.SortByNewest()

    if failed > 0 {
        return watches, fmt.Errorf("%w: could not get %d watches", ErrPartialContent, failed)
    }

    return watches, nil
}
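Hypothetical usage sketch (not part of this commit): the two exported functions chain naturally, with the UUID listing feeding the per-watch detail fetches. Written as if it sat inside the same feed package; the instance URL and token are made up.

func exampleChangeDetection() error {
    instance := "https://changedetection.example.com"
    token := "my-api-key"

    // List every watch UUID known to the instance...
    uuids, err := FetchWatchUUIDsFromChangeDetection(instance, token)
    if err != nil {
        return err
    }

    // ...then fetch the details of each watch through the worker pool.
    watches, err := FetchWatchesFromChangeDetection(instance, uuids, token)
    if err != nil {
        return err
    }

    for _, watch := range watches {
        fmt.Printf("%s changed at %s: %s\n", watch.Title, watch.LastChanged.Format(time.RFC822), watch.DiffURL)
    }

    return nil
}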
102  internal/feed/dockerhub.go  Normal file
@@ -0,0 +1,102 @@
package feed

import (
    "fmt"
    "net/http"
    "strings"
)

type dockerHubRepositoryTagsResponse struct {
    Results []dockerHubRepositoryTagResponse `json:"results"`
}

type dockerHubRepositoryTagResponse struct {
    Name       string `json:"name"`
    LastPushed string `json:"tag_last_pushed"`
}

const dockerHubOfficialRepoTagURLFormat = "https://hub.docker.com/_/%s/tags?name=%s"
const dockerHubRepoTagURLFormat = "https://hub.docker.com/r/%s/tags?name=%s"
const dockerHubTagsURLFormat = "https://hub.docker.com/v2/namespaces/%s/repositories/%s/tags"
const dockerHubSpecificTagURLFormat = "https://hub.docker.com/v2/namespaces/%s/repositories/%s/tags/%s"

func fetchLatestDockerHubRelease(request *ReleaseRequest) (*AppRelease, error) {
    nameParts := strings.Split(request.Repository, "/")

    if len(nameParts) > 2 {
        return nil, fmt.Errorf("invalid repository name: %s", request.Repository)
    } else if len(nameParts) == 1 {
        nameParts = []string{"library", nameParts[0]}
    }

    tagParts := strings.SplitN(nameParts[1], ":", 2)

    var requestURL string

    if len(tagParts) == 2 {
        requestURL = fmt.Sprintf(dockerHubSpecificTagURLFormat, nameParts[0], tagParts[0], tagParts[1])
    } else {
        requestURL = fmt.Sprintf(dockerHubTagsURLFormat, nameParts[0], nameParts[1])
    }

    httpRequest, err := http.NewRequest("GET", requestURL, nil)

    if err != nil {
        return nil, err
    }

    if request.Token != nil {
        httpRequest.Header.Add("Authorization", "Bearer "+(*request.Token))
    }

    var tag *dockerHubRepositoryTagResponse

    if len(tagParts) == 1 {
        response, err := decodeJsonFromRequest[dockerHubRepositoryTagsResponse](defaultClient, httpRequest)

        if err != nil {
            return nil, err
        }

        if len(response.Results) == 0 {
            return nil, fmt.Errorf("no tags found for repository: %s", request.Repository)
        }

        tag = &response.Results[0]
    } else {
        response, err := decodeJsonFromRequest[dockerHubRepositoryTagResponse](defaultClient, httpRequest)

        if err != nil {
            return nil, err
        }

        tag = &response
    }

    var repo string
    var displayName string
    var notesURL string

    if len(tagParts) == 1 {
        repo = nameParts[1]
    } else {
        repo = tagParts[0]
    }

    if nameParts[0] == "library" {
        displayName = repo
        notesURL = fmt.Sprintf(dockerHubOfficialRepoTagURLFormat, repo, tag.Name)
    } else {
        displayName = nameParts[0] + "/" + repo
        notesURL = fmt.Sprintf(dockerHubRepoTagURLFormat, displayName, tag.Name)
    }

    return &AppRelease{
        Source:       ReleaseSourceDockerHub,
        NotesUrl:     notesURL,
        Name:         displayName,
        Version:      tag.Name,
        TimeReleased: parseRFC3339Time(tag.LastPushed),
    }, nil
}
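The repository string drives both the endpoint and the notes link: a bare name such as redis is rewritten to library/redis, and an optional :tag suffix switches from the tag-list endpoint to the specific-tag endpoint. A hypothetical call (not part of this commit), written as if inside the feed package:

func exampleDockerHubRelease() error {
    // "redis" would be treated as library/redis; "grafana/grafana:latest"
    // would hit the specific-tag endpoint instead of the tag list.
    release, err := fetchLatestDockerHubRelease(&ReleaseRequest{
        Source:     ReleaseSourceDockerHub,
        Repository: "grafana/grafana",
    })
    if err != nil {
        return err
    }

    fmt.Println(release.Name, release.Version, release.NotesUrl)
    return nil
}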
97  internal/feed/extension.go  Normal file
@@ -0,0 +1,97 @@
package feed

import (
    "fmt"
    "html"
    "html/template"
    "io"
    "log/slog"
    "net/http"
    "net/url"
)

type ExtensionType int

const (
    ExtensionContentHTML    ExtensionType = iota
    ExtensionContentUnknown               = iota
)

var ExtensionStringToType = map[string]ExtensionType{
    "html": ExtensionContentHTML,
}

const (
    ExtensionHeaderTitle       = "Widget-Title"
    ExtensionHeaderContentType = "Widget-Content-Type"
)

type ExtensionRequestOptions struct {
    URL        string            `yaml:"url"`
    Parameters map[string]string `yaml:"parameters"`
    AllowHtml  bool              `yaml:"allow-potentially-dangerous-html"`
}

type Extension struct {
    Title   string
    Content template.HTML
}

func convertExtensionContent(options ExtensionRequestOptions, content []byte, contentType ExtensionType) template.HTML {
    switch contentType {
    case ExtensionContentHTML:
        if options.AllowHtml {
            return template.HTML(content)
        }

        fallthrough
    default:
        return template.HTML(html.EscapeString(string(content)))
    }
}

func FetchExtension(options ExtensionRequestOptions) (Extension, error) {
    request, _ := http.NewRequest("GET", options.URL, nil)

    query := url.Values{}

    for key, value := range options.Parameters {
        query.Set(key, value)
    }

    request.URL.RawQuery = query.Encode()

    response, err := http.DefaultClient.Do(request)

    if err != nil {
        slog.Error("failed fetching extension", "error", err, "url", options.URL)
        return Extension{}, fmt.Errorf("%w: request failed: %w", ErrNoContent, err)
    }

    defer response.Body.Close()

    body, err := io.ReadAll(response.Body)

    if err != nil {
        slog.Error("failed reading response body of extension", "error", err, "url", options.URL)
        return Extension{}, fmt.Errorf("%w: could not read body: %w", ErrNoContent, err)
    }

    extension := Extension{}

    if response.Header.Get(ExtensionHeaderTitle) == "" {
        extension.Title = "Extension"
    } else {
        extension.Title = response.Header.Get(ExtensionHeaderTitle)
    }

    contentType, ok := ExtensionStringToType[response.Header.Get(ExtensionHeaderContentType)]

    if !ok {
        contentType = ExtensionContentUnknown
    }

    extension.Content = convertExtensionContent(options, body, contentType)

    return extension, nil
}
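The Widget-Title and Widget-Content-Type response headers drive the title and rendering, and HTML is only passed through unescaped when the user opts in. A hypothetical call (not part of this commit), written as if inside the feed package:

func exampleExtension() error {
    extension, err := FetchExtension(ExtensionRequestOptions{
        URL:        "https://widgets.example.com/uptime", // made-up endpoint
        Parameters: map[string]string{"hours": "24"},
        AllowHtml:  false, // an HTML response will be escaped, not rendered
    })
    if err != nil {
        return err
    }

    fmt.Println(extension.Title)
    return nil
}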
internal/feed/github.go
@@ -2,118 +2,57 @@ package feed

 import (
     "fmt"
     "log/slog"
     "net/http"
     "strings"
     "sync"
     "time"
 )

-type githubReleaseResponseJson struct {
+type githubReleaseLatestResponseJson struct {
     TagName     string `json:"tag_name"`
     PublishedAt string `json:"published_at"`
     HtmlUrl     string `json:"html_url"`
     Draft       bool   `json:"draft"`
     PreRelease  bool   `json:"prerelease"`
     Reactions   struct {
         Downvotes int `json:"-1"`
     } `json:"reactions"`
 }

-func parseGithubTime(t string) time.Time {
-    parsedTime, err := time.Parse("2006-01-02T15:04:05Z", t)
-
-    if err != nil {
-        return time.Now()
-    }
-
-    return parsedTime
-}
+func fetchLatestGithubRelease(request *ReleaseRequest) (*AppRelease, error) {
+    httpRequest, err := http.NewRequest(
+        "GET",
+        fmt.Sprintf("https://api.github.com/repos/%s/releases/latest", request.Repository),
+        nil,
+    )

-func FetchLatestReleasesFromGithub(repositories []string, token string) (AppReleases, error) {
-    appReleases := make(AppReleases, 0, len(repositories))
-
-    if len(repositories) == 0 {
-        return appReleases, nil
-    }
-
-    requests := make([]*http.Request, len(repositories))
-
-    for i, repository := range repositories {
-        request, _ := http.NewRequest("GET", fmt.Sprintf("https://api.github.com/repos/%s/releases?per_page=10", repository), nil)
-
-        if token != "" {
-            request.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token))
-        }
-
-        requests[i] = request
-    }
-
-    task := decodeJsonFromRequestTask[[]githubReleaseResponseJson](defaultClient)
-    job := newJob(task, requests).withWorkers(15)
-    responses, errs, err := workerPoolDo(job)
     if err != nil {
         return nil, err
     }
-
-    var failed int
-
-    for i := range responses {
-        if errs[i] != nil {
-            failed++
-            slog.Error("Failed to fetch or parse github release", "error", errs[i], "url", requests[i].URL)
-            continue
-        }
-
-        releases := responses[i]
-
-        if len(releases) < 1 {
-            failed++
-            slog.Error("No releases found", "repository", repositories[i], "url", requests[i].URL)
-            continue
-        }
-
-        var liveRelease *githubReleaseResponseJson
-
-        for i := range releases {
-            release := &releases[i]
-
-            if !release.Draft && !release.PreRelease {
-                liveRelease = release
-                break
-            }
-        }
-
-        if liveRelease == nil {
-            slog.Error("No live release found", "repository", repositories[i], "url", requests[i].URL)
-            continue
-        }
-
-        version := liveRelease.TagName
-
-        if version[0] != 'v' {
-            version = "v" + version
-        }
-
-        appReleases = append(appReleases, AppRelease{
-            Name:         repositories[i],
-            Version:      version,
-            NotesUrl:     liveRelease.HtmlUrl,
-            TimeReleased: parseGithubTime(liveRelease.PublishedAt),
-            Downvotes:    liveRelease.Reactions.Downvotes,
-        })
+    if request.Token != nil {
+        httpRequest.Header.Add("Authorization", "Bearer "+(*request.Token))
     }

-    if len(appReleases) == 0 {
-        return nil, ErrNoContent
+    response, err := decodeJsonFromRequest[githubReleaseLatestResponseJson](defaultClient, httpRequest)
+
+    if err != nil {
+        return nil, err
     }

-    appReleases.SortByNewest()
+    version := response.TagName

-    if failed > 0 {
-        return appReleases, fmt.Errorf("%w: could not get %d releases", ErrPartialContent, failed)
+    if len(version) > 0 && version[0] != 'v' {
+        version = "v" + version
     }

-    return appReleases, nil
+    return &AppRelease{
+        Source:       ReleaseSourceGithub,
+        Name:         request.Repository,
+        Version:      version,
+        NotesUrl:     response.HtmlUrl,
+        TimeReleased: parseRFC3339Time(response.PublishedAt),
+        Downvotes:    response.Reactions.Downvotes,
+    }, nil
 }

 type GithubTicket struct {
@@ -256,7 +195,7 @@ func FetchRepositoryDetailsFromGithub(repository string, token string, maxPRs in
     for i := range PRsResponse.Tickets {
         details.PullRequests = append(details.PullRequests, GithubTicket{
             Number:    PRsResponse.Tickets[i].Number,
-            CreatedAt: parseGithubTime(PRsResponse.Tickets[i].CreatedAt),
+            CreatedAt: parseRFC3339Time(PRsResponse.Tickets[i].CreatedAt),
             Title:     PRsResponse.Tickets[i].Title,
         })
     }
@@ -273,7 +212,7 @@ func FetchRepositoryDetailsFromGithub(repository string, token string, maxPRs in
     for i := range issuesResponse.Tickets {
         details.Issues = append(details.Issues, GithubTicket{
             Number:    issuesResponse.Tickets[i].Number,
-            CreatedAt: parseGithubTime(issuesResponse.Tickets[i].CreatedAt),
+            CreatedAt: parseRFC3339Time(issuesResponse.Tickets[i].CreatedAt),
             Title:     issuesResponse.Tickets[i].Title,
         })
     }
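The widget-level batching (ten releases per repository, filtered client-side for drafts and prereleases) is replaced by one call per repository to GitHub's /releases/latest endpoint, which already returns only the most recent published, non-prerelease release; fan-out now lives in the new releases.go below. A hypothetical single-repository call (not part of this commit), written as if inside the feed package:

func exampleGithubRelease() error {
    token := "ghp_hypothetical"

    release, err := fetchLatestGithubRelease(&ReleaseRequest{
        Source:     ReleaseSourceGithub,
        Repository: "glanceapp/glance",
        Token:      &token, // optional; nil skips the Authorization header
    })
    if err != nil {
        return err
    }

    fmt.Println(release.Version, release.TimeReleased)
    return nil
}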
54  internal/feed/gitlab.go  Normal file
@@ -0,0 +1,54 @@
package feed

import (
    "fmt"
    "net/http"
    "net/url"
)

type gitlabReleaseResponseJson struct {
    TagName    string `json:"tag_name"`
    ReleasedAt string `json:"released_at"`
    Links      struct {
        Self string `json:"self"`
    } `json:"_links"`
}

func fetchLatestGitLabRelease(request *ReleaseRequest) (*AppRelease, error) {
    httpRequest, err := http.NewRequest(
        "GET",
        fmt.Sprintf(
            "https://gitlab.com/api/v4/projects/%s/releases/permalink/latest",
            url.QueryEscape(request.Repository),
        ),
        nil,
    )

    if err != nil {
        return nil, err
    }

    if request.Token != nil {
        httpRequest.Header.Add("PRIVATE-TOKEN", *request.Token)
    }

    response, err := decodeJsonFromRequest[gitlabReleaseResponseJson](defaultClient, httpRequest)

    if err != nil {
        return nil, err
    }

    version := response.TagName

    if len(version) > 0 && version[0] != 'v' {
        version = "v" + version
    }

    return &AppRelease{
        Source:       ReleaseSourceGitlab,
        Name:         request.Repository,
        Version:      version,
        NotesUrl:     response.Links.Self,
        TimeReleased: parseRFC3339Time(response.ReleasedAt),
    }, nil
}
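GitLab addresses projects by their URL-encoded "namespace/project" path, hence the url.QueryEscape around the repository. A hypothetical call (not part of this commit), written as if inside the feed package:

func exampleGitLabRelease() error {
    // "gitlab-org/cli" becomes "gitlab-org%2Fcli" in the request path.
    release, err := fetchLatestGitLabRelease(&ReleaseRequest{
        Source:     ReleaseSourceGitlab,
        Repository: "gitlab-org/cli",
    })
    if err != nil {
        return err
    }

    fmt.Println(release.Version, release.NotesUrl)
    return nil
}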
91  internal/feed/lobsters.go  Normal file
@@ -0,0 +1,91 @@
package feed

import (
    "net/http"
    "strings"
    "time"
)

type lobstersPostResponseJson struct {
    CreatedAt    string   `json:"created_at"`
    Title        string   `json:"title"`
    URL          string   `json:"url"`
    Score        int      `json:"score"`
    CommentCount int      `json:"comment_count"`
    CommentsURL  string   `json:"comments_url"`
    Tags         []string `json:"tags"`
}

type lobstersFeedResponseJson []lobstersPostResponseJson

func getLobstersPostsFromFeed(feedUrl string) (ForumPosts, error) {
    request, err := http.NewRequest("GET", feedUrl, nil)

    if err != nil {
        return nil, err
    }

    feed, err := decodeJsonFromRequest[lobstersFeedResponseJson](defaultClient, request)

    if err != nil {
        return nil, err
    }

    posts := make(ForumPosts, 0, len(feed))

    for i := range feed {
        createdAt, _ := time.Parse(time.RFC3339, feed[i].CreatedAt)

        posts = append(posts, ForumPost{
            Title:           feed[i].Title,
            DiscussionUrl:   feed[i].CommentsURL,
            TargetUrl:       feed[i].URL,
            TargetUrlDomain: extractDomainFromUrl(feed[i].URL),
            CommentCount:    feed[i].CommentCount,
            Score:           feed[i].Score,
            TimePosted:      createdAt,
            Tags:            feed[i].Tags,
        })
    }

    if len(posts) == 0 {
        return nil, ErrNoContent
    }

    return posts, nil
}

func FetchLobstersPosts(customURL string, instanceURL string, sortBy string, tags []string) (ForumPosts, error) {
    var feedUrl string

    if customURL != "" {
        feedUrl = customURL
    } else {
        if instanceURL != "" {
            instanceURL = strings.TrimRight(instanceURL, "/") + "/"
        } else {
            instanceURL = "https://lobste.rs/"
        }

        if sortBy == "hot" {
            sortBy = "hottest"
        } else if sortBy == "new" {
            sortBy = "newest"
        }

        if len(tags) == 0 {
            feedUrl = instanceURL + sortBy + ".json"
        } else {
            tags := strings.Join(tags, ",")
            feedUrl = instanceURL + "t/" + tags + ".json"
        }
    }

    posts, err := getLobstersPostsFromFeed(feedUrl)

    if err != nil {
        return nil, err
    }

    return posts, nil
}
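Sort names are translated to the lobste.rs route names ("hot" becomes hottest.json, "new" becomes newest.json), and tags switch the path to t/<tag,...>.json. A hypothetical call (not part of this commit), written as if inside the feed package:

func exampleLobsters() error {
    // Default instance: fetches https://lobste.rs/hottest.json.
    posts, err := FetchLobstersPosts("", "", "hot", nil)
    if err != nil {
        return err
    }

    // Tagged feed: fetches https://lobste.rs/t/go,web.json.
    tagged, err := FetchLobstersPosts("", "", "hot", []string{"go", "web"})
    if err != nil {
        return err
    }

    fmt.Println(len(posts), len(tagged))
    return nil
}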
internal/feed/sitestatus.go
@@ -7,6 +7,12 @@ import (
     "time"
 )

+type SiteStatusRequest struct {
+    URL           string `yaml:"url"`
+    CheckURL      string `yaml:"check-url"`
+    AllowInsecure bool   `yaml:"allow-insecure"`
+}
+
 type SiteStatus struct {
     Code         int
     TimedOut     bool
@@ -14,14 +20,34 @@ type SiteStatus struct {
     Error        error
 }

-func getSiteStatusTask(request *http.Request) (SiteStatus, error) {
+func getSiteStatusTask(statusRequest *SiteStatusRequest) (SiteStatus, error) {
+    var url string
+
+    if statusRequest.CheckURL != "" {
+        url = statusRequest.CheckURL
+    } else {
+        url = statusRequest.URL
+    }
+
+    request, err := http.NewRequest(http.MethodGet, url, nil)
+
+    if err != nil {
+        return SiteStatus{
+            Error: err,
+        }, nil
+    }
+
     ctx, cancel := context.WithTimeout(context.Background(), time.Second*3)
     defer cancel()
     request = request.WithContext(ctx)
-    start := time.Now()
-    response, err := http.DefaultClient.Do(request)
-    took := time.Since(start)
-    status := SiteStatus{ResponseTime: took}
+    requestSentAt := time.Now()
+    var response *http.Response
+
+    if !statusRequest.AllowInsecure {
+        response, err = defaultClient.Do(request)
+    } else {
+        response, err = defaultInsecureClient.Do(request)
+    }
+
+    status := SiteStatus{ResponseTime: time.Since(requestSentAt)}

     if err != nil {
         if errors.Is(err, context.DeadlineExceeded) {
@@ -29,7 +55,7 @@ func getSiteStatusTask(request *http.Request) (SiteStatus, error) {
         }

         status.Error = err
-        return status, err
+        return status, nil
     }

     defer response.Body.Close()
@@ -39,7 +65,7 @@ func getSiteStatusTask(request *http.Request) (SiteStatus, error) {
     return status, nil
 }

-func FetchStatusesForRequests(requests []*http.Request) ([]SiteStatus, error) {
+func FetchStatusForSites(requests []*SiteStatusRequest) ([]SiteStatus, error) {
     job := newJob(getSiteStatusTask, requests).withWorkers(20)
     results, _, err := workerPoolDo(job)
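Callers now describe each site declaratively instead of pre-building http.Request values, which is what lets a per-site check-url and allow-insecure flag pick the probe URL and the TLS-verifying or insecure client. A hypothetical call (not part of this commit), written as if inside the feed package:

func exampleSiteStatus() error {
    statuses, err := FetchStatusForSites([]*SiteStatusRequest{
        {URL: "https://example.com"},
        {
            URL:           "https://dashboard.internal.example.com",
            CheckURL:      "https://dashboard.internal.example.com/healthz", // probed instead of URL
            AllowInsecure: true,                                             // e.g. a self-signed certificate
        },
    })
    if err != nil {
        return err
    }

    for _, status := range statuses {
        fmt.Println(status.Code, status.ResponseTime, status.TimedOut)
    }

    return nil
}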
internal/feed/primitives.go
@@ -16,6 +16,8 @@ type ForumPost struct {
     Score           int
     Engagement      float64
     TimePosted      time.Time
+    Tags            []string
+    IsCrosspost     bool
 }

 type ForumPosts []ForumPost
@@ -39,11 +41,13 @@ type Weather struct {
 }

 type AppRelease struct {
-    Name         string
-    Version      string
-    NotesUrl     string
-    TimeReleased time.Time
-    Downvotes    int
+    Source        ReleaseSource
+    SourceIconURL string
+    Name          string
+    Version       string
+    NotesUrl      string
+    TimeReleased  time.Time
+    Downvotes     int
 }

 type AppReleases []AppRelease
@@ -84,20 +88,24 @@ var currencyToSymbol = map[string]string{
     "PHP": "₱",
 }

-type Stock struct {
-    Name       string `yaml:"name"`
-    Symbol     string `yaml:"symbol"`
-    ChartLink  string `yaml:"chart-link"`
-    SymbolLink string `yaml:"symbol-link"`
+type MarketRequest struct {
+    Name       string `yaml:"name"`
+    Symbol     string `yaml:"symbol"`
+    ChartLink  string `yaml:"chart-link"`
+    SymbolLink string `yaml:"symbol-link"`
+}
+
+type Market struct {
+    MarketRequest
     Currency       string  `yaml:"-"`
     Price          float64 `yaml:"-"`
     PercentChange  float64 `yaml:"-"`
     SvgChartPoints string  `yaml:"-"`
 }

-type Stocks []Stock
+type Markets []Market

-func (t Stocks) SortByAbsChange() {
+func (t Markets) SortByAbsChange() {
     sort.Slice(t, func(i, j int) bool {
         return math.Abs(t[i].PercentChange) > math.Abs(t[j].PercentChange)
     })
internal/feed/reddit.go
@@ -25,12 +25,26 @@ type subredditResponseJson struct {
                 Pinned    bool   `json:"pinned"`
                 IsSelf    bool   `json:"is_self"`
                 Thumbnail string `json:"thumbnail"`
+                Flair     string `json:"link_flair_text"`
+                ParentList []struct {
+                    Id        string `json:"id"`
+                    Subreddit string `json:"subreddit"`
+                    Permalink string `json:"permalink"`
+                } `json:"crosspost_parent_list"`
             } `json:"data"`
         } `json:"children"`
     } `json:"data"`
 }

-func FetchSubredditPosts(subreddit, sort, topPeriod, search, commentsUrlTemplate, requestUrlTemplate string) (ForumPosts, error) {
+func templateRedditCommentsURL(template, subreddit, postId, postPath string) string {
+    template = strings.ReplaceAll(template, "{SUBREDDIT}", subreddit)
+    template = strings.ReplaceAll(template, "{POST-ID}", postId)
+    template = strings.ReplaceAll(template, "{POST-PATH}", strings.TrimLeft(postPath, "/"))
+
+    return template
+}
+
+func FetchSubredditPosts(subreddit, sort, topPeriod, search, commentsUrlTemplate, requestUrlTemplate string, showFlairs bool) (ForumPosts, error) {
     query := url.Values{}
     var requestUrl string
@@ -85,9 +99,7 @@ func FetchSubredditPosts(subreddit, sort, topPeriod, search, commentsUrlTemplate
         if commentsUrlTemplate == "" {
             commentsUrl = "https://www.reddit.com" + post.Permalink
         } else {
-            commentsUrl = strings.ReplaceAll(commentsUrlTemplate, "{SUBREDDIT}", subreddit)
-            commentsUrl = strings.ReplaceAll(commentsUrl, "{POST-ID}", post.Id)
-            commentsUrl = strings.ReplaceAll(commentsUrl, "{POST-PATH}", strings.TrimLeft(post.Permalink, "/"))
+            commentsUrl = templateRedditCommentsURL(commentsUrlTemplate, subreddit, post.Id, post.Permalink)
         }

         forumPost := ForumPost{
@@ -107,6 +119,26 @@ func FetchSubredditPosts(subreddit, sort, topPeriod, search, commentsUrlTemplate
             forumPost.TargetUrl = post.Url
         }

+        if showFlairs && post.Flair != "" {
+            forumPost.Tags = append(forumPost.Tags, post.Flair)
+        }
+
+        if len(post.ParentList) > 0 {
+            forumPost.IsCrosspost = true
+            forumPost.TargetUrlDomain = "r/" + post.ParentList[0].Subreddit
+
+            if commentsUrlTemplate == "" {
+                forumPost.TargetUrl = "https://www.reddit.com" + post.ParentList[0].Permalink
+            } else {
+                forumPost.TargetUrl = templateRedditCommentsURL(
+                    commentsUrlTemplate,
+                    post.ParentList[0].Subreddit,
+                    post.ParentList[0].Id,
+                    post.ParentList[0].Permalink,
+                )
+            }
+        }
+
         posts = append(posts, forumPost)
     }
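The extracted templateRedditCommentsURL keeps the three placeholders behaving as before and lets the new crosspost branch reuse them. With hypothetical values (not part of this commit):

func exampleRedditTemplate() string {
    return templateRedditCommentsURL(
        "https://reddit.example-mirror.com/r/{SUBREDDIT}/comments/{POST-ID}",
        "golang",
        "1abc2d",
        "/r/golang/comments/1abc2d/some_post/",
    )
    // -> "https://reddit.example-mirror.com/r/golang/comments/1abc2d"
    // ({POST-PATH} would substitute "r/golang/comments/1abc2d/some_post/")
}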
69  internal/feed/releases.go  Normal file
@@ -0,0 +1,69 @@
package feed

import (
    "errors"
    "fmt"
    "log/slog"
)

type ReleaseSource string

const (
    ReleaseSourceGithub    ReleaseSource = "github"
    ReleaseSourceGitlab    ReleaseSource = "gitlab"
    ReleaseSourceDockerHub ReleaseSource = "dockerhub"
)

type ReleaseRequest struct {
    Source     ReleaseSource
    Repository string
    Token      *string
}

func FetchLatestReleases(requests []*ReleaseRequest) (AppReleases, error) {
    job := newJob(fetchLatestReleaseTask, requests).withWorkers(20)
    results, errs, err := workerPoolDo(job)

    if err != nil {
        return nil, err
    }

    var failed int

    releases := make(AppReleases, 0, len(requests))

    for i := range results {
        if errs[i] != nil {
            failed++
            slog.Error("Failed to fetch release", "source", requests[i].Source, "repository", requests[i].Repository, "error", errs[i])
            continue
        }

        releases = append(releases, *results[i])
    }

    if failed == len(requests) {
        return nil, ErrNoContent
    }

    releases.SortByNewest()

    if failed > 0 {
        return releases, fmt.Errorf("%w: could not get %d releases", ErrPartialContent, failed)
    }

    return releases, nil
}

func fetchLatestReleaseTask(request *ReleaseRequest) (*AppRelease, error) {
    switch request.Source {
    case ReleaseSourceGithub:
        return fetchLatestGithubRelease(request)
    case ReleaseSourceGitlab:
        return fetchLatestGitLabRelease(request)
    case ReleaseSourceDockerHub:
        return fetchLatestDockerHubRelease(request)
    }

    return nil, errors.New("unsupported source")
}
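FetchLatestReleases is the single entry point for the releases widget: one worker pool over heterogeneous sources, dispatched per request by fetchLatestReleaseTask. A hypothetical mixed batch (not part of this commit), written as if inside the feed package:

func exampleMixedReleases() error {
    releases, err := FetchLatestReleases([]*ReleaseRequest{
        {Source: ReleaseSourceGithub, Repository: "glanceapp/glance"},
        {Source: ReleaseSourceGitlab, Repository: "gitlab-org/cli"},
        {Source: ReleaseSourceDockerHub, Repository: "library/redis"},
    })

    // With ErrPartialContent the successfully fetched releases are still returned.
    if err != nil && !errors.Is(err, ErrPartialContent) {
        return err
    }

    for _, release := range releases {
        fmt.Println(release.Source, release.Name, release.Version)
    }

    return nil
}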
internal/feed/requests.go
@@ -2,6 +2,7 @@ package feed

 import (
     "context"
+    "crypto/tls"
     "encoding/json"
     "encoding/xml"
     "fmt"
@@ -11,8 +12,19 @@ import (
     "time"
 )

+const defaultClientTimeout = 5 * time.Second
+
 var defaultClient = &http.Client{
-    Timeout: 5 * time.Second,
+    Timeout: defaultClientTimeout,
 }

+var insecureClientTransport = &http.Transport{
+    TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
+}
+
+var defaultInsecureClient = &http.Client{
+    Timeout:   defaultClientTimeout,
+    Transport: insecureClientTransport,
+}
+
 type RequestDoer interface {
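Both clients share defaultClientTimeout, so the timeout stays a one-line change, and the insecure variant differs only in its transport: InsecureSkipVerify disables TLS certificate verification, which is why it is only selected when a site explicitly opts in via allow-insecure (see the sitestatus.go change above).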
internal/feed/rss.go
@@ -3,11 +3,16 @@ package feed
 import (
     "context"
     "fmt"
+    "html"
     "log/slog"
+    "net/url"
+    "regexp"
     "sort"
+    "strings"
     "time"

     "github.com/mmcdole/gofeed"
+    gofeedext "github.com/mmcdole/gofeed/extensions"
 )

 type RSSFeedItem struct {
@@ -16,12 +21,48 @@ type RSSFeedItem struct {
     Title       string
     Link        string
     ImageURL    string
+    Categories  []string
+    Description string
     PublishedAt time.Time
 }

+// doesn't cover all cases but works the vast majority of the time
+var htmlTagsWithAttributesPattern = regexp.MustCompile(`<\/?[a-zA-Z0-9-]+ *(?:[a-zA-Z-]+=(?:"|').*?(?:"|') ?)* *\/?>`)
+var sequentialWhitespacePattern = regexp.MustCompile(`\s+`)
+
+func sanitizeFeedDescription(description string) string {
+    if description == "" {
+        return ""
+    }
+
+    description = strings.ReplaceAll(description, "\n", " ")
+    description = htmlTagsWithAttributesPattern.ReplaceAllString(description, "")
+    description = sequentialWhitespacePattern.ReplaceAllString(description, " ")
+    description = strings.TrimSpace(description)
+    description = html.UnescapeString(description)
+
+    return description
+}
+
+func shortenFeedDescriptionLen(description string, maxLen int) string {
+    description, _ = limitStringLength(description, 1000)
+    description = sanitizeFeedDescription(description)
+    description, limited := limitStringLength(description, maxLen)
+
+    if limited {
+        description += "…"
+    }
+
+    return description
+}
+
 type RSSFeedRequest struct {
-    Url   string `yaml:"url"`
-    Title string `yaml:"title"`
+    Url             string `yaml:"url"`
+    Title           string `yaml:"title"`
+    HideCategories  bool   `yaml:"hide-categories"`
+    HideDescription bool   `yaml:"hide-description"`
+    ItemLinkPrefix  string `yaml:"item-link-prefix"`
+    IsDetailed      bool   `yaml:"-"`
 }

 type RSSFeedItems []RSSFeedItem
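shortenFeedDescriptionLen hard-caps the raw input at 1000 runes before any regex work, then trims the sanitized text to the display length, appending an ellipsis only when something was cut. A hypothetical input (not part of this commit):

func exampleDescription() string {
    // Tags stripped, whitespace collapsed, entities unescaped,
    // then limited to 11 runes: returns "Hello world…".
    return shortenFeedDescriptionLen("<p>Hello <b>world</b>, this is a long description</p>", 11)
}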
@@ -53,8 +94,60 @@ func getItemsFromRSSFeedTask(request RSSFeedRequest) ([]RSSFeedItem, error) {

         rssItem := RSSFeedItem{
             ChannelURL: feed.Link,
-            Title:      item.Title,
-            Link:       item.Link,
+        }
+
+        if request.ItemLinkPrefix != "" {
+            rssItem.Link = request.ItemLinkPrefix + item.Link
+        } else if strings.HasPrefix(item.Link, "http://") || strings.HasPrefix(item.Link, "https://") {
+            rssItem.Link = item.Link
+        } else {
+            parsedUrl, err := url.Parse(feed.Link)
+
+            if err != nil {
+                parsedUrl, err = url.Parse(request.Url)
+            }
+
+            if err == nil {
+                var link string
+
+                if item.Link[0] == '/' {
+                    link = item.Link
+                } else {
+                    link = "/" + item.Link
+                }
+
+                rssItem.Link = parsedUrl.Scheme + "://" + parsedUrl.Host + link
+            }
+        }
+
+        if item.Title != "" {
+            rssItem.Title = item.Title
+        } else {
+            rssItem.Title = shortenFeedDescriptionLen(item.Description, 100)
+        }
+
+        if request.IsDetailed {
+            if !request.HideDescription && item.Description != "" && item.Title != "" {
+                rssItem.Description = shortenFeedDescriptionLen(item.Description, 200)
+            }
+
+            if !request.HideCategories {
+                var categories = make([]string, 0, 6)
+
+                for _, category := range item.Categories {
+                    if len(categories) == 6 {
+                        break
+                    }
+
+                    if len(category) == 0 || len(category) > 30 {
+                        continue
+                    }
+
+                    categories = append(categories, category)
+                }
+
+                rssItem.Categories = categories
+            }
         }

         if request.Title != "" {
@@ -65,8 +158,14 @@ func getItemsFromRSSFeedTask(request RSSFeedRequest) ([]RSSFeedItem, error) {

         if item.Image != nil {
             rssItem.ImageURL = item.Image.URL
+        } else if url := findThumbnailInItemExtensions(item); url != "" {
+            rssItem.ImageURL = url
         } else if feed.Image != nil {
-            rssItem.ImageURL = feed.Image.URL
+            if len(feed.Image.URL) > 0 && feed.Image.URL[0] == '/' {
+                rssItem.ImageURL = strings.TrimRight(feed.Link, "/") + feed.Image.URL
+            } else {
+                rssItem.ImageURL = feed.Image.URL
+            }
         }

         if item.PublishedParsed != nil {
@@ -81,6 +180,36 @@ func getItemsFromRSSFeedTask(request RSSFeedRequest) ([]RSSFeedItem, error) {
     return items, nil
 }

+func recursiveFindThumbnailInExtensions(extensions map[string][]gofeedext.Extension) string {
+    for _, exts := range extensions {
+        for _, ext := range exts {
+            if ext.Name == "thumbnail" || ext.Name == "image" {
+                if url, ok := ext.Attrs["url"]; ok {
+                    return url
+                }
+            }
+
+            if ext.Children != nil {
+                if url := recursiveFindThumbnailInExtensions(ext.Children); url != "" {
+                    return url
+                }
+            }
+        }
+    }
+
+    return ""
+}
+
+func findThumbnailInItemExtensions(item *gofeed.Item) string {
+    media, ok := item.Extensions["media"]
+
+    if !ok {
+        return ""
+    }
+
+    return recursiveFindThumbnailInExtensions(media)
+}
+
 func GetItemsFromRSSFeeds(requests []RSSFeedRequest) (RSSFeedItems, error) {
     job := newJob(getItemsFromRSSFeedTask, requests).withWorkers(10)
     feeds, errs, err := workerPoolDo(job)
@@ -103,7 +232,7 @@ func GetItemsFromRSSFeeds(requests []RSSFeedRequest) (RSSFeedItems, error) {
         entries = append(entries, feeds[i]...)
     }

-    if len(entries) == 0 {
+    if failed == len(requests) {
         return nil, ErrNoContent
     }
internal/feed/twitch.go
@@ -44,6 +44,12 @@ func (channels TwitchChannels) SortByViewers() {
     })
 }

+func (channels TwitchChannels) SortByLive() {
+    sort.SliceStable(channels, func(i, j int) bool {
+        return channels[i].IsLive && !channels[j].IsLive
+    })
+}
+
 type twitchOperationResponse struct {
     Data       json.RawMessage
     Extensions struct {
@@ -198,9 +204,11 @@ func fetchChannelFromTwitchTask(channel string) (TwitchChannel, error) {
         result.IsLive = true
         result.ViewersCount = channelShell.UserOrError.Stream.ViewersCount

-        if streamMetadata.UserOrNull != nil && streamMetadata.UserOrNull.Stream != nil && streamMetadata.UserOrNull.Stream.Game != nil {
-            result.Category = streamMetadata.UserOrNull.Stream.Game.Name
-            result.CategorySlug = streamMetadata.UserOrNull.Stream.Game.Slug
+        if streamMetadata.UserOrNull != nil && streamMetadata.UserOrNull.Stream != nil {
+            if streamMetadata.UserOrNull.Stream.Game != nil {
+                result.Category = streamMetadata.UserOrNull.Stream.Game.Name
+                result.CategorySlug = streamMetadata.UserOrNull.Stream.Game.Slug
+            }
+
             startedAt, err := time.Parse("2006-01-02T15:04:05Z", streamMetadata.UserOrNull.Stream.StartedAt)

             if err == nil {
internal/feed/utils.go
@@ -4,8 +4,10 @@ import (
     "errors"
     "fmt"
     "net/url"
+    "regexp"
     "slices"
     "strings"
+    "time"
 )

 var (
@@ -77,3 +79,29 @@ func maybeCopySliceWithoutZeroValues[T int | float64](values []T) []T {

     return values
 }
+
+var urlSchemePattern = regexp.MustCompile(`^[a-z]+:\/\/`)
+
+func stripURLScheme(url string) string {
+    return urlSchemePattern.ReplaceAllString(url, "")
+}
+
+func limitStringLength(s string, max int) (string, bool) {
+    asRunes := []rune(s)
+
+    if len(asRunes) > max {
+        return string(asRunes[:max]), true
+    }
+
+    return s, false
+}
+
+func parseRFC3339Time(t string) time.Time {
+    parsed, err := time.Parse(time.RFC3339, t)
+
+    if err != nil {
+        return time.Now()
+    }
+
+    return parsed
+}
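limitStringLength counts runes rather than bytes, so multi-byte text is never cut mid-character, and parseRFC3339Time is the shared, lenient replacement for the per-source time parsers, falling back to the current time on bad input. Hypothetical values (not part of this commit):

func exampleUtils() {
    shortened, limited := limitStringLength("こんにちは世界", 5)
    fmt.Println(shortened, limited) // "こんにちは" true (5 runes, not 5 bytes)

    fmt.Println(stripURLScheme("https://www.example.com/page")) // "www.example.com/page"

    fmt.Println(parseRFC3339Time("2024-05-01T10:30:00Z")) // valid RFC 3339, parsed as-is
}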
internal/feed/yahoo.go
@@ -6,7 +6,7 @@ import (
     "net/http"
 )

-type stockResponseJson struct {
+type marketResponseJson struct {
     Chart struct {
         Result []struct {
             Meta struct {
@@ -25,30 +25,30 @@ type stockResponseJson struct {
 }

 // TODO: allow changing chart time frame
-const stockChartDays = 21
+const marketChartDays = 21

-func FetchStocksDataFromYahoo(stockRequests Stocks) (Stocks, error) {
-    requests := make([]*http.Request, 0, len(stockRequests))
+func FetchMarketsDataFromYahoo(marketRequests []MarketRequest) (Markets, error) {
+    requests := make([]*http.Request, 0, len(marketRequests))

-    for i := range stockRequests {
-        request, _ := http.NewRequest("GET", fmt.Sprintf("https://query1.finance.yahoo.com/v8/finance/chart/%s?range=1mo&interval=1d", stockRequests[i].Symbol), nil)
+    for i := range marketRequests {
+        request, _ := http.NewRequest("GET", fmt.Sprintf("https://query1.finance.yahoo.com/v8/finance/chart/%s?range=1mo&interval=1d", marketRequests[i].Symbol), nil)
         requests = append(requests, request)
     }

-    job := newJob(decodeJsonFromRequestTask[stockResponseJson](defaultClient), requests)
+    job := newJob(decodeJsonFromRequestTask[marketResponseJson](defaultClient), requests)
     responses, errs, err := workerPoolDo(job)

     if err != nil {
         return nil, fmt.Errorf("%w: %v", ErrNoContent, err)
     }

-    stocks := make(Stocks, 0, len(responses))
+    markets := make(Markets, 0, len(responses))
     var failed int

     for i := range responses {
         if errs[i] != nil {
             failed++
-            slog.Error("Failed to fetch stock data", "symbol", stockRequests[i].Symbol, "error", errs[i])
+            slog.Error("Failed to fetch market data", "symbol", marketRequests[i].Symbol, "error", errs[i])
             continue
         }
@@ -56,14 +56,14 @@ func FetchStocksDataFromYahoo(stockRequests Stocks) (Stocks, error) {

         if len(response.Chart.Result) == 0 {
             failed++
-            slog.Error("Stock response contains no data", "symbol", stockRequests[i].Symbol)
+            slog.Error("Market response contains no data", "symbol", marketRequests[i].Symbol)
             continue
         }

         prices := response.Chart.Result[0].Indicators.Quote[0].Close

-        if len(prices) > stockChartDays {
-            prices = prices[len(prices)-stockChartDays:]
+        if len(prices) > marketChartDays {
+            prices = prices[len(prices)-marketChartDays:]
         }

         previous := response.Chart.Result[0].Meta.RegularMarketPrice
@@ -80,13 +80,10 @@ func FetchStocksDataFromYahoo(stockRequests Stocks) (Stocks, error) {
             currency = response.Chart.Result[0].Meta.Currency
         }

-        stocks = append(stocks, Stock{
-            Name:       stockRequests[i].Name,
-            Symbol:     response.Chart.Result[0].Meta.Symbol,
-            SymbolLink: stockRequests[i].SymbolLink,
-            ChartLink:  stockRequests[i].ChartLink,
-            Price:      response.Chart.Result[0].Meta.RegularMarketPrice,
-            Currency:   currency,
+        markets = append(markets, Market{
+            MarketRequest: marketRequests[i],
+            Price:         response.Chart.Result[0].Meta.RegularMarketPrice,
+            Currency:      currency,
             PercentChange: percentChange(
                 response.Chart.Result[0].Meta.RegularMarketPrice,
                 previous,
@@ -95,13 +92,13 @@ func FetchStocksDataFromYahoo(stockRequests Stocks) (Stocks, error) {
         })
     }

-    if len(stocks) == 0 {
+    if len(markets) == 0 {
         return nil, ErrNoContent
     }

     if failed > 0 {
-        return stocks, fmt.Errorf("%w: could not fetch data for %d stock(s)", ErrPartialContent, failed)
+        return markets, fmt.Errorf("%w: could not fetch data for %d market(s)", ErrPartialContent, failed)
     }

-    return stocks, nil
+    return markets, nil
 }
internal/feed/youtube.go
@@ -10,7 +10,7 @@ import (
 )

 type youtubeFeedResponseXml struct {
-    Channel     string `xml:"title"`
+    Channel     string `xml:"author>name"`
     ChannelLink struct {
         Href string `xml:"href,attr"`
     } `xml:"link"`
@@ -39,11 +39,19 @@ func parseYoutubeFeedTime(t string) time.Time {
     return parsedTime
 }

-func FetchYoutubeChannelUploads(channelIds []string, videoUrlTemplate string) (Videos, error) {
+func FetchYoutubeChannelUploads(channelIds []string, videoUrlTemplate string, includeShorts bool) (Videos, error) {
     requests := make([]*http.Request, 0, len(channelIds))

     for i := range channelIds {
-        request, _ := http.NewRequest("GET", "https://www.youtube.com/feeds/videos.xml?channel_id="+channelIds[i], nil)
+        var feedUrl string
+        if !includeShorts && strings.HasPrefix(channelIds[i], "UC") {
+            playlistId := strings.Replace(channelIds[i], "UC", "UULF", 1)
+            feedUrl = "https://www.youtube.com/feeds/videos.xml?playlist_id=" + playlistId
+        } else {
+            feedUrl = "https://www.youtube.com/feeds/videos.xml?channel_id=" + channelIds[i]
+        }
+
+        request, _ := http.NewRequest("GET", feedUrl, nil)
         requests = append(requests, request)
     }
@@ -70,12 +78,6 @@ func FetchYoutubeChannelUploads(channelIds []string, videoUrlTemplate string) (V
     for j := range response.Videos {
         video := &response.Videos[j]

-        // TODO: figure out a better way of skipping shorts
-        if strings.Contains(video.Title, "#shorts") {
-            continue
-        }
-
         var videoUrl string

         if videoUrlTemplate == "" {
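The prefix swap relies on a YouTube feed convention: for a channel ID beginning with UC, the playlist ID with the UULF prefix is the channel's uploads playlist with Shorts excluded, which replaces the old #shorts title heuristic. With a hypothetical channel ID (not part of this commit):

func exampleShortsFreeFeed() string {
    channelId := "UCabcdefghijklmnopqrstuv" // made-up channel ID

    // "UC..." channel -> "UULF..." uploads playlist without Shorts
    playlistId := strings.Replace(channelId, "UC", "UULF", 1)

    return "https://www.youtube.com/feeds/videos.xml?playlist_id=" + playlistId
}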