gitlin/pages/repo.go
package pages

import (
	"log"
	"net/http"
	"os"
	"strings"
	"time"

	"gitler.moe/suwako/gitlin/utils"

	"github.com/gocolly/colly"
	"github.com/gofiber/fiber/v2"
)

// Languages holds one entry of the repository language bar.
type Languages struct {
	Name, Percent, Color string
}

// LastCommit holds the metadata of the most recent commit shown on the repo page.
type LastCommit struct {
	Ava, User, Id, IdShort, Mess, MessIssue, Date string
}

// Repo collects everything scraped from the repository overview page.
type Repo struct {
	Username, Reponame, CommitCount, Description, Parent, Stars, Forks, CommitsBehind, Watchers, License, DefaultBranch, Readme, Link, ComCount string
	Tags, Branch []string
	Language []Languages
	LastCom LastCommit
}

// RepoFiles describes one row of the file listing.
type RepoFiles struct {
	Name, Path, Commit, Date, Type, Username, Reponame, DefaultBranch string
}

func HandleRepo(c *fiber.Ctx) error {
	var repoArray []Repo
	var repoFilesArray []RepoFiles
	var readmeOutput string

	// Append /tree/<branch> to the GitHub URL when a branch is given in the route.
	branchExists := ""
	if c.Params("branch") != "" {
		branchExists = "/tree/" + c.Params("branch")
	}

	repoUrl := strings.TrimSuffix(c.Params("repo"), ".git")

	resp, statusErr := http.Get("https://github.com/" + c.Params("user") + "/" + repoUrl + branchExists)
	if statusErr != nil {
		// Bail out here instead of dereferencing a nil response below.
		log.Println(statusErr)
		return statusErr
	}
	defer resp.Body.Close()

	if resp.StatusCode == 404 {
		// I need a better way to do this
		return c.Status(404).Render("error", fiber.Map{
			"title": "Error",
			"ver":   utils.Ver,
			"ves":   utils.Ves,
			"error": "Repository " + c.Params("user") + "/" + repoUrl + branchExists + " not found",
		})
	}

	// Scraping
	Scrape := Repo{}

	// Use a browser-like User-Agent unless one is provided via GITLIN_USER_AGENT.
	UserAgent, ok := os.LookupEnv("GITLIN_USER_AGENT")
	if !ok {
		UserAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36"
	}

	sc := colly.NewCollector(colly.AllowedDomains("github.com"), colly.UserAgent(UserAgent))
	sc.Limit(&colly.LimitRule{
		DomainGlob:  "github.githubassets.com/*",
		Delay:       30 * time.Second,
		RandomDelay: 30 * time.Second,
	})
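
	// Sidebar: description, star/watcher/fork counts, homepage link, license and
	// topic tags. The CSS selectors assume GitHub's current desktop layout.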
	sc.OnHTML("div.Layout-sidebar", func(e *colly.HTMLElement) {
		Scrape.Username = c.Params("user")
		Scrape.Reponame = repoUrl
		Scrape.Description = e.ChildText("p.f4")
		Scrape.Stars = e.ChildText("a[href*='/" + c.Params("user") + "/" + repoUrl + "/stargazers' i] strong")
		Scrape.Watchers = e.ChildText("a[href*='/" + c.Params("user") + "/" + repoUrl + "/watchers' i] strong")
		Scrape.Forks = e.ChildText("a[href*='/" + c.Params("user") + "/" + repoUrl + "/forks' i] strong")
		Scrape.Link = e.ChildAttr("span.css-truncate a.text-bold", "href")
		Scrape.License = e.ChildText("a[data-analytics-event*='{\"category\":\"Repository Overview\",\"action\":\"click\",\"label\":\"location:sidebar;file:license\"}']")
		e.ForEach("a.topic-tag", func(i int, el *colly.HTMLElement) {
			Scrape.Tags = append(Scrape.Tags, strings.TrimPrefix(el.Attr("data-octo-dimensions"), "topic:"))
		})
	})
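
	// Latest commit box: author, avatar (rewritten to the local /avatar proxy),
	// commit id, message, referenced issue number, total commit count and date.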
	sc.OnHTML("div.Box-header div.js-details-container", func(e *colly.HTMLElement) {
		var commit LastCommit
		commit.Ava = strings.ReplaceAll(e.ChildAttr("img", "src"), "https://avatars.githubusercontent.com/u", "/avatar")
		commit.User = e.ChildText("a.commit-author")
		commit.Id = strings.TrimPrefix(e.ChildAttr("a.Link--primary", "href"), "/" + c.Params("user") + "/" + repoUrl + "/commit/")
		commit.IdShort = e.ChildText("div.flex-items-baseline a.f6")
		commit.Mess = e.ChildText("span.d-none a.Link--primary")
		Scrape.ComCount = e.ChildText("li.ml-0 a.Link--primary span.d-none strong")
		commit.MessIssue = strings.ReplaceAll(e.ChildText("span.d-none a.issue-link"), "#", "")
		tstring := e.ChildAttr("relative-time", "datetime")
		if tstring != "" {
			// Keep only the YYYY-MM-DD part of the ISO 8601 timestamp.
			tstring = strings.Split(tstring, "T")[0]
		}
		commit.Date = tstring
		Scrape.LastCom = commit
	})
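
	// Fork banner: the "<n> commits behind" notice, if the repository is a fork.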
	sc.OnHTML("div.Box-body div.d-flex div span", func(e *colly.HTMLElement) {
		Scrape.CommitsBehind = strings.TrimSuffix(e.ChildText("a"), " commits behind")
	})
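
	// README heading (the file name shown in the readme box header).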
	sc.OnHTML("div#readme", func(e *colly.HTMLElement) {
		Scrape.Readme = e.ChildText("a[href='#readme']")
	})
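
	// README body: sanitise the rendered HTML and rewrite GitHub asset hosts to
	// the local /camo and /raw proxy routes.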
	sc.OnHTML("div#readme div.Box-body", func(e *colly.HTMLElement) {
		Content, _ := e.DOM.Html()
		readmeOutput = string(utils.UGCPolicy().SanitizeBytes([]byte(Content)))
		readmeOutput = strings.ReplaceAll(readmeOutput, "https://github.com", "")
		readmeOutput = strings.ReplaceAll(readmeOutput, "user-content-", "")
		readmeOutput = strings.ReplaceAll(readmeOutput, "https://camo.githubusercontent.com", "/camo")
		readmeOutput = strings.ReplaceAll(readmeOutput, "https://raw.githubusercontent.com", "/raw")
	})
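
	// Language bar: name, percentage and color of each listed language.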
	sc.OnHTML("div.BorderGrid-cell ul.list-style-none", func(e *colly.HTMLElement) {
		e.ForEach("li.d-inline .d-inline-flex", func(i int, el *colly.HTMLElement) {
			var lang Languages
			lang.Name = el.ChildText("span.text-bold")
			lang.Percent = el.ChildText("span:contains('%')")
			lang.Color = strings.ReplaceAll(strings.ReplaceAll(el.ChildAttr("svg", "style"), "color:", ""), ";", "")
			Scrape.Language = append(Scrape.Language, lang)
		})
	})
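
	// Parent repository ("forked from ...") shown in the page header.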
	sc.OnHTML("div#repository-container-header", func(e *colly.HTMLElement) {
		Scrape.Parent = e.ChildText("span.text-small a")
	})
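
	// Branch shown in the branch/tag switcher (the default branch when no branch is given).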
	sc.OnHTML("summary[title*='Switch branches or tags']", func(e *colly.HTMLElement) {
		Scrape.DefaultBranch = e.ChildText("span.css-truncate-target")
	})
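
	// File listing: one row per entry with name, last commit message, date and
	// whether it is a directory or a regular file.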
	sc.OnHTML("div.js-details-container div.Details-content--hidden-not-important", func(e *colly.HTMLElement) {
		e.ForEach("div.js-navigation-item", func(i int, el *colly.HTMLElement) {
			var FileType string
			if el.ChildAttr("div.flex-shrink-0 svg", "aria-label") == "Directory" {
				FileType = "dir"
			} else {
				FileType = "file"
			}
			tstring := el.ChildAttr("relative-time", "datetime")
			if tstring != "" {
				// Turn the ISO 8601 timestamp into "YYYY-MM-DD、HH:MM:SS".
				tstring = strings.ReplaceAll(tstring, "T", "、")
				tstring = strings.ReplaceAll(tstring, "Z", "")
				tstring = strings.Split(tstring, "+")[0]
			}
			repoFilesArray = append(repoFilesArray, RepoFiles{
				Name:          el.ChildText("div.flex-auto span.d-block a.js-navigation-open"),
				Path:          el.ChildText("div.flex-auto span.d-block a.js-navigation-open"),
				Commit:        el.ChildText("div.flex-auto span.d-block a.Link--secondary"),
				Date:          tstring,
				Type:          FileType,
				Username:      Scrape.Username,
				Reponame:      Scrape.Reponame,
				DefaultBranch: Scrape.DefaultBranch,
			})
		})
	})
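
	// Fetch and scrape the GitHub repository page, then render the template.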
	if err := sc.Visit("https://github.com/" + c.Params("user") + "/" + repoUrl + branchExists); err != nil {
		log.Println(err)
	}

	// Add scrape-based info to repoArray
	repoArray = append(repoArray, Scrape)

	return c.Render("repo", fiber.Map{
		"title":   c.Params("user") + "/" + repoUrl + branchExists,
		"branch":  Scrape.DefaultBranch,
		"ver":     utils.Ver,
		"ves":     utils.Ves,
		"repo":    repoArray,
		"usrep":   c.Params("user") + "/" + repoUrl,
		"commits": Scrape.ComCount,
		"lastcom": Scrape.LastCom,
		"files":   repoFilesArray,
		"readme":  readmeOutput,
	})
}