package pages

import (
	"log"
	"net/http"
	"os"
	"strings"

	"gitler.moe/suwako/gitlin/utils"

	"github.com/gocolly/colly"
	"github.com/gofiber/fiber/v2"
)

// Languages is one entry of a repository's language breakdown bar.
type Languages struct {
	Name    string
	Percent string
	Color   string
}

// Repo aggregates everything scraped from a repository's overview page.
type Repo struct {
	Username      string
	Reponame      string
	Description   string
	Parent        string
	Stars         string
	Forks         string
	CommitsBehind string
	Watchers      string
	Language      []Languages
	License       string
	DefaultBranch string
	Readme        string
	Link          string
	Tags          []string
	Branch        []string
}

// RepoFiles describes one row of the repository file listing.
type RepoFiles struct {
	Name          string
	Path          string
	Commit        string
	Date          string
	Type          string
	Username      string
	Reponame      string
	DefaultBranch string
}

// readmeRewriter rewrites GitHub-hosted URLs in sanitized README HTML so
// they are served through this proxy instead. The patterns do not overlap,
// so a single simultaneous pass is equivalent to the sequential replaces.
var readmeRewriter = strings.NewReplacer(
	"https://github.com", "",
	"user-content-", "",
	"https://camo.githubusercontent.com", "/camo",
	"https://raw.githubusercontent.com", "/raw",
)

// HandleRepo renders the repository page for /:user/:repo (optionally
// /tree/:branch) by scraping the corresponding github.com page with colly.
// It returns a 404 error page when the upstream repository does not exist.
func HandleRepo(c *fiber.Ctx) error {
	var repoArray []Repo
	var repoFilesArray []RepoFiles
	var readmeOutput string

	branchExists := ""
	if c.Params("branch") != "" {
		branchExists = "/tree/" + c.Params("branch")
	}

	// GitHub accepts the ".git" suffix on repo URLs; normalize it away.
	repoUrl := strings.TrimSuffix(c.Params("repo"), ".git")

	resp, statusErr := http.Get("https://github.com/" + c.Params("user") + "/" + repoUrl + branchExists)
	if statusErr != nil {
		// Without a response we cannot continue; dereferencing resp below
		// would panic with a nil pointer. Report the failure instead.
		log.Println(statusErr)
		return c.Status(500).Render("error", fiber.Map{
			"title": "Error",
			"ver":   utils.Ver,
			"ves":   utils.Ves,
			"error": "Could not reach GitHub for " + c.Params("user") + "/" + repoUrl + branchExists,
		})
	}
	defer resp.Body.Close()

	if resp.StatusCode == 404 { // I need a better way to do this
		return c.Status(404).Render("error", fiber.Map{
			"title": "Error",
			"ver":   utils.Ver,
			"ves":   utils.Ves,
			"error": "Repository " + c.Params("user") + "/" + repoUrl + branchExists + " not found",
		})
	}

	// Scraping
	Scrape := Repo{}

	// Allow overriding the scraper's User-Agent; default to a common browser UA.
	UserAgent, ok := os.LookupEnv("GITLIN_USER_AGENT")
	if !ok {
		UserAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36"
	}

	sc := colly.NewCollector(colly.AllowedDomains("github.com"), colly.UserAgent(UserAgent))

	// Sidebar: description, stars/watchers/forks, license, topic tags.
	sc.OnHTML("div.Layout-sidebar", func(e *colly.HTMLElement) {
		Scrape.Username = c.Params("user")
		Scrape.Reponame = repoUrl
		Scrape.Description = e.ChildText("p.f4")
		Scrape.Stars = e.ChildText("a[href*='/" + c.Params("user") + "/" + repoUrl + "/stargazers' i] strong")
		Scrape.Watchers = e.ChildText("a[href*='/" + c.Params("user") + "/" + repoUrl + "/watchers' i] strong")
		Scrape.Forks = e.ChildText("a[href*='/" + c.Params("user") + "/" + repoUrl + "/forks' i] strong")
		Scrape.Link = e.ChildAttr("span.css-truncate a.text-bold", "href")
		Scrape.License = e.ChildText("a[data-analytics-event*='{\"category\":\"Repository Overview\",\"action\":\"click\",\"label\":\"location:sidebar;file:license\"}']")
		e.ForEach("a.topic-tag", func(i int, el *colly.HTMLElement) {
			Scrape.Tags = append(Scrape.Tags, strings.TrimPrefix(el.Attr("data-octo-dimensions"), "topic:"))
		})
	})

	// Fork banner: "N commits behind" note.
	sc.OnHTML("div.Box-body div.d-flex div span", func(e *colly.HTMLElement) {
		Scrape.CommitsBehind = strings.TrimSuffix(e.ChildText("a"), " commits behind")
	})

	// README heading (the filename shown above the rendered README).
	sc.OnHTML("div#readme", func(e *colly.HTMLElement) {
		Scrape.Readme = e.ChildText("a[href='#readme']")
	})

	// README body: sanitize the HTML, then rewrite GitHub URLs to local ones.
	sc.OnHTML("div#readme div.Box-body", func(e *colly.HTMLElement) {
		Content, _ := e.DOM.Html()
		readmeOutput = readmeRewriter.Replace(string(utils.UGCPolicy().SanitizeBytes([]byte(Content))))
	})

	// Language statistics list.
	sc.OnHTML("div.BorderGrid-cell ul.list-style-none", func(e *colly.HTMLElement) {
		e.ForEach("li.d-inline .d-inline-flex", func(i int, el *colly.HTMLElement) {
			var lang Languages
			lang.Name = el.ChildText("span.text-bold")
			lang.Percent = el.ChildText("span:contains('%')")
			lang.Color = strings.ReplaceAll(strings.ReplaceAll(el.ChildAttr("svg", "style"), "color:", ""), ";", "")
			Scrape.Language = append(Scrape.Language, lang)
		})
	})

	// Parent repository link (set when this repo is a fork).
	sc.OnHTML("div#repository-container-header", func(e *colly.HTMLElement) {
		Scrape.Parent = e.ChildText("span.text-small a")
	})

	// Currently selected branch name.
	sc.OnHTML("summary[title*='Switch branches or tags']", func(e *colly.HTMLElement) {
		Scrape.DefaultBranch = e.ChildText("span.css-truncate-target")
	})

	// File listing rows: name, last commit message, date, and entry type.
	sc.OnHTML("div.js-details-container div.Details-content--hidden-not-important", func(e *colly.HTMLElement) {
		e.ForEach("div.js-navigation-item", func(i int, el *colly.HTMLElement) {
			var FileType string
			if el.ChildAttr("div.flex-shrink-0 svg", "aria-label") == "Directory" {
				FileType = "dir"
			} else {
				FileType = "file"
			}
			// Reformat the ISO-8601 datetime: keep only the date portion
			// (with a Japanese comma separating the time, as the UI expects).
			tstring := el.ChildAttr("relative-time", "datetime")
			if tstring != "" {
				tstring = strings.ReplaceAll(tstring, "T", "、")
				tstring = strings.ReplaceAll(tstring, "Z", "")
				tstring = strings.Split(tstring, "+")[0]
				tstrings := strings.Split(tstring, "-")
				tstring = tstrings[0] + "-" + tstrings[1] + "-" + tstrings[2]
			}
			repoFilesArray = append(repoFilesArray, RepoFiles{
				Name:          el.ChildText("div.flex-auto span.d-block a.js-navigation-open"),
				Path:          el.ChildText("div.flex-auto span.d-block a.js-navigation-open"),
				Commit:        el.ChildText("div.flex-auto span.d-block a.Link--secondary"),
				Date:          tstring,
				Type:          FileType,
				Username:      Scrape.Username,
				Reponame:      Scrape.Reponame,
				DefaultBranch: Scrape.DefaultBranch,
			})
		})
	})

	if err := sc.Visit("https://github.com/" + c.Params("user") + "/" + repoUrl + branchExists); err != nil {
		// Best-effort: render whatever was scraped, but record the failure.
		log.Println(err)
	}

	// Add scrape-based info to repoArray
	repoArray = append(repoArray, Scrape)

	return c.Render("repo", fiber.Map{
		"title":  c.Params("user") + "/" + repoUrl + branchExists,
		"ver":    utils.Ver,
		"ves":    utils.Ves,
		"repo":   repoArray,
		"files":  repoFilesArray,
		"readme": readmeOutput,
	})
}