better error handling
All checks were successful
Build and Push Docker Image / build-and-push (push) Successful in 28s
This commit is contained in: parent fd73c4d5d9 · commit f9530fd7d2
118 main.go
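In outline, the diff below restructures error handling in both scrapers in main.go: the output file is no longer created up front inside the var block, so a failed run can no longer truncate the previous JSON before any jobs are scraped; the error from the initial c.Visit call is now logged with log.Printf and aborts only that scraper, where log.Fatalf used to exit the whole process; and jobsToJson now writes once, after c.Wait() and detailsCollector.Wait() return.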
@@ -86,7 +86,6 @@ var (
 )
 
 func scrapeHub() {
-    // declare and initialize variables
     var (
         jobs []job
         jobCount int
@@ -94,32 +93,23 @@ func scrapeHub() {
         maxJobs = 20
         baseUrl = "https://thehub.io"
         searchString = "https://thehub.io/jobs?roles=frontenddeveloper&roles=fullstackdeveloper&roles=backenddeveloper&roles=devops&paid=true&countryCode=DK&sorting=newJobs"
-        file, err = os.Create(fName)
     )
 
-    if err != nil {
-        log.Fatalf("Cannot create file %q: %s", fName, err)
-    }
-    defer file.Close()
+    // Create file after scraping is complete
 
-    // Instantiate default collector
     c := colly.NewCollector(
-        // visit only the hub
         colly.AllowedDomains("www.thehub.io", "thehub.io"),
     )
 
-    // Instantiate a new collector to visit the job details page
     detailsCollector := colly.NewCollector(
         colly.AllowedDomains("www.thehub.io", "thehub.io"),
         colly.CacheDir("/app/data/thehub_cache"),
     )
-    // On every <div> element with class "card__content attribute call callback
     c.OnHTML("div[class=card__content]", func(e *colly.HTMLElement) {
         if jobCount >= maxJobs {
             return
         }
 
-        // Get the title and ensure it doesn't contain any excluded words
         title := e.ChildText("span.card-job-find-list__position")
         for _, excludedWord := range excluded {
             if strings.Contains(strings.ToLower(title), excludedWord) {
@@ -130,7 +120,6 @@ func scrapeHub() {
         fullLink := baseUrl + link
 
         detailsCollector.Visit(fullLink)
-
     })
 
     detailsCollector.OnRequest(func(r *colly.Request) {
@@ -138,21 +127,21 @@ func scrapeHub() {
     })
 
     detailsCollector.OnHTML("div[class='view-job-details']", func(e *colly.HTMLElement) {
+        if jobCount >= maxJobs {
+            return
+        }
 
-        // Get logo and trim the url
         logo := e.ChildAttr("div.media-item__image", "style")
         cutLeft := "background-image:url("
         cutRight := ");"
         trimmedLogo := strings.Trim(logo, cutLeft+cutRight)
 
-        // Get the HTML of the description and check to see if it's paid
         descriptionHTML, err := e.DOM.Find("content.text-block__content > span").Html()
         if err != nil {
             log.Printf("Error getting HTML of description: %s", err)
             return
         }
 
-        // fill in the job struct
         jobDetails := job{
             Title: e.ChildText("h2[class=view-job-details__title]"),
             Logo: trimmedLogo,
@@ -167,14 +156,13 @@ func scrapeHub() {
         }
         jobs = append(jobs, jobDetails)
         jobCount++
-        fmt.Println("Scraped job", jobCount)
+        fmt.Printf("Scraped job %d from TheHub\n", jobCount)
+    })
+
+    c.OnHTML("a.page-link", func(e *colly.HTMLElement) {
         if jobCount >= maxJobs {
-            jobsToJson(file, jobs, fName)
             return
         }
-    })
-    // Handle pagination
-    c.OnHTML("a.page-link", func(e *colly.HTMLElement) {
         nextPage := e.Attr("href")
         if nextPage != "" {
             fullNextPage := baseUrl + nextPage
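With the write moved to the end of the function, the jobsToJson flush that used to live inside the maxJobs guard is gone, and the a.page-link handler is now registered once, with the guard folded into the pagination callback.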
@@ -182,12 +170,34 @@ func scrapeHub() {
         }
     })
 
-    c.Visit(searchString)
+    // Add error handling for the initial visit
+    err := c.Visit(searchString)
+    if err != nil {
+        log.Printf("Error visiting TheHub: %s", err)
+        return
+    }
+
+    // Wait for all collectors to finish
+    c.Wait()
+    detailsCollector.Wait()
+
+    // Write jobs to file after scraping is complete
+    if len(jobs) > 0 {
+        file, err := os.Create(fName)
+        if err != nil {
+            log.Printf("Cannot create file %q: %s", fName, err)
+            return
+        }
+        defer file.Close()
+
+        jobsToJson(file, jobs, fName)
+        fmt.Printf("Successfully scraped %d jobs from TheHub\n", len(jobs))
+    } else {
+        log.Println("No jobs were scraped from TheHub")
+    }
 }
 
 func scrapeItJobBank() {
-    // declare and initialize variables
     var (
         jobs []job
         jobCount int
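Two details are worth noting in the TheHub changes above (they repeat for it-jobbank below). First, log.Fatalf calls os.Exit(1), which skips deferred file closes and would have killed the second scraper as well, so log.Printf plus return degrades more gracefully. Second, colly's Wait() only blocks for collectors created with colly.Async(true); for these synchronous collectors the calls return immediately and act as future-proofing.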
@@ -195,34 +205,22 @@ func scrapeItJobBank() {
         maxJobs = 20
         baseUrl = "https://www.it-jobbank.dk"
         searchString = "https://www.it-jobbank.dk/jobsoegning/udvikling"
-        file, err = os.Create(fName)
     )
-    if err != nil {
-        log.Fatalf("Cannot create file %q: %s", fName, err)
-    }
-    defer file.Close()
 
-    // Instantiate default collector
     c := colly.NewCollector(
-        // visit only the hub
         colly.AllowedDomains("www.it-jobbank.dk", "it-jobbank.dk"),
-
-        // Cache responses to prevent multiple requests
-        // colly.CacheDir("./itjobbank_cache"),
     )
 
-    // Instantiate a new collector to visit the job details page
     detailsCollector := colly.NewCollector(
         colly.AllowedDomains("www.it-jobbank.dk", "it-jobbank.dk"),
-        // Cache responses to prevent multiple requests
-        colly.CacheDir("/app/data/itjobbank_cache"))
+        colly.CacheDir("/app/data/itjobbank_cache"),
+    )
 
-    // On every <div> element with class "card__content attribute call callback
     c.OnHTML("div[class=result]", func(e *colly.HTMLElement) {
         if jobCount >= maxJobs {
             return
         }
-        // Get the title and ensure it doesn't contain any excluded words
         title := e.ChildText("h3.job-title > a")
         for _, excludedWord := range excluded {
             if strings.Contains(strings.ToLower(title), excludedWord) {
@@ -239,19 +237,23 @@ func scrapeItJobBank() {
     })
 
    detailsCollector.OnHTML("section > div", func(e *colly.HTMLElement) {
-        // get the description as html
+        if jobCount >= maxJobs {
+            return
+        }
+
         descriptionHTML, err := e.DOM.Find("div[id=job_ad]").Html()
         if err != nil {
             log.Printf("Error getting HTML of description: %s", err)
             return
         }
-        // Check if the job is paid
         checkIfPaid(descriptionHTML)
-        // fill in the job struct
         title := e.ChildText("h1.title")
         if title == "" {
             title = e.ChildText("h1[id=jobtitle]")
         }
 
         jobDetails := job{
             Title: title,
             Logo: baseUrl + e.ChildAttr("div.company-logo > img", "src"),
@@ -266,13 +268,9 @@ func scrapeItJobBank() {
         }
         jobs = append(jobs, jobDetails)
         jobCount++
-        fmt.Println("Scraped job", jobCount)
-        if jobCount >= maxJobs {
-            jobsToJson(file, jobs, fName)
-            return
-        }
+        fmt.Printf("Scraped job %d from IT JobBank\n", jobCount)
     })
-    // Handle pagination
     c.OnHTML("a.page-link", func(e *colly.HTMLElement) {
         if jobCount >= maxJobs {
             return
@@ -283,7 +281,31 @@ func scrapeItJobBank() {
         }
     })
 
-    c.Visit(searchString)
+    // Add error handling for the initial visit
+    err := c.Visit(searchString)
+    if err != nil {
+        log.Printf("Error visiting IT JobBank: %s", err)
+        return
+    }
+
+    // Wait for all collectors to finish
+    c.Wait()
+    detailsCollector.Wait()
+
+    // Write jobs to file after scraping is complete
+    if len(jobs) > 0 {
+        file, err := os.Create(fName)
+        if err != nil {
+            log.Printf("Cannot create file %q: %s", fName, err)
+            return
+        }
+        defer file.Close()
+
+        jobsToJson(file, jobs, fName)
+        fmt.Printf("Successfully scraped %d jobs from IT JobBank\n", len(jobs))
+    } else {
+        log.Println("No jobs were scraped from IT JobBank")
+    }
 }
 
 func main() {
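Condensed, the new shape of each scraper is the pattern below. This is a minimal sketch, not the repo's code: the job struct is trimmed to two fields, an inline JSON encoder stands in for the repo's jobsToJson helper, and the colly v2 import path is an assumption (the module may pin v1).

package main

import (
    "encoding/json"
    "fmt"
    "log"
    "os"

    "github.com/gocolly/colly/v2" // assumed import path; the repo may pin v1
)

// job is a trimmed stand-in for the repo's job struct.
type job struct {
    Title string `json:"title"`
    Logo  string `json:"logo"`
}

// scrape collects jobs in memory and only touches the output file once
// scraping has finished.
func scrape(searchURL, fName string) {
    var jobs []job

    c := colly.NewCollector()
    c.OnHTML("div.card__content", func(e *colly.HTMLElement) {
        jobs = append(jobs, job{Title: e.ChildText("span")})
    })

    // Log a failed initial request and abandon this scraper only,
    // instead of exiting the whole process with log.Fatalf.
    if err := c.Visit(searchURL); err != nil {
        log.Printf("Error visiting %s: %s", searchURL, err)
        return
    }
    c.Wait() // blocks only for async collectors; harmless here

    if len(jobs) == 0 {
        log.Println("No jobs were scraped")
        return
    }

    // Create the file only after scraping succeeded, so a failed run
    // no longer truncates the previous output.
    file, err := os.Create(fName)
    if err != nil {
        log.Printf("Cannot create file %q: %s", fName, err)
        return
    }
    defer file.Close()

    // Inline stand-in for the repo's jobsToJson helper.
    if err := json.NewEncoder(file).Encode(jobs); err != nil {
        log.Printf("Cannot encode jobs to %q: %s", fName, err)
        return
    }
    fmt.Printf("Successfully scraped %d jobs\n", len(jobs))
}

func main() {
    scrape("https://thehub.io/jobs", "jobs.json")
}

Holding scraped jobs in memory and doing a single Create/Encode at the end means a network failure leaves the previous output file intact, which was the main hazard of the old create-first flow.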