forked from rannes.dev/sw-jobs-go
Compare commits (16 commits)

Commits: 9b0941a04a, ec140cbbc7, f7fcb41a87, cd675c0d6a, 333739450f, 8c9f6e2dee, 32f83e358b, 979ed97738, 8abff30b52, fd9b4b515c, 1d25f4e112, 07bb549d44, c0ec6dc003, 994ee9c732, 693d654764, 38023c1aa5
.gitignore (vendored, 4 changes)

@@ -1,2 +1,4 @@
 /thehub_cache
-/jobs.json
+/thehub.json
+/itjobbank_cache
+/it-jobbank.json
main.go (new file, 292 lines)

@@ -0,0 +1,292 @@
package main

import (
    "encoding/json"
    "fmt"
    "log"
    "os"
    "strings"
    "time"

    "github.com/gocolly/colly"
)

type job struct {
    Title       string `json:"title"`
    Logo        string `json:"logo"`
    Company     string `json:"company"`
    Location    string `json:"location"`
    Type        string `json:"type"`
    Description string `json:"description"`
    Link        string `json:"link"`
    Skills      skills `json:"skills"`
    Scraped     string `json:"scraped"`
    Source      string `json:"source"`
}

type skills struct {
    React      bool `json:"react"`
    Python     bool `json:"python"`
    Golang     bool `json:"golang"`
    Svelte     bool `json:"svelte"`
    Nextjs     bool `json:"nextjs"`
    Typescript bool `json:"typescript"`
    Tailwind   bool `json:"tailwind"`
}

// Utility functions

// Checks if a string contains any of the given keywords
func skillChecker(description string) skills {
    return skills{
        React:      strings.Contains(description, "React"),
        Python:     strings.Contains(description, "Python"),
        Golang:     strings.Contains(description, "Golang"),
        Svelte:     strings.Contains(description, "Svelte"),
        Nextjs:     strings.Contains(description, "Next.js"),
        Typescript: strings.Contains(description, "TypeScript"),
        Tailwind:   strings.Contains(description, "Tailwind"),
    }
}
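
// A hypothetical case-insensitive variant (a sketch, not part of this
// commit): strings.Contains is case-sensitive, so ads that write "react"
// or "TYPESCRIPT" slip past the matcher above. Lower-casing both sides
// once would also catch those spellings:
func skillCheckerFold(description string) skills {
    d := strings.ToLower(description)
    return skills{
        React:      strings.Contains(d, "react"),
        Python:     strings.Contains(d, "python"),
        Golang:     strings.Contains(d, "golang"),
        Svelte:     strings.Contains(d, "svelte"),
        Nextjs:     strings.Contains(d, "next.js"),
        Typescript: strings.Contains(d, "typescript"),
        Tailwind:   strings.Contains(d, "tailwind"),
    }
}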

// Encodes the jobs slice as pretty-printed JSON and writes it to the given file
func jobsToJson(file *os.File, jobs []job, fName string) {
    encoder := json.NewEncoder(file)
    encoder.SetIndent("", " ") // Pretty-print with indentation
    if err := encoder.Encode(jobs); err != nil {
        log.Fatalf("Cannot write to file %q: %s", fName, err)
    }

    fmt.Println("Job details successfully written to", fName)
}

// Reports whether a description is free of the unpaid keywords
func checkIfPaid(description string) bool {
    for _, keyword := range unpaidKeywords {
        if strings.Contains(strings.ToLower(description), keyword) {
            return false
        }
    }
    return true
}

// Classifies a position as "student" or "full time" based on keywords
func checkIfStudent(description string) string {
    for _, keyword := range studentKeywords {
        if strings.Contains(strings.ToLower(description), keyword) {
            return "student"
        }
    }
    return "full time"
}

// Keyword lists used for filtering job titles and descriptions
var (
    excluded        = []string{"senior", "lead", "founder", "cto", "vp of", "erfaren", "arkitekt", "architect", "manager", "ulønnet", "unpaid", "praktik", "cyber", "leder", "sikkerhed", "supporter"}
    unpaidKeywords  = []string{"unpaid", "praktik", "ulønnet"}
    studentKeywords = []string{"studerende", "studenter", "student", "medhjælper"}
)

func scrapeHub() {
    // declare and initialize variables
    var (
        jobs         []job
        jobCount     int
        fName        = "thehub.json"
        maxJobs      = 20
        baseUrl      = "https://thehub.io"
        searchString = "https://thehub.io/jobs?roles=frontenddeveloper&roles=fullstackdeveloper&roles=backenddeveloper&roles=devops&paid=true&countryCode=DK&sorting=newJobs"
        file, err    = os.Create(fName)
    )

    if err != nil {
        log.Fatalf("Cannot create file %q: %s", fName, err)
    }
    defer file.Close()

    // Instantiate default collector
    c := colly.NewCollector(
        // visit only the hub
        colly.AllowedDomains("www.thehub.io", "thehub.io"),
    )

    // Instantiate a new collector to visit the job details page
    detailsCollector := colly.NewCollector(
        colly.AllowedDomains("www.thehub.io", "thehub.io"),
        colly.CacheDir("./thehub_cache"),
    )

    // On every <div class="card__content"> element, call the callback
    c.OnHTML("div[class=card__content]", func(e *colly.HTMLElement) {
        if jobCount >= maxJobs {
            return
        }

        // Get the title and ensure it doesn't contain any excluded words
        title := e.ChildText("span.card-job-find-list__position")
        for _, excludedWord := range excluded {
            if strings.Contains(strings.ToLower(title), excludedWord) {
                return
            }
        }
        link := e.ChildAttr("a", "href")
        fullLink := baseUrl + link

        detailsCollector.Visit(fullLink)
    })

    detailsCollector.OnRequest(func(r *colly.Request) {
        fmt.Println("Visiting", r.URL.String())
    })

    detailsCollector.OnHTML("div[class='view-job-details']", func(e *colly.HTMLElement) {
        // Get the logo and strip the surrounding CSS from its url
        logo := e.ChildAttr("div.media-item__image", "style")
        cutLeft := "background-image:url("
        cutRight := ");"
        trimmedLogo := strings.TrimSuffix(strings.TrimPrefix(logo, cutLeft), cutRight)

        // Get the HTML of the description
        descriptionHTML, err := e.DOM.Find("content.text-block__content > span").Html()
        if err != nil {
            log.Printf("Error getting HTML of description: %s", err)
            return
        }

        // fill in the job struct
        jobDetails := job{
            Title:       e.ChildText("h2[class=view-job-details__title]"),
            Logo:        trimmedLogo,
            Company:     e.ChildText(".bullet-inline-list > a:first-child"),
            Location:    e.ChildText(".bullet-inline-list > a:nth-child(2)"),
            Type:        e.ChildText(".bullet-inline-list > a:nth-child(3)"),
            Description: descriptionHTML,
            Link:        e.Request.URL.String(),
            Skills:      skillChecker(e.ChildText("content.text-block__content > span")),
            Scraped:     time.Now().String(),
            Source:      baseUrl,
        }
        jobs = append(jobs, jobDetails)
        jobCount++
        fmt.Println("Scraped job", jobCount)
        if jobCount >= maxJobs {
            jobsToJson(file, jobs, fName)
            return
        }
    })
    // Handle pagination
    c.OnHTML("a.page-link", func(e *colly.HTMLElement) {
        nextPage := e.Attr("href")
        if nextPage != "" {
            fullNextPage := baseUrl + nextPage
            e.Request.Visit(fullNextPage)
        }
    })

    c.Visit(searchString)

    // Flush results if the search ran out of listings before maxJobs was reached
    if jobCount < maxJobs {
        jobsToJson(file, jobs, fName)
    }
}
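
// A hypothetical rate-limit helper (a sketch, not part of this commit):
// neither collector throttles its requests, and colly supports per-domain
// limits via Collector.Limit. Applying something like this to a collector
// before visiting would space the detail-page fetches out:
func withPoliteness(c *colly.Collector) error {
    // One request at a time, with up to a second of random delay between them
    return c.Limit(&colly.LimitRule{
        DomainGlob:  "*",
        Parallelism: 1,
        RandomDelay: time.Second,
    })
}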

func scrapeItJobBank() {
    // declare and initialize variables
    var (
        jobs         []job
        jobCount     int
        fName        = "it-jobbank.json"
        maxJobs      = 20
        baseUrl      = "https://www.it-jobbank.dk"
        searchString = "https://www.it-jobbank.dk/jobsoegning/udvikling"
        file, err    = os.Create(fName)
    )
    if err != nil {
        log.Fatalf("Cannot create file %q: %s", fName, err)
    }
    defer file.Close()

    // Instantiate default collector
    c := colly.NewCollector(
        // visit only it-jobbank
        colly.AllowedDomains("www.it-jobbank.dk", "it-jobbank.dk"),

        // Cache responses to prevent multiple requests
        // colly.CacheDir("./itjobbank_cache"),
    )

    // Instantiate a new collector to visit the job details page
    detailsCollector := colly.NewCollector(
        colly.AllowedDomains("www.it-jobbank.dk", "it-jobbank.dk"),
        // Cache responses to prevent multiple requests
        colly.CacheDir("./itjobbank_cache"))

    // On every <div class="result"> element, call the callback
    c.OnHTML("div[class=result]", func(e *colly.HTMLElement) {
        if jobCount >= maxJobs {
            return
        }
        // Get the title and ensure it doesn't contain any excluded words
        title := e.ChildText("h3.job-title > a")
        for _, excludedWord := range excluded {
            if strings.Contains(strings.ToLower(title), excludedWord) {
                return
            }
        }
        fullLink := e.ChildAttr("h3.job-title > a", "href")

        detailsCollector.Visit(fullLink)
    })

    detailsCollector.OnRequest(func(r *colly.Request) {
        fmt.Println("Visiting", r.URL.String())
    })

    detailsCollector.OnHTML("section > div", func(e *colly.HTMLElement) {
        // get the description as html
        descriptionHTML, err := e.DOM.Find("div[id=job_ad]").Html()
        if err != nil {
            log.Printf("Error getting HTML of description: %s", err)
            return
        }
        // Skip the job if it is unpaid
        if !checkIfPaid(descriptionHTML) {
            return
        }
        // fill in the job struct
        title := e.ChildText("h1.title")
        if title == "" {
            title = e.ChildText("h1[id=jobtitle]")
        }
        jobDetails := job{
            Title:       title,
            Logo:        baseUrl + e.ChildAttr("div.company-logo > img", "src"),
            Company:     e.ChildText("p.published"),
            Location:    e.ChildText("div.job-location > p.caption"),
            Type:        checkIfStudent(descriptionHTML),
            Description: descriptionHTML,
            Link:        e.Request.URL.String(),
            Skills:      skillChecker(descriptionHTML),
            Scraped:     time.Now().String(),
            Source:      baseUrl,
        }
        jobs = append(jobs, jobDetails)
        jobCount++
        fmt.Println("Scraped job", jobCount)
        if jobCount >= maxJobs {
            jobsToJson(file, jobs, fName)
            return
        }
    })
    // Handle pagination
    c.OnHTML("a.page-link", func(e *colly.HTMLElement) {
        if jobCount >= maxJobs {
            return
        }
        nextPage := e.Attr("href")
        if nextPage != "" {
            e.Request.Visit(nextPage)
        }
    })

    c.Visit(searchString)

    // Flush results if the search ran out of listings before maxJobs was reached
    if jobCount < maxJobs {
        jobsToJson(file, jobs, fName)
    }
}
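
// A hypothetical concurrent runner (a sketch, not part of this commit):
// the two scrapers write to separate files and share no mutable state, so
// main could run them in parallel with a WaitGroup. This would require
// adding "sync" to the imports above.
func runConcurrently() {
    var wg sync.WaitGroup
    wg.Add(2)
    go func() { defer wg.Done(); scrapeHub() }()
    go func() { defer wg.Done(); scrapeItJobBank() }()
    wg.Wait()
}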

func main() {
    scrapeHub()
    scrapeItJobBank()
}
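
Since both scrapers emit the same shape of JSON array, reading a generated file back takes only a few lines. A minimal consumer sketch (an illustration, not part of this commit; it assumes the scraper has already produced thehub.json):

package main

import (
    "encoding/json"
    "fmt"
    "log"
    "os"
)

func main() {
    f, err := os.Open("thehub.json")
    if err != nil {
        log.Fatal(err)
    }
    defer f.Close()

    // Each output file holds one JSON array of job objects, so a slice of
    // maps decodes it without importing the scraper's structs.
    var jobs []map[string]any
    if err := json.NewDecoder(f).Decode(&jobs); err != nil {
        log.Fatal(err)
    }
    for _, j := range jobs {
        fmt.Printf("%v | %v (%v)\n", j["title"], j["company"], j["location"])
    }
}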

readme.md (11 changes)

@@ -1,6 +1,11 @@
-# The Hub Scraper
+# IT jobs scraper
 
-This is a simple scraper that extracts job details from the [The Hub](https://thehub.io) website.
+This is a simple scraper that extracts job details from [The Hub](https://thehub.io) and it-jobbank.
 
+## Filtering
+
+The scraper filters out titles containing keywords like "senior" and "architect", as I wrote it for entry- and mid-level roles. It also filters out unpaid positions: The Hub search only requests paid roles, and it-jobbank listings are checked against a keyword list.
+
 ## Usage
 
@@ -10,4 +15,4 @@ To run the scraper, simply execute the following command:
 go run scraper.go
 ```
 
-The scraper will create a `jobs.json` file in the current directory, which contains a list of job details in JSON format.
+The scraper will create `thehub.json` and `it-jobbank.json` files in the current directory, each containing a list of job details in JSON format. It caches the pages, so it is very light on resources and requests.

run-scrapers.sh (new file, 3 lines)

@@ -0,0 +1,3 @@
#!/bin/bash
cd /home/admin/sw-jobs-go
go run go-scraper >> scraper.log 2>&1
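
The log redirection suggests the script is meant for unattended runs; a crontab entry along these lines (an assumption, not part of this commit; the schedule is illustrative) would refresh the feeds every morning:

0 6 * * * /home/admin/sw-jobs-go/run-scrapers.sh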

scraper.go (deleted, 130 lines)

@@ -1,130 +0,0 @@
package main

import (
    "encoding/json"
    "fmt"
    "log"
    "os"
    "strings"

    "github.com/gocolly/colly"
)

type job struct {
    Title       string `json:"title"`
    Logo        string `json:"logo"`
    Company     string `json:"company"`
    Location    string `json:"location"`
    Type        string `json:"type"`
    Description string `json:"description"`
    Link        string `json:"link"`
    Skills      skills `json:"skills"`
}

type skills struct {
    React      bool `json:"react"`
    Python     bool `json:"python"`
    Golang     bool `json:"golang"`
    Svelte     bool `json:"svelte"`
    Nextjs     bool `json:"nextjs"`
    Typescript bool `json:"typescript"`
}

func skillChecker(description string) skills {
    return skills{
        React:      strings.Contains(description, "React"),
        Python:     strings.Contains(description, "Python"),
        Golang:     strings.Contains(description, "Go"),
        Svelte:     strings.Contains(description, "Svelte"),
        Nextjs:     strings.Contains(description, "Next.js"),
        Typescript: strings.Contains(description, "TypeScript"),
    }
}

// Slice to store job details
var jobs []job

func main() {
    fName := "jobs.json"
    file, err := os.Create(fName)
    if err != nil {
        log.Fatalf("Cannot create file %q: %s", fName, err)
    }
    defer file.Close()
    baseUrl := "https://thehub.io"
    searchString := "https://thehub.io/jobs?roles=frontenddeveloper&roles=fullstackdeveloper&roles=backenddeveloper&roles=devops&paid=true&countryCode=DK&sorting=newJobs"
    // Instantiate default collector
    c := colly.NewCollector(
        // visit only the hub
        colly.AllowedDomains("www.thehub.io", "thehub.io"),

        // Cache responses to prevent multiple requests
        colly.CacheDir("./thehub_cache"),
    )

    // Slice of excluded words in the job titles
    excluded := []string{"senior", "lead"}
    // Instantiate a new collector to visit the job details page
    detailsCollector := c.Clone()
    // On every <div class="card__content"> element, call the callback
    c.OnHTML("div[class=card__content]", func(e *colly.HTMLElement) {
        // Get the title and ensure it doesn't contain any excluded words
        title := e.ChildText("span.card-job-find-list__position")
        for _, excludedWord := range excluded {
            if strings.Contains(strings.ToLower(title), excludedWord) {
                return
            }
        }
        link := e.ChildAttr("a", "href")
        fullLink := baseUrl + link

        detailsCollector.Visit(fullLink)
    })

    detailsCollector.OnRequest(func(r *colly.Request) {
        fmt.Println("Visiting", r.URL.String())
    })

    detailsCollector.OnHTML("div.view-job-details", func(e *colly.HTMLElement) {
        // Get logo and trim the url
        logo := e.ChildAttr("div.media-item__image", "style")
        cutLeft := "background-image:url("
        cutRight := ");"
        trimmedLogo := strings.Trim(logo, cutLeft+cutRight)

        // Get company name
        jobDetails := job{
            Title:       e.ChildText("h2[class=view-job-details__title]"),
            Logo:        trimmedLogo,
            Company:     e.ChildText(".bullet-inline-list > a:first-child"),
            Location:    e.ChildText(".bullet-inline-list > a:nth-child(2)"),
            Type:        e.ChildText(".bullet-inline-list > a:nth-child(3)"),
            Description: e.ChildText("content.text-block__content > span"),
            Link:        e.Request.URL.String(),
            Skills:      skillChecker(e.ChildText("content.text-block__content > span")),
        }
        jobs = append(jobs, jobDetails)
    })
    // Handle pagination
    c.OnHTML("a.page-link", func(e *colly.HTMLElement) {
        nextPage := e.Attr("href")
        if nextPage != "" {
            fullNextPage := baseUrl + nextPage
            fmt.Println("Visiting next page:", fullNextPage)
            e.Request.Visit(fullNextPage)
        }
    })

    c.Visit(searchString)

    // Encode jobs slice to JSON
    encoder := json.NewEncoder(file)
    encoder.SetIndent("", " ") // Pretty-print with indentation
    if err := encoder.Encode(jobs); err != nil {
        log.Fatalf("Cannot write to file %q: %s", fName, err)
    }

    fmt.Println("Job details successfully written to", fName)
}