-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathjobup_scraper.py
More file actions
87 lines (78 loc) · 3.67 KB
/
jobup_scraper.py
File metadata and controls
87 lines (78 loc) · 3.67 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
from bs4 import BeautifulSoup
from playwright.sync_api import sync_playwright
import traceback
def add_value(data, key, value):
    """Insert *value* under *key* in *data*, accumulating repeats into a list.

    Behavior:
      - key absent: store value directly.
      - key present with a scalar: replace it with ``[old, value]``.
      - key present with a list: append value.

    Mutates *data* in place and returns it so calls can be chained
    (``my_dict = add_value(my_dict, ...)`` as the caller does).
    """
    if key not in data:
        data[key] = value
    elif isinstance(data[key], list):
        # Append in place instead of rebuilding the list each call.
        data[key].append(value)
    else:
        data[key] = [data[key], value]
    return data
def jobup_scraper(url):
    """Scrape job offers from a jobup.ch search-results page (French UI).

    Walks every results page starting at *url*, clicking each job link to load
    its detail pane, and collects per-job data keyed by the job id extracted
    from the link href. Each dict value is built via add_value and ends up as
    a list: [Excel HYPERLINK formula, company name, location, description].

    On any exception the partial results are discarded and a single
    {'ERROR in jobup script': <message>} entry is returned instead; the full
    traceback is printed for debugging.

    Returns:
        dict: job-id -> [hyperlink formula, company, location, description],
              or the single-entry error dict described above.
    """
    with sync_playwright() as playwright:
        my_dict = {}
        browser = playwright.chromium.launch(headless=True)
        # Desktop Chrome user agent — presumably to avoid bot/consent
        # variations served to headless defaults; TODO confirm it is needed.
        context = browser.new_context(
            user_agent=(
                "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
                "AppleWebKit/537.36 (KHTML, like Gecko) "
                "Chrome/120.0.0.0 Safari/537.36"
            )
        )
        page = context.new_page()
        page.goto(url)
        # Dismiss the cookie/consent dialog; assumes a button labeled "ok"
        # is always present — this will raise if the banner does not appear.
        page.get_by_role("button", name="ok", exact=True).click()
        try:
            while True:  # one iteration per results page, until no "Suivant" link
                page.wait_for_selector('a[data-cy="job-link"]')
                html = page.content()
                soup = BeautifulSoup(html, "html.parser")
                # French aria-label — breaks if the page language changes.
                job_list = soup.find("div", {"aria-label": "Liste d'emplois"})
                if not job_list:
                    raise Exception("No job found")
                # Parallel lists scraped from the static snapshot; assumed to
                # line up index-for-index with the live job-link locator below.
                all_job_name = job_list.find_all("div", {"class": "mb_s8"})
                all_company_name = job_list.find_all("div", {"class": "d_grid ai_center gap_s12 grid-tc_[auto_1fr] mt_s12"})
                all_job_id = job_list.find_all("a", {"data-cy": "job-link"})
                joblinks = page.locator('a[data-cy="job-link"]')
                count = joblinks.count()
                for i in range(count):
                    # Clicking a link loads its detail pane in-page.
                    joblinks.nth(i).click()
                    page.wait_for_selector('div[data-cy="vacancy-description"]')
                    html = page.content()
                    soup = BeautifulSoup(html, "html.parser")
                    # Pick the description container whose class is NOT
                    # "mb_s24", then strip its direct "mb_s24" children
                    # (presumably boilerplate/apply widgets — verify on site).
                    div_description = soup.find("div", {"data-cy": "vacancy-description"}, class_=lambda s: s != "mb_s24")
                    div_des = div_description.find_all("div", class_ = "mb_s24", recursive=False)
                    for div in div_des:
                        div.decompose()
                    # Concatenate the text of the remaining children.
                    description = ""
                    for d in div_description:
                        description += d.text.strip()
                    location = soup.find("p", {"class": "mb_s12 lastOfType:mb_s0 textStyle_body2 pt_s8"})
                    if location is None:
                        # Skip entries with no location paragraph (layout mismatch).
                        continue
                    location = location.text.strip()
                    href = all_job_id[i].get("href")
                    # Take the 36 chars before the trailing char of the href —
                    # assumes the id is a UUID followed by one extra character
                    # (e.g. "/"); TODO confirm against a real href.
                    id_ = href[-37:len(href) - 1]
                    job_link = f"https://www.jobup.ch/fr/emplois/detail/{id_}"
                    name = all_job_name[i].text.strip()
                    # Strip double quotes so the HYPERLINK formula stays valid.
                    name = name.replace('"', '')
                    company_name = all_company_name[i].text.strip()
                    # First write sets the Excel HYPERLINK formula; the three
                    # add_value calls turn the entry into a 4-element list.
                    my_dict.update({id_: f'=HYPERLINK("{job_link}","{name}")'})
                    my_dict = add_value(my_dict, id_, company_name)
                    my_dict = add_value(my_dict, id_, location)
                    my_dict = add_value(my_dict, id_, description)
                    # Fixed pause between detail clicks — presumably rate
                    # limiting / letting the pane settle.
                    page.wait_for_timeout(1000)
                # French pagination label — breaks if the page language changes.
                next = page.get_by_role("link", name="Suivant")
                if not next.is_visible():
                    break
                next.click()
                page.wait_for_timeout(1000)
        except Exception as e:
            # Best-effort boundary: discard partial results, surface the error
            # as the sole dict entry, and print the traceback for diagnosis.
            my_dict.clear()
            my_dict.update({'ERROR in jobup script': str(e)})
            print(traceback.format_exc())
        finally:
            page.close()
        return my_dict