fix useragent and better random useragents

This commit is contained in:
Hendrik Schutter 2023-01-11 18:09:03 +01:00
parent 3e36104d9f
commit fb921423aa
2 changed files with 6 additions and 6 deletions

View File

@@ -15,8 +15,7 @@ def get_random_user_agent():
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.111 Safari/537.36",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/600.1.17 (KHTML, like Gecko) Version/7.1 Safari/537.85.10",\
"Mozilla/5.0 (Linux; Android 10.1; TV BOX) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36 OPR/58.2.2878.53403",\
"Mozilla/5.0 (Windows NT 6.3; WOW64; rv:33.0) Gecko/20100101 Firefox/33.0",\
"Dalvik/2.1.0 (Linux; U; Android 10; M2006C3MI MIUI/V12.0.15.0.QCRINXM)"\
"Mozilla/5.0 (Windows NT 6.3; WOW64; rv:33.0) Gecko/20100101 Firefox/33.0"\
]
return random.choice(uastrings)
return random.choice(uastrings)+str(random.randrange(255))

View File

@@ -26,7 +26,7 @@ def scrape_listing(url):
#print ("Web Page: ", url)
soup = make_soup(url)
#print(soup)
#print(soup.find("div", class_="vim x-item-title").span.text)
#print(soup.find("span", itemprop="price")["content"])
#print(soup.find("img", itemprop="image")["src"])
@@ -40,6 +40,7 @@ def scrape_listing(url):
return listing
if __name__ == "__main__":
listing = scrape_listing("https://www.ebay.de/itm/162861653490")
print(listing)
while(1):
listing = scrape_listing("https://www.ebay.de/itm/165841583057")
print(listing)