search eBay and return the product id

Hendrik Schutter 2022-05-31 09:57:03 +02:00
parent c5d71574a9
commit 7add4df512
3 changed files with 69 additions and 0 deletions


@@ -0,0 +1,8 @@
{
    "folders": [
        {
            "path": "."
        }
    ],
    "settings": {}
}

search_db.json Normal file

File diff suppressed because one or more lines are too long

search_listing.py Normal file

@@ -0,0 +1,60 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
""" Author: Hendrik Schutter, mail@hendrikschutter.com
Date of creation: 2022/05/31
Date of last modification: 2022/05/31
"""
from bs4 import BeautifulSoup
import datetime
from tinydb import TinyDB, Query
import urllib3
import sys
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
def make_soup(url):
http = urllib3.PoolManager()
r = http.request("GET", url)
return BeautifulSoup(r.data,'lxml')
def search_listing(search_term, db, max_pages):
#sort by newest listing
#display page 1 (first)
#EU only
#60 listings in one result page
page_counter = 1
last_result_page = False
while not last_result_page:
result_page_added = 0
url = 'https://www.ebay.de/sch/i.html?_from=R40&_nkw=' + search_term + '&_sop=10&_pgn='+ str(page_counter) + '&LH_PrefLoc=3&_ipg=60'
#print ("Web Page: ", url)
soup = make_soup(url)
results = soup.find_all("li", class_="s-item s-item__pl-on-bottom s-item--watch-at-corner")
for result in results:
try:
rec = {
'epid': result.div.div.div.a['href'].split("?", 1)[0],
}
#check if listing is allready stored
if not db.search(Query().epid == rec["epid"]):
result_page_added += 1
db.insert(rec)
except (AttributeError, KeyError) as ex:
pass
if (result_page_added == 0) or (page_counter == max_pages):
last_result_page = True
page_counter += 1
if __name__ == "__main__":
search_db = TinyDB("search_db.json")
search_listing("mainboard", search_db, max_pages = 4)
print(search_db.all())
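
Usage note: a minimal sketch of how the collected product ids could be read back from search_db.json with TinyDB. The record layout matches what search_listing() inserts above; the example item URL in the query is a hypothetical placeholder, not a real listing.

#!/usr/bin/env python3
# minimal sketch: read the product ids collected by search_listing.py
from tinydb import TinyDB, Query

db = TinyDB("search_db.json")

# every record holds a single 'epid' field: the listing URL without query parameters
for record in db.all():
    print(record["epid"])

# check whether a specific listing is already known (placeholder URL)
known = db.search(Query().epid == "https://www.ebay.de/itm/123456789012")
print("already stored" if known else "new listing")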