basic error handling
parent 7f4fedd8fd
commit 22fafbd097

@@ -20,6 +20,7 @@ exporter_prefix = "gas_"
 stations_ids = (20153, 21907, 183433, 159416, 30856, 16362, 12634)
 
 request_count = 0
+scrape_healthy = True
 startTime = datetime.now()
 station_metrics = list()
 mutex = threading.Lock()
@@ -37,6 +38,7 @@ class RequestHandler(BaseHTTPRequestHandler):
         self.end_headers()
         self.wfile.write(bytes(exporter_prefix + "expoter_duration_seconds_sum " + str(int((datetime.now() - startTime).total_seconds())) + "\n", "utf-8"))
         self.wfile.write(bytes(exporter_prefix + "exporter_request_count " + str(request_count) + "\n", "utf-8"))
+        self.wfile.write(bytes(exporter_prefix + "exporter_scrape_healthy " + str(scrape_healthy) + "\n", "utf-8"))
 
         for metric in station_metrics:
             #print(metric)
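
A side note on the new health line: str(scrape_healthy) renders as True/False, while the Prometheus text format only parses numeric sample values. A minimal sketch of a numeric rendering (a hypothetical variant, not part of this commit):

# Hypothetical variant: render the boolean health flag as 1/0 so the
# Prometheus text-format parser accepts the sample value.
exporter_prefix = "gas_"
scrape_healthy = True

line = exporter_prefix + "exporter_scrape_healthy " + str(int(scrape_healthy)) + "\n"
print(line, end="")  # -> gas_exporter_scrape_healthy 1
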
@@ -69,15 +71,22 @@ def update_metrics():
         print("Scrape")
         global station_metrics
         global mutex
+        global scrape_healthy
         mutex.acquire()
+        scrape_healthy = True
         station_metrics.clear()
 
         for station_id in stations_ids:
-            station_data = station_scraper.scrape_station(station_id)
-            #print(station_data)
-            for fuel in station_data['fuels']:
-                #print(fuel)
-                station_metrics.append(station_data['station_metric_basename'] + "_" + fuel['name'] + " " + str(fuel['price']))
+            try:
+                station_data = station_scraper.scrape_station(station_id)
+                #print(station_data)
+                for fuel in station_data['fuels']:
+                    #print(fuel)
+                    station_metrics.append(station_data['station_metric_basename'] + "_" + fuel['name'] + " " + str(fuel['price']))
+            except Exception as ex:
+                print("scrape error: " + str(ex))
+                scrape_healthy = False
+                pass
         mutex.release()
         time.sleep(300)
 
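
For readability, the patched loop as a standalone sketch: each station is now scraped inside its own try/except, so one failing station flags the scrape as unhealthy but no longer aborts the whole pass. scrape_station() is stubbed in here to stand in for station_scraper.scrape_station(), and update_metrics_once() is a hypothetical name for a single pass without the sleep:

import threading

stations_ids = (20153, 21907)
station_metrics = list()
scrape_healthy = True
mutex = threading.Lock()

def scrape_station(station_id):
    # Stub standing in for station_scraper.scrape_station()
    if station_id == 21907:
        raise FileNotFoundError("http error code 404 for ...")
    return {'station_metric_basename': 'gas_station_' + str(station_id),
            'fuels': [{'name': 'e10', 'price': 1.799}]}

def update_metrics_once():
    global scrape_healthy
    mutex.acquire()
    scrape_healthy = True
    station_metrics.clear()
    for station_id in stations_ids:
        try:
            station_data = scrape_station(station_id)
            for fuel in station_data['fuels']:
                station_metrics.append(station_data['station_metric_basename']
                                       + "_" + fuel['name'] + " " + str(fuel['price']))
        except Exception as ex:
            # one bad station no longer aborts the pass; the flag records it
            print("scrape error: " + str(ex))
            scrape_healthy = False
    mutex.release()

update_metrics_once()
print(station_metrics, scrape_healthy)  # metrics for 20153 only, flag False
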
@@ -21,6 +21,8 @@ def make_soup(url):
     #print(user_agent)
     http = urllib3.PoolManager(10, headers=user_agent)
     r = http.request("GET", url)
+    if (r.status != 200):
+        raise FileNotFoundError("http error code " + str(r.status) + " for " + url)
     return BeautifulSoup(r.data,'lxml')
 
 def scrape_station(station_id):
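
This last hunk patches make_soup() in the scraper module (imported as station_scraper above). Its effect in isolation: a non-200 response now raises instead of handing an error page to the parser, which is what lets the except-block in update_metrics() mark the scrape as unhealthy. A minimal sketch (urllib3 assumed installed; httpbin.org used purely for illustration):

import urllib3

def fetch(url):
    # Same pattern as make_soup(): turn HTTP error codes into an exception
    http = urllib3.PoolManager(10)
    r = http.request("GET", url)
    if r.status != 200:
        raise FileNotFoundError("http error code " + str(r.status) + " for " + url)
    return r.data

try:
    fetch("https://httpbin.org/status/404")
except FileNotFoundError as ex:
    print("scrape error: " + str(ex))  # -> scrape error: http error code 404 for ...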