Fix broken database connection
src/main.py (26 lines changed: 16 additions, 10 deletions)
@@ -45,7 +45,6 @@ class ProxyScrapingService:
 
     def __init__(self):
         """Initialize the proxy scraping service."""
-        self.db = DatabaseManager()
        self.validator = ProxyValidator()
        self.scheduler = BlockingScheduler(timezone=pytz.UTC)
 
@@ -56,6 +55,9 @@ class ProxyScrapingService:
         logger.info(f"Starting proxy scraping job at {job_start}")
         logger.info("=" * 80)
 
+        # Create database connection for this job
+        db = DatabaseManager()
+
         try:
             # Step 1: Scrape proxies from sources
             logger.info("Step 1: Scraping proxies from sources...")
@@ -95,7 +97,7 @@ class ProxyScrapingService:
             inserted_count = 0
 
             for proxy in validated_proxies:
-                if self.db.insert_proxy(proxy):
+                if db.insert_proxy(proxy):
                     inserted_count += 1
 
             logger.info(
@@ -105,12 +107,14 @@ class ProxyScrapingService:
 
             # Step 5: Display statistics
             logger.info("Step 5: Database statistics...")
-            stats = self.db.get_stats()
+            stats = db.get_stats()
             self._display_stats(stats)
 
         except Exception as e:
             logger.error(f"Error during scraping job: {e}", exc_info=True)
         finally:
+            # Close database connection
+            db.close()
             job_end = datetime.now()
             duration = (job_end - job_start).total_seconds()
             logger.info("=" * 80)
@@ -125,10 +129,13 @@ class ProxyScrapingService:
         logger.info(f"Starting proxy validation job at {job_start}")
         logger.info("=" * 80)
 
+        # Create database connection for this job
+        db = DatabaseManager()
+
         try:
             # Step 1: Get all proxies from database
             logger.info("Step 1: Fetching all proxies from database...")
-            all_proxies = self.db.get_all_proxies()
+            all_proxies = db.get_all_proxies()
 
             if not all_proxies:
                 logger.warning("No proxies found in database to validate")
@@ -153,7 +160,7 @@ class ProxyScrapingService:
 
                 if is_active and response_time_ms is not None:
                     # Proxy is working - update status in database
-                    if self.db.update_proxy_status(
+                    if db.update_proxy_status(
                         ip_address, port, protocol, True, response_time_ms
                     ):
                         validated_count += 1
@@ -162,7 +169,7 @@ class ProxyScrapingService:
                     )
                 else:
                     # Proxy failed - delete from database
-                    if self.db.delete_proxy(ip_address, port, protocol):
+                    if db.delete_proxy(ip_address, port, protocol):
                         deleted_count += 1
                         logger.info(
                             f"Proxy {ip_address}:{port} failed validation - removed from database"
@@ -175,12 +182,14 @@ class ProxyScrapingService:
 
             # Step 3: Display statistics
             logger.info("Step 3: Database statistics...")
-            stats = self.db.get_stats()
+            stats = db.get_stats()
             self._display_stats(stats)
 
         except Exception as e:
             logger.error(f"Error during validation job: {e}", exc_info=True)
         finally:
+            # Close database connection
+            db.close()
             job_end = datetime.now()
             duration = (job_end - job_start).total_seconds()
             logger.info("=" * 80)
@@ -291,10 +300,8 @@ class ProxyScrapingService:
         except (KeyboardInterrupt, SystemExit):
             logger.info("Scheduler shutdown requested")
             self.scheduler.shutdown()
-            self.db.close()
         except Exception as e:
             logger.error(f"Scheduler error: {e}", exc_info=True)
-            self.db.close()
             sys.exit(1)
 
 
@@ -322,7 +329,6 @@ def main():
     if settings.development:
         logger.info("Running in DEVELOPMENT mode - executing scraping job and exiting")
         service.run_immediate()
-        service.db.close()
         logger.info("Development run completed. Exiting.")
     else:
         # Start scheduler for recurring jobs (production mode)
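
The change replaces the shared self.db connection with a fresh DatabaseManager per scheduled job, closed in each job's finally block. The same open/close pair could be factored into a context manager so future jobs cannot forget the close. A minimal sketch, assuming DatabaseManager() and db.close() behave as in the diff above; the helper name job_database and the import path are hypothetical:

# Sketch of the per-job connection pattern this commit adopts, wrapped in a
# context manager so the connection is closed even when the job raises.
from contextlib import contextmanager

from database import DatabaseManager  # assumed import path, not shown in the diff

@contextmanager
def job_database():
    """Yield a fresh DatabaseManager for one job and always close it."""
    db = DatabaseManager()
    try:
        yield db
    finally:
        db.close()

# Usage, mirroring the scraping job above:
#     with job_database() as db:
#         for proxy in validated_proxies:
#             if db.insert_proxy(proxy):
#                 inserted_count += 1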