Fix broken database connection

LeoMortari
2025-12-08 09:18:57 -03:00
parent 04e8d7bc60
commit 8c65f2dc7b


@@ -45,7 +45,6 @@ class ProxyScrapingService:
     def __init__(self):
         """Initialize the proxy scraping service."""
-        self.db = DatabaseManager()
         self.validator = ProxyValidator()
         self.scheduler = BlockingScheduler(timezone=pytz.UTC)
@@ -56,6 +55,9 @@ class ProxyScrapingService:
logger.info(f"Starting proxy scraping job at {job_start}") logger.info(f"Starting proxy scraping job at {job_start}")
logger.info("=" * 80) logger.info("=" * 80)
# Create database connection for this job
db = DatabaseManager()
try: try:
# Step 1: Scrape proxies from sources # Step 1: Scrape proxies from sources
logger.info("Step 1: Scraping proxies from sources...") logger.info("Step 1: Scraping proxies from sources...")
@@ -95,7 +97,7 @@ class ProxyScrapingService:
             inserted_count = 0
             for proxy in validated_proxies:
-                if self.db.insert_proxy(proxy):
+                if db.insert_proxy(proxy):
                     inserted_count += 1
 
             logger.info(
@@ -105,12 +107,14 @@ class ProxyScrapingService:
             # Step 5: Display statistics
             logger.info("Step 5: Database statistics...")
-            stats = self.db.get_stats()
+            stats = db.get_stats()
             self._display_stats(stats)
 
         except Exception as e:
             logger.error(f"Error during scraping job: {e}", exc_info=True)
 
         finally:
+            # Close database connection
+            db.close()
+
             job_end = datetime.now()
             duration = (job_end - job_start).total_seconds()
             logger.info("=" * 80)
@@ -125,10 +129,13 @@ class ProxyScrapingService:
logger.info(f"Starting proxy validation job at {job_start}") logger.info(f"Starting proxy validation job at {job_start}")
logger.info("=" * 80) logger.info("=" * 80)
# Create database connection for this job
db = DatabaseManager()
try: try:
# Step 1: Get all proxies from database # Step 1: Get all proxies from database
logger.info("Step 1: Fetching all proxies from database...") logger.info("Step 1: Fetching all proxies from database...")
all_proxies = self.db.get_all_proxies() all_proxies = db.get_all_proxies()
if not all_proxies: if not all_proxies:
logger.warning("No proxies found in database to validate") logger.warning("No proxies found in database to validate")
@@ -153,7 +160,7 @@ class ProxyScrapingService:
                 if is_active and response_time_ms is not None:
                     # Proxy is working - update status in database
-                    if self.db.update_proxy_status(
+                    if db.update_proxy_status(
                         ip_address, port, protocol, True, response_time_ms
                     ):
                         validated_count += 1
@@ -162,7 +169,7 @@ class ProxyScrapingService:
                     )
                 else:
                     # Proxy failed - delete from database
-                    if self.db.delete_proxy(ip_address, port, protocol):
+                    if db.delete_proxy(ip_address, port, protocol):
                         deleted_count += 1
                         logger.info(
                             f"Proxy {ip_address}:{port} failed validation - removed from database"
@@ -175,12 +182,14 @@ class ProxyScrapingService:
             # Step 3: Display statistics
             logger.info("Step 3: Database statistics...")
-            stats = self.db.get_stats()
+            stats = db.get_stats()
             self._display_stats(stats)
 
         except Exception as e:
             logger.error(f"Error during validation job: {e}", exc_info=True)
 
         finally:
+            # Close database connection
+            db.close()
+
             job_end = datetime.now()
             duration = (job_end - job_start).total_seconds()
             logger.info("=" * 80)
@@ -291,10 +300,8 @@ class ProxyScrapingService:
         except (KeyboardInterrupt, SystemExit):
             logger.info("Scheduler shutdown requested")
             self.scheduler.shutdown()
-            self.db.close()
         except Exception as e:
             logger.error(f"Scheduler error: {e}", exc_info=True)
-            self.db.close()
             sys.exit(1)
@@ -322,7 +329,6 @@ def main():
     if settings.development:
         logger.info("Running in DEVELOPMENT mode - executing scraping job and exiting")
         service.run_immediate()
-        service.db.close()
         logger.info("Development run completed. Exiting.")
     else:
         # Start scheduler for recurring jobs (production mode)
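
For reference, the pattern this commit moves to can be summarized as follows. Previously the service held a single DatabaseManager opened in __init__ and shared across every scheduled run, so a connection dropped between runs broke all subsequent jobs; now each job opens its own connection and releases it in finally. This is a minimal sketch, assuming DatabaseManager exposes the insert_proxy() and close() methods seen in the diff; the stub bodies below are placeholders, not the project's real implementation.

    import logging

    logger = logging.getLogger(__name__)

    class DatabaseManager:
        """Stand-in for the project's DatabaseManager (assumed interface)."""

        def __init__(self):
            # Hypothetical: a fresh connection would be opened here.
            self.conn = object()

        def insert_proxy(self, proxy) -> bool:
            return True  # placeholder

        def close(self) -> None:
            pass  # placeholder: release the connection

    def run_job(proxies):
        # Open a fresh connection per job run, instead of reusing one
        # created in __init__ that can go stale between scheduled runs.
        db = DatabaseManager()
        try:
            for proxy in proxies:
                db.insert_proxy(proxy)
        finally:
            # Always release the connection, even if the job raises.
            db.close()
            logger.info("Database connection closed")

If DatabaseManager also implemented __enter__/__exit__ (the diff does not show that it does), the try/finally in each job could collapse into a with block, which achieves the same per-job lifecycle with less ceremony.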