# Commit 3fe0726: Deploy Signal Generator app
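"""Entry point for the Signal Generator app.

Streams real-time news through AlpacaNewsFeedAdapter, enqueues each item into
NewsProcessor (timing it from enqueue to completion with Timer), and triggers
a CalendarProcessor scan once per calendar day.
"""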
import time
import logging
from datetime import date

from news_scraper.services.news_processor import NewsProcessor
from news_scraper.adapters.alpaca_ws import AlpacaNewsFeedAdapter
from news_scraper.services.calendar_processor import CalendarProcessor
from news_scraper.helpers.timer import Timer

def main():
    # Configure logging
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s - %(levelname)s - %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S'
    )

    # Initialize the timer
    timer = Timer("logs/news_processing_times.log")

    # Initialize the news processor
    news_processor = NewsProcessor()

    # Register an async callback for news processing
    async def news_callback(news_item):
        # Stop timing when the news item is processed
        timer.stop_timing(news_item)
        print(f"[PROCESSOR] [FUNC] Processing | {news_item.headline if hasattr(news_item, 'headline') else ''}")

    news_processor.register_callback(news_callback)

    # Start processing news items
    news_processor.start_processing()
    # Callback that feeds incoming news items into the processing queue
    def print_news(news_item):
        # Start timing when the news item enters the queue
        timer.start_timing(news_item)
        print(f"[PROCESSOR] [QUEUE] News item | {news_item.headline}")
        news_processor.add_news(news_item)

    # Initialize the AlpacaNewsFeedAdapter and register the callback
    alpaca_adapter = AlpacaNewsFeedAdapter()
    alpaca_adapter.register_callback(print_news)
    # Initialize CalendarProcessor
    calendar_processor = CalendarProcessor()
    last_run_date = None
    # Keep the main thread alive to receive messages
    try:
        while True:
            # Run the calendar processor once per day
            current_date = date.today()
            if last_run_date != current_date:
                calendar_processor.run_daily_scan()
                last_run_date = current_date

            # Periodically log queue statistics: with the one-second sleep per
            # iteration, this condition is hit roughly every two minutes.
            if time.time() % 120 < 1:
                timer.get_queue_stats()
                timer.get_processing_stats()

            time.sleep(1)
    except KeyboardInterrupt:
        print("Exiting...")
        alpaca_adapter.close()
        news_processor.stop_processing()
if __name__ == "__main__":
    main()