Refactoring the main function to use multiprocessing.Process to run scrapy

This commit is contained in:
edipretoro 2026-01-03 09:52:11 +01:00
parent ad60e83693
commit 3c30b42f75


@@ -4,6 +4,7 @@ import re
import sys
from datetime import datetime
from multiprocessing import Process
import scrapy
from scrapy.crawler import CrawlerProcess
@@ -179,5 +180,20 @@ def run_scrapy():
    process.start()
def main():
    p_scrapy = Process(target=run_scrapy)
    try:
        p_scrapy.start()
        # Wait on the child so a Ctrl-C in the parent is caught here
        p_scrapy.join()
    except KeyboardInterrupt:
        print("Closing...")
        # Ask the Scrapy process to stop, then give it time to exit
        p_scrapy.terminate()
        p_scrapy.join(timeout=30)
        if p_scrapy.is_alive():
            print("⚠️ Scrapy could not shut down cleanly.")
            p_scrapy.kill()
        print("scrarls is stopped.")
if __name__ == "__main__":
    sys.exit(main())
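
For context, here is a minimal, self-contained sketch of the pattern this commit relies on: running a Scrapy CrawlerProcess inside a multiprocessing.Process so the parent can terminate()/join() it on Ctrl-C. The spider class, its start URL, and the settings below are assumptions for illustration only and are not part of the commit; the real run_scrapy() lives in the repository.

    import scrapy
    from scrapy.crawler import CrawlerProcess
    from multiprocessing import Process

    class ExampleSpider(scrapy.Spider):
        # Hypothetical spider used only for this sketch.
        name = "example"
        start_urls = ["https://example.com"]

        def parse(self, response):
            yield {"title": response.css("title::text").get()}

    def run_scrapy():
        # CrawlerProcess manages the Twisted reactor; start() blocks until the crawl ends.
        process = CrawlerProcess(settings={"LOG_LEVEL": "INFO"})
        process.crawl(ExampleSpider)
        process.start()

    if __name__ == "__main__":
        # Keeping the reactor in a child process lets the parent stop it cleanly,
        # as main() does in the diff above.
        p = Process(target=run_scrapy)
        p.start()
        p.join()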