Refactor main() to run scrapy in a separate multiprocessing.Process
This commit is contained in:
parent
ad60e83693
commit
3c30b42f75
16
scrarls.py
16
scrarls.py
@ -4,6 +4,7 @@ import re
|
||||
import sys
|
||||
|
||||
from datetime import datetime
|
||||
from multiprocessing import Process
|
||||
|
||||
import scrapy
|
||||
from scrapy.crawler import CrawlerProcess
|
||||
@ -179,5 +180,20 @@ def run_scrapy():
|
||||
process.start()
|
||||
|
||||
|
||||
def main():
    """Run the scrapy crawler in a dedicated child process.

    Blocks until the crawler finishes. On Ctrl-C (KeyboardInterrupt),
    asks the child to terminate, waits up to 30 seconds for a clean
    shutdown, and force-kills it if it is still alive.

    Returns:
        None (so ``sys.exit(main())`` exits with status 0).
    """
    p_scrapy = Process(target=run_scrapy)

    try:
        p_scrapy.start()
        # Wait for the crawler to finish. Without this join(), main()
        # would return immediately after start() and the
        # KeyboardInterrupt handler below could never fire.
        p_scrapy.join()
    except KeyboardInterrupt:
        print("Closing...")
        # Graceful stop first (SIGTERM), then escalate if needed.
        p_scrapy.terminate()
        p_scrapy.join(timeout=30)
        if p_scrapy.is_alive():
            print("⚠️ Scrapy n'a pas pu s'arrêter proprement.")
            p_scrapy.kill()
            # Reap the killed child so it does not linger as a zombie.
            p_scrapy.join()
        print("scrarls is stopped.")
|
||||
|
||||
if __name__ == "__main__":
    # Propagate main()'s return value as the process exit status.
    # raise SystemExit(x) is exactly what sys.exit(x) does internally;
    # main() returns None, so a normal run exits with status 0.
    raise SystemExit(main())
|
||||
Loading…
x
Reference in New Issue
Block a user