Refactor the main function to run scrapy in a multiprocessing.Process
This commit is contained in:
+16
@@ -4,6 +4,7 @@ import re
|
|||||||
import sys
|
import sys
|
||||||
|
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
from multiprocessing import Process
|
||||||
|
|
||||||
import scrapy
|
import scrapy
|
||||||
from scrapy.crawler import CrawlerProcess
|
from scrapy.crawler import CrawlerProcess
|
||||||
@@ -179,5 +180,20 @@ def run_scrapy():
|
|||||||
process.start()
|
process.start()
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Run the scrapy crawler in a dedicated child process.

    Starts ``run_scrapy`` in a ``multiprocessing.Process`` and blocks until
    it finishes. On Ctrl-C the child is asked to terminate, given 30 seconds
    to comply, then force-killed as a last resort.

    Returns:
        None — so ``sys.exit(main())`` exits with status 0.
    """
    p_scrapy = Process(target=run_scrapy)

    try:
        p_scrapy.start()
        # Wait for the crawler to finish. Without this join() main() would
        # return immediately after start(), so the KeyboardInterrupt handler
        # below could never fire and the script would exit while the child
        # process was still crawling.
        p_scrapy.join()
    except KeyboardInterrupt:
        print("Closing...")
        # Graceful shutdown first (SIGTERM), with a bounded wait.
        p_scrapy.terminate()
        p_scrapy.join(timeout=30)
        if p_scrapy.is_alive():
            print("⚠️ Scrapy n'a pas pu s'arrêter proprement.")
            # Hard kill (SIGKILL) as a last resort, then reap the child so
            # it does not linger as a zombie.
            p_scrapy.kill()
            p_scrapy.join()
        print("scrarls is stopped.")
# Script entry point: exit with main()'s return value (None -> status 0).
if __name__ == "__main__":
    sys.exit(main())
|
|||||||
Reference in New Issue
Block a user