Compare commits

6 Commits

@@ -10,8 +10,8 @@ from scrapy.crawler import CrawlerProcess
 from scrapy.spiders import CrawlSpider, Rule
 from scrapy.linkextractors import LinkExtractor
-from sqlalchemy import create_engine, func, select, Engine, Integer, String, Text, DateTime
-from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
+from sqlalchemy import ForeignKey, create_engine, func, select, Engine, Integer, Boolean, String, Text, DateTime
+from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship
 from sqlalchemy.orm import sessionmaker
@@ -58,10 +58,6 @@ class TvShowDB(Base):
         String(255),
         nullable=True
     )
-    download_url: Mapped[str | None] = mapped_column(
-        String(255),
-        nullable=True
-    )
     created_at: Mapped[datetime] = mapped_column(
         DateTime,
         server_default=func.datetime('now'),
@@ -73,6 +69,30 @@ class TvShowDB(Base):
         onupdate=func.datetime('now'),
         nullable=False
     )
+    links: Mapped[list["LinkDB"]] = relationship(back_populates="show")
+
+
+class LinkDB(Base):
+    """Model for storing download links (SQLAlchemy 2.0)."""
+    __tablename__: str = "links"
+    id: Mapped[int] = mapped_column(
+        Integer,
+        primary_key=True,
+        autoincrement=True
+    )
+    link: Mapped[str] = mapped_column(
+        String(255),
+        nullable=False
+    )
+    is_downloaded: Mapped[bool] = mapped_column(
+        Boolean,
+        default=False
+    )
+    show_id: Mapped[int] = mapped_column(ForeignKey("tvshows.id"))
+    show: Mapped["TvShowDB"] = relationship(back_populates="links")
 
 
 class TvShowItem(scrapy.Item):
     post_id: scrapy.Field = scrapy.Field()
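
Note on the new model: TvShowDB.links and LinkDB.show form a bidirectional one-to-many via back_populates, so one show now owns any number of download links. Below is a minimal sketch of how that behaves, assuming the Base declarative class and models defined above; the SQLite URL, the column values, and the types passed for post_id and date are illustrative guesses, not taken from the repository.

# Minimal relationship sketch -- engine URL, sessionmaker name and all column
# values below are illustrative assumptions, not part of the repository.
from datetime import datetime
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

engine = create_engine("sqlite:///shows.db")   # hypothetical database file
Base.metadata.create_all(engine)               # creates both tvshows and links
Session = sessionmaker(bind=engine)

with Session() as session:
    show = TvShowDB(
        post_id=1,
        post_title="Example post",
        title="Example Show",
        date=datetime.now(),
        summary="<p>demo</p>",
        image_url="https://example.com/poster.jpg",
        links=[LinkDB(link="https://rapidgator.net/file/demo")],
    )
    session.add(show)                  # save-update cascade adds the LinkDB row too
    session.commit()
    assert show.links[0].show is show  # back_populates keeps both sides in sync
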
@@ -97,11 +117,20 @@ class SQLAlchemyPipeline:
             show = session.scalars(stmt).first()
             print(f"{show=}")
             if not show:
-                show = TvShowDB(**item)
+                show = TvShowDB(
+                    post_id=item["post_id"],
+                    post_title=item["post_title"],
+                    title=item["title"],
+                    date=item["date"],
+                    summary=item["summary"],
+                    image_url=item["image_url"],
+                    links=[LinkDB(link=url) for url in item["download_url"]]
+                )
                 session.add(show)
             else:
                 for key, value in item.items():
-                    setattr(show, key, value)
+                    if key != "download_url":
+                        setattr(show, key, value)
             session.commit()
         except Exception as e:
             session.rollback()
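
With this pipeline change, download URLs are no longer stored on the show row at all: a new show gets LinkDB children up front, and updates to an existing show deliberately leave its links untouched. A hedged sketch of how a later step might consume the is_downloaded flag; the function name and the print-based "download" are placeholders, only the LinkDB attributes come from this diff.

# Hypothetical consumer of the new links table -- not part of this diff.
from sqlalchemy import select
from sqlalchemy.orm import Session

def fetch_pending_links(session: Session) -> None:
    """Process every link that has not been downloaded yet (placeholder logic)."""
    pending = session.scalars(
        select(LinkDB).where(LinkDB.is_downloaded.is_(False))
    ).all()
    for link in pending:
        print(f"would fetch {link.link} for show {link.show.title}")
        link.is_downloaded = True   # flip the flag once the file is actually on disk
    session.commit()
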
@@ -136,7 +165,7 @@ class TvShow(CrawlSpider):
         item['date'] = self.parse_date(article.css('.entry-meta-header-before::text').getall()[1].strip())
         item['summary'] = "".join(article.xpath('.//div[@class="entry-summary"]/node()').extract())
         item['image_url'] = article.css('.entry-summary > p > img::attr(src)').get()
-        item['download_url'] = "#".join(article.css('.entry-summary > p > a[href ^= "https://rapidgator"]::attr(href)').extract())
+        item['download_url'] = article.css('.entry-summary > p > a[href ^= "https://rapidgator"]::attr(href)').extract()
         yield item
 
     def parse_date(self, formatted_date: str):
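
The spider-side change is what makes the list comprehension in the pipeline work: item['download_url'] is now the plain list returned by extract() instead of a single "#"-joined string. A standalone check of the selector with parsel (the selector library Scrapy uses); the HTML snippet is invented for the demo.

# The HTML below is a made-up stand-in for a scraped article page.
from parsel import Selector

html = '''
<article><div class="entry-summary"><p>
  <a href="https://rapidgator.net/file/part1">part 1</a>
  <a href="https://rapidgator.net/file/part2">part 2</a>
</p></div></article>
'''
article = Selector(text=html)
urls = article.css('.entry-summary > p > a[href ^= "https://rapidgator"]::attr(href)').extract()
print(urls)   # ['https://rapidgator.net/file/part1', 'https://rapidgator.net/file/part2']
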