Compare commits
6 Commits
8adc0623bd...a7952bc32c
| Author | SHA1 | Date |
|---|---|---|
| | a7952bc32c | |
| | 1d0cb8ed5d | |
| | d8d8109cdc | |
| | 05866cc862 | |
| | f0e6d73dde | |
| | 4bff05bc92 | |
scrarls.py | 47
@@ -10,8 +10,8 @@ from scrapy.crawler import CrawlerProcess
 from scrapy.spiders import CrawlSpider, Rule
 from scrapy.linkextractors import LinkExtractor
 
-from sqlalchemy import create_engine, func, select, Engine, Integer, String, Text, DateTime
-from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
+from sqlalchemy import ForeignKey, create_engine, func, select, Engine, Integer, Boolean, String, Text, DateTime
+from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship
 from sqlalchemy.orm import sessionmaker
 
 
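The extra imports (ForeignKey, Boolean, relationship) back the one-to-many link table added further down. For reference, a minimal sketch of the bare SQLAlchemy 2.0 pattern they enable — the Parent/Child names are placeholders, not part of this change:

```python
from sqlalchemy import ForeignKey
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship


class Base(DeclarativeBase):
    pass


class Parent(Base):
    __tablename__ = "parent"
    id: Mapped[int] = mapped_column(primary_key=True)
    # One parent owns many children; back_populates keeps both sides in sync.
    children: Mapped[list["Child"]] = relationship(back_populates="parent")


class Child(Base):
    __tablename__ = "child"
    id: Mapped[int] = mapped_column(primary_key=True)
    parent_id: Mapped[int] = mapped_column(ForeignKey("parent.id"))
    parent: Mapped["Parent"] = relationship(back_populates="children")
```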
@@ -58,10 +58,6 @@ class TvShowDB(Base):
         String(255),
         nullable=True
     )
-    download_url: Mapped[str | None] = mapped_column(
-        String(255),
-        nullable=True
-    )
     created_at: Mapped[datetime] = mapped_column(
         DateTime,
         server_default=func.datetime('now'),
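The scalar download_url column is dropped here; the URLs move into the dedicated links table introduced in the next hunk, so they are read back through the relation instead of by splitting a string. A minimal sketch, assuming an engine/session and an already-stored row (none of these names come from the diff):

```python
from sqlalchemy import create_engine
from sqlalchemy.orm import Session

engine = create_engine("sqlite:///scrarls.db")  # assumed database URL

with Session(engine) as session:
    show = session.get(TvShowDB, 1)             # some existing show row
    urls = [link.link for link in show.links]   # was: show.download_url.split("#")
```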
@@ -73,6 +69,30 @@ class TvShowDB(Base):
         onupdate=func.datetime('now'),
         nullable=False
     )
+    links: Mapped[list["LinkDB"]] = relationship(back_populates="show")
+
+
+class LinkDB(Base):
+    """Model for storing download links (SQLAlchemy 2.0)."""
+
+    __tablename__: str = "links"
+
+    id: Mapped[int] = mapped_column(
+        Integer,
+        primary_key=True,
+        autoincrement=True
+    )
+    link: Mapped[str] = mapped_column(
+        String(255),
+        nullable=False
+    )
+    is_downloaded: Mapped[bool] = mapped_column(
+        Boolean,
+        default=False
+    )
+    show_id: Mapped[int] = mapped_column(ForeignKey("tvshows.id"))
+    show: Mapped["TvShowDB"] = relationship(back_populates="links")
+
 
 class TvShowItem(scrapy.Item):
     post_id: scrapy.Field = scrapy.Field()
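With the relation in place, a downstream consumer can query the link rows directly and walk back to the owning show. A minimal sketch, assuming LinkDB is importable from scrarls.py and a SQLite database at scrarls.db (the URL and the downloader step are assumptions, not part of this change):

```python
from sqlalchemy import create_engine, select
from sqlalchemy.orm import Session

engine = create_engine("sqlite:///scrarls.db")  # assumed database URL

with Session(engine) as session:
    # Fetch links that have not been downloaded yet.
    pending = session.scalars(
        select(LinkDB).where(LinkDB.is_downloaded.is_(False))
    ).all()
    for link in pending:
        print(link.show.title, link.link)  # back_populates gives access to the parent show
        link.is_downloaded = True          # mark as handled
    session.commit()
```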
@@ -97,11 +117,20 @@ class SQLAlchemyPipeline:
             show = session.scalars(stmt).first()
             print(f"{show=}")
             if not show:
-                show = TvShowDB(**item)
+                show = TvShowDB(
+                    post_id=item["post_id"],
+                    post_title=item["post_title"],
+                    title=item["title"],
+                    date=item["date"],
+                    summary=item["summary"],
+                    image_url=item["image_url"],
+                    links=[LinkDB(link=url) for url in item["download_url"]]
+                )
                 session.add(show)
             else:
                 for key, value in item.items():
-                    setattr(show, key, value)
+                    if key != "download_url":
+                        setattr(show, key, value)
             session.commit()
         except Exception as e:
             session.rollback()
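Because the spider now yields download_url as a list (see the last hunk), the links=[LinkDB(link=url) for url in item["download_url"]] line fans each href out into its own child row, while the update branch skips download_url so existing link rows are left untouched. A rough illustration of the fan-out, with made-up values:

```python
# Illustrative values only; the real item comes from the spider.
item = {
    "download_url": [
        "https://rapidgator.net/file/part1",
        "https://rapidgator.net/file/part2",
    ]
}
links = [LinkDB(link=url) for url in item["download_url"]]
print(len(links))     # 2 -- one LinkDB row per extracted href
print(links[0].link)  # 'https://rapidgator.net/file/part1'
```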
@@ -136,7 +165,7 @@ class TvShow(CrawlSpider):
         item['date'] = self.parse_date(article.css('.entry-meta-header-before::text').getall()[1].strip())
         item['summary'] = "".join(article.xpath('.//div[@class="entry-summary"]/node()').extract())
         item['image_url'] = article.css('.entry-summary > p > img::attr(src)').get()
-        item['download_url'] = "#".join(article.css('.entry-summary > p > a[href ^= "https://rapidgator"]::attr(href)').extract())
+        item['download_url'] = article.css('.entry-summary > p > a[href ^= "https://rapidgator"]::attr(href)').extract()
         yield item
 
     def parse_date(self, formatted_date: str):
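This spider change is what makes the pipeline's list handling possible: .extract() (an alias of .getall()) returns the matched hrefs as a list instead of the old single "#"-joined string. A quick standalone check with made-up HTML:

```python
from scrapy import Selector

html = """
<div class="entry-summary">
  <p>
    <a href="https://rapidgator.net/file/a">part 1</a>
    <a href="https://rapidgator.net/file/b">part 2</a>
  </p>
</div>
"""
sel = Selector(text=html)
urls = sel.css('.entry-summary > p > a[href ^= "https://rapidgator"]::attr(href)').extract()
print(urls)            # ['https://rapidgator.net/file/a', 'https://rapidgator.net/file/b']
print("#".join(urls))  # old behaviour: one string joined with '#'
```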