Compare commits
8 Commits
8adc0623bd
...
develop
| Author | SHA1 | Date | |
|---|---|---|---|
| a2dce2bba9 | |||
| edbb92ff9a | |||
| a7952bc32c | |||
| 1d0cb8ed5d | |||
| d8d8109cdc | |||
| 05866cc862 | |||
| f0e6d73dde | |||
| 4bff05bc92 |
@@ -1 +1,2 @@
|
|||||||
/*.db
|
/*.db
|
||||||
|
/__pycache__/
|
||||||
|
|||||||
@@ -0,0 +1,85 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
from sqlalchemy import ForeignKey, func, Integer, Boolean, String, Text, DateTime
|
||||||
|
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship
|
||||||
|
|
||||||
|
|
||||||
|
class Base(DeclarativeBase):
    """Declarative base shared by every ORM model (SQLAlchemy 2.0 style)."""
|
||||||
|
|
||||||
|
|
||||||
|
class TvShowDB(Base):
    """ORM model for a scraped TV-show episode post (SQLAlchemy 2.0).

    One row per crawled post; its download URLs live in related
    ``LinkDB`` rows reachable through ``links``.
    """

    __tablename__: str = "tvshows"

    # Surrogate primary key.
    id: Mapped[int] = mapped_column(
        Integer,
        primary_key=True,
        autoincrement=True,
    )
    # Source post identifier; unique so a re-crawl updates instead of duplicating.
    post_id: Mapped[str] = mapped_column(
        String(length=255),
        nullable=False,
        unique=True,
        index=True,
    )
    post_title: Mapped[str] = mapped_column(
        String(255),
        nullable=False,
    )
    # Parsed show title, indexed for lookup by name.
    title: Mapped[str] = mapped_column(
        String(255),
        nullable=False,
        index=True,
    )
    # Publication date of the post.
    # NOTE(review): stored naive — presumably the spider parses site-local
    # time; confirm against the caller before mixing with aware datetimes.
    date: Mapped[datetime] = mapped_column(
        DateTime,
        nullable=False,
        index=True,
    )
    # Raw summary text/markup of the post, if any.
    summary: Mapped[str | None] = mapped_column(
        Text,
        nullable=True,
    )
    image_url: Mapped[str | None] = mapped_column(
        String(255),
        nullable=True,
    )
    # Audit timestamps. func.now() renders CURRENT_TIMESTAMP, which is
    # portable SQL and equivalent on SQLite; the previous
    # func.datetime('now') emitted SQLite-only datetime('now') DDL.
    created_at: Mapped[datetime] = mapped_column(
        DateTime,
        server_default=func.now(),
        nullable=False,
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime,
        server_default=func.now(),
        onupdate=func.now(),
        nullable=False,
    )

    # One-to-many: episode -> download links.
    links: Mapped[list["LinkDB"]] = relationship(back_populates="show")

    def __repr__(self) -> str:
        """Debug-friendly representation (id, post_id, title)."""
        return (
            f"{type(self).__name__}(id={self.id!r}, "
            f"post_id={self.post_id!r}, title={self.title!r})"
        )
|
||||||
|
|
||||||
|
|
||||||
|
class LinkDB(Base):
    """ORM model for a single download link belonging to an episode (SQLAlchemy 2.0)."""

    __tablename__: str = "links"

    # Surrogate primary key.
    id: Mapped[int] = mapped_column(
        Integer,
        primary_key=True,
        autoincrement=True,
    )
    # Download URL.
    # NOTE(review): String(255) may truncate long URLs on strict backends —
    # consider Text if URLs can exceed 255 characters.
    link: Mapped[str] = mapped_column(
        String(255),
        nullable=False,
    )
    # nullable=False added: the original column only had a client-side
    # default, so rows inserted outside the ORM could silently end up NULL.
    # ORM inserts are unaffected — default=False still fills the value.
    is_downloaded: Mapped[bool] = mapped_column(
        Boolean,
        default=False,
        nullable=False,
    )

    # Many-to-one back to the owning episode.
    show_id: Mapped[int] = mapped_column(ForeignKey("tvshows.id"))
    show: Mapped["TvShowDB"] = relationship(back_populates="links")

    def __repr__(self) -> str:
        """Debug-friendly representation (id, link, download state)."""
        return (
            f"{type(self).__name__}(id={self.id!r}, "
            f"link={self.link!r}, is_downloaded={self.is_downloaded!r})"
        )
|
||||||
|
|
||||||
|
|
||||||
+13
-61
@@ -10,70 +10,13 @@ from scrapy.crawler import CrawlerProcess
|
|||||||
from scrapy.spiders import CrawlSpider, Rule
|
from scrapy.spiders import CrawlSpider, Rule
|
||||||
from scrapy.linkextractors import LinkExtractor
|
from scrapy.linkextractors import LinkExtractor
|
||||||
|
|
||||||
from sqlalchemy import create_engine, func, select, Engine, Integer, String, Text, DateTime
|
from sqlalchemy import create_engine, select, Engine
|
||||||
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
|
|
||||||
from sqlalchemy.orm import sessionmaker
|
from sqlalchemy.orm import sessionmaker
|
||||||
|
|
||||||
|
|
||||||
class Base(DeclarativeBase):
|
from models import Base, TvShowDB, LinkDB
|
||||||
"""Base déclarative pour SQLAlchemy 2.0."""
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class TvShowDB(Base):
|
|
||||||
"""Modèle pour le stockage des épisodes (SQLAlchemy 2.0)."""
|
|
||||||
|
|
||||||
__tablename__: str = "tvshows"
|
|
||||||
|
|
||||||
id: Mapped[int] = mapped_column(
|
|
||||||
Integer,
|
|
||||||
primary_key=True,
|
|
||||||
autoincrement=True
|
|
||||||
)
|
|
||||||
post_id: Mapped[str] = mapped_column(
|
|
||||||
String(length=255),
|
|
||||||
nullable=False,
|
|
||||||
unique=True,
|
|
||||||
index=True
|
|
||||||
)
|
|
||||||
post_title: Mapped[str] = mapped_column(
|
|
||||||
String(255),
|
|
||||||
nullable=False
|
|
||||||
)
|
|
||||||
title: Mapped[str] = mapped_column(
|
|
||||||
String(255),
|
|
||||||
nullable=False,
|
|
||||||
index=True
|
|
||||||
)
|
|
||||||
date: Mapped[datetime] = mapped_column(
|
|
||||||
DateTime,
|
|
||||||
nullable=False,
|
|
||||||
index=True
|
|
||||||
)
|
|
||||||
summary: Mapped[str | None] = mapped_column(
|
|
||||||
Text,
|
|
||||||
nullable=True
|
|
||||||
)
|
|
||||||
image_url: Mapped[str | None] = mapped_column(
|
|
||||||
String(255),
|
|
||||||
nullable=True
|
|
||||||
)
|
|
||||||
download_url: Mapped[str | None] = mapped_column(
|
|
||||||
String(255),
|
|
||||||
nullable=True
|
|
||||||
)
|
|
||||||
created_at: Mapped[datetime] = mapped_column(
|
|
||||||
DateTime,
|
|
||||||
server_default=func.datetime('now'),
|
|
||||||
nullable=False
|
|
||||||
)
|
|
||||||
updated_at: Mapped[datetime] = mapped_column(
|
|
||||||
DateTime,
|
|
||||||
server_default=func.datetime('now'),
|
|
||||||
onupdate=func.datetime('now'),
|
|
||||||
nullable=False
|
|
||||||
)
|
|
||||||
|
|
||||||
class TvShowItem(scrapy.Item):
|
class TvShowItem(scrapy.Item):
|
||||||
post_id: scrapy.Field = scrapy.Field()
|
post_id: scrapy.Field = scrapy.Field()
|
||||||
post_title: scrapy.Field = scrapy.Field()
|
post_title: scrapy.Field = scrapy.Field()
|
||||||
@@ -97,10 +40,19 @@ class SQLAlchemyPipeline:
|
|||||||
show = session.scalars(stmt).first()
|
show = session.scalars(stmt).first()
|
||||||
print(f"{show=}")
|
print(f"{show=}")
|
||||||
if not show:
|
if not show:
|
||||||
show = TvShowDB(**item)
|
show = TvShowDB(
|
||||||
|
post_id=item["post_id"],
|
||||||
|
post_title=item["post_title"],
|
||||||
|
title=item["title"],
|
||||||
|
date=item["date"],
|
||||||
|
summary=item["summary"],
|
||||||
|
image_url=item["image_url"],
|
||||||
|
links=[LinkDB(link=url) for url in item["download_url"]]
|
||||||
|
)
|
||||||
session.add(show)
|
session.add(show)
|
||||||
else:
|
else:
|
||||||
for key, value in item.items():
|
for key, value in item.items():
|
||||||
|
if key != "download_url":
|
||||||
setattr(show, key, value)
|
setattr(show, key, value)
|
||||||
session.commit()
|
session.commit()
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
@@ -136,7 +88,7 @@ class TvShow(CrawlSpider):
|
|||||||
item['date'] = self.parse_date(article.css('.entry-meta-header-before::text').getall()[1].strip())
|
item['date'] = self.parse_date(article.css('.entry-meta-header-before::text').getall()[1].strip())
|
||||||
item['summary'] = "".join(article.xpath('.//div[@class="entry-summary"]/node()').extract())
|
item['summary'] = "".join(article.xpath('.//div[@class="entry-summary"]/node()').extract())
|
||||||
item['image_url'] = article.css('.entry-summary > p > img::attr(src)').get()
|
item['image_url'] = article.css('.entry-summary > p > img::attr(src)').get()
|
||||||
item['download_url'] = "#".join(article.css('.entry-summary > p > a[href ^= "https://rapidgator"]::attr(href)').extract())
|
item['download_url'] = article.css('.entry-summary > p > a[href ^= "https://rapidgator"]::attr(href)').extract()
|
||||||
yield item
|
yield item
|
||||||
|
|
||||||
def parse_date(self, formatted_date: str):
|
def parse_date(self, formatted_date: str):
|
||||||
|
|||||||
Reference in New Issue
Block a user