Compare commits
15 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| a2dce2bba9 | |||
| edbb92ff9a | |||
| a7952bc32c | |||
| 1d0cb8ed5d | |||
| d8d8109cdc | |||
| 05866cc862 | |||
| f0e6d73dde | |||
| 4bff05bc92 | |||
| 8adc0623bd | |||
| 0db07013ce | |||
| eaf854c3eb | |||
| 359b3271e4 | |||
| 15e1f837c8 | |||
| ada99be262 | |||
| 177652dce1 |
@@ -0,0 +1,2 @@
|
||||
/*.db
|
||||
/__pycache__/
|
||||
@@ -0,0 +1,85 @@
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import ForeignKey, func, Integer, Boolean, String, Text, DateTime
|
||||
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship
|
||||
|
||||
|
||||
class Base(DeclarativeBase):
    """Declarative base shared by all SQLAlchemy 2.0 ORM models."""
|
||||
|
||||
|
||||
class TvShowDB(Base):
    """ORM model for a scraped TV-show post (SQLAlchemy 2.0 style).

    One row per source blog post; its download links live in the related
    ``LinkDB`` rows (one-to-many via ``links``).
    """

    __tablename__: str = "tvshows"

    # Surrogate primary key.
    id: Mapped[int] = mapped_column(
        Integer,
        primary_key=True,
        autoincrement=True
    )
    # Identifier of the source post; unique + indexed so the pipeline can
    # look rows up by it and upsert.
    post_id: Mapped[str] = mapped_column(
        String(length=255),
        nullable=False,
        unique=True,
        index=True
    )
    # Full title of the source post.
    post_title: Mapped[str] = mapped_column(
        String(255),
        nullable=False
    )
    # Show title extracted from the post body; indexed for lookups.
    title: Mapped[str] = mapped_column(
        String(255),
        nullable=False,
        index=True
    )
    # Publication date parsed from the post's metadata header.
    date: Mapped[datetime] = mapped_column(
        DateTime,
        nullable=False,
        index=True
    )
    # Raw HTML summary of the post; optional.
    summary: Mapped[str | None] = mapped_column(
        Text,
        nullable=True
    )
    # URL of the post's cover image; optional.
    image_url: Mapped[str | None] = mapped_column(
        String(255),
        nullable=True
    )
    # Row creation timestamp, set by the database.
    # NOTE(review): func.datetime('now') renders the SQLite-specific
    # datetime('now') function — non-portable; func.now() would be the
    # dialect-neutral equivalent. Confirm SQLite is the only target.
    created_at: Mapped[datetime] = mapped_column(
        DateTime,
        server_default=func.datetime('now'),
        nullable=False
    )
    # Last-modified timestamp; refreshed on every UPDATE via onupdate.
    updated_at: Mapped[datetime] = mapped_column(
        DateTime,
        server_default=func.datetime('now'),
        onupdate=func.datetime('now'),
        nullable=False
    )
    # Download links attached to this show (inverse of LinkDB.show).
    links: Mapped[list["LinkDB"]] = relationship(back_populates="show")
|
||||
|
||||
|
||||
class LinkDB(Base):
    """ORM model for a single download link (SQLAlchemy 2.0 style).

    Each row belongs to exactly one ``TvShowDB`` via ``show_id``.
    """

    __tablename__: str = "links"

    # Surrogate primary key.
    id: Mapped[int] = mapped_column(
        Integer,
        primary_key=True,
        autoincrement=True
    )
    # The download URL itself.
    link: Mapped[str] = mapped_column(
        String(255),
        nullable=False
    )
    # Whether this link has already been downloaded; defaults to False.
    is_downloaded: Mapped[bool] = mapped_column(
        Boolean,
        default=False
    )
    # Foreign key to the owning show.
    show_id: Mapped[int] = mapped_column(ForeignKey("tvshows.id"))
    # The owning show (inverse of TvShowDB.links).
    show: Mapped["TvShowDB"] = relationship(back_populates="links")
|
||||
|
||||
|
||||
+55
-9
@@ -2,6 +2,7 @@
|
||||
|
||||
import re
|
||||
import sys
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
import scrapy
|
||||
@@ -9,10 +10,16 @@ from scrapy.crawler import CrawlerProcess
|
||||
from scrapy.spiders import CrawlSpider, Rule
|
||||
from scrapy.linkextractors import LinkExtractor
|
||||
|
||||
from sqlalchemy import create_engine, select, Engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
|
||||
from models import Base, TvShowDB, LinkDB
|
||||
|
||||
|
||||
class TvShowItem(scrapy.Item):
|
||||
article_id: scrapy.Field = scrapy.Field()
|
||||
article_title: scrapy.Field = scrapy.Field()
|
||||
post_id: scrapy.Field = scrapy.Field()
|
||||
post_title: scrapy.Field = scrapy.Field()
|
||||
title:scrapy.Field = scrapy.Field()
|
||||
date: scrapy.Field = scrapy.Field()
|
||||
summary: scrapy.Field = scrapy.Field()
|
||||
@@ -20,6 +27,42 @@ class TvShowItem(scrapy.Item):
|
||||
download_url: scrapy.Field = scrapy.Field()
|
||||
|
||||
|
||||
class SQLAlchemyPipeline:
    """Scrapy item pipeline that upserts scraped shows into a SQLite DB."""

    def __init__(self):
        # NOTE(review): echo=True logs every emitted SQL statement — useful
        # while developing, noisy in production; confirm before deploying.
        self.engine: Engine = create_engine('sqlite:///tvshows.db', echo=True)
        # Create the schema if it does not exist yet.
        Base.metadata.create_all(self.engine)
        self.Session = sessionmaker(bind=self.engine)

    def process_item(self, item, spider):
        """Insert a new ``TvShowDB`` row for *item*, or update the existing one.

        Rows are matched on the unique ``post_id``. On update, every scalar
        field is refreshed but ``download_url`` is skipped, so existing
        ``LinkDB`` rows are kept as-is. Rolls back and re-raises on any
        database error; always closes the session.
        """
        session = self.Session()
        try:
            stmt = select(TvShowDB).where(TvShowDB.post_id == item["post_id"])
            show = session.scalars(stmt).first()
            # Fixed: was a bare print(f"{show=}") to stdout; use the
            # spider's logger with lazy %-formatting instead.
            spider.logger.debug("existing show: %r", show)
            if not show:
                show = TvShowDB(
                    post_id=item["post_id"],
                    post_title=item["post_title"],
                    title=item["title"],
                    date=item["date"],
                    summary=item["summary"],
                    image_url=item["image_url"],
                    links=[LinkDB(link=url) for url in item["download_url"]]
                )
                session.add(show)
            else:
                # Deliberately leave links untouched on re-crawl to avoid
                # duplicating LinkDB rows — TODO confirm this is intended.
                for key, value in item.items():
                    if key != "download_url":
                        setattr(show, key, value)
            session.commit()
        except Exception:
            # Fixed: exception variable `e` was bound but never used.
            session.rollback()
            raise
        finally:
            session.close()
        return item
|
||||
|
||||
|
||||
class TvShow(CrawlSpider):
|
||||
name: str = "rlsb_tvshow"
|
||||
allowed_domains: list[str] = ["rlsbb.ru"]
|
||||
@@ -28,6 +71,9 @@ class TvShow(CrawlSpider):
|
||||
'USER_AGENT': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36',
|
||||
'AUTOTHROTTLE_ENABLED': True,
|
||||
'DOWNLOAD_DELAY': 10,
|
||||
'ITEM_PIPELINES': {
|
||||
'__main__.SQLAlchemyPipeline': 300,
|
||||
},
|
||||
}
|
||||
rules: list[Rule] = [
|
||||
Rule(LinkExtractor(allow=r"/tv-shows/page/"), callback="parse", follow=True)
|
||||
@@ -36,13 +82,13 @@ class TvShow(CrawlSpider):
|
||||
def parse(self, response):
    """Extract one ``TvShowItem`` per ``<article>`` on a listing page.

    Fixed: a stale duplicate set of assignments from the previous revision
    was left in place, each ending in a trailing comma that wrapped the
    value in a 1-tuple — leaving ``article_id``/``article_title`` as tuples
    and making the other five assignments dead stores. Only the corrected
    assignments are kept.
    """
    for article in response.css("article"):
        item = TvShowItem()
        item['post_id'] = article.attrib['id']
        item['post_title'] = article.css('h1.entry-title > a::text').get()
        # Show title lives in the 4th paragraph of the summary.
        item['title'] = article.css('.entry-summary > p:nth-child(4) > strong::text').get()
        # Second text node of the meta header holds the date string.
        item['date'] = self.parse_date(article.css('.entry-meta-header-before::text').getall()[1].strip())
        # Keep the summary as a single HTML string rather than a node list.
        item['summary'] = "".join(article.xpath('.//div[@class="entry-summary"]/node()').extract())
        item['image_url'] = article.css('.entry-summary > p > img::attr(src)').get()
        # All rapidgator links, not just the first.
        item['download_url'] = article.css('.entry-summary > p > a[href ^= "https://rapidgator"]::attr(href)').extract()
        yield item
|
||||
|
||||
def parse_date(self, formatted_date: str):
|
||||
|
||||
Reference in New Issue
Block a user