Refresh?
This commit is contained in:
parent
ff53b42b6f
commit
a3858f6395
3 changed files with 104 additions and 2 deletions
40
cry/cli.py
40
cry/cli.py
|
|
@ -8,9 +8,12 @@ from . import database
|
|||
|
||||
@click.group()
@click.version_option()
@click.option("-v", "--verbose", count=True)
def cli(verbose):
    """Command line feed reader"""
    # Echo the verbosity level selected via repeated -v flags.
    click.echo(f"Verbosity: {verbose}")
||||
@cli.command(name="subscribe")
|
||||
@click.argument("url")
|
||||
|
|
@ -36,3 +39,38 @@ def subscribe(url):
|
|||
db.store_feed(f)
|
||||
|
||||
click.echo(f"Subscribed to {meta.url}")
|
||||
|
||||
|
||||
@cli.command(name="refresh")
@click.argument("url", required=False, default=None)
def refresh(url):
    """Refresh one or more feeds.

    If a URL is specified, refresh that URL. Otherwise, refresh all subscribed
    feeds.
    """
    db = database.Database.local()
    if url:
        loaded = db.load_feed(url)
        if loaded is None:
            click.echo(f"Not subscribed to {url}")
            # BUG FIX: click ignores a command callback's return value in
            # standalone mode, so `return 1` exited with status 0. Raise
            # SystemExit so the process actually reports failure.
            raise SystemExit(1)
        feeds = [loaded.meta]
    else:
        feeds = db.load_all_meta()

    click.echo(f"Refreshing {len(feeds)} feed(s)...")
    results = asyncio.run(feed.fetch_many(feeds))

    # Tally how many entries were newly inserted across all fetched feeds.
    new_count = 0
    for parsed, meta in results:
        if parsed is None:
            # Nothing new — just persist the updated fetch metadata.
            db.update_meta(meta)
        else:
            # New items, possibly! store_feed returns the insert count.
            new_count += db.store_feed(feed.Feed.from_parsed(parsed, meta))

    click.echo(f"Fetched {new_count} new entries.")
|
||||
|
|
|
|||
|
|
@ -126,6 +126,33 @@ class Database:
|
|||
self.set_property("version", len(SCHEMA_STATEMENTS))
|
||||
self.set_property("origin", self.origin)
|
||||
|
||||
def load_all_meta(self) -> list[feed.FeedMeta]:
    """Load fetch metadata for every subscribed feed.

    Builds one feed.FeedMeta per row of the feeds table, each stamped with
    this database's origin.
    """
    cursor = self.db.execute(
        """
        SELECT
            url,
            last_fetched_ts,
            retry_after_ts,
            status,
            etag,
            modified
        FROM feeds
        """
    )
    metas = []
    for row in cursor.fetchall():
        url, last_fetched_ts, retry_after_ts, status, etag, modified = row
        metas.append(
            feed.FeedMeta(
                url=url,
                last_fetched_ts=int(last_fetched_ts),
                retry_after_ts=int(retry_after_ts),
                status=int(status),
                etag=etag,
                modified=modified,
                origin=self.origin,
            )
        )
    return metas
|
||||
|
||||
def load_feed(self, url: str) -> feed.Feed | None:
|
||||
cursor = self.db.execute(
|
||||
"""
|
||||
|
|
@ -179,7 +206,25 @@ class Database:
|
|||
|
||||
return feed.Feed(meta=meta, title=title, link=link, entries=entries)
|
||||
|
||||
def store_feed(self, f: feed.Feed):
|
||||
def update_meta(self, f: "feed.FeedMeta"):
    """Write refreshed fetch metadata back to an existing feeds row.

    Args:
        f: metadata for a feed already present in the feeds table; its
            url selects the row to update.
    """
    # BUG FIX: a bare execute() opens an implicit transaction that was
    # never committed, so the metadata update could be silently lost.
    # store_feed already wraps its writes in `with self.db:`; do the same
    # here so the UPDATE is committed (or rolled back on error).
    with self.db:
        self.db.execute(
            """
            UPDATE feeds SET
                last_fetched_ts=?,
                retry_after_ts=?,
                status=?,
                etag=?,
                modified=?
            WHERE url=?
            """,
            [f.last_fetched_ts, f.retry_after_ts, f.status, f.etag, f.modified, f.url],
        )
|
||||
|
||||
def store_feed(self, f: feed.Feed) -> int:
|
||||
"""Store the given feed in the database.
|
||||
|
||||
Returns the number of new entries inserted.
|
||||
"""
|
||||
with self.db:
|
||||
self.db.execute(
|
||||
"""
|
||||
|
|
@ -215,6 +260,11 @@ class Database:
|
|||
],
|
||||
)
|
||||
|
||||
cursor = self.db.execute(
|
||||
"SELECT COUNT (*) FROM entries WHERE feed_url=?", [f.meta.url]
|
||||
)
|
||||
start_count = cursor.fetchone()[0]
|
||||
|
||||
self.db.executemany(
|
||||
"""
|
||||
INSERT INTO entries (
|
||||
|
|
@ -248,3 +298,9 @@ class Database:
|
|||
""",
|
||||
[(e.id, e.inserted_at, f.meta.url, e.title, e.link) for e in f.entries],
|
||||
)
|
||||
|
||||
cursor = self.db.execute(
|
||||
"SELECT COUNT (*) FROM entries WHERE feed_url=?", [f.meta.url]
|
||||
)
|
||||
end_count = cursor.fetchone()[0]
|
||||
return end_count - start_count
|
||||
|
|
|
|||
|
|
@ -239,6 +239,14 @@ async def fetch_feed(
|
|||
return (parsed, feed)
|
||||
|
||||
|
||||
async def fetch_many(
    metas: list[FeedMeta],
) -> list[tuple[feedparser.FeedParserDict | None, FeedMeta]]:
    """Fetch all the given feeds concurrently.

    Returns one (parsed, meta) pair per input meta, in input order; parsed
    is None when fetch_feed found nothing new for that feed.

    NOTE(review): if any fetch_feed task raises, the TaskGroup cancels the
    remaining tasks and an ExceptionGroup propagates — confirm callers
    expect that.
    """
    # Idiom fix: use builtin `tuple[...]` (PEP 585) instead of the legacy
    # typing.Tuple alias — the file already uses list[...] and `| None`,
    # and TaskGroup requires 3.11 anyway.
    async with asyncio.TaskGroup() as group:
        # Task list mirrors `metas`, so results come back in input order.
        tasks = [group.create_task(fetch_feed(m)) for m in metas]
    return [task.result() for task in tasks]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class Entry:
|
||||
id: str
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue