Implement subscription from RSS feed
This commit is contained in:
parent
4674cc926c
commit
835b9a42a1
11 changed files with 185 additions and 271 deletions
|
|
@ -1,13 +1,14 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
import json
|
||||
from datetime import datetime
|
||||
import time
|
||||
from typing import TYPE_CHECKING
|
||||
import urllib.error
|
||||
import urllib.request
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from .youtube_objects import ChannelResult, SearchResult
|
||||
from .youtube_subscription import ChannelInfo, ThumbnailInfo, VideoInfo
|
||||
|
||||
|
||||
if TYPE_CHECKING:
|
||||
|
|
@ -25,12 +26,11 @@ class YoutubeManager:
|
|||
remaining: int
|
||||
next_reset: float
|
||||
|
||||
def __init__(self, api_key: str, logger: logging.Logger):
|
||||
self._api_key = api_key
|
||||
def __init__(self, logger: logging.Logger):
    """Create the manager.

    :param logger: logger used for request tracing.
    """
    self._logger = logger
    # Quota bookkeeping: start with the full daily budget and schedule the
    # next reset one rolling 24 h window from now.
    self.rate_limit = self.RateLimit(remaining=self.DEFAULT_DAILY_POINTS, next_reset=time.time() + 24 * 3600)
|
||||
|
||||
def _request(self, url: str, request_timeout: float, expected_status: int = 200) -> tuple[HTTPHeaders, dict]:
|
||||
def _request(self, url: str, request_timeout: float, expected_status: int = 200) -> tuple[HTTPHeaders, str]:
|
||||
if time.time() >= self.rate_limit.next_reset:
|
||||
self.rate_limit.next_reset = time.time() + 24 * 3600
|
||||
self.rate_limit.remaining = self.DEFAULT_DAILY_POINTS
|
||||
|
|
@ -41,14 +41,14 @@ class YoutubeManager:
|
|||
self.rate_limit.remaining -= 1
|
||||
|
||||
request = urllib.request.Request(url)
|
||||
request.add_header('Accept', 'application/json')
|
||||
# request.add_header('Accept', 'application/json')
|
||||
try:
|
||||
with urllib.request.urlopen(request, timeout=request_timeout) as response:
|
||||
if response.status != expected_status:
|
||||
raise RuntimeError(
|
||||
f'Unexpected YT status {response.status} (expected: {expected_status})'
|
||||
f' -> {response.read().decode()}')
|
||||
return dict(response.getheaders()), json.loads(response.read().decode())
|
||||
return dict(response.getheaders()), response.read().decode()
|
||||
except urllib.error.HTTPError as error:
|
||||
raise RuntimeError(
|
||||
f'HTTP error calling API ({url}): {error}:\n'
|
||||
|
|
@ -69,18 +69,55 @@ class YoutubeManager:
|
|||
except Exception as error:
|
||||
raise RuntimeError(f'Exception calling YouTube shorts ({video_id}): {error}') from error
|
||||
|
||||
def request_channel_info(self, channel_id: str, request_timeout: float) -> tuple[
        HTTPHeaders, ChannelResult]:
    """Fetch snippet metadata for one channel from the YouTube Data API.

    :param channel_id: the channel to look up.
    :param request_timeout: per-request timeout in seconds.
    :returns: response headers and the parsed channel result.
    :raises RuntimeError: propagated from ``_request`` on HTTP/transport errors.
    """
    endpoint = ('https://www.googleapis.com/youtube/v3/channels?part=snippet'
                f'&id={channel_id}&key={self._api_key}')
    self._logger.debug('YoutubeManager: request channel info for channel %s', channel_id)
    headers, payload = self._request(url=endpoint, request_timeout=request_timeout)
    # NOTE(review): the diff shows _request now returning the raw body as str,
    # not a dict — confirm ChannelResult.from_dict still accepts this payload.
    return headers, ChannelResult.from_dict(payload)
|
||||
@staticmethod
def _parse_rss_data(data) -> tuple[ChannelInfo, list[VideoInfo]]:
    """Parse a YouTube RSS/Atom feed into channel metadata plus its videos.

    :param data: file-like object (or filename) containing the feed XML.
    :returns: the channel's info and one ``VideoInfo`` per feed entry,
        in feed order.

    The ``{*}`` prefix is ElementTree's any-namespace wildcard, so the
    feed's Atom / media / yt namespaces need not be spelled out.
    """
    tree = ET.parse(data)
    author = tree.find('{*}author')
    channel = ChannelInfo(
        channel_id=tree.find('{*}channelId').text,  # type: ignore
        title=author.find('{*}name').text,  # type: ignore
        url=author.find('{*}uri').text)  # type: ignore

    def _entry_to_video(entry) -> VideoInfo:
        # Thumbnail and description live under the <media:group> element.
        media = entry.find('{*}group')
        thumb = media.find('{*}thumbnail')  # type: ignore
        return VideoInfo(
            video_id=entry.find('{*}videoId').text,  # type: ignore
            title=entry.find('{*}title').text,  # type: ignore
            description=media.find('{*}description').text,  # type: ignore
            url=entry.find('{*}link').get('href'),  # type: ignore
            thumbnail=ThumbnailInfo(
                url=thumb.get('url'),  # type: ignore
                width=thumb.get('width'),  # type: ignore
                height=thumb.get('height')),  # type: ignore
            published=datetime.fromisoformat(entry.find('{*}published').text),  # type: ignore
            updated=datetime.fromisoformat(entry.find('{*}updated').text))  # type: ignore

    return channel, [_entry_to_video(entry) for entry in tree.findall('{*}entry')]
|
||||
|
||||
def request_channel_videos(self, connection: http.client.HTTPConnection, channel_id: str,
                           expected_status: int = 200) -> tuple[HTTPHeaders, ChannelInfo, list[VideoInfo]]:
    """Fetch a channel's recent videos from the public RSS feed.

    Uses the feed endpoint (no API quota cost) over an already-open
    ``connection`` to the feed host.

    :param connection: open HTTP(S) connection to the YouTube feed host.
    :param channel_id: channel id ('UC...') or a playlist id.
    :param expected_status: HTTP status required for success.
    :returns: response headers, channel info, and the list of videos.
    :raises RuntimeError: on any transport error or unexpected status.
    """
    url = '/feeds/videos.xml?playlist_id='
    # A 'UC...' channel id maps to its 'UULF...' long-form uploads playlist;
    # anything else is assumed to already be a playlist id.
    url += f'UULF{channel_id[2:]}' if channel_id.startswith('UC') else f'{channel_id}'
    self._logger.debug('YoutubeManager: request channel videos for channel %s', channel_id)
    try:
        connection.request('GET', url)
        response = connection.getresponse()
        headers = dict(response.getheaders())
    except TimeoutError as error:
        raise RuntimeError(f'Timeout calling {url}: {error}') from error
    except (http.client.HTTPException, OSError) as error:
        # http.client raises HTTPException / OSError — NOT urllib.error.*,
        # so catching urllib errors here would never match.
        raise RuntimeError(f'HTTP error calling {url}: {error}') from error
    except Exception as error:
        raise RuntimeError(f'Unexpected error calling {url}: {error}') from error

    if response.status != expected_status:
        raise RuntimeError(
            f'Unexpected YT status {response.status} (expected: {expected_status}) for {url}'
            f' -> {response.read().decode()}')

    # _parse_rss_data consumes the response object as a file-like XML stream.
    return headers, *self._parse_rss_data(response)
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue