|
# YouTube Data API v3 endpoint templates.
base_url = 'https://www.googleapis.com/youtube/v3/'
# videos.list: contentDetails + snippet + statistics for one video id ({} = id, {} = API key).
api_url = f"{base_url}videos?part=contentDetails%2C+snippet%2C+statistics&id={{}}&key={{}}"
# search.list: top match only; caller supplies the query via request params.
search_api_url = f"{base_url}search?part=id&maxResults=1"
# playlists.list: snippet + contentDetails + status for a playlist id.
playlist_api_url = f"{base_url}playlists?part=snippet%2CcontentDetails%2Cstatus"
# Short-link template for presenting a video id back to the channel.
video_url = "http://youtu.be/%s"
19 | 20 |
|
20 | 21 |
|
@@ -133,13 +134,16 @@ def youtime(text): |
@hook.regex(ytpl_re)
def ytplaylist_url(match):
    """Look up a YouTube playlist linked in chat and return a one-line summary.

    :param match: regex match from ``ytpl_re``; group 4 holds the playlist
        URL fragment, whose ``list=`` value is the playlist id.
    :return: ``"<title> - <n> videos - <channel>"`` (IRC-bold formatted),
        or an error string if the API lookup fails.
    """
    playlist_id = match.group(4).split("=")[-1]

    # BUG FIX: the original called search_api_url, whose 'part=id' request
    # never returns snippet/contentDetails for a playlist id — the freshly
    # added playlists.list endpoint (playlist_api_url) is the intended one.
    response = requests.get(playlist_api_url, params={"id": playlist_id, "key": dev_key}).json()

    if 'error' in response:
        return 'Error looking up playlist.'

    items = response['items']
    # playlists.list returns an empty items list for unknown/private ids;
    # without this guard items[0] raises IndexError.
    if not items:
        return 'Error looking up playlist.'

    snippet = items[0]['snippet']
    content_details = items[0]['contentDetails']

    title = snippet['title']
    author = snippet['channelTitle']
    num_videos = str(content_details['itemCount'])
    return "\x02{}\x02 - \x02{}\x02 videos - \x02{}\x02".format(title, num_videos, author)
0 commit comments