about summary refs log tree commit diff stats
path: root/youtube
diff options
context:
space:
mode:
author: James Taylor <user234683@users.noreply.github.com> 2018-07-23 18:29:37 -0700
committer: James Taylor <user234683@users.noreply.github.com> 2018-07-23 18:29:37 -0700
commitce6fb753a7f951e87d170cc3f3e7f77c37e0b8e5 (patch)
tree1d5c7d26546c27d602ead8c8f947e84b769da96e /youtube
parent663a9c0382223e43ee0556dd6f974e5ad1f3a9a8 (diff)
downloadyt-local-ce6fb753a7f951e87d170cc3f3e7f77c37e0b8e5.tar.lz
yt-local-ce6fb753a7f951e87d170cc3f3e7f77c37e0b8e5.tar.xz
yt-local-ce6fb753a7f951e87d170cc3f3e7f77c37e0b8e5.zip
correctly handle 'search instead for'
Diffstat (limited to 'youtube')
-rw-r--r--  youtube/search.py | 12
1 file changed, 7 insertions(+), 5 deletions(-)
diff --git a/youtube/search.py b/youtube/search.py
index 13daa2c..2307656 100644
--- a/youtube/search.py
+++ b/youtube/search.py
@@ -40,11 +40,12 @@ features = {
'location': 23,
}
-def page_number_to_sp_parameter(page):
+def page_number_to_sp_parameter(page, autocorrect=1):
offset = (int(page) - 1)*20 # 20 results per page
- return base64.urlsafe_b64encode(proto.uint(9, offset) + proto.string(61, b'')).decode('ascii')
+ autocorrect = proto.nested(8, proto.uint(1, 1 - int(autocorrect) ))
+ return base64.urlsafe_b64encode(proto.uint(9, offset) + proto.string(61, b'') + autocorrect).decode('ascii')
-def get_search_json(query, page):
+def get_search_json(query, page, autocorrect):
url = "https://www.youtube.com/results?search_query=" + urllib.parse.quote_plus(query)
headers = {
'Host': 'www.youtube.com',
@@ -54,7 +55,7 @@ def get_search_json(query, page):
'X-YouTube-Client-Name': '1',
'X-YouTube-Client-Version': '2.20180418',
}
- url += "&pbj=1&sp=" + page_number_to_sp_parameter(page)
+ url += "&pbj=1&sp=" + page_number_to_sp_parameter(page, autocorrect)
content = common.fetch_url(url, headers=headers)
info = json.loads(content)
return info
@@ -83,8 +84,9 @@ def get_search_page(query_string, parameters=()):
return yt_search_template
query = qs_query["query"][0]
page = qs_query.get("page", "1")[0]
+ autocorrect = qs_query.get("autocorrect", "1")[0]
- info = get_search_json(query, page)
+ info = get_search_json(query, page, autocorrect)
estimated_results = int(info[1]['response']['estimatedResults'])
estimated_pages = ceil(estimated_results/20)