author    | James Taylor <user234683@users.noreply.github.com> | 2018-12-26 17:24:53 -0800
committer | James Taylor <user234683@users.noreply.github.com> | 2018-12-26 17:24:53 -0800
commit    | 6a23df8c90cd3def49f83a68b501f785eefc6b37 (patch)
tree      | 4e6eabc8fb964fc6fccf4947d8b5bbe9766debaa
parent    | b321b5fc6484c38d861530e6b89405b062e32459 (diff)
Don't use tor when logging in
-rw-r--r-- | python/sockshandler.py  | 79
-rw-r--r-- | server.py               | 16
-rw-r--r-- | youtube/accounts.py     | 10
-rw-r--r-- | youtube/common.py       | 13
-rw-r--r-- | youtube/post_comment.py | 23
-rw-r--r-- | youtube/watch.py        |  6

6 files changed, 116 insertions, 31 deletions
diff --git a/python/sockshandler.py b/python/sockshandler.py
new file mode 100644
index 0000000..26c8343
--- /dev/null
+++ b/python/sockshandler.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python
+"""
+SocksiPy + urllib2 handler
+
+version: 0.3
+author: e<e@tr0ll.in>
+
+This module provides a Handler which you can use with urllib2 to allow it to tunnel your connection through a socks.sockssocket socket, with out monkey patching the original socket...
+"""
+import ssl
+
+try:
+    import urllib2
+    import httplib
+except ImportError: # Python 3
+    import urllib.request as urllib2
+    import http.client as httplib
+
+import socks # $ pip install PySocks
+
+def merge_dict(a, b):
+    d = a.copy()
+    d.update(b)
+    return d
+
+class SocksiPyConnection(httplib.HTTPConnection):
+    def __init__(self, proxytype, proxyaddr, proxyport=None, rdns=True, username=None, password=None, *args, **kwargs):
+        self.proxyargs = (proxytype, proxyaddr, proxyport, rdns, username, password)
+        httplib.HTTPConnection.__init__(self, *args, **kwargs)
+
+    def connect(self):
+        self.sock = socks.socksocket()
+        self.sock.setproxy(*self.proxyargs)
+        if type(self.timeout) in (int, float):
+            self.sock.settimeout(self.timeout)
+        self.sock.connect((self.host, self.port))
+
+class SocksiPyConnectionS(httplib.HTTPSConnection):
+    def __init__(self, proxytype, proxyaddr, proxyport=None, rdns=True, username=None, password=None, *args, **kwargs):
+        self.proxyargs = (proxytype, proxyaddr, proxyport, rdns, username, password)
+        httplib.HTTPSConnection.__init__(self, *args, **kwargs)
+
+    def connect(self):
+        sock = socks.socksocket()
+        sock.setproxy(*self.proxyargs)
+        if type(self.timeout) in (int, float):
+            sock.settimeout(self.timeout)
+        sock.connect((self.host, self.port))
+        self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file)
+
+class SocksiPyHandler(urllib2.HTTPHandler, urllib2.HTTPSHandler):
+    def __init__(self, *args, **kwargs):
+        self.args = args
+        self.kw = kwargs
+        urllib2.HTTPHandler.__init__(self)
+
+    def http_open(self, req):
+        def build(host, port=None, timeout=0, **kwargs):
+            kw = merge_dict(self.kw, kwargs)
+            conn = SocksiPyConnection(*self.args, host=host, port=port, timeout=timeout, **kw)
+            return conn
+        return self.do_open(build, req)
+
+    def https_open(self, req):
+        def build(host, port=None, timeout=0, **kwargs):
+            kw = merge_dict(self.kw, kwargs)
+            conn = SocksiPyConnectionS(*self.args, host=host, port=port, timeout=timeout, **kw)
+            return conn
+        return self.do_open(build, req)
+
+if __name__ == "__main__":
+    import sys
+    try:
+        port = int(sys.argv[1])
+    except (ValueError, IndexError):
+        port = 9050
+    opener = urllib2.build_opener(SocksiPyHandler(socks.PROXY_TYPE_SOCKS5, "localhost", port))
+    print("HTTP: " + opener.open("http://httpbin.org/ip").read().decode())
+    print("HTTPS: " + opener.open("https://httpbin.org/ip").read().decode())
diff --git a/server.py b/server.py
--- a/server.py
+++ b/server.py
@@ -7,7 +7,7 @@ from youtube.youtube import youtube
 import http_errors
 import urllib
 import socket
-import socks
+import socks, sockshandler
 import subprocess
 import re
 
@@ -41,8 +41,15 @@ def proxy_site(env, start_response):
     url = "https://" + env['SERVER_NAME'] + env['PATH_INFO']
     if env['QUERY_STRING']:
         url += '?' + env['QUERY_STRING']
+
+
     req = urllib.request.Request(url, headers=headers)
-    response = urllib.request.urlopen(req, timeout = 10)
+    if settings.route_tor:
+        opener = urllib.request.build_opener(sockshandler.SocksiPyHandler(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9150))
+        response = opener.open(req, timeout=10)
+    else:
+        response = urllib.request.urlopen(req, timeout=10)
+
     start_response('200 OK', response.getheaders() )
     return response.read()
 
@@ -141,11 +148,6 @@ def site_dispatch(env, start_response):
 
 
 
-if settings.route_tor:
-    #subprocess.Popen(TOR_PATH)
-    socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, '127.0.0.1', 9150)
-    socket.socket = socks.socksocket
-    gevent.socket.socket = socks.socksocket
 
 if settings.allow_foreign_addresses:
     server = WSGIServer(('0.0.0.0', settings.port_number), site_dispatch)
diff --git a/youtube/accounts.py b/youtube/accounts.py
index 9696899..f9bbc75 100644
--- a/youtube/accounts.py
+++ b/youtube/accounts.py
@@ -180,7 +180,8 @@ def _login(username, password, cookie_jar):
     Taken from youtube-dl
     """
 
-    login_page = common.fetch_url(_LOGIN_URL, yt_dl_headers, report_text='Downloaded login page', cookie_jar_receive=cookie_jar).decode('utf-8')
+
+    login_page = common.fetch_url(_LOGIN_URL, yt_dl_headers, report_text='Downloaded login page', cookie_jar_receive=cookie_jar, use_tor=False).decode('utf-8')
     '''with open('debug/login_page', 'w', encoding='utf-8') as f:
         f.write(login_page)'''
     #print(cookie_jar.as_lwp_str())
@@ -206,7 +207,7 @@ def _login(username, password, cookie_jar):
         'Google-Accounts-XSRF': 1,
     }
     headers.update(yt_dl_headers)
-    result = common.fetch_url(url, headers, report_text=note, data=data, cookie_jar_send=cookie_jar, cookie_jar_receive=cookie_jar).decode('utf-8')
+    result = common.fetch_url(url, headers, report_text=note, data=data, cookie_jar_send=cookie_jar, cookie_jar_receive=cookie_jar, use_tor=False).decode('utf-8')
     #print(cookie_jar.as_lwp_str())
     '''with open('debug/' + note, 'w', encoding='utf-8') as f:
         f.write(result)'''
@@ -338,10 +339,13 @@ def _login(username, password, cookie_jar):
         return False
 
     try:
-        check_cookie_results = common.fetch_url(check_cookie_url, headers=yt_dl_headers, report_text="Checked cookie", cookie_jar_send=cookie_jar, cookie_jar_receive=cookie_jar).decode('utf-8')
+        check_cookie_results = common.fetch_url(check_cookie_url, headers=yt_dl_headers, report_text="Checked cookie", cookie_jar_send=cookie_jar, cookie_jar_receive=cookie_jar, use_tor=False).decode('utf-8')
     except (urllib.error.URLError, compat_http_client.HTTPException, socket.error) as err:
         return False
 
+    '''with open('debug/check_cookie_results', 'w', encoding='utf-8') as f:
+        f.write(check_cookie_results)'''
+
     if 'https://myaccount.google.com/' not in check_cookie_results:
         warn('Unable to log in')
         return False
diff --git a/youtube/common.py b/youtube/common.py
index 655f24c..6b0051b 100644
--- a/youtube/common.py
+++ b/youtube/common.py
@@ -1,5 +1,6 @@
 from youtube.template import Template
 from youtube import local_playlist
+import settings
 import html
 import json
 import re
@@ -7,7 +8,7 @@ import urllib.parse
 import gzip
 import brotli
 import time
-
+import socks, sockshandler
 
 URL_ORIGIN = "/https://www.youtube.com"
 
@@ -148,7 +149,7 @@ def decode_content(content, encoding_header):
         content = gzip.decompress(content)
     return content
 
-def fetch_url(url, headers=(), timeout=15, report_text=None, data=None, cookie_jar_send=None, cookie_jar_receive=None):
+def fetch_url(url, headers=(), timeout=15, report_text=None, data=None, cookie_jar_send=None, cookie_jar_receive=None, use_tor=True):
     '''
     When cookie_jar_send is set to a CookieJar object,
     those cookies will be sent in the request (but cookies in response will not be merged into it)
@@ -168,10 +169,16 @@ def fetch_url(url, headers=(), timeout=15, report_text=None, data=None, cookie_j
 
     start_time = time.time()
 
+
     req = urllib.request.Request(url, data=data, headers=headers)
     if cookie_jar_send is not None:
         cookie_jar_send.add_cookie_header(req)
-    response = urllib.request.urlopen(req, timeout=timeout)
+
+    if use_tor and settings.route_tor:
+        opener = urllib.request.build_opener(sockshandler.SocksiPyHandler(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9150))
+        response = opener.open(req, timeout=timeout)
+    else:
+        response = urllib.request.urlopen(req, timeout=timeout)
     response_time = time.time()
 
     if cookie_jar_receive is not None:
diff --git a/youtube/post_comment.py b/youtube/post_comment.py
index d43d5f4..7478a5d 100644
--- a/youtube/post_comment.py
+++ b/youtube/post_comment.py
@@ -13,7 +13,6 @@ def _post_comment(text, video_id, session_token, cookie_jar):
         'User-Agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_1 like Mac OS X) AppleWebKit/603.1.30 (KHTML, like Gecko) Version/10.0 Mobile/14E304 Safari/602.1',
         'Accept': '*/*',
         'Accept-Language': 'en-US,en;q=0.5',
-        'Accept-Encoding': 'gzip, deflate, br',
         'X-YouTube-Client-Name': '2',
         'X-YouTube-Client-Version': '2.20180823',
         'Content-Type': 'application/x-www-form-urlencoded',
@@ -31,11 +30,9 @@ def _post_comment(text, video_id, session_token, cookie_jar):
     }
 
     data = urllib.parse.urlencode(data_dict).encode()
-    req = urllib.request.Request("https://m.youtube.com/service_ajax?name=createCommentEndpoint", headers=headers, data=data)
-    cookie_jar.add_cookie_header(req)
-    response = urllib.request.urlopen(req, timeout = 5)
-    content = response.read()
-    content = common.decode_content(content, response.getheader('Content-Encoding', default='identity'))
+
+    content = common.fetch_url("https://m.youtube.com/service_ajax?name=createCommentEndpoint", headers=headers, data=data, cookie_jar_send=cookie_jar)
+
     code = json.loads(content)['code']
     print("Comment posting code: " + code)
     return code
@@ -48,7 +45,6 @@ def _post_comment_reply(text, video_id, parent_comment_id, session_token, cookie
         'User-Agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_1 like Mac OS X) AppleWebKit/603.1.30 (KHTML, like Gecko) Version/10.0 Mobile/14E304 Safari/602.1',
         'Accept': '*/*',
         'Accept-Language': 'en-US,en;q=0.5',
-        'Accept-Encoding': 'gzip, deflate, br',
         'X-YouTube-Client-Name': '2',
         'X-YouTube-Client-Version': '2.20180823',
         'Content-Type': 'application/x-www-form-urlencoded',
@@ -66,11 +62,8 @@ def _post_comment_reply(text, video_id, parent_comment_id, session_token, cookie
     }
 
     data = urllib.parse.urlencode(data_dict).encode()
-    req = urllib.request.Request("https://m.youtube.com/service_ajax?name=createCommentReplyEndpoint", headers=headers, data=data)
-    cookie_jar.add_cookie_header(req)
-    response = urllib.request.urlopen(req, timeout = 5)
-    content = response.read()
-    content = common.decode_content(content, response.getheader('Content-Encoding', default='identity'))
+    content = common.fetch_url("https://m.youtube.com/service_ajax?name=createCommentReplyEndpoint", headers=headers, data=data, cookie_jar_send=cookie_jar)
+
     code = json.loads(content)['code']
     print("Comment posting code: " + code)
     return code
@@ -82,7 +75,6 @@ def delete_comment(video_id, comment_id, author_id, session_token, cookie_jar):
         'User-Agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_1 like Mac OS X) AppleWebKit/603.1.30 (KHTML, like Gecko) Version/10.0 Mobile/14E304 Safari/602.1',
         'Accept': '*/*',
         'Accept-Language': 'en-US,en;q=0.5',
-        'Accept-Encoding': 'gzip, deflate, br',
         'X-YouTube-Client-Name': '2',
         'X-YouTube-Client-Version': '2.20180823',
         'Content-Type': 'application/x-www-form-urlencoded',
@@ -98,10 +90,7 @@ def delete_comment(video_id, comment_id, author_id, session_token, cookie_jar):
     }
     data = urllib.parse.urlencode(data_dict).encode()
-    req = urllib.request.Request("https://m.youtube.com/service_ajax?name=performCommentActionEndpoint", headers=headers, data=data)
-    cookie_jar.add_cookie_header(req)
-    response = urllib.request.urlopen(req, timeout = 5)
-    content = response.read()
+    content = common.fetch_url("https://m.youtube.com/service_ajax?name=performCommentActionEndpoint", headers=headers, data=data, cookie_jar_send=cookie_jar)
 
 xsrf_token_regex = re.compile(r'''XSRF_TOKEN"\s*:\s*"([\w-]*(?:=|%3D){0,2})"''')
 
 def post_comment(parameters, fields):
diff --git a/youtube/watch.py b/youtube/watch.py
index 2d10ec1..4051a3f 100644
--- a/youtube/watch.py
+++ b/youtube/watch.py
@@ -232,7 +232,11 @@ def get_watch_page(query_string):
     parsed_qs = urllib.parse.parse_qs(query_string)
     id = parsed_qs['v'][0]
     lc = common.default_multi_get(parsed_qs, 'lc', 0, default='')
-    downloader = YoutubeDL(params={'youtube_include_dash_manifest':False})
+    if settings.route_tor:
+        proxy = 'socks5://127.0.0.1:9150/'
+    else:
+        proxy = ''
+    downloader = YoutubeDL(params={'youtube_include_dash_manifest':False, 'proxy':proxy})
     tasks = (
         gevent.spawn(comments.video_comments, id, int(settings.default_comment_sorting), lc=lc ),
         gevent.spawn(extract_info, downloader, "https://www.youtube.com/watch?v=" + id, download=False)
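
For readers who want to try the routing behaviour this commit introduces, here is a minimal, self-contained sketch (not part of the repository) of the same pattern: build a urllib opener around SocksiPyHandler when traffic should go through a local Tor SOCKS proxy, and fall back to a plain urlopen when it should not, which is what the login path now does via use_tor=False. The proxy address 127.0.0.1:9150 matches the value used in the diff; the fetch helper name and the httpbin.org test URL are assumptions made for illustration.

```python
# Sketch only: assumes PySocks is installed (pip install PySocks), that
# python/sockshandler.py from this commit is importable, and that a Tor
# SOCKS proxy is listening on 127.0.0.1:9150 (the Tor Browser default).
import urllib.request

import socks
import sockshandler


def fetch(url, use_tor=True, route_tor=True, timeout=15):
    """Toy analogue of common.fetch_url: route through Tor unless the
    caller opts out, the way the login code now passes use_tor=False."""
    req = urllib.request.Request(url)
    if use_tor and route_tor:
        opener = urllib.request.build_opener(
            sockshandler.SocksiPyHandler(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9150))
        return opener.open(req, timeout=timeout).read()
    return urllib.request.urlopen(req, timeout=timeout).read()


if __name__ == "__main__":
    # The reported IP should differ between the two calls: the first goes
    # through the Tor exit node, the second connects directly.
    print(fetch("https://httpbin.org/ip").decode())
    print(fetch("https://httpbin.org/ip", use_tor=False).decode())
```

Building a per-request opener, rather than monkey-patching socket.socket globally (the approach the commit removes from server.py), keeps Tor routing opt-in per call, which is what makes the use_tor=False escape hatch possible.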