Mirror of https://github.com/yt-dlp/yt-dlp.git (synced 2024-11-24 07:40:30 +01:00)

[cleanup] Remove unused scripts/tests (#2173)
Authored by: fstirlitz, pukkandan

commit ab96d1ad1b (parent 5a727063c5)
@ -1,435 +0,0 @@
|
||||
# UNUSED
|
||||
|
||||
#!/usr/bin/python3
|
||||
|
||||
import argparse
|
||||
import ctypes
|
||||
import functools
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
import threading
|
||||
import traceback
|
||||
import os.path
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname((os.path.abspath(__file__)))))
|
||||
from yt_dlp.compat import (
|
||||
compat_input,
|
||||
compat_http_server,
|
||||
compat_str,
|
||||
compat_urlparse,
|
||||
)
|
||||
|
||||
# These are not used outside of buildserver.py thus not in compat.py
|
||||
|
||||
try:
|
||||
import winreg as compat_winreg
|
||||
except ImportError: # Python 2
|
||||
import _winreg as compat_winreg
|
||||
|
||||
try:
|
||||
import socketserver as compat_socketserver
|
||||
except ImportError: # Python 2
|
||||
import SocketServer as compat_socketserver
|
||||
|
||||
|
||||
class BuildHTTPServer(compat_socketserver.ThreadingMixIn, compat_http_server.HTTPServer):
|
||||
allow_reuse_address = True
|
||||
|
||||
|
||||
advapi32 = ctypes.windll.advapi32
|
||||
|
||||
SC_MANAGER_ALL_ACCESS = 0xf003f
|
||||
SC_MANAGER_CREATE_SERVICE = 0x02
|
||||
SERVICE_WIN32_OWN_PROCESS = 0x10
|
||||
SERVICE_AUTO_START = 0x2
|
||||
SERVICE_ERROR_NORMAL = 0x1
|
||||
DELETE = 0x00010000
|
||||
SERVICE_STATUS_START_PENDING = 0x00000002
|
||||
SERVICE_STATUS_RUNNING = 0x00000004
|
||||
SERVICE_ACCEPT_STOP = 0x1
|
||||
|
||||
SVCNAME = 'youtubedl_builder'
|
||||
|
||||
LPTSTR = ctypes.c_wchar_p
|
||||
START_CALLBACK = ctypes.WINFUNCTYPE(None, ctypes.c_int, ctypes.POINTER(LPTSTR))
|
||||
|
||||
|
||||
class SERVICE_TABLE_ENTRY(ctypes.Structure):
|
||||
_fields_ = [
|
||||
('lpServiceName', LPTSTR),
|
||||
('lpServiceProc', START_CALLBACK)
|
||||
]
|
||||
|
||||
|
||||
HandlerEx = ctypes.WINFUNCTYPE(
|
||||
ctypes.c_int, # return
|
||||
ctypes.c_int, # dwControl
|
||||
ctypes.c_int, # dwEventType
|
||||
ctypes.c_void_p, # lpEventData,
|
||||
ctypes.c_void_p, # lpContext,
|
||||
)
|
||||
|
||||
|
||||
def _ctypes_array(c_type, py_array):
|
||||
ar = (c_type * len(py_array))()
|
||||
ar[:] = py_array
|
||||
return ar
|
||||
|
||||
|
||||
def win_OpenSCManager():
|
||||
res = advapi32.OpenSCManagerW(None, None, SC_MANAGER_ALL_ACCESS)
|
||||
if not res:
|
||||
raise Exception('Opening service manager failed - '
|
||||
'are you running this as administrator?')
|
||||
return res
|
||||
|
||||
|
||||
def win_install_service(service_name, cmdline):
|
||||
manager = win_OpenSCManager()
|
||||
try:
|
||||
h = advapi32.CreateServiceW(
|
||||
manager, service_name, None,
|
||||
SC_MANAGER_CREATE_SERVICE, SERVICE_WIN32_OWN_PROCESS,
|
||||
SERVICE_AUTO_START, SERVICE_ERROR_NORMAL,
|
||||
cmdline, None, None, None, None, None)
|
||||
if not h:
|
||||
raise OSError('Service creation failed: %s' % ctypes.FormatError())
|
||||
|
||||
advapi32.CloseServiceHandle(h)
|
||||
finally:
|
||||
advapi32.CloseServiceHandle(manager)
|
||||
|
||||
|
||||
def win_uninstall_service(service_name):
|
||||
manager = win_OpenSCManager()
|
||||
try:
|
||||
h = advapi32.OpenServiceW(manager, service_name, DELETE)
|
||||
if not h:
|
||||
raise OSError('Could not find service %s: %s' % (
|
||||
service_name, ctypes.FormatError()))
|
||||
|
||||
try:
|
||||
if not advapi32.DeleteService(h):
|
||||
raise OSError('Deletion failed: %s' % ctypes.FormatError())
|
||||
finally:
|
||||
advapi32.CloseServiceHandle(h)
|
||||
finally:
|
||||
advapi32.CloseServiceHandle(manager)
|
||||
|
||||
|
||||
def win_service_report_event(service_name, msg, is_error=True):
|
||||
with open('C:/sshkeys/log', 'a', encoding='utf-8') as f:
|
||||
f.write(msg + '\n')
|
||||
|
||||
event_log = advapi32.RegisterEventSourceW(None, service_name)
|
||||
if not event_log:
|
||||
raise OSError('Could not report event: %s' % ctypes.FormatError())
|
||||
|
||||
try:
|
||||
type_id = 0x0001 if is_error else 0x0004
|
||||
event_id = 0xc0000000 if is_error else 0x40000000
|
||||
lines = _ctypes_array(LPTSTR, [msg])
|
||||
|
||||
if not advapi32.ReportEventW(
|
||||
event_log, type_id, 0, event_id, None, len(lines), 0,
|
||||
lines, None):
|
||||
raise OSError('Event reporting failed: %s' % ctypes.FormatError())
|
||||
finally:
|
||||
advapi32.DeregisterEventSource(event_log)
|
||||
|
||||
|
||||
def win_service_handler(stop_event, *args):
|
||||
try:
|
||||
raise ValueError('Handler called with args ' + repr(args))
|
||||
TODO
|
||||
except Exception as e:
|
||||
tb = traceback.format_exc()
|
||||
msg = str(e) + '\n' + tb
|
||||
win_service_report_event(service_name, msg, is_error=True)
|
||||
raise
|
||||
|
||||
|
||||
def win_service_set_status(handle, status_code):
|
||||
svcStatus = SERVICE_STATUS()
|
||||
svcStatus.dwServiceType = SERVICE_WIN32_OWN_PROCESS
|
||||
svcStatus.dwCurrentState = status_code
|
||||
svcStatus.dwControlsAccepted = SERVICE_ACCEPT_STOP
|
||||
|
||||
svcStatus.dwServiceSpecificExitCode = 0
|
||||
|
||||
if not advapi32.SetServiceStatus(handle, ctypes.byref(svcStatus)):
|
||||
raise OSError('SetServiceStatus failed: %r' % ctypes.FormatError())
|
||||
|
||||
|
||||
def win_service_main(service_name, real_main, argc, argv_raw):
|
||||
try:
|
||||
# args = [argv_raw[i].value for i in range(argc)]
|
||||
stop_event = threading.Event()
|
||||
handler = HandlerEx(functools.partial(stop_event, win_service_handler))
|
||||
h = advapi32.RegisterServiceCtrlHandlerExW(service_name, handler, None)
|
||||
if not h:
|
||||
raise OSError('Handler registration failed: %s' %
|
||||
ctypes.FormatError())
|
||||
|
||||
TODO
|
||||
except Exception as e:
|
||||
tb = traceback.format_exc()
|
||||
msg = str(e) + '\n' + tb
|
||||
win_service_report_event(service_name, msg, is_error=True)
|
||||
raise
|
||||
|
||||
|
||||
def win_service_start(service_name, real_main):
|
||||
try:
|
||||
cb = START_CALLBACK(
|
||||
functools.partial(win_service_main, service_name, real_main))
|
||||
dispatch_table = _ctypes_array(SERVICE_TABLE_ENTRY, [
|
||||
SERVICE_TABLE_ENTRY(
|
||||
service_name,
|
||||
cb
|
||||
),
|
||||
SERVICE_TABLE_ENTRY(None, ctypes.cast(None, START_CALLBACK))
|
||||
])
|
||||
|
||||
if not advapi32.StartServiceCtrlDispatcherW(dispatch_table):
|
||||
raise OSError('ctypes start failed: %s' % ctypes.FormatError())
|
||||
except Exception as e:
|
||||
tb = traceback.format_exc()
|
||||
msg = str(e) + '\n' + tb
|
||||
win_service_report_event(service_name, msg, is_error=True)
|
||||
raise
|
||||
|
||||
|
||||
def main(args=None):
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('-i', '--install',
|
||||
action='store_const', dest='action', const='install',
|
||||
help='Launch at Windows startup')
|
||||
parser.add_argument('-u', '--uninstall',
|
||||
action='store_const', dest='action', const='uninstall',
|
||||
help='Remove Windows service')
|
||||
parser.add_argument('-s', '--service',
|
||||
action='store_const', dest='action', const='service',
|
||||
help='Run as a Windows service')
|
||||
parser.add_argument('-b', '--bind', metavar='<host:port>',
|
||||
action='store', default='0.0.0.0:8142',
|
||||
help='Bind to host:port (default %default)')
|
||||
options = parser.parse_args(args=args)
|
||||
|
||||
if options.action == 'install':
|
||||
fn = os.path.abspath(__file__).replace('v:', '\\\\vboxsrv\\vbox')
|
||||
cmdline = '%s %s -s -b %s' % (sys.executable, fn, options.bind)
|
||||
win_install_service(SVCNAME, cmdline)
|
||||
return
|
||||
|
||||
if options.action == 'uninstall':
|
||||
win_uninstall_service(SVCNAME)
|
||||
return
|
||||
|
||||
if options.action == 'service':
|
||||
win_service_start(SVCNAME, main)
|
||||
return
|
||||
|
||||
host, port_str = options.bind.split(':')
|
||||
port = int(port_str)
|
||||
|
||||
print('Listening on %s:%d' % (host, port))
|
||||
srv = BuildHTTPServer((host, port), BuildHTTPRequestHandler)
|
||||
thr = threading.Thread(target=srv.serve_forever)
|
||||
thr.start()
|
||||
compat_input('Press ENTER to shut down')
|
||||
srv.shutdown()
|
||||
thr.join()
|
||||
|
||||
|
||||
def rmtree(path):
|
||||
for name in os.listdir(path):
|
||||
fname = os.path.join(path, name)
|
||||
if os.path.isdir(fname):
|
||||
rmtree(fname)
|
||||
else:
|
||||
os.chmod(fname, 0o666)
|
||||
os.remove(fname)
|
||||
os.rmdir(path)
|
||||
|
||||
|
||||
class BuildError(Exception):
|
||||
def __init__(self, output, code=500):
|
||||
self.output = output
|
||||
self.code = code
|
||||
|
||||
def __str__(self):
|
||||
return self.output
|
||||
|
||||
|
||||
class HTTPError(BuildError):
|
||||
pass
|
||||
|
||||
|
||||
class PythonBuilder(object):
|
||||
def __init__(self, **kwargs):
|
||||
python_version = kwargs.pop('python', '3.4')
|
||||
python_path = None
|
||||
for node in ('Wow6432Node\\', ''):
|
||||
try:
|
||||
key = compat_winreg.OpenKey(
|
||||
compat_winreg.HKEY_LOCAL_MACHINE,
|
||||
r'SOFTWARE\%sPython\PythonCore\%s\InstallPath' % (node, python_version))
|
||||
try:
|
||||
python_path, _ = compat_winreg.QueryValueEx(key, '')
|
||||
finally:
|
||||
compat_winreg.CloseKey(key)
|
||||
break
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if not python_path:
|
||||
raise BuildError('No such Python version: %s' % python_version)
|
||||
|
||||
self.pythonPath = python_path
|
||||
|
||||
super(PythonBuilder, self).__init__(**kwargs)
|
||||
|
||||
|
||||
class GITInfoBuilder(object):
|
||||
def __init__(self, **kwargs):
|
||||
try:
|
||||
self.user, self.repoName = kwargs['path'][:2]
|
||||
self.rev = kwargs.pop('rev')
|
||||
except ValueError:
|
||||
raise BuildError('Invalid path')
|
||||
except KeyError as e:
|
||||
raise BuildError('Missing mandatory parameter "%s"' % e.args[0])
|
||||
|
||||
path = os.path.join(os.environ['APPDATA'], 'Build archive', self.repoName, self.user)
|
||||
if not os.path.exists(path):
|
||||
os.makedirs(path)
|
||||
self.basePath = tempfile.mkdtemp(dir=path)
|
||||
self.buildPath = os.path.join(self.basePath, 'build')
|
||||
|
||||
super(GITInfoBuilder, self).__init__(**kwargs)
|
||||
|
||||
|
||||
class GITBuilder(GITInfoBuilder):
|
||||
def build(self):
|
||||
try:
|
||||
subprocess.check_output(['git', 'clone', 'git://github.com/%s/%s.git' % (self.user, self.repoName), self.buildPath])
|
||||
subprocess.check_output(['git', 'checkout', self.rev], cwd=self.buildPath)
|
||||
except subprocess.CalledProcessError as e:
|
||||
raise BuildError(e.output)
|
||||
|
||||
super(GITBuilder, self).build()
|
||||
|
||||
|
||||
class YoutubeDLBuilder(object):
|
||||
authorizedUsers = ['fraca7', 'phihag', 'rg3', 'FiloSottile', 'ytdl-org']
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
if self.repoName != 'yt-dlp':
|
||||
raise BuildError('Invalid repository "%s"' % self.repoName)
|
||||
if self.user not in self.authorizedUsers:
|
||||
raise HTTPError('Unauthorized user "%s"' % self.user, 401)
|
||||
|
||||
super(YoutubeDLBuilder, self).__init__(**kwargs)
|
||||
|
||||
def build(self):
|
||||
try:
|
||||
proc = subprocess.Popen([os.path.join(self.pythonPath, 'python.exe'), 'setup.py', 'py2exe'], stdin=subprocess.PIPE, cwd=self.buildPath)
|
||||
proc.wait()
|
||||
#subprocess.check_output([os.path.join(self.pythonPath, 'python.exe'), 'setup.py', 'py2exe'],
|
||||
# cwd=self.buildPath)
|
||||
except subprocess.CalledProcessError as e:
|
||||
raise BuildError(e.output)
|
||||
|
||||
super(YoutubeDLBuilder, self).build()
|
||||
|
||||
|
||||
class DownloadBuilder(object):
|
||||
def __init__(self, **kwargs):
|
||||
self.handler = kwargs.pop('handler')
|
||||
self.srcPath = os.path.join(self.buildPath, *tuple(kwargs['path'][2:]))
|
||||
self.srcPath = os.path.abspath(os.path.normpath(self.srcPath))
|
||||
if not self.srcPath.startswith(self.buildPath):
|
||||
raise HTTPError(self.srcPath, 401)
|
||||
|
||||
super(DownloadBuilder, self).__init__(**kwargs)
|
||||
|
||||
def build(self):
|
||||
if not os.path.exists(self.srcPath):
|
||||
raise HTTPError('No such file', 404)
|
||||
if os.path.isdir(self.srcPath):
|
||||
raise HTTPError('Is a directory: %s' % self.srcPath, 401)
|
||||
|
||||
self.handler.send_response(200)
|
||||
self.handler.send_header('Content-Type', 'application/octet-stream')
|
||||
self.handler.send_header('Content-Disposition', 'attachment; filename=%s' % os.path.split(self.srcPath)[-1])
|
||||
self.handler.send_header('Content-Length', str(os.stat(self.srcPath).st_size))
|
||||
self.handler.end_headers()
|
||||
|
||||
with open(self.srcPath, 'rb') as src:
|
||||
shutil.copyfileobj(src, self.handler.wfile)
|
||||
|
||||
super(DownloadBuilder, self).build()
|
||||
|
||||
|
||||
class CleanupTempDir(object):
|
||||
def build(self):
|
||||
try:
|
||||
rmtree(self.basePath)
|
||||
except Exception as e:
|
||||
print('WARNING deleting "%s": %s' % (self.basePath, e))
|
||||
|
||||
super(CleanupTempDir, self).build()
|
||||
|
||||
|
||||
class Null(object):
|
||||
def __init__(self, **kwargs):
|
||||
pass
|
||||
|
||||
def start(self):
|
||||
pass
|
||||
|
||||
def close(self):
|
||||
pass
|
||||
|
||||
def build(self):
|
||||
pass
|
||||
|
||||
|
||||
class Builder(PythonBuilder, GITBuilder, YoutubeDLBuilder, DownloadBuilder, CleanupTempDir, Null):
|
||||
pass
|
||||
|
||||
|
||||
class BuildHTTPRequestHandler(compat_http_server.BaseHTTPRequestHandler):
|
||||
actionDict = {'build': Builder, 'download': Builder} # They're the same, no more caching.
|
||||
|
||||
def do_GET(self):
|
||||
path = compat_urlparse.urlparse(self.path)
|
||||
paramDict = dict([(key, value[0]) for key, value in compat_urlparse.parse_qs(path.query).items()])
|
||||
action, _, path = path.path.strip('/').partition('/')
|
||||
if path:
|
||||
path = path.split('/')
|
||||
if action in self.actionDict:
|
||||
try:
|
||||
builder = self.actionDict[action](path=path, handler=self, **paramDict)
|
||||
builder.start()
|
||||
try:
|
||||
builder.build()
|
||||
finally:
|
||||
builder.close()
|
||||
except BuildError as e:
|
||||
self.send_response(e.code)
|
||||
msg = compat_str(e).encode('UTF-8')
|
||||
self.send_header('Content-Type', 'text/plain; charset=UTF-8')
|
||||
self.send_header('Content-Length', len(msg))
|
||||
self.end_headers()
|
||||
self.wfile.write(msg)
|
||||
else:
|
||||
self.send_response(500, 'Unknown build method "%s"' % action)
|
||||
else:
|
||||
self.send_response(500, 'Malformed URL')
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
@@ -1,112 +0,0 @@
# Unused

#!/usr/bin/env python3
from __future__ import unicode_literals

import io
import json
import mimetypes
import netrc
import optparse
import os
import re
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from yt_dlp.compat import (
    compat_basestring,
    compat_getpass,
    compat_print,
    compat_urllib_request,
)
from yt_dlp.utils import (
    make_HTTPS_handler,
    sanitized_Request,
)


class GitHubReleaser(object):
    _API_URL = 'https://api.github.com/repos/ytdl-org/youtube-dl/releases'
    _UPLOADS_URL = 'https://uploads.github.com/repos/ytdl-org/youtube-dl/releases/%s/assets?name=%s'
    _NETRC_MACHINE = 'github.com'

    def __init__(self, debuglevel=0):
        self._init_github_account()
        https_handler = make_HTTPS_handler({}, debuglevel=debuglevel)
        self._opener = compat_urllib_request.build_opener(https_handler)

    def _init_github_account(self):
        try:
            info = netrc.netrc().authenticators(self._NETRC_MACHINE)
            if info is not None:
                self._token = info[2]
                compat_print('Using GitHub credentials found in .netrc...')
                return
            else:
                compat_print('No GitHub credentials found in .netrc')
        except (IOError, netrc.NetrcParseError):
            compat_print('Unable to parse .netrc')
        self._token = compat_getpass(
            'Type your GitHub PAT (personal access token) and press [Return]: ')

    def _call(self, req):
        if isinstance(req, compat_basestring):
            req = sanitized_Request(req)
        req.add_header('Authorization', 'token %s' % self._token)
        response = self._opener.open(req).read().decode('utf-8')
        return json.loads(response)

    def list_releases(self):
        return self._call(self._API_URL)

    def create_release(self, tag_name, name=None, body='', draft=False, prerelease=False):
        data = {
            'tag_name': tag_name,
            'target_commitish': 'master',
            'name': name,
            'body': body,
            'draft': draft,
            'prerelease': prerelease,
        }
        req = sanitized_Request(self._API_URL, json.dumps(data).encode('utf-8'))
        return self._call(req)

    def create_asset(self, release_id, asset):
        asset_name = os.path.basename(asset)
        url = self._UPLOADS_URL % (release_id, asset_name)
        # Our files are small enough to be loaded directly into memory.
        data = open(asset, 'rb').read()
        req = sanitized_Request(url, data)
        mime_type, _ = mimetypes.guess_type(asset_name)
        req.add_header('Content-Type', mime_type or 'application/octet-stream')
        return self._call(req)


def main():
    parser = optparse.OptionParser(usage='%prog CHANGELOG VERSION BUILDPATH')
    options, args = parser.parse_args()
    if len(args) != 3:
        parser.error('Expected a version and a build directory')

    changelog_file, version, build_path = args

    with io.open(changelog_file, encoding='utf-8') as inf:
        changelog = inf.read()

    mobj = re.search(r'(?s)version %s\n{2}(.+?)\n{3}' % version, changelog)
    body = mobj.group(1) if mobj else ''

    releaser = GitHubReleaser()

    new_release = releaser.create_release(
        version, name='yt-dlp %s' % version, body=body)
    release_id = new_release['id']

    for asset in os.listdir(build_path):
        compat_print('Uploading %s...' % asset)
        releaser.create_asset(release_id, os.path.join(build_path, asset))


if __name__ == '__main__':
    main()
@@ -1,43 +0,0 @@
#!/usr/bin/env python3
from __future__ import unicode_literals

import json
import sys
import hashlib
import os.path


if len(sys.argv) <= 1:
    print('Specify the version number as parameter')
    sys.exit()
version = sys.argv[1]

with open('update/LATEST_VERSION', 'w') as f:
    f.write(version)

versions_info = json.load(open('update/versions.json'))
if 'signature' in versions_info:
    del versions_info['signature']

new_version = {}

filenames = {
    'bin': 'yt-dlp',
    'exe': 'yt-dlp.exe',
    'tar': 'yt-dlp-%s.tar.gz' % version}
build_dir = os.path.join('..', '..', 'build', version)
for key, filename in filenames.items():
    url = 'https://yt-dl.org/downloads/%s/%s' % (version, filename)
    fn = os.path.join(build_dir, filename)
    with open(fn, 'rb') as f:
        data = f.read()
    if not data:
        raise ValueError('File %s is empty!' % fn)
    sha256sum = hashlib.sha256(data).hexdigest()
    new_version[key] = (url, sha256sum)

versions_info['versions'][version] = new_version
versions_info['latest'] = version

with open('update/versions.json', 'w') as jsonf:
    json.dump(versions_info, jsonf, indent=4, sort_keys=True)
@@ -1,22 +0,0 @@
#!/usr/bin/env python3
from __future__ import unicode_literals

import json

versions_info = json.load(open('update/versions.json'))
version = versions_info['latest']
version_dict = versions_info['versions'][version]

# Read template page
with open('download.html.in', 'r', encoding='utf-8') as tmplf:
    template = tmplf.read()

template = template.replace('@PROGRAM_VERSION@', version)
template = template.replace('@PROGRAM_URL@', version_dict['bin'][0])
template = template.replace('@PROGRAM_SHA256SUM@', version_dict['bin'][1])
template = template.replace('@EXE_URL@', version_dict['exe'][0])
template = template.replace('@EXE_SHA256SUM@', version_dict['exe'][1])
template = template.replace('@TAR_URL@', version_dict['tar'][0])
template = template.replace('@TAR_SHA256SUM@', version_dict['tar'][1])
with open('download.html', 'w', encoding='utf-8') as dlf:
    dlf.write(template)
@@ -1,34 +0,0 @@
#!/usr/bin/env python3
from __future__ import unicode_literals, with_statement

import rsa
import json
from binascii import hexlify

try:
    input = raw_input
except NameError:
    pass

versions_info = json.load(open('update/versions.json'))
if 'signature' in versions_info:
    del versions_info['signature']

print('Enter the PKCS1 private key, followed by a blank line:')
privkey = b''
while True:
    try:
        line = input()
    except EOFError:
        break
    if line == '':
        break
    privkey += line.encode('ascii') + b'\n'
privkey = rsa.PrivateKey.load_pkcs1(privkey)

signature = hexlify(rsa.pkcs1.sign(json.dumps(versions_info, sort_keys=True).encode('utf-8'), privkey, 'SHA-256')).decode()
print('signature: ' + signature)

versions_info['signature'] = signature
with open('update/versions.json', 'w') as versionsf:
    json.dump(versions_info, versionsf, indent=4, sort_keys=True)
@@ -1,21 +0,0 @@
#!/usr/bin/env python3
# coding: utf-8

from __future__ import with_statement, unicode_literals

import datetime
import glob
import io  # For Python 2 compatibility
import os
import re

year = str(datetime.datetime.now().year)
for fn in glob.glob('*.html*'):
    with io.open(fn, encoding='utf-8') as f:
        content = f.read()
    newc = re.sub(r'(?P<copyright>Copyright © 2011-)(?P<year>[0-9]{4})', 'Copyright © 2011-' + year, content)
    if content != newc:
        tmpFn = fn + '.part'
        with io.open(tmpFn, 'wt', encoding='utf-8') as outf:
            outf.write(newc)
        os.rename(tmpFn, fn)
@@ -1,76 +0,0 @@
#!/usr/bin/env python3
from __future__ import unicode_literals

import datetime
import io
import json
import textwrap


atom_template = textwrap.dedent("""\
    <?xml version="1.0" encoding="utf-8"?>
    <feed xmlns="http://www.w3.org/2005/Atom">
        <link rel="self" href="http://ytdl-org.github.io/youtube-dl/update/releases.atom" />
        <title>yt-dlp releases</title>
        <id>https://yt-dl.org/feed/yt-dlp-updates-feed</id>
        <updated>@TIMESTAMP@</updated>
        @ENTRIES@
    </feed>""")

entry_template = textwrap.dedent("""
    <entry>
        <id>https://yt-dl.org/feed/yt-dlp-updates-feed/yt-dlp-@VERSION@</id>
        <title>New version @VERSION@</title>
        <link href="http://ytdl-org.github.io/yt-dlp" />
        <content type="xhtml">
            <div xmlns="http://www.w3.org/1999/xhtml">
                Downloads available at <a href="https://yt-dl.org/downloads/@VERSION@/">https://yt-dl.org/downloads/@VERSION@/</a>
            </div>
        </content>
        <author>
            <name>The yt-dlp maintainers</name>
        </author>
        <updated>@TIMESTAMP@</updated>
    </entry>
""")

now = datetime.datetime.now()
now_iso = now.isoformat() + 'Z'

atom_template = atom_template.replace('@TIMESTAMP@', now_iso)

versions_info = json.load(open('update/versions.json'))
versions = list(versions_info['versions'].keys())
versions.sort()

entries = []
for v in versions:
    fields = v.split('.')
    year, month, day = map(int, fields[:3])
    faked = 0
    patchlevel = 0
    while True:
        try:
            datetime.date(year, month, day)
        except ValueError:
            day -= 1
            faked += 1
            assert day > 0
            continue
        break
    if len(fields) >= 4:
        try:
            patchlevel = int(fields[3])
        except ValueError:
            patchlevel = 1
    timestamp = '%04d-%02d-%02dT00:%02d:%02dZ' % (year, month, day, faked, patchlevel)

    entry = entry_template.replace('@TIMESTAMP@', timestamp)
    entry = entry.replace('@VERSION@', v)
    entries.append(entry)

entries_str = textwrap.indent(''.join(entries), '\t')
atom_template = atom_template.replace('@ENTRIES@', entries_str)

with io.open('update/releases.atom', 'w', encoding='utf-8') as atom_file:
    atom_file.write(atom_template)
@@ -1,37 +0,0 @@
#!/usr/bin/env python3
from __future__ import unicode_literals

import sys
import os
import textwrap

# We must be able to import yt_dlp
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))

import yt_dlp


def main():
    with open('supportedsites.html.in', 'r', encoding='utf-8') as tmplf:
        template = tmplf.read()

    ie_htmls = []
    for ie in yt_dlp.list_extractors(age_limit=None):
        ie_html = '<b>{}</b>'.format(ie.IE_NAME)
        ie_desc = getattr(ie, 'IE_DESC', None)
        if ie_desc is False:
            continue
        elif ie_desc is not None:
            ie_html += ': {}'.format(ie.IE_DESC)
        if not ie.working():
            ie_html += ' (Currently broken)'
        ie_htmls.append('<li>{}</li>'.format(ie_html))

    template = template.replace('@SITES@', textwrap.indent('\n'.join(ie_htmls), '\t'))

    with open('supportedsites.html', 'w', encoding='utf-8') as sitesf:
        sitesf.write(template)


if __name__ == '__main__':
    main()
@@ -1,6 +0,0 @@

# source this file in your shell to get a POSIX locale (which will break many programs, but that's kind of the point)

export LC_ALL=POSIX
export LANG=POSIX
export LANGUAGE=POSIX
@@ -1,143 +0,0 @@
# Unused

#!/bin/bash

# IMPORTANT: the following assumptions are made
# * the GH repo is on the origin remote
# * the gh-pages branch is named so locally
# * the git config user.signingkey is properly set

# You will need
# pip install coverage nose rsa wheel

# TODO
# release notes
# make hash on local files

set -e

skip_tests=true
gpg_sign_commits=""
buildserver='localhost:8142'

while true
do
case "$1" in
    --run-tests)
        skip_tests=false
        shift
    ;;
    --gpg-sign-commits|-S)
        gpg_sign_commits="-S"
        shift
    ;;
    --buildserver)
        buildserver="$2"
        shift 2
    ;;
    --*)
        echo "ERROR: unknown option $1"
        exit 1
    ;;
    *)
        break
    ;;
esac
done

if [ -z "$1" ]; then echo "ERROR: specify version number like this: $0 1994.09.06"; exit 1; fi
version="$1"
major_version=$(echo "$version" | sed -n 's#^\([0-9]*\.[0-9]*\.[0-9]*\).*#\1#p')
if test "$major_version" '!=' "$(date '+%Y.%m.%d')"; then
    echo "$version does not start with today's date!"
    exit 1
fi

if [ ! -z "`git tag | grep "$version"`" ]; then echo 'ERROR: version already present'; exit 1; fi
if [ ! -z "`git status --porcelain | grep -v CHANGELOG`" ]; then echo 'ERROR: the working directory is not clean; commit or stash changes'; exit 1; fi
useless_files=$(find yt_dlp -type f -not -name '*.py')
if [ ! -z "$useless_files" ]; then echo "ERROR: Non-.py files in yt_dlp: $useless_files"; exit 1; fi
if [ ! -f "updates_key.pem" ]; then echo 'ERROR: updates_key.pem missing'; exit 1; fi
if ! type pandoc >/dev/null 2>/dev/null; then echo 'ERROR: pandoc is missing'; exit 1; fi
if ! python3 -c 'import rsa' 2>/dev/null; then echo 'ERROR: python3-rsa is missing'; exit 1; fi
if ! python3 -c 'import wheel' 2>/dev/null; then echo 'ERROR: wheel is missing'; exit 1; fi

read -p "Is Changelog up to date? (y/n) " -n 1
if [[ ! $REPLY =~ ^[Yy]$ ]]; then exit 1; fi

/bin/echo -e "\n### First of all, testing..."
make clean
if $skip_tests ; then
    echo 'SKIPPING TESTS'
else
    nosetests --verbose --with-coverage --cover-package=yt_dlp --cover-html test --stop || exit 1
fi

/bin/echo -e "\n### Changing version in version.py..."
sed -i "s/__version__ = '.*'/__version__ = '$version'/" yt_dlp/version.py

/bin/echo -e "\n### Changing version in Changelog..."
sed -i "s/<unreleased>/$version/" Changelog.md

/bin/echo -e "\n### Committing documentation, templates and yt_dlp/version.py..."
make README.md CONTRIBUTING.md issuetemplates supportedsites
git add README.md CONTRIBUTING.md .github/ISSUE_TEMPLATE/1_broken_site.md .github/ISSUE_TEMPLATE/2_site_support_request.md .github/ISSUE_TEMPLATE/3_site_feature_request.md .github/ISSUE_TEMPLATE/4_bug_report.md .github/ISSUE_TEMPLATE/5_feature_request.md .github/ISSUE_TEMPLATE/6_question.md docs/supportedsites.md yt_dlp/version.py Changelog.md
git commit $gpg_sign_commits -m "release $version"

/bin/echo -e "\n### Now tagging, signing and pushing..."
git tag -s -m "Release $version" "$version"
git show "$version"
read -p "Is it good, can I push? (y/n) " -n 1
if [[ ! $REPLY =~ ^[Yy]$ ]]; then exit 1; fi
echo
MASTER=$(git rev-parse --abbrev-ref HEAD)
git push origin $MASTER:master
git push origin "$version"

/bin/echo -e "\n### OK, now it is time to build the binaries..."
REV=$(git rev-parse HEAD)
make yt-dlp yt-dlp.tar.gz
read -p "VM running? (y/n) " -n 1
wget "http://$buildserver/build/ytdl-org/youtube-dl/yt-dlp.exe?rev=$REV" -O yt-dlp.exe
mkdir -p "build/$version"
mv yt-dlp yt-dlp.exe "build/$version"
mv yt-dlp.tar.gz "build/$version/yt-dlp-$version.tar.gz"
RELEASE_FILES="yt-dlp yt-dlp.exe yt-dlp-$version.tar.gz"
(cd build/$version/ && md5sum $RELEASE_FILES > MD5SUMS)
(cd build/$version/ && sha1sum $RELEASE_FILES > SHA1SUMS)
(cd build/$version/ && sha256sum $RELEASE_FILES > SHA2-256SUMS)
(cd build/$version/ && sha512sum $RELEASE_FILES > SHA2-512SUMS)

/bin/echo -e "\n### Signing and uploading the new binaries to GitHub..."
for f in $RELEASE_FILES; do gpg --passphrase-repeat 5 --detach-sig "build/$version/$f"; done

ROOT=$(pwd)
python devscripts/create-github-release.py Changelog.md $version "$ROOT/build/$version"

ssh ytdl@yt-dl.org "sh html/update_latest.sh $version"

/bin/echo -e "\n### Now switching to gh-pages..."
git clone --branch gh-pages --single-branch . build/gh-pages
(
    set -e
    ORIGIN_URL=$(git config --get remote.origin.url)
    cd build/gh-pages
    "$ROOT/devscripts/gh-pages/add-version.py" $version
    "$ROOT/devscripts/gh-pages/update-feed.py"
    "$ROOT/devscripts/gh-pages/sign-versions.py" < "$ROOT/updates_key.pem"
    "$ROOT/devscripts/gh-pages/generate-download.py"
    "$ROOT/devscripts/gh-pages/update-copyright.py"
    "$ROOT/devscripts/gh-pages/update-sites.py"
    git add *.html *.html.in update
    git commit $gpg_sign_commits -m "release $version"
    git push "$ROOT" gh-pages
    git push "$ORIGIN_URL" gh-pages
)
rm -rf build

make pypi-files
echo "Uploading to PyPi ..."
python setup.py sdist bdist_wheel upload
make clean

/bin/echo -e "\n### DONE!"
@@ -1,49 +0,0 @@
# Unused

#!/usr/bin/env python3
from __future__ import unicode_literals

import itertools
import json
import os
import re
import sys

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from yt_dlp.compat import (
    compat_print,
    compat_urllib_request,
)
from yt_dlp.utils import format_bytes


def format_size(bytes):
    return '%s (%d bytes)' % (format_bytes(bytes), bytes)


total_bytes = 0

for page in itertools.count(1):
    releases = json.loads(compat_urllib_request.urlopen(
        'https://api.github.com/repos/ytdl-org/youtube-dl/releases?page=%s' % page
    ).read().decode('utf-8'))

    if not releases:
        break

    for release in releases:
        compat_print(release['name'])
        for asset in release['assets']:
            asset_name = asset['name']
            total_bytes += asset['download_count'] * asset['size']
            if all(not re.match(p, asset_name) for p in (
                    r'^yt-dlp$',
                    r'^yt-dlp-\d{4}\.\d{2}\.\d{2}(?:\.\d+)?\.tar\.gz$',
                    r'^yt-dlp\.exe$')):
                continue
            compat_print(
                ' %s size: %s downloads: %d'
                % (asset_name, format_size(asset['size']), asset['download_count']))

compat_print('total downloads traffic: %s' % format_size(total_bytes))
@@ -1,58 +0,0 @@
# UNUSED

#!/bin/bash

# Run with as parameter a setup.py that works in the current directory
# e.g. no os.chdir()
# It will run twice, the first time will crash

set -e

SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )"

if [ ! -d wine-py2exe ]; then

    sudo apt-get install wine1.3 axel bsdiff

    mkdir wine-py2exe
    cd wine-py2exe
    export WINEPREFIX=`pwd`

    axel -a "http://www.python.org/ftp/python/2.7/python-2.7.msi"
    axel -a "http://downloads.sourceforge.net/project/py2exe/py2exe/0.6.9/py2exe-0.6.9.win32-py2.7.exe"
    #axel -a "http://winetricks.org/winetricks"

    # http://appdb.winehq.org/objectManager.php?sClass=version&iId=21957
    echo "Follow python setup on screen"
    wine msiexec /i python-2.7.msi

    echo "Follow py2exe setup on screen"
    wine py2exe-0.6.9.win32-py2.7.exe

    #echo "Follow Microsoft Visual C++ 2008 Redistributable Package setup on screen"
    #bash winetricks vcrun2008

    rm py2exe-0.6.9.win32-py2.7.exe
    rm python-2.7.msi
    #rm winetricks

    # http://bugs.winehq.org/show_bug.cgi?id=3591

    mv drive_c/Python27/Lib/site-packages/py2exe/run.exe drive_c/Python27/Lib/site-packages/py2exe/run.exe.backup
    bspatch drive_c/Python27/Lib/site-packages/py2exe/run.exe.backup drive_c/Python27/Lib/site-packages/py2exe/run.exe "$SCRIPT_DIR/SizeOfImage.patch"
    mv drive_c/Python27/Lib/site-packages/py2exe/run_w.exe drive_c/Python27/Lib/site-packages/py2exe/run_w.exe.backup
    bspatch drive_c/Python27/Lib/site-packages/py2exe/run_w.exe.backup drive_c/Python27/Lib/site-packages/py2exe/run_w.exe "$SCRIPT_DIR/SizeOfImage_w.patch"

    cd -

else

    export WINEPREFIX="$( cd wine-py2exe && pwd )"

fi

wine "C:\\Python27\\python.exe" "$1" py2exe > "py2exe.log" 2>&1 || true
echo '# Copying python27.dll' >> "py2exe.log"
cp "$WINEPREFIX/drive_c/windows/system32/python27.dll" build/bdist.win32/winexe/bundle-2.7/
wine "C:\\Python27\\python.exe" "$1" py2exe >> "py2exe.log" 2>&1

test/swftests.unused/.gitignore (vendored)
@@ -1 +0,0 @@
*.swf
@ -1,19 +0,0 @@
|
||||
// input: [["a", "b", "c", "d"]]
|
||||
// output: ["c", "b", "a", "d"]
|
||||
|
||||
package {
|
||||
public class ArrayAccess {
|
||||
public static function main(ar:Array):Array {
|
||||
var aa:ArrayAccess = new ArrayAccess();
|
||||
return aa.f(ar, 2);
|
||||
}
|
||||
|
||||
private function f(ar:Array, num:Number):Array{
|
||||
var x:String = ar[0];
|
||||
var y:String = ar[num % ar.length];
|
||||
ar[0] = y;
|
||||
ar[num] = x;
|
||||
return ar;
|
||||
}
|
||||
}
|
||||
}
|
@ -1,17 +0,0 @@
|
||||
// input: []
|
||||
// output: 121
|
||||
|
||||
package {
|
||||
public class ClassCall {
|
||||
public static function main():int{
|
||||
var f:OtherClass = new OtherClass();
|
||||
return f.func(100,20);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class OtherClass {
|
||||
public function func(x: int, y: int):int {
|
||||
return x+y+1;
|
||||
}
|
||||
}
|
@ -1,15 +0,0 @@
|
||||
// input: []
|
||||
// output: 0
|
||||
|
||||
package {
|
||||
public class ClassConstruction {
|
||||
public static function main():int{
|
||||
var f:Foo = new Foo();
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class Foo {
|
||||
|
||||
}
|
@ -1,18 +0,0 @@
|
||||
// input: []
|
||||
// output: 4
|
||||
|
||||
package {
|
||||
public class ConstArrayAccess {
|
||||
private static const x:int = 2;
|
||||
private static const ar:Array = ["42", "3411"];
|
||||
|
||||
public static function main():int{
|
||||
var c:ConstArrayAccess = new ConstArrayAccess();
|
||||
return c.f();
|
||||
}
|
||||
|
||||
public function f(): int {
|
||||
return ar[1].length;
|
||||
}
|
||||
}
|
||||
}
|
@ -1,12 +0,0 @@
|
||||
// input: []
|
||||
// output: 2
|
||||
|
||||
package {
|
||||
public class ConstantInt {
|
||||
private static const x:int = 2;
|
||||
|
||||
public static function main():int{
|
||||
return x;
|
||||
}
|
||||
}
|
||||
}
|
@ -1,10 +0,0 @@
|
||||
// input: [{"x": 1, "y": 2}]
|
||||
// output: 3
|
||||
|
||||
package {
|
||||
public class DictCall {
|
||||
public static function main(d:Object):int{
|
||||
return d.x + d.y;
|
||||
}
|
||||
}
|
||||
}
|
@ -1,10 +0,0 @@
|
||||
// input: []
|
||||
// output: false
|
||||
|
||||
package {
|
||||
public class EqualsOperator {
|
||||
public static function main():Boolean{
|
||||
return 1 == 2;
|
||||
}
|
||||
}
|
||||
}
|
@ -1,13 +0,0 @@
|
||||
// input: [1, 2]
|
||||
// output: 3
|
||||
|
||||
package {
|
||||
public class LocalVars {
|
||||
public static function main(a:int, b:int):int{
|
||||
var c:int = a + b + b;
|
||||
var d:int = c - b;
|
||||
var e:int = d;
|
||||
return e;
|
||||
}
|
||||
}
|
||||
}
|
@ -1,22 +0,0 @@
|
||||
// input: [1]
|
||||
// output: 2
|
||||
|
||||
package {
|
||||
public class MemberAssignment {
|
||||
public var v:int;
|
||||
|
||||
public function g():int {
|
||||
return this.v;
|
||||
}
|
||||
|
||||
public function f(a:int):int{
|
||||
this.v = a;
|
||||
return this.v + this.g();
|
||||
}
|
||||
|
||||
public static function main(a:int): int {
|
||||
var v:MemberAssignment = new MemberAssignment();
|
||||
return v.f(a);
|
||||
}
|
||||
}
|
||||
}
|
@ -1,24 +0,0 @@
|
||||
// input: []
|
||||
// output: 123
|
||||
|
||||
package {
|
||||
public class NeOperator {
|
||||
public static function main(): int {
|
||||
var res:int = 0;
|
||||
if (1 != 2) {
|
||||
res += 3;
|
||||
} else {
|
||||
res += 4;
|
||||
}
|
||||
if (2 != 2) {
|
||||
res += 10;
|
||||
} else {
|
||||
res += 20;
|
||||
}
|
||||
if (9 == 9) {
|
||||
res += 100;
|
||||
}
|
||||
return res;
|
||||
}
|
||||
}
|
||||
}
|
@ -1,21 +0,0 @@
|
||||
// input: []
|
||||
// output: 9
|
||||
|
||||
package {
|
||||
public class PrivateCall {
|
||||
public static function main():int{
|
||||
var f:OtherClass = new OtherClass();
|
||||
return f.func();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class OtherClass {
|
||||
private function pf():int {
|
||||
return 9;
|
||||
}
|
||||
|
||||
public function func():int {
|
||||
return this.pf();
|
||||
}
|
||||
}
|
@ -1,22 +0,0 @@
|
||||
// input: []
|
||||
// output: 9
|
||||
|
||||
package {
|
||||
public class PrivateVoidCall {
|
||||
public static function main():int{
|
||||
var f:OtherClass = new OtherClass();
|
||||
f.func();
|
||||
return 9;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class OtherClass {
|
||||
private function pf():void {
|
||||
;
|
||||
}
|
||||
|
||||
public function func():void {
|
||||
this.pf();
|
||||
}
|
||||
}
|
@ -1,13 +0,0 @@
|
||||
// input: [1]
|
||||
// output: 1
|
||||
|
||||
package {
|
||||
public class StaticAssignment {
|
||||
public static var v:int;
|
||||
|
||||
public static function main(a:int):int{
|
||||
v = a;
|
||||
return v;
|
||||
}
|
||||
}
|
||||
}
|
@ -1,16 +0,0 @@
|
||||
// input: []
|
||||
// output: 1
|
||||
|
||||
package {
|
||||
public class StaticRetrieval {
|
||||
public static var v:int;
|
||||
|
||||
public static function main():int{
|
||||
if (v) {
|
||||
return 0;
|
||||
} else {
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@ -1,11 +0,0 @@
|
||||
// input: []
|
||||
// output: 3
|
||||
|
||||
package {
|
||||
public class StringBasics {
|
||||
public static function main():int{
|
||||
var s:String = "abc";
|
||||
return s.length;
|
||||
}
|
||||
}
|
||||
}
|
@ -1,11 +0,0 @@
|
||||
// input: []
|
||||
// output: 9897
|
||||
|
||||
package {
|
||||
public class StringCharCodeAt {
|
||||
public static function main():int{
|
||||
var s:String = "abc";
|
||||
return s.charCodeAt(1) * 100 + s.charCodeAt();
|
||||
}
|
||||
}
|
||||
}
|
@ -1,11 +0,0 @@
|
||||
// input: []
|
||||
// output: 2
|
||||
|
||||
package {
|
||||
public class StringConversion {
|
||||
public static function main():int{
|
||||
var s:String = String(99);
|
||||
return s.length;
|
||||
}
|
||||
}
|
||||
}
|
@ -1,80 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import unicode_literals
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
import unittest
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
|
||||
import errno
|
||||
import io
|
||||
import json
|
||||
import re
|
||||
import subprocess
|
||||
|
||||
from yt_dlp.swfinterp import SWFInterpreter
|
||||
|
||||
|
||||
TEST_DIR = os.path.join(
|
||||
os.path.dirname(os.path.abspath(__file__)), 'swftests')
|
||||
|
||||
|
||||
class TestSWFInterpreter(unittest.TestCase):
|
||||
pass
|
||||
|
||||
|
||||
def _make_testfunc(testfile):
|
||||
m = re.match(r'^(.*)\.(as)$', testfile)
|
||||
if not m:
|
||||
return
|
||||
test_id = m.group(1)
|
||||
|
||||
def test_func(self):
|
||||
as_file = os.path.join(TEST_DIR, testfile)
|
||||
swf_file = os.path.join(TEST_DIR, test_id + '.swf')
|
||||
if ((not os.path.exists(swf_file))
|
||||
or os.path.getmtime(swf_file) < os.path.getmtime(as_file)):
|
||||
# Recompile
|
||||
try:
|
||||
subprocess.check_call([
|
||||
'mxmlc', '-output', swf_file,
|
||||
'-static-link-runtime-shared-libraries', as_file])
|
||||
except OSError as ose:
|
||||
if ose.errno == errno.ENOENT:
|
||||
print('mxmlc not found! Skipping test.')
|
||||
return
|
||||
raise
|
||||
|
||||
with open(swf_file, 'rb') as swf_f:
|
||||
swf_content = swf_f.read()
|
||||
swfi = SWFInterpreter(swf_content)
|
||||
|
||||
with io.open(as_file, 'r', encoding='utf-8') as as_f:
|
||||
as_content = as_f.read()
|
||||
|
||||
def _find_spec(key):
|
||||
m = re.search(
|
||||
r'(?m)^//\s*%s:\s*(.*?)\n' % re.escape(key), as_content)
|
||||
if not m:
|
||||
raise ValueError('Cannot find %s in %s' % (key, testfile))
|
||||
return json.loads(m.group(1))
|
||||
|
||||
input_args = _find_spec('input')
|
||||
output = _find_spec('output')
|
||||
|
||||
swf_class = swfi.extract_class(test_id)
|
||||
func = swfi.extract_function(swf_class, 'main')
|
||||
res = func(input_args)
|
||||
self.assertEqual(res, output)
|
||||
|
||||
test_func.__name__ = str('test_swf_' + test_id)
|
||||
setattr(TestSWFInterpreter, test_func.__name__, test_func)
|
||||
|
||||
|
||||
for testfile in os.listdir(TEST_DIR):
|
||||
_make_testfunc(testfile)
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
@ -1,63 +0,0 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
# Allow direct execution
|
||||
import os
|
||||
import sys
|
||||
import unittest
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
import io
|
||||
import re
|
||||
|
||||
rootDir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
||||
|
||||
IGNORED_FILES = [
|
||||
'setup.py', # http://bugs.python.org/issue13943
|
||||
'conf.py',
|
||||
'buildserver.py',
|
||||
]
|
||||
|
||||
IGNORED_DIRS = [
|
||||
'.git',
|
||||
'.tox',
|
||||
]
|
||||
|
||||
from test.helper import assertRegexpMatches
|
||||
|
||||
|
||||
class TestUnicodeLiterals(unittest.TestCase):
|
||||
def test_all_files(self):
|
||||
for dirpath, dirnames, filenames in os.walk(rootDir):
|
||||
for ignore_dir in IGNORED_DIRS:
|
||||
if ignore_dir in dirnames:
|
||||
# If we remove the directory from dirnames os.walk won't
|
||||
# recurse into it
|
||||
dirnames.remove(ignore_dir)
|
||||
for basename in filenames:
|
||||
if not basename.endswith('.py'):
|
||||
continue
|
||||
if basename in IGNORED_FILES:
|
||||
continue
|
||||
|
||||
fn = os.path.join(dirpath, basename)
|
||||
with io.open(fn, encoding='utf-8') as inf:
|
||||
code = inf.read()
|
||||
|
||||
if "'" not in code and '"' not in code:
|
||||
continue
|
||||
assertRegexpMatches(
|
||||
self,
|
||||
code,
|
||||
r'(?:(?:#.*?|\s*)\n)*from __future__ import (?:[a-z_]+,\s*)*unicode_literals',
|
||||
'unicode_literals import missing in %s' % fn)
|
||||
|
||||
m = re.search(r'(?<=\s)u[\'"](?!\)|,|$)', code)
|
||||
if m is not None:
|
||||
self.assertTrue(
|
||||
m is None,
|
||||
'u present in %s, around %s' % (
|
||||
fn, code[m.start() - 10:m.end() + 10]))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
@ -1,834 +0,0 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import collections
|
||||
import io
|
||||
import zlib
|
||||
|
||||
from .compat import (
|
||||
compat_str,
|
||||
compat_struct_unpack,
|
||||
)
|
||||
from .utils import (
|
||||
ExtractorError,
|
||||
)
|
||||
|
||||
|
||||
def _extract_tags(file_contents):
|
||||
if file_contents[1:3] != b'WS':
|
||||
raise ExtractorError(
|
||||
'Not an SWF file; header is %r' % file_contents[:3])
|
||||
if file_contents[:1] == b'C':
|
||||
content = zlib.decompress(file_contents[8:])
|
||||
else:
|
||||
raise NotImplementedError(
|
||||
'Unsupported compression format %r' %
|
||||
file_contents[:1])
|
||||
|
||||
# Determine number of bits in framesize rectangle
|
||||
framesize_nbits = compat_struct_unpack('!B', content[:1])[0] >> 3
|
||||
framesize_len = (5 + 4 * framesize_nbits + 7) // 8
|
||||
|
||||
pos = framesize_len + 2 + 2
|
||||
while pos < len(content):
|
||||
header16 = compat_struct_unpack('<H', content[pos:pos + 2])[0]
|
||||
pos += 2
|
||||
tag_code = header16 >> 6
|
||||
tag_len = header16 & 0x3f
|
||||
if tag_len == 0x3f:
|
||||
tag_len = compat_struct_unpack('<I', content[pos:pos + 4])[0]
|
||||
pos += 4
|
||||
assert pos + tag_len <= len(content), \
|
||||
('Tag %d ends at %d+%d - that\'s longer than the file (%d)'
|
||||
% (tag_code, pos, tag_len, len(content)))
|
||||
yield (tag_code, content[pos:pos + tag_len])
|
||||
pos += tag_len
|
||||
|
||||
|
||||
class _AVMClass_Object(object):
|
||||
def __init__(self, avm_class):
|
||||
self.avm_class = avm_class
|
||||
|
||||
def __repr__(self):
|
||||
return '%s#%x' % (self.avm_class.name, id(self))
|
||||
|
||||
|
||||
class _ScopeDict(dict):
|
||||
def __init__(self, avm_class):
|
||||
super(_ScopeDict, self).__init__()
|
||||
self.avm_class = avm_class
|
||||
|
||||
def __repr__(self):
|
||||
return '%s__Scope(%s)' % (
|
||||
self.avm_class.name,
|
||||
super(_ScopeDict, self).__repr__())
|
||||
|
||||
|
||||
class _AVMClass(object):
|
||||
def __init__(self, name_idx, name, static_properties=None):
|
||||
self.name_idx = name_idx
|
||||
self.name = name
|
||||
self.method_names = {}
|
||||
self.method_idxs = {}
|
||||
self.methods = {}
|
||||
self.method_pyfunctions = {}
|
||||
self.static_properties = static_properties if static_properties else {}
|
||||
|
||||
self.variables = _ScopeDict(self)
|
||||
self.constants = {}
|
||||
|
||||
def make_object(self):
|
||||
return _AVMClass_Object(self)
|
||||
|
||||
def __repr__(self):
|
||||
return '_AVMClass(%s)' % (self.name)
|
||||
|
||||
def register_methods(self, methods):
|
||||
self.method_names.update(methods.items())
|
||||
self.method_idxs.update(dict(
|
||||
(idx, name)
|
||||
for name, idx in methods.items()))
|
||||
|
||||
|
||||
class _Multiname(object):
|
||||
def __init__(self, kind):
|
||||
self.kind = kind
|
||||
|
||||
def __repr__(self):
|
||||
return '[MULTINAME kind: 0x%x]' % self.kind
|
||||
|
||||
|
||||
def _read_int(reader):
|
||||
res = 0
|
||||
shift = 0
|
||||
for _ in range(5):
|
||||
buf = reader.read(1)
|
||||
assert len(buf) == 1
|
||||
b = compat_struct_unpack('<B', buf)[0]
|
||||
res = res | ((b & 0x7f) << shift)
|
||||
if b & 0x80 == 0:
|
||||
break
|
||||
shift += 7
|
||||
return res
|
||||
|
||||
|
||||
def _u30(reader):
|
||||
res = _read_int(reader)
|
||||
assert res & 0xf0000000 == 0
|
||||
return res
|
||||
|
||||
|
||||
_u32 = _read_int
|
||||
|
||||
|
||||
def _s32(reader):
|
||||
v = _read_int(reader)
|
||||
if v & 0x80000000 != 0:
|
||||
v = - ((v ^ 0xffffffff) + 1)
|
||||
return v
|
||||
|
||||
|
||||
def _s24(reader):
|
||||
bs = reader.read(3)
|
||||
assert len(bs) == 3
|
||||
last_byte = b'\xff' if (ord(bs[2:3]) >= 0x80) else b'\x00'
|
||||
return compat_struct_unpack('<i', bs + last_byte)[0]
|
||||
|
||||
|
||||
def _read_string(reader):
|
||||
slen = _u30(reader)
|
||||
resb = reader.read(slen)
|
||||
assert len(resb) == slen
|
||||
return resb.decode('utf-8')
|
||||
|
||||
|
||||
def _read_bytes(count, reader):
|
||||
assert count >= 0
|
||||
resb = reader.read(count)
|
||||
assert len(resb) == count
|
||||
return resb
|
||||
|
||||
|
||||
def _read_byte(reader):
|
||||
resb = _read_bytes(1, reader=reader)
|
||||
res = compat_struct_unpack('<B', resb)[0]
|
||||
return res
|
||||
|
||||
|
||||
StringClass = _AVMClass('(no name idx)', 'String')
|
||||
ByteArrayClass = _AVMClass('(no name idx)', 'ByteArray')
|
||||
TimerClass = _AVMClass('(no name idx)', 'Timer')
|
||||
TimerEventClass = _AVMClass('(no name idx)', 'TimerEvent', {'TIMER': 'timer'})
|
||||
_builtin_classes = {
|
||||
StringClass.name: StringClass,
|
||||
ByteArrayClass.name: ByteArrayClass,
|
||||
TimerClass.name: TimerClass,
|
||||
TimerEventClass.name: TimerEventClass,
|
||||
}
|
||||
|
||||
|
||||
class _Undefined(object):
|
||||
def __bool__(self):
|
||||
return False
|
||||
__nonzero__ = __bool__
|
||||
|
||||
def __hash__(self):
|
||||
return 0
|
||||
|
||||
def __str__(self):
|
||||
return 'undefined'
|
||||
__repr__ = __str__
|
||||
|
||||
|
||||
undefined = _Undefined()
|
||||
|
||||
|
||||
class SWFInterpreter(object):
|
||||
def __init__(self, file_contents):
|
||||
self._patched_functions = {
|
||||
(TimerClass, 'addEventListener'): lambda params: undefined,
|
||||
}
|
||||
code_tag = next(tag
|
||||
for tag_code, tag in _extract_tags(file_contents)
|
||||
if tag_code == 82)
|
||||
p = code_tag.index(b'\0', 4) + 1
|
||||
code_reader = io.BytesIO(code_tag[p:])
|
||||
|
||||
# Parse ABC (AVM2 ByteCode)
|
||||
|
||||
# Define a couple convenience methods
|
||||
u30 = lambda *args: _u30(*args, reader=code_reader)
|
||||
s32 = lambda *args: _s32(*args, reader=code_reader)
|
||||
u32 = lambda *args: _u32(*args, reader=code_reader)
|
||||
read_bytes = lambda *args: _read_bytes(*args, reader=code_reader)
|
||||
read_byte = lambda *args: _read_byte(*args, reader=code_reader)
|
||||
|
||||
# minor_version + major_version
|
||||
read_bytes(2 + 2)
|
||||
|
||||
# Constant pool
|
||||
int_count = u30()
|
||||
self.constant_ints = [0]
|
||||
for _c in range(1, int_count):
|
||||
self.constant_ints.append(s32())
|
||||
self.constant_uints = [0]
|
||||
uint_count = u30()
|
||||
for _c in range(1, uint_count):
|
||||
self.constant_uints.append(u32())
|
||||
double_count = u30()
|
||||
read_bytes(max(0, (double_count - 1)) * 8)
|
||||
string_count = u30()
|
||||
self.constant_strings = ['']
|
||||
for _c in range(1, string_count):
|
||||
s = _read_string(code_reader)
|
||||
self.constant_strings.append(s)
|
||||
namespace_count = u30()
|
||||
for _c in range(1, namespace_count):
|
||||
read_bytes(1) # kind
|
||||
u30() # name
|
||||
ns_set_count = u30()
|
||||
for _c in range(1, ns_set_count):
|
||||
count = u30()
|
||||
for _c2 in range(count):
|
||||
u30()
|
||||
multiname_count = u30()
|
||||
MULTINAME_SIZES = {
|
||||
0x07: 2, # QName
|
||||
0x0d: 2, # QNameA
|
||||
0x0f: 1, # RTQName
|
||||
0x10: 1, # RTQNameA
|
||||
0x11: 0, # RTQNameL
|
||||
0x12: 0, # RTQNameLA
|
||||
0x09: 2, # Multiname
|
||||
0x0e: 2, # MultinameA
|
||||
0x1b: 1, # MultinameL
|
||||
0x1c: 1, # MultinameLA
|
||||
}
|
||||
self.multinames = ['']
|
||||
for _c in range(1, multiname_count):
|
||||
kind = u30()
|
||||
assert kind in MULTINAME_SIZES, 'Invalid multiname kind %r' % kind
|
||||
if kind == 0x07:
|
||||
u30() # namespace_idx
|
||||
name_idx = u30()
|
||||
self.multinames.append(self.constant_strings[name_idx])
|
||||
elif kind == 0x09:
|
||||
name_idx = u30()
|
||||
u30()
|
||||
self.multinames.append(self.constant_strings[name_idx])
|
||||
else:
|
||||
self.multinames.append(_Multiname(kind))
|
||||
for _c2 in range(MULTINAME_SIZES[kind]):
|
||||
u30()
|
||||
|
||||
# Methods
|
||||
method_count = u30()
|
||||
MethodInfo = collections.namedtuple(
|
||||
'MethodInfo',
|
||||
['NEED_ARGUMENTS', 'NEED_REST'])
|
||||
method_infos = []
|
||||
for method_id in range(method_count):
|
||||
param_count = u30()
|
||||
u30() # return type
|
||||
for _ in range(param_count):
|
||||
u30() # param type
|
||||
u30() # name index (always 0 for youtube)
|
||||
flags = read_byte()
|
||||
if flags & 0x08 != 0:
|
||||
# Options present
|
||||
option_count = u30()
|
||||
for c in range(option_count):
|
||||
u30() # val
|
||||
read_bytes(1) # kind
|
||||
if flags & 0x80 != 0:
|
||||
# Param names present
|
||||
for _ in range(param_count):
|
||||
u30() # param name
|
||||
mi = MethodInfo(flags & 0x01 != 0, flags & 0x04 != 0)
|
||||
method_infos.append(mi)
|
||||
|
||||
# Metadata
|
||||
metadata_count = u30()
|
||||
for _c in range(metadata_count):
|
||||
u30() # name
|
||||
item_count = u30()
|
||||
for _c2 in range(item_count):
|
||||
u30() # key
|
||||
u30() # value
|
||||
|
||||
def parse_traits_info():
|
||||
trait_name_idx = u30()
|
||||
kind_full = read_byte()
|
||||
kind = kind_full & 0x0f
|
||||
attrs = kind_full >> 4
|
||||
methods = {}
|
||||
constants = None
|
||||
if kind == 0x00: # Slot
|
||||
u30() # Slot id
|
||||
u30() # type_name_idx
|
||||
vindex = u30()
|
||||
if vindex != 0:
|
||||
read_byte() # vkind
|
||||
elif kind == 0x06: # Const
|
||||
u30() # Slot id
|
||||
u30() # type_name_idx
|
||||
vindex = u30()
|
||||
vkind = 'any'
|
||||
if vindex != 0:
|
||||
vkind = read_byte()
|
||||
if vkind == 0x03: # Constant_Int
|
||||
value = self.constant_ints[vindex]
|
||||
elif vkind == 0x04: # Constant_UInt
|
||||
value = self.constant_uints[vindex]
|
||||
else:
|
||||
return {}, None # Ignore silently for now
|
||||
constants = {self.multinames[trait_name_idx]: value}
|
||||
elif kind in (0x01, 0x02, 0x03): # Method / Getter / Setter
|
||||
u30() # disp_id
|
||||
method_idx = u30()
|
||||
methods[self.multinames[trait_name_idx]] = method_idx
|
||||
elif kind == 0x04: # Class
|
||||
u30() # slot_id
|
||||
u30() # classi
|
||||
elif kind == 0x05: # Function
|
||||
u30() # slot_id
|
||||
function_idx = u30()
|
||||
methods[function_idx] = self.multinames[trait_name_idx]
|
||||
else:
|
||||
raise ExtractorError('Unsupported trait kind %d' % kind)
|
||||
|
||||
if attrs & 0x4 != 0: # Metadata present
|
||||
metadata_count = u30()
|
||||
for _c3 in range(metadata_count):
|
||||
u30() # metadata index
|
||||
|
||||
return methods, constants
|
||||
|
||||
# Classes
|
||||
        class_count = u30()
        classes = []
        for class_id in range(class_count):
            name_idx = u30()

            cname = self.multinames[name_idx]
            avm_class = _AVMClass(name_idx, cname)
            classes.append(avm_class)

            u30()  # super_name idx
            flags = read_byte()
            if flags & 0x08 != 0:  # Protected namespace is present
                u30()  # protected_ns_idx
            intrf_count = u30()
            for _c2 in range(intrf_count):
                u30()
            u30()  # iinit
            trait_count = u30()
            for _c2 in range(trait_count):
                trait_methods, trait_constants = parse_traits_info()
                avm_class.register_methods(trait_methods)
                if trait_constants:
                    avm_class.constants.update(trait_constants)

        assert len(classes) == class_count
        self._classes_by_name = dict((c.name, c) for c in classes)

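        # class_info entries: static initializer index (cinit) and static
        # traits for each class, in the same order as the instance_info above.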
        for avm_class in classes:
            avm_class.cinit_idx = u30()
            trait_count = u30()
            for _c2 in range(trait_count):
                trait_methods, trait_constants = parse_traits_info()
                avm_class.register_methods(trait_methods)
                if trait_constants:
                    avm_class.constants.update(trait_constants)

        # Scripts
        script_count = u30()
        for _c in range(script_count):
            u30()  # init
            trait_count = u30()
            for _c2 in range(trait_count):
                parse_traits_info()

        # Method bodies
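        # Each method_body_info carries the bytecode, register count and
        # exception/trait tables; bodies are linked back to class methods via
        # the method index recorded in the traits parsed above.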
        method_body_count = u30()
        Method = collections.namedtuple('Method', ['code', 'local_count'])
        self._all_methods = []
        for _c in range(method_body_count):
            method_idx = u30()
            u30()  # max_stack
            local_count = u30()
            u30()  # init_scope_depth
            u30()  # max_scope_depth
            code_length = u30()
            code = read_bytes(code_length)
            m = Method(code, local_count)
            self._all_methods.append(m)
            for avm_class in classes:
                if method_idx in avm_class.method_idxs:
                    avm_class.methods[avm_class.method_idxs[method_idx]] = m
            exception_count = u30()
            for _c2 in range(exception_count):
                u30()  # from
                u30()  # to
                u30()  # target
                u30()  # exc_type
                u30()  # var_name
            trait_count = u30()
            for _c2 in range(trait_count):
                parse_traits_info()

        assert p + code_reader.tell() == len(code_tag)

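    # Allows callers (e.g. tests) to replace a method with a native Python
    # function; extract_function() returns the patched version if one exists.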
    def patch_function(self, avm_class, func_name, f):
        self._patched_functions[(avm_class, func_name)] = f

    def extract_class(self, class_name, call_cinit=True):
        try:
            res = self._classes_by_name[class_name]
        except KeyError:
            raise ExtractorError('Class %r not found' % class_name)

        if call_cinit and hasattr(res, 'cinit_idx'):
            res.register_methods({'$cinit': res.cinit_idx})
            res.methods['$cinit'] = self._all_methods[res.cinit_idx]
            cinit = self.extract_function(res, '$cinit')
            cinit([])

        return res

    def extract_function(self, avm_class, func_name):
        p = self._patched_functions.get((avm_class, func_name))
        if p:
            return p
        if func_name in avm_class.method_pyfunctions:
            return avm_class.method_pyfunctions[func_name]
        if func_name in self._classes_by_name:
            return self._classes_by_name[func_name].make_object()
        if func_name not in avm_class.methods:
            raise ExtractorError('Cannot find function %s.%s' % (
                avm_class.name, func_name))
        m = avm_class.methods[func_name]

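        # resfunc interprets the method's AVM2 bytecode with a small stack
        # machine: `registers` holds `this` plus the arguments and locals,
        # `stack` is the operand stack and `scopes` the scope chain used for
        # name lookups.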
        def resfunc(args):
            # Helper functions
            coder = io.BytesIO(m.code)
            s24 = lambda: _s24(coder)
            u30 = lambda: _u30(coder)

            registers = [avm_class.variables] + list(args) + [None] * m.local_count
            stack = []
            scopes = collections.deque([
                self._classes_by_name, avm_class.constants, avm_class.variables])
            while True:
                opcode = _read_byte(coder)
                if opcode == 9:  # label
                    pass  # Spec says: "Do nothing."
                elif opcode == 16:  # jump
                    offset = s24()
                    coder.seek(coder.tell() + offset)
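                    # s24 branch offsets are relative to the position right
                    # after the offset bytes, i.e. coder.tell() at this point;
                    # the same convention applies to the ifXX opcodes below.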
                elif opcode == 17:  # iftrue
                    offset = s24()
                    value = stack.pop()
                    if value:
                        coder.seek(coder.tell() + offset)
                elif opcode == 18:  # iffalse
                    offset = s24()
                    value = stack.pop()
                    if not value:
                        coder.seek(coder.tell() + offset)
                elif opcode == 19:  # ifeq
                    offset = s24()
                    value2 = stack.pop()
                    value1 = stack.pop()
                    if value2 == value1:
                        coder.seek(coder.tell() + offset)
                elif opcode == 20:  # ifne
                    offset = s24()
                    value2 = stack.pop()
                    value1 = stack.pop()
                    if value2 != value1:
                        coder.seek(coder.tell() + offset)
                elif opcode == 21:  # iflt
                    offset = s24()
                    value2 = stack.pop()
                    value1 = stack.pop()
                    if value1 < value2:
                        coder.seek(coder.tell() + offset)
                elif opcode == 32:  # pushnull
                    stack.append(None)
                elif opcode == 33:  # pushundefined
                    stack.append(undefined)
                elif opcode == 36:  # pushbyte
                    v = _read_byte(coder)
                    stack.append(v)
                elif opcode == 37:  # pushshort
                    v = u30()
                    stack.append(v)
                elif opcode == 38:  # pushtrue
                    stack.append(True)
                elif opcode == 39:  # pushfalse
                    stack.append(False)
                elif opcode == 40:  # pushnan
                    stack.append(float('NaN'))
                elif opcode == 42:  # dup
                    value = stack[-1]
                    stack.append(value)
                elif opcode == 44:  # pushstring
                    idx = u30()
                    stack.append(self.constant_strings[idx])
                elif opcode == 48:  # pushscope
                    new_scope = stack.pop()
                    scopes.append(new_scope)
                elif opcode == 66:  # construct
                    arg_count = u30()
                    args = list(reversed(
                        [stack.pop() for _ in range(arg_count)]))
                    obj = stack.pop()
                    res = obj.avm_class.make_object()
                    stack.append(res)
                elif opcode == 70:  # callproperty
                    index = u30()
                    mname = self.multinames[index]
                    arg_count = u30()
                    args = list(reversed(
                        [stack.pop() for _ in range(arg_count)]))
                    obj = stack.pop()

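                    # Dispatch on the receiver type: the builtin String class,
                    # interpreted AVM objects/classes, scope dictionaries, and
                    # native Python strings/lists each get their own handling.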
                    if obj == StringClass:
                        if mname == 'String':
                            assert len(args) == 1
                            assert isinstance(args[0], (
                                int, compat_str, _Undefined))
                            if args[0] == undefined:
                                res = 'undefined'
                            else:
                                res = compat_str(args[0])
                            stack.append(res)
                            continue
                        else:
                            raise NotImplementedError(
                                'Function String.%s is not yet implemented'
                                % mname)
                    elif isinstance(obj, _AVMClass_Object):
                        func = self.extract_function(obj.avm_class, mname)
                        res = func(args)
                        stack.append(res)
                        continue
                    elif isinstance(obj, _AVMClass):
                        func = self.extract_function(obj, mname)
                        res = func(args)
                        stack.append(res)
                        continue
                    elif isinstance(obj, _ScopeDict):
                        if mname in obj.avm_class.method_names:
                            func = self.extract_function(obj.avm_class, mname)
                            res = func(args)
                        else:
                            res = obj[mname]
                        stack.append(res)
                        continue
                    elif isinstance(obj, compat_str):
                        if mname == 'split':
                            assert len(args) == 1
                            assert isinstance(args[0], compat_str)
                            if args[0] == '':
                                res = list(obj)
                            else:
                                res = obj.split(args[0])
                            stack.append(res)
                            continue
                        elif mname == 'charCodeAt':
                            assert len(args) <= 1
                            idx = 0 if len(args) == 0 else args[0]
                            assert isinstance(idx, int)
                            res = ord(obj[idx])
                            stack.append(res)
                            continue
                    elif isinstance(obj, list):
                        if mname == 'slice':
                            assert len(args) == 1
                            assert isinstance(args[0], int)
                            res = obj[args[0]:]
                            stack.append(res)
                            continue
                        elif mname == 'join':
                            assert len(args) == 1
                            assert isinstance(args[0], compat_str)
                            res = args[0].join(obj)
                            stack.append(res)
                            continue
                    raise NotImplementedError(
                        'Unsupported property %r on %r'
                        % (mname, obj))
                elif opcode == 71:  # returnvoid
                    res = undefined
                    return res
                elif opcode == 72:  # returnvalue
                    res = stack.pop()
                    return res
                elif opcode == 73:  # constructsuper
                    # Not yet implemented, just hope it works without it
                    arg_count = u30()
                    args = list(reversed(
                        [stack.pop() for _ in range(arg_count)]))
                    obj = stack.pop()
                elif opcode == 74:  # constructproperty
                    index = u30()
                    arg_count = u30()
                    args = list(reversed(
                        [stack.pop() for _ in range(arg_count)]))
                    obj = stack.pop()

                    mname = self.multinames[index]
                    assert isinstance(obj, _AVMClass)

                    # We do not actually call the constructor for now;
                    # we just pretend it does nothing
                    stack.append(obj.make_object())
                elif opcode == 79:  # callpropvoid
                    index = u30()
                    mname = self.multinames[index]
                    arg_count = u30()
                    args = list(reversed(
                        [stack.pop() for _ in range(arg_count)]))
                    obj = stack.pop()
                    if isinstance(obj, _AVMClass_Object):
                        func = self.extract_function(obj.avm_class, mname)
                        res = func(args)
                        assert res is undefined
                        continue
                    if isinstance(obj, _ScopeDict):
                        assert mname in obj.avm_class.method_names
                        func = self.extract_function(obj.avm_class, mname)
                        res = func(args)
                        assert res is undefined
                        continue
                    if mname == 'reverse':
                        assert isinstance(obj, list)
                        obj.reverse()
                    else:
                        raise NotImplementedError(
                            'Unsupported (void) property %r on %r'
                            % (mname, obj))
                elif opcode == 86:  # newarray
                    arg_count = u30()
                    arr = []
                    for i in range(arg_count):
                        arr.append(stack.pop())
                    arr = arr[::-1]
                    stack.append(arr)
                elif opcode == 93:  # findpropstrict
                    index = u30()
                    mname = self.multinames[index]
                    for s in reversed(scopes):
                        if mname in s:
                            res = s
                            break
                    else:
                        res = scopes[0]
                    if mname not in res and mname in _builtin_classes:
                        stack.append(_builtin_classes[mname])
                    else:
                        stack.append(res[mname])
                elif opcode == 94:  # findproperty
                    index = u30()
                    mname = self.multinames[index]
                    for s in reversed(scopes):
                        if mname in s:
                            res = s
                            break
                    else:
                        res = avm_class.variables
                    stack.append(res)
                elif opcode == 96:  # getlex
                    index = u30()
                    mname = self.multinames[index]
                    for s in reversed(scopes):
                        if mname in s:
                            scope = s
                            break
                    else:
                        scope = avm_class.variables

                    if mname in scope:
                        res = scope[mname]
                    elif mname in _builtin_classes:
                        res = _builtin_classes[mname]
                    else:
                        # Assume uninitialized
                        # TODO warn here
                        res = undefined
                    stack.append(res)
                elif opcode == 97:  # setproperty
                    index = u30()
                    value = stack.pop()
                    idx = self.multinames[index]
                    if isinstance(idx, _Multiname):
                        idx = stack.pop()
                    obj = stack.pop()
                    obj[idx] = value
                elif opcode == 98:  # getlocal
                    index = u30()
                    stack.append(registers[index])
                elif opcode == 99:  # setlocal
                    index = u30()
                    value = stack.pop()
                    registers[index] = value
                elif opcode == 102:  # getproperty
                    index = u30()
                    pname = self.multinames[index]
                    if pname == 'length':
                        obj = stack.pop()
                        assert isinstance(obj, (compat_str, list))
                        stack.append(len(obj))
                    elif isinstance(pname, compat_str):  # Member access
                        obj = stack.pop()
                        if isinstance(obj, _AVMClass):
                            res = obj.static_properties[pname]
                            stack.append(res)
                            continue

                        assert isinstance(obj, (dict, _ScopeDict)),\
                            'Accessing member %r on %r' % (pname, obj)
                        res = obj.get(pname, undefined)
                        stack.append(res)
                    else:  # Assume attribute access
                        idx = stack.pop()
                        assert isinstance(idx, int)
                        obj = stack.pop()
                        assert isinstance(obj, list)
                        stack.append(obj[idx])
                elif opcode == 104:  # initproperty
                    index = u30()
                    value = stack.pop()
                    idx = self.multinames[index]
                    if isinstance(idx, _Multiname):
                        idx = stack.pop()
                    obj = stack.pop()
                    obj[idx] = value
                elif opcode == 115:  # convert_i
                    value = stack.pop()
                    intvalue = int(value)
                    stack.append(intvalue)
                elif opcode == 128:  # coerce
                    u30()
                elif opcode == 130:  # coerce_a
                    value = stack.pop()
                    # um, yes, it's any value
                    stack.append(value)
                elif opcode == 133:  # coerce_s
                    assert isinstance(stack[-1], (type(None), compat_str))
                elif opcode == 147:  # decrement
                    value = stack.pop()
                    assert isinstance(value, int)
                    stack.append(value - 1)
                elif opcode == 149:  # typeof
                    value = stack.pop()
                    return {
                        _Undefined: 'undefined',
                        compat_str: 'String',
                        int: 'Number',
                        float: 'Number',
                    }[type(value)]
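                    # Note: per the AVM2 spec typeof pushes the type name onto
                    # the stack; this handler returns it directly instead,
                    # presumably because typeof only ever appeared as the last
                    # meaningful operation in the bytecode this was run on.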
                elif opcode == 160:  # add
                    value2 = stack.pop()
                    value1 = stack.pop()
                    res = value1 + value2
                    stack.append(res)
                elif opcode == 161:  # subtract
                    value2 = stack.pop()
                    value1 = stack.pop()
                    res = value1 - value2
                    stack.append(res)
                elif opcode == 162:  # multiply
                    value2 = stack.pop()
                    value1 = stack.pop()
                    res = value1 * value2
                    stack.append(res)
                elif opcode == 164:  # modulo
                    value2 = stack.pop()
                    value1 = stack.pop()
                    res = value1 % value2
                    stack.append(res)
                elif opcode == 168:  # bitand
                    value2 = stack.pop()
                    value1 = stack.pop()
                    assert isinstance(value1, int)
                    assert isinstance(value2, int)
                    res = value1 & value2
                    stack.append(res)
                elif opcode == 171:  # equals
                    value2 = stack.pop()
                    value1 = stack.pop()
                    result = value1 == value2
                    stack.append(result)
                elif opcode == 175:  # greaterequals
                    value2 = stack.pop()
                    value1 = stack.pop()
                    result = value1 >= value2
                    stack.append(result)
                elif opcode == 192:  # increment_i
                    value = stack.pop()
                    assert isinstance(value, int)
                    stack.append(value + 1)
                elif opcode == 208:  # getlocal_0
                    stack.append(registers[0])
                elif opcode == 209:  # getlocal_1
                    stack.append(registers[1])
                elif opcode == 210:  # getlocal_2
                    stack.append(registers[2])
                elif opcode == 211:  # getlocal_3
                    stack.append(registers[3])
                elif opcode == 212:  # setlocal_0
                    registers[0] = stack.pop()
                elif opcode == 213:  # setlocal_1
                    registers[1] = stack.pop()
                elif opcode == 214:  # setlocal_2
                    registers[2] = stack.pop()
                elif opcode == 215:  # setlocal_3
                    registers[3] = stack.pop()
                else:
                    raise NotImplementedError(
                        'Unsupported opcode %d' % opcode)

        avm_class.method_pyfunctions[func_name] = resfunc
        return resfunc
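

# Usage sketch (illustrative): assuming the surrounding class is the SWF/AVM2
# interpreter this project used for ActionScript signature functions, a
# deciphering routine was obtained and called roughly as below. The class name
# SWFInterpreter and the 'SignatureDecipher'/'decipher' names passed in are
# assumptions for illustration, not taken from an actual player SWF.
#
#   swfi = SWFInterpreter(swf_file_contents)
#   cls = swfi.extract_class('SignatureDecipher')
#   decipher = swfi.extract_function(cls, 'decipher')
#   plain_sig = decipher([scrambled_sig])  # interpreted methods take an args list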