Bug 1774569 - Vendor jsonschema at 4.10.0 r=ahal
Differential Revision: https://phabricator.services.mozilla.com/D171760
@@ -12,7 +12,6 @@ pypi:fluent.syntax==0.18.1
 pypi:idna==2.10
 pypi:imagesize==1.2.0
 pypi:jmespath==0.10.0
-pypi:jsonschema==3.2.0
 pypi:livereload==2.6.3
 pypi:Markdown==3.3.4
 pypi:MarkupSafe==2.0.1
@@ -55,6 +54,7 @@ vendored:third_party/python/chardet
 vendored:third_party/python/importlib_metadata
 vendored:third_party/python/importlib_resources
 vendored:third_party/python/Jinja2
+vendored:third_party/python/jsonschema
 vendored:third_party/python/packaging
 vendored:third_party/python/requests
 vendored:third_party/python/rsa
@@ -80,6 +80,7 @@ vendored:third_party/python/giturlparse
 vendored:third_party/python/gyp/pylib
 vendored:third_party/python/idna
 vendored:third_party/python/importlib_metadata
+vendored:third_party/python/importlib_resources
 vendored:third_party/python/Jinja2
 vendored:third_party/python/jinxed
 vendored:third_party/python/jsmin
@@ -95,6 +96,7 @@ vendored:third_party/python/packaging
 vendored:third_party/python/pathspec
 vendored:third_party/python/pip
 vendored:third_party/python/pip_tools
+vendored:third_party/python/pkgutil_resolve_name
 vendored:third_party/python/ply
 vendored:third_party/python/pyasn1
 vendored:third_party/python/pyasn1_modules
third_party/python/dlmanager/build/lib/dlmanager/__init__.py (vendored, new file, 18 lines)
@@ -0,0 +1,18 @@
import logging

__version__ = "0.1.1"


try:  # Python 2.7+
    from logging import NullHandler
except ImportError:
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass

# Set default logging handler to avoid "No handler found" warnings.
logging.getLogger(__name__).addHandler(NullHandler())

# exported api
from dlmanager.manager import Download, DownloadInterrupt, DownloadManager  # noqa
from dlmanager.persist_limit import PersistLimit  # noqa
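Because the package installs a NullHandler, it stays silent unless the host application configures logging itself. A minimal opt-in sketch (an illustration, not part of this commit; it assumes the vendored package is importable):

import logging

import dlmanager  # noqa: F401  (assumption: the vendored package is on sys.path)

# The library attaches a NullHandler to its logger, so it is quiet by
# default; an application opts in by configuring the "dlmanager" logger.
logging.basicConfig(level=logging.INFO)
logging.getLogger("dlmanager").setLevel(logging.DEBUG)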
third_party/python/dlmanager/build/lib/dlmanager/fs.py (vendored, new file, 116 lines)
@@ -0,0 +1,116 @@
import errno
import logging
import os
import shutil
import stat
import time

"""
File system utilities, copied from mozfile.
"""

LOG = logging.getLogger(__name__)


def _call_windows_retry(func, args=(), retry_max=5, retry_delay=0.5):
    """
    It's possible to see spurious errors on Windows due to various things
    keeping a handle to the directory open (explorer, virus scanners, etc)
    So we try a few times if it fails with a known error.
    """
    retry_count = 0
    while True:
        try:
            func(*args)
        except OSError as e:
            # Error codes are defined in:
            # http://docs.python.org/2/library/errno.html#module-errno
            if e.errno not in (errno.EACCES, errno.ENOTEMPTY):
                raise

            if retry_count == retry_max:
                raise

            retry_count += 1

            LOG.info('%s() failed for "%s". Reason: %s (%s). Retrying...',
                     func.__name__, args, e.strerror, e.errno)
            time.sleep(retry_delay)
        else:
            # If no exception has been thrown it should be done
            break


def remove(path):
    """Removes the specified file, link, or directory tree.

    This is a replacement for shutil.rmtree that works better under
    windows. It does the following things:

     - check path access for the current user before trying to remove
     - retry operations on some known errors due to various things keeping
       a handle on file paths - like explorer, virus scanners, etc. The
       known errors are errno.EACCES and errno.ENOTEMPTY, and it will
       retry up to 5 five times with a delay of 0.5 seconds between each
       attempt.

    Note that no error will be raised if the given path does not exists.

    :param path: path to be removed
    """

    def _call_with_windows_retry(*args, **kwargs):
        try:
            _call_windows_retry(*args, **kwargs)
        except OSError as e:
            # The file or directory to be removed doesn't exist anymore
            if e.errno != errno.ENOENT:
                raise

    def _update_permissions(path):
        """Sets specified pemissions depending on filetype"""
        if os.path.islink(path):
            # Path is a symlink which we don't have to modify
            # because it should already have all the needed permissions
            return

        stats = os.stat(path)

        if os.path.isfile(path):
            mode = stats.st_mode | stat.S_IWUSR
        elif os.path.isdir(path):
            mode = stats.st_mode | stat.S_IWUSR | stat.S_IXUSR
        else:
            # Not supported type
            return

        _call_with_windows_retry(os.chmod, (path, mode))

    if not os.path.exists(path):
        return

    if os.path.isfile(path) or os.path.islink(path):
        # Verify the file or link is read/write for the current user
        _update_permissions(path)
        _call_with_windows_retry(os.remove, (path,))

    elif os.path.isdir(path):
        # Verify the directory is read/write/execute for the current user
        _update_permissions(path)

        # We're ensuring that every nested item has writable permission.
        for root, dirs, files in os.walk(path):
            for entry in dirs + files:
                _update_permissions(os.path.join(root, entry))
        _call_with_windows_retry(shutil.rmtree, (path,))


def move(src, dst):
    """
    Move a file or directory path.

    This is a replacement for shutil.move that works better under windows,
    retrying operations on some known errors due to various things keeping
    a handle on file paths.
    """
    _call_windows_retry(shutil.move, (src, dst))
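For orientation, a minimal sketch of how these helpers behave (an illustration assuming the vendored `dlmanager` package is importable; the paths are placeholders, not part of the diff):

import os
import tempfile

from dlmanager import fs  # assumption: the vendored package is on sys.path

workdir = tempfile.mkdtemp()
src = os.path.join(workdir, "a.txt")
with open(src, "w") as f:
    f.write("data")

# move() wraps shutil.move in the Windows retry loop shown above.
fs.move(src, os.path.join(workdir, "b.txt"))

# remove() is recursive and tolerant: removing a missing path is a no-op.
fs.remove(workdir)
fs.remove(workdir)  # safe to call again; raises nothing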
third_party/python/dlmanager/build/lib/dlmanager/manager.py (vendored, new file, 323 lines)
@@ -0,0 +1,323 @@
import os
import requests
import six
import sys
import tempfile
import threading

from contextlib import closing
from six.moves.urllib.parse import urlparse

from dlmanager import fs
from dlmanager.persist_limit import PersistLimit


class DownloadInterrupt(Exception):
    "Raised when a download is interrupted."


class Download(object):
    """
    Download is reponsible of downloading one file in the background.

    Example of use: ::

      dl = Download(url, dest)
      dl.start()
      dl.wait()  # this will block until completion / cancel / error

    If a download fail or is canceled, the temporary dest is removed from
    the disk.

    Usually, Downloads are created by using :meth:`DownloadManager.download`.

    :param url: the url of the file to download
    :param dest: the local file path destination
    :param finished_callback: a callback that will be called in the thread
                              when the thread work is done. Takes the download
                              instance as a parameter.
    :param chunk_size: size of the chunk that will be read. The thread can
                       not be stopped while we are reading that chunk size.
    :param session: a requests.Session instance that will do do the real
                    downloading work. If None, `requests` module is used.
    :param progress: A callable to report the progress (default to None).
                     see :meth:`set_progress`.
    """
    def __init__(self, url, dest, finished_callback=None,
                 chunk_size=16 * 1024, session=None, progress=None):
        self.thread = threading.Thread(
            target=self._download,
            args=(url, dest, finished_callback, chunk_size,
                  session or requests)
        )
        self._lock = threading.Lock()
        self.__url = url
        self.__dest = dest
        self.__progress = progress
        self.__canceled = False
        self.__error = None

    def start(self):
        """
        Start the thread that will do the download.
        """
        self.thread.start()

    def cancel(self):
        """
        Cancel a previously started download.
        """
        self.__canceled = True

    def is_canceled(self):
        """
        Returns True if we canceled this download.
        """
        return self.__canceled

    def is_running(self):
        """
        Returns True if the downloading thread is running.
        """
        return self.thread.is_alive()

    def wait(self, raise_if_error=True):
        """
        Block until the downloading thread is finished.

        :param raise_if_error: if True (the default), :meth:`raise_if_error`
                               will be called and raise an error if any.
        """
        while self.thread.is_alive():
            try:
                # in case of exception here (like KeyboardInterrupt),
                # cancel the task.
                self.thread.join(0.02)
            except:
                self.cancel()
                raise
        # this will raise exception that may happen inside the thread.
        if raise_if_error:
            self.raise_if_error()

    def error(self):
        """
        Returns None or a tuple of three values (type, value, traceback)
        that give information about the exception.
        """
        return self.__error

    def raise_if_error(self):
        """
        Raise an error if any. If the download was canceled, raise
        :class:`DownloadInterrupt`.
        """
        if self.__error:
            six.reraise(*self.__error)
        if self.__canceled:
            raise DownloadInterrupt()

    def set_progress(self, progress):
        """
        set a callable to report the progress of the download, or None to
        disable any report.

        The callable must take three parameters (download, current, total).
        Note that this method is thread safe, you can call it during a
        download.
        """
        with self._lock:
            self.__progress = progress

    def get_dest(self):
        """
        Returns the dest.
        """
        return self.__dest

    def get_url(self):
        """
        Returns the url.
        """
        return self.__url

    def _update_progress(self, current, total):
        with self._lock:
            if self.__progress:
                self.__progress(self, current, total)

    def _download(self, url, dest, finished_callback, chunk_size, session):
        # save the file under a temporary name
        # this allow to not use a broken file in case things went really bad
        # while downloading the file (ie the python interpreter is killed
        # abruptly)
        temp = None
        bytes_so_far = 0
        try:
            with closing(session.get(url, stream=True)) as response:
                total_size = response.headers.get('Content-length', '').strip()
                total_size = int(total_size) if total_size else None
                self._update_progress(bytes_so_far, total_size)
                # we use NamedTemporaryFile as raw open() call was causing
                # issues on windows - see:
                # https://bugzilla.mozilla.org/show_bug.cgi?id=1185756
                with tempfile.NamedTemporaryFile(
                        delete=False,
                        suffix='.tmp',
                        dir=os.path.dirname(dest)) as temp:
                    for chunk in response.iter_content(chunk_size):
                        if self.is_canceled():
                            break
                        if chunk:
                            temp.write(chunk)
                        bytes_so_far += len(chunk)
                        self._update_progress(bytes_so_far, total_size)
                response.raise_for_status()
        except:
            self.__error = sys.exc_info()
        try:
            if temp is None:
                pass  # not even opened the temp file, nothing to do
            elif self.is_canceled() or self.__error:
                fs.remove(temp.name)
            else:
                # if all goes well, then rename the file to the real dest
                fs.remove(dest)  # just in case it already existed
                fs.move(temp.name, dest)
        finally:
            if finished_callback:
                finished_callback(self)


class DownloadManager(object):
    """
    DownloadManager is responsible of starting and managing downloads inside
    a given directory. It will download a file only if a given filename
    is not already there.

    Note that background downloads needs to be stopped. For example, if
    you have an exception while a download is occuring, python will only
    exit when the download will finish. To get rid of that, there is a
    possible idiom: ::

      def download_things(manager):
          # do things with the manager
          manager.download(url1, f1)
          manager.download(url2, f2)
          ...

      manager = DownloadManager(destdir)
      try:
          download_things(manager)
      finally:
          # ensure we cancel all background downloads to ask the end
          # of possible remainings threads
          manager.cancel()

    :param destdir: a directory where files are downloaded. It will be created
                    if it does not exists.
    :param session: a requests session. If None, one will be created for you.
    :param persist_limit: an instance of :class:`PersistLimit`, to allow
                          limiting the size of the download dir. Defaults
                          to None, meaning no limit.
    """
    def __init__(self, destdir, session=None, persist_limit=None):
        self.destdir = destdir
        self.session = session or requests.Session()
        self._downloads = {}
        self._lock = threading.Lock()
        self.persist_limit = persist_limit or PersistLimit(0)
        self.persist_limit.register_dir_content(self.destdir)

        # if persist folder does not exist, create it
        if not os.path.isdir(destdir):
            os.makedirs(destdir)

    def get_dest(self, fname):
        return os.path.join(self.destdir, fname)

    def cancel(self, cancel_if=None):
        """
        Cancel downloads, if any.

        if cancel_if is given, it must be a callable that take the download
        instance as parameter, and return True if the download needs to be
        canceled.

        Note that download threads won't be stopped directly.
        """
        with self._lock:
            for download in six.itervalues(self._downloads):
                if cancel_if is None or cancel_if(download):
                    if download.is_running():
                        download.cancel()

    def wait(self, raise_if_error=True):
        """
        Wait for all downloads to be finished.
        """
        for download in self._downloads.values():
            download.wait(raise_if_error=raise_if_error)

    def download(self, url, fname=None, progress=None):
        """
        Returns a started :class:`Download` instance, or None if fname is
        already present in destdir.

        if a download is already running for the given fname, it is just
        returned. Else the download is created, started and returned.

        :param url: url of the file to download.
        :param fname: name to give for the downloaded file. If None, it will
                      be the name extracted in the url.
        :param progress: a callable to report the download progress, or None.
                         See :meth:`Download.set_progress`.
        """
        if fname is None:
            fname = urlparse(url).path.split('/')[-1]
        dest = self.get_dest(fname)
        with self._lock:
            # if we are downloading, returns the instance
            if dest in self._downloads:
                dl = self._downloads[dest]
                if progress:
                    dl.set_progress(progress)
                return dl

        if os.path.exists(dest):
            return None

        # else create the download (will be automatically removed of
        # the list on completion) start it, and returns that.
        with self._lock:
            download = Download(url, dest,
                                session=self.session,
                                finished_callback=self._download_finished,
                                progress=progress)
            self._downloads[dest] = download
            download.start()
            self._download_started(download)
            return download

    def _download_started(self, dl):
        """
        Useful when sub-classing. Report the start event of a download.

        :param dl: The :class:`Download` instance.
        """
        pass

    def _download_finished(self, dl):
        """
        Useful when sub-classing. Report the end of a download.

        Note that this is executed in the download thread. Also, you should
        make sure to call the base implementation.

        :param dl: The :class:`Download` instance.
        """
        with self._lock:
            dest = dl.get_dest()
            del self._downloads[dest]
        self.persist_limit.register_file(dest)
        self.persist_limit.remove_old_files()
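A short sketch of the progress-callback API described in the docstrings above (illustrative only; the URL and directory name are placeholders, and it assumes the vendored package is importable):

from dlmanager import DownloadManager


def report(download, current, total):
    # total is None when the server sends no Content-length header.
    if total:
        print("%s: %d%%" % (download.get_url(), 100 * current // total))


manager = DownloadManager("downloads")
dl = manager.download("https://example.com/file.bin", progress=report)
if dl is not None:  # None means the file was already present in destdir
    try:
        dl.wait()  # re-raises any exception from the download thread
    except BaseException:
        manager.cancel()
        raise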
third_party/python/dlmanager/build/lib/dlmanager/persist_limit.py (vendored, new file, 65 lines)
@@ -0,0 +1,65 @@
import os
import stat

from collections import namedtuple
from glob import glob

from dlmanager import fs


File = namedtuple('File', ('path', 'stat'))


class PersistLimit(object):
    """
    Keep a list of files, removing the oldest ones when the size_limit
    is reached.

    The access time of a file is used to determine the oldests, e.g. the
    last time a file was read.

    :param size_limit: the size limit in bytes. A value of 0 means no limit.
    :param file_limit: even if the size limit is reached, this force
                       to keep at least *file_limit* files.
    """
    def __init__(self, size_limit, file_limit=5):
        self.size_limit = size_limit
        self.file_limit = file_limit
        self.files = []
        self._files_size = 0

    def register_file(self, path):
        """
        register a single file.
        """
        try:
            fstat = os.stat(path)
        except OSError:
            # file do not exists probably, just skip it
            # note this happen when backgound files are canceled
            return
        if stat.S_ISREG(fstat.st_mode):
            self.files.append(File(path=path, stat=fstat))
            self._files_size += fstat.st_size

    def register_dir_content(self, directory, pattern="*"):
        """
        Register every files in a directory that match *pattern*.
        """
        for path in glob(os.path.join(directory, pattern)):
            self.register_file(path)

    def remove_old_files(self):
        """
        remove oldest registered files.
        """
        if self.size_limit <= 0 or self.file_limit <= 0:
            return
        # sort by creation time, oldest first
        files = sorted(self.files, key=lambda f: f.stat.st_atime)
        while len(files) > self.file_limit and \
                self._files_size >= self.size_limit:
            f = files.pop(0)
            fs.remove(f.path)
            self._files_size -= f.stat.st_size
        self.files = files
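A minimal sketch of using `PersistLimit` on its own (illustrative; the directory name and glob pattern are placeholders):

from dlmanager import PersistLimit

# Cap the cache at ~50 MB; file_limit defaults to 5, so at least five
# files are kept even when the size limit is exceeded.
limit = PersistLimit(size_limit=50 * 1024 * 1024)
limit.register_dir_content("downloads", pattern="*.bin")
limit.remove_old_files()  # prunes least-recently-accessed files first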
third_party/python/dlmanager/dlmanager.egg-info/PKG-INFO (vendored, new file, 68 lines)
@@ -0,0 +1,68 @@
Metadata-Version: 2.1
Name: dlmanager
Version: 0.1.1
Summary: download manager library
Home-page: http://github.com/parkouss/dlmanager
Author: Julien Pagès
Author-email: j.parkouss@gmail.com
License: GPL/LGPL

.. image:: https://badge.fury.io/py/dlmanager.svg
    :target: https://pypi.python.org/pypi/dlmanager

.. image:: https://readthedocs.org/projects/dlmanager/badge/?version=latest
    :target: http://dlmanager.readthedocs.org/en/latest/?badge=latest
    :alt: Documentation Status

.. image:: https://travis-ci.org/parkouss/dlmanager.svg?branch=master
    :target: https://travis-ci.org/parkouss/dlmanager

.. image:: https://codecov.io/github/parkouss/dlmanager/coverage.svg?branch=master
    :target: https://codecov.io/github/parkouss/dlmanager?branch=master

dlmanager
=========

**dlmanager** is Python 2 and 3 download manager library, with the following
features:

- Download files in background and in parallel
- Cancel downloads
- store downloads in a given directory, avoiding re-downloading files
- Limit the size of this directory, removing oldest files


Example
-------

.. code-block:: python

  from dlmanager import DownloadManager, PersistLimit

  manager = DownloadManager(
      "dlmanager-destir",
      persist_limit=PersistLimit(
          size_limit=1073741824,  # 1 GB max
          file_limit=10,  # force to keep 10 files even if size_limit is reached
      )
  )

  # Start downloads in background
  # Note that if files are already present, this is a no-op.
  manager.download(url1)
  manager.download(url2)

  # Wait for completion
  try:
      manager.wait()
  except:
      manager.cancel()
      raise


Installation
------------

Use pip: ::

  pip install -U dlmanager
third_party/python/dlmanager/dlmanager.egg-info/SOURCES.txt (vendored, new file, 14 lines)
@@ -0,0 +1,14 @@
README.rst
setup.cfg
setup.py
dlmanager/__init__.py
dlmanager/fs.py
dlmanager/manager.py
dlmanager/persist_limit.py
dlmanager.egg-info/PKG-INFO
dlmanager.egg-info/SOURCES.txt
dlmanager.egg-info/dependency_links.txt
dlmanager.egg-info/requires.txt
dlmanager.egg-info/top_level.txt
tests/test_manager.py
tests/test_persist_limit.py
third_party/python/dlmanager/dlmanager.egg-info/dependency_links.txt (vendored, new file, 1 line)
@@ -0,0 +1 @@

third_party/python/dlmanager/dlmanager.egg-info/requires.txt (vendored, new file, 2 lines)
@@ -0,0 +1,2 @@
requests
six
third_party/python/dlmanager/dlmanager.egg-info/top_level.txt (vendored, new file, 1 line)
@@ -0,0 +1 @@
dlmanager
third_party/python/gyp/build/lib/gyp/MSVSNew.py (vendored, new file, 353 lines)
@@ -0,0 +1,353 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""New implementation of Visual Studio project generation."""

import os
import random
import sys

import gyp.common

# hashlib is supplied as of Python 2.5 as the replacement interface for md5
# and other secure hashes.  In 2.6, md5 is deprecated.  Import hashlib if
# available, avoiding a deprecation warning under 2.6.  Import md5 otherwise,
# preserving 2.4 compatibility.
try:
  import hashlib
  _new_md5 = hashlib.md5
except ImportError:
  import md5
  _new_md5 = md5.new


try:
  # cmp was removed in python3.
  cmp
except NameError:
  def cmp(a, b):
    return (a > b) - (a < b)

# Initialize random number generator
random.seed()

# GUIDs for project types
ENTRY_TYPE_GUIDS = {
    'project': '{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}',
    'folder': '{2150E333-8FDC-42A3-9474-1A3956D46DE8}',
}

#------------------------------------------------------------------------------
# Helper functions


def MakeGuid(name, seed='msvs_new'):
  """Returns a GUID for the specified target name.

  Args:
    name: Target name.
    seed: Seed for MD5 hash.
  Returns:
    A GUID-line string calculated from the name and seed.

  This generates something which looks like a GUID, but depends only on the
  name and seed.  This means the same name/seed will always generate the same
  GUID, so that projects and solutions which refer to each other can explicitly
  determine the GUID to refer to explicitly.  It also means that the GUID will
  not change when the project for a target is rebuilt.
  """

  to_hash = str(seed) + str(name)
  to_hash = to_hash.encode('utf-8')
  # Calculate a MD5 signature for the seed and name.
  d = _new_md5(to_hash).hexdigest().upper()
  # Convert most of the signature to GUID form (discard the rest)
  guid = ('{' + d[:8] + '-' + d[8:12] + '-' + d[12:16] + '-' + d[16:20]
          + '-' + d[20:32] + '}')
  return guid

#------------------------------------------------------------------------------


class MSVSSolutionEntry(object):
  def __cmp__(self, other):
    # Sort by name then guid (so things are in order on vs2008).
    return cmp((self.name, self.get_guid()), (other.name, other.get_guid()))

  def __lt__(self, other):
    return (self.name, self.get_guid()) < (other.name, other.get_guid())

class MSVSFolder(MSVSSolutionEntry):
  """Folder in a Visual Studio project or solution."""

  def __init__(self, path, name = None, entries = None,
               guid = None, items = None):
    """Initializes the folder.

    Args:
      path: Full path to the folder.
      name: Name of the folder.
      entries: List of folder entries to nest inside this folder.  May contain
          Folder or Project objects.  May be None, if the folder is empty.
      guid: GUID to use for folder, if not None.
      items: List of solution items to include in the folder project.  May be
          None, if the folder does not directly contain items.
    """
    if name:
      self.name = name
    else:
      # Use last layer.
      self.name = os.path.basename(path)

    self.path = path
    self.guid = guid

    # Copy passed lists (or set to empty lists)
    self.entries = sorted(list(entries or []))
    self.items = list(items or [])

    self.entry_type_guid = ENTRY_TYPE_GUIDS['folder']

  def get_guid(self):
    if self.guid is None:
      # Use consistent guids for folders (so things don't regenerate).
      self.guid = MakeGuid(self.path, seed='msvs_folder')
    return self.guid


#------------------------------------------------------------------------------


class MSVSProject(MSVSSolutionEntry):
  """Visual Studio project."""

  def __init__(self, path, name = None, dependencies = None, guid = None,
               spec = None, build_file = None, config_platform_overrides = None,
               fixpath_prefix = None):
    """Initializes the project.

    Args:
      path: Absolute path to the project file.
      name: Name of project.  If None, the name will be the same as the base
          name of the project file.
      dependencies: List of other Project objects this project is dependent
          upon, if not None.
      guid: GUID to use for project, if not None.
      spec: Dictionary specifying how to build this project.
      build_file: Filename of the .gyp file that the vcproj file comes from.
      config_platform_overrides: optional dict of configuration platforms to
          used in place of the default for this target.
      fixpath_prefix: the path used to adjust the behavior of _fixpath
    """
    self.path = path
    self.guid = guid
    self.spec = spec
    self.build_file = build_file
    # Use project filename if name not specified
    self.name = name or os.path.splitext(os.path.basename(path))[0]

    # Copy passed lists (or set to empty lists)
    self.dependencies = list(dependencies or [])

    self.entry_type_guid = ENTRY_TYPE_GUIDS['project']

    if config_platform_overrides:
      self.config_platform_overrides = config_platform_overrides
    else:
      self.config_platform_overrides = {}
    self.fixpath_prefix = fixpath_prefix
    self.msbuild_toolset = None

  def set_dependencies(self, dependencies):
    self.dependencies = list(dependencies or [])

  def get_guid(self):
    if self.guid is None:
      # Set GUID from path
      # TODO(rspangler): This is fragile.
      # 1. We can't just use the project filename sans path, since there could
      #    be multiple projects with the same base name (for example,
      #    foo/unittest.vcproj and bar/unittest.vcproj).
      # 2. The path needs to be relative to $SOURCE_ROOT, so that the project
      #    GUID is the same whether it's included from base/base.sln or
      #    foo/bar/baz/baz.sln.
      # 3. The GUID needs to be the same each time this builder is invoked, so
      #    that we don't need to rebuild the solution when the project changes.
      # 4. We should be able to handle pre-built project files by reading the
      #    GUID from the files.
      self.guid = MakeGuid(self.name)
    return self.guid

  def set_msbuild_toolset(self, msbuild_toolset):
    self.msbuild_toolset = msbuild_toolset

#------------------------------------------------------------------------------


class MSVSSolution(object):
  """Visual Studio solution."""

  def __init__(self, path, version, entries=None, variants=None,
               websiteProperties=True):
    """Initializes the solution.

    Args:
      path: Path to solution file.
      version: Format version to emit.
      entries: List of entries in solution.  May contain Folder or Project
          objects.  May be None, if the folder is empty.
      variants: List of build variant strings.  If none, a default list will
          be used.
      websiteProperties: Flag to decide if the website properties section
          is generated.
    """
    self.path = path
    self.websiteProperties = websiteProperties
    self.version = version

    # Copy passed lists (or set to empty lists)
    self.entries = list(entries or [])

    if variants:
      # Copy passed list
      self.variants = variants[:]
    else:
      # Use default
      self.variants = ['Debug|Win32', 'Release|Win32']
    # TODO(rspangler): Need to be able to handle a mapping of solution config
    # to project config.  Should we be able to handle variants being a dict,
    # or add a separate variant_map variable?  If it's a dict, we can't
    # guarantee the order of variants since dict keys aren't ordered.


    # TODO(rspangler): Automatically write to disk for now; should delay until
    # node-evaluation time.
    self.Write()


  def Write(self, writer=gyp.common.WriteOnDiff):
    """Writes the solution file to disk.

    Raises:
      IndexError: An entry appears multiple times.
    """
    # Walk the entry tree and collect all the folders and projects.
    all_entries = set()
    entries_to_check = self.entries[:]
    while entries_to_check:
      e = entries_to_check.pop(0)

      # If this entry has been visited, nothing to do.
      if e in all_entries:
        continue

      all_entries.add(e)

      # If this is a folder, check its entries too.
      if isinstance(e, MSVSFolder):
        entries_to_check += e.entries

    all_entries = sorted(all_entries)

    # Open file and print header
    f = writer(self.path)
    f.write('Microsoft Visual Studio Solution File, '
            'Format Version %s\r\n' % self.version.SolutionVersion())
    f.write('# %s\r\n' % self.version.Description())

    # Project entries
    sln_root = os.path.split(self.path)[0]
    for e in all_entries:
      relative_path = gyp.common.RelativePath(e.path, sln_root)
      # msbuild does not accept an empty folder_name.
      # use '.' in case relative_path is empty.
      folder_name = relative_path.replace('/', '\\') or '.'
      f.write('Project("%s") = "%s", "%s", "%s"\r\n' % (
          e.entry_type_guid,          # Entry type GUID
          e.name,                     # Folder name
          folder_name,                # Folder name (again)
          e.get_guid(),               # Entry GUID
      ))

      # TODO(rspangler): Need a way to configure this stuff
      if self.websiteProperties:
        f.write('\tProjectSection(WebsiteProperties) = preProject\r\n'
                '\t\tDebug.AspNetCompiler.Debug = "True"\r\n'
                '\t\tRelease.AspNetCompiler.Debug = "False"\r\n'
                '\tEndProjectSection\r\n')

      if isinstance(e, MSVSFolder):
        if e.items:
          f.write('\tProjectSection(SolutionItems) = preProject\r\n')
          for i in e.items:
            f.write('\t\t%s = %s\r\n' % (i, i))
          f.write('\tEndProjectSection\r\n')

      if isinstance(e, MSVSProject):
        if e.dependencies:
          f.write('\tProjectSection(ProjectDependencies) = postProject\r\n')
          for d in e.dependencies:
            f.write('\t\t%s = %s\r\n' % (d.get_guid(), d.get_guid()))
          f.write('\tEndProjectSection\r\n')

      f.write('EndProject\r\n')

    # Global section
    f.write('Global\r\n')

    # Configurations (variants)
    f.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n')
    for v in self.variants:
      f.write('\t\t%s = %s\r\n' % (v, v))
    f.write('\tEndGlobalSection\r\n')

    # Sort config guids for easier diffing of solution changes.
    config_guids = []
    config_guids_overrides = {}
    for e in all_entries:
      if isinstance(e, MSVSProject):
        config_guids.append(e.get_guid())
        config_guids_overrides[e.get_guid()] = e.config_platform_overrides
    config_guids.sort()

    f.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n')
    for g in config_guids:
      for v in self.variants:
        nv = config_guids_overrides[g].get(v, v)
        # Pick which project configuration to build for this solution
        # configuration.
        f.write('\t\t%s.%s.ActiveCfg = %s\r\n' % (
            g,              # Project GUID
            v,              # Solution build configuration
            nv,             # Project build config for that solution config
        ))

        # Enable project in this solution configuration.
        f.write('\t\t%s.%s.Build.0 = %s\r\n' % (
            g,              # Project GUID
            v,              # Solution build configuration
            nv,             # Project build config for that solution config
        ))
    f.write('\tEndGlobalSection\r\n')

    # TODO(rspangler): Should be able to configure this stuff too (though I've
    # never seen this be any different)
    f.write('\tGlobalSection(SolutionProperties) = preSolution\r\n')
    f.write('\t\tHideSolutionNode = FALSE\r\n')
    f.write('\tEndGlobalSection\r\n')

    # Folder mappings
    # Omit this section if there are no folders
    if any([e.entries for e in all_entries if isinstance(e, MSVSFolder)]):
      f.write('\tGlobalSection(NestedProjects) = preSolution\r\n')
      for e in all_entries:
        if not isinstance(e, MSVSFolder):
          continue        # Does not apply to projects, only folders
        for subentry in e.entries:
          f.write('\t\t%s = %s\r\n' % (subentry.get_guid(), e.get_guid()))
      f.write('\tEndGlobalSection\r\n')

    f.write('EndGlobal\r\n')

    f.close()
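A small sketch of the deterministic-GUID property documented in MakeGuid above (assumes the vendored gyp package is importable as `gyp`):

from gyp.MSVSNew import MakeGuid

# The same name/seed pair always yields the same GUID, so regenerating a
# project never churns the solutions that refer to it.
assert MakeGuid("base") == MakeGuid("base")
assert MakeGuid("base") != MakeGuid("base", seed="other")
print(MakeGuid("base"))  # a GUID-shaped string derived from an MD5 digest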
third_party/python/gyp/build/lib/gyp/MSVSProject.py (vendored, new file, 208 lines)
@@ -0,0 +1,208 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Visual Studio project reader/writer."""

import gyp.common
import gyp.easy_xml as easy_xml

#------------------------------------------------------------------------------


class Tool(object):
  """Visual Studio tool."""

  def __init__(self, name, attrs=None):
    """Initializes the tool.

    Args:
      name: Tool name.
      attrs: Dict of tool attributes; may be None.
    """
    self._attrs = attrs or {}
    self._attrs['Name'] = name

  def _GetSpecification(self):
    """Creates an element for the tool.

    Returns:
      A new xml.dom.Element for the tool.
    """
    return ['Tool', self._attrs]

class Filter(object):
  """Visual Studio filter - that is, a virtual folder."""

  def __init__(self, name, contents=None):
    """Initializes the folder.

    Args:
      name: Filter (folder) name.
      contents: List of filenames and/or Filter objects contained.
    """
    self.name = name
    self.contents = list(contents or [])


#------------------------------------------------------------------------------


class Writer(object):
  """Visual Studio XML project writer."""

  def __init__(self, project_path, version, name, guid=None, platforms=None):
    """Initializes the project.

    Args:
      project_path: Path to the project file.
      version: Format version to emit.
      name: Name of the project.
      guid: GUID to use for project, if not None.
      platforms: Array of string, the supported platforms.  If null, ['Win32']
    """
    self.project_path = project_path
    self.version = version
    self.name = name
    self.guid = guid

    # Default to Win32 for platforms.
    if not platforms:
      platforms = ['Win32']

    # Initialize the specifications of the various sections.
    self.platform_section = ['Platforms']
    for platform in platforms:
      self.platform_section.append(['Platform', {'Name': platform}])
    self.tool_files_section = ['ToolFiles']
    self.configurations_section = ['Configurations']
    self.files_section = ['Files']

    # Keep a dict keyed on filename to speed up access.
    self.files_dict = dict()

  def AddToolFile(self, path):
    """Adds a tool file to the project.

    Args:
      path: Relative path from project to tool file.
    """
    self.tool_files_section.append(['ToolFile', {'RelativePath': path}])

  def _GetSpecForConfiguration(self, config_type, config_name, attrs, tools):
    """Returns the specification for a configuration.

    Args:
      config_type: Type of configuration node.
      config_name: Configuration name.
      attrs: Dict of configuration attributes; may be None.
      tools: List of tools (strings or Tool objects); may be None.
    Returns:
    """
    # Handle defaults
    if not attrs:
      attrs = {}
    if not tools:
      tools = []

    # Add configuration node and its attributes
    node_attrs = attrs.copy()
    node_attrs['Name'] = config_name
    specification = [config_type, node_attrs]

    # Add tool nodes and their attributes
    if tools:
      for t in tools:
        if isinstance(t, Tool):
          specification.append(t._GetSpecification())
        else:
          specification.append(Tool(t)._GetSpecification())
    return specification


  def AddConfig(self, name, attrs=None, tools=None):
    """Adds a configuration to the project.

    Args:
      name: Configuration name.
      attrs: Dict of configuration attributes; may be None.
      tools: List of tools (strings or Tool objects); may be None.
    """
    spec = self._GetSpecForConfiguration('Configuration', name, attrs, tools)
    self.configurations_section.append(spec)

  def _AddFilesToNode(self, parent, files):
    """Adds files and/or filters to the parent node.

    Args:
      parent: Destination node
      files: A list of Filter objects and/or relative paths to files.

    Will call itself recursively, if the files list contains Filter objects.
    """
    for f in files:
      if isinstance(f, Filter):
        node = ['Filter', {'Name': f.name}]
        self._AddFilesToNode(node, f.contents)
      else:
        node = ['File', {'RelativePath': f}]
        self.files_dict[f] = node
      parent.append(node)

  def AddFiles(self, files):
    """Adds files to the project.

    Args:
      files: A list of Filter objects and/or relative paths to files.

    This makes a copy of the file/filter tree at the time of this call.  If you
    later add files to a Filter object which was passed into a previous call
    to AddFiles(), it will not be reflected in this project.
    """
    self._AddFilesToNode(self.files_section, files)
    # TODO(rspangler) This also doesn't handle adding files to an existing
    # filter.  That is, it doesn't merge the trees.

  def AddFileConfig(self, path, config, attrs=None, tools=None):
    """Adds a configuration to a file.

    Args:
      path: Relative path to the file.
      config: Name of configuration to add.
      attrs: Dict of configuration attributes; may be None.
      tools: List of tools (strings or Tool objects); may be None.

    Raises:
      ValueError: Relative path does not match any file added via AddFiles().
    """
    # Find the file node with the right relative path
    parent = self.files_dict.get(path)
    if not parent:
      raise ValueError('AddFileConfig: file "%s" not in project.' % path)

    # Add the config to the file node
    spec = self._GetSpecForConfiguration('FileConfiguration', config, attrs,
                                         tools)
    parent.append(spec)

  def WriteIfChanged(self):
    """Writes the project file."""
    # First create XML content definition
    content = [
        'VisualStudioProject',
        {'ProjectType': 'Visual C++',
         'Version': self.version.ProjectVersion(),
         'Name': self.name,
         'ProjectGUID': self.guid,
         'RootNamespace': self.name,
         'Keyword': 'Win32Proj'
        },
        self.platform_section,
        self.tool_files_section,
        self.configurations_section,
        ['References'],  # empty section
        self.files_section,
        ['Globals']  # empty section
    ]
    easy_xml.WriteXmlIfChanged(content, self.project_path,
                               encoding="Windows-1252")
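A minimal sketch of driving this writer (illustrative; the fake version object below is an assumed stand-in for a gyp MSVSVersion instance, of which only ProjectVersion() is used here, and it assumes the vendored gyp package is importable):

from gyp.MSVSProject import Tool, Writer


class _FakeVersion(object):
    # Assumed stand-in for a gyp MSVSVersion object; Writer.WriteIfChanged
    # only calls ProjectVersion() on it.
    def ProjectVersion(self):
        return '8.00'


writer = Writer('app.vcproj', _FakeVersion(), 'app', platforms=['Win32'])
writer.AddConfig('Debug|Win32', attrs={'ConfigurationType': '1'},
                 tools=[Tool('VCCLCompilerTool', {'Optimization': '0'})])
writer.AddFiles(['main.cc'])
writer.AddFileConfig('main.cc', 'Debug|Win32')
writer.WriteIfChanged()  # serializes via gyp.easy_xml, Windows-1252 encoded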
third_party/python/gyp/build/lib/gyp/MSVSSettings.py (vendored, new file, 1106 lines; diff suppressed because it is too large)
third_party/python/gyp/build/lib/gyp/MSVSSettings_test.py (vendored, new file, 1486 lines; diff suppressed because it is too large)
third_party/python/gyp/build/lib/gyp/MSVSToolFile.py (vendored, new file, 58 lines)
@@ -0,0 +1,58 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Visual Studio project reader/writer."""

import gyp.common
import gyp.easy_xml as easy_xml


class Writer(object):
  """Visual Studio XML tool file writer."""

  def __init__(self, tool_file_path, name):
    """Initializes the tool file.

    Args:
      tool_file_path: Path to the tool file.
      name: Name of the tool file.
    """
    self.tool_file_path = tool_file_path
    self.name = name
    self.rules_section = ['Rules']

  def AddCustomBuildRule(self, name, cmd, description,
                         additional_dependencies,
                         outputs, extensions):
    """Adds a rule to the tool file.

    Args:
      name: Name of the rule.
      description: Description of the rule.
      cmd: Command line of the rule.
      additional_dependencies: other files which may trigger the rule.
      outputs: outputs of the rule.
      extensions: extensions handled by the rule.
    """
    rule = ['CustomBuildRule',
            {'Name': name,
             'ExecutionDescription': description,
             'CommandLine': cmd,
             'Outputs': ';'.join(outputs),
             'FileExtensions': ';'.join(extensions),
             'AdditionalDependencies':
                 ';'.join(additional_dependencies)
            }]
    self.rules_section.append(rule)

  def WriteIfChanged(self):
    """Writes the tool file."""
    content = ['VisualStudioToolFile',
               {'Version': '8.00',
                'Name': self.name
               },
               self.rules_section
               ]
    easy_xml.WriteXmlIfChanged(content, self.tool_file_path,
                               encoding="Windows-1252")
third_party/python/gyp/build/lib/gyp/MSVSUserFile.py (vendored, new file, 147 lines)
@@ -0,0 +1,147 @@
|
|||||||
|
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
"""Visual Studio user preferences file writer."""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import socket # for gethostname
|
||||||
|
|
||||||
|
import gyp.common
|
||||||
|
import gyp.easy_xml as easy_xml
|
||||||
|
|
||||||
|
|
||||||
|
#------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _FindCommandInPath(command):
|
||||||
|
"""If there are no slashes in the command given, this function
|
||||||
|
searches the PATH env to find the given command, and converts it
|
||||||
|
to an absolute path. We have to do this because MSVS is looking
|
||||||
|
for an actual file to launch a debugger on, not just a command
|
||||||
|
line. Note that this happens at GYP time, so anything needing to
|
||||||
|
be built needs to have a full path."""
|
||||||
|
if '/' in command or '\\' in command:
|
||||||
|
# If the command already has path elements (either relative or
|
||||||
|
# absolute), then assume it is constructed properly.
|
||||||
|
return command
|
||||||
|
else:
|
||||||
|
# Search through the path list and find an existing file that
|
||||||
|
# we can access.
|
||||||
|
paths = os.environ.get('PATH','').split(os.pathsep)
|
||||||
|
for path in paths:
|
||||||
|
item = os.path.join(path, command)
|
||||||
|
if os.path.isfile(item) and os.access(item, os.X_OK):
|
||||||
|
return item
|
||||||
|
return command
|
||||||
|
|
||||||
|
def _QuoteWin32CommandLineArgs(args):
|
||||||
|
new_args = []
|
||||||
|
for arg in args:
|
||||||
|
# Replace all double-quotes with double-double-quotes to escape
|
||||||
|
# them for cmd shell, and then quote the whole thing if there
|
||||||
|
# are any.
|
||||||
|
if arg.find('"') != -1:
|
||||||
|
arg = '""'.join(arg.split('"'))
|
||||||
|
arg = '"%s"' % arg
|
||||||
|
|
||||||
|
# Otherwise, if there are any spaces, quote the whole arg.
|
||||||
|
elif re.search(r'[ \t\n]', arg):
|
||||||
|
arg = '"%s"' % arg
|
||||||
|
new_args.append(arg)
|
||||||
|
return new_args


class Writer(object):
  """Visual Studio XML user file writer."""

  def __init__(self, user_file_path, version, name):
    """Initializes the user file.

    Args:
      user_file_path: Path to the user file.
      version: Version info.
      name: Name of the user file.
    """
    self.user_file_path = user_file_path
    self.version = version
    self.name = name
    self.configurations = {}

  def AddConfig(self, name):
    """Adds a configuration to the project.

    Args:
      name: Configuration name.
    """
    self.configurations[name] = ['Configuration', {'Name': name}]

  def AddDebugSettings(self, config_name, command, environment = {},
                       working_directory=""):
    """Adds a DebugSettings node to the user file for a particular config.

    Args:
      command: command line to run.  First element in the list is the
        executable.  All elements of the command will be quoted if
        necessary.
      working_directory: directory in which to run the command. (optional)
    """
    command = _QuoteWin32CommandLineArgs(command)

    abs_command = _FindCommandInPath(command[0])

    if environment and isinstance(environment, dict):
      env_list = ['%s="%s"' % (key, val)
                  for (key, val) in environment.items()]
      environment = ' '.join(env_list)
    else:
      environment = ''

    n_cmd = ['DebugSettings',
             {'Command': abs_command,
              'WorkingDirectory': working_directory,
              'CommandArguments': " ".join(command[1:]),
              'RemoteMachine': socket.gethostname(),
              'Environment': environment,
              'EnvironmentMerge': 'true',
              # Currently these are all "dummy" values that we're just setting
              # in the default manner that MSVS does it.  We could use some of
              # these to add additional capabilities, I suppose, but they might
              # not have parity with other platforms then.
              'Attach': 'false',
              'DebuggerType': '3',  # 'auto' debugger
              'Remote': '1',
              'RemoteCommand': '',
              'HttpUrl': '',
              'PDBPath': '',
              'SQLDebugging': '',
              'DebuggerFlavor': '0',
              'MPIRunCommand': '',
              'MPIRunArguments': '',
              'MPIRunWorkingDirectory': '',
              'ApplicationCommand': '',
              'ApplicationArguments': '',
              'ShimCommand': '',
              'MPIAcceptMode': '',
              'MPIAcceptFilter': ''
             }]

    # Find the config, and add it if it doesn't exist.
    if config_name not in self.configurations:
      self.AddConfig(config_name)

    # Add the DebugSettings onto the appropriate config.
    self.configurations[config_name].append(n_cmd)

  def WriteIfChanged(self):
    """Writes the user file."""
    configs = ['Configurations']
    for config, spec in sorted(self.configurations.items()):
      configs.append(spec)

    content = ['VisualStudioUserFile',
               {'Version': self.version.ProjectVersion(),
                'Name': self.name
               },
               configs]
    easy_xml.WriteXmlIfChanged(content, self.user_file_path,
                               encoding="Windows-1252")
271 third_party/python/gyp/build/lib/gyp/MSVSUtil.py vendored Normal file
@@ -0,0 +1,271 @@
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Utility functions shared amongst the Windows generators."""

import copy
import os


# A dictionary mapping supported target types to extensions.
TARGET_TYPE_EXT = {
  'executable': 'exe',
  'loadable_module': 'dll',
  'shared_library': 'dll',
  'static_library': 'lib',
  'windows_driver': 'sys',
}


def _GetLargePdbShimCcPath():
  """Returns the path of the large_pdb_shim.cc file."""
  this_dir = os.path.abspath(os.path.dirname(__file__))
  src_dir = os.path.abspath(os.path.join(this_dir, '..', '..'))
  win_data_dir = os.path.join(src_dir, 'data', 'win')
  large_pdb_shim_cc = os.path.join(win_data_dir, 'large-pdb-shim.cc')
  return large_pdb_shim_cc


def _DeepCopySomeKeys(in_dict, keys):
  """Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|.

  Arguments:
    in_dict: The dictionary to copy.
    keys: The keys to be copied. If a key is in this list and doesn't exist in
        |in_dict| this is not an error.
  Returns:
    The partially deep-copied dictionary.
  """
  d = {}
  for key in keys:
    if key not in in_dict:
      continue
    d[key] = copy.deepcopy(in_dict[key])
  return d
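# Illustrative sketch (not part of the vendored file): keys missing from the
# source dict are skipped silently, and copied values are independent of the
# originals.
#   >>> src = {'a': [1], 'b': 2}
#   >>> copied = _DeepCopySomeKeys(src, ['a', 'missing'])
#   >>> copied
#   {'a': [1]}
#   >>> copied['a'] is src['a']   # deep copy, not a shared reference
#   False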


def _SuffixName(name, suffix):
  """Add a suffix to the end of a target.

  Arguments:
    name: name of the target (foo#target)
    suffix: the suffix to be added
  Returns:
    Target name with suffix added (foo_suffix#target)
  """
  parts = name.rsplit('#', 1)
  parts[0] = '%s_%s' % (parts[0], suffix)
  return '#'.join(parts)
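# Illustrative sketch (not part of the vendored file): the suffix lands on
# the name portion, before the '#target' qualifier.
#   >>> _SuffixName('foo#target', 'shim')
#   'foo_shim#target'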


def _ShardName(name, number):
  """Add a shard number to the end of a target.

  Arguments:
    name: name of the target (foo#target)
    number: shard number
  Returns:
    Target name with shard added (foo_1#target)
  """
  return _SuffixName(name, str(number))


def ShardTargets(target_list, target_dicts):
  """Shard some targets apart to work around the linker's limits.

  Arguments:
    target_list: List of target pairs: 'base/base.gyp:base'.
    target_dicts: Dict of target properties keyed on target pair.
  Returns:
    Tuple of the new sharded versions of the inputs.
  """
  # Gather the targets to shard, and how many pieces.
  targets_to_shard = {}
  for t in target_dicts:
    shards = int(target_dicts[t].get('msvs_shard', 0))
    if shards:
      targets_to_shard[t] = shards
  # Shard target_list.
  new_target_list = []
  for t in target_list:
    if t in targets_to_shard:
      for i in range(targets_to_shard[t]):
        new_target_list.append(_ShardName(t, i))
    else:
      new_target_list.append(t)
  # Shard target_dict.
  new_target_dicts = {}
  for t in target_dicts:
    if t in targets_to_shard:
      for i in range(targets_to_shard[t]):
        name = _ShardName(t, i)
        new_target_dicts[name] = copy.copy(target_dicts[t])
        new_target_dicts[name]['target_name'] = _ShardName(
            new_target_dicts[name]['target_name'], i)
        sources = new_target_dicts[name].get('sources', [])
        new_sources = []
        for pos in range(i, len(sources), targets_to_shard[t]):
          new_sources.append(sources[pos])
        new_target_dicts[name]['sources'] = new_sources
    else:
      new_target_dicts[t] = target_dicts[t]
  # Shard dependencies.
  for t in sorted(new_target_dicts):
    for deptype in ('dependencies', 'dependencies_original'):
      dependencies = copy.copy(new_target_dicts[t].get(deptype, []))
      new_dependencies = []
      for d in dependencies:
        if d in targets_to_shard:
          for i in range(targets_to_shard[d]):
            new_dependencies.append(_ShardName(d, i))
        else:
          new_dependencies.append(d)
      new_target_dicts[t][deptype] = new_dependencies

  return (new_target_list, new_target_dicts)
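# Illustrative sketch (not part of the vendored file): with 'msvs_shard': 2,
# a target's sources are dealt out round-robin across the shards, e.g.
# sources [a, b, c, d] become shard 0 -> [a, c] and shard 1 -> [b, d], and
# any dependency on the original target is rewritten to depend on every
# shard.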


def _GetPdbPath(target_dict, config_name, vars):
  """Returns the path to the PDB file that will be generated by a given
  configuration.

  The lookup proceeds as follows:
    - Look for an explicit path in the VCLinkerTool configuration block.
    - Look for an 'msvs_large_pdb_path' variable.
    - Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is
      specified.
    - Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'.

  Arguments:
    target_dict: The target dictionary to be searched.
    config_name: The name of the configuration of interest.
    vars: A dictionary of common GYP variables with generator-specific values.
  Returns:
    The path of the corresponding PDB file.
  """
  config = target_dict['configurations'][config_name]
  msvs = config.setdefault('msvs_settings', {})

  linker = msvs.get('VCLinkerTool', {})

  pdb_path = linker.get('ProgramDatabaseFile')
  if pdb_path:
    return pdb_path

  variables = target_dict.get('variables', {})
  pdb_path = variables.get('msvs_large_pdb_path', None)
  if pdb_path:
    return pdb_path

  pdb_base = target_dict.get('product_name', target_dict['target_name'])
  pdb_base = '%s.%s.pdb' % (pdb_base, TARGET_TYPE_EXT[target_dict['type']])
  pdb_path = vars['PRODUCT_DIR'] + '/' + pdb_base

  return pdb_path
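# Illustrative sketch (not part of the vendored file): for an 'executable'
# target named 'foo' with no explicit linker setting or variable override,
# the final fallback above produces:
#   vars['PRODUCT_DIR'] + '/foo.exe.pdb'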


def InsertLargePdbShims(target_list, target_dicts, vars):
  """Insert a shim target that forces the linker to use 4KB pagesize PDBs.

  This is a workaround for targets with PDBs greater than 1GB in size, the
  limit for the 1KB pagesize PDBs created by the linker by default.

  Arguments:
    target_list: List of target pairs: 'base/base.gyp:base'.
    target_dicts: Dict of target properties keyed on target pair.
    vars: A dictionary of common GYP variables with generator-specific values.
  Returns:
    Tuple of the shimmed version of the inputs.
  """
  # Determine which targets need shimming.
  targets_to_shim = []
  for t in target_dicts:
    target_dict = target_dicts[t]

    # We only want to shim targets that have msvs_large_pdb enabled.
    if not int(target_dict.get('msvs_large_pdb', 0)):
      continue
    # This is intended for executable, shared_library and loadable_module
    # targets where every configuration is set up to produce a PDB output.
    # If any of these conditions is not true then the shim logic will fail
    # below.
    targets_to_shim.append(t)

  large_pdb_shim_cc = _GetLargePdbShimCcPath()

  for t in targets_to_shim:
    target_dict = target_dicts[t]
    target_name = target_dict.get('target_name')

    base_dict = _DeepCopySomeKeys(target_dict,
        ['configurations', 'default_configuration', 'toolset'])

    # This is the dict for copying the source file (part of the GYP tree)
    # to the intermediate directory of the project. This is necessary because
    # we can't always build a relative path to the shim source file (on Windows
    # GYP and the project may be on different drives), and Ninja hates absolute
    # paths (it ends up generating the .obj and .obj.d alongside the source
    # file, polluting GYPs tree).
    copy_suffix = 'large_pdb_copy'
    copy_target_name = target_name + '_' + copy_suffix
    full_copy_target_name = _SuffixName(t, copy_suffix)
    shim_cc_basename = os.path.basename(large_pdb_shim_cc)
    shim_cc_dir = vars['SHARED_INTERMEDIATE_DIR'] + '/' + copy_target_name
    shim_cc_path = shim_cc_dir + '/' + shim_cc_basename
    copy_dict = copy.deepcopy(base_dict)
    copy_dict['target_name'] = copy_target_name
    copy_dict['type'] = 'none'
    copy_dict['sources'] = [ large_pdb_shim_cc ]
    copy_dict['copies'] = [{
      'destination': shim_cc_dir,
      'files': [ large_pdb_shim_cc ]
    }]

    # This is the dict for the PDB generating shim target. It depends on the
    # copy target.
    shim_suffix = 'large_pdb_shim'
    shim_target_name = target_name + '_' + shim_suffix
    full_shim_target_name = _SuffixName(t, shim_suffix)
    shim_dict = copy.deepcopy(base_dict)
    shim_dict['target_name'] = shim_target_name
    shim_dict['type'] = 'static_library'
    shim_dict['sources'] = [ shim_cc_path ]
    shim_dict['dependencies'] = [ full_copy_target_name ]

    # Set up the shim to output its PDB to the same location as the final
    # linker target.
    for config_name, config in shim_dict.get('configurations').items():
      pdb_path = _GetPdbPath(target_dict, config_name, vars)

      # A few keys that we don't want to propagate.
      for key in ['msvs_precompiled_header', 'msvs_precompiled_source', 'test']:
        config.pop(key, None)

      msvs = config.setdefault('msvs_settings', {})

      # Update the compiler directives in the shim target.
      compiler = msvs.setdefault('VCCLCompilerTool', {})
      compiler['DebugInformationFormat'] = '3'
      compiler['ProgramDataBaseFileName'] = pdb_path

      # Set the explicit PDB path in the appropriate configuration of the
      # original target.
      config = target_dict['configurations'][config_name]
      msvs = config.setdefault('msvs_settings', {})
      linker = msvs.setdefault('VCLinkerTool', {})
      linker['GenerateDebugInformation'] = 'true'
      linker['ProgramDatabaseFile'] = pdb_path

    # Add the new targets. They must go to the beginning of the list so that
    # the dependency generation works as expected in ninja.
    target_list.insert(0, full_copy_target_name)
    target_list.insert(0, full_shim_target_name)
    target_dicts[full_copy_target_name] = copy_dict
    target_dicts[full_shim_target_name] = shim_dict

    # Update the original target to depend on the shim target.
    target_dict.setdefault('dependencies', []).append(full_shim_target_name)

  return (target_list, target_dicts)
504 third_party/python/gyp/build/lib/gyp/MSVSVersion.py vendored Normal file
@@ -0,0 +1,504 @@
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Handle version information related to Visual Studio."""

import errno
import os
import re
import subprocess
import sys
import gyp
import glob


def JoinPath(*args):
  return os.path.normpath(os.path.join(*args))


class VisualStudioVersion(object):
  """Information regarding a version of Visual Studio."""

  def __init__(self, short_name, description,
               solution_version, project_version, flat_sln, uses_vcxproj,
               path, sdk_based, default_toolset=None, compatible_sdks=None):
    self.short_name = short_name
    self.description = description
    self.solution_version = solution_version
    self.project_version = project_version
    self.flat_sln = flat_sln
    self.uses_vcxproj = uses_vcxproj
    self.path = path
    self.sdk_based = sdk_based
    self.default_toolset = default_toolset
    compatible_sdks = compatible_sdks or []
    compatible_sdks.sort(key=lambda v: float(v.replace('v', '')), reverse=True)
    self.compatible_sdks = compatible_sdks

  def ShortName(self):
    return self.short_name

  def Description(self):
    """Get the full description of the version."""
    return self.description

  def SolutionVersion(self):
    """Get the version number of the sln files."""
    return self.solution_version

  def ProjectVersion(self):
    """Get the version number of the vcproj or vcxproj files."""
    return self.project_version

  def FlatSolution(self):
    return self.flat_sln

  def UsesVcxproj(self):
    """Returns true if this version uses a vcxproj file."""
    return self.uses_vcxproj

  def ProjectExtension(self):
    """Returns the file extension for the project."""
    return self.uses_vcxproj and '.vcxproj' or '.vcproj'

  def Path(self):
    """Returns the path to Visual Studio installation."""
    return self.path

  def ToolPath(self, tool):
    """Returns the path to a given compiler tool."""
    return os.path.normpath(os.path.join(self.path, "VC/bin", tool))

  def DefaultToolset(self):
    """Returns the msbuild toolset version that will be used in the absence
    of a user override."""
    return self.default_toolset

  def _SetupScriptInternal(self, target_arch):
    """Returns a command (with arguments) to be used to set up the
    environment."""
    assert target_arch in ('x86', 'x64'), "target_arch not supported"
    # If WindowsSDKDir is set and SetEnv.Cmd exists then we are using the
    # depot_tools build tools and should run SetEnv.Cmd to set up the
    # environment. The check for WindowsSDKDir alone is not sufficient because
    # this is set by running vcvarsall.bat.
    sdk_dir = os.environ.get('WindowsSDKDir', '')
    setup_path = JoinPath(sdk_dir, 'Bin', 'SetEnv.Cmd')
    if self.sdk_based and sdk_dir and os.path.exists(setup_path):
      return [setup_path, '/' + target_arch]

    is_host_arch_x64 = (
      os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
      os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'
    )

    # For VS2017 (and newer) it's fairly easy
    if self.short_name >= '2017':
      script_path = JoinPath(self.path,
                             'VC', 'Auxiliary', 'Build', 'vcvarsall.bat')

      # Always use a native executable, cross-compiling if necessary.
      host_arch = 'amd64' if is_host_arch_x64 else 'x86'
      msvc_target_arch = 'amd64' if target_arch == 'x64' else 'x86'
      arg = host_arch
      if host_arch != msvc_target_arch:
        arg += '_' + msvc_target_arch

      return [script_path, arg]

    # We try to find the best version of the env setup batch.
    vcvarsall = JoinPath(self.path, 'VC', 'vcvarsall.bat')
    if target_arch == 'x86':
      if self.short_name >= '2013' and self.short_name[-1] != 'e' and \
          is_host_arch_x64:
        # VS2013 and later, non-Express have a x64-x86 cross that we want
        # to prefer.
        return [vcvarsall, 'amd64_x86']
      else:
        # Otherwise, the standard x86 compiler. We don't use VC/vcvarsall.bat
        # for x86 because vcvarsall calls vcvars32, which it can only find if
        # VS??COMNTOOLS is set, which isn't guaranteed.
        return [JoinPath(self.path, 'Common7', 'Tools', 'vsvars32.bat')]
    elif target_arch == 'x64':
      arg = 'x86_amd64'
      # Use the 64-on-64 compiler if we're not using an express edition and
      # we're running on a 64bit OS.
      if self.short_name[-1] != 'e' and is_host_arch_x64:
        arg = 'amd64'
      return [vcvarsall, arg]

  def SetupScript(self, target_arch):
    script_data = self._SetupScriptInternal(target_arch)
    script_path = script_data[0]
    if not os.path.exists(script_path):
      raise Exception('%s is missing - make sure VC++ tools are installed.' %
                      script_path)
    return script_data


def _RegistryQueryBase(sysdir, key, value):
  """Use reg.exe to read a particular key.

  While ideally we might use the win32 module, we would like gyp to be
  python neutral, so for instance cygwin python lacks this module.

  Arguments:
    sysdir: The system subdirectory to attempt to launch reg.exe from.
    key: The registry key to read from.
    value: The particular value to read.
  Return:
    stdout from reg.exe, or None for failure.
  """
  # Skip if not on Windows or Python Win32 setup issue
  if sys.platform not in ('win32', 'cygwin'):
    return None
  # Setup params to pass to and attempt to launch reg.exe
  cmd = [os.path.join(os.environ.get('WINDIR', ''), sysdir, 'reg.exe'),
         'query', key]
  if value:
    cmd.extend(['/v', value])
  p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  # Obtain the stdout from reg.exe, reading to the end so p.returncode is valid
  # Note that the error text may be in [1] in some cases
  text = p.communicate()[0]
  # Check return code from reg.exe; officially 0==success and 1==error
  if p.returncode:
    return None
  return text


def _RegistryQuery(key, value=None):
  r"""Use reg.exe to read a particular key through _RegistryQueryBase.

  First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
  that fails, it falls back to System32. Sysnative is available on Vista and
  up and available on Windows Server 2003 and XP through KB patch 942589. Note
  that Sysnative will always fail if using 64-bit python due to it being a
  virtual directory and System32 will work correctly in the first place.

  KB 942589 - http://support.microsoft.com/kb/942589/en-us.

  Arguments:
    key: The registry key.
    value: The particular registry value to read (optional).
  Return:
    stdout from reg.exe, or None for failure.
  """
  text = None
  try:
    text = _RegistryQueryBase('Sysnative', key, value)
  except OSError as e:
    if e.errno == errno.ENOENT:
      text = _RegistryQueryBase('System32', key, value)
    else:
      raise
  return text


def _RegistryGetValueUsingWinReg(key, value):
  """Use the _winreg module to obtain the value of a registry key.

  Args:
    key: The registry key.
    value: The particular registry value to read.
  Return:
    contents of the registry key's value, or None on failure.  Throws
    ImportError if _winreg is unavailable.
  """
  try:
    import _winreg as winreg
  except ImportError:
    import winreg
  try:
    root, subkey = key.split('\\', 1)
    assert root == 'HKLM'  # Only need HKLM for now.
    with winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, subkey) as hkey:
      return winreg.QueryValueEx(hkey, value)[0]
  except WindowsError:
    return None


def _RegistryGetValue(key, value):
  """Use _winreg or reg.exe to obtain the value of a registry key.

  Using _winreg is preferable because it solves an issue on some corporate
  environments where access to reg.exe is locked down. However, we still need
  to fallback to reg.exe for the case where the _winreg module is not available
  (for example in cygwin python).

  Args:
    key: The registry key.
    value: The particular registry value to read.
  Return:
    contents of the registry key's value, or None on failure.
  """
  try:
    return _RegistryGetValueUsingWinReg(key, value)
  except ImportError:
    pass

  # Fallback to reg.exe if we fail to import _winreg.
  text = _RegistryQuery(key, value)
  if not text:
    return None
  # Extract value.
  match = re.search(r'REG_\w+\s+([^\r]+)\r\n', text)
  if not match:
    return None
  return match.group(1)
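# Illustrative sketch (not part of the vendored file): the detection code
# below uses this helper to read install paths, e.g. (on a Windows host with
# Visual Studio installed):
#   path = _RegistryGetValue(r'HKLM\Software\Microsoft\VisualStudio\SxS\VC7',
#                            '15.0')
#   # -> install directory string, or None when the key/value is absent.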


def _CreateVersion(name, path, sdk_based=False):
  """Sets up MSVS project generation.

  Setup is based off the GYP_MSVS_VERSION environment variable or whatever is
  autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version is
  passed in that doesn't match a value in versions python will throw an error.
  """
  if path:
    path = os.path.normpath(path)
  versions = {
      '2019': VisualStudioVersion('2019',
                                  'Visual Studio 2019',
                                  solution_version='12.00',
                                  project_version='15.0',
                                  flat_sln=False,
                                  uses_vcxproj=True,
                                  path=path,
                                  sdk_based=sdk_based,
                                  default_toolset='v141',
                                  compatible_sdks=['v8.1', 'v10.0']),
      '2017': VisualStudioVersion('2017',
                                  'Visual Studio 2017',
                                  solution_version='12.00',
                                  project_version='15.0',
                                  flat_sln=False,
                                  uses_vcxproj=True,
                                  path=path,
                                  sdk_based=sdk_based,
                                  default_toolset='v141',
                                  compatible_sdks=['v8.1', 'v10.0']),
      '2015': VisualStudioVersion('2015',
                                  'Visual Studio 2015',
                                  solution_version='12.00',
                                  project_version='14.0',
                                  flat_sln=False,
                                  uses_vcxproj=True,
                                  path=path,
                                  sdk_based=sdk_based,
                                  default_toolset='v140'),
      '2013': VisualStudioVersion('2013',
                                  'Visual Studio 2013',
                                  solution_version='13.00',
                                  project_version='12.0',
                                  flat_sln=False,
                                  uses_vcxproj=True,
                                  path=path,
                                  sdk_based=sdk_based,
                                  default_toolset='v120'),
      '2013e': VisualStudioVersion('2013e',
                                   'Visual Studio 2013',
                                   solution_version='13.00',
                                   project_version='12.0',
                                   flat_sln=True,
                                   uses_vcxproj=True,
                                   path=path,
                                   sdk_based=sdk_based,
                                   default_toolset='v120'),
      '2012': VisualStudioVersion('2012',
                                  'Visual Studio 2012',
                                  solution_version='12.00',
                                  project_version='4.0',
                                  flat_sln=False,
                                  uses_vcxproj=True,
                                  path=path,
                                  sdk_based=sdk_based,
                                  default_toolset='v110'),
      '2012e': VisualStudioVersion('2012e',
                                   'Visual Studio 2012',
                                   solution_version='12.00',
                                   project_version='4.0',
                                   flat_sln=True,
                                   uses_vcxproj=True,
                                   path=path,
                                   sdk_based=sdk_based,
                                   default_toolset='v110'),
      '2010': VisualStudioVersion('2010',
                                  'Visual Studio 2010',
                                  solution_version='11.00',
                                  project_version='4.0',
                                  flat_sln=False,
                                  uses_vcxproj=True,
                                  path=path,
                                  sdk_based=sdk_based),
      '2010e': VisualStudioVersion('2010e',
                                   'Visual C++ Express 2010',
                                   solution_version='11.00',
                                   project_version='4.0',
                                   flat_sln=True,
                                   uses_vcxproj=True,
                                   path=path,
                                   sdk_based=sdk_based),
      '2008': VisualStudioVersion('2008',
                                  'Visual Studio 2008',
                                  solution_version='10.00',
                                  project_version='9.00',
                                  flat_sln=False,
                                  uses_vcxproj=False,
                                  path=path,
                                  sdk_based=sdk_based),
      '2008e': VisualStudioVersion('2008e',
                                   'Visual Studio 2008',
                                   solution_version='10.00',
                                   project_version='9.00',
                                   flat_sln=True,
                                   uses_vcxproj=False,
                                   path=path,
                                   sdk_based=sdk_based),
      '2005': VisualStudioVersion('2005',
                                  'Visual Studio 2005',
                                  solution_version='9.00',
                                  project_version='8.00',
                                  flat_sln=False,
                                  uses_vcxproj=False,
                                  path=path,
                                  sdk_based=sdk_based),
      '2005e': VisualStudioVersion('2005e',
                                   'Visual Studio 2005',
                                   solution_version='9.00',
                                   project_version='8.00',
                                   flat_sln=True,
                                   uses_vcxproj=False,
                                   path=path,
                                   sdk_based=sdk_based),
  }
  return versions[str(name)]


def _ConvertToCygpath(path):
  """Convert to cygwin path if we are using cygwin."""
  if sys.platform == 'cygwin':
    p = subprocess.Popen(['cygpath', path], stdout=subprocess.PIPE)
    path = p.communicate()[0].strip()
  return path


def _DetectVisualStudioVersions(versions_to_check, force_express):
  """Collect the list of installed visual studio versions.

  Returns:
    A list of visual studio versions installed in descending order of
    usage preference.
    Base this on the registry and a quick check if devenv.exe exists.
    Possibilities are:
      2005(e) - Visual Studio 2005 (8)
      2008(e) - Visual Studio 2008 (9)
      2010(e) - Visual Studio 2010 (10)
      2012(e) - Visual Studio 2012 (11)
      2013(e) - Visual Studio 2013 (12)
      2015    - Visual Studio 2015 (14)
      2017    - Visual Studio 2017 (15)
    Where (e) is e for express editions of MSVS and blank otherwise.
  """
  version_to_year = {
      '8.0': '2005',
      '9.0': '2008',
      '10.0': '2010',
      '11.0': '2012',
      '12.0': '2013',
      '14.0': '2015',
      '15.0': '2017'
  }
  versions = []
  for version in versions_to_check:
    # Old method of searching for which VS version is installed
    # We don't use the 2010-encouraged-way because we also want to get the
    # path to the binaries, which it doesn't offer.
    keys = [r'HKLM\Software\Microsoft\VisualStudio\%s' % version,
            r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s' % version,
            r'HKLM\Software\Microsoft\VCExpress\%s' % version,
            r'HKLM\Software\Wow6432Node\Microsoft\VCExpress\%s' % version]
    for index in range(len(keys)):
      path = _RegistryGetValue(keys[index], 'InstallDir')
      if not path:
        continue
      path = _ConvertToCygpath(path)
      # Check for full.
      full_path = os.path.join(path, 'devenv.exe')
      express_path = os.path.join(path, '*express.exe')
      if not force_express and os.path.exists(full_path):
        # Add this one.
        versions.append(_CreateVersion(version_to_year[version],
            os.path.join(path, '..', '..')))
      # Check for express.
      elif glob.glob(express_path):
        # Add this one.
        versions.append(_CreateVersion(version_to_year[version] + 'e',
            os.path.join(path, '..', '..')))

    # The old method above does not work when only SDK is installed.
    keys = [r'HKLM\Software\Microsoft\VisualStudio\SxS\VC7',
            r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7',
            r'HKLM\Software\Microsoft\VisualStudio\SxS\VS7',
            r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VS7']
    for index in range(len(keys)):
      path = _RegistryGetValue(keys[index], version)
      if not path:
        continue
      path = _ConvertToCygpath(path)
      if version == '15.0':
        if os.path.exists(path):
          versions.append(_CreateVersion('2017', path))
      elif version != '14.0':  # There is no Express edition for 2015.
        versions.append(_CreateVersion(version_to_year[version] + 'e',
            os.path.join(path, '..'), sdk_based=True))

  return versions


def SelectVisualStudioVersion(version='auto', allow_fallback=True):
  """Select which version of Visual Studio projects to generate.

  Arguments:
    version: Hook to allow caller to force a particular version (vs auto).
  Returns:
    An object representing a visual studio project format version.
  """
  # In auto mode, check environment variable for override.
  if version == 'auto':
    version = os.environ.get('GYP_MSVS_VERSION', 'auto')
  version_map = {
    'auto': ('15.0', '14.0', '12.0', '10.0', '9.0', '8.0', '11.0'),
    '2005': ('8.0',),
    '2005e': ('8.0',),
    '2008': ('9.0',),
    '2008e': ('9.0',),
    '2010': ('10.0',),
    '2010e': ('10.0',),
    '2012': ('11.0',),
    '2012e': ('11.0',),
    '2013': ('12.0',),
    '2013e': ('12.0',),
    '2015': ('14.0',),
    '2017': ('15.0',),
  }
  override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH')
  if override_path:
    msvs_version = os.environ.get('GYP_MSVS_VERSION')
    if not msvs_version:
      raise ValueError('GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be '
                       'set to a particular version (e.g. 2010e).')
    return _CreateVersion(msvs_version, override_path, sdk_based=True)
  version = str(version)
  versions = _DetectVisualStudioVersions(version_map[version], 'e' in version)
  if not versions:
    if not allow_fallback:
      raise ValueError('Could not locate Visual Studio installation.')
    if version == 'auto':
      # Default to 2005 if we couldn't find anything
      return _CreateVersion('2005', None)
    else:
      return _CreateVersion(version, None)
  return versions[0]
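# Illustrative sketch (not part of the vendored file): typical entry points
# for the detection logic above.
#   SelectVisualStudioVersion()            # honors GYP_MSVS_VERSION='auto'
#   SelectVisualStudioVersion('2017')      # force a specific release
# Setting GYP_MSVS_OVERRIDE_PATH also requires GYP_MSVS_VERSION, as enforced
# in SelectVisualStudioVersion.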
555 third_party/python/gyp/build/lib/gyp/__init__.py vendored Normal file
@@ -0,0 +1,555 @@
#!/usr/bin/env python

# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from __future__ import print_function

import copy
import gyp.input
import optparse
import os.path
import re
import shlex
import sys
import traceback
from gyp.common import GypError

try:
  # basestring was removed in python3.
  basestring
except NameError:
  basestring = str

# Default debug modes for GYP
debug = {}

# List of "official" debug modes, but you can use anything you like.
DEBUG_GENERAL = 'general'
DEBUG_VARIABLES = 'variables'
DEBUG_INCLUDES = 'includes'


def DebugOutput(mode, message, *args):
  if 'all' in gyp.debug or mode in gyp.debug:
    ctx = ('unknown', 0, 'unknown')
    try:
      f = traceback.extract_stack(limit=2)
      if f:
        ctx = f[0][:3]
    except:
      pass
    if args:
      message %= args
    print('%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]),
                              ctx[1], ctx[2], message))


def FindBuildFiles():
  extension = '.gyp'
  files = os.listdir(os.getcwd())
  build_files = []
  for file in files:
    if file.endswith(extension):
      build_files.append(file)
  return build_files


def Load(build_files, format, default_variables={},
         includes=[], depth='.', params=None, check=False,
         circular_check=True, duplicate_basename_check=True):
  """
  Loads one or more specified build files.
  default_variables and includes will be copied before use.
  Returns the generator for the specified format and the
  data returned by loading the specified build files.
  """
  if params is None:
    params = {}

  if '-' in format:
    format, params['flavor'] = format.split('-', 1)

  default_variables = copy.copy(default_variables)

  # Default variables provided by this program and its modules should be
  # named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace,
  # avoiding collisions with user and automatic variables.
  default_variables['GENERATOR'] = format
  default_variables['GENERATOR_FLAVOR'] = params.get('flavor', '')

  # Format can be a custom python file, or by default the name of a module
  # within gyp.generator.
  if format.endswith('.py'):
    generator_name = os.path.splitext(format)[0]
    path, generator_name = os.path.split(generator_name)

    # Make sure the path to the custom generator is in sys.path
    # Don't worry about removing it once we are done.  Keeping the path
    # to each generator that is used in sys.path is likely harmless and
    # arguably a good idea.
    path = os.path.abspath(path)
    if path not in sys.path:
      sys.path.insert(0, path)
  else:
    generator_name = 'gyp.generator.' + format

  # These parameters are passed in order (as opposed to by key)
  # because ActivePython cannot handle key parameters to __import__.
  generator = __import__(generator_name, globals(), locals(), generator_name)
  for (key, val) in generator.generator_default_variables.items():
    default_variables.setdefault(key, val)

  # Give the generator the opportunity to set additional variables based on
  # the params it will receive in the output phase.
  if getattr(generator, 'CalculateVariables', None):
    generator.CalculateVariables(default_variables, params)

  # Give the generator the opportunity to set generator_input_info based on
  # the params it will receive in the output phase.
  if getattr(generator, 'CalculateGeneratorInputInfo', None):
    generator.CalculateGeneratorInputInfo(params)

  # Fetch the generator specific info that gets fed to input, we use getattr
  # so we can default things and the generators only have to provide what
  # they need.
  generator_input_info = {
    'non_configuration_keys':
        getattr(generator, 'generator_additional_non_configuration_keys', []),
    'path_sections':
        getattr(generator, 'generator_additional_path_sections', []),
    'extra_sources_for_rules':
        getattr(generator, 'generator_extra_sources_for_rules', []),
    'generator_supports_multiple_toolsets':
        getattr(generator, 'generator_supports_multiple_toolsets', False),
    'generator_wants_static_library_dependencies_adjusted':
        getattr(generator,
                'generator_wants_static_library_dependencies_adjusted', True),
    'generator_wants_sorted_dependencies':
        getattr(generator, 'generator_wants_sorted_dependencies', False),
    'generator_filelist_paths':
        getattr(generator, 'generator_filelist_paths', None),
  }

  # Process the input specific to this generator.
  result = gyp.input.Load(build_files, default_variables, includes[:],
                          depth, generator_input_info, check, circular_check,
                          duplicate_basename_check,
                          params['parallel'], params['root_targets'])
  return [generator] + result


def NameValueListToDict(name_value_list):
  """
  Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
  of the pairs.  If a string is simply NAME, then the value in the dictionary
  is set to True.  If VALUE can be converted to an integer, it is.
  """
  result = { }
  for item in name_value_list:
    tokens = item.split('=', 1)
    if len(tokens) == 2:
      # If we can make it an int, use that, otherwise, use the string.
      try:
        token_value = int(tokens[1])
      except ValueError:
        token_value = tokens[1]
      # Set the variable to the supplied value.
      result[tokens[0]] = token_value
    else:
      # No value supplied, treat it as a boolean and set it.
      result[tokens[0]] = True
  return result
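# Illustrative sketch (not part of the vendored file): parsing mirrors the
# GYP_DEFINES convention.
#   >>> NameValueListToDict(['OS=win', 'chromium=1', 'fastbuild'])
#   {'OS': 'win', 'chromium': 1, 'fastbuild': True}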


def ShlexEnv(env_name):
  flags = os.environ.get(env_name, [])
  if flags:
    flags = shlex.split(flags)
  return flags


def FormatOpt(opt, value):
  if opt.startswith('--'):
    return '%s=%s' % (opt, value)
  return opt + value


def RegenerateAppendFlag(flag, values, predicate, env_name, options):
  """Regenerate a list of command line flags, for an option of action='append'.

  The |env_name|, if given, is checked in the environment and used to generate
  an initial list of options, then the options that were specified on the
  command line (given in |values|) are appended.  This matches the handling of
  environment variables and command line flags where command line flags
  override the environment, while not requiring the environment to be set when
  the flags are used again.
  """
  flags = []
  if options.use_environment and env_name:
    for flag_value in ShlexEnv(env_name):
      value = FormatOpt(flag, predicate(flag_value))
      if value in flags:
        flags.remove(value)
      flags.append(value)
  if values:
    for flag_value in values:
      flags.append(FormatOpt(flag, predicate(flag_value)))
  return flags


def RegenerateFlags(options):
  """Given a parsed options object, and taking the environment variables into
  account, returns a list of flags that should regenerate an equivalent options
  object (even in the absence of the environment variables.)

  Any path options will be normalized relative to depth.

  The format flag is not included, as it is assumed the calling generator will
  set that as appropriate.
  """
  def FixPath(path):
    path = gyp.common.FixIfRelativePath(path, options.depth)
    if not path:
      return os.path.curdir
    return path

  def Noop(value):
    return value

  # We always want to ignore the environment when regenerating, to avoid
  # duplicate or changed flags in the environment at the time of regeneration.
  flags = ['--ignore-environment']
  for name, metadata in options._regeneration_metadata.items():
    opt = metadata['opt']
    value = getattr(options, name)
    value_predicate = metadata['type'] == 'path' and FixPath or Noop
    action = metadata['action']
    env_name = metadata['env_name']
    if action == 'append':
      flags.extend(RegenerateAppendFlag(opt, value, value_predicate,
                                        env_name, options))
    elif action in ('store', None):  # None is a synonym for 'store'.
      if value:
        flags.append(FormatOpt(opt, value_predicate(value)))
      elif options.use_environment and env_name and os.environ.get(env_name):
        flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name))))
    elif action in ('store_true', 'store_false'):
      if ((action == 'store_true' and value) or
          (action == 'store_false' and not value)):
        flags.append(opt)
      elif options.use_environment and env_name:
        print(('Warning: environment regeneration unimplemented '
               'for %s flag %r env_name %r' % (action, opt,
                                               env_name)),
              file=sys.stderr)
    else:
      print(('Warning: regeneration unimplemented for action %r '
             'flag %r' % (action, opt)), file=sys.stderr)

  return flags


class RegeneratableOptionParser(optparse.OptionParser):
  def __init__(self):
    self.__regeneratable_options = {}
    optparse.OptionParser.__init__(self)

  def add_option(self, *args, **kw):
    """Add an option to the parser.

    This accepts the same arguments as OptionParser.add_option, plus the
    following:
      regenerate: can be set to False to prevent this option from being
        included in regeneration.
      env_name: name of environment variable that additional values for this
        option come from.
      type: adds type='path', to tell the regenerator that the values of
        this option need to be made relative to options.depth
    """
    env_name = kw.pop('env_name', None)
    if 'dest' in kw and kw.pop('regenerate', True):
      dest = kw['dest']

      # The path type is needed for regenerating, for optparse we can just
      # treat it as a string.
      type = kw.get('type')
      if type == 'path':
        kw['type'] = 'string'

      self.__regeneratable_options[dest] = {
          'action': kw.get('action'),
          'type': type,
          'env_name': env_name,
          'opt': args[0],
        }

    optparse.OptionParser.add_option(self, *args, **kw)

  def parse_args(self, *args):
    values, args = optparse.OptionParser.parse_args(self, *args)
    values._regeneration_metadata = self.__regeneratable_options
    return values, args


def gyp_main(args):
  my_name = os.path.basename(sys.argv[0])

  parser = RegeneratableOptionParser()
  usage = 'usage: %s [options ...] [build_file ...]'
  parser.set_usage(usage.replace('%s', '%prog'))
  parser.add_option('--build', dest='configs', action='append',
                    help='configuration for build after project generation')
  parser.add_option('--check', dest='check', action='store_true',
                    help='check format of gyp files')
  parser.add_option('--config-dir', dest='config_dir', action='store',
                    env_name='GYP_CONFIG_DIR', default=None,
                    help='The location for configuration files like '
                    'include.gypi.')
  parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE',
                    action='append', default=[], help='turn on a debugging '
                    'mode for debugging GYP.  Supported modes are "variables", '
                    '"includes" and "general" or "all" for all of them.')
  parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL',
                    env_name='GYP_DEFINES',
                    help='sets variable VAR to value VAL')
  parser.add_option('--depth', dest='depth', metavar='PATH', type='path',
                    help='set DEPTH gyp variable to a relative path to PATH')
  parser.add_option('-f', '--format', dest='formats', action='append',
                    env_name='GYP_GENERATORS', regenerate=False,
                    help='output formats to generate')
  parser.add_option('-G', dest='generator_flags', action='append', default=[],
                    metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS',
                    help='sets generator flag FLAG to VAL')
  parser.add_option('--generator-output', dest='generator_output',
                    action='store', default=None, metavar='DIR', type='path',
                    env_name='GYP_GENERATOR_OUTPUT',
                    help='puts generated build files under DIR')
  parser.add_option('--ignore-environment', dest='use_environment',
                    action='store_false', default=True, regenerate=False,
                    help='do not read options from environment variables')
  parser.add_option('-I', '--include', dest='includes', action='append',
                    metavar='INCLUDE', type='path',
                    help='files to include in all loaded .gyp files')
  # --no-circular-check disables the check for circular relationships between
  # .gyp files.  These relationships should not exist, but they've only been
  # observed to be harmful with the Xcode generator.  Chromium's .gyp files
  # currently have some circular relationships on non-Mac platforms, so this
  # option allows the strict behavior to be used on Macs and the lenient
  # behavior to be used elsewhere.
  # TODO(mark): Remove this option when http://crbug.com/35878 is fixed.
  parser.add_option('--no-circular-check', dest='circular_check',
                    action='store_false', default=True, regenerate=False,
                    help="don't check for circular relationships between files")
  # --no-duplicate-basename-check disables the check for duplicate basenames
  # in a static_library/shared_library project. Visual C++ 2008 generator
  # doesn't support this configuration. Libtool on Mac also generates warnings
  # when duplicate basenames are passed into Make generator on Mac.
  # TODO(yukawa): Remove this option when these legacy generators are
  # deprecated.
  parser.add_option('--no-duplicate-basename-check',
                    dest='duplicate_basename_check', action='store_false',
                    default=True, regenerate=False,
                    help="don't check for duplicate basenames")
  parser.add_option('--no-parallel', action='store_true', default=False,
                    help='Disable multiprocessing')
  parser.add_option('-S', '--suffix', dest='suffix', default='',
                    help='suffix to add to generated files')
  parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store',
                    default=None, metavar='DIR', type='path',
                    help='directory to use as the root of the source tree')
  parser.add_option('-R', '--root-target', dest='root_targets',
                    action='append', metavar='TARGET',
                    help='include only TARGET and its deep dependencies')

  options, build_files_arg = parser.parse_args(args)
  build_files = build_files_arg

  # Set up the configuration directory (defaults to ~/.gyp)
  if not options.config_dir:
    home = None
    home_dot_gyp = None
    if options.use_environment:
      home_dot_gyp = os.environ.get('GYP_CONFIG_DIR', None)
      if home_dot_gyp:
        home_dot_gyp = os.path.expanduser(home_dot_gyp)

    if not home_dot_gyp:
      home_vars = ['HOME']
      if sys.platform in ('cygwin', 'win32'):
        home_vars.append('USERPROFILE')
      for home_var in home_vars:
        home = os.getenv(home_var)
        if home is not None:
          home_dot_gyp = os.path.join(home, '.gyp')
          if not os.path.exists(home_dot_gyp):
            home_dot_gyp = None
          else:
            break
  else:
    home_dot_gyp = os.path.expanduser(options.config_dir)

  if home_dot_gyp and not os.path.exists(home_dot_gyp):
    home_dot_gyp = None

  if not options.formats:
    # If no format was given on the command line, then check the env variable.
    generate_formats = []
    if options.use_environment:
      generate_formats = os.environ.get('GYP_GENERATORS', [])
    if generate_formats:
      generate_formats = re.split(r'[\s,]', generate_formats)
    if generate_formats:
      options.formats = generate_formats
    else:
      # Nothing in the variable, default based on platform.
      if sys.platform == 'darwin':
        options.formats = ['xcode']
      elif sys.platform in ('win32', 'cygwin'):
        options.formats = ['msvs']
      else:
        options.formats = ['make']

  if not options.generator_output and options.use_environment:
    g_o = os.environ.get('GYP_GENERATOR_OUTPUT')
    if g_o:
      options.generator_output = g_o

  options.parallel = not options.no_parallel

  for mode in options.debug:
    gyp.debug[mode] = 1

  # Do an extra check to avoid work when we're not debugging.
  if DEBUG_GENERAL in gyp.debug:
    DebugOutput(DEBUG_GENERAL, 'running with these options:')
    for option, value in sorted(options.__dict__.items()):
      if option[0] == '_':
        continue
      if isinstance(value, basestring):
        DebugOutput(DEBUG_GENERAL, "  %s: '%s'", option, value)
      else:
        DebugOutput(DEBUG_GENERAL, "  %s: %s", option, value)

  if not build_files:
    build_files = FindBuildFiles()
  if not build_files:
    raise GypError((usage + '\n\n%s: error: no build_file') %
                   (my_name, my_name))

  # TODO(mark): Chromium-specific hack!
  # For Chromium, the gyp "depth" variable should always be a relative path
  # to Chromium's top-level "src" directory.  If no depth variable was set
  # on the command line, try to find a "src" directory by looking at the
  # absolute path to each build file's directory.  The first "src" component
  # found will be treated as though it were the path used for --depth.
  if not options.depth:
    for build_file in build_files:
      build_file_dir = os.path.abspath(os.path.dirname(build_file))
      build_file_dir_components = build_file_dir.split(os.path.sep)
      for component in reversed(build_file_dir_components):
        if component == 'src':
          options.depth = os.path.sep.join(build_file_dir_components)
          break
        del build_file_dir_components[-1]

      # If the inner loop found something, break without advancing to another
      # build file.
      if options.depth:
        break

    if not options.depth:
      raise GypError('Could not automatically locate src directory.  This is '
                     'a temporary Chromium feature that will be removed.  Use '
                     '--depth as a workaround.')

  # If toplevel-dir is not set, we assume that depth is the root of our source
  # tree.
  if not options.toplevel_dir:
    options.toplevel_dir = options.depth

  # -D on the command line sets variable defaults - D isn't just for define,
  # it's for default.  Perhaps there should be a way to force (-F?) a
  # variable's value so that it can't be overridden by anything else.
  cmdline_default_variables = {}
  defines = []
  if options.use_environment:
    defines += ShlexEnv('GYP_DEFINES')
  if options.defines:
    defines += options.defines
  cmdline_default_variables = NameValueListToDict(defines)
  if DEBUG_GENERAL in gyp.debug:
    DebugOutput(DEBUG_GENERAL,
                "cmdline_default_variables: %s", cmdline_default_variables)

  # Set up includes.
  includes = []

  # If ~/.gyp/include.gypi exists, it'll be forcibly included into every
  # .gyp file that's loaded, before anything else is included.
  if home_dot_gyp is not None:
    default_include = os.path.join(home_dot_gyp, 'include.gypi')
    if os.path.exists(default_include):
      print('Using overrides found in ' + default_include)
      includes.append(default_include)

  # Command-line --include files come after the default include.
  if options.includes:
    includes.extend(options.includes)

  # Generator flags should be prefixed with the target generator since they
  # are global across all generator runs.
  gen_flags = []
  if options.use_environment:
    gen_flags += ShlexEnv('GYP_GENERATOR_FLAGS')
  if options.generator_flags:
    gen_flags += options.generator_flags
  generator_flags = NameValueListToDict(gen_flags)
  if DEBUG_GENERAL in gyp.debug:
    DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags)

  # Generate all requested formats (use a set in case we got one format
  # request twice)
  for format in set(options.formats):
    params = {'options': options,
              'build_files': build_files,
              'generator_flags': generator_flags,
              'cwd': os.getcwd(),
              'build_files_arg': build_files_arg,
              'gyp_binary': sys.argv[0],
              'home_dot_gyp': home_dot_gyp,
              'parallel': options.parallel,
              'root_targets': options.root_targets,
              'target_arch': cmdline_default_variables.get('target_arch', '')}

    # Start with the default variables from the command line.
    [generator, flat_list, targets, data] = Load(
        build_files, format, cmdline_default_variables, includes,
        options.depth, params, options.check, options.circular_check,
        options.duplicate_basename_check)

    # TODO(mark): Pass |data| for now because the generator needs a list of
    # build files that came in.  In the future, maybe it should just accept
    # a list, and not the whole data dict.
    # NOTE: flat_list is the flattened dependency graph specifying the order
    # that targets may be built.  Build systems that operate serially or that
    # need to have dependencies defined before dependents reference them
    # should generate targets in the order specified in flat_list.
    generator.GenerateOutput(flat_list, targets, data, params)

    if options.configs:
      valid_configs = targets[flat_list[0]]['configurations']
      for conf in options.configs:
        if conf not in valid_configs:
          raise GypError('Invalid config specified via --build: %s' % conf)
      generator.PerformBuild(data, options.configs, params)

  # Done
  return 0


def main(args):
  try:
    return gyp_main(args)
  except GypError as e:
    sys.stderr.write("gyp: %s\n" % e)
    return 1


# NOTE: setuptools generated console_scripts calls function with no arguments
def script_main():
  return main(sys.argv[1:])


if __name__ == '__main__':
  sys.exit(script_main())
|
||||||
608
third_party/python/gyp/build/lib/gyp/common.py
vendored
Normal file
@@ -0,0 +1,608 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from __future__ import with_statement

import collections
import collections.abc
import errno
import filecmp
import os.path
import re
import tempfile
import sys


# A minimal memoizing decorator. It'll blow up if the args aren't immutable,
# among other "problems".
class memoize(object):
  def __init__(self, func):
    self.func = func
    self.cache = {}
  def __call__(self, *args):
    try:
      return self.cache[args]
    except KeyError:
      result = self.func(*args)
      self.cache[args] = result
      return result
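
# A minimal usage sketch of the decorator above (hypothetical example, not
# part of gyp): the args tuple is the cache key, so repeat calls with the
# same immutable arguments return the cached result.
#
#   @memoize
#   def fib(n):
#     return n if n < 2 else fib(n - 1) + fib(n - 2)
#
#   fib(30)  # each distinct n is computed once; repeats hit self.cache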

class GypError(Exception):
  """Error class representing an error, which is to be presented
  to the user. The main entry point will catch and display this.
  """
  pass


def ExceptionAppend(e, msg):
  """Append a message to the given exception's message."""
  if not e.args:
    e.args = (msg,)
  elif len(e.args) == 1:
    e.args = (str(e.args[0]) + ' ' + msg,)
  else:
    e.args = (str(e.args[0]) + ' ' + msg,) + e.args[1:]


def FindQualifiedTargets(target, qualified_list):
  """
  Given a list of qualified targets, return the qualified targets for the
  specified |target|.
  """
  return [t for t in qualified_list if ParseQualifiedTarget(t)[1] == target]


def ParseQualifiedTarget(target):
  # Splits a qualified target into a build file, target name and toolset.

  # NOTE: rsplit is used to disambiguate the Windows drive letter separator.
  target_split = target.rsplit(':', 1)
  if len(target_split) == 2:
    [build_file, target] = target_split
  else:
    build_file = None

  target_split = target.rsplit('#', 1)
  if len(target_split) == 2:
    [target, toolset] = target_split
  else:
    toolset = None

  return [build_file, target, toolset]


def ResolveTarget(build_file, target, toolset):
  # This function resolves a target into a canonical form:
  # - a fully defined build file, either absolute or relative to the current
  #   directory
  # - a target name
  # - a toolset
  #
  # build_file is the file relative to which 'target' is defined.
  # target is the qualified target.
  # toolset is the default toolset for that target.
  [parsed_build_file, target, parsed_toolset] = ParseQualifiedTarget(target)

  if parsed_build_file:
    if build_file:
      # If a relative path, parsed_build_file is relative to the directory
      # containing build_file. If build_file is not in the current directory,
      # parsed_build_file is not a usable path as-is. Resolve it by
      # interpreting it as relative to build_file. If parsed_build_file is
      # absolute, it is usable as a path regardless of the current directory,
      # and os.path.join will return it as-is.
      build_file = os.path.normpath(os.path.join(os.path.dirname(build_file),
                                                 parsed_build_file))
      # Further (to handle cases like ../cwd), make it relative to cwd.
      if not os.path.isabs(build_file):
        build_file = RelativePath(build_file, '.')
    else:
      build_file = parsed_build_file

  if parsed_toolset:
    toolset = parsed_toolset

  return [build_file, target, toolset]


def BuildFile(fully_qualified_target):
  # Extracts the build file from the fully qualified target.
  return ParseQualifiedTarget(fully_qualified_target)[0]


def GetEnvironFallback(var_list, default):
  """Look up a key in the environment, with fallback to secondary keys
  and finally falling back to a default value."""
  for var in var_list:
    if var in os.environ:
      return os.environ[var]
  return default


def QualifiedTarget(build_file, target, toolset):
  # "Qualified" means the file that a target was defined in and the target
  # name, separated by a colon, suffixed by a # and the toolset name:
  # /path/to/file.gyp:target_name#toolset
  fully_qualified = build_file + ':' + target
  if toolset:
    fully_qualified = fully_qualified + '#' + toolset
  return fully_qualified
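
# For example (hypothetical values, not part of the vendored file), the two
# helpers above round-trip:
#
#   QualifiedTarget('chrome/chrome.gyp', 'base', 'host')
#       => 'chrome/chrome.gyp:base#host'
#   ParseQualifiedTarget('chrome/chrome.gyp:base#host')
#       => ['chrome/chrome.gyp', 'base', 'host']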

@memoize
def RelativePath(path, relative_to, follow_path_symlink=True):
  # Assuming both |path| and |relative_to| are relative to the current
  # directory, returns a relative path that identifies path relative to
  # relative_to.
  # If |follow_path_symlink| is true (default) and |path| is a symlink, then
  # this method returns a path to the real file represented by |path|. If it is
  # false, this method returns a path to the symlink. If |path| is not a
  # symlink, this option has no effect.

  # Convert to normalized (and therefore absolute paths).
  if follow_path_symlink:
    path = os.path.realpath(path)
  else:
    path = os.path.abspath(path)
  relative_to = os.path.realpath(relative_to)

  # On Windows, we can't create a relative path to a different drive, so just
  # use the absolute path.
  if sys.platform == 'win32':
    if (os.path.splitdrive(path)[0].lower() !=
        os.path.splitdrive(relative_to)[0].lower()):
      return path

  relative = os.path.relpath(path, relative_to)
  if relative == os.path.curdir:
    # The paths were the same.
    return ''

  return relative


@memoize
def InvertRelativePath(path, toplevel_dir=None):
  """Given a path like foo/bar that is relative to toplevel_dir, return
  the inverse relative path back to the toplevel_dir.

  E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path)))
  should always produce the empty string, unless the path contains symlinks.
  """
  if not path:
    return path
  toplevel_dir = '.' if toplevel_dir is None else toplevel_dir
  return RelativePath(toplevel_dir, os.path.join(toplevel_dir, path))


def FixIfRelativePath(path, relative_to):
  # Like RelativePath but returns |path| unchanged if it is absolute.
  if os.path.isabs(path):
    return path
  return RelativePath(path, relative_to)


def UnrelativePath(path, relative_to):
  # Assuming that |relative_to| is relative to the current directory, and |path|
  # is a path relative to the dirname of |relative_to|, returns a path that
  # identifies |path| relative to the current directory.
  rel_dir = os.path.dirname(relative_to)
  return os.path.normpath(os.path.join(rel_dir, path))


# re objects used by EncodePOSIXShellArgument. See IEEE 1003.1 XCU.2.2 at
# http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_02
# and the documentation for various shells.

# _quote is a pattern that should match any argument that needs to be quoted
# with double-quotes by EncodePOSIXShellArgument. It matches the following
# characters appearing anywhere in an argument:
#   \t, \n, space  parameter separators
#   #              comments
#   $              expansions (quoted to always expand within one argument)
#   %              called out by IEEE 1003.1 XCU.2.2
#   &              job control
#   '              quoting
#   (, )           subshell execution
#   *, ?, [        pathname expansion
#   ;              command delimiter
#   <, >, |        redirection
#   =              assignment
#   {, }           brace expansion (bash)
#   ~              tilde expansion
# It also matches the empty string, because "" (or '') is the only way to
# represent an empty string literal argument to a POSIX shell.
#
# This does not match the characters in _escape, because those need to be
# backslash-escaped regardless of whether they appear in a double-quoted
# string.
_quote = re.compile('[\t\n #$%&\'()*;<=>?[{|}~]|^$')

# _escape is a pattern that should match any character that needs to be
# escaped with a backslash, whether or not the argument matched the _quote
# pattern. _escape is used with re.sub to backslash anything in _escape's
# first match group, hence the (parentheses) in the regular expression.
#
# _escape matches the following characters appearing anywhere in an argument:
#   "  to prevent POSIX shells from interpreting this character for quoting
#   \  to prevent POSIX shells from interpreting this character for escaping
#   `  to prevent POSIX shells from interpreting this character for command
#      substitution
# Missing from this list is $, because the desired behavior of
# EncodePOSIXShellArgument is to permit parameter (variable) expansion.
#
# Also missing from this list is !, which bash will interpret as the history
# expansion character when history is enabled. bash does not enable history
# by default in non-interactive shells, so this is not thought to be a problem.
# ! was omitted from this list because bash interprets "\!" as a literal string
# including the backslash character (avoiding history expansion but retaining
# the backslash), which would not be correct for argument encoding. Handling
# this case properly would also be problematic because bash allows the history
# character to be changed with the histchars shell variable. Fortunately,
# as history is not enabled in non-interactive shells and
# EncodePOSIXShellArgument is only expected to encode for non-interactive
# shells, there is no room for error here by ignoring !.
_escape = re.compile(r'(["\\`])')


def EncodePOSIXShellArgument(argument):
  """Encodes |argument| suitably for consumption by POSIX shells.

  argument may be quoted and escaped as necessary to ensure that POSIX shells
  treat the returned value as a literal representing the argument passed to
  this function. Parameter (variable) expansions beginning with $ are allowed
  to remain intact without escaping the $, to allow the argument to contain
  references to variables to be expanded by the shell.
  """

  if not isinstance(argument, str):
    argument = str(argument)

  if _quote.search(argument):
    quote = '"'
  else:
    quote = ''

  encoded = quote + re.sub(_escape, r'\\\1', argument) + quote

  return encoded


def EncodePOSIXShellList(list):
  """Encodes |list| suitably for consumption by POSIX shells.

  Returns EncodePOSIXShellArgument for each item in list, and joins them
  together using the space character as an argument separator.
  """

  encoded_arguments = []
  for argument in list:
    encoded_arguments.append(EncodePOSIXShellArgument(argument))
  return ' '.join(encoded_arguments)
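
# A quick sketch of what the encoder produces (hypothetical inputs, not part
# of the vendored file):
#
#   EncodePOSIXShellList(['echo', 'hello world', '$HOME', 'a"b'])
#       => 'echo "hello world" "$HOME" a\\"b'
#
# 'hello world' and '$HOME' match _quote and are double-quoted ($ is left
# unescaped so the shell can still expand it), while the double quote in
# 'a"b' matches _escape and is backslash-escaped.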

def DeepDependencyTargets(target_dicts, roots):
  """Returns the recursive list of target dependencies."""
  dependencies = set()
  pending = set(roots)
  while pending:
    # Pluck out one.
    r = pending.pop()
    # Skip if visited already.
    if r in dependencies:
      continue
    # Add it.
    dependencies.add(r)
    # Add its children.
    spec = target_dicts[r]
    pending.update(set(spec.get('dependencies', [])))
    pending.update(set(spec.get('dependencies_original', [])))
  return list(dependencies - set(roots))


def BuildFileTargets(target_list, build_file):
  """From a target_list, returns the subset from the specified build_file.
  """
  return [p for p in target_list if BuildFile(p) == build_file]


def AllTargets(target_list, target_dicts, build_file):
  """Returns all targets (direct and dependencies) for the specified build_file.
  """
  bftargets = BuildFileTargets(target_list, build_file)
  deptargets = DeepDependencyTargets(target_dicts, bftargets)
  return bftargets + deptargets


def WriteOnDiff(filename):
  """Write to a file only if the new contents differ.

  Arguments:
    filename: name of the file to potentially write to.
  Returns:
    A file like object which will write to temporary file and only overwrite
    the target if it differs (on close).
  """

  class Writer(object):
    """Wrapper around file which only overwrites the target if it differs."""
    def __init__(self):
      # Pick temporary file.
      tmp_fd, self.tmp_path = tempfile.mkstemp(
          suffix='.tmp',
          prefix=os.path.split(filename)[1] + '.gyp.',
          dir=os.path.split(filename)[0])
      try:
        self.tmp_file = os.fdopen(tmp_fd, 'w')
      except Exception:
        # Don't leave turds behind.
        os.unlink(self.tmp_path)
        raise

    def __getattr__(self, attrname):
      # Delegate everything else to self.tmp_file
      return getattr(self.tmp_file, attrname)

    def close(self):
      try:
        # Close tmp file.
        self.tmp_file.close()
        # Determine if different.
        same = False
        try:
          same = filecmp.cmp(self.tmp_path, filename, False)
        except OSError as e:
          if e.errno != errno.ENOENT:
            raise

        if same:
          # The new file is identical to the old one, just get rid of the new
          # one.
          os.unlink(self.tmp_path)
        else:
          # The new file is different from the old one, or there is no old one.
          # Rename the new file to the permanent name.
          #
          # tempfile.mkstemp uses an overly restrictive mode, resulting in a
          # file that can only be read by the owner, regardless of the umask.
          # There's no reason to not respect the umask here, which means that
          # an extra hoop is required to fetch it and reset the new file's mode.
          #
          # No way to get the umask without setting a new one?  Set a safe one
          # and then set it back to the old value.
          umask = os.umask(0o77)
          os.umask(umask)
          os.chmod(self.tmp_path, 0o666 & ~umask)
          if sys.platform == 'win32' and os.path.exists(filename):
            # NOTE: on windows (but not cygwin) rename will not replace an
            # existing file, so it must be preceded with a remove. Sadly there
            # is no way to make the switch atomic.
            os.remove(filename)
          os.rename(self.tmp_path, filename)
      except Exception:
        # Don't leave turds behind.
        os.unlink(self.tmp_path)
        raise

  return Writer()
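
# A minimal usage sketch (hypothetical path, not part of the vendored file):
#
#   out = WriteOnDiff('out/Makefile')
#   out.write('all:\n')   # buffered in a sibling temp file
#   out.close()           # renames over out/Makefile only if contents differ
#
# Leaving unchanged files untouched preserves their mtimes, so build systems
# that key off timestamps don't rebuild needlessly.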

def EnsureDirExists(path):
  """Make sure the directory for |path| exists."""
  try:
    os.makedirs(os.path.dirname(path))
  except OSError:
    pass


def GetFlavor(params):
  """Returns |params.flavor| if it's set, otherwise the system's default
  flavor."""
  flavors = {
    'cygwin': 'win',
    'win32': 'win',
    'darwin': 'mac',
  }

  if 'flavor' in params:
    return params['flavor']
  if sys.platform in flavors:
    return flavors[sys.platform]
  if sys.platform.startswith('sunos'):
    return 'solaris'
  if sys.platform.startswith('freebsd'):
    return 'freebsd'
  if sys.platform.startswith('openbsd'):
    return 'openbsd'
  if sys.platform.startswith('netbsd'):
    return 'netbsd'
  if sys.platform.startswith('aix'):
    return 'aix'
  if sys.platform.startswith('zos'):
    return 'zos'
  if sys.platform.startswith('os390'):
    return 'zos'

  return 'linux'


def CopyTool(flavor, out_path, generator_flags={}):
  """Finds (flock|mac|win)_tool.py in the gyp directory and copies it
  to |out_path|."""
  # aix and solaris just need flock emulation. mac and win use more complicated
  # support scripts.
  prefix = {
      'aix': 'flock',
      'solaris': 'flock',
      'mac': 'mac',
      'win': 'win'
      }.get(flavor, None)
  if not prefix:
    return

  # Slurp input file.
  source_path = os.path.join(
      os.path.dirname(os.path.abspath(__file__)), '%s_tool.py' % prefix)
  with open(source_path) as source_file:
    source = source_file.readlines()

  # Set custom header flags.
  header = '# Generated by gyp. Do not edit.\n'
  mac_toolchain_dir = generator_flags.get('mac_toolchain_dir', None)
  if flavor == 'mac' and mac_toolchain_dir:
    header += "import os;\nos.environ['DEVELOPER_DIR']='%s'\n" \
        % mac_toolchain_dir

  # Add header and write it out.
  tool_path = os.path.join(out_path, 'gyp-%s-tool' % prefix)
  with open(tool_path, 'w') as tool_file:
    tool_file.write(
        ''.join([source[0], header] + source[1:]))

  # Make file executable.
  os.chmod(tool_path, 0o755)


# From Alex Martelli,
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560
# ASPN: Python Cookbook: Remove duplicates from a sequence
# First comment, dated 2001/10/13.
# (Also in the printed Python Cookbook.)

def uniquer(seq, idfun=None):
  if idfun is None:
    idfun = lambda x: x
  seen = {}
  result = []
  for item in seq:
    marker = idfun(item)
    if marker in seen: continue
    seen[marker] = 1
    result.append(item)
  return result
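
# For example (hypothetical inputs, not part of the vendored file), order is
# preserved and later duplicates are dropped; idfun customizes the identity:
#
#   uniquer([3, 1, 3, 2, 1])                   => [3, 1, 2]
#   uniquer(['A.c', 'a.C'], idfun=str.lower)   => ['A.c']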

# Based on http://code.activestate.com/recipes/576694/.
class OrderedSet(collections.abc.MutableSet):
  def __init__(self, iterable=None):
    self.end = end = []
    end += [None, end, end]   # sentinel node for doubly linked list
    self.map = {}             # key --> [key, prev, next]
    if iterable is not None:
      self |= iterable

  def __len__(self):
    return len(self.map)

  def __contains__(self, key):
    return key in self.map

  def add(self, key):
    if key not in self.map:
      end = self.end
      curr = end[1]
      curr[2] = end[1] = self.map[key] = [key, curr, end]

  def discard(self, key):
    if key in self.map:
      key, prev_item, next_item = self.map.pop(key)
      prev_item[2] = next_item
      next_item[1] = prev_item

  def __iter__(self):
    end = self.end
    curr = end[2]
    while curr is not end:
      yield curr[0]
      curr = curr[2]

  def __reversed__(self):
    end = self.end
    curr = end[1]
    while curr is not end:
      yield curr[0]
      curr = curr[1]

  # The second argument is an addition that causes a pylint warning.
  def pop(self, last=True):  # pylint: disable=W0221
    if not self:
      raise KeyError('set is empty')
    key = self.end[1][0] if last else self.end[2][0]
    self.discard(key)
    return key

  def __repr__(self):
    if not self:
      return '%s()' % (self.__class__.__name__,)
    return '%s(%r)' % (self.__class__.__name__, list(self))

  def __eq__(self, other):
    if isinstance(other, OrderedSet):
      return len(self) == len(other) and list(self) == list(other)
    return set(self) == set(other)

  # Extensions to the recipe.
  def update(self, iterable):
    for i in iterable:
      if i not in self:
        self.add(i)
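
# A short sketch of the ordering guarantee (hypothetical values, not part of
# the vendored file):
#
#   s = OrderedSet(['b', 'a', 'b', 'c'])
#   list(s)            => ['b', 'a', 'c']  (insertion order, duplicates dropped)
#   s.pop()            => 'c'              (pops from the end by default)
#   s.pop(last=False)  => 'b'              (pops from the front)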

class CycleError(Exception):
  """An exception raised when an unexpected cycle is detected."""
  def __init__(self, nodes):
    self.nodes = nodes
  def __str__(self):
    return 'CycleError: cycle involving: ' + str(self.nodes)


def TopologicallySorted(graph, get_edges):
  r"""Topologically sort based on a user provided edge definition.

  Args:
    graph: A list of node names.
    get_edges: A function mapping from node name to a hashable collection
               of node names which this node has outgoing edges to.
  Returns:
    A list containing all of the nodes in graph in topological order.
    It is assumed that calling get_edges once for each node and caching is
    cheaper than repeatedly calling get_edges.
  Raises:
    CycleError in the event of a cycle.
  Example:
    graph = {'a': '$(b) $(c)', 'b': 'hi', 'c': '$(b)'}
    def GetEdges(node):
      return re.findall(r'\$\(([^)]*)\)', graph[node])
    print(TopologicallySorted(graph.keys(), GetEdges))
    ==>
    ['a', 'c', 'b']
  """
  get_edges = memoize(get_edges)
  visited = set()
  visiting = set()
  ordered_nodes = []
  def Visit(node):
    if node in visiting:
      raise CycleError(visiting)
    if node in visited:
      return
    visited.add(node)
    visiting.add(node)
    for neighbor in get_edges(node):
      Visit(neighbor)
    visiting.remove(node)
    ordered_nodes.insert(0, node)
  for node in sorted(graph):
    Visit(node)
  return ordered_nodes


def CrossCompileRequested():
  # TODO: figure out how to not build extra host objects in the
  # non-cross-compile case when this is enabled, and enable unconditionally.
  return (os.environ.get('GYP_CROSSCOMPILE') or
          os.environ.get('AR_host') or
          os.environ.get('CC_host') or
          os.environ.get('CXX_host') or
          os.environ.get('AR_target') or
          os.environ.get('CC_target') or
          os.environ.get('CXX_target'))
73
third_party/python/gyp/build/lib/gyp/common_test.py
vendored
Normal file
@@ -0,0 +1,73 @@
#!/usr/bin/env python

# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Unit tests for the common.py file."""

import gyp.common
import unittest
import sys


class TestTopologicallySorted(unittest.TestCase):
  def test_Valid(self):
    """Test that sorting works on a valid graph with one possible order."""
    graph = {
        'a': ['b', 'c'],
        'b': [],
        'c': ['d'],
        'd': ['b'],
    }
    def GetEdge(node):
      return tuple(graph[node])
    self.assertEqual(
        gyp.common.TopologicallySorted(graph.keys(), GetEdge),
        ['a', 'c', 'd', 'b'])

  def test_Cycle(self):
    """Test that an exception is thrown on a cyclic graph."""
    graph = {
        'a': ['b'],
        'b': ['c'],
        'c': ['d'],
        'd': ['a'],
    }
    def GetEdge(node):
      return tuple(graph[node])
    self.assertRaises(
        gyp.common.CycleError, gyp.common.TopologicallySorted,
        graph.keys(), GetEdge)


class TestGetFlavor(unittest.TestCase):
  """Test that gyp.common.GetFlavor works as intended"""
  original_platform = ''

  def setUp(self):
    self.original_platform = sys.platform

  def tearDown(self):
    sys.platform = self.original_platform

  def assertFlavor(self, expected, argument, param):
    sys.platform = argument
    self.assertEqual(expected, gyp.common.GetFlavor(param))

  def test_platform_default(self):
    self.assertFlavor('freebsd', 'freebsd9', {})
    self.assertFlavor('freebsd', 'freebsd10', {})
    self.assertFlavor('openbsd', 'openbsd5', {})
    self.assertFlavor('solaris', 'sunos5', {})
    self.assertFlavor('solaris', 'sunos', {})
    self.assertFlavor('linux', 'linux2', {})
    self.assertFlavor('linux', 'linux3', {})
    self.assertFlavor('linux', 'linux', {})

  def test_param(self):
    self.assertFlavor('foobar', 'linux2', {'flavor': 'foobar'})


if __name__ == '__main__':
  unittest.main()
170
third_party/python/gyp/build/lib/gyp/easy_xml.py
vendored
Normal file
@@ -0,0 +1,170 @@
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import re
import os
import locale
import sys

try:
  # reduce moved to functools in python3.
  reduce
except NameError:
  from functools import reduce


def XmlToString(content, encoding='utf-8', pretty=False):
  """ Returns the XML representation of |content| as a string.

  Visual Studio files have a lot of pre-defined structures. This function makes
  it easy to represent these structures as Python data structures, instead of
  having to create a lot of function calls.

  Each XML element of the content is represented as a list composed of:
  1. The name of the element, a string,
  2. The attributes of the element, a dictionary (optional), and
  3+. The content of the element, if any. Strings are simple text nodes and
      lists are child elements.

  Example 1:
      <test/>
  becomes
      ['test']

  Example 2:
      <myelement a='value1' b='value2'>
         <childtype>This is</childtype>
         <childtype>it!</childtype>
      </myelement>

  becomes
      ['myelement', {'a':'value1', 'b':'value2'},
         ['childtype', 'This is'],
         ['childtype', 'it!'],
      ]

  Args:
    content: The structured content to be converted.
    encoding: The encoding to report on the first XML line.
    pretty: True if we want pretty printing with indents and new lines.

  Returns:
    The XML content as a string.
  """
  # We create a huge list of all the elements of the file.
  xml_parts = ['<?xml version="1.0" encoding="%s"?>' % encoding]
  if pretty:
    xml_parts.append('\n')
  _ConstructContentList(xml_parts, content, pretty)

  # Convert it to a string
  return ''.join(xml_parts)
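
# For instance (hypothetical structure, not part of the vendored file):
#
#   XmlToString(['greeting', {'lang': 'en'}, 'hi'])
#       => '<?xml version="1.0" encoding="utf-8"?>'
#          '<greeting lang="en">hi</greeting>'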

def _ConstructContentList(xml_parts, specification, pretty, level=0):
  """ Appends the XML parts corresponding to the specification.

  Args:
    xml_parts: A list of XML parts to be appended to.
    specification: The specification of the element. See EasyXml docs.
    pretty: True if we want pretty printing with indents and new lines.
    level: Indentation level.
  """
  # The first item in a specification is the name of the element.
  if pretty:
    indentation = '  ' * level
    new_line = '\n'
  else:
    indentation = ''
    new_line = ''
  name = specification[0]
  if not isinstance(name, str):
    raise Exception('The first item of an EasyXml specification should be '
                    'a string. Specification was ' + str(specification))
  xml_parts.append(indentation + '<' + name)

  # Optionally in second position is a dictionary of the attributes.
  rest = specification[1:]
  if rest and isinstance(rest[0], dict):
    for at, val in sorted(rest[0].items()):
      xml_parts.append(' %s="%s"' % (at, _XmlEscape(val, attr=True)))
    rest = rest[1:]
  if rest:
    xml_parts.append('>')
    all_strings = reduce(lambda x, y: x and isinstance(y, str), rest, True)
    multi_line = not all_strings
    if multi_line and new_line:
      xml_parts.append(new_line)
    for child_spec in rest:
      # If it's a string, append a text node.
      # Otherwise recurse over that child definition
      if isinstance(child_spec, str):
        xml_parts.append(_XmlEscape(child_spec))
      else:
        _ConstructContentList(xml_parts, child_spec, pretty, level + 1)
    if multi_line and indentation:
      xml_parts.append(indentation)
    xml_parts.append('</%s>%s' % (name, new_line))
  else:
    xml_parts.append('/>%s' % new_line)


def WriteXmlIfChanged(content, path, encoding='utf-8', pretty=False,
                      win32=False):
  """ Writes the XML content to disk, touching the file only if it has changed.

  Args:
    content: The structured content to be written.
    path: Location of the file.
    encoding: The encoding to report on the first line of the XML file.
    pretty: True if we want pretty printing with indents and new lines.
  """
  xml_string = XmlToString(content, encoding, pretty)
  if win32 and os.linesep != '\r\n':
    xml_string = xml_string.replace('\n', '\r\n')
  default_encoding = locale.getdefaultlocale()[1]
  if default_encoding and default_encoding.upper() != encoding.upper():
    try:
      xml_string = xml_string.decode(default_encoding).encode(encoding)
    except AttributeError:
      pass

  # Get the old content
  try:
    f = open(path, 'r')
    existing = f.read()
    f.close()
  except IOError:
    existing = None

  # It has changed, write it
  if existing != xml_string:
    f = open(path, 'w')
    f.write(xml_string)
    f.close()


_xml_escape_map = {
    '"': '&quot;',
    "'": '&apos;',
    '<': '&lt;',
    '>': '&gt;',
    '&': '&amp;',
    '\n': '&#xA;',
    '\r': '&#xD;',
}


_xml_escape_re = re.compile(
    "(%s)" % "|".join(map(re.escape, _xml_escape_map.keys())))


def _XmlEscape(value, attr=False):
  """ Escape a string for inclusion in XML."""
  def replace(match):
    m = match.string[match.start() : match.end()]
    # don't replace single quotes in attrs
    if attr and m == "'":
      return m
    return _xml_escape_map[m]
  return _xml_escape_re.sub(replace, value)
106
third_party/python/gyp/build/lib/gyp/easy_xml_test.py
vendored
Normal file
@@ -0,0 +1,106 @@
#!/usr/bin/env python

# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

""" Unit tests for the easy_xml.py file. """

import gyp.easy_xml as easy_xml
import unittest
try:
  from StringIO import StringIO
except ImportError:
  from io import StringIO


class TestSequenceFunctions(unittest.TestCase):

  def setUp(self):
    self.stderr = StringIO()

  def test_EasyXml_simple(self):
    self.assertEqual(
      easy_xml.XmlToString(['test']),
      '<?xml version="1.0" encoding="utf-8"?><test/>')

    self.assertEqual(
      easy_xml.XmlToString(['test'], encoding='Windows-1252'),
      '<?xml version="1.0" encoding="Windows-1252"?><test/>')

  def test_EasyXml_simple_with_attributes(self):
    self.assertEqual(
      easy_xml.XmlToString(['test2', {'a': 'value1', 'b': 'value2'}]),
      '<?xml version="1.0" encoding="utf-8"?><test2 a="value1" b="value2"/>')

  def test_EasyXml_escaping(self):
    original = '<test>\'"\r&\nfoo'
    converted = '&lt;test&gt;\'&quot;&#xD;&amp;&#xA;foo'
    converted_apos = converted.replace("'", '&apos;')
    self.assertEqual(
      easy_xml.XmlToString(['test3', {'a': original}, original]),
      '<?xml version="1.0" encoding="utf-8"?><test3 a="%s">%s</test3>' %
      (converted, converted_apos))

  def test_EasyXml_pretty(self):
    self.assertEqual(
      easy_xml.XmlToString(
          ['test3',
            ['GrandParent',
              ['Parent1',
                ['Child']
              ],
              ['Parent2']
            ]
          ],
          pretty=True),
      '<?xml version="1.0" encoding="utf-8"?>\n'
      '<test3>\n'
      '  <GrandParent>\n'
      '    <Parent1>\n'
      '      <Child/>\n'
      '    </Parent1>\n'
      '    <Parent2/>\n'
      '  </GrandParent>\n'
      '</test3>\n')


  def test_EasyXml_complex(self):
    # We want to create:
    target = (
      '<?xml version="1.0" encoding="utf-8"?>'
      '<Project>'
      '<PropertyGroup Label="Globals">'
      '<ProjectGuid>{D2250C20-3A94-4FB9-AF73-11BC5B73884B}</ProjectGuid>'
      '<Keyword>Win32Proj</Keyword>'
      '<RootNamespace>automated_ui_tests</RootNamespace>'
      '</PropertyGroup>'
      '<Import Project="$(VCTargetsPath)\\Microsoft.Cpp.props"/>'
      '<PropertyGroup '
      'Condition="\'$(Configuration)|$(Platform)\'=='
      '\'Debug|Win32\'" Label="Configuration">'
      '<ConfigurationType>Application</ConfigurationType>'
      '<CharacterSet>Unicode</CharacterSet>'
      '</PropertyGroup>'
      '</Project>')

    xml = easy_xml.XmlToString(
      ['Project',
        ['PropertyGroup', {'Label': 'Globals'},
          ['ProjectGuid', '{D2250C20-3A94-4FB9-AF73-11BC5B73884B}'],
          ['Keyword', 'Win32Proj'],
          ['RootNamespace', 'automated_ui_tests']
        ],
        ['Import', {'Project': '$(VCTargetsPath)\\Microsoft.Cpp.props'}],
        ['PropertyGroup',
          {'Condition': "'$(Configuration)|$(Platform)'=='Debug|Win32'",
           'Label': 'Configuration'},
          ['ConfigurationType', 'Application'],
          ['CharacterSet', 'Unicode']
        ]
      ])
    self.assertEqual(xml, target)


if __name__ == '__main__':
  unittest.main()
54
third_party/python/gyp/build/lib/gyp/flock_tool.py
vendored
Normal file
@@ -0,0 +1,54 @@
#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""These functions are executed via gyp-flock-tool when using the Makefile
generator. Used on systems that don't have a built-in flock."""

import fcntl
import os
import struct
import subprocess
import sys


def main(args):
  executor = FlockTool()
  executor.Dispatch(args)


class FlockTool(object):
  """This class emulates the 'flock' command."""
  def Dispatch(self, args):
    """Dispatches a string command to a method."""
    if len(args) < 1:
      raise Exception("Not enough arguments")

    method = "Exec%s" % self._CommandifyName(args[0])
    getattr(self, method)(*args[1:])

  def _CommandifyName(self, name_string):
    """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
    return name_string.title().replace('-', '')

  def ExecFlock(self, lockfile, *cmd_list):
    """Emulates the most basic behavior of Linux's flock(1)."""
    # Rely on exception handling to report errors.
    # Note that the stock python on SunOS has a bug
    # where fcntl.flock(fd, LOCK_EX) always fails
    # with EBADF, that's why we use this F_SETLK
    # hack instead.
    fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
    if sys.platform.startswith('aix'):
      # Python on AIX is compiled with LARGEFILE support, which changes the
      # struct size.
      op = struct.pack('hhIllqq', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
    else:
      op = struct.pack('hhllhhl', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
    fcntl.fcntl(fd, fcntl.F_SETLK, op)
    return subprocess.call(cmd_list)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
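
# A sketch of invoking the tool above programmatically (hypothetical command,
# not part of the vendored file); generated Makefiles do the same through the
# gyp-flock-tool script that CopyTool writes out:
#
#   FlockTool().Dispatch(
#       ['flock', '/tmp/linker.lock', 'ld', '-o', 'app', 'main.o'])
#
# Dispatch maps the 'flock' verb to ExecFlock, which takes the lock with
# F_SETLK and then runs the remaining arguments as a command.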
0
third_party/python/gyp/build/lib/gyp/generator/__init__.py
vendored
Normal file
739
third_party/python/gyp/build/lib/gyp/generator/analyzer.py
vendored
Normal file
@@ -0,0 +1,739 @@
# Copyright (c) 2014 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
This script is intended for use as a GYP_GENERATOR. It takes as input (by way of
the generator flag config_path) the path of a json file that dictates the files
and targets to search for. The following keys are supported:
files: list of paths (relative) of the files to search for.
test_targets: unqualified target names to search for. Any target in this list
that depends upon a file in |files| is output regardless of the type of target
or chain of dependencies.
additional_compile_targets: Unqualified targets to search for in addition to
test_targets. Targets in the combined list that depend upon a file in |files|
are not necessarily output. For example, if the target is of type none then the
target is not output (but one of the descendants of the target will be).

The following is output:
error: only supplied if there is an error.
compile_targets: minimal set of targets that directly or indirectly (for
targets of type none) depend on the files in |files| and is one of the
supplied targets or a target that one of the supplied targets depends on.
The expectation is this set of targets is passed into a build step. This list
always contains the output of test_targets as well.
test_targets: set of targets from the supplied |test_targets| that either
directly or indirectly depend upon a file in |files|. This list is useful
if additional processing needs to be done for certain targets after the
build, such as running tests.
status: outputs one of three values: none of the supplied files were found,
one of the include files changed so that it should be assumed everything
changed (in this case test_targets and compile_targets are not output) or at
least one file was found.
invalid_targets: list of supplied targets that were not found.

Example:
Consider a graph like the following:
  A     D
 / \
B   C
A depends upon both B and C, A is of type none and B and C are executables.
D is an executable, has no dependencies and nothing depends on it.
If |additional_compile_targets| = ["A"], |test_targets| = ["B", "C"] and
files = ["b.cc", "d.cc"] (B depends upon b.cc and D depends upon d.cc), then
the following is output:
|compile_targets| = ["B"] B must be built as it depends upon the changed file
b.cc and the supplied target A depends upon it. A is not output as a
build_target as it is of type none with no rules and actions.
|test_targets| = ["B"] B directly depends upon the changed file b.cc.

Even though the file d.cc, which D depends upon, has changed, D is not output
as it was not supplied by way of |additional_compile_targets| or
|test_targets|.

If the generator flag analyzer_output_path is specified, output is written
there. Otherwise output is written to stdout.

In Gyp the "all" target is shorthand for the root targets in the files passed
to gyp. For example, if file "a.gyp" contains targets "a1" and
"a2", and file "b.gyp" contains targets "b1" and "b2" and "a2" has a dependency
on "b2" and gyp is supplied "a.gyp" then "all" consists of "a1" and "a2".
Notice that "b1" and "b2" are not in the "all" target as "b.gyp" was not
directly supplied to gyp. OTOH if both "a.gyp" and "b.gyp" are supplied to gyp
then the "all" target includes "b1" and "b2".
"""

from __future__ import print_function

import gyp.common
import gyp.ninja_syntax as ninja_syntax
import json
import os
import posixpath
import sys

debug = False

found_dependency_string = 'Found dependency'
no_dependency_string = 'No dependencies'
# Status when it should be assumed that everything has changed.
all_changed_string = 'Found dependency (all)'

# MatchStatus is used to indicate if and how a target depends upon the
# supplied sources.
# The target's sources contain one of the supplied paths.
MATCH_STATUS_MATCHES = 1
# The target has a dependency on another target that contains one of the
# supplied paths.
MATCH_STATUS_MATCHES_BY_DEPENDENCY = 2
# The target's sources weren't in the supplied paths and none of the target's
# dependencies depend upon a target that matched.
MATCH_STATUS_DOESNT_MATCH = 3
# The target doesn't contain the source, but the dependent targets have not yet
# been visited to determine a more specific status yet.
MATCH_STATUS_TBD = 4

generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()

generator_wants_static_library_dependencies_adjusted = False

generator_default_variables = {}
for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR',
                'LIB_DIR', 'SHARED_LIB_DIR']:
  generator_default_variables[dirname] = '!!!'

for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
               'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
               'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
               'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
               'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
               'CONFIGURATION_NAME']:
  generator_default_variables[unused] = ''


def _ToGypPath(path):
  """Converts a path to the format used by gyp."""
  if os.sep == '\\' and os.altsep == '/':
    return path.replace('\\', '/')
  return path


def _ResolveParent(path, base_path_components):
  """Resolves |path|, which starts with at least one '../'. Returns an empty
  string if the path shouldn't be considered. See _AddSources() for a
  description of |base_path_components|."""
  depth = 0
  while path.startswith('../'):
    depth += 1
    path = path[3:]
  # Relative includes may go outside the source tree. For example, an action may
  # have inputs in /usr/include, which are not in the source tree.
  if depth > len(base_path_components):
    return ''
  if depth == len(base_path_components):
    return path
  return '/'.join(base_path_components[0:len(base_path_components) - depth]) + \
      '/' + path


def _AddSources(sources, base_path, base_path_components, result):
  """Extracts valid sources from |sources| and adds them to |result|. Each
  source file is relative to |base_path|, but may contain '..'. To make
  resolving '..' easier |base_path_components| contains each of the
  directories in |base_path|. Additionally each source may contain variables.
  Such sources are ignored as it is assumed dependencies on them are expressed
  and tracked in some other means."""
  # NOTE: gyp paths are always posix style.
  for source in sources:
    if not len(source) or source.startswith('!!!') or source.startswith('$'):
      continue
    # variable expansion may lead to //.
    org_source = source
    source = source[0] + source[1:].replace('//', '/')
    if source.startswith('../'):
      source = _ResolveParent(source, base_path_components)
      if len(source):
        result.append(source)
      continue
    result.append(base_path + source)
    if debug:
      print('AddSource', org_source, result[len(result) - 1])


def _ExtractSourcesFromAction(action, base_path, base_path_components,
                              results):
  if 'inputs' in action:
    _AddSources(action['inputs'], base_path, base_path_components, results)


def _ToLocalPath(toplevel_dir, path):
  """Converts |path| to a path relative to |toplevel_dir|."""
  if path == toplevel_dir:
    return ''
  if path.startswith(toplevel_dir + '/'):
    return path[len(toplevel_dir) + len('/'):]
  return path


def _ExtractSources(target, target_dict, toplevel_dir):
  # |target| is either absolute or relative and in the format of the OS. Gyp
  # source paths are always posix. Convert |target| to a posix path relative to
  # |toplevel_dir_|. This is done to make it easy to build source paths.
  base_path = posixpath.dirname(_ToLocalPath(toplevel_dir, _ToGypPath(target)))
  base_path_components = base_path.split('/')

  # Add a trailing '/' so that _AddSources() can easily build paths.
  if len(base_path):
    base_path += '/'

  if debug:
    print('ExtractSources', target, base_path)

  results = []
  if 'sources' in target_dict:
    _AddSources(target_dict['sources'], base_path, base_path_components,
                results)
  # Include the inputs from any actions. Any changes to these affect the
  # resulting output.
  if 'actions' in target_dict:
    for action in target_dict['actions']:
      _ExtractSourcesFromAction(action, base_path, base_path_components,
                                results)
  if 'rules' in target_dict:
    for rule in target_dict['rules']:
      _ExtractSourcesFromAction(rule, base_path, base_path_components, results)

  return results


class Target(object):
  """Holds information about a particular target:
  deps: set of Targets this Target depends upon. This is not recursive, only the
    direct dependent Targets.
  match_status: one of the MatchStatus values.
  back_deps: set of Targets that have a dependency on this Target.
  visited: used during iteration to indicate whether we've visited this target.
    This is used for two iterations, once in building the set of Targets and
    again in _GetBuildTargets().
  name: fully qualified name of the target.
  requires_build: True if the target type is such that it needs to be built.
    See _DoesTargetTypeRequireBuild for details.
  added_to_compile_targets: used when determining if the target was added to the
    set of targets that needs to be built.
  in_roots: true if this target is a descendant of one of the root nodes.
  is_executable: true if the type of target is executable.
  is_static_library: true if the type of target is static_library.
  is_or_has_linked_ancestor: true if the target does a link (eg executable), or
    if there is a target in back_deps that does a link."""
  def __init__(self, name):
    self.deps = set()
    self.match_status = MATCH_STATUS_TBD
    self.back_deps = set()
    self.name = name
    # TODO(sky): I don't like hanging this off Target. This state is specific
    # to certain functions and should be isolated there.
    self.visited = False
    self.requires_build = False
    self.added_to_compile_targets = False
    self.in_roots = False
    self.is_executable = False
    self.is_static_library = False
    self.is_or_has_linked_ancestor = False


class Config(object):
  """Details what we're looking for
  files: set of files to search for
  targets: see file description for details."""
  def __init__(self):
    self.files = []
    self.targets = set()
    self.additional_compile_target_names = set()
    self.test_target_names = set()

  def Init(self, params):
    """Initializes Config. This is a separate method as it raises an exception
    if there is a parse error."""
    generator_flags = params.get('generator_flags', {})
    config_path = generator_flags.get('config_path', None)
    if not config_path:
      return
    try:
      f = open(config_path, 'r')
      config = json.load(f)
      f.close()
    except IOError:
      raise Exception('Unable to open file ' + config_path)
    except ValueError as e:
      raise Exception('Unable to parse config file ' + config_path + str(e))
    if not isinstance(config, dict):
      raise Exception('config_path must be a JSON file containing a dictionary')
    self.files = config.get('files', [])
    self.additional_compile_target_names = set(
        config.get('additional_compile_targets', []))
    self.test_target_names = set(config.get('test_targets', []))
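
  # A sketch of a config file that Init accepts (hypothetical contents, not
  # part of the vendored file); the path is supplied via the config_path
  # generator flag described in the module docstring:
  #
  #   {
  #     "files": ["chrome/browser/ui/browser.cc"],
  #     "test_targets": ["browser_tests"],
  #     "additional_compile_targets": ["all"]
  #   }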
|
||||||
|
|
||||||
|
|
||||||
|


def _WasBuildFileModified(build_file, data, files, toplevel_dir):
  """Returns true if the build file |build_file| is either in |files| or
  one of the files included by |build_file| is in |files|. |toplevel_dir| is
  the root of the source tree."""
  if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files:
    if debug:
      print('gyp file modified', build_file)
    return True

  # First element of included_files is the file itself.
  if len(data[build_file]['included_files']) <= 1:
    return False

  for include_file in data[build_file]['included_files'][1:]:
    # |included_files| are relative to the directory of the |build_file|.
    rel_include_file = \
        _ToGypPath(gyp.common.UnrelativePath(include_file, build_file))
    if _ToLocalPath(toplevel_dir, rel_include_file) in files:
      if debug:
        print('included gyp file modified, gyp_file=', build_file,
              'included file=', rel_include_file)
      return True
  return False


def _GetOrCreateTargetByName(targets, target_name):
  """Creates or returns the Target at targets[target_name]. If there is no
  Target for |target_name| one is created. Returns a tuple of whether a new
  Target was created and the Target."""
  if target_name in targets:
    return False, targets[target_name]
  target = Target(target_name)
  targets[target_name] = target
  return True, target


def _DoesTargetTypeRequireBuild(target_dict):
  """Returns true if the target type is such that it needs to be built."""
  # If a 'none' target has rules or actions we assume it requires a build.
  return bool(target_dict['type'] != 'none' or
              target_dict.get('actions') or target_dict.get('rules'))


def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files,
                     build_files):
  """Returns a tuple of the following:
  . A dictionary mapping from fully qualified name to Target.
  . A list of the targets that have a source file in |files|.
  . Targets that constitute the 'all' target. See description at top of file
    for details on the 'all' target.
  This sets the |match_status| of the targets that contain any of the source
  files in |files| to MATCH_STATUS_MATCHES.
  |toplevel_dir| is the root of the source tree."""
  # Maps from target name to Target.
  name_to_target = {}

  # Targets that matched.
  matching_targets = []

  # Queue of targets to visit.
  targets_to_visit = target_list[:]

  # Maps from build file to a boolean indicating whether the build file is in
  # |files|.
  build_file_in_files = {}

  # Root targets across all files.
  roots = set()

  # Set of Targets in |build_files|.
  build_file_targets = set()

  while len(targets_to_visit) > 0:
    target_name = targets_to_visit.pop()
    created_target, target = _GetOrCreateTargetByName(name_to_target,
                                                      target_name)
    if created_target:
      roots.add(target)
    elif target.visited:
      continue

    target.visited = True
    target.requires_build = _DoesTargetTypeRequireBuild(
        target_dicts[target_name])
    target_type = target_dicts[target_name]['type']
    target.is_executable = target_type == 'executable'
    target.is_static_library = target_type == 'static_library'
    target.is_or_has_linked_ancestor = (target_type == 'executable' or
                                        target_type == 'shared_library')

    build_file = gyp.common.ParseQualifiedTarget(target_name)[0]
    if build_file not in build_file_in_files:
      build_file_in_files[build_file] = \
          _WasBuildFileModified(build_file, data, files, toplevel_dir)

    if build_file in build_files:
      build_file_targets.add(target)

    # If a build file (or any of its included files) is modified we assume all
    # targets in the file are modified.
    if build_file_in_files[build_file]:
      print('matching target from modified build file', target_name)
      target.match_status = MATCH_STATUS_MATCHES
      matching_targets.append(target)
    else:
      sources = _ExtractSources(target_name, target_dicts[target_name],
                                toplevel_dir)
      for source in sources:
        if _ToGypPath(os.path.normpath(source)) in files:
          print('target', target_name, 'matches', source)
          target.match_status = MATCH_STATUS_MATCHES
          matching_targets.append(target)
          break

    # Add dependencies to visit as well as updating back pointers for deps.
    for dep in target_dicts[target_name].get('dependencies', []):
      targets_to_visit.append(dep)

      created_dep_target, dep_target = _GetOrCreateTargetByName(name_to_target,
                                                                dep)
      if not created_dep_target:
        roots.discard(dep_target)

      target.deps.add(dep_target)
      dep_target.back_deps.add(target)

  return name_to_target, matching_targets, roots & build_file_targets


def _GetUnqualifiedToTargetMapping(all_targets, to_find):
  """Returns a tuple of the following:
  . mapping (dictionary) from unqualified name to Target for all the
    Targets in |to_find|.
  . any target names not found. If this is empty all targets were found."""
  result = {}
  if not to_find:
    return {}, []
  to_find = set(to_find)
  for target_name in all_targets.keys():
    extracted = gyp.common.ParseQualifiedTarget(target_name)
    if len(extracted) > 1 and extracted[1] in to_find:
      to_find.remove(extracted[1])
      result[extracted[1]] = all_targets[target_name]
      if not to_find:
        return result, []
  return result, [x for x in to_find]


def _DoesTargetDependOnMatchingTargets(target):
  """Returns true if |target| or any of its dependencies is one of the
  targets containing the files supplied as input to analyzer. This updates
  |matches| of the Targets as it recurses.
  target: the Target to look for."""
  if target.match_status == MATCH_STATUS_DOESNT_MATCH:
    return False
  if target.match_status == MATCH_STATUS_MATCHES or \
     target.match_status == MATCH_STATUS_MATCHES_BY_DEPENDENCY:
    return True
  for dep in target.deps:
    if _DoesTargetDependOnMatchingTargets(dep):
      target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY
      print('\t', target.name, 'matches by dep', dep.name)
      return True
  target.match_status = MATCH_STATUS_DOESNT_MATCH
  return False


def _GetTargetsDependingOnMatchingTargets(possible_targets):
  """Returns the list of Targets in |possible_targets| that depend (either
  directly or indirectly) on at least one of the targets containing the files
  supplied as input to analyzer.
  possible_targets: targets to search from."""
  found = []
  print('Targets that matched by dependency:')
  for target in possible_targets:
    if _DoesTargetDependOnMatchingTargets(target):
      found.append(target)
  return found


def _AddCompileTargets(target, roots, add_if_no_ancestor, result):
  """Recurses through all targets that depend on |target|, adding all targets
  that need to be built (and are in |roots|) to |result|.
  roots: set of root targets.
  add_if_no_ancestor: If true and there are no ancestors of |target| then add
    |target| to |result|. |target| must still be in |roots|.
  result: targets that need to be built are added here."""
  if target.visited:
    return

  target.visited = True
  target.in_roots = target in roots

  for back_dep_target in target.back_deps:
    _AddCompileTargets(back_dep_target, roots, False, result)
    target.added_to_compile_targets |= back_dep_target.added_to_compile_targets
    target.in_roots |= back_dep_target.in_roots
    target.is_or_has_linked_ancestor |= (
        back_dep_target.is_or_has_linked_ancestor)

  # Always add 'executable' targets. Even though they may be built by other
  # targets that depend upon them it makes detection of what is going to be
  # built easier.
  # And always add static_libraries that have no dependencies on them from
  # linkables. This is necessary as the other dependencies on them may be
  # static libraries themselves, which are not compile time dependencies.
  if target.in_roots and \
     (target.is_executable or
      (not target.added_to_compile_targets and
       (add_if_no_ancestor or target.requires_build)) or
      (target.is_static_library and add_if_no_ancestor and
       not target.is_or_has_linked_ancestor)):
    print('\t\tadding to compile targets', target.name, 'executable',
          target.is_executable, 'added_to_compile_targets',
          target.added_to_compile_targets, 'add_if_no_ancestor',
          add_if_no_ancestor, 'requires_build', target.requires_build,
          'is_static_library', target.is_static_library,
          'is_or_has_linked_ancestor', target.is_or_has_linked_ancestor)
    result.add(target)
    target.added_to_compile_targets = True


def _GetCompileTargets(matching_targets, supplied_targets):
  """Returns the set of Targets that require a build.
  matching_targets: targets that changed and need to be built.
  supplied_targets: set of targets supplied to analyzer to search from."""
  result = set()
  for target in matching_targets:
    print('finding compile targets for match', target.name)
    _AddCompileTargets(target, supplied_targets, True, result)
  return result


def _WriteOutput(params, **values):
  """Writes the output, either to stdout or a file if one is specified."""
  if 'error' in values:
    print('Error:', values['error'])
  if 'status' in values:
    print(values['status'])
  if 'targets' in values:
    values['targets'].sort()
    print('Supplied targets that depend on changed files:')
    for target in values['targets']:
      print('\t', target)
  if 'invalid_targets' in values:
    values['invalid_targets'].sort()
    print('The following targets were not found:')
    for target in values['invalid_targets']:
      print('\t', target)
  if 'compile_targets' in values:
    values['compile_targets'].sort()
    print('Targets that need to be built:')
    for target in values['compile_targets']:
      print('\t', target)
  if 'test_targets' in values:
    values['test_targets'].sort()
    print('Test targets:')
    for target in values['test_targets']:
      print('\t', target)

  output_path = params.get('generator_flags', {}).get(
      'analyzer_output_path', None)
  if not output_path:
    print(json.dumps(values))
    return
  try:
    f = open(output_path, 'w')
    f.write(json.dumps(values) + '\n')
    f.close()
  except IOError as e:
    print('Error writing to output file', output_path, str(e))
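
With no analyzer_output_path generator flag, the JSON lands on stdout; a sketch of a typical values dict as serialized by _WriteOutput (target names hypothetical, status string elided):

  {"status": "...", "test_targets": ["foo_unittests"],
   "compile_targets": ["foo_lib", "foo_unittests"]}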


def _WasGypIncludeFileModified(params, files):
  """Returns true if one of the files in |files| is in the set of included
  files."""
  if params['options'].includes:
    for include in params['options'].includes:
      if _ToGypPath(os.path.normpath(include)) in files:
        print('Include file modified, assuming all changed', include)
        return True
  return False


def _NamesNotIn(names, mapping):
  """Returns a list of the values in |names| that are not in |mapping|."""
  return [name for name in names if name not in mapping]


def _LookupTargets(names, mapping):
  """Returns a list of the mapping[name] for each value in |names| that is in
  |mapping|."""
  return [mapping[name] for name in names if name in mapping]


def CalculateVariables(default_variables, params):
  """Calculate additional variables for use in the build (called by gyp)."""
  flavor = gyp.common.GetFlavor(params)
  if flavor == 'mac':
    default_variables.setdefault('OS', 'mac')
  elif flavor == 'win':
    default_variables.setdefault('OS', 'win')
    # Copy additional generator configuration data from VS, which is shared
    # by the Windows Ninja generator.
    import gyp.generator.msvs as msvs_generator
    generator_additional_non_configuration_keys = getattr(msvs_generator,
        'generator_additional_non_configuration_keys', [])
    generator_additional_path_sections = getattr(msvs_generator,
        'generator_additional_path_sections', [])

    gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
  else:
    operating_system = flavor
    if flavor == 'android':
      operating_system = 'linux'  # Keep this legacy behavior for now.
    default_variables.setdefault('OS', operating_system)


class TargetCalculator(object):
  """Calculates the matching test_targets and matching compile_targets."""
  def __init__(self, files, additional_compile_target_names, test_target_names,
               data, target_list, target_dicts, toplevel_dir, build_files):
    self._additional_compile_target_names = set(additional_compile_target_names)
    self._test_target_names = set(test_target_names)
    self._name_to_target, self._changed_targets, self._root_targets = (
        _GenerateTargets(data, target_list, target_dicts, toplevel_dir,
                         frozenset(files), build_files))
    self._unqualified_mapping, self.invalid_targets = (
        _GetUnqualifiedToTargetMapping(self._name_to_target,
                                       self._supplied_target_names_no_all()))

  def _supplied_target_names(self):
    return self._additional_compile_target_names | self._test_target_names

  def _supplied_target_names_no_all(self):
    """Returns the supplied test targets without 'all'."""
    result = self._supplied_target_names()
    result.discard('all')
    return result

  def is_build_impacted(self):
    """Returns true if the supplied files impact the build at all."""
    return self._changed_targets

  def find_matching_test_target_names(self):
    """Returns the set of output test targets."""
    assert self.is_build_impacted()
    # Find the test targets first. 'all' is special cased to mean all the
    # root targets. To deal with all the supplied |test_targets| are expanded
    # to include the root targets during lookup. If any of the root targets
    # match, we remove it and replace it with 'all'.
    test_target_names_no_all = set(self._test_target_names)
    test_target_names_no_all.discard('all')
    test_targets_no_all = _LookupTargets(test_target_names_no_all,
                                         self._unqualified_mapping)
    test_target_names_contains_all = 'all' in self._test_target_names
    if test_target_names_contains_all:
      test_targets = [x for x in (set(test_targets_no_all) |
                                  set(self._root_targets))]
    else:
      test_targets = [x for x in test_targets_no_all]
    print('supplied test_targets')
    for target_name in self._test_target_names:
      print('\t', target_name)
    print('found test_targets')
    for target in test_targets:
      print('\t', target.name)
    print('searching for matching test targets')
    matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets)
    matching_test_targets_contains_all = (test_target_names_contains_all and
                                          set(matching_test_targets) &
                                          set(self._root_targets))
    if matching_test_targets_contains_all:
      # Remove any of the targets for all that were not explicitly supplied;
      # 'all' is subsequently added to the matching names below.
      matching_test_targets = [x for x in (set(matching_test_targets) &
                                           set(test_targets_no_all))]
    print('matched test_targets')
    for target in matching_test_targets:
      print('\t', target.name)
    matching_target_names = [gyp.common.ParseQualifiedTarget(target.name)[1]
                             for target in matching_test_targets]
    if matching_test_targets_contains_all:
      matching_target_names.append('all')
      print('\tall')
    return matching_target_names

  def find_matching_compile_target_names(self):
    """Returns the set of output compile targets."""
    assert self.is_build_impacted()
    # Compile targets are found by searching up from changed targets.
    # Reset the visited status for _GetBuildTargets.
    for target in self._name_to_target.values():
      target.visited = False

    supplied_targets = _LookupTargets(self._supplied_target_names_no_all(),
                                      self._unqualified_mapping)
    if 'all' in self._supplied_target_names():
      supplied_targets = [x for x in (set(supplied_targets) |
                                      set(self._root_targets))]
    print('Supplied test_targets & compile_targets')
    for target in supplied_targets:
      print('\t', target.name)
    print('Finding compile targets')
    compile_targets = _GetCompileTargets(self._changed_targets,
                                         supplied_targets)
    return [gyp.common.ParseQualifiedTarget(target.name)[1]
            for target in compile_targets]


def GenerateOutput(target_list, target_dicts, data, params):
  """Called by gyp as the final stage. Outputs results."""
  config = Config()
  try:
    config.Init(params)

    if not config.files:
      raise Exception('Must specify files to analyze via config_path generator '
                      'flag')

    toplevel_dir = _ToGypPath(os.path.abspath(params['options'].toplevel_dir))
    if debug:
      print('toplevel_dir', toplevel_dir)

    if _WasGypIncludeFileModified(params, config.files):
      result_dict = { 'status': all_changed_string,
                      'test_targets': list(config.test_target_names),
                      'compile_targets': list(
                          config.additional_compile_target_names |
                          config.test_target_names) }
      _WriteOutput(params, **result_dict)
      return

    calculator = TargetCalculator(config.files,
                                  config.additional_compile_target_names,
                                  config.test_target_names, data,
                                  target_list, target_dicts, toplevel_dir,
                                  params['build_files'])
    if not calculator.is_build_impacted():
      result_dict = { 'status': no_dependency_string,
                      'test_targets': [],
                      'compile_targets': [] }
      if calculator.invalid_targets:
        result_dict['invalid_targets'] = calculator.invalid_targets
      _WriteOutput(params, **result_dict)
      return

    test_target_names = calculator.find_matching_test_target_names()
    compile_target_names = calculator.find_matching_compile_target_names()
    found_at_least_one_target = compile_target_names or test_target_names
    result_dict = { 'test_targets': test_target_names,
                    'status': found_dependency_string if
                        found_at_least_one_target else no_dependency_string,
                    'compile_targets': list(
                        set(compile_target_names) |
                        set(test_target_names)) }
    if calculator.invalid_targets:
      result_dict['invalid_targets'] = calculator.invalid_targets
    _WriteOutput(params, **result_dict)

  except Exception as e:
    _WriteOutput(params, error=str(e))
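
Driving the analyzer end to end is done purely through generator flags; a sketch of an invocation (file names hypothetical, assuming gyp's -G syntax for passing generator flags):

  gyp -f analyzer -G config_path=/tmp/analyzer_config.json \
      -G analyzer_output_path=/tmp/analyzer_result.json foo/foo.gyp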
1256  third_party/python/gyp/build/lib/gyp/generator/cmake.py  vendored  Normal file
File diff suppressed because it is too large
101  third_party/python/gyp/build/lib/gyp/generator/dump_dependency_json.py  vendored  Normal file
@@ -0,0 +1,101 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from __future__ import print_function

import collections
import os
import gyp
import gyp.common
import gyp.msvs_emulation
import json
import sys

generator_supports_multiple_toolsets = True

generator_wants_static_library_dependencies_adjusted = False

generator_filelist_paths = {
}

generator_default_variables = {
}
for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR',
                'LIB_DIR', 'SHARED_LIB_DIR']:
  # Some gyp steps fail if these are empty(!).
  generator_default_variables[dirname] = 'dir'
for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
               'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
               'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
               'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
               'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
               'CONFIGURATION_NAME']:
  generator_default_variables[unused] = ''


def CalculateVariables(default_variables, params):
  generator_flags = params.get('generator_flags', {})
  for key, val in generator_flags.items():
    default_variables.setdefault(key, val)
  default_variables.setdefault('OS', gyp.common.GetFlavor(params))

  flavor = gyp.common.GetFlavor(params)
  if flavor == 'win':
    # Copy additional generator configuration data from VS, which is shared
    # by the Windows Ninja generator.
    import gyp.generator.msvs as msvs_generator
    generator_additional_non_configuration_keys = getattr(msvs_generator,
        'generator_additional_non_configuration_keys', [])
    generator_additional_path_sections = getattr(msvs_generator,
        'generator_additional_path_sections', [])

    gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)


def CalculateGeneratorInputInfo(params):
  """Calculate the generator specific info that gets fed to input (called by
  gyp)."""
  generator_flags = params.get('generator_flags', {})
  if generator_flags.get('adjust_static_libraries', False):
    global generator_wants_static_library_dependencies_adjusted
    generator_wants_static_library_dependencies_adjusted = True

  toplevel = params['options'].toplevel_dir
  generator_dir = os.path.relpath(params['options'].generator_output or '.')
  # output_dir: relative path from generator_dir to the build directory.
  output_dir = generator_flags.get('output_dir', 'out')
  qualified_out_dir = os.path.normpath(os.path.join(
      toplevel, generator_dir, output_dir, 'gypfiles'))
  global generator_filelist_paths
  generator_filelist_paths = {
      'toplevel': toplevel,
      'qualified_out_dir': qualified_out_dir,
  }


def GenerateOutput(target_list, target_dicts, data, params):
  # Map of target -> list of targets it depends on.
  edges = {}

  # Queue of targets to visit.
  targets_to_visit = target_list[:]

  while len(targets_to_visit) > 0:
    target = targets_to_visit.pop()
    if target in edges:
      continue
    edges[target] = []

    for dep in target_dicts[target].get('dependencies', []):
      edges[target].append(dep)
      targets_to_visit.append(dep)

  try:
    filepath = params['generator_flags']['output_dir']
  except KeyError:
    filepath = '.'
  filename = os.path.join(filepath, 'dump.json')
  f = open(filename, 'w')
  json.dump(edges, f)
  f.close()
  print('Wrote json to %s.' % filename)
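
The resulting dump.json is a flat adjacency map from each fully qualified target to its direct dependencies, e.g. (hypothetical targets, using gyp's build_file:target#toolset naming):

  {"src/foo.gyp:foo#target": ["src/base.gyp:base#target"],
   "src/base.gyp:base#target": []}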
425  third_party/python/gyp/build/lib/gyp/generator/eclipse.py  vendored  Normal file
@@ -0,0 +1,425 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""GYP backend that generates Eclipse CDT settings files.

This backend DOES NOT generate Eclipse CDT projects. Instead, it generates XML
files that can be imported into an Eclipse CDT project. The XML file contains a
list of include paths and symbols (i.e. defines).

Because a full .cproject definition is not created by this generator, it's not
possible to properly define the include dirs and symbols for each file
individually. Instead, one set of includes/symbols is generated for the entire
project. This works fairly well (and is a vast improvement in general), but may
still result in a few indexer issues here and there.

This generator has no automated tests, so expect it to be broken.
"""

from xml.sax.saxutils import escape
import os.path
import subprocess
import gyp
import gyp.common
import gyp.msvs_emulation
import shlex
import xml.etree.cElementTree as ET

generator_wants_static_library_dependencies_adjusted = False

generator_default_variables = {
}

for dirname in ['INTERMEDIATE_DIR', 'PRODUCT_DIR', 'LIB_DIR', 'SHARED_LIB_DIR']:
  # Some gyp steps fail if these are empty(!), so we convert them to variables
  generator_default_variables[dirname] = '$' + dirname

for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
               'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
               'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
               'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
               'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
               'CONFIGURATION_NAME']:
  generator_default_variables[unused] = ''

# Include dirs will occasionally use the SHARED_INTERMEDIATE_DIR variable as
# part of the path when dealing with generated headers. This value will be
# replaced dynamically for each configuration.
generator_default_variables['SHARED_INTERMEDIATE_DIR'] = \
    '$SHARED_INTERMEDIATE_DIR'


def CalculateVariables(default_variables, params):
  generator_flags = params.get('generator_flags', {})
  for key, val in generator_flags.items():
    default_variables.setdefault(key, val)
  flavor = gyp.common.GetFlavor(params)
  default_variables.setdefault('OS', flavor)
  if flavor == 'win':
    # Copy additional generator configuration data from VS, which is shared
    # by the Eclipse generator.
    import gyp.generator.msvs as msvs_generator
    generator_additional_non_configuration_keys = getattr(msvs_generator,
        'generator_additional_non_configuration_keys', [])
    generator_additional_path_sections = getattr(msvs_generator,
        'generator_additional_path_sections', [])

    gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)


def CalculateGeneratorInputInfo(params):
  """Calculate the generator specific info that gets fed to input (called by
  gyp)."""
  generator_flags = params.get('generator_flags', {})
  if generator_flags.get('adjust_static_libraries', False):
    global generator_wants_static_library_dependencies_adjusted
    generator_wants_static_library_dependencies_adjusted = True


def GetAllIncludeDirectories(target_list, target_dicts,
                             shared_intermediate_dirs, config_name, params,
                             compiler_path):
  """Calculate the set of include directories to be used.

  Returns:
    A list including all the include_dir's specified for every target followed
    by any include directories that were added as cflag compiler options.
  """

  gyp_includes_set = set()
  compiler_includes_list = []

  # Find compiler's default include dirs.
  if compiler_path:
    command = shlex.split(compiler_path)
    command.extend(['-E', '-xc++', '-v', '-'])
    proc = subprocess.Popen(args=command, stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    output = proc.communicate()[1]
    # Extract the list of include dirs from the output, which has this format:
    #   ...
    #   #include "..." search starts here:
    #   #include <...> search starts here:
    #    /usr/include/c++/4.6
    #    /usr/local/include
    #   End of search list.
    #   ...
    in_include_list = False
    for line in output.splitlines():
      if line.startswith('#include'):
        in_include_list = True
        continue
      if line.startswith('End of search list.'):
        break
      if in_include_list:
        include_dir = line.strip()
        if include_dir not in compiler_includes_list:
          compiler_includes_list.append(include_dir)

  flavor = gyp.common.GetFlavor(params)
  if flavor == 'win':
    generator_flags = params.get('generator_flags', {})
  for target_name in target_list:
    target = target_dicts[target_name]
    if config_name in target['configurations']:
      config = target['configurations'][config_name]

      # Look for any include dirs that were explicitly added via cflags. This
      # may be done in gyp files to force certain includes to come at the end.
      # TODO(jgreenwald): Change the gyp files to not abuse cflags for this, and
      # remove this.
      if flavor == 'win':
        msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
        cflags = msvs_settings.GetCflags(config_name)
      else:
        cflags = config['cflags']
      for cflag in cflags:
        if cflag.startswith('-I'):
          include_dir = cflag[2:]
          if include_dir not in compiler_includes_list:
            compiler_includes_list.append(include_dir)

      # Find standard gyp include dirs.
      if 'include_dirs' in config:
        include_dirs = config['include_dirs']
        for shared_intermediate_dir in shared_intermediate_dirs:
          for include_dir in include_dirs:
            include_dir = include_dir.replace('$SHARED_INTERMEDIATE_DIR',
                                              shared_intermediate_dir)
            if not os.path.isabs(include_dir):
              base_dir = os.path.dirname(target_name)

              include_dir = base_dir + '/' + include_dir
              include_dir = os.path.abspath(include_dir)

            gyp_includes_set.add(include_dir)

  # Generate a list that has all the include dirs.
  all_includes_list = list(gyp_includes_set)
  all_includes_list.sort()
  for compiler_include in compiler_includes_list:
    if compiler_include not in gyp_includes_set:
      all_includes_list.append(compiler_include)

  # All done.
  return all_includes_list


def GetCompilerPath(target_list, data, options):
  """Determine a command that can be used to invoke the compiler.

  Returns:
    If this is a gyp project that has explicit make settings, try to determine
    the compiler from that. Otherwise, see if a compiler was specified via the
    CC_target environment variable.
  """
  # First, see if the compiler is configured in make's settings.
  build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
  make_global_settings_dict = data[build_file].get('make_global_settings', {})
  for key, value in make_global_settings_dict:
    if key in ['CC', 'CXX']:
      return os.path.join(options.toplevel_dir, value)

  # Check to see if the compiler was specified as an environment variable.
  for key in ['CC_target', 'CC', 'CXX']:
    compiler = os.environ.get(key)
    if compiler:
      return compiler

  return 'gcc'


def GetAllDefines(target_list, target_dicts, data, config_name, params,
                  compiler_path):
  """Calculate the defines for a project.

  Returns:
    A dict that includes explicit defines declared in gyp files along with all
    of the default defines that the compiler uses.
  """

  # Get defines declared in the gyp files.
  all_defines = {}
  flavor = gyp.common.GetFlavor(params)
  if flavor == 'win':
    generator_flags = params.get('generator_flags', {})
  for target_name in target_list:
    target = target_dicts[target_name]

    if flavor == 'win':
      msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
      extra_defines = msvs_settings.GetComputedDefines(config_name)
    else:
      extra_defines = []
    if config_name in target['configurations']:
      config = target['configurations'][config_name]
      target_defines = config['defines']
    else:
      target_defines = []
    for define in target_defines + extra_defines:
      split_define = define.split('=', 1)
      if len(split_define) == 1:
        split_define.append('1')
      if split_define[0].strip() in all_defines:
        # Already defined
        continue
      all_defines[split_define[0].strip()] = split_define[1].strip()

  # Get default compiler defines (if possible).
  if flavor == 'win':
    return all_defines  # Default defines already processed in the loop above.
  if compiler_path:
    command = shlex.split(compiler_path)
    command.extend(['-E', '-dM', '-'])
    cpp_proc = subprocess.Popen(args=command, cwd='.',
                                stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    cpp_output = cpp_proc.communicate()[0]
    cpp_lines = cpp_output.split('\n')
    for cpp_line in cpp_lines:
      if not cpp_line.strip():
        continue
      cpp_line_parts = cpp_line.split(' ', 2)
      key = cpp_line_parts[1]
      if len(cpp_line_parts) >= 3:
        val = cpp_line_parts[2]
      else:
        val = '1'
      all_defines[key] = val

  return all_defines


def WriteIncludePaths(out, eclipse_langs, include_dirs):
  """Write the includes section of a CDT settings export file."""

  out.write('  <section name="org.eclipse.cdt.internal.ui.wizards.' \
            'settingswizards.IncludePaths">\n')
  out.write('    <language name="holder for library settings"></language>\n')
  for lang in eclipse_langs:
    out.write('    <language name="%s">\n' % lang)
    for include_dir in include_dirs:
      out.write('      <includepath workspace_path="false">%s</includepath>\n' %
                include_dir)
    out.write('    </language>\n')
  out.write('  </section>\n')


def WriteMacros(out, eclipse_langs, defines):
  """Write the macros section of a CDT settings export file."""

  out.write('  <section name="org.eclipse.cdt.internal.ui.wizards.' \
            'settingswizards.Macros">\n')
  out.write('    <language name="holder for library settings"></language>\n')
  for lang in eclipse_langs:
    out.write('    <language name="%s">\n' % lang)
    for key in sorted(defines.keys()):
      out.write('      <macro><name>%s</name><value>%s</value></macro>\n' %
                (escape(key), escape(defines[key])))
    out.write('    </language>\n')
  out.write('  </section>\n')
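
Taken together, WriteIncludePaths and WriteMacros emit sections of the following shape inside the <cdtprojectproperties> document (one language shown, with a hypothetical include dir and define; the "holder for library settings" element is omitted here):

  <section name="org.eclipse.cdt.internal.ui.wizards.settingswizards.IncludePaths">
    <language name="GNU C++">
      <includepath workspace_path="false">/usr/include/c++/4.6</includepath>
    </language>
  </section>
  <section name="org.eclipse.cdt.internal.ui.wizards.settingswizards.Macros">
    <language name="GNU C++">
      <macro><name>FOO_DEFINE</name><value>1</value></macro>
    </language>
  </section>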


def GenerateOutputForConfig(target_list, target_dicts, data, params,
                            config_name):
  options = params['options']
  generator_flags = params.get('generator_flags', {})

  # build_dir: relative path from source root to our output files.
  # e.g. "out/Debug"
  build_dir = os.path.join(generator_flags.get('output_dir', 'out'),
                           config_name)

  toplevel_build = os.path.join(options.toplevel_dir, build_dir)
  # Ninja uses out/Debug/gen while make uses out/Debug/obj/gen as the
  # SHARED_INTERMEDIATE_DIR. Include both possible locations.
  shared_intermediate_dirs = [os.path.join(toplevel_build, 'obj', 'gen'),
                              os.path.join(toplevel_build, 'gen')]

  GenerateCdtSettingsFile(target_list,
                          target_dicts,
                          data,
                          params,
                          config_name,
                          os.path.join(toplevel_build,
                                       'eclipse-cdt-settings.xml'),
                          options,
                          shared_intermediate_dirs)
  GenerateClasspathFile(target_list,
                        target_dicts,
                        options.toplevel_dir,
                        toplevel_build,
                        os.path.join(toplevel_build,
                                     'eclipse-classpath.xml'))


def GenerateCdtSettingsFile(target_list, target_dicts, data, params,
                            config_name, out_name, options,
                            shared_intermediate_dirs):
  gyp.common.EnsureDirExists(out_name)
  with open(out_name, 'w') as out:
    out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
    out.write('<cdtprojectproperties>\n')

    eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File',
                     'GNU C++', 'GNU C', 'Assembly']
    compiler_path = GetCompilerPath(target_list, data, options)
    include_dirs = GetAllIncludeDirectories(target_list, target_dicts,
                                            shared_intermediate_dirs,
                                            config_name, params, compiler_path)
    WriteIncludePaths(out, eclipse_langs, include_dirs)
    defines = GetAllDefines(target_list, target_dicts, data, config_name,
                            params, compiler_path)
    WriteMacros(out, eclipse_langs, defines)

    out.write('</cdtprojectproperties>\n')


def GenerateClasspathFile(target_list, target_dicts, toplevel_dir,
                          toplevel_build, out_name):
  '''Generates a classpath file suitable for symbol navigation and code
  completion of Java code (such as in Android projects) by finding all
  .java and .jar files used as action inputs.'''
  gyp.common.EnsureDirExists(out_name)
  result = ET.Element('classpath')

  def AddElements(kind, paths):
    # First, we need to normalize the paths so they are all relative to the
    # toplevel dir.
    rel_paths = set()
    for path in paths:
      if os.path.isabs(path):
        rel_paths.add(os.path.relpath(path, toplevel_dir))
      else:
        rel_paths.add(path)

    for path in sorted(rel_paths):
      entry_element = ET.SubElement(result, 'classpathentry')
      entry_element.set('kind', kind)
      entry_element.set('path', path)

  AddElements('lib', GetJavaJars(target_list, target_dicts, toplevel_dir))
  AddElements('src', GetJavaSourceDirs(target_list, target_dicts, toplevel_dir))
  # Include the standard JRE container and a dummy out folder
  AddElements('con', ['org.eclipse.jdt.launching.JRE_CONTAINER'])
  # Include a dummy out folder so that Eclipse doesn't use the default /bin
  # folder in the root of the project.
  AddElements('output', [os.path.join(toplevel_build, '.eclipse-java-build')])

  ET.ElementTree(result).write(out_name)


def GetJavaJars(target_list, target_dicts, toplevel_dir):
  '''Generates a sequence of all .jars used as inputs.'''
  for target_name in target_list:
    target = target_dicts[target_name]
    for action in target.get('actions', []):
      for input_ in action['inputs']:
        if os.path.splitext(input_)[1] == '.jar' and not input_.startswith('$'):
          if os.path.isabs(input_):
            yield input_
          else:
            yield os.path.join(os.path.dirname(target_name), input_)


def GetJavaSourceDirs(target_list, target_dicts, toplevel_dir):
  '''Generates a sequence of all likely java package root directories.'''
  for target_name in target_list:
    target = target_dicts[target_name]
    for action in target.get('actions', []):
      for input_ in action['inputs']:
        if (os.path.splitext(input_)[1] == '.java' and
            not input_.startswith('$')):
          dir_ = os.path.dirname(os.path.join(os.path.dirname(target_name),
                                              input_))
          # If there is a parent 'src' or 'java' folder, navigate up to it -
          # these are canonical package root names in Chromium. This will
          # break if 'src' or 'java' exists in the package structure. This
          # could be further improved by inspecting the java file for the
          # package name if this proves to be too fragile in practice.
          parent_search = dir_
          while os.path.basename(parent_search) not in ['src', 'java']:
            parent_search, _ = os.path.split(parent_search)
            if not parent_search or parent_search == toplevel_dir:
              # Didn't find a known root, just return the original path
              yield dir_
              break
          else:
            yield parent_search


def GenerateOutput(target_list, target_dicts, data, params):
  """Generate an XML settings file that can be imported into a CDT project."""

  if params['options'].generator_output:
    raise NotImplementedError("--generator_output not implemented for eclipse")

  user_config = params.get('generator_flags', {}).get('config', None)
  if user_config:
    GenerateOutputForConfig(target_list, target_dicts, data, params,
                            user_config)
  else:
    config_names = target_dicts[target_list[0]]['configurations']
    for config_name in config_names:
      GenerateOutputForConfig(target_list, target_dicts, data, params,
                              config_name)
94  third_party/python/gyp/build/lib/gyp/generator/gypd.py  vendored  Normal file
@@ -0,0 +1,94 @@
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""gypd output module

This module produces gyp input as its output.  Output files are given the
.gypd extension to avoid overwriting the .gyp files that they are generated
from.  Internal references to .gyp files (such as those found in
"dependencies" sections) are not adjusted to point to .gypd files instead;
unlike other paths, which are relative to the .gyp or .gypd file, such paths
are relative to the directory from which gyp was run to create the .gypd file.

This generator module is intended to be a sample and a debugging aid, hence
the "d" for "debug" in .gypd.  It is useful to inspect the results of the
various merges, expansions, and conditional evaluations performed by gyp
and to see a representation of what would be fed to a generator module.

It's not advisable to rename .gypd files produced by this module to .gyp,
because they will have all merges, expansions, and evaluations already
performed and the relevant constructs not present in the output; paths to
dependencies may be wrong; and various sections that do not belong in .gyp
files such as "included_files" and "*_excluded" will be present.
Output will also be stripped of comments.  This is not intended to be a
general-purpose gyp pretty-printer; for that, you probably just want to
run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip
comments but won't do all of the other things done to this module's output.

The specific formatting of the output generated by this module is subject
to change.
"""


import gyp.common
import errno
import os
import pprint


# These variables should just be spit back out as variable references.
_generator_identity_variables = [
  'CONFIGURATION_NAME',
  'EXECUTABLE_PREFIX',
  'EXECUTABLE_SUFFIX',
  'INTERMEDIATE_DIR',
  'LIB_DIR',
  'PRODUCT_DIR',
  'RULE_INPUT_ROOT',
  'RULE_INPUT_DIRNAME',
  'RULE_INPUT_EXT',
  'RULE_INPUT_NAME',
  'RULE_INPUT_PATH',
  'SHARED_INTERMEDIATE_DIR',
  'SHARED_LIB_DIR',
  'SHARED_LIB_PREFIX',
  'SHARED_LIB_SUFFIX',
  'STATIC_LIB_PREFIX',
  'STATIC_LIB_SUFFIX',
]

# gypd doesn't define a default value for OS like many other generator
# modules.  Specify "-D OS=whatever" on the command line to provide a value.
generator_default_variables = {
}

# gypd supports multiple toolsets
generator_supports_multiple_toolsets = True

# TODO(mark): This always uses <, which isn't right.  The input module should
# notify the generator to tell it which phase it is operating in, and this
# module should use < for the early phase and then switch to > for the late
# phase.  Bonus points for carrying @ back into the output too.
for v in _generator_identity_variables:
  generator_default_variables[v] = '<(%s)' % v


def GenerateOutput(target_list, target_dicts, data, params):
  output_files = {}
  for qualified_target in target_list:
    [input_file, target] = \
        gyp.common.ParseQualifiedTarget(qualified_target)[0:2]

    if input_file[-4:] != '.gyp':
      continue
    input_file_stem = input_file[:-4]
    output_file = input_file_stem + params['options'].suffix + '.gypd'

    if output_file not in output_files:
      output_files[output_file] = input_file

  for output_file, input_file in output_files.items():
    output = open(output_file, 'w')
    pprint.pprint(data[input_file], output)
    output.close()
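
As the module docstring notes, gypd supplies no default OS, so a run has to pass one; a sketch (build file name hypothetical):

  gyp -f gypd -D OS=linux foo.gyp

which leaves the fully merged and expanded dict next to the input as foo.gypd.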
56  third_party/python/gyp/build/lib/gyp/generator/gypsh.py  vendored  Normal file
@@ -0,0 +1,56 @@
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""gypsh output module

gypsh is a GYP shell.  It's not really a generator per se.  All it does is
fire up an interactive Python session with a few local variables set to the
variables passed to the generator.  Like gypd, it's intended as a debugging
aid, to facilitate the exploration of .gyp structures after being processed
by the input module.

The expected usage is "gyp -f gypsh -D OS=desired_os".
"""


import code
import sys


# All of this stuff about generator variables was lovingly ripped from gypd.py.
# That module has a much better description of what's going on and why.
_generator_identity_variables = [
  'EXECUTABLE_PREFIX',
  'EXECUTABLE_SUFFIX',
  'INTERMEDIATE_DIR',
  'PRODUCT_DIR',
  'RULE_INPUT_ROOT',
  'RULE_INPUT_DIRNAME',
  'RULE_INPUT_EXT',
  'RULE_INPUT_NAME',
  'RULE_INPUT_PATH',
  'SHARED_INTERMEDIATE_DIR',
]

generator_default_variables = {
}

for v in _generator_identity_variables:
  generator_default_variables[v] = '<(%s)' % v


def GenerateOutput(target_list, target_dicts, data, params):
  locals = {
      'target_list': target_list,
      'target_dicts': target_dicts,
      'data': data,
  }

  # Use a banner that looks like the stock Python one and like what
  # code.interact uses by default, but tack on something to indicate what
  # locals are available, and identify gypsh.
  banner = 'Python %s on %s\nlocals.keys() = %s\ngypsh' % \
           (sys.version, sys.platform, repr(sorted(locals.keys())))

  code.interact(banner, local=locals)
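
Inside the resulting shell the three injected locals can be poked at directly; a session might start with (the qualified target name shown is hypothetical and depends on the .gyp files given):

  >>> target_list[0]
  'foo/foo.gyp:foo#target'
  >>> sorted(target_dicts[target_list[0]].keys())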
2260  third_party/python/gyp/build/lib/gyp/generator/make.py  vendored  Normal file
File diff suppressed because it is too large
3537  third_party/python/gyp/build/lib/gyp/generator/msvs.py  vendored  Normal file
File diff suppressed because it is too large
40  third_party/python/gyp/build/lib/gyp/generator/msvs_test.py  vendored  Normal file
@@ -0,0 +1,40 @@
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

""" Unit tests for the msvs.py file. """

import gyp.generator.msvs as msvs
import unittest
try:
  from StringIO import StringIO
except ImportError:
  from io import StringIO


class TestSequenceFunctions(unittest.TestCase):

  def setUp(self):
    self.stderr = StringIO()

  def test_GetLibraries(self):
    self.assertEqual(
      msvs._GetLibraries({}),
      [])
    self.assertEqual(
      msvs._GetLibraries({'libraries': []}),
      [])
    self.assertEqual(
      msvs._GetLibraries({'other': 'foo', 'libraries': ['a.lib']}),
      ['a.lib'])
    self.assertEqual(
      msvs._GetLibraries({'libraries': ['-la']}),
      ['a.lib'])
    self.assertEqual(
      msvs._GetLibraries({'libraries': ['a.lib', 'b.lib', 'c.lib', '-lb.lib',
                                        '-lb.lib', 'd.lib', 'a.lib']}),
      ['c.lib', 'b.lib', 'd.lib', 'a.lib'])

if __name__ == '__main__':
  unittest.main()
2500  third_party/python/gyp/build/lib/gyp/generator/ninja.py  vendored  Normal file
File diff suppressed because it is too large
46  third_party/python/gyp/build/lib/gyp/generator/ninja_test.py  vendored  Normal file
@@ -0,0 +1,46 @@
#!/usr/bin/env python
|
||||||
|
|
||||||
|
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
""" Unit tests for the ninja.py file. """
|
||||||
|
|
||||||
|
import gyp.generator.ninja as ninja
|
||||||
|
import unittest
|
||||||
|
import sys
|
||||||
|
import TestCommon
|
||||||
|
|
||||||
|
|
||||||
|
class TestPrefixesAndSuffixes(unittest.TestCase):
|
||||||
|
def test_BinaryNamesWindows(self):
|
||||||
|
# These cannot run on non-Windows as they require a VS installation to
|
||||||
|
# correctly handle variable expansion.
|
||||||
|
if sys.platform.startswith('win'):
|
||||||
|
writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
|
||||||
|
'build.ninja', 'win')
|
||||||
|
spec = { 'target_name': 'wee' }
|
||||||
|
self.assertTrue(writer.ComputeOutputFileName(spec, 'executable').
|
||||||
|
endswith('.exe'))
|
||||||
|
self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
|
||||||
|
endswith('.dll'))
|
||||||
|
self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
|
||||||
|
endswith('.lib'))
|
||||||
|
|
||||||
|
def test_BinaryNamesLinux(self):
|
||||||
|
writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
|
||||||
|
'build.ninja', 'linux')
|
||||||
|
spec = { 'target_name': 'wee' }
|
||||||
|
self.assertTrue('.' not in writer.ComputeOutputFileName(spec,
|
||||||
|
'executable'))
|
||||||
|
self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
|
||||||
|
startswith('lib'))
|
||||||
|
self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
|
||||||
|
startswith('lib'))
|
||||||
|
self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
|
||||||
|
endswith('.so'))
|
||||||
|
self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
|
||||||
|
endswith('.a'))
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
unittest.main()
|
||||||
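Note: the two tests above encode the per-platform naming convention that NinjaWriter.ComputeOutputFileName is expected to follow. Restated as a small illustrative table (assumed shape only, not the writer's real logic):

PREFIXES = {'linux': {'shared_library': 'lib', 'static_library': 'lib'}}
SUFFIXES = {
  'win':   {'executable': '.exe', 'shared_library': '.dll',
            'static_library': '.lib'},
  'linux': {'executable': '',     'shared_library': '.so',
            'static_library': '.a'},
}

def expected_output_name(target, type_, flavor):
  # Prefix and suffix depend on (flavor, target type); the target name is
  # passed through unchanged.
  prefix = PREFIXES.get(flavor, {}).get(type_, '')
  return prefix + target + SUFFIXES[flavor][type_]

assert expected_output_name('wee', 'executable', 'win') == 'wee.exe'
assert expected_output_name('wee', 'shared_library', 'linux') == 'libwee.so'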
1302 third_party/python/gyp/build/lib/gyp/generator/xcode.py vendored Normal file
File diff suppressed because it is too large
23 third_party/python/gyp/build/lib/gyp/generator/xcode_test.py vendored Normal file
@@ -0,0 +1,23 @@
#!/usr/bin/env python

# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

""" Unit tests for the xcode.py file. """

import gyp.generator.xcode as xcode
import unittest
import sys


class TestEscapeXcodeDefine(unittest.TestCase):
  if sys.platform == 'darwin':
    def test_InheritedRemainsUnescaped(self):
      self.assertEqual(xcode.EscapeXcodeDefine('$(inherited)'), '$(inherited)')

    def test_Escaping(self):
      self.assertEqual(xcode.EscapeXcodeDefine('a b"c\\'), 'a\\ b\\"c\\\\')

if __name__ == '__main__':
  unittest.main()
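Note: both cases above are consistent with a single substitution rule. A hedged restatement of it (sketch only, EscapeXcodeDefine's real implementation is in xcode.py, which is not shown here):

import re

def escape_xcode_define_sketch(value):
  # Backslash-escape backslashes, quotes and spaces; '$', '(' and ')' are
  # left alone, which is why '$(inherited)' passes through unchanged.
  return re.sub(r'([\\\"\' ])', r'\\\1', value)

assert escape_xcode_define_sketch('$(inherited)') == '$(inherited)'
assert escape_xcode_define_sketch('a b"c\\') == 'a\\ b\\"c\\\\'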
2908 third_party/python/gyp/build/lib/gyp/input.py vendored Normal file
File diff suppressed because it is too large
90 third_party/python/gyp/build/lib/gyp/input_test.py vendored Normal file
@@ -0,0 +1,90 @@
#!/usr/bin/env python

# Copyright 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Unit tests for the input.py file."""

import gyp.input
import unittest
import sys


class TestFindCycles(unittest.TestCase):
  def setUp(self):
    self.nodes = {}
    for x in ('a', 'b', 'c', 'd', 'e'):
      self.nodes[x] = gyp.input.DependencyGraphNode(x)

  def _create_dependency(self, dependent, dependency):
    dependent.dependencies.append(dependency)
    dependency.dependents.append(dependent)

  def test_no_cycle_empty_graph(self):
    for label, node in self.nodes.items():
      self.assertEquals([], node.FindCycles())

  def test_no_cycle_line(self):
    self._create_dependency(self.nodes['a'], self.nodes['b'])
    self._create_dependency(self.nodes['b'], self.nodes['c'])
    self._create_dependency(self.nodes['c'], self.nodes['d'])

    for label, node in self.nodes.items():
      self.assertEquals([], node.FindCycles())

  def test_no_cycle_dag(self):
    self._create_dependency(self.nodes['a'], self.nodes['b'])
    self._create_dependency(self.nodes['a'], self.nodes['c'])
    self._create_dependency(self.nodes['b'], self.nodes['c'])

    for label, node in self.nodes.items():
      self.assertEquals([], node.FindCycles())

  def test_cycle_self_reference(self):
    self._create_dependency(self.nodes['a'], self.nodes['a'])

    self.assertEquals([[self.nodes['a'], self.nodes['a']]],
                      self.nodes['a'].FindCycles())

  def test_cycle_two_nodes(self):
    self._create_dependency(self.nodes['a'], self.nodes['b'])
    self._create_dependency(self.nodes['b'], self.nodes['a'])

    self.assertEquals([[self.nodes['a'], self.nodes['b'], self.nodes['a']]],
                      self.nodes['a'].FindCycles())
    self.assertEquals([[self.nodes['b'], self.nodes['a'], self.nodes['b']]],
                      self.nodes['b'].FindCycles())

  def test_two_cycles(self):
    self._create_dependency(self.nodes['a'], self.nodes['b'])
    self._create_dependency(self.nodes['b'], self.nodes['a'])

    self._create_dependency(self.nodes['b'], self.nodes['c'])
    self._create_dependency(self.nodes['c'], self.nodes['b'])

    cycles = self.nodes['a'].FindCycles()
    self.assertTrue(
       [self.nodes['a'], self.nodes['b'], self.nodes['a']] in cycles)
    self.assertTrue(
       [self.nodes['b'], self.nodes['c'], self.nodes['b']] in cycles)
    self.assertEquals(2, len(cycles))

  def test_big_cycle(self):
    self._create_dependency(self.nodes['a'], self.nodes['b'])
    self._create_dependency(self.nodes['b'], self.nodes['c'])
    self._create_dependency(self.nodes['c'], self.nodes['d'])
    self._create_dependency(self.nodes['d'], self.nodes['e'])
    self._create_dependency(self.nodes['e'], self.nodes['a'])

    self.assertEquals([[self.nodes['a'],
                        self.nodes['b'],
                        self.nodes['c'],
                        self.nodes['d'],
                        self.nodes['e'],
                        self.nodes['a']]],
                      self.nodes['a'].FindCycles())


if __name__ == '__main__':
  unittest.main()
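Note: taken together, the cases above (in particular test_two_cycles, where FindCycles() on 'a' also reports the b-c-b cycle) imply that FindCycles returns every cycle reachable from the node, each as a path whose first and last elements coincide. A small standalone DFS that satisfies the same contract (hypothetical helper, not gyp.input's implementation):

def find_cycles_sketch(start, get_deps):
  cycles = []

  def visit(path):
    for dep in get_deps(path[-1]):
      if dep in path:
        # Close the loop at the first earlier occurrence of dep.
        cycles.append(path[path.index(dep):] + [dep])
      else:
        visit(path + [dep])

  visit([start])
  return cycles

graph = {'a': ['b'], 'b': ['a', 'c'], 'c': ['b']}
assert find_cycles_sketch('a', lambda n: graph[n]) == \
    [['a', 'b', 'a'], ['b', 'c', 'b']]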
721 third_party/python/gyp/build/lib/gyp/mac_tool.py vendored Normal file
@@ -0,0 +1,721 @@
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Utility functions to perform Xcode-style build steps.

These functions are executed via gyp-mac-tool when using the Makefile generator.
"""

from __future__ import print_function

import fcntl
import fnmatch
import glob
import json
import os
import plistlib
import re
import shutil
import struct
import subprocess
import sys
import tempfile


def main(args):
  executor = MacTool()
  exit_code = executor.Dispatch(args)
  if exit_code is not None:
    sys.exit(exit_code)


class MacTool(object):
  """This class performs all the Mac tooling steps. The methods can either be
  executed directly, or dispatched from an argument list."""

  def Dispatch(self, args):
    """Dispatches a string command to a method."""
    if len(args) < 1:
      raise Exception("Not enough arguments")

    method = "Exec%s" % self._CommandifyName(args[0])
    return getattr(self, method)(*args[1:])

  def _CommandifyName(self, name_string):
    """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
    return name_string.title().replace('-', '')

  def ExecCopyBundleResource(self, source, dest, convert_to_binary):
    """Copies a resource file to the bundle/Resources directory, performing any
    necessary compilation on each resource."""
    convert_to_binary = convert_to_binary == 'True'
    extension = os.path.splitext(source)[1].lower()
    if os.path.isdir(source):
      # Copy tree.
      # TODO(thakis): This copies file attributes like mtime, while the
      # single-file branch below doesn't. This should probably be changed to
      # be consistent with the single-file branch.
      if os.path.exists(dest):
        shutil.rmtree(dest)
      shutil.copytree(source, dest)
    elif extension == '.xib':
      return self._CopyXIBFile(source, dest)
    elif extension == '.storyboard':
      return self._CopyXIBFile(source, dest)
    elif extension == '.strings' and not convert_to_binary:
      self._CopyStringsFile(source, dest)
    else:
      if os.path.exists(dest):
        os.unlink(dest)
      shutil.copy(source, dest)

    if convert_to_binary and extension in ('.plist', '.strings'):
      self._ConvertToBinary(dest)

  def _CopyXIBFile(self, source, dest):
    """Compiles a XIB file with ibtool into a binary plist in the bundle."""

    # ibtool sometimes crashes with relative paths. See crbug.com/314728.
    base = os.path.dirname(os.path.realpath(__file__))
    if os.path.relpath(source):
      source = os.path.join(base, source)
    if os.path.relpath(dest):
      dest = os.path.join(base, dest)

    args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices']

    if os.environ['XCODE_VERSION_ACTUAL'] > '0700':
      args.extend(['--auto-activate-custom-fonts'])
      if 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ:
        args.extend([
            '--target-device', 'iphone', '--target-device', 'ipad',
            '--minimum-deployment-target',
            os.environ['IPHONEOS_DEPLOYMENT_TARGET'],
        ])
      else:
        args.extend([
            '--target-device', 'mac',
            '--minimum-deployment-target',
            os.environ['MACOSX_DEPLOYMENT_TARGET'],
        ])

    args.extend(['--output-format', 'human-readable-text', '--compile', dest,
                 source])

    ibtool_section_re = re.compile(r'/\*.*\*/')
    ibtool_re = re.compile(r'.*note:.*is clipping its content')
    try:
      stdout = subprocess.check_output(args)
    except subprocess.CalledProcessError as e:
      print(e.output)
      raise
    current_section_header = None
    for line in stdout.splitlines():
      line_decoded = line.decode('utf-8')
      if ibtool_section_re.match(line_decoded):
        current_section_header = line_decoded
      elif not ibtool_re.match(line_decoded):
        if current_section_header:
          print(current_section_header)
          current_section_header = None
        print(line_decoded)
    return 0

  def _ConvertToBinary(self, dest):
    subprocess.check_call([
        'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])

  def _CopyStringsFile(self, source, dest):
    """Copies a .strings file using iconv to reconvert the input into UTF-16."""
    input_code = self._DetectInputEncoding(source) or "UTF-8"

    # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
    # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
    #     CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
    #     semicolon in dictionary.
    # on invalid files. Do the same kind of validation.
    import CoreFoundation
    s = open(source, 'rb').read()
    d = CoreFoundation.CFDataCreate(None, s, len(s))
    _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
    if error:
      return

    fp = open(dest, 'wb')
    fp.write(s.decode(input_code).encode('UTF-16'))
    fp.close()

  def _DetectInputEncoding(self, file_name):
    """Reads the first few bytes from file_name and tries to guess the text
    encoding. Returns None as a guess if it can't detect it."""
    fp = open(file_name, 'rb')
    try:
      header = fp.read(3)
    except:
      fp.close()
      return None
    fp.close()
    if header.startswith(b"\xFE\xFF"):
      return "UTF-16"
    elif header.startswith(b"\xFF\xFE"):
      return "UTF-16"
    elif header.startswith(b"\xEF\xBB\xBF"):
      return "UTF-8"
    else:
      return None

  def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
    """Copies the |source| Info.plist to the destination directory |dest|."""
    # Read the source Info.plist into memory.
    fd = open(source, 'r')
    lines = fd.read()
    fd.close()

    # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
    plist = plistlib.readPlistFromString(lines)
    if keys:
      plist.update(json.loads(keys[0]))
    lines = plistlib.writePlistToString(plist)

    # Go through all the environment variables and replace them as variables in
    # the file.
    IDENT_RE = re.compile(r'[_/\s]')
    for key in os.environ:
      if key.startswith('_'):
        continue
      evar = '${%s}' % key
      evalue = os.environ[key]
      lines = lines.replace(evar, evalue)

      # Xcode supports various suffices on environment variables, which are
      # all undocumented. :rfc1034identifier is used in the standard project
      # template these days, and :identifier was used earlier. They are used to
      # convert non-url characters into things that look like valid urls --
      # except that the replacement character for :identifier, '_' isn't valid
      # in a URL either -- oops, hence :rfc1034identifier was born.
      evar = '${%s:identifier}' % key
      evalue = IDENT_RE.sub('_', os.environ[key])
      lines = lines.replace(evar, evalue)

      evar = '${%s:rfc1034identifier}' % key
      evalue = IDENT_RE.sub('-', os.environ[key])
      lines = lines.replace(evar, evalue)

    # Remove any keys with values that haven't been replaced.
    lines = lines.split('\n')
    for i in range(len(lines)):
      if lines[i].strip().startswith("<string>${"):
        lines[i] = None
        lines[i - 1] = None
    lines = '\n'.join(filter(lambda x: x is not None, lines))

    # Write out the file with variables replaced.
    fd = open(dest, 'w')
    fd.write(lines)
    fd.close()

    # Now write out PkgInfo file now that the Info.plist file has been
    # "compiled".
    self._WritePkgInfo(dest)

    if convert_to_binary == 'True':
      self._ConvertToBinary(dest)

  def _WritePkgInfo(self, info_plist):
    """This writes the PkgInfo file from the data stored in Info.plist."""
    plist = plistlib.readPlist(info_plist)
    if not plist:
      return

    # Only create PkgInfo for executable types.
    package_type = plist['CFBundlePackageType']
    if package_type != 'APPL':
      return

    # The format of PkgInfo is eight characters, representing the bundle type
    # and bundle signature, each four characters. If that is missing, four
    # '?' characters are used instead.
    signature_code = plist.get('CFBundleSignature', '????')
    if len(signature_code) != 4:  # Wrong length resets everything, too.
      signature_code = '?' * 4

    dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
    fp = open(dest, 'w')
    fp.write('%s%s' % (package_type, signature_code))
    fp.close()

  def ExecFlock(self, lockfile, *cmd_list):
    """Emulates the most basic behavior of Linux's flock(1)."""
    # Rely on exception handling to report errors.
    fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
    fcntl.flock(fd, fcntl.LOCK_EX)
    return subprocess.call(cmd_list)

  def ExecFilterLibtool(self, *cmd_list):
    """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
    symbols'."""
    libtool_re = re.compile(r'^.*libtool: (?:for architecture: \S* )?'
                            r'file: .* has no symbols$')
    libtool_re5 = re.compile(
        r'^.*libtool: warning for library: ' +
        r'.* the table of contents is empty ' +
        r'\(no object file members in the library define global symbols\)$')
    env = os.environ.copy()
    # Ref:
    # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
    # The problem with this flag is that it resets the file mtime on the file to
    # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
    env['ZERO_AR_DATE'] = '1'
    libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
    _, err = libtoolout.communicate()
    for line in err.splitlines():
      line_decoded = line.decode('utf-8')
      if not libtool_re.match(line_decoded) and not libtool_re5.match(line_decoded):
        print(line_decoded, file=sys.stderr)
    # Unconditionally touch the output .a file on the command line if present
    # and the command succeeded. A bit hacky.
    if not libtoolout.returncode:
      for i in range(len(cmd_list) - 1):
        if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
          os.utime(cmd_list[i+1], None)
          break
    return libtoolout.returncode

  def ExecPackageIosFramework(self, framework):
    # Find the name of the binary based on the part before the ".framework".
    binary = os.path.basename(framework).split('.')[0]
    module_path = os.path.join(framework, 'Modules');
    if not os.path.exists(module_path):
      os.mkdir(module_path)
    module_template = 'framework module %s {\n' \
                      '  umbrella header "%s.h"\n' \
                      '\n' \
                      '  export *\n' \
                      '  module * { export * }\n' \
                      '}\n' % (binary, binary)

    module_file = open(os.path.join(module_path, 'module.modulemap'), "w")
    module_file.write(module_template)
    module_file.close()

  def ExecPackageFramework(self, framework, version):
    """Takes a path to Something.framework and the Current version of that and
    sets up all the symlinks."""
    # Find the name of the binary based on the part before the ".framework".
    binary = os.path.basename(framework).split('.')[0]

    CURRENT = 'Current'
    RESOURCES = 'Resources'
    VERSIONS = 'Versions'

    if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
      # Binary-less frameworks don't seem to contain symlinks (see e.g.
      # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
      return

    # Move into the framework directory to set the symlinks correctly.
    pwd = os.getcwd()
    os.chdir(framework)

    # Set up the Current version.
    self._Relink(version, os.path.join(VERSIONS, CURRENT))

    # Set up the root symlinks.
    self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
    self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)

    # Back to where we were before!
    os.chdir(pwd)

  def _Relink(self, dest, link):
    """Creates a symlink to |dest| named |link|. If |link| already exists,
    it is overwritten."""
    if os.path.lexists(link):
      os.remove(link)
    os.symlink(dest, link)

  def ExecCompileIosFrameworkHeaderMap(self, out, framework, *all_headers):
    framework_name = os.path.basename(framework).split('.')[0]
    all_headers = map(os.path.abspath, all_headers)
    filelist = {}
    for header in all_headers:
      filename = os.path.basename(header)
      filelist[filename] = header
      filelist[os.path.join(framework_name, filename)] = header
    WriteHmap(out, filelist)

  def ExecCopyIosFrameworkHeaders(self, framework, *copy_headers):
    header_path = os.path.join(framework, 'Headers');
    if not os.path.exists(header_path):
      os.makedirs(header_path)
    for header in copy_headers:
      shutil.copy(header, os.path.join(header_path, os.path.basename(header)))

  def ExecCompileXcassets(self, keys, *inputs):
    """Compiles multiple .xcassets files into a single .car file.

    This invokes 'actool' to compile all the inputs .xcassets files. The
    |keys| arguments is a json-encoded dictionary of extra arguments to
    pass to 'actool' when the asset catalogs contains an application icon
    or a launch image.

    Note that 'actool' does not create the Assets.car file if the asset
    catalogs does not contains imageset.
    """
    command_line = [
        'xcrun', 'actool', '--output-format', 'human-readable-text',
        '--compress-pngs', '--notices', '--warnings', '--errors',
    ]
    is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
    if is_iphone_target:
      platform = os.environ['CONFIGURATION'].split('-')[-1]
      if platform not in ('iphoneos', 'iphonesimulator'):
        platform = 'iphonesimulator'
      command_line.extend([
          '--platform', platform, '--target-device', 'iphone',
          '--target-device', 'ipad', '--minimum-deployment-target',
          os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
          os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
      ])
    else:
      command_line.extend([
          '--platform', 'macosx', '--target-device', 'mac',
          '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
          '--compile',
          os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
      ])
    if keys:
      keys = json.loads(keys)
      for key, value in keys.items():
        arg_name = '--' + key
        if isinstance(value, bool):
          if value:
            command_line.append(arg_name)
        elif isinstance(value, list):
          for v in value:
            command_line.append(arg_name)
            command_line.append(str(v))
        else:
          command_line.append(arg_name)
          command_line.append(str(value))
    # Note: actool crashes if inputs path are relative, so use os.path.abspath
    # to get absolute path name for inputs.
    command_line.extend(map(os.path.abspath, inputs))
    subprocess.check_call(command_line)

  def ExecMergeInfoPlist(self, output, *inputs):
    """Merge multiple .plist files into a single .plist file."""
    merged_plist = {}
    for path in inputs:
      plist = self._LoadPlistMaybeBinary(path)
      self._MergePlist(merged_plist, plist)
    plistlib.writePlist(merged_plist, output)

  def ExecCodeSignBundle(self, key, entitlements, provisioning, path, preserve):
    """Code sign a bundle.

    This function tries to code sign an iOS bundle, following the same
    algorithm as Xcode:
      1. pick the provisioning profile that best match the bundle identifier,
         and copy it into the bundle as embedded.mobileprovision,
      2. copy Entitlements.plist from user or SDK next to the bundle,
      3. code sign the bundle.
    """
    substitutions, overrides = self._InstallProvisioningProfile(
        provisioning, self._GetCFBundleIdentifier())
    entitlements_path = self._InstallEntitlements(
        entitlements, substitutions, overrides)

    args = ['codesign', '--force', '--sign', key]
    if preserve == 'True':
      args.extend(['--deep', '--preserve-metadata=identifier,entitlements'])
    else:
      args.extend(['--entitlements', entitlements_path])
    args.extend(['--timestamp=none', path])
    subprocess.check_call(args)

  def _InstallProvisioningProfile(self, profile, bundle_identifier):
    """Installs embedded.mobileprovision into the bundle.

    Args:
      profile: string, optional, short name of the .mobileprovision file
        to use, if empty or the file is missing, the best file installed
        will be used
      bundle_identifier: string, value of CFBundleIdentifier from Info.plist

    Returns:
      A tuple containing two dictionary: variables substitutions and values
      to overrides when generating the entitlements file.
    """
    source_path, provisioning_data, team_id = self._FindProvisioningProfile(
        profile, bundle_identifier)
    target_path = os.path.join(
        os.environ['BUILT_PRODUCTS_DIR'],
        os.environ['CONTENTS_FOLDER_PATH'],
        'embedded.mobileprovision')
    shutil.copy2(source_path, target_path)
    substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
    return substitutions, provisioning_data['Entitlements']

  def _FindProvisioningProfile(self, profile, bundle_identifier):
    """Finds the .mobileprovision file to use for signing the bundle.

    Checks all the installed provisioning profiles (or if the user specified
    the PROVISIONING_PROFILE variable, only consult it) and select the most
    specific that correspond to the bundle identifier.

    Args:
      profile: string, optional, short name of the .mobileprovision file
        to use, if empty or the file is missing, the best file installed
        will be used
      bundle_identifier: string, value of CFBundleIdentifier from Info.plist

    Returns:
      A tuple of the path to the selected provisioning profile, the data of
      the embedded plist in the provisioning profile and the team identifier
      to use for code signing.

    Raises:
      SystemExit: if no .mobileprovision can be used to sign the bundle.
    """
    profiles_dir = os.path.join(
        os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
    if not os.path.isdir(profiles_dir):
      print((
          'cannot find mobile provisioning for %s' % bundle_identifier),
          file=sys.stderr)
      sys.exit(1)
    provisioning_profiles = None
    if profile:
      profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
      if os.path.exists(profile_path):
        provisioning_profiles = [profile_path]
    if not provisioning_profiles:
      provisioning_profiles = glob.glob(
          os.path.join(profiles_dir, '*.mobileprovision'))
    valid_provisioning_profiles = {}
    for profile_path in provisioning_profiles:
      profile_data = self._LoadProvisioningProfile(profile_path)
      app_id_pattern = profile_data.get(
          'Entitlements', {}).get('application-identifier', '')
      for team_identifier in profile_data.get('TeamIdentifier', []):
        app_id = '%s.%s' % (team_identifier, bundle_identifier)
        if fnmatch.fnmatch(app_id, app_id_pattern):
          valid_provisioning_profiles[app_id_pattern] = (
              profile_path, profile_data, team_identifier)
    if not valid_provisioning_profiles:
      print((
          'cannot find mobile provisioning for %s' % bundle_identifier),
          file=sys.stderr)
      sys.exit(1)
    # If the user has multiple provisioning profiles installed that can be
    # used for ${bundle_identifier}, pick the most specific one (ie. the
    # provisioning profile whose pattern is the longest).
    selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
    return valid_provisioning_profiles[selected_key]

  def _LoadProvisioningProfile(self, profile_path):
    """Extracts the plist embedded in a provisioning profile.

    Args:
      profile_path: string, path to the .mobileprovision file

    Returns:
      Content of the plist embedded in the provisioning profile as a dictionary.
    """
    with tempfile.NamedTemporaryFile() as temp:
      subprocess.check_call([
          'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
      return self._LoadPlistMaybeBinary(temp.name)

  def _MergePlist(self, merged_plist, plist):
    """Merge |plist| into |merged_plist|."""
    for key, value in plist.items():
      if isinstance(value, dict):
        merged_value = merged_plist.get(key, {})
        if isinstance(merged_value, dict):
          self._MergePlist(merged_value, value)
          merged_plist[key] = merged_value
        else:
          merged_plist[key] = value
      else:
        merged_plist[key] = value

  def _LoadPlistMaybeBinary(self, plist_path):
    """Loads into a memory a plist possibly encoded in binary format.

    This is a wrapper around plistlib.readPlist that tries to convert the
    plist to the XML format if it can't be parsed (assuming that it is in
    the binary format).

    Args:
      plist_path: string, path to a plist file, in XML or binary format

    Returns:
      Content of the plist as a dictionary.
    """
    try:
      # First, try to read the file using plistlib that only supports XML,
      # and if an exception is raised, convert a temporary copy to XML and
      # load that copy.
      return plistlib.readPlist(plist_path)
    except:
      pass
    with tempfile.NamedTemporaryFile() as temp:
      shutil.copy2(plist_path, temp.name)
      subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
      return plistlib.readPlist(temp.name)

  def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
    """Constructs a dictionary of variable substitutions for Entitlements.plist.

    Args:
      bundle_identifier: string, value of CFBundleIdentifier from Info.plist
      app_identifier_prefix: string, value for AppIdentifierPrefix

    Returns:
      Dictionary of substitutions to apply when generating Entitlements.plist.
    """
    return {
        'CFBundleIdentifier': bundle_identifier,
        'AppIdentifierPrefix': app_identifier_prefix,
    }

  def _GetCFBundleIdentifier(self):
    """Extracts CFBundleIdentifier value from Info.plist in the bundle.

    Returns:
      Value of CFBundleIdentifier in the Info.plist located in the bundle.
    """
    info_plist_path = os.path.join(
        os.environ['TARGET_BUILD_DIR'],
        os.environ['INFOPLIST_PATH'])
    info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
    return info_plist_data['CFBundleIdentifier']

  def _InstallEntitlements(self, entitlements, substitutions, overrides):
    """Generates and install the ${BundleName}.xcent entitlements file.

    Expands variables "$(variable)" pattern in the source entitlements file,
    add extra entitlements defined in the .mobileprovision file and the copy
    the generated plist to "${BundlePath}.xcent".

    Args:
      entitlements: string, optional, path to the Entitlements.plist template
        to use, defaults to "${SDKROOT}/Entitlements.plist"
      substitutions: dictionary, variable substitutions
      overrides: dictionary, values to add to the entitlements

    Returns:
      Path to the generated entitlements file.
    """
    source_path = entitlements
    target_path = os.path.join(
        os.environ['BUILT_PRODUCTS_DIR'],
        os.environ['PRODUCT_NAME'] + '.xcent')
    if not source_path:
      source_path = os.path.join(
          os.environ['SDKROOT'],
          'Entitlements.plist')
    shutil.copy2(source_path, target_path)
    data = self._LoadPlistMaybeBinary(target_path)
    data = self._ExpandVariables(data, substitutions)
    if overrides:
      for key in overrides:
        if key not in data:
          data[key] = overrides[key]
    plistlib.writePlist(data, target_path)
    return target_path

  def _ExpandVariables(self, data, substitutions):
    """Expands variables "$(variable)" in data.

    Args:
      data: object, can be either string, list or dictionary
      substitutions: dictionary, variable substitutions to perform

    Returns:
      Copy of data where each references to "$(variable)" has been replaced
      by the corresponding value found in substitutions, or left intact if
      the key was not found.
    """
    if isinstance(data, str):
      for key, value in substitutions.items():
        data = data.replace('$(%s)' % key, value)
      return data
    if isinstance(data, list):
      return [self._ExpandVariables(v, substitutions) for v in data]
    if isinstance(data, dict):
      return {k: self._ExpandVariables(data[k], substitutions) for k in data}
    return data


def NextGreaterPowerOf2(x):
  return 2**(x).bit_length()


def WriteHmap(output_name, filelist):
  """Generates a header map based on |filelist|.

  Per Mark Mentovai:
    A header map is structured essentially as a hash table, keyed by names used
    in #includes, and providing pathnames to the actual files.

  The implementation below and the comment above comes from inspecting:
    http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt
  while also looking at the implementation in clang in:
    https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp
  """
  magic = 1751998832
  version = 1
  _reserved = 0
  count = len(filelist)
  capacity = NextGreaterPowerOf2(count)
  strings_offset = 24 + (12 * capacity)
  max_value_length = len(max(filelist.items(), key=lambda t: len(t[1]))[1])

  out = open(output_name, "wb")
  out.write(struct.pack('<LHHLLLL', magic, version, _reserved, strings_offset,
                        count, capacity, max_value_length))

  # Create empty hashmap buckets.
  buckets = [None] * capacity
  for file, path in filelist.items():
    key = 0
    for c in file:
      key += ord(c.lower()) * 13

    # Fill next empty bucket.
    while buckets[key & capacity - 1] is not None:
      key = key + 1
    buckets[key & capacity - 1] = (file, path)

  next_offset = 1
  for bucket in buckets:
    if bucket is None:
      out.write(struct.pack('<LLL', 0, 0, 0))
    else:
      (file, path) = bucket
      key_offset = next_offset
      prefix_offset = key_offset + len(file) + 1
      suffix_offset = prefix_offset + len(os.path.dirname(path) + os.sep) + 1
      next_offset = suffix_offset + len(os.path.basename(path)) + 1
      out.write(struct.pack('<LLL', key_offset, prefix_offset, suffix_offset))

  # Pad byte since next offset starts at 1.
  out.write(struct.pack('<x'))

  for bucket in buckets:
    if bucket is not None:
      (file, path) = bucket
      out.write(struct.pack('<%ds' % len(file), file))
      out.write(struct.pack('<s', '\0'))
      base = os.path.dirname(path) + os.sep
      out.write(struct.pack('<%ds' % len(base), base))
      out.write(struct.pack('<s', '\0'))
      path = os.path.basename(path)
      out.write(struct.pack('<%ds' % len(path), path))
      out.write(struct.pack('<s', '\0'))


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
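Note: the Dispatch()/_CommandifyName() pair above is the entire command-line protocol of gyp-mac-tool: the first argv entry names the method, the remaining entries become positional string arguments. A stub illustration of that routing (hypothetical class, not a real MacTool invocation):

class ToolStub(object):
  def Dispatch(self, args):
    # 'copy-bundle-resource' -> 'ExecCopyBundleResource'
    method = "Exec%s" % args[0].title().replace('-', '')
    return getattr(self, method)(*args[1:])

  def ExecCopyBundleResource(self, source, dest, convert_to_binary):
    return (source, dest, convert_to_binary)

# Arguments arrive as strings, which is why the real tool compares
# convert_to_binary == 'True' instead of treating it as a bool.
assert ToolStub().Dispatch(
    ['copy-bundle-resource', 'in.strings', 'out.strings', 'True']) == (
        'in.strings', 'out.strings', 'True')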
1112 third_party/python/gyp/build/lib/gyp/msvs_emulation.py vendored Normal file
File diff suppressed because it is too large
168 third_party/python/gyp/build/lib/gyp/ninja_syntax.py vendored Normal file
@@ -0,0 +1,168 @@
# This file comes from
#   https://github.com/martine/ninja/blob/master/misc/ninja_syntax.py
# Do not edit! Edit the upstream one instead.

"""Python module for generating .ninja files.

Note that this is emphatically not a required piece of Ninja; it's
just a helpful utility for build-file-generation systems that already
use Python.
"""

import textwrap
import re

def escape_path(word):
    return word.replace('$ ','$$ ').replace(' ','$ ').replace(':', '$:')

class Writer(object):
    def __init__(self, output, width=78):
        self.output = output
        self.width = width

    def newline(self):
        self.output.write('\n')

    def comment(self, text):
        for line in textwrap.wrap(text, self.width - 2):
            self.output.write('# ' + line + '\n')

    def variable(self, key, value, indent=0):
        if value is None:
            return
        if isinstance(value, list):
            value = ' '.join(filter(None, value))  # Filter out empty strings.
        self._line('%s = %s' % (key, value), indent)

    def pool(self, name, depth):
        self._line('pool %s' % name)
        self.variable('depth', depth, indent=1)

    def rule(self, name, command, description=None, depfile=None,
             generator=False, pool=None, restat=False, rspfile=None,
             rspfile_content=None, deps=None):
        self._line('rule %s' % name)
        self.variable('command', command, indent=1)
        if description:
            self.variable('description', description, indent=1)
        if depfile:
            self.variable('depfile', depfile, indent=1)
        if generator:
            self.variable('generator', '1', indent=1)
        if pool:
            self.variable('pool', pool, indent=1)
        if restat:
            self.variable('restat', '1', indent=1)
        if rspfile:
            self.variable('rspfile', rspfile, indent=1)
        if rspfile_content:
            self.variable('rspfile_content', rspfile_content, indent=1)
        if deps:
            self.variable('deps', deps, indent=1)

    def build(self, outputs, rule, inputs=None, implicit=None, order_only=None,
              variables=None):
        outputs = self._as_list(outputs)
        all_inputs = self._as_list(inputs)[:]
        out_outputs = list(map(escape_path, outputs))
        all_inputs = list(map(escape_path, all_inputs))

        if implicit:
            implicit = map(escape_path, self._as_list(implicit))
            all_inputs.append('|')
            all_inputs.extend(implicit)
        if order_only:
            order_only = map(escape_path, self._as_list(order_only))
            all_inputs.append('||')
            all_inputs.extend(order_only)

        self._line('build %s: %s' % (' '.join(out_outputs),
                                     ' '.join([rule] + all_inputs)))

        if variables:
            if isinstance(variables, dict):
                iterator = iter(variables.items())
            else:
                iterator = iter(variables)

            for key, val in iterator:
                self.variable(key, val, indent=1)

        return outputs

    def include(self, path):
        self._line('include %s' % path)

    def subninja(self, path):
        self._line('subninja %s' % path)

    def default(self, paths):
        self._line('default %s' % ' '.join(self._as_list(paths)))

    def _count_dollars_before_index(self, s, i):
        """Returns the number of '$' characters right in front of s[i]."""
        dollar_count = 0
        dollar_index = i - 1
        while dollar_index > 0 and s[dollar_index] == '$':
            dollar_count += 1
            dollar_index -= 1
        return dollar_count

    def _line(self, text, indent=0):
        """Write 'text' word-wrapped at self.width characters."""
        leading_space = '  ' * indent
        while len(leading_space) + len(text) > self.width:
            # The text is too wide; wrap if possible.

            # Find the rightmost space that would obey our width constraint and
            # that's not an escaped space.
            available_space = self.width - len(leading_space) - len(' $')
            space = available_space
            while True:
                space = text.rfind(' ', 0, space)
                if space < 0 or \
                   self._count_dollars_before_index(text, space) % 2 == 0:
                    break

            if space < 0:
                # No such space; just use the first unescaped space we can find.
                space = available_space - 1
                while True:
                    space = text.find(' ', space + 1)
                    if space < 0 or \
                       self._count_dollars_before_index(text, space) % 2 == 0:
                        break
            if space < 0:
                # Give up on breaking.
                break

            self.output.write(leading_space + text[0:space] + ' $\n')
            text = text[space+1:]

            # Subsequent lines are continuations, so indent them.
            leading_space = '  ' * (indent+2)

        self.output.write(leading_space + text + '\n')

    def _as_list(self, input):
        if input is None:
            return []
        if isinstance(input, list):
            return input

        # map is not a class in Python 2
        try:
            if isinstance(input, map):
                return list(input)
        except TypeError:
            pass

        return [input]


def escape(string):
    """Escape a string such that it can be embedded into a Ninja file without
    further interpretation."""
    assert '\n' not in string, 'Ninja syntax does not allow newlines'
    # We only have one special metacharacter: '$'.
    return string.replace('$', '$$')
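Note: Writer only needs a file-like object, so it is easy to exercise in isolation. A usage sketch (the rule name and command here are made up):

import io

buf = io.StringIO()
n = Writer(buf)
n.comment('generated file')
n.rule('cc', command='gcc -c $in -o $out', description='CC $out')
n.build('foo.o', 'cc', inputs='foo.c')
print(buf.getvalue())
# Expected shape of the output:
#   # generated file
#   rule cc
#     command = gcc -c $in -o $out
#     description = CC $out
#   build foo.o: cc foo.c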
57 third_party/python/gyp/build/lib/gyp/simple_copy.py vendored Normal file
@@ -0,0 +1,57 @@
# Copyright 2014 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""A clone of the default copy.deepcopy that doesn't handle cyclic
structures or complex types except for dicts and lists. This is
because gyp copies so large structure that small copy overhead ends up
taking seconds in a project the size of Chromium."""

class Error(Exception):
  pass

__all__ = ["Error", "deepcopy"]

def deepcopy(x):
  """Deep copy operation on gyp objects such as strings, ints, dicts
  and lists. More than twice as fast as copy.deepcopy but much less
  generic."""

  try:
    return _deepcopy_dispatch[type(x)](x)
  except KeyError:
    raise Error('Unsupported type %s for deepcopy. Use copy.deepcopy ' +
                'or expand simple_copy support.' % type(x))

_deepcopy_dispatch = d = {}

def _deepcopy_atomic(x):
  return x

try:
  _string_types = (str, unicode)
# There's no unicode in python3
except NameError:
  _string_types = (str, )

try:
  _integer_types = (int, long)
# There's no long in python3
except NameError:
  _integer_types = (int, )

for x in (type(None), float, bool, type) + _integer_types + _string_types:
  d[x] = _deepcopy_atomic

def _deepcopy_list(x):
  return [deepcopy(a) for a in x]
d[list] = _deepcopy_list

def _deepcopy_dict(x):
  y = {}
  for key, value in x.items():
    y[deepcopy(key)] = deepcopy(value)
  return y
d[dict] = _deepcopy_dict

del d
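Note: simple_copy trades generality for speed by dispatching on the exact type, so it handles only the JSON-like structures gyp actually passes around. A short usage sketch of the copy semantics:

nested = {'targets': [{'name': 'a', 'deps': ['b']}], 'depth': 1}
copied = deepcopy(nested)
# Equal contents, but every dict and list is a fresh object.
assert copied == nested and copied is not nested
assert copied['targets'][0] is not nested['targets'][0]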
326 third_party/python/gyp/build/lib/gyp/win_tool.py vendored Normal file
@@ -0,0 +1,326 @@
#!/usr/bin/env python

# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Utility functions for Windows builds.

These functions are executed via gyp-win-tool when using the ninja generator.
"""

from __future__ import print_function

import os
import re
import shutil
import subprocess
import stat
import string
import sys

BASE_DIR = os.path.dirname(os.path.abspath(__file__))

# A regex matching an argument corresponding to the output filename passed to
# link.exe.
_LINK_EXE_OUT_ARG = re.compile('/OUT:(?P<out>.+)$', re.IGNORECASE)

def main(args):
  executor = WinTool()
  exit_code = executor.Dispatch(args)
  if exit_code is not None:
    sys.exit(exit_code)


class WinTool(object):
  """This class performs all the Windows tooling steps. The methods can either
  be executed directly, or dispatched from an argument list."""

  def _UseSeparateMspdbsrv(self, env, args):
    """Allows to use a unique instance of mspdbsrv.exe per linker instead of a
    shared one."""
    if len(args) < 1:
      raise Exception("Not enough arguments")

    if args[0] != 'link.exe':
      return

    # Use the output filename passed to the linker to generate an endpoint name
    # for mspdbsrv.exe.
    endpoint_name = None
    for arg in args:
      m = _LINK_EXE_OUT_ARG.match(arg)
      if m:
        endpoint_name = re.sub(r'\W+', '',
                               '%s_%d' % (m.group('out'), os.getpid()))
        break

    if endpoint_name is None:
      return

    # Adds the appropriate environment variable. This will be read by link.exe
    # to know which instance of mspdbsrv.exe it should connect to (if it's
    # not set then the default endpoint is used).
    env['_MSPDBSRV_ENDPOINT_'] = endpoint_name

  def Dispatch(self, args):
    """Dispatches a string command to a method."""
    if len(args) < 1:
      raise Exception("Not enough arguments")

    method = "Exec%s" % self._CommandifyName(args[0])
    return getattr(self, method)(*args[1:])

  def _CommandifyName(self, name_string):
    """Transforms a tool name like recursive-mirror to RecursiveMirror."""
    return name_string.title().replace('-', '')

  def _GetEnv(self, arch):
    """Gets the saved environment from a file for a given architecture."""
    # The environment is saved as an "environment block" (see CreateProcess
    # and msvs_emulation for details). We convert to a dict here.
    # Drop last 2 NULs, one for list terminator, one for trailing vs. separator.
    pairs = open(arch).read()[:-2].split('\0')
    kvs = [item.split('=', 1) for item in pairs]
    return dict(kvs)

  def ExecStamp(self, path):
    """Simple stamp command."""
    open(path, 'w').close()

  def ExecRecursiveMirror(self, source, dest):
    """Emulation of rm -rf out && cp -af in out."""
    if os.path.exists(dest):
      if os.path.isdir(dest):
        def _on_error(fn, path, excinfo):
          # The operation failed, possibly because the file is set to
          # read-only. If that's why, make it writable and try the op again.
          if not os.access(path, os.W_OK):
            os.chmod(path, stat.S_IWRITE)
          fn(path)
        shutil.rmtree(dest, onerror=_on_error)
      else:
        if not os.access(dest, os.W_OK):
          # Attempt to make the file writable before deleting it.
          os.chmod(dest, stat.S_IWRITE)
        os.unlink(dest)

    if os.path.isdir(source):
      shutil.copytree(source, dest)
    else:
      shutil.copy2(source, dest)

  def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args):
    """Filter diagnostic output from link that looks like:
    '   Creating library ui.dll.lib and object ui.dll.exp'
    This happens when there are exports from the dll or exe.
    """
    env = self._GetEnv(arch)
    if use_separate_mspdbsrv == 'True':
      self._UseSeparateMspdbsrv(env, args)
    if sys.platform == 'win32':
      args = list(args)  # *args is a tuple by default, which is read-only.
      args[0] = args[0].replace('/', '\\')
    # https://docs.python.org/2/library/subprocess.html:
    # "On Unix with shell=True [...] if args is a sequence, the first item
    # specifies the command string, and any additional items will be treated as
    # additional arguments to the shell itself. That is to say, Popen does the
    # equivalent of:
    #   Popen(['/bin/sh', '-c', args[0], args[1], ...])"
    # For that reason, since going through the shell doesn't seem necessary on
    # non-Windows don't do that there.
    link = subprocess.Popen(args, shell=sys.platform == 'win32', env=env,
                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    out, _ = link.communicate()
    for line in out.splitlines():
      if (not line.startswith('   Creating library ') and
          not line.startswith('Generating code') and
          not line.startswith('Finished generating code')):
        print(line)
    return link.returncode

  def ExecLinkWithManifests(self, arch, embed_manifest, out, ldcmd, resname,
                            mt, rc, intermediate_manifest, *manifests):
    """A wrapper for handling creating a manifest resource and then executing
    a link command."""
    # The 'normal' way to do manifests is to have link generate a manifest
    # based on gathering dependencies from the object files, then merge that
    # manifest with other manifests supplied as sources, convert the merged
    # manifest to a resource, and then *relink*, including the compiled
    # version of the manifest resource. This breaks incremental linking, and
    # is generally overly complicated. Instead, we merge all the manifests
    # provided (along with one that includes what would normally be in the
    # linker-generated one, see msvs_emulation.py), and include that into the
    # first and only link. We still tell link to generate a manifest, but we
    # only use that to assert that our simpler process did not miss anything.
    variables = {
      'python': sys.executable,
      'arch': arch,
      'out': out,
      'ldcmd': ldcmd,
      'resname': resname,
      'mt': mt,
      'rc': rc,
      'intermediate_manifest': intermediate_manifest,
      'manifests': ' '.join(manifests),
    }
    add_to_ld = ''
    if manifests:
      subprocess.check_call(
          '%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo '
          '-manifest %(manifests)s -out:%(out)s.manifest' % variables)
      if embed_manifest == 'True':
        subprocess.check_call(
            '%(python)s gyp-win-tool manifest-to-rc %(arch)s %(out)s.manifest'
            ' %(out)s.manifest.rc %(resname)s' % variables)
        subprocess.check_call(
            '%(python)s gyp-win-tool rc-wrapper %(arch)s %(rc)s '
            '%(out)s.manifest.rc' % variables)
        add_to_ld = ' %(out)s.manifest.res' % variables
    subprocess.check_call(ldcmd + add_to_ld)

    # Run mt.exe on the theoretically complete manifest we generated, merging
    # it with the one the linker generated to confirm that the linker
    # generated one does not add anything. This is strictly unnecessary for
    # correctness, it's only to verify that e.g. /MANIFESTDEPENDENCY was not
    # used in a #pragma comment.
    if manifests:
      # Merge the intermediate one with ours to .assert.manifest, then check
      # that .assert.manifest is identical to ours.
      subprocess.check_call(
          '%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo '
          '-manifest %(out)s.manifest %(intermediate_manifest)s '
          '-out:%(out)s.assert.manifest' % variables)
      assert_manifest = '%(out)s.assert.manifest' % variables
      our_manifest = '%(out)s.manifest' % variables
      # Load and normalize the manifests. mt.exe sometimes removes whitespace,
      # and sometimes doesn't unfortunately.
      with open(our_manifest, 'r') as our_f:
        with open(assert_manifest, 'r') as assert_f:
          our_data = our_f.read().translate(None, string.whitespace)
          assert_data = assert_f.read().translate(None, string.whitespace)
      if our_data != assert_data:
        os.unlink(out)
        def dump(filename):
          print(filename, file=sys.stderr)
          print('-----', file=sys.stderr)
          with open(filename, 'r') as f:
            print(f.read(), file=sys.stderr)
          print('-----', file=sys.stderr)
        dump(intermediate_manifest)
        dump(our_manifest)
        dump(assert_manifest)
        sys.stderr.write(
            'Linker generated manifest "%s" added to final manifest "%s" '
            '(result in "%s"). '
            'Were /MANIFEST switches used in #pragma statements? ' % (
              intermediate_manifest, our_manifest, assert_manifest))
        return 1

  def ExecManifestWrapper(self, arch, *args):
    """Run manifest tool with environment set. Strip out undesirable warning
    (some XML blocks are recognized by the OS loader, but not the manifest
    tool)."""
    env = self._GetEnv(arch)
    popen = subprocess.Popen(args, shell=True, env=env,
                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    out, _ = popen.communicate()
    for line in out.splitlines():
      if line and 'manifest authoring warning 81010002' not in line:
        print(line)
    return popen.returncode

  def ExecManifestToRc(self, arch, *args):
    """Creates a resource file pointing a SxS assembly manifest.
    |args| is tuple containing path to resource file, path to manifest file
    and resource name which can be "1" (for executables) or "2" (for DLLs)."""
    manifest_path, resource_path, resource_name = args
    with open(resource_path, 'w') as output:
      output.write('#include <windows.h>\n%s RT_MANIFEST "%s"' % (
        resource_name,
        os.path.abspath(manifest_path).replace('\\', '/')))

  def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl,
                      *flags):
    """Filter noisy filenames output from MIDL compile step that isn't
    quietable via command line flags.
    """
    args = ['midl', '/nologo'] + list(flags) + [
        '/out', outdir,
        '/tlb', tlb,
        '/h', h,
        '/dlldata', dlldata,
        '/iid', iid,
        '/proxy', proxy,
        idl]
    env = self._GetEnv(arch)
    popen = subprocess.Popen(args, shell=True, env=env,
                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    out, _ = popen.communicate()
    # Filter junk out of stdout, and write filtered versions. Output we want
    # to filter is pairs of lines that look like this:
    # Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
    # objidl.idl
    lines = out.splitlines()
    prefixes = ('Processing ', '64 bit Processing ')
    processing = set(os.path.basename(x)
                     for x in lines if x.startswith(prefixes))
    for line in lines:
      if not line.startswith(prefixes) and line not in processing:
        print(line)
    return popen.returncode

  def ExecAsmWrapper(self, arch, *args):
    """Filter logo banner from invocations of asm.exe."""
    env = self._GetEnv(arch)
    popen = subprocess.Popen(args, shell=True, env=env,
                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    out, _ = popen.communicate()
    for line in out.splitlines():
      if (not line.startswith('Copyright (C) Microsoft Corporation') and
          not line.startswith('Microsoft (R) Macro Assembler') and
          not line.startswith(' Assembling: ') and
          line):
        print(line)
    return popen.returncode

  def ExecRcWrapper(self, arch, *args):
    """Filter logo banner from invocations of rc.exe. Older versions of RC
    don't support the /nologo flag."""
    env = self._GetEnv(arch)
    popen = subprocess.Popen(args, shell=True, env=env,
                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    out, _ = popen.communicate()
    for line in out.splitlines():
      if (not line.startswith('Microsoft (R) Windows (R) Resource Compiler') and
          not line.startswith('Copyright (C) Microsoft Corporation') and
          line):
        print(line)
    return popen.returncode
|
||||||
|
|
||||||
|
def ExecActionWrapper(self, arch, rspfile, *dir):
|
||||||
|
"""Runs an action command line from a response file using the environment
|
||||||
|
for |arch|. If |dir| is supplied, use that as the working directory."""
|
||||||
|
env = self._GetEnv(arch)
|
||||||
|
# TODO(scottmg): This is a temporary hack to get some specific variables
|
||||||
|
# through to actions that are set after gyp-time. http://crbug.com/333738.
|
||||||
|
for k, v in os.environ.items():
|
||||||
|
if k not in env:
|
||||||
|
env[k] = v
|
||||||
|
args = open(rspfile).read()
|
||||||
|
dir = dir[0] if dir else None
|
||||||
|
return subprocess.call(args, shell=True, env=env, cwd=dir)
|
||||||
|
|
||||||
|
def ExecClCompile(self, project_dir, selected_files):
|
||||||
|
"""Executed by msvs-ninja projects when the 'ClCompile' target is used to
|
||||||
|
build selected C/C++ files."""
|
||||||
|
project_dir = os.path.relpath(project_dir, BASE_DIR)
|
||||||
|
selected_files = selected_files.split(';')
|
||||||
|
ninja_targets = [os.path.join(project_dir, filename) + '^^'
|
||||||
|
for filename in selected_files]
|
||||||
|
cmd = ['ninja.exe']
|
||||||
|
cmd.extend(ninja_targets)
|
||||||
|
return subprocess.call(cmd, shell=True, cwd=BASE_DIR)
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.exit(main(sys.argv[1:]))
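Editor's note: a minimal sketch, not part of the commit, of the one-line resource script that ExecManifestToRc above writes; the manifest path and resource name are hypothetical examples:

    # Sketch only: reproduce the .rc body ExecManifestToRc generates.
    import os

    manifest_path = r'C:\build\foo.manifest'  # hypothetical input path
    resource_name = '1'                       # '1' for executables, '2' for DLLs
    rc_body = '#include <windows.h>\n%s RT_MANIFEST "%s"' % (
        resource_name, os.path.abspath(manifest_path).replace('\\', '/'))
    print(rc_body)  # e.g. 1 RT_MANIFEST "C:/build/foo.manifest"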
1798
third_party/python/gyp/build/lib/gyp/xcode_emulation.py
vendored
Normal file
File diff suppressed because it is too large
289
third_party/python/gyp/build/lib/gyp/xcode_ninja.py
vendored
Normal file
@@ -0,0 +1,289 @@
# Copyright (c) 2014 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Xcode-ninja wrapper project file generator.

This updates the data structures passed to the Xcode gyp generator to build
with ninja instead. The Xcode project itself is transformed into a list of
executable targets, each with a build step to build with ninja, and a target
with every source and resource file. This appears to sidestep some of the
major performance headaches experienced using complex projects and large number
of targets within Xcode.
"""

import errno
import gyp.generator.ninja
import os
import re
import xml.sax.saxutils


def _WriteWorkspace(main_gyp, sources_gyp, params):
  """ Create a workspace to wrap main and sources gyp paths. """
  (build_file_root, build_file_ext) = os.path.splitext(main_gyp)
  workspace_path = build_file_root + '.xcworkspace'
  options = params['options']
  if options.generator_output:
    workspace_path = os.path.join(options.generator_output, workspace_path)
  try:
    os.makedirs(workspace_path)
  except OSError as e:
    if e.errno != errno.EEXIST:
      raise
  output_string = '<?xml version="1.0" encoding="UTF-8"?>\n' + \
                  '<Workspace version = "1.0">\n'
  for gyp_name in [main_gyp, sources_gyp]:
    name = os.path.splitext(os.path.basename(gyp_name))[0] + '.xcodeproj'
    name = xml.sax.saxutils.quoteattr("group:" + name)
    output_string += '  <FileRef location = %s></FileRef>\n' % name
  output_string += '</Workspace>\n'

  workspace_file = os.path.join(workspace_path, "contents.xcworkspacedata")

  try:
    with open(workspace_file, 'r') as input_file:
      input_string = input_file.read()
      if input_string == output_string:
        return
  except IOError:
    # Ignore errors if the file doesn't exist.
    pass

  with open(workspace_file, 'w') as output_file:
    output_file.write(output_string)

def _TargetFromSpec(old_spec, params):
  """ Create fake target for xcode-ninja wrapper. """
  # Determine ninja top level build dir (e.g. /path/to/out).
  ninja_toplevel = None
  jobs = 0
  if params:
    options = params['options']
    ninja_toplevel = \
      os.path.join(options.toplevel_dir,
                   gyp.generator.ninja.ComputeOutputDir(params))
    jobs = params.get('generator_flags', {}).get('xcode_ninja_jobs', 0)

  target_name = old_spec.get('target_name')
  product_name = old_spec.get('product_name', target_name)
  product_extension = old_spec.get('product_extension')

  ninja_target = {}
  ninja_target['target_name'] = target_name
  ninja_target['product_name'] = product_name
  if product_extension:
    ninja_target['product_extension'] = product_extension
  ninja_target['toolset'] = old_spec.get('toolset')
  ninja_target['default_configuration'] = old_spec.get('default_configuration')
  ninja_target['configurations'] = {}

  # Tell Xcode to look in |ninja_toplevel| for build products.
  new_xcode_settings = {}
  if ninja_toplevel:
    new_xcode_settings['CONFIGURATION_BUILD_DIR'] = \
      "%s/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)" % ninja_toplevel

  if 'configurations' in old_spec:
    for config in old_spec['configurations'].keys():
      old_xcode_settings = \
        old_spec['configurations'][config].get('xcode_settings', {})
      if 'IPHONEOS_DEPLOYMENT_TARGET' in old_xcode_settings:
        new_xcode_settings['CODE_SIGNING_REQUIRED'] = "NO"
        new_xcode_settings['IPHONEOS_DEPLOYMENT_TARGET'] = \
          old_xcode_settings['IPHONEOS_DEPLOYMENT_TARGET']
      for key in ['BUNDLE_LOADER', 'TEST_HOST']:
        if key in old_xcode_settings:
          new_xcode_settings[key] = old_xcode_settings[key]

      ninja_target['configurations'][config] = {}
      ninja_target['configurations'][config]['xcode_settings'] = \
        new_xcode_settings

  ninja_target['mac_bundle'] = old_spec.get('mac_bundle', 0)
  ninja_target['mac_xctest_bundle'] = old_spec.get('mac_xctest_bundle', 0)
  ninja_target['ios_app_extension'] = old_spec.get('ios_app_extension', 0)
  ninja_target['ios_watchkit_extension'] = \
    old_spec.get('ios_watchkit_extension', 0)
  ninja_target['ios_watchkit_app'] = old_spec.get('ios_watchkit_app', 0)
  ninja_target['type'] = old_spec['type']
  if ninja_toplevel:
    ninja_target['actions'] = [
      {
        'action_name': 'Compile and copy %s via ninja' % target_name,
        'inputs': [],
        'outputs': [],
        'action': [
          'env',
          'PATH=%s' % os.environ['PATH'],
          'ninja',
          '-C',
          new_xcode_settings['CONFIGURATION_BUILD_DIR'],
          target_name,
        ],
        'message': 'Compile and copy %s via ninja' % target_name,
      },
    ]
    if jobs > 0:
      ninja_target['actions'][0]['action'].extend(('-j', jobs))
  return ninja_target

def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
  """Limit targets for Xcode wrapper.

  Xcode sometimes performs poorly with too many targets, so only include
  proper executable targets, with filters to customize.
  Arguments:
    target_extras: Regular expression to always add, matching any target.
    executable_target_pattern: Regular expression limiting executable targets.
    spec: Specifications for target.
  """
  target_name = spec.get('target_name')
  # Always include targets matching target_extras.
  if target_extras is not None and re.search(target_extras, target_name):
    return True

  # Otherwise just show executable targets and xc_tests.
  if (int(spec.get('mac_xctest_bundle', 0)) != 0 or
      (spec.get('type', '') == 'executable' and
       spec.get('product_extension', '') != 'bundle')):

    # If there is a filter and the target does not match, exclude the target.
    if executable_target_pattern is not None:
      if not re.search(executable_target_pattern, target_name):
        return False
    return True
  return False

def CreateWrapper(target_list, target_dicts, data, params):
  """Initialize targets for the ninja wrapper.

  This sets up the necessary variables in the targets to generate Xcode projects
  that use ninja as an external builder.
  Arguments:
    target_list: List of target pairs: 'base/base.gyp:base'.
    target_dicts: Dict of target properties keyed on target pair.
    data: Dict of flattened build files keyed on gyp path.
    params: Dict of global options for gyp.
  """
  orig_gyp = params['build_files'][0]
  for gyp_name, gyp_dict in data.items():
    if gyp_name == orig_gyp:
      depth = gyp_dict['_DEPTH']

  # Check for custom main gyp name, otherwise use the default CHROMIUM_GYP_FILE
  # and prepend .ninja before the .gyp extension.
  generator_flags = params.get('generator_flags', {})
  main_gyp = generator_flags.get('xcode_ninja_main_gyp', None)
  if main_gyp is None:
    (build_file_root, build_file_ext) = os.path.splitext(orig_gyp)
    main_gyp = build_file_root + ".ninja" + build_file_ext

  # Create new |target_list|, |target_dicts| and |data| data structures.
  new_target_list = []
  new_target_dicts = {}
  new_data = {}

  # Set base keys needed for |data|.
  new_data[main_gyp] = {}
  new_data[main_gyp]['included_files'] = []
  new_data[main_gyp]['targets'] = []
  new_data[main_gyp]['xcode_settings'] = \
    data[orig_gyp].get('xcode_settings', {})

  # Normally the xcode-ninja generator includes only valid executable targets.
  # If |xcode_ninja_executable_target_pattern| is set, that list is reduced to
  # executable targets that match the pattern. (Default all)
  executable_target_pattern = \
    generator_flags.get('xcode_ninja_executable_target_pattern', None)

  # For including other non-executable targets, add the matching target name
  # to the |xcode_ninja_target_pattern| regular expression. (Default none)
  target_extras = generator_flags.get('xcode_ninja_target_pattern', None)

  for old_qualified_target in target_list:
    spec = target_dicts[old_qualified_target]
    if IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
      # Add to new_target_list.
      target_name = spec.get('target_name')
      new_target_name = '%s:%s#target' % (main_gyp, target_name)
      new_target_list.append(new_target_name)

      # Add to new_target_dicts.
      new_target_dicts[new_target_name] = _TargetFromSpec(spec, params)

      # Add to new_data.
      for old_target in data[old_qualified_target.split(':')[0]]['targets']:
        if old_target['target_name'] == target_name:
          new_data_target = {}
          new_data_target['target_name'] = old_target['target_name']
          new_data_target['toolset'] = old_target['toolset']
          new_data[main_gyp]['targets'].append(new_data_target)

  # Create sources target.
  sources_target_name = 'sources_for_indexing'
  sources_target = _TargetFromSpec(
    { 'target_name' : sources_target_name,
      'toolset': 'target',
      'default_configuration': 'Default',
      'mac_bundle': '0',
      'type': 'executable'
    }, None)

  # Tell Xcode to look everywhere for headers.
  sources_target['configurations'] = {'Default': { 'include_dirs': [ depth ] } }

  # Put excluded files into the sources target so they can be opened in Xcode.
  skip_excluded_files = \
    not generator_flags.get('xcode_ninja_list_excluded_files', True)

  sources = []
  for target, target_dict in target_dicts.items():
    base = os.path.dirname(target)
    files = target_dict.get('sources', []) + \
            target_dict.get('mac_bundle_resources', [])

    if not skip_excluded_files:
      files.extend(target_dict.get('sources_excluded', []) +
                   target_dict.get('mac_bundle_resources_excluded', []))

    for action in target_dict.get('actions', []):
      files.extend(action.get('inputs', []))

      if not skip_excluded_files:
        files.extend(action.get('inputs_excluded', []))

    # Remove files starting with $. These are mostly intermediate files for the
    # build system.
    files = [ file for file in files if not file.startswith('$')]

    # Make sources relative to root build file.
    relative_path = os.path.dirname(main_gyp)
    sources += [ os.path.relpath(os.path.join(base, file), relative_path)
                 for file in files ]

  sources_target['sources'] = sorted(set(sources))

  # Put sources_to_index in it's own gyp.
  sources_gyp = \
    os.path.join(os.path.dirname(main_gyp), sources_target_name + ".gyp")
  fully_qualified_target_name = \
    '%s:%s#target' % (sources_gyp, sources_target_name)

  # Add to new_target_list, new_target_dicts and new_data.
  new_target_list.append(fully_qualified_target_name)
  new_target_dicts[fully_qualified_target_name] = sources_target
  new_data_target = {}
  new_data_target['target_name'] = sources_target['target_name']
  new_data_target['_DEPTH'] = depth
  new_data_target['toolset'] = "target"
  new_data[sources_gyp] = {}
  new_data[sources_gyp]['targets'] = []
  new_data[sources_gyp]['included_files'] = []
  new_data[sources_gyp]['xcode_settings'] = \
    data[orig_gyp].get('xcode_settings', {})
  new_data[sources_gyp]['targets'].append(new_data_target)

  # Write workspace to file.
  _WriteWorkspace(main_gyp, sources_gyp, params)
  return (new_target_list, new_target_dicts, new_data)
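Editor's note: a minimal sketch, not part of the commit, of the contents.xcworkspacedata that _WriteWorkspace above assembles; the two gyp file names are hypothetical examples:

    import os
    import xml.sax.saxutils

    output_string = '<?xml version="1.0" encoding="UTF-8"?>\n' + \
                    '<Workspace version = "1.0">\n'
    for gyp_name in ['all.ninja.gyp', 'sources_for_indexing.gyp']:  # assumed names
        name = os.path.splitext(os.path.basename(gyp_name))[0] + '.xcodeproj'
        output_string += '  <FileRef location = %s></FileRef>\n' % \
            xml.sax.saxutils.quoteattr('group:' + name)
    print(output_string + '</Workspace>')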
2995
third_party/python/gyp/build/lib/gyp/xcodeproj_file.py
vendored
Normal file
File diff suppressed because it is too large
68
third_party/python/gyp/build/lib/gyp/xml_fix.py
vendored
Normal file
@@ -0,0 +1,68 @@
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Applies a fix to CR LF TAB handling in xml.dom.

Fixes this: http://code.google.com/p/chromium/issues/detail?id=76293
Working around this: http://bugs.python.org/issue5752
TODO(bradnelson): Consider dropping this when we drop XP support.
"""


import xml.dom.minidom


def _Replacement_write_data(writer, data, is_attrib=False):
  """Writes datachars to writer."""
  data = data.replace("&", "&amp;").replace("<", "&lt;")
  data = data.replace("\"", "&quot;").replace(">", "&gt;")
  if is_attrib:
    data = data.replace(
        "\r", "&#xD;").replace(
        "\n", "&#xA;").replace(
        "\t", "&#x9;")
  writer.write(data)


def _Replacement_writexml(self, writer, indent="", addindent="", newl=""):
  # indent = current indentation
  # addindent = indentation to add to higher levels
  # newl = newline string
  writer.write(indent+"<" + self.tagName)

  attrs = self._get_attributes()
  a_names = sorted(attrs.keys())

  for a_name in a_names:
    writer.write(" %s=\"" % a_name)
    _Replacement_write_data(writer, attrs[a_name].value, is_attrib=True)
    writer.write("\"")
  if self.childNodes:
    writer.write(">%s" % newl)
    for node in self.childNodes:
      node.writexml(writer, indent + addindent, addindent, newl)
    writer.write("%s</%s>%s" % (indent, self.tagName, newl))
  else:
    writer.write("/>%s" % newl)


class XmlFix(object):
  """Object to manage temporary patching of xml.dom.minidom."""

  def __init__(self):
    # Preserve current xml.dom.minidom functions.
    self.write_data = xml.dom.minidom._write_data
    self.writexml = xml.dom.minidom.Element.writexml
    # Inject replacement versions of a function and a method.
    xml.dom.minidom._write_data = _Replacement_write_data
    xml.dom.minidom.Element.writexml = _Replacement_writexml

  def Cleanup(self):
    if self.write_data:
      xml.dom.minidom._write_data = self.write_data
      xml.dom.minidom.Element.writexml = self.writexml
      self.write_data = None

  def __del__(self):
    self.Cleanup()
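Editor's note: a short usage sketch, not part of the commit, showing XmlFix patching minidom for the duration of a write and then restoring it; the sample document is a made-up example:

    import xml.dom.minidom

    fix = XmlFix()  # swap in the entity-preserving writers
    try:
        doc = xml.dom.minidom.parseString('<a b="x&#9;y"/>')
        print(doc.documentElement.toxml())  # the tab round-trips as &#x9;
    finally:
        fix.Cleanup()  # restore the original minidom functions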
9
third_party/python/gyp/pylib/gyp.egg-info/PKG-INFO
vendored
Normal file
@@ -0,0 +1,9 @@
Metadata-Version: 2.1
Name: gyp
Version: 0.1
Summary: Generate Your Projects
Home-page: http://code.google.com/p/gyp
Author: Chromium Authors
Author-email: chromium-dev@googlegroups.com
License-File: LICENSE
License-File: AUTHORS
48
third_party/python/gyp/pylib/gyp.egg-info/SOURCES.txt
vendored
Normal file
@@ -0,0 +1,48 @@
AUTHORS
LICENSE
README.md
setup.py
pylib/gyp/MSVSNew.py
pylib/gyp/MSVSProject.py
pylib/gyp/MSVSSettings.py
pylib/gyp/MSVSSettings_test.py
pylib/gyp/MSVSToolFile.py
pylib/gyp/MSVSUserFile.py
pylib/gyp/MSVSUtil.py
pylib/gyp/MSVSVersion.py
pylib/gyp/__init__.py
pylib/gyp/common.py
pylib/gyp/common_test.py
pylib/gyp/easy_xml.py
pylib/gyp/easy_xml_test.py
pylib/gyp/flock_tool.py
pylib/gyp/input.py
pylib/gyp/input_test.py
pylib/gyp/mac_tool.py
pylib/gyp/msvs_emulation.py
pylib/gyp/ninja_syntax.py
pylib/gyp/simple_copy.py
pylib/gyp/win_tool.py
pylib/gyp/xcode_emulation.py
pylib/gyp/xcode_ninja.py
pylib/gyp/xcodeproj_file.py
pylib/gyp/xml_fix.py
pylib/gyp.egg-info/PKG-INFO
pylib/gyp.egg-info/SOURCES.txt
pylib/gyp.egg-info/dependency_links.txt
pylib/gyp.egg-info/entry_points.txt
pylib/gyp.egg-info/top_level.txt
pylib/gyp/generator/__init__.py
pylib/gyp/generator/analyzer.py
pylib/gyp/generator/cmake.py
pylib/gyp/generator/dump_dependency_json.py
pylib/gyp/generator/eclipse.py
pylib/gyp/generator/gypd.py
pylib/gyp/generator/gypsh.py
pylib/gyp/generator/make.py
pylib/gyp/generator/msvs.py
pylib/gyp/generator/msvs_test.py
pylib/gyp/generator/ninja.py
pylib/gyp/generator/ninja_test.py
pylib/gyp/generator/xcode.py
pylib/gyp/generator/xcode_test.py
1
third_party/python/gyp/pylib/gyp.egg-info/dependency_links.txt
vendored
Normal file
@@ -0,0 +1 @@

2
third_party/python/gyp/pylib/gyp.egg-info/entry_points.txt
vendored
Normal file
@@ -0,0 +1,2 @@
[console_scripts]
gyp = gyp:script_main
1
third_party/python/gyp/pylib/gyp.egg-info/top_level.txt
vendored
Normal file
@@ -0,0 +1 @@
gyp
202
third_party/python/importlib_resources/importlib_resources-5.12.0.dist-info/LICENSE
vendored
Normal file
@@ -0,0 +1,202 @@

                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright [yyyy] [name of copyright owner]

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
104
third_party/python/importlib_resources/importlib_resources-5.12.0.dist-info/METADATA
vendored
Normal file
@@ -0,0 +1,104 @@
Metadata-Version: 2.1
Name: importlib-resources
Version: 5.12.0
Summary: Read resources from Python packages
Home-page: https://github.com/python/importlib_resources
Author: Barry Warsaw
Author-email: barry@python.org
Project-URL: Documentation, https://importlib-resources.readthedocs.io/
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3 :: Only
Requires-Python: >=3.7
License-File: LICENSE
Requires-Dist: zipp (>=3.1.0) ; python_version < "3.10"
Provides-Extra: docs
Requires-Dist: sphinx (>=3.5) ; extra == 'docs'
Requires-Dist: jaraco.packaging (>=9) ; extra == 'docs'
Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
Requires-Dist: furo ; extra == 'docs'
Requires-Dist: sphinx-lint ; extra == 'docs'
Requires-Dist: jaraco.tidelift (>=1.4) ; extra == 'docs'
Provides-Extra: testing
Requires-Dist: pytest (>=6) ; extra == 'testing'
Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing'
Requires-Dist: flake8 (<5) ; extra == 'testing'
Requires-Dist: pytest-cov ; extra == 'testing'
Requires-Dist: pytest-enabler (>=1.3) ; extra == 'testing'
Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing'
Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing'
Requires-Dist: pytest-flake8 ; (python_version < "3.12") and extra == 'testing'

.. image:: https://img.shields.io/pypi/v/importlib_resources.svg
   :target: https://pypi.org/project/importlib_resources

.. image:: https://img.shields.io/pypi/pyversions/importlib_resources.svg

.. image:: https://github.com/python/importlib_resources/workflows/tests/badge.svg
   :target: https://github.com/python/importlib_resources/actions?query=workflow%3A%22tests%22
   :alt: tests

.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
   :target: https://github.com/psf/black
   :alt: Code style: Black

.. image:: https://readthedocs.org/projects/importlib-resources/badge/?version=latest
   :target: https://importlib-resources.readthedocs.io/en/latest/?badge=latest

.. image:: https://img.shields.io/badge/skeleton-2023-informational
   :target: https://blog.jaraco.com/skeleton

.. image:: https://tidelift.com/badges/package/pypi/importlib-resources
   :target: https://tidelift.com/subscription/pkg/pypi-importlib-resources?utm_source=pypi-importlib-resources&utm_medium=readme

``importlib_resources`` is a backport of Python standard library
`importlib.resources
<https://docs.python.org/3/library/importlib.html#module-importlib.resources>`_
module for older Pythons.

The key goal of this module is to replace parts of `pkg_resources
<https://setuptools.readthedocs.io/en/latest/pkg_resources.html>`_ with a
solution in Python's stdlib that relies on well-defined APIs. This makes
reading resources included in packages easier, with more stable and consistent
semantics.

Compatibility
=============

New features are introduced in this third-party library and later merged
into CPython. The following table indicates which versions of this library
were contributed to different versions in the standard library:

.. list-table::
   :header-rows: 1

   * - importlib_resources
     - stdlib
   * - 5.9
     - 3.12
   * - 5.7
     - 3.11
   * - 5.0
     - 3.10
   * - 1.3
     - 3.9
   * - 0.5 (?)
     - 3.7

For Enterprise
==============

Available as part of the Tidelift Subscription.

This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.

`Learn more <https://tidelift.com/subscription/pkg/pypi-importlib-resources?utm_source=pypi-importlib-resources&utm_medium=referral&utm_campaign=github>`_.

Security Contact
================

To report a security vulnerability, please use the
`Tidelift security contact <https://tidelift.com/security>`_.
Tidelift will coordinate the fix and disclosure.
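Editor's note: a minimal usage sketch, not part of the commit, of the files() API the README above describes; 'mypkg' and 'config.json' are hypothetical names:

    from importlib_resources import files, as_file

    text = files('mypkg').joinpath('config.json').read_text(encoding='utf-8')

    # as_file() materializes the resource on the filesystem when a real
    # path is needed (e.g. when 'mypkg' is imported from a zip):
    with as_file(files('mypkg') / 'config.json') as path:
        print(path)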
48
third_party/python/importlib_resources/importlib_resources-5.12.0.dist-info/RECORD
vendored
Normal file
@@ -0,0 +1,48 @@
importlib_resources/__init__.py,sha256=evPm12kLgYqTm-pbzm60bOuumumT8IpBNWFp0uMyrzE,506
importlib_resources/_adapters.py,sha256=vprJGbUeHbajX6XCuMP6J3lMrqCi-P_MTlziJUR7jfk,4482
importlib_resources/_common.py,sha256=jSC4xfLdcMNbtbWHtpzbFkNa0W7kvf__nsYn14C_AEU,5457
importlib_resources/_compat.py,sha256=4oDJPpo63eH_3l5BkBHmkjAQW4HGs5qvYd2-ziLA_ck,2935
importlib_resources/_itertools.py,sha256=eDisV6RqiNZOogLSXf6LOGHOYc79FGgPrKNLzFLmCrU,1277
importlib_resources/_legacy.py,sha256=0TKdZixxLWA-xwtAZw4HcpqJmj4Xprx1Zkcty0gTRZY,3481
importlib_resources/abc.py,sha256=Icr2IJ2QtH7vvAB9vC5WRJ9KBoaDyJa7KUs8McuROzo,5140
importlib_resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
importlib_resources/readers.py,sha256=i80n49L2rBAtlB9bU0zAeQHiEXxcdP99-pWR6ED-ypY,4312
importlib_resources/simple.py,sha256=0__2TQBTQoqkajYmNPt1HxERcReAT6boVKJA328pr04,2576
importlib_resources/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
importlib_resources/tests/_compat.py,sha256=YTSB0U1R9oADnh6GrQcOCgojxcF_N6H1LklymEWf9SQ,708
importlib_resources/tests/_path.py,sha256=nkv3ek7D1U898v921rYbldDCtKri2oyYOi3EJqGjEGU,1289
importlib_resources/tests/test_compatibilty_files.py,sha256=95N_R7aik8cvnE6sBJpsxmP0K5plOWRIJDgbalD-Hpw,3314
importlib_resources/tests/test_contents.py,sha256=V1Xfk3lqTDdvUsZuV18Kndf0CT_tkM2oEIwk9Vv0rhg,968
importlib_resources/tests/test_custom.py,sha256=jVYg9idEVdUN6idHUfDDlZ-zDWl56qYNbj5QrcZO76Y,1124
importlib_resources/tests/test_files.py,sha256=W5XoBWSTr84Ke15UtjqWLet2iUDUyJfQxbST4PDlj2w,3283
importlib_resources/tests/test_open.py,sha256=9qvdC6Eu2Kn3mh3xDR5HUEQoePSKIecTxU4vnH9veO8,2671
importlib_resources/tests/test_path.py,sha256=XR5RI7_zndI_Nqw9eHU1tDmSGIo29N1GP8INodPc584,2142
importlib_resources/tests/test_read.py,sha256=BYdRqZEEJE17NHPArpZW9VsIwMlna1BpHyWkgCvEKWk,2512
importlib_resources/tests/test_reader.py,sha256=YS1RHDzSIo7Dy3AhoK7sY-cFWIFnfkMNfQR3xlXsgio,4990
importlib_resources/tests/test_resource.py,sha256=cPHz7VLwq6bFznZ-JDYE3f_4VJthQztRHKhiA9SriT0,8270
importlib_resources/tests/update-zips.py,sha256=x-SrO5v87iLLUMXyefxDwAd3imAs_slI94sLWvJ6N40,1417
importlib_resources/tests/util.py,sha256=TQz12vSkHNjGlF3hB0OR4kx2sCR-xcj0wI2esDyHR9I,5001
importlib_resources/tests/data01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
importlib_resources/tests/data01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
importlib_resources/tests/data01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
importlib_resources/tests/data01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
importlib_resources/tests/data01/subdirectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
importlib_resources/tests/data01/subdirectory/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
importlib_resources/tests/data02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
importlib_resources/tests/data02/one/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
importlib_resources/tests/data02/one/resource1.txt,sha256=10flKac7c-XXFzJ3t-AB5MJjlBy__dSZvPE_dOm2q6U,13
importlib_resources/tests/data02/subdirectory/subsubdir/resource.txt,sha256=jnrBBztxYrtQck7cmVnc4xQVO4-agzAZDGSFkAWtlFw,10
importlib_resources/tests/data02/two/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
importlib_resources/tests/data02/two/resource2.txt,sha256=lt2jbN3TMn9QiFKM832X39bU_62UptDdUkoYzkvEbl0,13
importlib_resources/tests/namespacedata01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
importlib_resources/tests/namespacedata01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
importlib_resources/tests/namespacedata01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
importlib_resources/tests/zipdata01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
importlib_resources/tests/zipdata01/ziptestdata.zip,sha256=z5Of4dsv3T0t-46B0MsVhxlhsPGMz28aUhJDWpj3_oY,876
importlib_resources/tests/zipdata02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
importlib_resources/tests/zipdata02/ziptestdata.zip,sha256=ydI-_j-xgQ7tDxqBp9cjOqXBGxUp6ZBbwVJu6Xj-nrY,698
importlib_resources-5.12.0.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
importlib_resources-5.12.0.dist-info/METADATA,sha256=uEY10nhKI-5nXImnXgsNt7BDYf7u2Qw8-BO2K2hmlJA,4111
importlib_resources-5.12.0.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
importlib_resources-5.12.0.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20
importlib_resources-5.12.0.dist-info/RECORD,,
@@ -1,6 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.33.6)
+Generator: bdist_wheel (0.38.4)
 Root-Is-Purelib: true
-Tag: py2-none-any
 Tag: py3-none-any
 
1
third_party/python/importlib_resources/importlib_resources-5.12.0.dist-info/top_level.txt
vendored
Normal file
@@ -0,0 +1 @@
importlib_resources
36
third_party/python/importlib_resources/importlib_resources/__init__.py
vendored
Normal file
@@ -0,0 +1,36 @@
"""Read resources contained within a package."""

from ._common import (
    as_file,
    files,
    Package,
)

from ._legacy import (
    contents,
    open_binary,
    read_binary,
    open_text,
    read_text,
    is_resource,
    path,
    Resource,
)

from .abc import ResourceReader


__all__ = [
    'Package',
    'Resource',
    'ResourceReader',
    'as_file',
    'contents',
    'files',
    'is_resource',
    'open_binary',
    'open_text',
    'path',
    'read_binary',
    'read_text',
]
168
third_party/python/importlib_resources/importlib_resources/_adapters.py
vendored
Normal file
@@ -0,0 +1,168 @@
from contextlib import suppress
from io import TextIOWrapper

from . import abc


class SpecLoaderAdapter:
    """
    Adapt a package spec to adapt the underlying loader.
    """

    def __init__(self, spec, adapter=lambda spec: spec.loader):
        self.spec = spec
        self.loader = adapter(spec)

    def __getattr__(self, name):
        return getattr(self.spec, name)


class TraversableResourcesLoader:
    """
    Adapt a loader to provide TraversableResources.
    """

    def __init__(self, spec):
        self.spec = spec

    def get_resource_reader(self, name):
        return CompatibilityFiles(self.spec)._native()


def _io_wrapper(file, mode='r', *args, **kwargs):
    if mode == 'r':
        return TextIOWrapper(file, *args, **kwargs)
    elif mode == 'rb':
        return file
    raise ValueError(f"Invalid mode value '{mode}', only 'r' and 'rb' are supported")


class CompatibilityFiles:
    """
    Adapter for an existing or non-existent resource reader
    to provide a compatibility .files().
    """

    class SpecPath(abc.Traversable):
        """
        Path tied to a module spec.
        Can be read and exposes the resource reader children.
        """

        def __init__(self, spec, reader):
            self._spec = spec
            self._reader = reader

        def iterdir(self):
            if not self._reader:
                return iter(())
            return iter(
                CompatibilityFiles.ChildPath(self._reader, path)
                for path in self._reader.contents()
            )

        def is_file(self):
            return False

        is_dir = is_file

        def joinpath(self, other):
            if not self._reader:
                return CompatibilityFiles.OrphanPath(other)
            return CompatibilityFiles.ChildPath(self._reader, other)

        @property
        def name(self):
            return self._spec.name

        def open(self, mode='r', *args, **kwargs):
            return _io_wrapper(self._reader.open_resource(None), mode, *args, **kwargs)

    class ChildPath(abc.Traversable):
        """
        Path tied to a resource reader child.
        Can be read but doesn't expose any meaningful children.
        """

        def __init__(self, reader, name):
            self._reader = reader
            self._name = name

        def iterdir(self):
            return iter(())

        def is_file(self):
            return self._reader.is_resource(self.name)

        def is_dir(self):
            return not self.is_file()

        def joinpath(self, other):
            return CompatibilityFiles.OrphanPath(self.name, other)

        @property
        def name(self):
            return self._name

        def open(self, mode='r', *args, **kwargs):
            return _io_wrapper(
                self._reader.open_resource(self.name), mode, *args, **kwargs
            )

    class OrphanPath(abc.Traversable):
        """
        Orphan path, not tied to a module spec or resource reader.
        Can't be read and doesn't expose any meaningful children.
        """

        def __init__(self, *path_parts):
            if len(path_parts) < 1:
                raise ValueError('Need at least one path part to construct a path')
            self._path = path_parts

        def iterdir(self):
            return iter(())

        def is_file(self):
            return False

        is_dir = is_file

        def joinpath(self, other):
            return CompatibilityFiles.OrphanPath(*self._path, other)

        @property
        def name(self):
            return self._path[-1]

        def open(self, mode='r', *args, **kwargs):
            raise FileNotFoundError("Can't open orphan path")

    def __init__(self, spec):
        self.spec = spec

    @property
    def _reader(self):
        with suppress(AttributeError):
            return self.spec.loader.get_resource_reader(self.spec.name)

    def _native(self):
        """
        Return the native reader if it supports files().
        """
        reader = self._reader
        return reader if hasattr(reader, 'files') else self

    def __getattr__(self, attr):
        return getattr(self._reader, attr)

    def files(self):
        return CompatibilityFiles.SpecPath(self.spec, self._reader)


def wrap_spec(package):
    """
    Construct a package spec with traversable compatibility
    on the spec/loader/reader.
    """
    return SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader)
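Editor's note: a small sketch, not part of the commit, of the adapter chain wrap_spec sets up, mirroring how _common.from_package (in the next file) uses it; 'email' is just an example stdlib package:

    from importlib_resources._adapters import wrap_spec
    import email

    spec = wrap_spec(email)  # SpecLoaderAdapter over email.__spec__
    reader = spec.loader.get_resource_reader(spec.name)
    print(reader.files())    # a Traversable rooted at the email package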
207
third_party/python/importlib_resources/importlib_resources/_common.py
vendored
Normal file
@@ -0,0 +1,207 @@
|
|||||||
|
import os
import pathlib
import tempfile
import functools
import contextlib
import types
import importlib
import inspect
import warnings
import itertools

from typing import Union, Optional, cast
from .abc import ResourceReader, Traversable

from ._compat import wrap_spec

Package = Union[types.ModuleType, str]
Anchor = Package


def package_to_anchor(func):
    """
    Replace 'package' parameter as 'anchor' and warn about the change.

    Other errors should fall through.

    >>> files('a', 'b')
    Traceback (most recent call last):
    TypeError: files() takes from 0 to 1 positional arguments but 2 were given
    """
    undefined = object()

    @functools.wraps(func)
    def wrapper(anchor=undefined, package=undefined):
        if package is not undefined:
            if anchor is not undefined:
                return func(anchor, package)
            warnings.warn(
                "First parameter to files is renamed to 'anchor'",
                DeprecationWarning,
                stacklevel=2,
            )
            return func(package)
        elif anchor is undefined:
            return func()
        return func(anchor)

    return wrapper


@package_to_anchor
def files(anchor: Optional[Anchor] = None) -> Traversable:
    """
    Get a Traversable resource for an anchor.
    """
    return from_package(resolve(anchor))


def get_resource_reader(package: types.ModuleType) -> Optional[ResourceReader]:
    """
    Return the package's loader if it's a ResourceReader.
    """
    # We can't use
    # a issubclass() check here because apparently abc.'s __subclasscheck__()
    # hook wants to create a weak reference to the object, but
    # zipimport.zipimporter does not support weak references, resulting in a
    # TypeError.  That seems terrible.
    spec = package.__spec__
    reader = getattr(spec.loader, 'get_resource_reader', None)  # type: ignore
    if reader is None:
        return None
    return reader(spec.name)  # type: ignore


@functools.singledispatch
def resolve(cand: Optional[Anchor]) -> types.ModuleType:
    return cast(types.ModuleType, cand)


@resolve.register
def _(cand: str) -> types.ModuleType:
    return importlib.import_module(cand)


@resolve.register
def _(cand: None) -> types.ModuleType:
    return resolve(_infer_caller().f_globals['__name__'])


def _infer_caller():
    """
    Walk the stack and find the frame of the first caller not in this module.
    """

    def is_this_file(frame_info):
        return frame_info.filename == __file__

    def is_wrapper(frame_info):
        return frame_info.function == 'wrapper'

    not_this_file = itertools.filterfalse(is_this_file, inspect.stack())
    # also exclude 'wrapper' due to singledispatch in the call stack
    callers = itertools.filterfalse(is_wrapper, not_this_file)
    return next(callers).frame


def from_package(package: types.ModuleType):
    """
    Return a Traversable object for the given package.
    """
    spec = wrap_spec(package)
    reader = spec.loader.get_resource_reader(spec.name)
    return reader.files()


@contextlib.contextmanager
def _tempfile(
    reader,
    suffix='',
    # gh-93353: Keep a reference to call os.remove() in late Python
    # finalization.
    *,
    _os_remove=os.remove,
):
    # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try'
    # blocks due to the need to close the temporary file to work on Windows
    # properly.
    fd, raw_path = tempfile.mkstemp(suffix=suffix)
    try:
        try:
            os.write(fd, reader())
        finally:
            os.close(fd)
        del reader
        yield pathlib.Path(raw_path)
    finally:
        try:
            _os_remove(raw_path)
        except FileNotFoundError:
            pass


def _temp_file(path):
    return _tempfile(path.read_bytes, suffix=path.name)


def _is_present_dir(path: Traversable) -> bool:
    """
    Some Traversables implement ``is_dir()`` to raise an
    exception (i.e. ``FileNotFoundError``) when the
    directory doesn't exist. This function wraps that call
    to always return a boolean and only return True
    if there's a dir and it exists.
    """
    with contextlib.suppress(FileNotFoundError):
        return path.is_dir()
    return False


@functools.singledispatch
def as_file(path):
    """
    Given a Traversable object, return that object as a
    path on the local file system in a context manager.
    """
    return _temp_dir(path) if _is_present_dir(path) else _temp_file(path)


@as_file.register(pathlib.Path)
@contextlib.contextmanager
def _(path):
    """
    Degenerate behavior for pathlib.Path objects.
    """
    yield path


@contextlib.contextmanager
def _temp_path(dir: tempfile.TemporaryDirectory):
    """
    Wrap tempfile.TemporaryDirectory to return a pathlib object.
    """
    with dir as result:
        yield pathlib.Path(result)


@contextlib.contextmanager
def _temp_dir(path):
    """
    Given a traversable dir, recursively replicate the whole tree
    to the file system in a context manager.
    """
    assert path.is_dir()
    with _temp_path(tempfile.TemporaryDirectory()) as temp_dir:
        yield _write_contents(temp_dir, path)


def _write_contents(target, source):
    child = target.joinpath(source.name)
    if source.is_dir():
        child.mkdir()
        for item in source.iterdir():
            _write_contents(child, item)
    else:
        child.write_bytes(source.read_bytes())
    return child
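
The two public entry points above compose: files() resolves an anchor to a Traversable, and as_file() materializes it on the real file system when a concrete path is required. A minimal usage sketch follows; "mypkg" and "data.json" are hypothetical names, not part of this commit:

    from importlib_resources import files, as_file

    # Read a bundled resource directly, wherever the package lives.
    text = files("mypkg").joinpath("data.json").read_text(encoding="utf-8")

    # Obtain a real filesystem path (a temporary copy if "mypkg" is zipped);
    # the copy is cleaned up when the context manager exits.
    with as_file(files("mypkg") / "data.json") as path:
        print(path)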
109
third_party/python/importlib_resources/importlib_resources/_compat.py
vendored
Normal file
@@ -0,0 +1,109 @@
# flake8: noqa

import abc
import os
import sys
import pathlib
from contextlib import suppress
from typing import Union


if sys.version_info >= (3, 10):
    from zipfile import Path as ZipPath  # type: ignore
else:
    from zipp import Path as ZipPath  # type: ignore


try:
    from typing import runtime_checkable  # type: ignore
except ImportError:

    def runtime_checkable(cls):  # type: ignore
        return cls


try:
    from typing import Protocol  # type: ignore
except ImportError:
    Protocol = abc.ABC  # type: ignore


class TraversableResourcesLoader:
    """
    Adapt loaders to provide TraversableResources and other
    compatibility.

    Used primarily for Python 3.9 and earlier where the native
    loaders do not yet implement TraversableResources.
    """

    def __init__(self, spec):
        self.spec = spec

    @property
    def path(self):
        return self.spec.origin

    def get_resource_reader(self, name):
        from . import readers, _adapters

        def _zip_reader(spec):
            with suppress(AttributeError):
                return readers.ZipReader(spec.loader, spec.name)

        def _namespace_reader(spec):
            with suppress(AttributeError, ValueError):
                return readers.NamespaceReader(spec.submodule_search_locations)

        def _available_reader(spec):
            with suppress(AttributeError):
                return spec.loader.get_resource_reader(spec.name)

        def _native_reader(spec):
            reader = _available_reader(spec)
            return reader if hasattr(reader, 'files') else None

        def _file_reader(spec):
            try:
                path = pathlib.Path(self.path)
            except TypeError:
                return None
            if path.exists():
                return readers.FileReader(self)

        return (
            # local ZipReader if a zip module
            _zip_reader(self.spec)
            or
            # local NamespaceReader if a namespace module
            _namespace_reader(self.spec)
            or
            # local FileReader
            _file_reader(self.spec)
            or
            # native reader if it supplies 'files'
            _native_reader(self.spec)
            or
            # fallback - adapt the spec ResourceReader to TraversableReader
            _adapters.CompatibilityFiles(self.spec)
        )


def wrap_spec(package):
    """
    Construct a package spec with traversable compatibility
    on the spec/loader/reader.

    Supersedes _adapters.wrap_spec to use TraversableResourcesLoader
    from above for older Python compatibility (<3.10).
    """
    from . import _adapters

    return _adapters.SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader)


if sys.version_info >= (3, 9):
    StrPath = Union[str, os.PathLike[str]]
else:
    # PathLike is only subscriptable at runtime in 3.9+
    StrPath = Union[str, "os.PathLike[str]"]
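
For reference, wrap_spec() above is the hook that guarantees a files()-capable reader on older interpreters. A hypothetical probe of the fallback chain (not part of the vendored file, and it touches a private module, so only a sketch) might look like:

    import email  # any importable, on-disk package will do
    from importlib_resources._compat import wrap_spec

    spec = wrap_spec(email)
    reader = spec.loader.get_resource_reader(spec.name)
    print(type(reader).__name__)  # e.g. FileReader for a regular on-disk package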
38
third_party/python/importlib_resources/importlib_resources/_itertools.py
vendored
Normal file
@@ -0,0 +1,38 @@
# from more_itertools 9.0
def only(iterable, default=None, too_long=None):
    """If *iterable* has only one item, return it.
    If it has zero items, return *default*.
    If it has more than one item, raise the exception given by *too_long*,
    which is ``ValueError`` by default.
    >>> only([], default='missing')
    'missing'
    >>> only([1])
    1
    >>> only([1, 2])  # doctest: +IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
    ...
    ValueError: Expected exactly one item in iterable, but got 1, 2,
     and perhaps more.'
    >>> only([1, 2], too_long=TypeError)  # doctest: +IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
    ...
    TypeError
    Note that :func:`only` attempts to advance *iterable* twice to ensure there
    is only one item.  See :func:`spy` or :func:`peekable` to check
    iterable contents less destructively.
    """
    it = iter(iterable)
    first_value = next(it, default)

    try:
        second_value = next(it)
    except StopIteration:
        pass
    else:
        msg = (
            'Expected exactly one item in iterable, but got {!r}, {!r}, '
            'and perhaps more.'.format(first_value, second_value)
        )
        raise too_long or ValueError(msg)

    return first_value
120
third_party/python/importlib_resources/importlib_resources/_legacy.py
vendored
Normal file
@@ -0,0 +1,120 @@
import functools
import os
import pathlib
import types
import warnings

from typing import Union, Iterable, ContextManager, BinaryIO, TextIO, Any

from . import _common

Package = Union[types.ModuleType, str]
Resource = str


def deprecated(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        warnings.warn(
            f"{func.__name__} is deprecated. Use files() instead. "
            "Refer to https://importlib-resources.readthedocs.io"
            "/en/latest/using.html#migrating-from-legacy for migration advice.",
            DeprecationWarning,
            stacklevel=2,
        )
        return func(*args, **kwargs)

    return wrapper


def normalize_path(path: Any) -> str:
    """Normalize a path by ensuring it is a string.

    If the resulting string contains path separators, an exception is raised.
    """
    str_path = str(path)
    parent, file_name = os.path.split(str_path)
    if parent:
        raise ValueError(f'{path!r} must be only a file name')
    return file_name


@deprecated
def open_binary(package: Package, resource: Resource) -> BinaryIO:
    """Return a file-like object opened for binary reading of the resource."""
    return (_common.files(package) / normalize_path(resource)).open('rb')


@deprecated
def read_binary(package: Package, resource: Resource) -> bytes:
    """Return the binary contents of the resource."""
    return (_common.files(package) / normalize_path(resource)).read_bytes()


@deprecated
def open_text(
    package: Package,
    resource: Resource,
    encoding: str = 'utf-8',
    errors: str = 'strict',
) -> TextIO:
    """Return a file-like object opened for text reading of the resource."""
    return (_common.files(package) / normalize_path(resource)).open(
        'r', encoding=encoding, errors=errors
    )


@deprecated
def read_text(
    package: Package,
    resource: Resource,
    encoding: str = 'utf-8',
    errors: str = 'strict',
) -> str:
    """Return the decoded string of the resource.

    The decoding-related arguments have the same semantics as those of
    bytes.decode().
    """
    with open_text(package, resource, encoding, errors) as fp:
        return fp.read()


@deprecated
def contents(package: Package) -> Iterable[str]:
    """Return an iterable of entries in `package`.

    Note that not all entries are resources. Specifically, directories are
    not considered resources. Use `is_resource()` on each entry returned here
    to check if it is a resource or not.
    """
    return [path.name for path in _common.files(package).iterdir()]


@deprecated
def is_resource(package: Package, name: str) -> bool:
    """True if `name` is a resource inside `package`.

    Directories are *not* resources.
    """
    resource = normalize_path(name)
    return any(
        traversable.name == resource and traversable.is_file()
        for traversable in _common.files(package).iterdir()
    )


@deprecated
def path(
    package: Package,
    resource: Resource,
) -> ContextManager[pathlib.Path]:
    """A context manager providing a file path object to the resource.

    If the resource does not already exist on its own on the file system,
    a temporary file will be created. If the file was created, the file
    will be deleted upon exiting the context manager (no exception is
    raised if the file was deleted prior to the context manager
    exiting).
    """
    return _common.as_file(_common.files(package) / normalize_path(resource))
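
Every helper in this module emits the same DeprecationWarning pointing at files(), and the mapping is mechanical. A hedged migration sketch, with "mypkg" and "data.json" as placeholder names:

    import importlib_resources as resources

    # was: resources.read_text("mypkg", "data.json")
    text = resources.files("mypkg").joinpath("data.json").read_text(encoding="utf-8")

    # was: resources.is_resource("mypkg", "data.json")
    is_file = resources.files("mypkg").joinpath("data.json").is_file()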
170
third_party/python/importlib_resources/importlib_resources/abc.py
vendored
Normal file
@@ -0,0 +1,170 @@
import abc
import io
import itertools
import pathlib
from typing import Any, BinaryIO, Iterable, Iterator, NoReturn, Text, Optional

from ._compat import runtime_checkable, Protocol, StrPath


__all__ = ["ResourceReader", "Traversable", "TraversableResources"]


class ResourceReader(metaclass=abc.ABCMeta):
    """Abstract base class for loaders to provide resource reading support."""

    @abc.abstractmethod
    def open_resource(self, resource: Text) -> BinaryIO:
        """Return an opened, file-like object for binary reading.

        The 'resource' argument is expected to represent only a file name.
        If the resource cannot be found, FileNotFoundError is raised.
        """
        # This deliberately raises FileNotFoundError instead of
        # NotImplementedError so that if this method is accidentally called,
        # it'll still do the right thing.
        raise FileNotFoundError

    @abc.abstractmethod
    def resource_path(self, resource: Text) -> Text:
        """Return the file system path to the specified resource.

        The 'resource' argument is expected to represent only a file name.
        If the resource does not exist on the file system, raise
        FileNotFoundError.
        """
        # This deliberately raises FileNotFoundError instead of
        # NotImplementedError so that if this method is accidentally called,
        # it'll still do the right thing.
        raise FileNotFoundError

    @abc.abstractmethod
    def is_resource(self, path: Text) -> bool:
        """Return True if the named 'path' is a resource.

        Files are resources, directories are not.
        """
        raise FileNotFoundError

    @abc.abstractmethod
    def contents(self) -> Iterable[str]:
        """Return an iterable of entries in `package`."""
        raise FileNotFoundError


class TraversalError(Exception):
    pass


@runtime_checkable
class Traversable(Protocol):
    """
    An object with a subset of pathlib.Path methods suitable for
    traversing directories and opening files.

    Any exceptions that occur when accessing the backing resource
    may propagate unaltered.
    """

    @abc.abstractmethod
    def iterdir(self) -> Iterator["Traversable"]:
        """
        Yield Traversable objects in self
        """

    def read_bytes(self) -> bytes:
        """
        Read contents of self as bytes
        """
        with self.open('rb') as strm:
            return strm.read()

    def read_text(self, encoding: Optional[str] = None) -> str:
        """
        Read contents of self as text
        """
        with self.open(encoding=encoding) as strm:
            return strm.read()

    @abc.abstractmethod
    def is_dir(self) -> bool:
        """
        Return True if self is a directory
        """

    @abc.abstractmethod
    def is_file(self) -> bool:
        """
        Return True if self is a file
        """

    def joinpath(self, *descendants: StrPath) -> "Traversable":
        """
        Return Traversable resolved with any descendants applied.

        Each descendant should be a path segment relative to self
        and each may contain multiple levels separated by
        ``posixpath.sep`` (``/``).
        """
        if not descendants:
            return self
        names = itertools.chain.from_iterable(
            path.parts for path in map(pathlib.PurePosixPath, descendants)
        )
        target = next(names)
        matches = (
            traversable for traversable in self.iterdir() if traversable.name == target
        )
        try:
            match = next(matches)
        except StopIteration:
            raise TraversalError(
                "Target not found during traversal.", target, list(names)
            )
        return match.joinpath(*names)

    def __truediv__(self, child: StrPath) -> "Traversable":
        """
        Return Traversable child in self
        """
        return self.joinpath(child)

    @abc.abstractmethod
    def open(self, mode='r', *args, **kwargs):
        """
        mode may be 'r' or 'rb' to open as text or binary. Return a handle
        suitable for reading (same as pathlib.Path.open).

        When opening as text, accepts encoding parameters such as those
        accepted by io.TextIOWrapper.
        """

    @property
    @abc.abstractmethod
    def name(self) -> str:
        """
        The base name of this object without any parent references.
        """


class TraversableResources(ResourceReader):
    """
    The required interface for providing traversable
    resources.
    """

    @abc.abstractmethod
    def files(self) -> "Traversable":
        """Return a Traversable object for the loaded package."""

    def open_resource(self, resource: StrPath) -> io.BufferedReader:
        return self.files().joinpath(resource).open('rb')

    def resource_path(self, resource: Any) -> NoReturn:
        raise FileNotFoundError(resource)

    def is_resource(self, path: StrPath) -> bool:
        return self.files().joinpath(path).is_file()

    def contents(self) -> Iterator[str]:
        return (item.name for item in self.files().iterdir())
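
Because TraversableResources derives open_resource, is_resource and contents from files(), a concrete reader only has to supply that one method. A minimal sketch (not from this commit) backed by pathlib.Path, which already satisfies the Traversable protocol:

    import pathlib
    from importlib_resources.abc import TraversableResources

    class DirectoryReader(TraversableResources):
        def __init__(self, root: pathlib.Path):
            self._root = root

        def files(self):
            return self._root  # pathlib.Path provides iterdir/joinpath/open/...

    reader = DirectoryReader(pathlib.Path("."))
    print(list(reader.contents()))  # entry names under the directory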
0
third_party/python/importlib_resources/importlib_resources/py.typed
vendored
Normal file
144
third_party/python/importlib_resources/importlib_resources/readers.py
vendored
Normal file
@@ -0,0 +1,144 @@
import collections
import itertools
import pathlib
import operator

from . import abc

from ._itertools import only
from ._compat import ZipPath


def remove_duplicates(items):
    return iter(collections.OrderedDict.fromkeys(items))


class FileReader(abc.TraversableResources):
    def __init__(self, loader):
        self.path = pathlib.Path(loader.path).parent

    def resource_path(self, resource):
        """
        Return the file system path to prevent
        `resources.path()` from creating a temporary
        copy.
        """
        return str(self.path.joinpath(resource))

    def files(self):
        return self.path


class ZipReader(abc.TraversableResources):
    def __init__(self, loader, module):
        _, _, name = module.rpartition('.')
        self.prefix = loader.prefix.replace('\\', '/') + name + '/'
        self.archive = loader.archive

    def open_resource(self, resource):
        try:
            return super().open_resource(resource)
        except KeyError as exc:
            raise FileNotFoundError(exc.args[0])

    def is_resource(self, path):
        """
        Workaround for `zipfile.Path.is_file` returning true
        for non-existent paths.
        """
        target = self.files().joinpath(path)
        return target.is_file() and target.exists()

    def files(self):
        return ZipPath(self.archive, self.prefix)


class MultiplexedPath(abc.Traversable):
    """
    Given a series of Traversable objects, implement a merged
    version of the interface across all objects. Useful for
    namespace packages which may be multihomed at a single
    name.
    """

    def __init__(self, *paths):
        self._paths = list(map(pathlib.Path, remove_duplicates(paths)))
        if not self._paths:
            message = 'MultiplexedPath must contain at least one path'
            raise FileNotFoundError(message)
        if not all(path.is_dir() for path in self._paths):
            raise NotADirectoryError('MultiplexedPath only supports directories')

    def iterdir(self):
        children = (child for path in self._paths for child in path.iterdir())
        by_name = operator.attrgetter('name')
        groups = itertools.groupby(sorted(children, key=by_name), key=by_name)
        return map(self._follow, (locs for name, locs in groups))

    def read_bytes(self):
        raise FileNotFoundError(f'{self} is not a file')

    def read_text(self, *args, **kwargs):
        raise FileNotFoundError(f'{self} is not a file')

    def is_dir(self):
        return True

    def is_file(self):
        return False

    def joinpath(self, *descendants):
        try:
            return super().joinpath(*descendants)
        except abc.TraversalError:
            # One of the paths did not resolve (a directory does not exist).
            # Just return something that will not exist.
            return self._paths[0].joinpath(*descendants)

    @classmethod
    def _follow(cls, children):
        """
        Construct a MultiplexedPath if needed.

        If children contains a sole element, return it.
        Otherwise, return a MultiplexedPath of the items.
        Unless one of the items is not a Directory, then return the first.
        """
        subdirs, one_dir, one_file = itertools.tee(children, 3)

        try:
            return only(one_dir)
        except ValueError:
            try:
                return cls(*subdirs)
            except NotADirectoryError:
                return next(one_file)

    def open(self, *args, **kwargs):
        raise FileNotFoundError(f'{self} is not a file')

    @property
    def name(self):
        return self._paths[0].name

    def __repr__(self):
        paths = ', '.join(f"'{path}'" for path in self._paths)
        return f'MultiplexedPath({paths})'


class NamespaceReader(abc.TraversableResources):
    def __init__(self, namespace_path):
        if 'NamespacePath' not in str(namespace_path):
            raise ValueError('Invalid path')
        self.path = MultiplexedPath(*list(namespace_path))

    def resource_path(self, resource):
        """
        Return the file system path to prevent
        `resources.path()` from creating a temporary
        copy.
        """
        return str(self.path.joinpath(resource))

    def files(self):
        return self.path
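
A hedged sketch of MultiplexedPath in isolation (not part of the vendored file); the temporary directories stand in for the multiple __path__ entries of a namespace package, and both inputs must be existing directories:

    import pathlib
    import tempfile
    from importlib_resources.readers import MultiplexedPath

    a = pathlib.Path(tempfile.mkdtemp())
    b = pathlib.Path(tempfile.mkdtemp())
    (a / "x.txt").write_text("from a")
    (b / "y.txt").write_text("from b")

    merged = MultiplexedPath(a, b)
    print(sorted(child.name for child in merged.iterdir()))  # ['x.txt', 'y.txt']
    print((merged / "y.txt").read_text())                    # 'from b'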
106
third_party/python/importlib_resources/importlib_resources/simple.py
vendored
Normal file
@@ -0,0 +1,106 @@
"""
|
||||||
|
Interface adapters for low-level readers.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import abc
|
||||||
|
import io
|
||||||
|
import itertools
|
||||||
|
from typing import BinaryIO, List
|
||||||
|
|
||||||
|
from .abc import Traversable, TraversableResources
|
||||||
|
|
||||||
|
|
||||||
|
class SimpleReader(abc.ABC):
|
||||||
|
"""
|
||||||
|
The minimum, low-level interface required from a resource
|
||||||
|
provider.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@property
|
||||||
|
@abc.abstractmethod
|
||||||
|
def package(self) -> str:
|
||||||
|
"""
|
||||||
|
The name of the package for which this reader loads resources.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def children(self) -> List['SimpleReader']:
|
||||||
|
"""
|
||||||
|
Obtain an iterable of SimpleReader for available
|
||||||
|
child containers (e.g. directories).
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def resources(self) -> List[str]:
|
||||||
|
"""
|
||||||
|
Obtain available named resources for this virtual package.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def open_binary(self, resource: str) -> BinaryIO:
|
||||||
|
"""
|
||||||
|
Obtain a File-like for a named resource.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@property
|
||||||
|
def name(self):
|
||||||
|
return self.package.split('.')[-1]
|
||||||
|
|
||||||
|
|
||||||
|
class ResourceContainer(Traversable):
|
||||||
|
"""
|
||||||
|
Traversable container for a package's resources via its reader.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, reader: SimpleReader):
|
||||||
|
self.reader = reader
|
||||||
|
|
||||||
|
def is_dir(self):
|
||||||
|
return True
|
||||||
|
|
||||||
|
def is_file(self):
|
||||||
|
return False
|
||||||
|
|
||||||
|
def iterdir(self):
|
||||||
|
files = (ResourceHandle(self, name) for name in self.reader.resources)
|
||||||
|
dirs = map(ResourceContainer, self.reader.children())
|
||||||
|
return itertools.chain(files, dirs)
|
||||||
|
|
||||||
|
def open(self, *args, **kwargs):
|
||||||
|
raise IsADirectoryError()
|
||||||
|
|
||||||
|
|
||||||
|
class ResourceHandle(Traversable):
|
||||||
|
"""
|
||||||
|
Handle to a named resource in a ResourceReader.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, parent: ResourceContainer, name: str):
|
||||||
|
self.parent = parent
|
||||||
|
self.name = name # type: ignore
|
||||||
|
|
||||||
|
def is_file(self):
|
||||||
|
return True
|
||||||
|
|
||||||
|
def is_dir(self):
|
||||||
|
return False
|
||||||
|
|
||||||
|
def open(self, mode='r', *args, **kwargs):
|
||||||
|
stream = self.parent.reader.open_binary(self.name)
|
||||||
|
if 'b' not in mode:
|
||||||
|
stream = io.TextIOWrapper(*args, **kwargs)
|
||||||
|
return stream
|
||||||
|
|
||||||
|
def joinpath(self, name):
|
||||||
|
raise RuntimeError("Cannot traverse into a resource")
|
||||||
|
|
||||||
|
|
||||||
|
class TraversableReader(TraversableResources, SimpleReader):
|
||||||
|
"""
|
||||||
|
A TraversableResources based on SimpleReader. Resource providers
|
||||||
|
may derive from this class to provide the TraversableResources
|
||||||
|
interface by supplying the SimpleReader interface.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def files(self):
|
||||||
|
return ResourceContainer(self)
@@ -1,34 +0,0 @@
-jsonschema/__init__.py,sha256=dHAr_pQLbbDFoRnbVMrQVztVUvnBFgFlm7bU82pMvOk,934
-jsonschema/__main__.py,sha256=in4bbzfixCAyGe3RhBwhQVZnGkruszNedcbmwxGyJgc,39
-jsonschema/_format.py,sha256=vwD1v7S8BmJvSF5y0o6dbPgjAyzt07PZpyO3pvNVVgQ,11691
-jsonschema/_legacy_validators.py,sha256=kYcYiHfRV-aQtIQv2qe_71L3QFs3LiJ3v69ifteAN4E,4584
-jsonschema/_reflect.py,sha256=gggQrcrf5FRoyhgdE6ggJ4n2FQHEzWS4CS-cm9bYcqI,5023
-jsonschema/_types.py,sha256=t2naRRhuTKIUIB0GMR9kOp2la2aVqeT2tFlVAobndmg,4490
-jsonschema/_utils.py,sha256=ezZJMQ0eU4oPvkTmZi6g5vsCtIFRhb8vN4Y9S4uQwW8,5168
-jsonschema/_validators.py,sha256=UDYawpxK8f_rIeEBXZtwr0tlxi3OH1Zt2ca0zAxjNdk,11703
-jsonschema/cli.py,sha256=3Vc8ptc2GD7zDxK2F-kamqmrE9f35a2KVDGR1p1acUU,2310
-jsonschema/compat.py,sha256=37gSA8MmAR65zlqzsSEB-0ObZk_I2TF7z1kp9zmkskg,1353
-jsonschema/exceptions.py,sha256=ukWIE7aEES8Kh0UaUP9turpUkV2ZzXEN8CwfRObzlMA,10450
-jsonschema/validators.py,sha256=RIZTQyZxhWwsyIIRFQGEjLzq38LlyzzzdYUl9jxzV0M,29400
-jsonschema/benchmarks/__init__.py,sha256=A0sQrxDBVHSyQ-8ru3L11hMXf3q9gVuB9x_YgHb4R9M,70
-jsonschema/benchmarks/issue232.py,sha256=-azAUmrP75f0uj0x2zEdBc3-DhQw3XX9UQVDCyhBKRk,541
-jsonschema/benchmarks/json_schema_test_suite.py,sha256=okRE6ACue2C0Hd1dMhnpZ0bc3AoZdDd8cw2lwTnbzwU,343
-jsonschema/schemas/draft3.json,sha256=PdtCu2s06Va3hV9cX5A5-rvye50SVF__NrvxG0vuzz0,4564
-jsonschema/schemas/draft4.json,sha256=ODL-0W3kv7MmqL3tm3GJguuVxN1QNO1GtBcpWE3ok98,5399
-jsonschema/schemas/draft6.json,sha256=wp386fVINcOgbAOzxdXsDtp3cGVo-cTffPvHVmpRAG0,4437
-jsonschema/schemas/draft7.json,sha256=PVOSCIJhYGxVm2A_OFMpyfGrRbXWZ-uZBodFOwVdQF4,4819
-jsonschema/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-jsonschema/tests/_helpers.py,sha256=3c-b9CK0cdGfhtuUhzM1AjtqPtR2VFvfcKC6G2g0a-0,157
-jsonschema/tests/_suite.py,sha256=6lxDHOyjJfCjdn9vfOLcUpXtNl0vLIljrinSFi1tRhc,6728
-jsonschema/tests/test_cli.py,sha256=djw7ZD6zm5_8FgsAr9XyYk4zErIEoPRs8SzBe5nYcWY,4727
-jsonschema/tests/test_exceptions.py,sha256=zw9bd_al5zOzAm8nJ0IqeymiweH6i8k1AN3CB7t618A,15348
-jsonschema/tests/test_format.py,sha256=ob0QDop_nwRwiLs1P6sGsf6ZITik00CWhe1pL8JRiA0,2982
-jsonschema/tests/test_jsonschema_test_suite.py,sha256=8uiplgvQq5yFvtvWxbyqyr7HMYRCx6jNE3OiU-u8AEk,8464
-jsonschema/tests/test_types.py,sha256=lntWPZ86fwo_aNKbfCueX5R2xdwrYYN7Zo5C0-ppk-0,5902
-jsonschema/tests/test_validators.py,sha256=R_zhsDKG5r66LE1OVlzdcPyKRWKgc07e6NVWxQkrRiQ,60394
-jsonschema-3.2.0.dist-info/COPYING,sha256=T5KgFaE8TRoEC-8BiqE0MLTxvHO0Gxa7hGw0Z2bedDk,1057
-jsonschema-3.2.0.dist-info/METADATA,sha256=os_TL7tiSfPYDMKYoAqoNsw_yMkDJmCL2bqhp-csNR0,7760
-jsonschema-3.2.0.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110
-jsonschema-3.2.0.dist-info/entry_points.txt,sha256=KaVUBBSLyzi5naUkVg-r3q6T_igdLgaHY6Mm3oLX73s,52
-jsonschema-3.2.0.dist-info/top_level.txt,sha256=jGoNS61vDONU8U7p0Taf-y_8JVG1Z2CJ5Eif6zMN_cw,11
-jsonschema-3.2.0.dist-info/RECORD,,
@@ -1 +0,0 @@
-jsonschema
@@ -1,51 +1,62 @@
 Metadata-Version: 2.1
 Name: jsonschema
-Version: 3.2.0
+Version: 4.10.0
 Summary: An implementation of JSON Schema validation for Python
-Home-page: https://github.com/Julian/jsonschema
+Project-URL: Homepage, https://github.com/python-jsonschema/jsonschema
+Project-URL: Documentation, https://python-jsonschema.readthedocs.io/
+Project-URL: Issues, https://github.com/python-jsonschema/jsonschema/issues/
+Project-URL: Funding, https://github.com/sponsors/Julian
+Project-URL: Tidelift, https://tidelift.com/subscription/pkg/pypi-jsonschema?utm_source=pypi-jsonschema&utm_medium=referral&utm_campaign=pypi-link
+Project-URL: Changelog, https://github.com/python-jsonschema/jsonschema/blob/main/CHANGELOG.rst
+Project-URL: Source, https://github.com/python-jsonschema/jsonschema
 Author: Julian Berman
-Author-email: Julian@GrayVines.com
+Author-email: Julian+jsonschema@GrayVines.com
-License: UNKNOWN
+License: MIT
-Project-URL: Docs, https://python-jsonschema.readthedocs.io/en/latest/
+Keywords: data validation,json,jsonschema,validation
-Platform: UNKNOWN
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 2
-Classifier: Programming Language :: Python :: 2.7
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.5
-Classifier: Programming Language :: Python :: 3.6
 Classifier: Programming Language :: Python :: 3.7
 Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: Implementation :: CPython
 Classifier: Programming Language :: Python :: Implementation :: PyPy
-Requires-Dist: attrs (>=17.4.0)
-Requires-Dist: pyrsistent (>=0.14.0)
-Requires-Dist: setuptools
-Requires-Dist: six (>=1.11.0)
-Requires-Dist: functools32 ; python_version < "3"
-Requires-Dist: importlib-metadata ; python_version < "3.8"
+Requires-Python: >=3.7
+Requires-Dist: attrs>=17.4.0
+Requires-Dist: importlib-metadata; python_version < '3.8'
+Requires-Dist: importlib-resources>=1.4.0; python_version < '3.9'
+Requires-Dist: pkgutil-resolve-name>=1.3.10; python_version < '3.9'
+Requires-Dist: pyrsistent!=0.17.0,!=0.17.1,!=0.17.2,>=0.14.0
+Requires-Dist: typing-extensions; python_version < '3.8'
 Provides-Extra: format
-Requires-Dist: idna ; extra == 'format'
-Requires-Dist: jsonpointer (>1.13) ; extra == 'format'
-Requires-Dist: rfc3987 ; extra == 'format'
-Requires-Dist: strict-rfc3339 ; extra == 'format'
-Requires-Dist: webcolors ; extra == 'format'
-Provides-Extra: format_nongpl
-Requires-Dist: idna ; extra == 'format_nongpl'
-Requires-Dist: jsonpointer (>1.13) ; extra == 'format_nongpl'
-Requires-Dist: webcolors ; extra == 'format_nongpl'
-Requires-Dist: rfc3986-validator (>0.1.0) ; extra == 'format_nongpl'
-Requires-Dist: rfc3339-validator ; extra == 'format_nongpl'
+Requires-Dist: fqdn; extra == 'format'
+Requires-Dist: idna; extra == 'format'
+Requires-Dist: isoduration; extra == 'format'
+Requires-Dist: jsonpointer>1.13; extra == 'format'
+Requires-Dist: rfc3339-validator; extra == 'format'
+Requires-Dist: rfc3987; extra == 'format'
+Requires-Dist: uri-template; extra == 'format'
+Requires-Dist: webcolors>=1.11; extra == 'format'
+Provides-Extra: format-nongpl
+Requires-Dist: fqdn; extra == 'format-nongpl'
+Requires-Dist: idna; extra == 'format-nongpl'
+Requires-Dist: isoduration; extra == 'format-nongpl'
+Requires-Dist: jsonpointer>1.13; extra == 'format-nongpl'
+Requires-Dist: rfc3339-validator; extra == 'format-nongpl'
+Requires-Dist: rfc3986-validator>0.1.0; extra == 'format-nongpl'
+Requires-Dist: uri-template; extra == 'format-nongpl'
+Requires-Dist: webcolors>=1.11; extra == 'format-nongpl'
+Description-Content-Type: text/x-rst

 ==========
 jsonschema
 ==========

-|PyPI| |Pythons| |Travis| |AppVeyor| |Codecov| |ReadTheDocs|
+|PyPI| |Pythons| |CI| |ReadTheDocs| |Precommit| |Zenodo|

 .. |PyPI| image:: https://img.shields.io/pypi/v/jsonschema.svg
    :alt: PyPI version
@@ -55,25 +66,24 @@ jsonschema
    :alt: Supported Python versions
    :target: https://pypi.org/project/jsonschema/

-.. |Travis| image:: https://travis-ci.com/Julian/jsonschema.svg?branch=master
-   :alt: Travis build status
-   :target: https://travis-ci.com/Julian/jsonschema
-
-.. |AppVeyor| image:: https://ci.appveyor.com/api/projects/status/adtt0aiaihy6muyn/branch/master?svg=true
-   :alt: AppVeyor build status
-   :target: https://ci.appveyor.com/project/Julian/jsonschema
-
-.. |Codecov| image:: https://codecov.io/gh/Julian/jsonschema/branch/master/graph/badge.svg
-   :alt: Codecov Code coverage
-   :target: https://codecov.io/gh/Julian/jsonschema
+.. |CI| image:: https://github.com/python-jsonschema/jsonschema/workflows/CI/badge.svg
+   :alt: Build status
+   :target: https://github.com/python-jsonschema/jsonschema/actions?query=workflow%3ACI

 .. |ReadTheDocs| image:: https://readthedocs.org/projects/python-jsonschema/badge/?version=stable&style=flat
    :alt: ReadTheDocs status
    :target: https://python-jsonschema.readthedocs.io/en/stable/

+.. |Precommit| image:: https://results.pre-commit.ci/badge/github/python-jsonschema/jsonschema/main.svg
+   :alt: pre-commit.ci status
+   :target: https://results.pre-commit.ci/latest/github/python-jsonschema/jsonschema/main
+
+.. |Zenodo| image:: https://zenodo.org/badge/3072629.svg
+   :target: https://zenodo.org/badge/latestdoi/3072629
+
-``jsonschema`` is an implementation of `JSON Schema <https://json-schema.org>`_
-for Python (supporting 2.7+ including Python 3).
+``jsonschema`` is an implementation of the `JSON Schema
+<https://json-schema.org>`_ specification for Python.

 .. code-block:: python

@@ -102,19 +112,23 @@ It can also be used from console:

 .. code-block:: bash

-    $ jsonschema -i sample.json sample.schema
+    $ jsonschema --instance sample.json sample.schema

 Features
 --------

-* Full support for
+* Partial support for
+  `Draft 2020-12 <https://python-jsonschema.readthedocs.io/en/latest/validate/#jsonschema.Draft202012Validator>`_ and
+  `Draft 2019-09 <https://python-jsonschema.readthedocs.io/en/latest/validate/#jsonschema.Draft201909Validator>`_,
+  except for ``dynamicRef`` / ``recursiveRef`` and ``$vocabulary`` (in-progress).
+  Full support for
   `Draft 7 <https://python-jsonschema.readthedocs.io/en/latest/validate/#jsonschema.Draft7Validator>`_,
   `Draft 6 <https://python-jsonschema.readthedocs.io/en/latest/validate/#jsonschema.Draft6Validator>`_,
   `Draft 4 <https://python-jsonschema.readthedocs.io/en/latest/validate/#jsonschema.Draft4Validator>`_
   and
   `Draft 3 <https://python-jsonschema.readthedocs.io/en/latest/validate/#jsonschema.Draft3Validator>`_

-* `Lazy validation <https://python-jsonschema.readthedocs.io/en/latest/validate/#jsonschema.IValidator.iter_errors>`_
+* `Lazy validation <https://python-jsonschema.readthedocs.io/en/latest/validate/#jsonschema.protocols.Validator.iter_errors>`_
   that can iteratively report *all* validation errors.

 * `Programmatic querying <https://python-jsonschema.readthedocs.io/en/latest/errors/>`_
@@ -131,33 +145,6 @@ Installation
     $ pip install jsonschema


-Demo
-----
-
-Try ``jsonschema`` interactively in this online demo:
-
-.. image:: https://user-images.githubusercontent.com/1155573/56745335-8b158a00-6750-11e9-8776-83fa675939c4.png
-    :target: https://notebooks.ai/demo/gh/Julian/jsonschema
-    :alt: Open Live Demo
-
-
-Online demo Notebook will look similar to this:
-
-
-.. image:: https://user-images.githubusercontent.com/1155573/56820861-5c1c1880-6823-11e9-802a-ce01c5ec574f.gif
-    :alt: Open Live Demo
-    :width: 480 px
-
-
-Release Notes
--------------
-
-v3.1 brings support for ECMA 262 dialect regular expressions
-throughout schemas, as recommended by the specification. Big
-thanks to @Zac-HD for authoring support in a new `js-regex
-<https://pypi.org/project/js-regex/>`_ library.
-
-
 Running the Test Suite
 ----------------------

@@ -176,49 +163,40 @@ Benchmarks
 ----------

 ``jsonschema``'s benchmarks make use of `pyperf
-<https://pyperf.readthedocs.io>`_.
-
-Running them can be done via ``tox -e perf``, or by invoking the ``pyperf``
-commands externally (after ensuring that both it and ``jsonschema`` itself are
-installed)::
-
-    $ python -m pyperf jsonschema/benchmarks/test_suite.py --hist --output results.json
-
-To compare to a previous run, use::
-
-    $ python -m pyperf compare_to --table reference.json results.json
-
-See the ``pyperf`` documentation for more details.
+<https://pyperf.readthedocs.io>`_. Running them can be done via::
+
+    $ tox -e perf


 Community
 ---------

-There's a `mailing list <https://groups.google.com/forum/#!forum/jsonschema>`_
-for this implementation on Google Groups.
-
-Please join, and feel free to send questions there.
+The JSON Schema specification has `a Slack
+<https://json-schema.slack.com>`_, with an `invite link on its home page
+<https://json-schema.org/>`_. Many folks knowledgeable on authoring
+schemas can be found there.
+
+Otherwise, asking questions on Stack Overflow is another means of
+getting help if you're stuck.


-Contributing
-------------
+About
+-----

 I'm Julian Berman.

-``jsonschema`` is on `GitHub <https://github.com/Julian/jsonschema>`_.
+``jsonschema`` is on `GitHub <https://github.com/python-jsonschema/jsonschema>`_.

 Get in touch, via GitHub or otherwise, if you've got something to contribute,
 it'd be most welcome!

-You can also generally find me on Freenode (nick: ``tos9``) in various
+You can also generally find me on Libera (nick: ``Julian``) in various
 channels, including ``#python``.

-If you feel overwhelmingly grateful, you can also woo me with beer money
-via Google Pay with the email in my GitHub profile.
+If you feel overwhelmingly grateful, you can also `sponsor me
+<https://github.com/sponsors/Julian/>`_.

 And for companies who appreciate ``jsonschema`` and its continued support
 and growth, ``jsonschema`` is also now supportable via `TideLift
 <https://tidelift.com/subscription/pkg/pypi-jsonschema?utm_source=pypi-j
 sonschema&utm_medium=referral&utm_campaign=readme>`_.
39
third_party/python/jsonschema/jsonschema-4.10.0.dist-info/RECORD
vendored
Normal file
@@ -0,0 +1,39 @@
jsonschema/__init__.py,sha256=h0l2RPVM9kimU7-jTSKoEnguV3QGvrrQvlnJN3F6UPk,1561
jsonschema/__main__.py,sha256=Sfz1ZNeogymj_KZxq6JXY3F6O_1v28sLIiskusifQ5s,40
jsonschema/_format.py,sha256=xYMOH4Y5b9xtUV-Ol34QdtD412iHetTYJpiswIP-HY4,14091
jsonschema/_legacy_validators.py,sha256=vpxM-RSGLiEGUBcQq6867bOL2YpHB2ocMQANuFzinUY,7343
jsonschema/_types.py,sha256=_NDm3OxdPPWAqBSpfo4QVEA_oqfKMACg1QslVx0S900,5364
jsonschema/_utils.py,sha256=xrdrhbf611Cpkv-GKVtY_lXfDwwuKg-FDswibvRVIeg,10437
jsonschema/_validators.py,sha256=qmoubuJKRguYnJoG-_G-s4WnE1B8KFvxiacnmLAITCM,15563
jsonschema/cli.py,sha256=c2yQYyhTkBOgaa_s9e2gQ8clHmE6n3OZQ9a8Etcy6Pw,8239
jsonschema/exceptions.py,sha256=MBG9buP0C_8Z6fIZ2ob3KdgP7Nx4tIV9cX9S5SfR06Y,11267
jsonschema/protocols.py,sha256=WgzzOvm0Vnu39FHPQxjhEs4d2ctq-U3vFMnAchQ3Vbg,6598
jsonschema/validators.py,sha256=jbevK3aOsGVLPaQE4eO8H4bOQPTKUOqahbPMskRL0ko,36262
jsonschema/benchmarks/__init__.py,sha256=A0sQrxDBVHSyQ-8ru3L11hMXf3q9gVuB9x_YgHb4R9M,70
jsonschema/benchmarks/issue232.py,sha256=GKQBwm03sf-pPSxBxc4YDvBBnMYknOk6m-WtTntN5VE,506
jsonschema/benchmarks/json_schema_test_suite.py,sha256=PvfabpUYcF4_7csYDTcTauED8rnFEGYbdY5RqTXD08s,320
jsonschema/benchmarks/issue232/issue.json,sha256=eaPOZjMRu5u8RpKrsA9uk7ucPZS5tkKG4D_hkOTQ3Hk,117105
jsonschema/schemas/draft2019-09.json,sha256=e3YbPhIfCgyh6ioLjizIVrz4AWBLgmjXG6yqICvAwTs,1785
jsonschema/schemas/draft2020-12.json,sha256=Qdp29a-3zgYtJI92JGOpL3ykfk4PkFsiS6av7vkd7Q8,2452
jsonschema/schemas/draft3.json,sha256=2LanCgvBrUT8Eyk37KszzCjFxuOw0UBFOeS-ahb5Crg,2699
jsonschema/schemas/draft4.json,sha256=d-VZ-zmogXIypnObMGPT_e88TPZ9Zb40jd2-Fuvs9j4,4355
jsonschema/schemas/draft6.json,sha256=wp386fVINcOgbAOzxdXsDtp3cGVo-cTffPvHVmpRAG0,4437
jsonschema/schemas/draft7.json,sha256=PVOSCIJhYGxVm2A_OFMpyfGrRbXWZ-uZBodFOwVdQF4,4819
jsonschema/schemas/vocabularies.json,sha256=SW7oOta6bhkEdVDPBKgvrosztMW_UyKs-s04pgpgXqs,12845
jsonschema/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
jsonschema/tests/_helpers.py,sha256=yoWpWVYq4auuTPPd1_8FXV77RlczQtiyutSh6c191ZM,618
jsonschema/tests/_suite.py,sha256=r9QUEFsH-s8tClyw_b1zU4tmmC71-CLSWw-zfw4BxVk,7153
jsonschema/tests/fuzz_validate.py,sha256=fUA7yTJIihaCwJplkUehZeyB84HcXEcqtY5oPJXIO7I,1114
jsonschema/tests/test_cli.py,sha256=Pxes9nmTU_7-U4iKIbaNg4Lj3CpgirC_bxujrxcNqeo,28738
jsonschema/tests/test_deprecations.py,sha256=paMq3Hd33zDfVsJpTd95MAOzI6y7IoUQ5brgp9qqVdU,3901
jsonschema/tests/test_exceptions.py,sha256=NXbsKfRYoaCYQvI1Er1ld_NNL4Xuun4Tw2b4VIsNfSA,19190
jsonschema/tests/test_format.py,sha256=-J0Em4cx5mPx87MuTAkt4yGMTp8Hn32QTG3jZ_P8yKg,3755
jsonschema/tests/test_jsonschema_test_suite.py,sha256=GjHoqkl1E1scfMccoLgxiCqMxl0GZL96EXaowj_DNlI,21201
jsonschema/tests/test_types.py,sha256=DyvSKPtuaIu93Lkde80PkJkNOKgvCbaDYAfHz0yxyL0,6803
jsonschema/tests/test_utils.py,sha256=lJRVYyQeZQTUCTU_M3BhlkxPMgjsc8KQCd7U_Qkook8,3749
jsonschema/tests/test_validators.py,sha256=1iONAWZFKJ9VmW2V9K3SPqDvLN7y9zbIX9pX08WjXws,74030
jsonschema-4.10.0.dist-info/METADATA,sha256=oqJIOlik3CgABEotcdfBHr5rUinBp-Ahe1w5XsJQQ90,7964
jsonschema-4.10.0.dist-info/WHEEL,sha256=3DSmSyYE1SDERO2-rI3qUbMVovBs7--ggc7mubOemsc,86
jsonschema-4.10.0.dist-info/entry_points.txt,sha256=vO7rX4Fs_xIVJy2pnAtKgTSxfpnozAVQ0DjCmpMxnWE,51
jsonschema-4.10.0.dist-info/licenses/COPYING,sha256=T5KgFaE8TRoEC-8BiqE0MLTxvHO0Gxa7hGw0Z2bedDk,1057
jsonschema-4.10.0.dist-info/RECORD,,
4
third_party/python/jsonschema/jsonschema-4.10.0.dist-info/WHEEL
vendored
Normal file
@@ -0,0 +1,4 @@
Wheel-Version: 1.0
Generator: hatchling 1.8.0
Root-Is-Purelib: true
Tag: py3-none-any
@@ -1,3 +1,2 @@
 [console_scripts]
 jsonschema = jsonschema.cli:main
-
@@ -7,28 +7,52 @@ supported JSON Schema versions.
 Most commonly, `validate` is the quickest way to simply validate a given
 instance under a schema, and will create a validator for you.
 """
+import warnings
+
-from jsonschema.exceptions import (
-    ErrorTree, FormatError, RefResolutionError, SchemaError, ValidationError
-)
 from jsonschema._format import (
     FormatChecker,
     draft3_format_checker,
     draft4_format_checker,
     draft6_format_checker,
     draft7_format_checker,
+    draft201909_format_checker,
+    draft202012_format_checker,
 )
 from jsonschema._types import TypeChecker
+from jsonschema.exceptions import (
+    ErrorTree,
+    FormatError,
+    RefResolutionError,
+    SchemaError,
+    ValidationError,
+)
+from jsonschema.protocols import Validator
 from jsonschema.validators import (
     Draft3Validator,
     Draft4Validator,
     Draft6Validator,
     Draft7Validator,
+    Draft201909Validator,
+    Draft202012Validator,
     RefResolver,
     validate,
 )
-try:
-    from importlib import metadata
-except ImportError:  # for Python<3.8
-    import importlib_metadata as metadata
-__version__ = metadata.version("jsonschema")
+
+
+def __getattr__(name):
+    if name == "__version__":
+        warnings.warn(
+            "Accessing jsonschema.__version__ is deprecated and will be "
+            "removed in a future release. Use importlib.metadata directly "
+            "to query for jsonschema's version.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+
+        try:
+            from importlib import metadata
+        except ImportError:
+            import importlib_metadata as metadata
+
+        return metadata.version("jsonschema")
+    raise AttributeError(f"module {__name__} has no attribute {name}")
@@ -1,2 +1,3 @@
 from jsonschema.cli import main
+
 main()
369
third_party/python/jsonschema/jsonschema/_format.py
vendored
@@ -1,11 +1,20 @@
|
from __future__ import annotations
|
||||||
import re
|
|
||||||
import socket
|
from contextlib import suppress
|
||||||
import struct
|
from uuid import UUID
|
||||||
|
import datetime
|
||||||
|
import ipaddress
|
||||||
|
import re
|
||||||
|
import typing
|
||||||
|
|
||||||
from jsonschema.compat import str_types
|
|
||||||
from jsonschema.exceptions import FormatError
|
from jsonschema.exceptions import FormatError
|
||||||
|
|
||||||
|
_FormatCheckCallable = typing.Callable[[object], bool]
|
||||||
|
_F = typing.TypeVar("_F", bound=_FormatCheckCallable)
|
||||||
|
_RaisesType = typing.Union[
|
||||||
|
typing.Type[Exception], typing.Tuple[typing.Type[Exception], ...],
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
class FormatChecker(object):
|
class FormatChecker(object):
|
||||||
"""
|
"""
|
||||||
@@ -24,15 +33,18 @@ class FormatChecker(object):
|
|||||||
|
|
||||||
Arguments:
|
Arguments:
|
||||||
|
|
||||||
formats (~collections.Iterable):
|
formats (~collections.abc.Iterable):
|
||||||
|
|
||||||
The known formats to validate. This argument can be used to
|
The known formats to validate. This argument can be used to
|
||||||
limit which formats will be used during validation.
|
limit which formats will be used during validation.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
checkers = {}
|
checkers: dict[
|
||||||
|
str,
|
||||||
|
tuple[_FormatCheckCallable, _RaisesType],
|
||||||
|
] = {}
|
||||||
|
|
||||||
def __init__(self, formats=None):
|
def __init__(self, formats: typing.Iterable[str] | None = None):
|
||||||
if formats is None:
|
if formats is None:
|
||||||
self.checkers = self.checkers.copy()
|
self.checkers = self.checkers.copy()
|
||||||
else:
|
else:
|
||||||
@@ -41,7 +53,9 @@ class FormatChecker(object):
|
|||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return "<FormatChecker checkers={}>".format(sorted(self.checkers))
|
return "<FormatChecker checkers={}>".format(sorted(self.checkers))
|
||||||
|
|
||||||
def checks(self, format, raises=()):
|
def checks(
|
||||||
|
self, format: str, raises: _RaisesType = (),
|
||||||
|
) -> typing.Callable[[_F], _F]:
|
||||||
"""
|
"""
|
||||||
Register a decorated function as validating a new format.
|
Register a decorated function as validating a new format.
|
||||||
|
|
||||||
@@ -61,14 +75,23 @@ class FormatChecker(object):
|
|||||||
resulting validation error.
|
resulting validation error.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def _checks(func):
|
def _checks(func: _F) -> _F:
|
||||||
self.checkers[format] = (func, raises)
|
self.checkers[format] = (func, raises)
|
||||||
return func
|
return func
|
||||||
|
|
||||||
return _checks
|
return _checks
|
||||||
|
|
||||||
cls_checks = classmethod(checks)
|
@classmethod
|
||||||
|
def cls_checks(
|
||||||
|
cls, format: str, raises: _RaisesType = (),
|
||||||
|
) -> typing.Callable[[_F], _F]:
|
||||||
|
def _checks(func: _F) -> _F:
|
||||||
|
cls.checkers[format] = (func, raises)
|
||||||
|
return func
|
||||||
|
|
||||||
def check(self, instance, format):
|
return _checks
|
||||||
|
|
||||||
|
def check(self, instance: object, format: str) -> None:
|
||||||
"""
|
"""
|
||||||
Check whether the instance conforms to the given format.
|
Check whether the instance conforms to the given format.
|
||||||
|
|
||||||
@@ -98,11 +121,9 @@ class FormatChecker(object):
|
|||||||
except raises as e:
|
except raises as e:
|
||||||
cause = e
|
cause = e
|
||||||
if not result:
|
if not result:
|
||||||
raise FormatError(
|
raise FormatError(f"{instance!r} is not a {format!r}", cause=cause)
|
||||||
"%r is not a %r" % (instance, format), cause=cause,
|
|
||||||
)
|
|
||||||
|
|
||||||
def conforms(self, instance, format):
|
def conforms(self, instance: object, format: str) -> bool:
|
||||||
"""
|
"""
|
||||||
Check whether the instance conforms to the given format.
|
Check whether the instance conforms to the given format.
|
||||||
|
|
||||||
@@ -133,13 +154,16 @@ draft3_format_checker = FormatChecker()
|
|||||||
draft4_format_checker = FormatChecker()
|
draft4_format_checker = FormatChecker()
|
||||||
draft6_format_checker = FormatChecker()
|
draft6_format_checker = FormatChecker()
|
||||||
draft7_format_checker = FormatChecker()
|
draft7_format_checker = FormatChecker()
|
||||||
|
draft201909_format_checker = FormatChecker()
|
||||||
|
draft202012_format_checker = FormatChecker()
|
||||||
|
|
||||||
|
_draft_checkers: dict[str, FormatChecker] = dict(
|
||||||
_draft_checkers = dict(
|
|
||||||
draft3=draft3_format_checker,
|
draft3=draft3_format_checker,
|
||||||
draft4=draft4_format_checker,
|
draft4=draft4_format_checker,
|
||||||
draft6=draft6_format_checker,
|
draft6=draft6_format_checker,
|
||||||
draft7=draft7_format_checker,
|
draft7=draft7_format_checker,
|
||||||
|
draft201909=draft201909_format_checker,
|
||||||
|
draft202012=draft202012_format_checker,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -149,14 +173,18 @@ def _checks_drafts(
|
|||||||
draft4=None,
|
draft4=None,
|
||||||
draft6=None,
|
draft6=None,
|
||||||
draft7=None,
|
draft7=None,
|
||||||
|
draft201909=None,
|
||||||
|
draft202012=None,
|
||||||
raises=(),
|
raises=(),
|
||||||
):
|
) -> typing.Callable[[_F], _F]:
|
||||||
draft3 = draft3 or name
|
draft3 = draft3 or name
|
||||||
draft4 = draft4 or name
|
draft4 = draft4 or name
|
||||||
draft6 = draft6 or name
|
draft6 = draft6 or name
|
||||||
draft7 = draft7 or name
|
draft7 = draft7 or name
|
||||||
|
draft201909 = draft201909 or name
|
||||||
|
draft202012 = draft202012 or name
|
||||||
|
|
||||||
def wrap(func):
|
def wrap(func: _F) -> _F:
|
||||||
if draft3:
|
if draft3:
|
||||||
func = _draft_checkers["draft3"].checks(draft3, raises)(func)
|
func = _draft_checkers["draft3"].checks(draft3, raises)(func)
|
||||||
if draft4:
|
if draft4:
|
||||||
@@ -165,81 +193,86 @@ def _checks_drafts(
|
|||||||
func = _draft_checkers["draft6"].checks(draft6, raises)(func)
|
func = _draft_checkers["draft6"].checks(draft6, raises)(func)
|
||||||
if draft7:
|
if draft7:
|
||||||
func = _draft_checkers["draft7"].checks(draft7, raises)(func)
|
func = _draft_checkers["draft7"].checks(draft7, raises)(func)
|
||||||
|
if draft201909:
|
||||||
|
func = _draft_checkers["draft201909"].checks(draft201909, raises)(
|
||||||
|
func,
|
||||||
|
)
|
||||||
|
if draft202012:
|
||||||
|
func = _draft_checkers["draft202012"].checks(draft202012, raises)(
|
||||||
|
func,
|
||||||
|
)
|
||||||
|
|
||||||
# Oy. This is bad global state, but relied upon for now, until
|
# Oy. This is bad global state, but relied upon for now, until
|
||||||
# deprecation. See https://github.com/Julian/jsonschema/issues/519
|
# deprecation. See #519 and test_format_checkers_come_with_defaults
|
||||||
# and test_format_checkers_come_with_defaults
|
FormatChecker.cls_checks(
|
||||||
FormatChecker.cls_checks(draft7 or draft6 or draft4 or draft3, raises)(
|
draft202012 or draft201909 or draft7 or draft6 or draft4 or draft3,
|
||||||
func,
|
raises,
|
||||||
)
|
)(func)
|
||||||
return func
|
return func
|
||||||
|
|
||||||
return wrap
|
return wrap
|
||||||
|
|
||||||
|
|
||||||
@_checks_drafts(name="idn-email")
|
@_checks_drafts(name="idn-email")
|
||||||
@_checks_drafts(name="email")
|
@_checks_drafts(name="email")
|
||||||
def is_email(instance):
|
def is_email(instance: object) -> bool:
|
||||||
if not isinstance(instance, str_types):
|
if not isinstance(instance, str):
|
||||||
return True
|
return True
|
||||||
return "@" in instance
|
return "@" in instance
|
||||||
|
|
||||||
|
|
||||||
_ipv4_re = re.compile(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$")
|
|
||||||
|
|
||||||
|
|
||||||
@_checks_drafts(
|
@_checks_drafts(
|
||||||
draft3="ip-address", draft4="ipv4", draft6="ipv4", draft7="ipv4",
|
draft3="ip-address",
|
||||||
|
draft4="ipv4",
|
||||||
|
draft6="ipv4",
|
||||||
|
draft7="ipv4",
|
||||||
|
draft201909="ipv4",
|
||||||
|
draft202012="ipv4",
|
||||||
|
raises=ipaddress.AddressValueError,
|
||||||
)
|
)
|
||||||
def is_ipv4(instance):
|
def is_ipv4(instance: object) -> bool:
|
||||||
if not isinstance(instance, str_types):
|
if not isinstance(instance, str):
|
||||||
return True
|
return True
|
||||||
if not _ipv4_re.match(instance):
|
return bool(ipaddress.IPv4Address(instance))
|
||||||
return False
|
|
||||||
return all(0 <= int(component) <= 255 for component in instance.split("."))
|
|
||||||
|
|
||||||
|
|
||||||
if hasattr(socket, "inet_pton"):
|
@_checks_drafts(name="ipv6", raises=ipaddress.AddressValueError)
|
||||||
# FIXME: Really this only should raise struct.error, but see the sadness
|
def is_ipv6(instance: object) -> bool:
|
||||||
# that is https://twistedmatrix.com/trac/ticket/9409
|
if not isinstance(instance, str):
|
||||||
|
return True
|
||||||
|
address = ipaddress.IPv6Address(instance)
|
||||||
|
return not getattr(address, "scope_id", "")
|
||||||
|
|
||||||
|
|
||||||
|
with suppress(ImportError):
|
||||||
|
from fqdn import FQDN
|
||||||
|
|
||||||
@_checks_drafts(
|
@_checks_drafts(
|
||||||
name="ipv6", raises=(socket.error, struct.error, ValueError),
|
draft3="host-name",
|
||||||
|
draft4="hostname",
|
||||||
|
draft6="hostname",
|
||||||
|
draft7="hostname",
|
||||||
|
draft201909="hostname",
|
||||||
|
draft202012="hostname",
|
||||||
)
|
)
|
||||||
def is_ipv6(instance):
|
def is_host_name(instance: object) -> bool:
|
||||||
if not isinstance(instance, str_types):
|
if not isinstance(instance, str):
|
||||||
return True
|
return True
|
||||||
return socket.inet_pton(socket.AF_INET6, instance)
|
return FQDN(instance).is_valid
|
||||||
|
|
||||||
|
|
||||||
_host_name_re = re.compile(r"^[A-Za-z0-9][A-Za-z0-9\.\-]{1,255}$")
|
with suppress(ImportError):
|
||||||
|
|
||||||
|
|
||||||
@_checks_drafts(
|
|
||||||
draft3="host-name",
|
|
||||||
draft4="hostname",
|
|
||||||
draft6="hostname",
|
|
||||||
draft7="hostname",
|
|
||||||
)
|
|
||||||
def is_host_name(instance):
|
|
||||||
if not isinstance(instance, str_types):
|
|
||||||
return True
|
|
||||||
if not _host_name_re.match(instance):
|
|
||||||
return False
|
|
||||||
components = instance.split(".")
|
|
||||||
for component in components:
|
|
||||||
if len(component) > 63:
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
try:
|
|
||||||
# The built-in `idna` codec only implements RFC 3890, so we go elsewhere.
|
# The built-in `idna` codec only implements RFC 3890, so we go elsewhere.
|
||||||
import idna
|
import idna
|
||||||
except ImportError:
|
|
||||||
pass
|
@_checks_drafts(
|
||||||
else:
|
draft7="idn-hostname",
|
||||||
@_checks_drafts(draft7="idn-hostname", raises=idna.IDNAError)
|
draft201909="idn-hostname",
|
||||||
def is_idn_host_name(instance):
|
draft202012="idn-hostname",
|
||||||
if not isinstance(instance, str_types):
|
raises=(idna.IDNAError, UnicodeError),
|
||||||
|
)
|
||||||
|
def is_idn_host_name(instance: object) -> bool:
|
||||||
|
if not isinstance(instance, str):
|
||||||
return True
|
return True
|
||||||
idna.encode(instance)
|
idna.encode(instance)
|
||||||
return True
|
return True
|
||||||
@@ -248,137 +281,148 @@ else:
|
|||||||
try:
|
try:
|
||||||
import rfc3987
|
import rfc3987
|
||||||
except ImportError:
|
except ImportError:
|
||||||
try:
|
with suppress(ImportError):
|
||||||
from rfc3986_validator import validate_rfc3986
|
from rfc3986_validator import validate_rfc3986
|
||||||
except ImportError:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
@_checks_drafts(name="uri")
|
@_checks_drafts(name="uri")
|
||||||
def is_uri(instance):
|
def is_uri(instance: object) -> bool:
|
||||||
if not isinstance(instance, str_types):
|
if not isinstance(instance, str):
|
||||||
return True
|
return True
|
||||||
return validate_rfc3986(instance, rule="URI")
|
return validate_rfc3986(instance, rule="URI")
|
||||||
|
|
||||||
@_checks_drafts(
|
@_checks_drafts(
|
||||||
draft6="uri-reference",
|
draft6="uri-reference",
|
||||||
draft7="uri-reference",
|
draft7="uri-reference",
|
||||||
|
draft201909="uri-reference",
|
||||||
|
draft202012="uri-reference",
|
||||||
raises=ValueError,
|
raises=ValueError,
|
||||||
)
|
)
|
||||||
def is_uri_reference(instance):
|
def is_uri_reference(instance: object) -> bool:
|
||||||
if not isinstance(instance, str_types):
|
if not isinstance(instance, str):
|
||||||
return True
|
return True
|
||||||
return validate_rfc3986(instance, rule="URI_reference")
|
return validate_rfc3986(instance, rule="URI_reference")
|
||||||
|
|
||||||
else:
|
else:
|
||||||
@_checks_drafts(draft7="iri", raises=ValueError)
|
|
||||||
def is_iri(instance):
|
@_checks_drafts(
|
||||||
if not isinstance(instance, str_types):
|
draft7="iri",
|
||||||
|
draft201909="iri",
|
||||||
|
draft202012="iri",
|
||||||
|
raises=ValueError,
|
||||||
|
)
|
||||||
|
def is_iri(instance: object) -> bool:
|
||||||
|
if not isinstance(instance, str):
|
||||||
return True
|
return True
|
||||||
return rfc3987.parse(instance, rule="IRI")
|
return rfc3987.parse(instance, rule="IRI")
|
||||||
|
|
||||||
@_checks_drafts(draft7="iri-reference", raises=ValueError)
|
@_checks_drafts(
|
||||||
def is_iri_reference(instance):
|
draft7="iri-reference",
|
||||||
if not isinstance(instance, str_types):
|
draft201909="iri-reference",
|
||||||
|
draft202012="iri-reference",
|
||||||
|
raises=ValueError,
|
||||||
|
)
|
||||||
|
def is_iri_reference(instance: object) -> bool:
|
||||||
|
if not isinstance(instance, str):
|
||||||
return True
|
return True
|
||||||
return rfc3987.parse(instance, rule="IRI_reference")
|
return rfc3987.parse(instance, rule="IRI_reference")
|
||||||
|
|
||||||
@_checks_drafts(name="uri", raises=ValueError)
|
@_checks_drafts(name="uri", raises=ValueError)
|
||||||
def is_uri(instance):
|
def is_uri(instance: object) -> bool:
|
||||||
if not isinstance(instance, str_types):
|
if not isinstance(instance, str):
|
||||||
return True
|
return True
|
||||||
return rfc3987.parse(instance, rule="URI")
|
return rfc3987.parse(instance, rule="URI")
|
||||||
|
|
||||||
@_checks_drafts(
|
@_checks_drafts(
|
||||||
draft6="uri-reference",
|
draft6="uri-reference",
|
||||||
draft7="uri-reference",
|
draft7="uri-reference",
|
||||||
|
draft201909="uri-reference",
|
||||||
|
draft202012="uri-reference",
|
||||||
raises=ValueError,
|
raises=ValueError,
|
||||||
)
|
)
|
||||||
def is_uri_reference(instance):
|
def is_uri_reference(instance: object) -> bool:
|
||||||
if not isinstance(instance, str_types):
|
if not isinstance(instance, str):
|
||||||
return True
|
return True
|
||||||
return rfc3987.parse(instance, rule="URI_reference")
|
return rfc3987.parse(instance, rule="URI_reference")
|
||||||
|
|
||||||
|
|
||||||
try:
|
with suppress(ImportError):
|
||||||
from strict_rfc3339 import validate_rfc3339
|
from rfc3339_validator import validate_rfc3339
|
||||||
except ImportError:
|
|
||||||
try:
|
|
||||||
from rfc3339_validator import validate_rfc3339
|
|
||||||
except ImportError:
|
|
||||||
validate_rfc3339 = None
|
|
||||||
|
|
||||||
if validate_rfc3339:
|
|
||||||
@_checks_drafts(name="date-time")
|
@_checks_drafts(name="date-time")
|
||||||
def is_datetime(instance):
|
def is_datetime(instance: object) -> bool:
|
||||||
if not isinstance(instance, str_types):
|
if not isinstance(instance, str):
|
||||||
return True
|
return True
|
||||||
return validate_rfc3339(instance)
|
return validate_rfc3339(instance.upper())
|
||||||
|
|
||||||
@_checks_drafts(draft7="time")
|
@_checks_drafts(
|
||||||
def is_time(instance):
|
draft7="time",
|
||||||
if not isinstance(instance, str_types):
|
draft201909="time",
|
||||||
|
draft202012="time",
|
||||||
|
)
|
||||||
|
def is_time(instance: object) -> bool:
|
||||||
|
if not isinstance(instance, str):
|
||||||
return True
|
return True
|
||||||
return is_datetime("1970-01-01T" + instance)
|
return is_datetime("1970-01-01T" + instance)
|
||||||
|
|
||||||
|
|
||||||
@_checks_drafts(name="regex", raises=re.error)
|
@_checks_drafts(name="regex", raises=re.error)
|
||||||
def is_regex(instance):
|
def is_regex(instance: object) -> bool:
|
||||||
if not isinstance(instance, str_types):
|
if not isinstance(instance, str):
|
||||||
return True
|
return True
|
||||||
return re.compile(instance)
|
return bool(re.compile(instance))
|
||||||
|
|
||||||
|
|
||||||
@_checks_drafts(draft3="date", draft7="date", raises=ValueError)
|
@_checks_drafts(
|
||||||
def is_date(instance):
|
draft3="date",
|
||||||
if not isinstance(instance, str_types):
|
draft7="date",
|
||||||
|
draft201909="date",
|
||||||
|
draft202012="date",
|
||||||
|
raises=ValueError,
|
||||||
|
)
|
||||||
|
def is_date(instance: object) -> bool:
|
||||||
|
if not isinstance(instance, str):
|
||||||
return True
|
return True
|
||||||
return datetime.datetime.strptime(instance, "%Y-%m-%d")
|
return bool(instance.isascii() and datetime.date.fromisoformat(instance))
|
||||||
|
|
||||||
|
|
||||||
@_checks_drafts(draft3="time", raises=ValueError)
|
@_checks_drafts(draft3="time", raises=ValueError)
|
||||||
def is_draft3_time(instance):
|
def is_draft3_time(instance: object) -> bool:
|
||||||
if not isinstance(instance, str_types):
|
if not isinstance(instance, str):
|
||||||
return True
|
return True
|
||||||
return datetime.datetime.strptime(instance, "%H:%M:%S")
|
return bool(datetime.datetime.strptime(instance, "%H:%M:%S"))
|
||||||
|
|
||||||
|
|
||||||
try:
|
with suppress(ImportError):
|
||||||
|
from webcolors import CSS21_NAMES_TO_HEX
|
||||||
import webcolors
|
import webcolors
|
||||||
except ImportError:
|
|
||||||
pass
|
def is_css_color_code(instance: object) -> bool:
|
||||||
else:
|
|
||||||
def is_css_color_code(instance):
|
|
||||||
return webcolors.normalize_hex(instance)
|
return webcolors.normalize_hex(instance)
|
||||||
|
|
||||||
@_checks_drafts(draft3="color", raises=(ValueError, TypeError))
|
@_checks_drafts(draft3="color", raises=(ValueError, TypeError))
|
||||||
def is_css21_color(instance):
|
def is_css21_color(instance: object) -> bool:
|
||||||
if (
|
if (
|
||||||
not isinstance(instance, str_types) or
|
not isinstance(instance, str)
|
||||||
instance.lower() in webcolors.css21_names_to_hex
|
or instance.lower() in CSS21_NAMES_TO_HEX
|
||||||
):
|
):
|
||||||
return True
|
return True
|
||||||
return is_css_color_code(instance)
|
return is_css_color_code(instance)
|
||||||
|
|
||||||
def is_css3_color(instance):
|
|
||||||
if instance.lower() in webcolors.css3_names_to_hex:
|
|
||||||
return True
|
|
||||||
return is_css_color_code(instance)
|
|
||||||
|
|
||||||
|
with suppress(ImportError):
|
||||||
try:
|
|
||||||
import jsonpointer
|
import jsonpointer
|
||||||
except ImportError:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
@_checks_drafts(
|
@_checks_drafts(
|
||||||
draft6="json-pointer",
|
draft6="json-pointer",
|
||||||
draft7="json-pointer",
|
draft7="json-pointer",
|
||||||
|
draft201909="json-pointer",
|
||||||
|
draft202012="json-pointer",
|
||||||
raises=jsonpointer.JsonPointerException,
|
raises=jsonpointer.JsonPointerException,
|
||||||
)
|
)
|
||||||
def is_json_pointer(instance):
|
def is_json_pointer(instance: object) -> bool:
|
||||||
if not isinstance(instance, str_types):
|
if not isinstance(instance, str):
|
||||||
return True
|
return True
|
||||||
return jsonpointer.JsonPointer(instance)
|
return bool(jsonpointer.JsonPointer(instance))
|
||||||
|
|
||||||
# TODO: I don't want to maintain this, so it
|
# TODO: I don't want to maintain this, so it
|
||||||
# needs to go either into jsonpointer (pending
|
# needs to go either into jsonpointer (pending
|
||||||
@@ -386,16 +430,22 @@ else:
|
|||||||
# into a new external library.
|
# into a new external library.
|
||||||
@_checks_drafts(
|
@_checks_drafts(
|
||||||
draft7="relative-json-pointer",
|
draft7="relative-json-pointer",
|
||||||
|
draft201909="relative-json-pointer",
|
||||||
|
draft202012="relative-json-pointer",
|
||||||
raises=jsonpointer.JsonPointerException,
|
raises=jsonpointer.JsonPointerException,
|
||||||
)
|
)
|
||||||
def is_relative_json_pointer(instance):
|
def is_relative_json_pointer(instance: object) -> bool:
|
||||||
# Definition taken from:
|
# Definition taken from:
|
||||||
# https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
|
# https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
|
||||||
if not isinstance(instance, str_types):
|
if not isinstance(instance, str):
|
||||||
return True
|
return True
|
||||||
non_negative_integer, rest = [], ""
|
non_negative_integer, rest = [], ""
|
||||||
for i, character in enumerate(instance):
|
for i, character in enumerate(instance):
|
||||||
if character.isdigit():
|
if character.isdigit():
|
||||||
|
# digits with a leading "0" are not allowed
|
||||||
|
if i > 0 and int(instance[i - 1]) == 0:
|
||||||
|
return False
|
||||||
|
|
||||||
non_negative_integer.append(character)
|
non_negative_integer.append(character)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@@ -404,22 +454,45 @@ else:
|
|||||||
|
|
||||||
rest = instance[i:]
|
rest = instance[i:]
|
||||||
break
|
break
|
||||||
return (rest == "#") or jsonpointer.JsonPointer(rest)
|
return (rest == "#") or bool(jsonpointer.JsonPointer(rest))
|
||||||
|
|
||||||
|
|
||||||
try:
|
with suppress(ImportError):
|
||||||
import uritemplate.exceptions
|
import uri_template
|
||||||
except ImportError:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
@_checks_drafts(
|
@_checks_drafts(
|
||||||
draft6="uri-template",
|
draft6="uri-template",
|
||||||
draft7="uri-template",
|
draft7="uri-template",
|
||||||
raises=uritemplate.exceptions.InvalidTemplate,
|
draft201909="uri-template",
|
||||||
|
draft202012="uri-template",
|
||||||
)
|
)
|
||||||
def is_uri_template(
|
def is_uri_template(instance: object) -> bool:
|
||||||
instance,
|
if not isinstance(instance, str):
|
||||||
template_validator=uritemplate.Validator().force_balanced_braces(),
|
return True
|
||||||
):
|
return uri_template.validate(instance)
|
||||||
template = uritemplate.URITemplate(instance)
|
|
||||||
return template_validator.validate(template)
|
|
||||||
|
with suppress(ImportError):
|
||||||
|
import isoduration
|
||||||
|
|
||||||
|
@_checks_drafts(
|
||||||
|
draft201909="duration",
|
||||||
|
draft202012="duration",
|
||||||
|
raises=isoduration.DurationParsingException,
|
||||||
|
)
|
||||||
|
def is_duration(instance: object) -> bool:
|
||||||
|
if not isinstance(instance, str):
|
||||||
|
return True
|
||||||
|
return bool(isoduration.parse_duration(instance))
|
||||||
|
|
||||||
|
|
||||||
|
@_checks_drafts(
|
||||||
|
draft201909="uuid",
|
||||||
|
draft202012="uuid",
|
||||||
|
raises=ValueError,
|
||||||
|
)
|
||||||
|
def is_uuid(instance: object) -> bool:
|
||||||
|
if not isinstance(instance, str):
|
||||||
|
return True
|
||||||
|
UUID(instance)
|
||||||
|
return all(instance[position] == "-" for position in (8, 13, 18, 23))
|
||||||
|
|||||||
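
Editorial note: the `_format.py` changes above type the `checks()` decorator and wire every format into the 2019-09 and 2020-12 drafts. A hedged sketch of registering a custom format against the vendored API; the "even-length" format name is invented for illustration:

    from jsonschema import Draft202012Validator, FormatChecker

    checker = FormatChecker()

    @checker.checks("even-length")
    def is_even_length(value: object) -> bool:
        if not isinstance(value, str):
            return True  # mirror the library's convention: non-strings pass
        return len(value) % 2 == 0

    validator = Draft202012Validator(
        {"type": "string", "format": "even-length"},
        format_checker=checker,
    )
    print(validator.is_valid("ab"))   # True
    print(validator.is_valid("abc"))  # False once a format_checker is supplied
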
@@ -1,49 +1,88 @@
 from jsonschema import _utils
-from jsonschema.compat import iteritems
 from jsonschema.exceptions import ValidationError
 
 
+def ignore_ref_siblings(schema):
+    """
+    Ignore siblings of ``$ref`` if it is present.
+
+    Otherwise, return all keywords.
+
+    Suitable for use with `create`'s ``applicable_validators`` argument.
+    """
+    ref = schema.get("$ref")
+    if ref is not None:
+        return [("$ref", ref)]
+    else:
+        return schema.items()
+
+
 def dependencies_draft3(validator, dependencies, instance, schema):
     if not validator.is_type(instance, "object"):
         return
 
-    for property, dependency in iteritems(dependencies):
+    for property, dependency in dependencies.items():
         if property not in instance:
             continue
 
         if validator.is_type(dependency, "object"):
-            for error in validator.descend(
+            yield from validator.descend(
                 instance, dependency, schema_path=property,
-            ):
-                yield error
+            )
         elif validator.is_type(dependency, "string"):
             if dependency not in instance:
-                yield ValidationError(
-                    "%r is a dependency of %r" % (dependency, property)
-                )
+                message = f"{dependency!r} is a dependency of {property!r}"
+                yield ValidationError(message)
         else:
             for each in dependency:
                 if each not in instance:
-                    message = "%r is a dependency of %r"
-                    yield ValidationError(message % (each, property))
+                    message = f"{each!r} is a dependency of {property!r}"
+                    yield ValidationError(message)
+
+
+def dependencies_draft4_draft6_draft7(
+    validator,
+    dependencies,
+    instance,
+    schema,
+):
+    """
+    Support for the ``dependencies`` keyword from pre-draft 2019-09.
+
+    In later drafts, the keyword was split into separate
+    ``dependentRequired`` and ``dependentSchemas`` validators.
+    """
+    if not validator.is_type(instance, "object"):
+        return
+
+    for property, dependency in dependencies.items():
+        if property not in instance:
+            continue
+
+        if validator.is_type(dependency, "array"):
+            for each in dependency:
+                if each not in instance:
+                    message = f"{each!r} is a dependency of {property!r}"
+                    yield ValidationError(message)
+        else:
+            yield from validator.descend(
+                instance, dependency, schema_path=property,
+            )
 
 
 def disallow_draft3(validator, disallow, instance, schema):
     for disallowed in _utils.ensure_list(disallow):
-        if validator.is_valid(instance, {"type": [disallowed]}):
-            yield ValidationError(
-                "%r is disallowed for %r" % (disallowed, instance)
-            )
+        if validator.evolve(schema={"type": [disallowed]}).is_valid(instance):
+            message = f"{disallowed!r} is disallowed for {instance!r}"
+            yield ValidationError(message)
 
 
 def extends_draft3(validator, extends, instance, schema):
     if validator.is_type(extends, "object"):
-        for error in validator.descend(instance, extends):
-            yield error
+        yield from validator.descend(instance, extends)
         return
     for index, subschema in enumerate(extends):
-        for error in validator.descend(instance, subschema, schema_path=index):
-            yield error
+        yield from validator.descend(instance, subschema, schema_path=index)
 
 
 def items_draft3_draft4(validator, items, instance, schema):
@@ -52,14 +91,26 @@ def items_draft3_draft4(validator, items, instance, schema):
 
     if validator.is_type(items, "object"):
         for index, item in enumerate(instance):
-            for error in validator.descend(item, items, path=index):
-                yield error
+            yield from validator.descend(item, items, path=index)
     else:
         for (index, item), subschema in zip(enumerate(instance), items):
-            for error in validator.descend(
+            yield from validator.descend(
                 item, subschema, path=index, schema_path=index,
-            ):
-                yield error
+            )
+
+
+def items_draft6_draft7_draft201909(validator, items, instance, schema):
+    if not validator.is_type(instance, "array"):
+        return
+
+    if validator.is_type(items, "array"):
+        for (index, item), subschema in zip(enumerate(instance), items):
+            yield from validator.descend(
+                item, subschema, path=index, schema_path=index,
+            )
+    else:
+        for index, item in enumerate(instance):
+            yield from validator.descend(item, items, path=index)
 
 
 def minimum_draft3_draft4(validator, minimum, instance, schema):
@@ -74,9 +125,8 @@ def minimum_draft3_draft4(validator, minimum, instance, schema):
         cmp = "less than"
 
     if failed:
-        yield ValidationError(
-            "%r is %s the minimum of %r" % (instance, cmp, minimum)
-        )
+        message = f"{instance!r} is {cmp} the minimum of {minimum!r}"
+        yield ValidationError(message)
 
 
 def maximum_draft3_draft4(validator, maximum, instance, schema):
@@ -91,26 +141,24 @@ def maximum_draft3_draft4(validator, maximum, instance, schema):
         cmp = "greater than"
 
     if failed:
-        yield ValidationError(
-            "%r is %s the maximum of %r" % (instance, cmp, maximum)
-        )
+        message = f"{instance!r} is {cmp} the maximum of {maximum!r}"
+        yield ValidationError(message)
 
 
 def properties_draft3(validator, properties, instance, schema):
     if not validator.is_type(instance, "object"):
         return
 
-    for property, subschema in iteritems(properties):
+    for property, subschema in properties.items():
         if property in instance:
-            for error in validator.descend(
+            yield from validator.descend(
                 instance[property],
                 subschema,
                 path=property,
                 schema_path=property,
-            ):
-                yield error
+            )
         elif subschema.get("required", False):
-            error = ValidationError("%r is a required property" % property)
+            error = ValidationError(f"{property!r} is a required property")
             error._set(
                 validator="required",
                 validator_value=subschema["required"],
@@ -136,6 +184,45 @@ def type_draft3(validator, types, instance, schema):
         if validator.is_type(instance, type):
             return
     else:
+        reprs = []
+        for type in types:
+            try:
+                reprs.append(repr(type["name"]))
+            except Exception:
+                reprs.append(repr(type))
         yield ValidationError(
-            _utils.types_msg(instance, types), context=all_errors,
+            f"{instance!r} is not of type {', '.join(reprs)}",
+            context=all_errors,
         )
+
+
+def contains_draft6_draft7(validator, contains, instance, schema):
+    if not validator.is_type(instance, "array"):
+        return
+
+    if not any(
+        validator.evolve(schema=contains).is_valid(element)
+        for element in instance
+    ):
+        yield ValidationError(
+            f"None of {instance!r} are valid under the given schema",
+        )
+
+
+def recursiveRef(validator, recursiveRef, instance, schema):
+    lookup_url, target = validator.resolver.resolution_scope, validator.schema
+
+    for each in reversed(validator.resolver._scopes_stack[1:]):
+        lookup_url, next_target = validator.resolver.resolve(each)
+        if next_target.get("$recursiveAnchor"):
+            target = next_target
+        else:
+            break
+
+    fragment = recursiveRef.lstrip("#")
+    subschema = validator.resolver.resolve_fragment(target, fragment)
+    # FIXME: This is gutted (and not calling .descend) because it can trigger
+    #        recursion errors, so there's a bug here. Re-enable the tests to
+    #        see it.
+    subschema
+    return []
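
Editorial note: a quick sketch of the pre-2019-09 "dependencies" keyword that `dependencies_draft4_draft6_draft7` above implements; the schema and instances are invented:

    from jsonschema import Draft7Validator

    v = Draft7Validator({"dependencies": {"credit_card": ["billing_address"]}})
    print(v.is_valid({"credit_card": "4111"}))  # False: required co-property missing
    print(v.is_valid({"credit_card": "4111", "billing_address": "1 Main St"}))  # True
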
155 third_party/python/jsonschema/jsonschema/_reflect.py vendored
@@ -1,155 +0,0 @@
-# -*- test-case-name: twisted.test.test_reflect -*-
-# Copyright (c) Twisted Matrix Laboratories.
-# See LICENSE for details.
-
-"""
-Standardized versions of various cool and/or strange things that you can do
-with Python's reflection capabilities.
-"""
-
-import sys
-
-from jsonschema.compat import PY3
-
-
-class _NoModuleFound(Exception):
-    """
-    No module was found because none exists.
-    """
-
-
-class InvalidName(ValueError):
-    """
-    The given name is not a dot-separated list of Python objects.
-    """
-
-
-class ModuleNotFound(InvalidName):
-    """
-    The module associated with the given name doesn't exist and it can't be
-    imported.
-    """
-
-
-class ObjectNotFound(InvalidName):
-    """
-    The object associated with the given name doesn't exist and it can't be
-    imported.
-    """
-
-
-if PY3:
-    def reraise(exception, traceback):
-        raise exception.with_traceback(traceback)
-else:
-    exec("""def reraise(exception, traceback):
-        raise exception.__class__, exception, traceback""")
-
-reraise.__doc__ = """
-Re-raise an exception, with an optional traceback, in a way that is compatible
-with both Python 2 and Python 3.
-
-Note that on Python 3, re-raised exceptions will be mutated, with their
-C{__traceback__} attribute being set.
-
-@param exception: The exception instance.
-@param traceback: The traceback to use, or C{None} indicating a new traceback.
-"""
-
-
-def _importAndCheckStack(importName):
-    """
-    Import the given name as a module, then walk the stack to determine whether
-    the failure was the module not existing, or some code in the module (for
-    example a dependent import) failing. This can be helpful to determine
-    whether any actual application code was run. For example, to distiguish
-    administrative error (entering the wrong module name), from programmer
-    error (writing buggy code in a module that fails to import).
-
-    @param importName: The name of the module to import.
-    @type importName: C{str}
-    @raise Exception: if something bad happens. This can be any type of
-        exception, since nobody knows what loading some arbitrary code might
-        do.
-    @raise _NoModuleFound: if no module was found.
-    """
-    try:
-        return __import__(importName)
-    except ImportError:
-        excType, excValue, excTraceback = sys.exc_info()
-        while excTraceback:
-            execName = excTraceback.tb_frame.f_globals["__name__"]
-            # in Python 2 execName is None when an ImportError is encountered,
-            # where in Python 3 execName is equal to the importName.
-            if execName is None or execName == importName:
-                reraise(excValue, excTraceback)
-            excTraceback = excTraceback.tb_next
-        raise _NoModuleFound()
-
-
-def namedAny(name):
-    """
-    Retrieve a Python object by its fully qualified name from the global Python
-    module namespace. The first part of the name, that describes a module,
-    will be discovered and imported. Each subsequent part of the name is
-    treated as the name of an attribute of the object specified by all of the
-    name which came before it. For example, the fully-qualified name of this
-    object is 'twisted.python.reflect.namedAny'.
-
-    @type name: L{str}
-    @param name: The name of the object to return.
-
-    @raise InvalidName: If the name is an empty string, starts or ends with
-        a '.', or is otherwise syntactically incorrect.
-
-    @raise ModuleNotFound: If the name is syntactically correct but the
-        module it specifies cannot be imported because it does not appear to
-        exist.
-
-    @raise ObjectNotFound: If the name is syntactically correct, includes at
-        least one '.', but the module it specifies cannot be imported because
-        it does not appear to exist.
-
-    @raise AttributeError: If an attribute of an object along the way cannot be
-        accessed, or a module along the way is not found.
-
-    @return: the Python object identified by 'name'.
-    """
-    if not name:
-        raise InvalidName('Empty module name')
-
-    names = name.split('.')
-
-    # if the name starts or ends with a '.' or contains '..', the __import__
-    # will raise an 'Empty module name' error. This will provide a better error
-    # message.
-    if '' in names:
-        raise InvalidName(
-            "name must be a string giving a '.'-separated list of Python "
-            "identifiers, not %r" % (name,))
-
-    topLevelPackage = None
-    moduleNames = names[:]
-    while not topLevelPackage:
-        if moduleNames:
-            trialname = '.'.join(moduleNames)
-            try:
-                topLevelPackage = _importAndCheckStack(trialname)
-            except _NoModuleFound:
-                moduleNames.pop()
-        else:
-            if len(names) == 1:
-                raise ModuleNotFound("No module named %r" % (name,))
-            else:
-                raise ObjectNotFound('%r does not name an object' % (name,))
-
-    obj = topLevelPackage
-    for n in names[1:]:
-        obj = getattr(obj, n)
-
-    return obj
@@ -1,12 +1,33 @@
+from __future__ import annotations
+
 import numbers
+import typing
 
 from pyrsistent import pmap
 import attr
 
-from jsonschema.compat import int_types, str_types
 from jsonschema.exceptions import UndefinedTypeCheck
 
 
+# unfortunately, the type of pmap is generic, and if used as the attr.ib
+# converter, the generic type is presented to mypy, which then fails to match
+# the concrete type of a type checker mapping
+# this "do nothing" wrapper presents the correct information to mypy
+def _typed_pmap_converter(
+    init_val: typing.Mapping[
+        str,
+        typing.Callable[["TypeChecker", typing.Any], bool],
+    ],
+) -> typing.Mapping[str, typing.Callable[["TypeChecker", typing.Any], bool]]:
+    return typing.cast(
+        typing.Mapping[
+            str,
+            typing.Callable[["TypeChecker", typing.Any], bool],
+        ],
+        pmap(init_val),
+    )
+
+
 def is_array(checker, instance):
     return isinstance(instance, list)
 
@@ -19,7 +40,7 @@ def is_integer(checker, instance):
     # bool inherits from int, so ensure bools aren't reported as ints
     if isinstance(instance, bool):
         return False
-    return isinstance(instance, int_types)
+    return isinstance(instance, int)
 
 
 def is_null(checker, instance):
@@ -38,7 +59,7 @@ def is_object(checker, instance):
 
 
 def is_string(checker, instance):
-    return isinstance(instance, str_types)
+    return isinstance(instance, str)
 
 
 def is_any(checker, instance):
@@ -50,7 +71,7 @@ class TypeChecker(object):
     """
     A ``type`` property checker.
 
-    A `TypeChecker` performs type checking for an `IValidator`. Type
+    A `TypeChecker` performs type checking for a `Validator`. Type
     checks to perform are updated using `TypeChecker.redefine` or
     `TypeChecker.redefine_many` and removed via `TypeChecker.remove`.
     Each of these return a new `TypeChecker` object.
@@ -61,7 +82,13 @@ class TypeChecker(object):
 
         The initial mapping of types to their checking functions.
     """
-    _type_checkers = attr.ib(default=pmap(), converter=pmap)
+
+    _type_checkers: typing.Mapping[
+        str, typing.Callable[["TypeChecker", typing.Any], bool],
+    ] = attr.ib(
+        default=pmap(),
+        converter=_typed_pmap_converter,
+    )
 
     def is_type(self, instance, type):
         """
@@ -90,7 +117,7 @@ class TypeChecker(object):
         try:
             fn = self._type_checkers[type]
         except KeyError:
-            raise UndefinedTypeCheck(type)
+            raise UndefinedTypeCheck(type) from None
 
         return fn(self, instance)
 
@@ -104,7 +131,7 @@ class TypeChecker(object):
 
             The name of the type to check.
 
-            fn (collections.Callable):
+            fn (collections.abc.Callable):
 
                 A function taking exactly two parameters - the type
                 checker calling the function and the instance to check.
@@ -141,7 +168,7 @@ class TypeChecker(object):
 
         Arguments:
 
-            types (~collections.Iterable):
+            types (~collections.abc.Iterable):
 
                 the names of the types to remove.
 
@@ -167,22 +194,24 @@ class TypeChecker(object):
 
 draft3_type_checker = TypeChecker(
     {
-        u"any": is_any,
-        u"array": is_array,
-        u"boolean": is_bool,
-        u"integer": is_integer,
-        u"object": is_object,
-        u"null": is_null,
-        u"number": is_number,
-        u"string": is_string,
+        "any": is_any,
+        "array": is_array,
+        "boolean": is_bool,
+        "integer": is_integer,
+        "object": is_object,
+        "null": is_null,
+        "number": is_number,
+        "string": is_string,
     },
 )
-draft4_type_checker = draft3_type_checker.remove(u"any")
+draft4_type_checker = draft3_type_checker.remove("any")
 draft6_type_checker = draft4_type_checker.redefine(
-    u"integer",
+    "integer",
     lambda checker, instance: (
-        is_integer(checker, instance) or
-        isinstance(instance, float) and instance.is_integer()
+        is_integer(checker, instance)
+        or isinstance(instance, float) and instance.is_integer()
     ),
 )
 draft7_type_checker = draft6_type_checker
+draft201909_type_checker = draft7_type_checker
+draft202012_type_checker = draft201909_type_checker
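
Editorial note: `TypeChecker.redefine`/`remove` above each return a new immutable checker. A hedged sketch of extending one; treating Decimal as a JSON "number" is an invented example:

    from decimal import Decimal

    from jsonschema import validators
    from jsonschema.validators import Draft202012Validator

    def is_number_or_decimal(checker, instance):
        # exclude bool the same way the stock integer checker does
        return (
            not isinstance(instance, bool)
            and isinstance(instance, (int, float, Decimal))
        )

    type_checker = Draft202012Validator.TYPE_CHECKER.redefine(
        "number", is_number_or_decimal,
    )
    CustomValidator = validators.extend(
        Draft202012Validator, type_checker=type_checker,
    )
    print(CustomValidator({"type": "number"}).is_valid(Decimal("1.5")))  # True
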
293 third_party/python/jsonschema/jsonschema/_utils.py vendored
@@ -1,9 +1,15 @@
+from collections.abc import Mapping, MutableMapping, Sequence
+from urllib.parse import urlsplit
 import itertools
 import json
-import pkgutil
 import re
+import sys
 
-from jsonschema.compat import MutableMapping, str_types, urlsplit
+# The files() API was added in Python 3.9.
+if sys.version_info >= (3, 9):  # pragma: no cover
+    from importlib import resources
+else:  # pragma: no cover
+    import importlib_resources as resources  # type: ignore
 
 
 class URIDict(MutableMapping):
@@ -51,34 +57,31 @@ def load_schema(name):
     Load a schema from ./schemas/``name``.json and return it.
     """
 
-    data = pkgutil.get_data("jsonschema", "schemas/{0}.json".format(name))
-    return json.loads(data.decode("utf-8"))
+    path = resources.files(__package__).joinpath(f"schemas/{name}.json")
+    data = path.read_text(encoding="utf-8")
+    return json.loads(data)
 
 
-def indent(string, times=1):
-    """
-    A dumb version of `textwrap.indent` from Python 3.3.
-    """
-
-    return "\n".join(" " * (4 * times) + line for line in string.splitlines())
-
-
-def format_as_index(indices):
+def format_as_index(container, indices):
     """
     Construct a single string containing indexing operations for the indices.
 
-    For example, [1, 2, "foo"] -> [1][2]["foo"]
+    For example for a container ``bar``, [1, 2, "foo"] -> bar[1][2]["foo"]
 
     Arguments:
 
+        container (str):
+
+            A word to use for the thing being indexed
+
         indices (sequence):
 
            The indices to format.
     """
 
     if not indices:
-        return ""
-    return "[%s]" % "][".join(repr(index) for index in indices)
+        return container
+    return f"{container}[{']['.join(repr(index) for index in indices)}]"
 
 
 def find_additional_properties(instance, schema):
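
Editorial note: the `load_schema` hunk above replaces `pkgutil.get_data` with the `importlib.resources` files() API, falling back to the vendored backport on older Pythons. A hedged sketch of the same pattern; reading "schemas/draft7.json" mirrors the package's real layout but is illustrative here:

    import json
    import sys

    if sys.version_info >= (3, 9):
        from importlib import resources
    else:  # the importlib_resources backport is vendored alongside jsonschema
        import importlib_resources as resources

    path = resources.files("jsonschema").joinpath("schemas/draft7.json")
    schema = json.loads(path.read_text(encoding="utf-8"))
    print(schema["$schema"])
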
@@ -109,48 +112,7 @@ def extras_msg(extras):
|
|||||||
verb = "was"
|
verb = "was"
|
||||||
else:
|
else:
|
||||||
verb = "were"
|
verb = "were"
|
||||||
return ", ".join(repr(extra) for extra in extras), verb
|
return ", ".join(repr(extra) for extra in sorted(extras)), verb
|
||||||
|
|
||||||
|
|
||||||
def types_msg(instance, types):
|
|
||||||
"""
|
|
||||||
Create an error message for a failure to match the given types.
|
|
||||||
|
|
||||||
If the ``instance`` is an object and contains a ``name`` property, it will
|
|
||||||
be considered to be a description of that object and used as its type.
|
|
||||||
|
|
||||||
Otherwise the message is simply the reprs of the given ``types``.
|
|
||||||
"""
|
|
||||||
|
|
||||||
reprs = []
|
|
||||||
for type in types:
|
|
||||||
try:
|
|
||||||
reprs.append(repr(type["name"]))
|
|
||||||
except Exception:
|
|
||||||
reprs.append(repr(type))
|
|
||||||
return "%r is not of type %s" % (instance, ", ".join(reprs))
|
|
||||||
|
|
||||||
|
|
||||||
def flatten(suitable_for_isinstance):
|
|
||||||
"""
|
|
||||||
isinstance() can accept a bunch of really annoying different types:
|
|
||||||
* a single type
|
|
||||||
* a tuple of types
|
|
||||||
* an arbitrary nested tree of tuples
|
|
||||||
|
|
||||||
Return a flattened tuple of the given argument.
|
|
||||||
"""
|
|
||||||
|
|
||||||
types = set()
|
|
||||||
|
|
||||||
if not isinstance(suitable_for_isinstance, tuple):
|
|
||||||
suitable_for_isinstance = (suitable_for_isinstance,)
|
|
||||||
for thing in suitable_for_isinstance:
|
|
||||||
if isinstance(thing, tuple):
|
|
||||||
types.update(flatten(thing))
|
|
||||||
else:
|
|
||||||
types.add(thing)
|
|
||||||
return tuple(types)
|
|
||||||
|
|
||||||
|
|
||||||
def ensure_list(thing):
|
def ensure_list(thing):
|
||||||
@@ -160,15 +122,45 @@ def ensure_list(thing):
|
|||||||
Otherwise, return it unchanged.
|
Otherwise, return it unchanged.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if isinstance(thing, str_types):
|
if isinstance(thing, str):
|
||||||
return [thing]
|
return [thing]
|
||||||
return thing
|
return thing
|
||||||
|
|
||||||
|
|
||||||
|
def _mapping_equal(one, two):
|
||||||
|
"""
|
||||||
|
Check if two mappings are equal using the semantics of `equal`.
|
||||||
|
"""
|
||||||
|
if len(one) != len(two):
|
||||||
|
return False
|
||||||
|
return all(
|
||||||
|
key in two and equal(value, two[key])
|
||||||
|
for key, value in one.items()
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _sequence_equal(one, two):
|
||||||
|
"""
|
||||||
|
Check if two sequences are equal using the semantics of `equal`.
|
||||||
|
"""
|
||||||
|
if len(one) != len(two):
|
||||||
|
return False
|
||||||
|
return all(equal(i, j) for i, j in zip(one, two))
|
||||||
|
|
||||||
|
|
||||||
def equal(one, two):
|
def equal(one, two):
|
||||||
"""
|
"""
|
||||||
Check if two things are equal, but evade booleans and ints being equal.
|
Check if two things are equal evading some Python type hierarchy semantics.
|
||||||
|
|
||||||
|
Specifically in JSON Schema, evade `bool` inheriting from `int`,
|
||||||
|
recursing into sequences to do the same.
|
||||||
"""
|
"""
|
||||||
|
if isinstance(one, str) or isinstance(two, str):
|
||||||
|
return one == two
|
||||||
|
if isinstance(one, Sequence) and isinstance(two, Sequence):
|
||||||
|
return _sequence_equal(one, two)
|
||||||
|
if isinstance(one, Mapping) and isinstance(two, Mapping):
|
||||||
|
return _mapping_equal(one, two)
|
||||||
return unbool(one) == unbool(two)
|
return unbool(one) == unbool(two)
|
||||||
|
|
||||||
|
|
||||||
@@ -188,25 +180,170 @@ def uniq(container):
|
|||||||
"""
|
"""
|
||||||
Check if all of a container's elements are unique.
|
Check if all of a container's elements are unique.
|
||||||
|
|
||||||
Successively tries first to rely that the elements are hashable, then
|
Tries to rely on the container being recursively sortable, or otherwise
|
||||||
falls back on them being sortable, and finally falls back on brute
|
falls back on (slow) brute force.
|
||||||
force.
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
try:
|
try:
|
||||||
return len(set(unbool(i) for i in container)) == len(container)
|
sort = sorted(unbool(i) for i in container)
|
||||||
except TypeError:
|
sliced = itertools.islice(sort, 1, None)
|
||||||
try:
|
|
||||||
sort = sorted(unbool(i) for i in container)
|
for i, j in zip(sort, sliced):
|
||||||
sliced = itertools.islice(sort, 1, None)
|
if equal(i, j):
|
||||||
for i, j in zip(sort, sliced):
|
return False
|
||||||
if i == j:
|
|
||||||
|
except (NotImplementedError, TypeError):
|
||||||
|
seen = []
|
||||||
|
for e in container:
|
||||||
|
e = unbool(e)
|
||||||
|
|
||||||
|
for i in seen:
|
||||||
|
if equal(i, e):
|
||||||
return False
|
return False
|
||||||
except (NotImplementedError, TypeError):
|
|
||||||
seen = []
|
seen.append(e)
|
||||||
for e in container:
|
|
||||||
e = unbool(e)
|
|
||||||
if e in seen:
|
|
||||||
return False
|
|
||||||
seen.append(e)
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def find_evaluated_item_indexes_by_schema(validator, instance, schema):
|
||||||
|
"""
|
||||||
|
Get all indexes of items that get evaluated under the current schema
|
||||||
|
|
||||||
|
Covers all keywords related to unevaluatedItems: items, prefixItems, if,
|
||||||
|
then, else, contains, unevaluatedItems, allOf, oneOf, anyOf
|
||||||
|
"""
|
||||||
|
if validator.is_type(schema, "boolean"):
|
||||||
|
return []
|
||||||
|
evaluated_indexes = []
|
||||||
|
|
||||||
|
if "items" in schema:
|
||||||
|
return list(range(0, len(instance)))
|
||||||
|
|
||||||
|
if "$ref" in schema:
|
||||||
|
scope, resolved = validator.resolver.resolve(schema["$ref"])
|
||||||
|
validator.resolver.push_scope(scope)
|
||||||
|
|
||||||
|
try:
|
||||||
|
evaluated_indexes += find_evaluated_item_indexes_by_schema(
|
||||||
|
validator, instance, resolved,
|
||||||
|
)
|
||||||
|
finally:
|
||||||
|
validator.resolver.pop_scope()
|
||||||
|
|
||||||
|
if "prefixItems" in schema:
|
||||||
|
evaluated_indexes += list(range(0, len(schema["prefixItems"])))
|
||||||
|
|
||||||
|
if "if" in schema:
|
||||||
|
if validator.evolve(schema=schema["if"]).is_valid(instance):
|
||||||
|
evaluated_indexes += find_evaluated_item_indexes_by_schema(
|
||||||
|
validator, instance, schema["if"],
|
||||||
|
)
|
||||||
|
if "then" in schema:
|
||||||
|
evaluated_indexes += find_evaluated_item_indexes_by_schema(
|
||||||
|
validator, instance, schema["then"],
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
if "else" in schema:
|
||||||
|
evaluated_indexes += find_evaluated_item_indexes_by_schema(
|
||||||
|
validator, instance, schema["else"],
|
||||||
|
)
|
||||||
|
|
||||||
|
for keyword in ["contains", "unevaluatedItems"]:
|
||||||
|
if keyword in schema:
|
||||||
|
for k, v in enumerate(instance):
|
||||||
|
if validator.evolve(schema=schema[keyword]).is_valid(v):
|
||||||
|
evaluated_indexes.append(k)
|
||||||
|
|
||||||
|
for keyword in ["allOf", "oneOf", "anyOf"]:
|
||||||
|
if keyword in schema:
|
||||||
|
for subschema in schema[keyword]:
|
||||||
|
errs = list(validator.descend(instance, subschema))
|
||||||
|
if not errs:
|
||||||
|
evaluated_indexes += find_evaluated_item_indexes_by_schema(
|
||||||
|
validator, instance, subschema,
|
||||||
|
)
|
||||||
|
|
||||||
|
return evaluated_indexes
|
||||||


def find_evaluated_property_keys_by_schema(validator, instance, schema):
    """
    Get all keys of items that get evaluated under the current schema

    Covers all keywords related to unevaluatedProperties: properties,
    additionalProperties, unevaluatedProperties, patternProperties,
    dependentSchemas, allOf, oneOf, anyOf, if, then, else
    """
    if validator.is_type(schema, "boolean"):
        return []
    evaluated_keys = []

    if "$ref" in schema:
        scope, resolved = validator.resolver.resolve(schema["$ref"])
        validator.resolver.push_scope(scope)

        try:
            evaluated_keys += find_evaluated_property_keys_by_schema(
                validator, instance, resolved,
            )
        finally:
            validator.resolver.pop_scope()

    for keyword in [
        "properties", "additionalProperties", "unevaluatedProperties",
    ]:
        if keyword in schema:
            if validator.is_type(schema[keyword], "boolean"):
                for property, value in instance.items():
                    if validator.evolve(schema=schema[keyword]).is_valid(
                        {property: value},
                    ):
                        evaluated_keys.append(property)

            if validator.is_type(schema[keyword], "object"):
                for property, subschema in schema[keyword].items():
                    if property in instance and validator.evolve(
                        schema=subschema,
                    ).is_valid(instance[property]):
                        evaluated_keys.append(property)

    if "patternProperties" in schema:
        for property, value in instance.items():
            for pattern, _ in schema["patternProperties"].items():
                if re.search(pattern, property) and validator.evolve(
                    schema=schema["patternProperties"],
                ).is_valid({property: value}):
                    evaluated_keys.append(property)

    if "dependentSchemas" in schema:
        for property, subschema in schema["dependentSchemas"].items():
            if property not in instance:
                continue
            evaluated_keys += find_evaluated_property_keys_by_schema(
                validator, instance, subschema,
            )

    for keyword in ["allOf", "oneOf", "anyOf"]:
        if keyword in schema:
            for subschema in schema[keyword]:
                errs = list(validator.descend(instance, subschema))
                if not errs:
                    evaluated_keys += find_evaluated_property_keys_by_schema(
                        validator, instance, subschema,
                    )

    if "if" in schema:
        if validator.evolve(schema=schema["if"]).is_valid(instance):
            evaluated_keys += find_evaluated_property_keys_by_schema(
                validator, instance, schema["if"],
            )
            if "then" in schema:
                evaluated_keys += find_evaluated_property_keys_by_schema(
                    validator, instance, schema["then"],
                )
        else:
            if "else" in schema:
                evaluated_keys += find_evaluated_property_keys_by_schema(
                    validator, instance, schema["else"],
                )

    return evaluated_keys
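Note: the property-side counterpart behaves analogously; a minimal sketch (not part of the diff), again via the public API:

from jsonschema import Draft202012Validator

schema = {
    "properties": {"name": {"type": "string"}},
    "unevaluatedProperties": False,
}
validator = Draft202012Validator(schema)
print(validator.is_valid({"name": "x"}))            # True: "name" is evaluated
print(validator.is_valid({"name": "x", "age": 1}))  # False: "age" is unevaluated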
@@ -1,3 +1,5 @@
+from fractions import Fraction
+from urllib.parse import urldefrag, urljoin
 import re

 from jsonschema._utils import (
@@ -5,25 +7,24 @@ from jsonschema._utils import (
     equal,
     extras_msg,
     find_additional_properties,
-    types_msg,
+    find_evaluated_item_indexes_by_schema,
+    find_evaluated_property_keys_by_schema,
     unbool,
     uniq,
 )
 from jsonschema.exceptions import FormatError, ValidationError
-from jsonschema.compat import iteritems


 def patternProperties(validator, patternProperties, instance, schema):
     if not validator.is_type(instance, "object"):
         return

-    for pattern, subschema in iteritems(patternProperties):
-        for k, v in iteritems(instance):
+    for pattern, subschema in patternProperties.items():
+        for k, v in instance.items():
             if re.search(pattern, k):
-                for error in validator.descend(
+                yield from validator.descend(
                     v, subschema, path=k, schema_path=pattern,
-                ):
-                    yield error
+                )


 def propertyNames(validator, propertyNames, instance, schema):
@@ -31,11 +32,7 @@ def propertyNames(validator, propertyNames, instance, schema):
         return

     for property in instance:
-        for error in validator.descend(
-            instance=property,
-            schema=propertyNames,
-        ):
-            yield error
+        yield from validator.descend(instance=property, schema=propertyNames)


 def additionalProperties(validator, aP, instance, schema):
@@ -46,20 +43,19 @@ def additionalProperties(validator, aP, instance, schema):

     if validator.is_type(aP, "object"):
         for extra in extras:
-            for error in validator.descend(instance[extra], aP, path=extra):
-                yield error
+            yield from validator.descend(instance[extra], aP, path=extra)
     elif not aP and extras:
         if "patternProperties" in schema:
-            patterns = sorted(schema["patternProperties"])
             if len(extras) == 1:
                 verb = "does"
             else:
                 verb = "do"
-            error = "%s %s not match any of the regexes: %s" % (
-                ", ".join(map(repr, sorted(extras))),
-                verb,
-                ", ".join(map(repr, patterns)),
-            )
+
+            joined = ", ".join(repr(each) for each in sorted(extras))
+            patterns = ", ".join(
+                repr(each) for each in sorted(schema["patternProperties"])
+            )
+            error = f"{joined} {verb} not match any of the regexes: {patterns}"
             yield ValidationError(error)
         else:
             error = "Additional properties are not allowed (%s %s unexpected)"
@@ -70,51 +66,76 @@ def items(validator, items, instance, schema):
     if not validator.is_type(instance, "array"):
         return

-    if validator.is_type(items, "array"):
-        for (index, item), subschema in zip(enumerate(instance), items):
-            for error in validator.descend(
-                item, subschema, path=index, schema_path=index,
-            ):
-                yield error
+    prefix = len(schema.get("prefixItems", []))
+    total = len(instance)
+    if items is False and total > prefix:
+        message = f"Expected at most {prefix} items, but found {total}"
+        yield ValidationError(message)
     else:
-        for index, item in enumerate(instance):
-            for error in validator.descend(item, items, path=index):
-                yield error
+        for index in range(prefix, total):
+            yield from validator.descend(
+                instance=instance[index],
+                schema=items,
+                path=index,
+            )


 def additionalItems(validator, aI, instance, schema):
     if (
-        not validator.is_type(instance, "array") or
-        validator.is_type(schema.get("items", {}), "object")
+        not validator.is_type(instance, "array")
+        or validator.is_type(schema.get("items", {}), "object")
     ):
         return

     len_items = len(schema.get("items", []))
     if validator.is_type(aI, "object"):
         for index, item in enumerate(instance[len_items:], start=len_items):
-            for error in validator.descend(item, aI, path=index):
-                yield error
+            yield from validator.descend(item, aI, path=index)
     elif not aI and len(instance) > len(schema.get("items", [])):
         error = "Additional items are not allowed (%s %s unexpected)"
         yield ValidationError(
-            error %
-            extras_msg(instance[len(schema.get("items", [])):])
+            error % extras_msg(instance[len(schema.get("items", [])):]),
         )


 def const(validator, const, instance, schema):
     if not equal(instance, const):
-        yield ValidationError("%r was expected" % (const,))
+        yield ValidationError(f"{const!r} was expected")


 def contains(validator, contains, instance, schema):
     if not validator.is_type(instance, "array"):
         return

-    if not any(validator.is_valid(element, contains) for element in instance):
-        yield ValidationError(
-            "None of %r are valid under the given schema" % (instance,)
-        )
+    matches = 0
+    min_contains = schema.get("minContains", 1)
+    max_contains = schema.get("maxContains", len(instance))
+
+    for each in instance:
+        if validator.evolve(schema=contains).is_valid(each):
+            matches += 1
+            if matches > max_contains:
+                yield ValidationError(
+                    "Too many items match the given schema "
+                    f"(expected at most {max_contains})",
+                    validator="maxContains",
+                    validator_value=max_contains,
+                )
+                return
+
+    if matches < min_contains:
+        if not matches:
+            yield ValidationError(
+                f"{instance!r} does not contain items "
+                "matching the given schema",
+            )
+        else:
+            yield ValidationError(
+                "Too few items match the given schema (expected at least "
+                f"{min_contains} but only {matches} matched)",
+                validator="minContains",
+                validator_value=min_contains,
+            )

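Note: the rewritten `contains` above now honours the draft 2019-09+ `minContains`/`maxContains` bounds rather than requiring just one match. A hedged sketch of the observable behaviour:

from jsonschema import Draft202012Validator

schema = {"contains": {"type": "integer"}, "minContains": 2, "maxContains": 3}
validator = Draft202012Validator(schema)
print(validator.is_valid([1, "a", 2]))   # True: exactly two integers match
print(validator.is_valid([1]))           # False: too few matching items
print(validator.is_valid([1, 2, 3, 4]))  # False: too many matching items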
 def exclusiveMinimum(validator, minimum, instance, schema):
@@ -123,9 +144,8 @@ def exclusiveMinimum(validator, minimum, instance, schema):

     if instance <= minimum:
         yield ValidationError(
-            "%r is less than or equal to the minimum of %r" % (
-                instance, minimum,
-            ),
+            f"{instance!r} is less than or equal to "
+            f"the minimum of {minimum!r}",
         )
@@ -135,9 +155,8 @@ def exclusiveMaximum(validator, maximum, instance, schema):

     if instance >= maximum:
         yield ValidationError(
-            "%r is greater than or equal to the maximum of %r" % (
-                instance, maximum,
-            ),
+            f"{instance!r} is greater than or equal "
+            f"to the maximum of {maximum!r}",
         )
@@ -146,9 +165,8 @@ def minimum(validator, minimum, instance, schema):
         return

     if instance < minimum:
-        yield ValidationError(
-            "%r is less than the minimum of %r" % (instance, minimum)
-        )
+        message = f"{instance!r} is less than the minimum of {minimum!r}"
+        yield ValidationError(message)


 def maximum(validator, maximum, instance, schema):
@@ -156,9 +174,8 @@ def maximum(validator, maximum, instance, schema):
         return

     if instance > maximum:
-        yield ValidationError(
-            "%r is greater than the maximum of %r" % (instance, maximum)
-        )
+        message = f"{instance!r} is greater than the maximum of {maximum!r}"
+        yield ValidationError(message)


 def multipleOf(validator, dB, instance, schema):
@@ -167,39 +184,52 @@ def multipleOf(validator, dB, instance, schema):

     if isinstance(dB, float):
         quotient = instance / dB
-        failed = int(quotient) != quotient
+        try:
+            failed = int(quotient) != quotient
+        except OverflowError:
+            # When `instance` is large and `dB` is less than one,
+            # quotient can overflow to infinity; and then casting to int
+            # raises an error.
+            #
+            # In this case we fall back to Fraction logic, which is
+            # exact and cannot overflow.  The performance is also
+            # acceptable: we try the fast all-float option first, and
+            # we know that fraction(dB) can have at most a few hundred
+            # digits in each part.  The worst-case slowdown is therefore
+            # for already-slow enormous integers or Decimals.
+            failed = (Fraction(instance) / Fraction(dB)).denominator != 1
     else:
         failed = instance % dB

     if failed:
-        yield ValidationError("%r is not a multiple of %r" % (instance, dB))
+        yield ValidationError(f"{instance!r} is not a multiple of {dB}")

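Note: the `OverflowError` branch is easiest to see in isolation. A float quotient can overflow to infinity, and `int(inf)` raises, so the validator falls back to exact `Fraction` arithmetic (illustrative sketch, not part of the diff):

from fractions import Fraction

instance, dB = 1e308, 1e-10
quotient = instance / dB  # overflows to float("inf")
try:
    failed = int(quotient) != quotient
except OverflowError:
    # Exact rational arithmetic cannot overflow.
    failed = (Fraction(instance) / Fraction(dB)).denominator != 1
print(failed)  # True: 1e308 is not an exact multiple of the float 1e-10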
 def minItems(validator, mI, instance, schema):
     if validator.is_type(instance, "array") and len(instance) < mI:
-        yield ValidationError("%r is too short" % (instance,))
+        yield ValidationError(f"{instance!r} is too short")


 def maxItems(validator, mI, instance, schema):
     if validator.is_type(instance, "array") and len(instance) > mI:
-        yield ValidationError("%r is too long" % (instance,))
+        yield ValidationError(f"{instance!r} is too long")


 def uniqueItems(validator, uI, instance, schema):
     if (
-        uI and
-        validator.is_type(instance, "array") and
-        not uniq(instance)
+        uI
+        and validator.is_type(instance, "array")
+        and not uniq(instance)
     ):
-        yield ValidationError("%r has non-unique elements" % (instance,))
+        yield ValidationError(f"{instance!r} has non-unique elements")


 def pattern(validator, patrn, instance, schema):
     if (
-        validator.is_type(instance, "string") and
-        not re.search(patrn, instance)
+        validator.is_type(instance, "string")
+        and not re.search(patrn, instance)
     ):
-        yield ValidationError("%r does not match %r" % (instance, patrn))
+        yield ValidationError(f"{instance!r} does not match {patrn!r}")


 def format(validator, format, instance, schema):
@@ -212,80 +242,99 @@ def format(validator, format, instance, schema):

 def minLength(validator, mL, instance, schema):
     if validator.is_type(instance, "string") and len(instance) < mL:
-        yield ValidationError("%r is too short" % (instance,))
+        yield ValidationError(f"{instance!r} is too short")


 def maxLength(validator, mL, instance, schema):
     if validator.is_type(instance, "string") and len(instance) > mL:
-        yield ValidationError("%r is too long" % (instance,))
+        yield ValidationError(f"{instance!r} is too long")


-def dependencies(validator, dependencies, instance, schema):
+def dependentRequired(validator, dependentRequired, instance, schema):
     if not validator.is_type(instance, "object"):
         return

-    for property, dependency in iteritems(dependencies):
+    for property, dependency in dependentRequired.items():
         if property not in instance:
             continue

-        if validator.is_type(dependency, "array"):
-            for each in dependency:
-                if each not in instance:
-                    message = "%r is a dependency of %r"
-                    yield ValidationError(message % (each, property))
-        else:
-            for error in validator.descend(
-                instance, dependency, schema_path=property,
-            ):
-                yield error
+        for each in dependency:
+            if each not in instance:
+                message = f"{each!r} is a dependency of {property!r}"
+                yield ValidationError(message)
+
+
+def dependentSchemas(validator, dependentSchemas, instance, schema):
+    if not validator.is_type(instance, "object"):
+        return
+
+    for property, dependency in dependentSchemas.items():
+        if property not in instance:
+            continue
+        yield from validator.descend(
+            instance, dependency, schema_path=property,
+        )


 def enum(validator, enums, instance, schema):
     if instance == 0 or instance == 1:
         unbooled = unbool(instance)
         if all(unbooled != unbool(each) for each in enums):
-            yield ValidationError("%r is not one of %r" % (instance, enums))
+            yield ValidationError(f"{instance!r} is not one of {enums!r}")
     elif instance not in enums:
-        yield ValidationError("%r is not one of %r" % (instance, enums))
+        yield ValidationError(f"{instance!r} is not one of {enums!r}")


 def ref(validator, ref, instance, schema):
     resolve = getattr(validator.resolver, "resolve", None)
     if resolve is None:
         with validator.resolver.resolving(ref) as resolved:
-            for error in validator.descend(instance, resolved):
-                yield error
+            yield from validator.descend(instance, resolved)
     else:
         scope, resolved = validator.resolver.resolve(ref)
         validator.resolver.push_scope(scope)

         try:
-            for error in validator.descend(instance, resolved):
-                yield error
+            yield from validator.descend(instance, resolved)
         finally:
             validator.resolver.pop_scope()


+def dynamicRef(validator, dynamicRef, instance, schema):
+    _, fragment = urldefrag(dynamicRef)
+
+    for url in validator.resolver._scopes_stack:
+        lookup_url = urljoin(url, dynamicRef)
+        with validator.resolver.resolving(lookup_url) as subschema:
+            if ("$dynamicAnchor" in subschema
+                    and fragment == subschema["$dynamicAnchor"]):
+                yield from validator.descend(instance, subschema)
+                break
+    else:
+        with validator.resolver.resolving(dynamicRef) as subschema:
+            yield from validator.descend(instance, subschema)
+
+
 def type(validator, types, instance, schema):
     types = ensure_list(types)

     if not any(validator.is_type(instance, type) for type in types):
-        yield ValidationError(types_msg(instance, types))
+        reprs = ", ".join(repr(type) for type in types)
+        yield ValidationError(f"{instance!r} is not of type {reprs}")


 def properties(validator, properties, instance, schema):
     if not validator.is_type(instance, "object"):
         return

-    for property, subschema in iteritems(properties):
+    for property, subschema in properties.items():
         if property in instance:
-            for error in validator.descend(
+            yield from validator.descend(
                 instance[property],
                 subschema,
                 path=property,
                 schema_path=property,
-            ):
-                yield error
+            )


 def required(validator, required, instance, schema):
@@ -293,27 +342,24 @@ def required(validator, required, instance, schema):
         return
     for property in required:
         if property not in instance:
-            yield ValidationError("%r is a required property" % property)
+            yield ValidationError(f"{property!r} is a required property")


 def minProperties(validator, mP, instance, schema):
     if validator.is_type(instance, "object") and len(instance) < mP:
-        yield ValidationError(
-            "%r does not have enough properties" % (instance,)
-        )
+        yield ValidationError(f"{instance!r} does not have enough properties")


 def maxProperties(validator, mP, instance, schema):
     if not validator.is_type(instance, "object"):
         return
     if validator.is_type(instance, "object") and len(instance) > mP:
-        yield ValidationError("%r has too many properties" % (instance,))
+        yield ValidationError(f"{instance!r} has too many properties")


 def allOf(validator, allOf, instance, schema):
     for index, subschema in enumerate(allOf):
-        for error in validator.descend(instance, subschema, schema_path=index):
-            yield error
+        yield from validator.descend(instance, subschema, schema_path=index)


 def anyOf(validator, anyOf, instance, schema):
@@ -325,7 +371,7 @@ def anyOf(validator, anyOf, instance, schema):
         all_errors.extend(errs)
     else:
         yield ValidationError(
-            "%r is not valid under any of the given schemas" % (instance,),
+            f"{instance!r} is not valid under any of the given schemas",
             context=all_errors,
         )
@@ -341,33 +387,81 @@ def oneOf(validator, oneOf, instance, schema):
         all_errors.extend(errs)
     else:
         yield ValidationError(
-            "%r is not valid under any of the given schemas" % (instance,),
+            f"{instance!r} is not valid under any of the given schemas",
             context=all_errors,
         )

-    more_valid = [s for i, s in subschemas if validator.is_valid(instance, s)]
+    more_valid = [
+        each for _, each in subschemas
+        if validator.evolve(schema=each).is_valid(instance)
+    ]
     if more_valid:
         more_valid.append(first_valid)
         reprs = ", ".join(repr(schema) for schema in more_valid)
-        yield ValidationError(
-            "%r is valid under each of %s" % (instance, reprs)
-        )
+        yield ValidationError(f"{instance!r} is valid under each of {reprs}")


 def not_(validator, not_schema, instance, schema):
-    if validator.is_valid(instance, not_schema):
-        yield ValidationError(
-            "%r is not allowed for %r" % (not_schema, instance)
-        )
+    if validator.evolve(schema=not_schema).is_valid(instance):
+        message = f"{instance!r} should not be valid under {not_schema!r}"
+        yield ValidationError(message)


 def if_(validator, if_schema, instance, schema):
-    if validator.is_valid(instance, if_schema):
-        if u"then" in schema:
-            then = schema[u"then"]
-            for error in validator.descend(instance, then, schema_path="then"):
-                yield error
-    elif u"else" in schema:
-        else_ = schema[u"else"]
-        for error in validator.descend(instance, else_, schema_path="else"):
-            yield error
+    if validator.evolve(schema=if_schema).is_valid(instance):
+        if "then" in schema:
+            then = schema["then"]
+            yield from validator.descend(instance, then, schema_path="then")
+    elif "else" in schema:
+        else_ = schema["else"]
+        yield from validator.descend(instance, else_, schema_path="else")
+
+
+def unevaluatedItems(validator, unevaluatedItems, instance, schema):
+    if not validator.is_type(instance, "array"):
+        return
+    evaluated_item_indexes = find_evaluated_item_indexes_by_schema(
+        validator, instance, schema,
+    )
+    unevaluated_items = [
+        item for index, item in enumerate(instance)
+        if index not in evaluated_item_indexes
+    ]
+    if unevaluated_items:
+        error = "Unevaluated items are not allowed (%s %s unexpected)"
+        yield ValidationError(error % extras_msg(unevaluated_items))
+
+
+def unevaluatedProperties(validator, unevaluatedProperties, instance, schema):
+    if not validator.is_type(instance, "object"):
+        return
+    evaluated_property_keys = find_evaluated_property_keys_by_schema(
+        validator, instance, schema,
+    )
+    unevaluated_property_keys = []
+    for property in instance:
+        if property not in evaluated_property_keys:
+            for _ in validator.descend(
+                instance[property],
+                unevaluatedProperties,
+                path=property,
+                schema_path=property,
+            ):
+                unevaluated_property_keys.append(property)
+
+    if unevaluated_property_keys:
+        error = "Unevaluated properties are not allowed (%s %s unexpected)"
+        yield ValidationError(error % extras_msg(unevaluated_property_keys))
+
+
+def prefixItems(validator, prefixItems, instance, schema):
+    if not validator.is_type(instance, "array"):
+        return
+
+    for (index, item), subschema in zip(enumerate(instance), prefixItems):
+        yield from validator.descend(
+            instance=item,
+            schema=subschema,
+            schema_path=index,
+            path=index,
+        )
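Note: taken together, `prefixItems` plus the reworked `items` replace the old array-form `items`/`additionalItems` pair in draft 2020-12; `items` now applies only to positions beyond `prefixItems`. A small sketch (not part of the diff):

from jsonschema import Draft202012Validator

schema = {
    "prefixItems": [{"type": "string"}],  # constrains position 0
    "items": {"type": "integer"},         # constrains every later position
}
validator = Draft202012Validator(schema)
print(validator.is_valid(["id", 1, 2]))  # True
print(validator.is_valid(["id", "x"]))   # False: the tail must be integers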
@@ -1,19 +1,18 @@
-#!/usr/bin/env python
 """
 A performance benchmark using the example from issue #232.

-See https://github.com/Julian/jsonschema/pull/232.
+See https://github.com/python-jsonschema/jsonschema/pull/232.
 """
-from twisted.python.filepath import FilePath
+from pathlib import Path
+
 from pyperf import Runner
 from pyrsistent import m

 from jsonschema.tests._suite import Version
 import jsonschema


 issue232 = Version(
-    path=FilePath(__file__).sibling("issue232"),
+    path=Path(__file__).parent / "issue232",
     remotes=m(),
     name="issue232",
 )
2653
third_party/python/jsonschema/jsonschema/benchmarks/issue232/issue.json
vendored
Normal file
File diff suppressed because it is too large
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
 """
 A performance benchmark using the official test suite.

@@ -9,6 +8,5 @@ from pyperf import Runner

 from jsonschema.tests._suite import Suite


 if __name__ == "__main__":
     Suite().benchmark(runner=Runner())
284
third_party/python/jsonschema/jsonschema/cli.py
vendored
@@ -1,25 +1,143 @@
 """
 The ``jsonschema`` command line.
 """
-from __future__ import absolute_import
+from json import JSONDecodeError
+from textwrap import dedent
 import argparse
 import json
 import sys
+import traceback

-from jsonschema import __version__
-from jsonschema._reflect import namedAny
-from jsonschema.validators import validator_for
+try:
+    from importlib import metadata
+except ImportError:
+    import importlib_metadata as metadata  # type: ignore
+
+try:
+    from pkgutil import resolve_name
+except ImportError:
+    from pkgutil_resolve_name import resolve_name  # type: ignore
+
+import attr
+
+from jsonschema.exceptions import SchemaError
+from jsonschema.validators import RefResolver, validator_for


-def _namedAnyWithDefault(name):
+class _CannotLoadFile(Exception):
+    pass
+
+
+@attr.s
+class _Outputter(object):
+
+    _formatter = attr.ib()
+    _stdout = attr.ib()
+    _stderr = attr.ib()
+
+    @classmethod
+    def from_arguments(cls, arguments, stdout, stderr):
+        if arguments["output"] == "plain":
+            formatter = _PlainFormatter(arguments["error_format"])
+        elif arguments["output"] == "pretty":
+            formatter = _PrettyFormatter()
+        return cls(formatter=formatter, stdout=stdout, stderr=stderr)
+
+    def load(self, path):
+        try:
+            file = open(path)
+        except FileNotFoundError:
+            self.filenotfound_error(path=path, exc_info=sys.exc_info())
+            raise _CannotLoadFile()
+
+        with file:
+            try:
+                return json.load(file)
+            except JSONDecodeError:
+                self.parsing_error(path=path, exc_info=sys.exc_info())
+                raise _CannotLoadFile()
+
+    def filenotfound_error(self, **kwargs):
+        self._stderr.write(self._formatter.filenotfound_error(**kwargs))
+
+    def parsing_error(self, **kwargs):
+        self._stderr.write(self._formatter.parsing_error(**kwargs))
+
+    def validation_error(self, **kwargs):
+        self._stderr.write(self._formatter.validation_error(**kwargs))
+
+    def validation_success(self, **kwargs):
+        self._stdout.write(self._formatter.validation_success(**kwargs))
+
+
+@attr.s
+class _PrettyFormatter(object):
+
+    _ERROR_MSG = dedent(
+        """\
+        ===[{type}]===({path})===
+
+        {body}
+        -----------------------------
+        """,
+    )
+    _SUCCESS_MSG = "===[SUCCESS]===({path})===\n"
+
+    def filenotfound_error(self, path, exc_info):
+        return self._ERROR_MSG.format(
+            path=path,
+            type="FileNotFoundError",
+            body="{!r} does not exist.".format(path),
+        )
+
+    def parsing_error(self, path, exc_info):
+        exc_type, exc_value, exc_traceback = exc_info
+        exc_lines = "".join(
+            traceback.format_exception(exc_type, exc_value, exc_traceback),
+        )
+        return self._ERROR_MSG.format(
+            path=path,
+            type=exc_type.__name__,
+            body=exc_lines,
+        )
+
+    def validation_error(self, instance_path, error):
+        return self._ERROR_MSG.format(
+            path=instance_path,
+            type=error.__class__.__name__,
+            body=error,
+        )
+
+    def validation_success(self, instance_path):
+        return self._SUCCESS_MSG.format(path=instance_path)
+
+
+@attr.s
+class _PlainFormatter(object):
+
+    _error_format = attr.ib()
+
+    def filenotfound_error(self, path, exc_info):
+        return "{!r} does not exist.\n".format(path)
+
+    def parsing_error(self, path, exc_info):
+        return "Failed to parse {}: {}\n".format(
+            "<stdin>" if path == "<stdin>" else repr(path),
+            exc_info[1],
+        )
+
+    def validation_error(self, instance_path, error):
+        return self._error_format.format(file_name=instance_path, error=error)
+
+    def validation_success(self, instance_path):
+        return ""
+
+
+def _resolve_name_with_default(name):
     if "." not in name:
         name = "jsonschema." + name
-    return namedAny(name)
-
-
-def _json_file(path):
-    with open(path) as file:
-        return json.load(file)
+    return resolve_name(name)


 parser = argparse.ArgumentParser(
@@ -29,62 +147,142 @@ parser.add_argument(
     "-i", "--instance",
     action="append",
     dest="instances",
-    type=_json_file,
-    help=(
-        "a path to a JSON instance (i.e. filename.json) "
-        "to validate (may be specified multiple times)"
-    ),
+    help="""
+        a path to a JSON instance (i.e. filename.json) to validate (may
+        be specified multiple times). If no instances are provided via this
+        option, one will be expected on standard input.
+    """,
 )
 parser.add_argument(
     "-F", "--error-format",
-    default="{error.instance}: {error.message}\n",
-    help=(
-        "the format to use for each error output message, specified in "
-        "a form suitable for passing to str.format, which will be called "
-        "with 'error' for each error"
-    ),
+    help="""
+        the format to use for each validation error message, specified
+        in a form suitable for str.format. This string will be passed
+        one formatted object named 'error' for each ValidationError.
+        Only provide this option when using --output=plain, which is the
+        default. If this argument is unprovided and --output=plain is
+        used, a simple default representation will be used.
+    """,
+)
+parser.add_argument(
+    "-o", "--output",
+    choices=["plain", "pretty"],
+    default="plain",
+    help="""
+        an output format to use. 'plain' (default) will produce minimal
+        text with one line for each error, while 'pretty' will produce
+        more detailed human-readable output on multiple lines.
+    """,
 )
 parser.add_argument(
     "-V", "--validator",
-    type=_namedAnyWithDefault,
-    help=(
-        "the fully qualified object name of a validator to use, or, for "
-        "validators that are registered with jsonschema, simply the name "
-        "of the class."
-    ),
+    type=_resolve_name_with_default,
+    help="""
+        the fully qualified object name of a validator to use, or, for
+        validators that are registered with jsonschema, simply the name
+        of the class.
+    """,
+)
+parser.add_argument(
+    "--base-uri",
+    help="""
+        a base URI to assign to the provided schema, even if it does not
+        declare one (via e.g. $id). This option can be used if you wish to
+        resolve relative references to a particular URI (or local path)
+    """,
 )
 parser.add_argument(
     "--version",
     action="version",
-    version=__version__,
+    version=metadata.version("jsonschema"),
 )
 parser.add_argument(
     "schema",
-    help="the JSON Schema to validate with (i.e. schema.json)",
-    type=_json_file,
+    help="the path to a JSON Schema to validate with (i.e. schema.json)",
 )


 def parse_args(args):
     arguments = vars(parser.parse_args(args=args or ["--help"]))
-    if arguments["validator"] is None:
-        arguments["validator"] = validator_for(arguments["schema"])
+    if arguments["output"] != "plain" and arguments["error_format"]:
+        raise parser.error(
+            "--error-format can only be used with --output plain",
+        )
+    if arguments["output"] == "plain" and arguments["error_format"] is None:
+        arguments["error_format"] = "{error.instance}: {error.message}\n"
     return arguments


+def _validate_instance(instance_path, instance, validator, outputter):
+    invalid = False
+    for error in validator.iter_errors(instance):
+        invalid = True
+        outputter.validation_error(instance_path=instance_path, error=error)
+
+    if not invalid:
+        outputter.validation_success(instance_path=instance_path)
+    return invalid
+
+
 def main(args=sys.argv[1:]):
     sys.exit(run(arguments=parse_args(args=args)))


-def run(arguments, stdout=sys.stdout, stderr=sys.stderr):
-    error_format = arguments["error_format"]
-    validator = arguments["validator"](schema=arguments["schema"])
-
-    validator.check_schema(arguments["schema"])
-
-    errored = False
-    for instance in arguments["instances"] or ():
-        for error in validator.iter_errors(instance):
-            stderr.write(error_format.format(error=error))
-            errored = True
-    return errored
+def run(arguments, stdout=sys.stdout, stderr=sys.stderr, stdin=sys.stdin):
+    outputter = _Outputter.from_arguments(
+        arguments=arguments,
+        stdout=stdout,
+        stderr=stderr,
+    )
+
+    try:
+        schema = outputter.load(arguments["schema"])
+    except _CannotLoadFile:
+        return 1
+
+    if arguments["validator"] is None:
+        arguments["validator"] = validator_for(schema)
+
+    try:
+        arguments["validator"].check_schema(schema)
+    except SchemaError as error:
+        outputter.validation_error(
+            instance_path=arguments["schema"],
+            error=error,
+        )
+        return 1
+
+    if arguments["instances"]:
+        load, instances = outputter.load, arguments["instances"]
+    else:
+        def load(_):
+            try:
+                return json.load(stdin)
+            except JSONDecodeError:
+                outputter.parsing_error(
+                    path="<stdin>", exc_info=sys.exc_info(),
+                )
+                raise _CannotLoadFile()
+        instances = ["<stdin>"]
+
+    resolver = RefResolver(
+        base_uri=arguments["base_uri"],
+        referrer=schema,
+    ) if arguments["base_uri"] is not None else None
+
+    validator = arguments["validator"](schema, resolver=resolver)
+    exit_code = 0
+    for each in instances:
+        try:
+            instance = load(each)
+        except _CannotLoadFile:
+            exit_code = 1
+        else:
+            exit_code |= _validate_instance(
+                instance_path=each,
+                instance=instance,
+                validator=validator,
+                outputter=outputter,
+            )
+
+    return exit_code
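Note: a hedged sketch (not part of the diff) of driving the rewritten CLI programmatically through the `parse_args`/`run` functions defined above; the `schema.json`/`instance.json` paths are hypothetical:

import sys

from jsonschema import cli

# Hypothetical file paths for illustration only.
arguments = cli.parse_args(
    ["--output", "pretty", "-i", "instance.json", "schema.json"],
)
sys.exit(cli.run(arguments))  # 0 when every instance validates, 1 otherwise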
@@ -1,55 +0,0 @@
-"""
-Python 2/3 compatibility helpers.
-
-Note: This module is *not* public API.
-"""
-import contextlib
-import operator
-import sys
-
-
-try:
-    from collections.abc import MutableMapping, Sequence  # noqa
-except ImportError:
-    from collections import MutableMapping, Sequence  # noqa
-
-PY3 = sys.version_info[0] >= 3
-
-if PY3:
-    zip = zip
-    from functools import lru_cache
-    from io import StringIO as NativeIO
-    from urllib.parse import (
-        unquote, urljoin, urlunsplit, SplitResult, urlsplit
-    )
-    from urllib.request import pathname2url, urlopen
-    str_types = str,
-    int_types = int,
-    iteritems = operator.methodcaller("items")
-else:
-    from itertools import izip as zip  # noqa
-    from io import BytesIO as NativeIO
-    from urlparse import urljoin, urlunsplit, SplitResult, urlsplit
-    from urllib import pathname2url, unquote  # noqa
-    import urllib2  # noqa
-    def urlopen(*args, **kwargs):
-        return contextlib.closing(urllib2.urlopen(*args, **kwargs))
-
-    str_types = basestring
-    int_types = int, long
-    iteritems = operator.methodcaller("iteritems")
-
-    from functools32 import lru_cache
-
-
-def urldefrag(url):
-    if "#" in url:
-        s, n, p, q, frag = urlsplit(url)
-        defrag = urlunsplit((s, n, p, q, ""))
-    else:
-        defrag = url
-        frag = ""
-    return defrag, frag
-
-
-# flake8: noqa
@@ -1,19 +1,20 @@
 """
 Validation errors, and some surrounding helpers.
 """
+from __future__ import annotations
+
 from collections import defaultdict, deque
+from pprint import pformat
+from textwrap import dedent, indent
+import heapq
 import itertools
-import pprint
-import textwrap

 import attr

 from jsonschema import _utils
-from jsonschema.compat import PY3, iteritems

-WEAK_MATCHES = frozenset(["anyOf", "oneOf"])
-STRONG_MATCHES = frozenset()
+WEAK_MATCHES: frozenset[str] = frozenset(["anyOf", "oneOf"])
+STRONG_MATCHES: frozenset[str] = frozenset()

 _unset = _utils.Unset()
@@ -31,6 +32,7 @@ class _Error(Exception):
         schema=_unset,
         schema_path=(),
         parent=None,
+        type_checker=_unset,
     ):
         super(_Error, self).__init__(
             message,
@@ -54,45 +56,42 @@ class _Error(Exception):
         self.instance = instance
         self.schema = schema
         self.parent = parent
+        self._type_checker = type_checker

         for error in context:
             error.parent = self

     def __repr__(self):
-        return "<%s: %r>" % (self.__class__.__name__, self.message)
+        return f"<{self.__class__.__name__}: {self.message!r}>"

-    def __unicode__(self):
+    def __str__(self):
         essential_for_verbose = (
             self.validator, self.validator_value, self.instance, self.schema,
         )
         if any(m is _unset for m in essential_for_verbose):
             return self.message

-        pschema = pprint.pformat(self.schema, width=72)
-        pinstance = pprint.pformat(self.instance, width=72)
-        return self.message + textwrap.dedent("""
-
-            Failed validating %r in %s%s:
-            %s
-
-            On %s%s:
-            %s
-            """.rstrip()
-        ) % (
-            self.validator,
-            self._word_for_schema_in_error_message,
-            _utils.format_as_index(list(self.relative_schema_path)[:-1]),
-            _utils.indent(pschema),
-            self._word_for_instance_in_error_message,
-            _utils.format_as_index(self.relative_path),
-            _utils.indent(pinstance),
-        )
-
-    if PY3:
-        __str__ = __unicode__
-    else:
-        def __str__(self):
-            return unicode(self).encode("utf-8")
+        schema_path = _utils.format_as_index(
+            container=self._word_for_schema_in_error_message,
+            indices=list(self.relative_schema_path)[:-1],
+        )
+        instance_path = _utils.format_as_index(
+            container=self._word_for_instance_in_error_message,
+            indices=self.relative_path,
+        )
+        prefix = 16 * " "
+
+        return dedent(
+            f"""\
+            {self.message}
+
+            Failed validating {self.validator!r} in {schema_path}:
+                {indent(pformat(self.schema, width=72), prefix).lstrip()}
+
+            On {instance_path}:
+                {indent(pformat(self.instance, width=72), prefix).lstrip()}
+            """.rstrip(),
+        )

     @classmethod
     def create_from(cls, other):
@@ -118,8 +117,21 @@ class _Error(Exception):
         path.extendleft(reversed(parent.absolute_schema_path))
         return path

-    def _set(self, **kwargs):
-        for k, v in iteritems(kwargs):
+    @property
+    def json_path(self):
+        path = "$"
+        for elem in self.absolute_path:
+            if isinstance(elem, int):
+                path += "[" + str(elem) + "]"
+            else:
+                path += "." + elem
+        return path
+
+    def _set(self, type_checker=None, **kwargs):
+        if type_checker is not None and self._type_checker is _unset:
+            self._type_checker = type_checker
+
+        for k, v in kwargs.items():
             if getattr(self, k) is _unset:
                 setattr(self, k, v)

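Note: the new `json_path` property renders an error's location as a JSONPath-style string; a minimal sketch (not part of the diff):

from jsonschema import Draft202012Validator

schema = {"properties": {"tags": {"items": {"type": "string"}}}}
error = next(Draft202012Validator(schema).iter_errors({"tags": ["ok", 3]}))
print(error.json_path)  # $.tags[1]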
@@ -130,6 +142,20 @@ class _Error(Exception):
         )
         return dict((attr, getattr(self, attr)) for attr in attrs)

+    def _matches_type(self):
+        try:
+            expected = self.schema["type"]
+        except (KeyError, TypeError):
+            return False
+
+        if isinstance(expected, str):
+            return self._type_checker.is_type(self.instance, expected)
+
+        return any(
+            self._type_checker.is_type(self.instance, expected_type)
+            for expected_type in expected
+        )
+

 class ValidationError(_Error):
     """
@@ -169,14 +195,8 @@ class UndefinedTypeCheck(Exception):
     def __init__(self, type):
         self.type = type

-    def __unicode__(self):
-        return "Type %r is unknown to this type checker" % self.type
-
-    if PY3:
-        __str__ = __unicode__
-    else:
-        def __str__(self):
-            return unicode(self).encode("utf-8")
+    def __str__(self):
+        return f"Type {self.type!r} is unknown to this type checker"


 class UnknownType(Exception):
@@ -189,23 +209,18 @@ class UnknownType(Exception):
         self.instance = instance
         self.schema = schema

-    def __unicode__(self):
-        pschema = pprint.pformat(self.schema, width=72)
-        pinstance = pprint.pformat(self.instance, width=72)
-        return textwrap.dedent("""
-            Unknown type %r for validator with schema:
-            %s
-
-            While checking instance:
-            %s
-            """.rstrip()
-        ) % (self.type, _utils.indent(pschema), _utils.indent(pinstance))
-
-    if PY3:
-        __str__ = __unicode__
-    else:
-        def __str__(self):
-            return unicode(self).encode("utf-8")
+    def __str__(self):
+        prefix = 16 * " "
+
+        return dedent(
+            f"""\
+            Unknown type {self.type!r} for validator with schema:
+                {indent(pformat(self.schema, width=72), prefix).lstrip()}
+
+            While checking instance:
+                {indent(pformat(self.instance, width=72), prefix).lstrip()}
+            """.rstrip(),
+        )


 class FormatError(Exception):
@@ -218,15 +233,9 @@ class FormatError(Exception):
         self.message = message
         self.cause = self.__cause__ = cause

-    def __unicode__(self):
+    def __str__(self):
         return self.message

-    if PY3:
-        __str__ = __unicode__
-    else:
-        def __str__(self):
-            return self.message.encode("utf-8")
-

 class ErrorTree(object):
     """
@@ -258,10 +267,10 @@ class ErrorTree(object):
         """
         Retrieve the child tree one level down at the given ``index``.

-        If the index is not in the instance that this tree corresponds to and
-        is not known by this tree, whatever error would be raised by
-        ``instance.__getitem__`` will be propagated (usually this is some
-        subclass of `exceptions.LookupError`.
+        If the index is not in the instance that this tree corresponds
+        to and is not known by this tree, whatever error would be raised
+        by ``instance.__getitem__`` will be propagated (usually this is
+        some subclass of `LookupError`.
         """

         if self._instance is not _unset and index not in self:
@@ -288,7 +297,7 @@ class ErrorTree(object):
         return self.total_errors

     def __repr__(self):
-        return "<%s (%s total errors)>" % (self.__class__.__name__, len(self))
+        return f"<{self.__class__.__name__} ({len(self)} total errors)>"

     @property
     def total_errors(self):
@@ -296,7 +305,7 @@ class ErrorTree(object):
         The total number of errors in the entire tree, including children.
         """

-        child_errors = sum(len(tree) for _, tree in iteritems(self._contents))
+        child_errors = sum(len(tree) for _, tree in self._contents.items())
         return len(self.errors) + child_errors


@@ -306,19 +315,25 @@ def by_relevance(weak=WEAK_MATCHES, strong=STRONG_MATCHES):

     Arguments:
         weak (set):
-            a collection of validator names to consider to be "weak".
-            If there are two errors at the same level of the instance
-            and one is in the set of weak validator names, the other
-            error will take priority. By default, :validator:`anyOf` and
-            :validator:`oneOf` are considered weak validators and will
-            be superseded by other same-level validation errors.
+            a collection of validation keywords to consider to be
+            "weak". If there are two errors at the same level of the
+            instance and one is in the set of weak validation keywords,
+            the other error will take priority. By default, :kw:`anyOf`
+            and :kw:`oneOf` are considered weak keywords and will be
+            superseded by other same-level validation errors.

         strong (set):
-            a collection of validator names to consider to be "strong"
+            a collection of validation keywords to consider to be
+            "strong"
     """
     def relevance(error):
         validator = error.validator
-        return -len(error.path), validator not in weak, validator in strong
+        return (
+            -len(error.path),
+            validator not in weak,
+            validator in strong,
+            not error._matches_type(),
+        )
     return relevance


@@ -333,20 +348,20 @@ def best_match(errors, key=relevance):
     `ValidationError.path` is shorter) are considered better matches,
     since they indicate "more" is wrong with the instance.

-    If the resulting match is either :validator:`oneOf` or :validator:`anyOf`,
-    the *opposite* assumption is made -- i.e. the deepest error is picked,
-    since these validators only need to match once, and any other errors may
-    not be relevant.
+    If the resulting match is either :kw:`oneOf` or :kw:`anyOf`, the
+    *opposite* assumption is made -- i.e. the deepest error is picked,
+    since these keywords only need to match once, and any other errors
+    may not be relevant.

     Arguments:
-        errors (collections.Iterable):
+        errors (collections.abc.Iterable):

             the errors to select from. Do not provide a mixture of
             errors from different validation attempts (i.e. from
             different instances or schemas), since it won't produce
             sensical output.

-        key (collections.Callable):
+        key (collections.abc.Callable):

             the key to use when sorting errors. See `relevance` and
             transitively `by_relevance` for more details (the default is
@@ -370,5 +385,10 @@ def best_match(errors, key=relevance):
     best = max(itertools.chain([best], errors), key=key)

     while best.context:
-        best = min(best.context, key=key)
+        # Calculate the minimum via nsmallest, because we don't recurse if
+        # all nested errors have the same relevance (i.e. if min == max == all)
+        smallest = heapq.nsmallest(2, best.context, key=key)
+        if len(smallest) == 2 and key(smallest[0]) == key(smallest[1]):
+            return best
+        best = smallest[0]
     return best
|||||||
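(Reviewer illustration, not part of this diff: a minimal sketch of how the new
`not error._matches_type()` relevance component behaves, assuming the vendored
jsonschema 4.10 is importable. Under an `anyOf`, `best_match` should now prefer
the branch whose "type" actually matches the instance.)

    from jsonschema import Draft202012Validator
    from jsonschema.exceptions import best_match

    schema = {"anyOf": [{"type": "integer"}, {"type": "string", "maxLength": 3}]}
    errors = Draft202012Validator(schema).iter_errors("abcd")
    # The string branch matches the instance's type, so its maxLength error
    # should win over "'abcd' is not of type 'integer'".
    print(best_match(errors).message)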
181
third_party/python/jsonschema/jsonschema/protocols.py
vendored
Normal file
@@ -0,0 +1,181 @@
"""
typing.Protocol classes for jsonschema interfaces.
"""

# for reference material on Protocols, see
# https://www.python.org/dev/peps/pep-0544/

from __future__ import annotations

from typing import TYPE_CHECKING, Any, ClassVar, Iterator
import sys

# doing these imports with `try ... except ImportError` doesn't pass mypy
# checking because mypy sees `typing._SpecialForm` and
# `typing_extensions._SpecialForm` as incompatible
#
# see:
# https://mypy.readthedocs.io/en/stable/runtime_troubles.html#using-new-additions-to-the-typing-module
# https://github.com/python/mypy/issues/4427
if sys.version_info >= (3, 8):
    from typing import Protocol, runtime_checkable
else:
    from typing_extensions import Protocol, runtime_checkable

# in order for Sphinx to resolve references accurately from type annotations,
# it needs to see names like `jsonschema.TypeChecker`
# therefore, only import at type-checking time (to avoid circular references),
# but use `jsonschema` for any types which will otherwise not be resolvable
if TYPE_CHECKING:
    import jsonschema

    from jsonschema.exceptions import ValidationError
    from jsonschema.validators import RefResolver

# For code authors working on the validator protocol, these are the three
# use-cases which should be kept in mind:
#
# 1. As a protocol class, it can be used in type annotations to describe the
#    available methods and attributes of a validator
# 2. It is the source of autodoc for the validator documentation
# 3. It is runtime_checkable, meaning that it can be used in isinstance()
#    checks.
#
# Since protocols are not base classes, isinstance() checking is limited in
# its capabilities. See docs on runtime_checkable for detail


@runtime_checkable
class Validator(Protocol):
    """
    The protocol to which all validator classes should adhere.

    :argument schema: the schema that the validator object
        will validate with. It is assumed to be valid, and providing
        an invalid schema can lead to undefined behavior. See
        `Validator.check_schema` to validate a schema first.
    :argument resolver: an instance of `jsonschema.RefResolver` that will be
        used to resolve :kw:`$ref` properties (JSON references). If
        unprovided, one will be created.
    :argument format_checker: an instance of `jsonschema.FormatChecker`
        whose `jsonschema.FormatChecker.conforms` method will be called to
        check and see if instances conform to each :kw:`format`
        property present in the schema. If unprovided, no validation
        will be done for :kw:`format`. Certain formats require
        additional packages to be installed (ipv5, uri, color, date-time).
        The required packages can be found at the bottom of this page.
    """

    #: An object representing the validator's meta schema (the schema that
    #: describes valid schemas in the given version).
    META_SCHEMA: ClassVar[dict]

    #: A mapping of validation keywords (`str`\s) to functions that
    #: validate the keyword with that name. For more information see
    #: `creating-validators`.
    VALIDATORS: ClassVar[dict]

    #: A `jsonschema.TypeChecker` that will be used when validating
    #: :kw:`type` keywords in JSON schemas.
    TYPE_CHECKER: ClassVar[jsonschema.TypeChecker]

    #: A `jsonschema.FormatChecker` that will be used when validating
    #: :kw:`format` properties in JSON schemas.
    FORMAT_CHECKER: ClassVar[jsonschema.FormatChecker]

    #: The schema that was passed in when initializing the object.
    schema: dict | bool

    def __init__(
        self,
        schema: dict | bool,
        resolver: RefResolver | None = None,
        format_checker: jsonschema.FormatChecker | None = None,
    ) -> None:
        ...

    @classmethod
    def check_schema(cls, schema: dict) -> None:
        """
        Validate the given schema against the validator's `META_SCHEMA`.

        :raises: `jsonschema.exceptions.SchemaError` if the schema
            is invalid
        """

    def is_type(self, instance: Any, type: str) -> bool:
        """
        Check if the instance is of the given (JSON Schema) type.

        :type type: str
        :rtype: bool
        :raises: `jsonschema.exceptions.UnknownType` if ``type``
            is not a known type.
        """

    def is_valid(self, instance: dict) -> bool:
        """
        Check if the instance is valid under the current `schema`.

        :rtype: bool

        >>> schema = {"maxItems" : 2}
        >>> Draft202012Validator(schema).is_valid([2, 3, 4])
        False
        """

    def iter_errors(self, instance: dict) -> Iterator[ValidationError]:
        r"""
        Lazily yield each of the validation errors in the given instance.

        :rtype: an `collections.abc.Iterable` of
            `jsonschema.exceptions.ValidationError`\s

        >>> schema = {
        ...     "type" : "array",
        ...     "items" : {"enum" : [1, 2, 3]},
        ...     "maxItems" : 2,
        ... }
        >>> v = Draft202012Validator(schema)
        >>> for error in sorted(v.iter_errors([2, 3, 4]), key=str):
        ...     print(error.message)
        4 is not one of [1, 2, 3]
        [2, 3, 4] is too long
        """

    def validate(self, instance: dict) -> None:
        """
        Check if the instance is valid under the current `schema`.

        :raises: `jsonschema.exceptions.ValidationError` if the
            instance is invalid

        >>> schema = {"maxItems" : 2}
        >>> Draft202012Validator(schema).validate([2, 3, 4])
        Traceback (most recent call last):
            ...
        ValidationError: [2, 3, 4] is too long
        """

    def evolve(self, **kwargs) -> "Validator":
        """
        Create a new validator like this one, but with given changes.

        Preserves all other attributes, so can be used to e.g. create a
        validator with a different schema but with the same :kw:`$ref`
        resolution behavior.

        >>> validator = Draft202012Validator({})
        >>> validator.evolve(schema={"type": "number"})
        Draft202012Validator(schema={'type': 'number'}, format_checker=None)

        The returned object satisfies the validator protocol, but may not
        be of the same concrete class! In particular this occurs
        when a :kw:`$ref` occurs to a schema with a different
        :kw:`$schema` than this one (i.e. for a different draft).

        >>> validator.evolve(
        ...     schema={"$schema": Draft7Validator.META_SCHEMA["$id"]}
        ... )
        Draft7Validator(schema=..., format_checker=None)
        """
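(Reviewer illustration, not part of this diff: since the protocol is
`runtime_checkable`, concrete validators can be checked against it at runtime,
assuming the vendored package is importable.)

    import jsonschema
    from jsonschema.protocols import Validator

    validator = jsonschema.Draft202012Validator({"type": "number"})
    # isinstance() only verifies method presence, per the caveat above.
    print(isinstance(validator, Validator))  # True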
42
third_party/python/jsonschema/jsonschema/schemas/draft2019-09.json
vendored
Normal file
@@ -0,0 +1,42 @@
{
    "$schema": "https://json-schema.org/draft/2019-09/schema",
    "$id": "https://json-schema.org/draft/2019-09/schema",
    "$vocabulary": {
        "https://json-schema.org/draft/2019-09/vocab/core": true,
        "https://json-schema.org/draft/2019-09/vocab/applicator": true,
        "https://json-schema.org/draft/2019-09/vocab/validation": true,
        "https://json-schema.org/draft/2019-09/vocab/meta-data": true,
        "https://json-schema.org/draft/2019-09/vocab/format": false,
        "https://json-schema.org/draft/2019-09/vocab/content": true
    },
    "$recursiveAnchor": true,

    "title": "Core and Validation specifications meta-schema",
    "allOf": [
        {"$ref": "meta/core"},
        {"$ref": "meta/applicator"},
        {"$ref": "meta/validation"},
        {"$ref": "meta/meta-data"},
        {"$ref": "meta/format"},
        {"$ref": "meta/content"}
    ],
    "type": ["object", "boolean"],
    "properties": {
        "definitions": {
            "$comment": "While no longer an official keyword as it is replaced by $defs, this keyword is retained in the meta-schema to prevent incompatible extensions as it remains in common use.",
            "type": "object",
            "additionalProperties": { "$recursiveRef": "#" },
            "default": {}
        },
        "dependencies": {
            "$comment": "\"dependencies\" is no longer a keyword, but schema authors should avoid redefining it to facilitate a smooth transition to \"dependentSchemas\" and \"dependentRequired\"",
            "type": "object",
            "additionalProperties": {
                "anyOf": [
                    { "$recursiveRef": "#" },
                    { "$ref": "meta/validation#/$defs/stringArray" }
                ]
            }
        }
    }
}
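(Reviewer note, not part of this diff: the retained "definitions" and
"dependencies" entries mean older-style schemas still pass meta-validation.
A minimal sketch, assuming the vendored jsonschema is importable:)

    from jsonschema import Draft201909Validator

    # "definitions" is kept in the 2019-09 meta-schema even though "$defs"
    # replaced it, so this schema is still accepted by check_schema.
    Draft201909Validator.check_schema(
        {"definitions": {"positive": {"type": "integer", "minimum": 0}}},
    )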
58
third_party/python/jsonschema/jsonschema/schemas/draft2020-12.json
vendored
Normal file
@@ -0,0 +1,58 @@
{
    "$schema": "https://json-schema.org/draft/2020-12/schema",
    "$id": "https://json-schema.org/draft/2020-12/schema",
    "$vocabulary": {
        "https://json-schema.org/draft/2020-12/vocab/core": true,
        "https://json-schema.org/draft/2020-12/vocab/applicator": true,
        "https://json-schema.org/draft/2020-12/vocab/unevaluated": true,
        "https://json-schema.org/draft/2020-12/vocab/validation": true,
        "https://json-schema.org/draft/2020-12/vocab/meta-data": true,
        "https://json-schema.org/draft/2020-12/vocab/format-annotation": true,
        "https://json-schema.org/draft/2020-12/vocab/content": true
    },
    "$dynamicAnchor": "meta",

    "title": "Core and Validation specifications meta-schema",
    "allOf": [
        {"$ref": "meta/core"},
        {"$ref": "meta/applicator"},
        {"$ref": "meta/unevaluated"},
        {"$ref": "meta/validation"},
        {"$ref": "meta/meta-data"},
        {"$ref": "meta/format-annotation"},
        {"$ref": "meta/content"}
    ],
    "type": ["object", "boolean"],
    "$comment": "This meta-schema also defines keywords that have appeared in previous drafts in order to prevent incompatible extensions as they remain in common use.",
    "properties": {
        "definitions": {
            "$comment": "\"definitions\" has been replaced by \"$defs\".",
            "type": "object",
            "additionalProperties": { "$dynamicRef": "#meta" },
            "deprecated": true,
            "default": {}
        },
        "dependencies": {
            "$comment": "\"dependencies\" has been split and replaced by \"dependentSchemas\" and \"dependentRequired\" in order to serve their differing semantics.",
            "type": "object",
            "additionalProperties": {
                "anyOf": [
                    { "$dynamicRef": "#meta" },
                    { "$ref": "meta/validation#/$defs/stringArray" }
                ]
            },
            "deprecated": true,
            "default": {}
        },
        "$recursiveAnchor": {
            "$comment": "\"$recursiveAnchor\" has been replaced by \"$dynamicAnchor\".",
            "$ref": "meta/core#/$defs/anchorString",
            "deprecated": true
        },
        "$recursiveRef": {
            "$comment": "\"$recursiveRef\" has been replaced by \"$dynamicRef\".",
            "$ref": "meta/core#/$defs/uriReferenceString",
            "deprecated": true
        }
    }
}
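(Reviewer note, not part of this diff: under 2020-12 the "$recursive*"
keywords are deprecated but still schema-valid, since the meta-schema types
them as strings. A minimal sketch, assuming the vendored jsonschema is
importable:)

    from jsonschema import Draft202012Validator

    # "$recursiveRef" must now be a URI-reference string; the meta-schema
    # still permits it (flagged "deprecated") for compatibility.
    Draft202012Validator.check_schema({"$recursiveRef": "#"})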
@@ -1,199 +1,177 @@
 {
-    "$schema": "http://json-schema.org/draft-03/schema#",
-    "dependencies": {
-        "exclusiveMaximum": "maximum",
-        "exclusiveMinimum": "minimum"
-    },
-    "id": "http://json-schema.org/draft-03/schema#",
-    "properties": {
-        "$ref": {
-            "format": "uri",
-            "type": "string"
-        },
-        "$schema": {
-            "format": "uri",
-            "type": "string"
-        },
-        "additionalItems": {
-            "default": {},
-            "type": [
-                {
-                    "$ref": "#"
-                },
-                "boolean"
-            ]
-        },
-        "additionalProperties": {
-            "default": {},
-            "type": [
-                {
-                    "$ref": "#"
-                },
-                "boolean"
-            ]
-        },
-        "default": {
-            "type": "any"
-        },
-        "dependencies": {
-            "additionalProperties": {
-                "items": {
-                    "type": "string"
-                },
-                "type": [
-                    "string",
-                    "array",
-                    {
-                        "$ref": "#"
-                    }
-                ]
-            },
-            "default": {},
-            "type": [
-                "string",
-                "array",
-                "object"
-            ]
-        },
-        "description": {
-            "type": "string"
-        },
-        "disallow": {
-            "items": {
-                "type": [
-                    "string",
-                    {
-                        "$ref": "#"
-                    }
-                ]
-            },
-            "type": [
-                "string",
-                "array"
-            ],
-            "uniqueItems": true
-        },
-        "divisibleBy": {
-            "default": 1,
-            "exclusiveMinimum": true,
-            "minimum": 0,
-            "type": "number"
-        },
-        "enum": {
-            "type": "array"
-        },
-        "exclusiveMaximum": {
-            "default": false,
-            "type": "boolean"
-        },
-        "exclusiveMinimum": {
-            "default": false,
-            "type": "boolean"
-        },
-        "extends": {
-            "default": {},
-            "items": {
-                "$ref": "#"
-            },
-            "type": [
-                {
-                    "$ref": "#"
-                },
-                "array"
-            ]
-        },
-        "format": {
-            "type": "string"
-        },
-        "id": {
-            "format": "uri",
-            "type": "string"
-        },
-        "items": {
-            "default": {},
-            "items": {
-                "$ref": "#"
-            },
-            "type": [
-                {
-                    "$ref": "#"
-                },
-                "array"
-            ]
-        },
-        "maxDecimal": {
-            "minimum": 0,
-            "type": "number"
-        },
-        "maxItems": {
-            "minimum": 0,
-            "type": "integer"
-        },
-        "maxLength": {
-            "type": "integer"
-        },
-        "maximum": {
-            "type": "number"
-        },
-        "minItems": {
-            "default": 0,
-            "minimum": 0,
-            "type": "integer"
-        },
-        "minLength": {
-            "default": 0,
-            "minimum": 0,
-            "type": "integer"
-        },
-        "minimum": {
-            "type": "number"
-        },
-        "pattern": {
-            "format": "regex",
-            "type": "string"
-        },
-        "patternProperties": {
-            "additionalProperties": {
-                "$ref": "#"
-            },
-            "default": {},
-            "type": "object"
-        },
-        "properties": {
-            "additionalProperties": {
-                "$ref": "#",
-                "type": "object"
-            },
-            "default": {},
-            "type": "object"
-        },
-        "required": {
-            "default": false,
-            "type": "boolean"
-        },
-        "title": {
-            "type": "string"
-        },
-        "type": {
-            "default": "any",
-            "items": {
-                "type": [
-                    "string",
-                    {
-                        "$ref": "#"
-                    }
-                ]
-            },
-            "type": [
-                "string",
-                "array"
-            ],
-            "uniqueItems": true
-        },
-        "uniqueItems": {
-            "default": false,
-            "type": "boolean"
-        }
-    },
-    "type": "object"
+    "$schema" : "http://json-schema.org/draft-03/schema#",
+    "id" : "http://json-schema.org/draft-03/schema#",
+    "type" : "object",
+
+    "properties" : {
+        "type" : {
+            "type" : ["string", "array"],
+            "items" : {
+                "type" : ["string", {"$ref" : "#"}]
+            },
+            "uniqueItems" : true,
+            "default" : "any"
+        },
+
+        "properties" : {
+            "type" : "object",
+            "additionalProperties" : {"$ref" : "#", "type" : "object"},
+            "default" : {}
+        },
+
+        "patternProperties" : {
+            "type" : "object",
+            "additionalProperties" : {"$ref" : "#"},
+            "default" : {}
+        },
+
+        "additionalProperties" : {
+            "type" : [{"$ref" : "#"}, "boolean"],
+            "default" : {}
+        },
+
+        "items" : {
+            "type" : [{"$ref" : "#"}, "array"],
+            "items" : {"$ref" : "#"},
+            "default" : {}
+        },
+
+        "additionalItems" : {
+            "type" : [{"$ref" : "#"}, "boolean"],
+            "default" : {}
+        },
+
+        "required" : {
+            "type" : "boolean",
+            "default" : false
+        },
+
+        "dependencies" : {
+            "type" : ["string", "array", "object"],
+            "additionalProperties" : {
+                "type" : ["string", "array", {"$ref" : "#"}],
+                "items" : {
+                    "type" : "string"
+                }
+            },
+            "default" : {}
+        },
+
+        "minimum" : {
+            "type" : "number"
+        },
+
+        "maximum" : {
+            "type" : "number"
+        },
+
+        "exclusiveMinimum" : {
+            "type" : "boolean",
+            "default" : false
+        },
+
+        "exclusiveMaximum" : {
+            "type" : "boolean",
+            "default" : false
+        },
+
+        "maxDecimal": {
+            "minimum": 0,
+            "type": "number"
+        },
+
+        "minItems" : {
+            "type" : "integer",
+            "minimum" : 0,
+            "default" : 0
+        },
+
+        "maxItems" : {
+            "type" : "integer",
+            "minimum" : 0
+        },
+
+        "uniqueItems" : {
+            "type" : "boolean",
+            "default" : false
+        },
+
+        "pattern" : {
+            "type" : "string",
+            "format" : "regex"
+        },
+
+        "minLength" : {
+            "type" : "integer",
+            "minimum" : 0,
+            "default" : 0
+        },
+
+        "maxLength" : {
+            "type" : "integer"
+        },
+
+        "enum" : {
+            "type" : "array"
+        },
+
+        "default" : {
+            "type" : "any"
+        },
+
+        "title" : {
+            "type" : "string"
+        },
+
+        "description" : {
+            "type" : "string"
+        },
+
+        "format" : {
+            "type" : "string"
+        },
+
+        "divisibleBy" : {
+            "type" : "number",
+            "minimum" : 0,
+            "exclusiveMinimum" : true,
+            "default" : 1
+        },
+
+        "disallow" : {
+            "type" : ["string", "array"],
+            "items" : {
+                "type" : ["string", {"$ref" : "#"}]
+            },
+            "uniqueItems" : true
+        },
+
+        "extends" : {
+            "type" : [{"$ref" : "#"}, "array"],
+            "items" : {"$ref" : "#"},
+            "default" : {}
+        },
+
+        "id" : {
+            "type" : "string",
+            "format" : "uri"
+        },
+
+        "$ref" : {
+            "type" : "string",
+            "format" : "uri"
+        },
+
+        "$schema" : {
+            "type" : "string",
+            "format" : "uri"
+        }
+    },
+
+    "dependencies" : {
+        "exclusiveMinimum" : "minimum",
+        "exclusiveMaximum" : "maximum"
+    },
+
+    "default" : {}
 }
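(Reviewer illustration, not part of this diff: draft-03 keywords such as
"disallow", dropped in later drafts, remain supported by Draft3Validator.
A minimal sketch, assuming the vendored jsonschema is importable:)

    from jsonschema import Draft3Validator

    schema = {"disallow": ["string"]}
    print(Draft3Validator(schema).is_valid("text"))  # False: strings are disallowed
    print(Draft3Validator(schema).is_valid(3))       # True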
@@ -1,222 +1,149 @@
 {
-    "$schema": "http://json-schema.org/draft-04/schema#",
-    "default": {},
-    "definitions": {
-        "positiveInteger": {
-            "minimum": 0,
-            "type": "integer"
-        },
-        "positiveIntegerDefault0": {
-            "allOf": [
-                {
-                    "$ref": "#/definitions/positiveInteger"
-                },
-                {
-                    "default": 0
-                }
-            ]
-        },
-        "schemaArray": {
-            "items": {
-                "$ref": "#"
-            },
-            "minItems": 1,
-            "type": "array"
-        },
-        "simpleTypes": {
-            "enum": [
-                "array",
-                "boolean",
-                "integer",
-                "null",
-                "number",
-                "object",
-                "string"
-            ]
-        },
-        "stringArray": {
-            "items": {
-                "type": "string"
-            },
-            "minItems": 1,
-            "type": "array",
-            "uniqueItems": true
-        }
-    },
-    "dependencies": {
-        "exclusiveMaximum": [
-            "maximum"
-        ],
-        "exclusiveMinimum": [
-            "minimum"
-        ]
-    },
-    "description": "Core schema meta-schema",
-    "id": "http://json-schema.org/draft-04/schema#",
-    "properties": {
-        "$schema": {
-            "format": "uri",
-            "type": "string"
-        },
-        "additionalItems": {
-            "anyOf": [
-                {
-                    "type": "boolean"
-                },
-                {
-                    "$ref": "#"
-                }
-            ],
-            "default": {}
-        },
-        "additionalProperties": {
-            "anyOf": [
-                {
-                    "type": "boolean"
-                },
-                {
-                    "$ref": "#"
-                }
-            ],
-            "default": {}
-        },
-        "allOf": {
-            "$ref": "#/definitions/schemaArray"
-        },
-        "anyOf": {
-            "$ref": "#/definitions/schemaArray"
-        },
-        "default": {},
-        "definitions": {
-            "additionalProperties": {
-                "$ref": "#"
-            },
-            "default": {},
-            "type": "object"
-        },
-        "dependencies": {
-            "additionalProperties": {
-                "anyOf": [
-                    {
-                        "$ref": "#"
-                    },
-                    {
-                        "$ref": "#/definitions/stringArray"
-                    }
-                ]
-            },
-            "type": "object"
-        },
-        "description": {
-            "type": "string"
-        },
-        "enum": {
-            "type": "array"
-        },
-        "exclusiveMaximum": {
-            "default": false,
-            "type": "boolean"
-        },
-        "exclusiveMinimum": {
-            "default": false,
-            "type": "boolean"
-        },
-        "format": {
-            "type": "string"
-        },
-        "id": {
-            "format": "uri",
-            "type": "string"
-        },
-        "items": {
-            "anyOf": [
-                {
-                    "$ref": "#"
-                },
-                {
-                    "$ref": "#/definitions/schemaArray"
-                }
-            ],
-            "default": {}
-        },
-        "maxItems": {
-            "$ref": "#/definitions/positiveInteger"
-        },
-        "maxLength": {
-            "$ref": "#/definitions/positiveInteger"
-        },
-        "maxProperties": {
-            "$ref": "#/definitions/positiveInteger"
-        },
-        "maximum": {
-            "type": "number"
-        },
-        "minItems": {
-            "$ref": "#/definitions/positiveIntegerDefault0"
-        },
-        "minLength": {
-            "$ref": "#/definitions/positiveIntegerDefault0"
-        },
-        "minProperties": {
-            "$ref": "#/definitions/positiveIntegerDefault0"
-        },
-        "minimum": {
-            "type": "number"
-        },
-        "multipleOf": {
-            "exclusiveMinimum": true,
-            "minimum": 0,
-            "type": "number"
-        },
-        "not": {
-            "$ref": "#"
-        },
-        "oneOf": {
-            "$ref": "#/definitions/schemaArray"
-        },
-        "pattern": {
-            "format": "regex",
-            "type": "string"
-        },
-        "patternProperties": {
-            "additionalProperties": {
-                "$ref": "#"
-            },
-            "default": {},
-            "type": "object"
-        },
-        "properties": {
-            "additionalProperties": {
-                "$ref": "#"
-            },
-            "default": {},
-            "type": "object"
-        },
-        "required": {
-            "$ref": "#/definitions/stringArray"
-        },
-        "title": {
-            "type": "string"
-        },
-        "type": {
-            "anyOf": [
-                {
-                    "$ref": "#/definitions/simpleTypes"
-                },
-                {
-                    "items": {
-                        "$ref": "#/definitions/simpleTypes"
-                    },
-                    "minItems": 1,
-                    "type": "array",
-                    "uniqueItems": true
-                }
-            ]
-        },
-        "uniqueItems": {
-            "default": false,
-            "type": "boolean"
-        }
-    },
-    "type": "object"
+    "id": "http://json-schema.org/draft-04/schema#",
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "description": "Core schema meta-schema",
+    "definitions": {
+        "schemaArray": {
+            "type": "array",
+            "minItems": 1,
+            "items": { "$ref": "#" }
+        },
+        "positiveInteger": {
+            "type": "integer",
+            "minimum": 0
+        },
+        "positiveIntegerDefault0": {
+            "allOf": [ { "$ref": "#/definitions/positiveInteger" }, { "default": 0 } ]
+        },
+        "simpleTypes": {
+            "enum": [ "array", "boolean", "integer", "null", "number", "object", "string" ]
+        },
+        "stringArray": {
+            "type": "array",
+            "items": { "type": "string" },
+            "minItems": 1,
+            "uniqueItems": true
+        }
+    },
+    "type": "object",
+    "properties": {
+        "id": {
+            "format": "uri",
+            "type": "string"
+        },
+        "$schema": {
+            "type": "string",
+            "format": "uri"
+        },
+        "title": {
+            "type": "string"
+        },
+        "description": {
+            "type": "string"
+        },
+        "default": {},
+        "multipleOf": {
+            "type": "number",
+            "minimum": 0,
+            "exclusiveMinimum": true
+        },
+        "maximum": {
+            "type": "number"
+        },
+        "exclusiveMaximum": {
+            "type": "boolean",
+            "default": false
+        },
+        "minimum": {
+            "type": "number"
+        },
+        "exclusiveMinimum": {
+            "type": "boolean",
+            "default": false
+        },
+        "maxLength": { "$ref": "#/definitions/positiveInteger" },
+        "minLength": { "$ref": "#/definitions/positiveIntegerDefault0" },
+        "pattern": {
+            "type": "string",
+            "format": "regex"
+        },
+        "additionalItems": {
+            "anyOf": [
+                { "type": "boolean" },
+                { "$ref": "#" }
+            ],
+            "default": {}
+        },
+        "items": {
+            "anyOf": [
+                { "$ref": "#" },
+                { "$ref": "#/definitions/schemaArray" }
+            ],
+            "default": {}
+        },
+        "maxItems": { "$ref": "#/definitions/positiveInteger" },
+        "minItems": { "$ref": "#/definitions/positiveIntegerDefault0" },
+        "uniqueItems": {
+            "type": "boolean",
+            "default": false
+        },
+        "maxProperties": { "$ref": "#/definitions/positiveInteger" },
+        "minProperties": { "$ref": "#/definitions/positiveIntegerDefault0" },
+        "required": { "$ref": "#/definitions/stringArray" },
+        "additionalProperties": {
+            "anyOf": [
+                { "type": "boolean" },
+                { "$ref": "#" }
+            ],
+            "default": {}
+        },
+        "definitions": {
+            "type": "object",
+            "additionalProperties": { "$ref": "#" },
+            "default": {}
+        },
+        "properties": {
+            "type": "object",
+            "additionalProperties": { "$ref": "#" },
+            "default": {}
+        },
+        "patternProperties": {
+            "type": "object",
+            "additionalProperties": { "$ref": "#" },
+            "default": {}
+        },
+        "dependencies": {
+            "type": "object",
+            "additionalProperties": {
+                "anyOf": [
+                    { "$ref": "#" },
+                    { "$ref": "#/definitions/stringArray" }
+                ]
+            }
+        },
+        "enum": {
+            "type": "array"
+        },
+        "type": {
+            "anyOf": [
+                { "$ref": "#/definitions/simpleTypes" },
+                {
+                    "type": "array",
+                    "items": { "$ref": "#/definitions/simpleTypes" },
+                    "minItems": 1,
+                    "uniqueItems": true
+                }
+            ]
+        },
+        "format": { "type": "string" },
+        "allOf": { "$ref": "#/definitions/schemaArray" },
+        "anyOf": { "$ref": "#/definitions/schemaArray" },
+        "oneOf": { "$ref": "#/definitions/schemaArray" },
+        "not": { "$ref": "#" }
+    },
+    "dependencies": {
+        "exclusiveMaximum": [ "maximum" ],
+        "exclusiveMinimum": [ "minimum" ]
+    },
+    "default": {}
 }
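(Reviewer illustration, not part of this diff: draft-04 still models
"exclusiveMaximum" as a boolean modifier of "maximum" -- note the meta-schema's
"dependencies" entry above -- unlike draft 6 and later, where it is a number.
A minimal sketch, assuming the vendored jsonschema is importable:)

    from jsonschema import Draft4Validator

    schema = {"maximum": 10, "exclusiveMaximum": True}
    Draft4Validator.check_schema(schema)          # the boolean form is valid here
    print(Draft4Validator(schema).is_valid(10))   # False: 10 itself is excluded
    print(Draft4Validator(schema).is_valid(9.5))  # True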
1
third_party/python/jsonschema/jsonschema/schemas/vocabularies.json
vendored
Normal file
File diff suppressed because one or more lines are too long
75
third_party/python/pkgutil_resolve_name/pkgutil_resolve_name-1.3.10.dist-info/LICENSE
vendored
Normal file
@@ -0,0 +1,75 @@
The MIT License (MIT)

Copyright (c) 2020 Thomas Grainger.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

Portions of this pkgutil-resolve-name Software may utilize the following copyrighted material, the use of which is hereby acknowledged.

cpython: https://github.com/python/cpython/tree/1ed61617a4a6632905ad6a0b440cd2cafb8b6414


PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
--------------------------------------------

1. This LICENSE AGREEMENT is between the Python Software Foundation
("PSF"), and the Individual or Organization ("Licensee") accessing and
otherwise using this software ("Python") in source or binary form and
its associated documentation.

2. Subject to the terms and conditions of this License Agreement, PSF hereby
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
analyze, test, perform and/or display publicly, prepare derivative works,
distribute, and otherwise use Python alone or in any derivative version,
provided, however, that PSF's License Agreement and PSF's notice of copyright,
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation;
All Rights Reserved" are retained in Python alone or in any derivative version
prepared by Licensee.

3. In the event Licensee prepares a derivative work that is based on
or incorporates Python or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python.

4. PSF is making Python available to Licensee on an "AS IS"
basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.

5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.

6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.

7. Nothing in this License Agreement shall be deemed to create any
relationship of agency, partnership, or joint venture between PSF and
Licensee. This License Agreement does not grant permission to use PSF
trademarks or trade name in a trademark sense to endorse or promote
products or services of Licensee, or any third party.

8. By copying, installing or otherwise using Python, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.
19
third_party/python/pkgutil_resolve_name/pkgutil_resolve_name-1.3.10.dist-info/METADATA
vendored
Normal file
@@ -0,0 +1,19 @@
Metadata-Version: 2.1
Name: pkgutil_resolve_name
Version: 1.3.10
Summary: Resolve a name to an object.
Home-page: https://github.com/graingert/pkgutil-resolve-name
Author: Vinay Sajip
Author-email: vinay_sajip@yahoo.co.uk
Maintainer: Thomas Grainger
Maintainer-email: pkgutil-resolve-name@graingert.co.uk
Requires-Python: >=3.6
Description-Content-Type: text/x-rst
Classifier: License :: OSI Approved :: MIT License

pkgutil-resolve-name
====================

A backport of Python 3.9's ``pkgutil.resolve_name``.
See the `Python 3.9 documentation <https://docs.python.org/3.9/library/pkgutil.html#pkgutil.resolve_name>`__.
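(Reviewer illustration, not part of this diff: the backport exposes the same
API as pkgutil.resolve_name on Python 3.9+, which lets callers resolve dotted
names to objects on older interpreters. A minimal sketch:)

    from pkgutil_resolve_name import resolve_name

    # Resolve "module:attribute" (or a plain dotted path) to the object itself.
    dumps = resolve_name("json:dumps")
    print(dumps({"ok": True}))  # {"ok": true}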
Some files were not shown because too many files have changed in this diff