Mirror of https://github.com/wakatime/sublime-wakatime.git (synced 2023-08-10 21:13:02 +03:00)

Compare commits

15 Commits (SHA1):

- ce885501ad
- c9448a9a19
- 04f8c61ebc
- 04a4630024
- 02138220fd
- d0b162bdd8
- 1b8895cd38
- 938bbb73d1
- 008fdc6b49
- a788625dd0
- bcbce681c3
- 35299db832
- eb7814624c
- 1c092b2fd8
- 507ef95f71
HISTORY.rst (13 lines changed)

@@ -3,6 +3,19 @@ History
-------


7.0.18 (2017-03-16)
++++++++++++++++++

- Upgrade wakatime-cli to v8.0.0.
- No longer creating ~/.wakatime.cfg file, since only using Sublime settings.


7.0.17 (2017-03-01)
++++++++++++++++++

- Upgrade wakatime-cli to v7.0.4.


7.0.16 (2017-02-20)
++++++++++++++++++
@@ -51,6 +51,7 @@ First, turn on debug mode in your `WakaTime.sublime-settings` file.

Add the line: `"debug": true`

Then, open your Sublime Console with `View -> Show Console` to see the plugin executing the wakatime cli process when sending a heartbeat. Also, tail your `$HOME/.wakatime.log` file to debug wakatime cli problems.
Then, open your Sublime Console with `View -> Show Console` ( CTRL + \` ) to see the plugin executing the wakatime cli process when sending a heartbeat.
Also, tail your `$HOME/.wakatime.log` file to debug wakatime cli problems.

For more general troubleshooting information, see [wakatime/wakatime#troubleshooting](https://github.com/wakatime/wakatime#troubleshooting).
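For the log-tailing step above, here is a minimal convenience sketch (an editor-added assumption, not part of the plugin) that follows `$HOME/.wakatime.log` the way `tail -f` would:

```python
# Minimal stand-in for `tail -f ~/.wakatime.log` on systems without tail.
# Press Ctrl+C to stop; assumes the log file already exists.
import os
import time

path = os.path.expanduser('~/.wakatime.log')
with open(path) as fh:
    fh.seek(0, os.SEEK_END)          # start at the end, like tail -f
    while True:
        line = fh.readline()
        if line:
            print(line, end='')
        else:
            time.sleep(1)            # wait for the plugin to write more
```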
WakaTime.py (48 lines changed)

@@ -7,7 +7,7 @@ Website: https://wakatime.com/
==========================================================="""


__version__ = '7.0.16'
__version__ = '7.0.18'


import sublime

@@ -177,28 +177,11 @@ def update_status_bar(status):
set_timeout(lambda: update_status_bar(status), 0)


def create_config_file():
"""Creates the .wakatime.cfg INI file in $HOME directory, if it does
not already exist.
"""
configFile = os.path.join(os.path.expanduser('~'), '.wakatime.cfg')
try:
with open(configFile) as fh:
pass
except IOError:
try:
with open(configFile, 'w') as fh:
fh.write("[settings]\n")
fh.write("debug = false\n")
fh.write("hidefilenames = false\n")
except IOError:
pass


def prompt_api_key():
global SETTINGS

create_config_file()
if SETTINGS.get('api_key'):
return True

default_key = ''
try:

@@ -209,20 +192,17 @@ def prompt_api_key():
except:
pass

if SETTINGS.get('api_key'):
return True
else:
window = sublime.active_window()
if window:
def got_key(text):
if text:
SETTINGS.set('api_key', str(text))
sublime.save_settings(SETTINGS_FILE)
window = sublime.active_window()
if window:
window.show_input_panel('[WakaTime] Enter your wakatime.com api key:', default_key, got_key, None, None)
return True
else:
log(ERROR, 'Could not prompt for api key because no window found.')
return False
window.show_input_panel('[WakaTime] Enter your wakatime.com api key:', default_key, got_key, None, None)
return True
else:
log(ERROR, 'Could not prompt for api key because no window found.')
return False


def python_binary():

@@ -482,6 +462,8 @@ class SendHeartbeatsThread(threading.Thread):
self.debug = SETTINGS.get('debug')
self.api_key = SETTINGS.get('api_key', '')
self.ignore = SETTINGS.get('ignore', [])
self.hidefilenames = SETTINGS.get('hidefilenames')
self.proxy = SETTINGS.get('proxy')

self.heartbeat = heartbeat
self.has_extra_heartbeats = False

@@ -537,9 +519,13 @@ class SendHeartbeatsThread(threading.Thread):
if heartbeat.get('cursorpos') is not None:
cmd.extend(['--cursorpos', heartbeat['cursorpos']])
for pattern in self.ignore:
cmd.extend(['--ignore', pattern])
cmd.extend(['--exclude', pattern])
if self.debug:
cmd.append('--verbose')
if self.hidefilenames:
cmd.append('--hidefilenames')
if self.proxy:
cmd.extend(['--proxy', self.proxy])
if self.has_extra_heartbeats:
cmd.append('--extra-heartbeats')
stdin = PIPE
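To make the flag rename in the last hunk concrete: each pattern from the `ignore` setting is now passed to wakatime-cli as a repeated `--exclude` argument (previously `--ignore`). The sketch below only rebuilds that flag-assembly step; the settings values are invented for illustration.

```python
# Hedged sketch of the flag assembly shown above; example values are invented.
def build_heartbeat_flags(ignore, debug=False, hidefilenames=False, proxy=None):
    cmd = []
    for pattern in ignore:
        cmd.extend(['--exclude', pattern])  # was ['--ignore', pattern] before 7.0.18
    if debug:
        cmd.append('--verbose')
    if hidefilenames:
        cmd.append('--hidefilenames')
    if proxy:
        cmd.extend(['--proxy', proxy])
    return cmd

print(build_heartbeat_flags(['^/tmp/', 'COMMIT_EDITMSG$'], debug=True))
# ['--exclude', '^/tmp/', '--exclude', 'COMMIT_EDITMSG$', '--verbose']
```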
@@ -7,17 +7,23 @@
// Set this in your User specific WakaTime.sublime-settings file.
"api_key": "",

// Debug mode. Set to true for verbose logging. Defaults to false.
"debug": false,

// Proxy with format https://user:pass@host:port or socks5://user:pass@host:port or domain\\user:pass.
"proxy": "",

// Ignore files; Files (including absolute paths) that match one of these
// POSIX regular expressions will not be logged.
"ignore": ["^/tmp/", "^/etc/", "^/var/", "COMMIT_EDITMSG$", "PULLREQ_EDITMSG$", "MERGE_MSG$", "TAG_EDITMSG$"],

// Debug mode. Set to true for verbose logging. Defaults to false.
"debug": false,

// Status bar message. Set to false to hide status bar message.
// Defaults to true.
"status_bar_message": true,

// Status bar message format.
"status_bar_message_fmt": "WakaTime {status} %I:%M %p"
"status_bar_message_fmt": "WakaTime {status} %I:%M %p",

// Obfuscate file paths when sending to API. Your dashboard will no longer display coding activity per file.
"hidefilenames": false
}
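The `ignore` patterns above are regular expressions matched against the full file path. A small illustration using Python's `re` module (the sample paths are invented, and the exact matching inside wakatime-cli may differ in detail):

```python
# Illustration only: which sample paths the default "ignore" patterns would skip.
import re

patterns = ["^/tmp/", "^/etc/", "COMMIT_EDITMSG$"]
paths = [
    "/tmp/scratch.py",
    "/home/alice/project/.git/COMMIT_EDITMSG",
    "/home/alice/project/main.py",
]
for path in paths:
    skipped = any(re.search(p, path) for p in patterns)
    print(path, '-> skipped' if skipped else '-> logged')
```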
@@ -1,9 +1,9 @@
__title__ = 'wakatime'
__description__ = 'Common interface to the WakaTime api.'
__url__ = 'https://github.com/wakatime/wakatime'
__version_info__ = ('7', '0', '2')
__version_info__ = ('8', '0', '0')
__version__ = '.'.join(__version_info__)
__author__ = 'Alan Hamlett'
__author_email__ = 'alan@wakatime.com'
__license__ = 'BSD'
__copyright__ = 'Copyright 2016 Alan Hamlett'
__copyright__ = 'Copyright 2017 Alan Hamlett'
@@ -128,8 +128,6 @@ def parseArguments():

# parse ~/.wakatime.cfg file
configs = parseConfigFile(args.config)
if configs is None:
return args, configs

# update args from configs
if not args.hostname:
@@ -13,17 +13,16 @@
from __future__ import print_function

import os
import sys
import traceback

from .compat import u, open
from .compat import open
from .constants import CONFIG_FILE_PARSE_ERROR


try:
import ConfigParser as configparser
except ImportError: # pragma: nocover
import configparser
except ImportError:
from .packages import configparser


def parseConfigFile(configFile=None):

@@ -41,15 +40,14 @@ def parseConfigFile(configFile=None):
if not configFile:
configFile = os.path.join(os.path.expanduser('~'), '.wakatime.cfg')

configs = configparser.SafeConfigParser()
configs = configparser.ConfigParser(delimiters=('='), strict=False)
try:
with open(configFile, 'r', encoding='utf-8') as fh:
try:
configs.readfp(fh)
configs.read_file(fh)
except configparser.Error:
print(traceback.format_exc())
return None
raise SystemExit(CONFIG_FILE_PARSE_ERROR)
except IOError:
sys.stderr.write(u("Error: Could not read from config file {0}\n").format(u(configFile)))
raise SystemExit(CONFIG_FILE_PARSE_ERROR)
pass
return configs
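The hunk above swaps `SafeConfigParser()`/`readfp()` for the bundled configparser backport's `ConfigParser(delimiters=('='), strict=False)` and `read_file()`. A minimal sketch of those calls using the Python 3 standard-library `configparser`, which exposes the same constructor arguments (the sample INI text is invented):

```python
# Hedged sketch of the new parsing calls; the backport mirrors this stdlib API.
import configparser
import io

sample = "[settings]\ndebug = false\nhidefilenames = false\n"  # invented example

configs = configparser.ConfigParser(delimiters=('=',), strict=False)
configs.read_file(io.StringIO(sample))    # replaces the deprecated readfp()
print(configs.get('settings', 'debug'))   # -> false
```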
packages/wakatime/language_priorities.py (new file, 18 lines)

@@ -0,0 +1,18 @@
# -*- coding: utf-8 -*-
"""
    wakatime.language_priorities
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    Overwrite pygments Lexer.priority attribute for specific languages.

    :copyright: (c) 2017 Alan Hamlett.
    :license: BSD, see LICENSE for more details.
"""


LANGUAGES = {
    'typescript': 0.01,
    'perl': 0.01,
    'perl6': 0.01,
    'f#': 0.01,
}
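This map is consumed by `customize_priority()` in `packages/wakatime/stats.py` (further down in this diff), which bumps the Pygments priority of popular languages so they win ties during lexer guessing. A minimal sketch of that application, assuming the `pygments` package is importable:

```python
# Minimal sketch mirroring customize_priority() from stats.py; assumes pygments.
from pygments.lexers import find_lexer_class

LANGUAGES = {'typescript': 0.01, 'perl': 0.01, 'perl6': 0.01, 'f#': 0.01}

def customize_priority(lexer_cls):
    """Overwrite the class-level priority when the language is listed above."""
    lexer_name = lexer_cls.name.lower().replace('sharp', '#')
    if lexer_name in LANGUAGES:
        lexer_cls.priority = LANGUAGES[lexer_name]
    return lexer_cls

perl = customize_priority(find_lexer_class('Perl'))
print(perl.name, perl.priority)  # Perl 0.01
```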
@@ -31,7 +31,7 @@
"groovy": "Groovy",
"haml": "Haml",
"haskell": "Haskell",
"haxe": "HaXe",
"haxe": "Haxe",
"html": "HTML",
"ini": "INI",
"jade": "Jade",
@@ -29,7 +29,6 @@ from .compat import u, is_py3
from .constants import (
API_ERROR,
AUTH_ERROR,
CONFIG_FILE_PARSE_ERROR,
SUCCESS,
UNKNOWN_ERROR,
MALFORMED_HEARTBEAT_ERROR,

@@ -293,8 +292,6 @@ def execute(argv=None):
sys.argv = ['wakatime'] + argv

args, configs = parseArguments()
if configs is None:
return CONFIG_FILE_PARSE_ERROR

setup_logging(args, __version__)
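Taken together with the configs.py hunk earlier, the control flow changes from "return None and check it in execute()" to "raise SystemExit from parseConfigFile()". A small sketch of that flow; the numeric exit code used here is a placeholder, since the value of `CONFIG_FILE_PARSE_ERROR` is not shown in this diff:

```python
# Sketch of the new error flow; the exit code is a placeholder for illustration.
CONFIG_FILE_PARSE_ERROR = 1  # placeholder; the real constant lives in wakatime.constants

def parse_config_file(broken=True):
    if broken:
        raise SystemExit(CONFIG_FILE_PARSE_ERROR)  # replaces returning None
    return {}

try:
    parse_config_file(broken=True)
except SystemExit as exc:
    print('wakatime-cli would exit with code:', exc.code)
```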
packages/wakatime/packages/configparser/__init__.py (new file, 1390 lines)

File diff suppressed because it is too large.

packages/wakatime/packages/configparser/helpers.py (new file, 171 lines)

@@ -0,0 +1,171 @@
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import division
|
||||
from __future__ import print_function
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from collections import MutableMapping
|
||||
try:
|
||||
from collections import UserDict
|
||||
except ImportError:
|
||||
from UserDict import UserDict
|
||||
|
||||
try:
|
||||
from collections import OrderedDict
|
||||
except ImportError:
|
||||
from ordereddict import OrderedDict
|
||||
|
||||
from io import open
|
||||
import sys
|
||||
try:
|
||||
from thread import get_ident
|
||||
except ImportError:
|
||||
try:
|
||||
from _thread import get_ident
|
||||
except ImportError:
|
||||
from _dummy_thread import get_ident
|
||||
|
||||
|
||||
PY2 = sys.version_info[0] == 2
|
||||
PY3 = sys.version_info[0] == 3
|
||||
|
||||
str = type('str')
|
||||
|
||||
|
||||
def from_none(exc):
|
||||
"""raise from_none(ValueError('a')) == raise ValueError('a') from None"""
|
||||
exc.__cause__ = None
|
||||
exc.__suppress_context__ = True
|
||||
return exc
|
||||
|
||||
|
||||
# from reprlib 3.2.1
|
||||
def recursive_repr(fillvalue='...'):
|
||||
'Decorator to make a repr function return fillvalue for a recursive call'
|
||||
|
||||
def decorating_function(user_function):
|
||||
repr_running = set()
|
||||
|
||||
def wrapper(self):
|
||||
key = id(self), get_ident()
|
||||
if key in repr_running:
|
||||
return fillvalue
|
||||
repr_running.add(key)
|
||||
try:
|
||||
result = user_function(self)
|
||||
finally:
|
||||
repr_running.discard(key)
|
||||
return result
|
||||
|
||||
# Can't use functools.wraps() here because of bootstrap issues
|
||||
wrapper.__module__ = getattr(user_function, '__module__')
|
||||
wrapper.__doc__ = getattr(user_function, '__doc__')
|
||||
wrapper.__name__ = getattr(user_function, '__name__')
|
||||
wrapper.__annotations__ = getattr(user_function, '__annotations__', {})
|
||||
return wrapper
|
||||
|
||||
return decorating_function
|
||||
|
||||
# from collections 3.2.1
|
||||
class _ChainMap(MutableMapping):
|
||||
''' A ChainMap groups multiple dicts (or other mappings) together
|
||||
to create a single, updateable view.
|
||||
|
||||
The underlying mappings are stored in a list. That list is public and can
|
||||
accessed or updated using the *maps* attribute. There is no other state.
|
||||
|
||||
Lookups search the underlying mappings successively until a key is found.
|
||||
In contrast, writes, updates, and deletions only operate on the first
|
||||
mapping.
|
||||
|
||||
'''
|
||||
|
||||
def __init__(self, *maps):
|
||||
'''Initialize a ChainMap by setting *maps* to the given mappings.
|
||||
If no mappings are provided, a single empty dictionary is used.
|
||||
|
||||
'''
|
||||
self.maps = list(maps) or [{}] # always at least one map
|
||||
|
||||
def __missing__(self, key):
|
||||
raise KeyError(key)
|
||||
|
||||
def __getitem__(self, key):
|
||||
for mapping in self.maps:
|
||||
try:
|
||||
return mapping[key] # can't use 'key in mapping' with defaultdict
|
||||
except KeyError:
|
||||
pass
|
||||
return self.__missing__(key) # support subclasses that define __missing__
|
||||
|
||||
def get(self, key, default=None):
|
||||
return self[key] if key in self else default
|
||||
|
||||
def __len__(self):
|
||||
return len(set().union(*self.maps)) # reuses stored hash values if possible
|
||||
|
||||
def __iter__(self):
|
||||
return iter(set().union(*self.maps))
|
||||
|
||||
def __contains__(self, key):
|
||||
return any(key in m for m in self.maps)
|
||||
|
||||
@recursive_repr()
|
||||
def __repr__(self):
|
||||
return '{0.__class__.__name__}({1})'.format(
|
||||
self, ', '.join(map(repr, self.maps)))
|
||||
|
||||
@classmethod
|
||||
def fromkeys(cls, iterable, *args):
|
||||
'Create a ChainMap with a single dict created from the iterable.'
|
||||
return cls(dict.fromkeys(iterable, *args))
|
||||
|
||||
def copy(self):
|
||||
'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
|
||||
return self.__class__(self.maps[0].copy(), *self.maps[1:])
|
||||
|
||||
__copy__ = copy
|
||||
|
||||
def new_child(self): # like Django's Context.push()
|
||||
'New ChainMap with a new dict followed by all previous maps.'
|
||||
return self.__class__({}, *self.maps)
|
||||
|
||||
@property
|
||||
def parents(self): # like Django's Context.pop()
|
||||
'New ChainMap from maps[1:].'
|
||||
return self.__class__(*self.maps[1:])
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
self.maps[0][key] = value
|
||||
|
||||
def __delitem__(self, key):
|
||||
try:
|
||||
del self.maps[0][key]
|
||||
except KeyError:
|
||||
raise KeyError('Key not found in the first mapping: {!r}'.format(key))
|
||||
|
||||
def popitem(self):
|
||||
'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.'
|
||||
try:
|
||||
return self.maps[0].popitem()
|
||||
except KeyError:
|
||||
raise KeyError('No keys found in the first mapping.')
|
||||
|
||||
def pop(self, key, *args):
|
||||
'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
|
||||
try:
|
||||
return self.maps[0].pop(key, *args)
|
||||
except KeyError:
|
||||
raise KeyError('Key not found in the first mapping: {!r}'.format(key))
|
||||
|
||||
def clear(self):
|
||||
'Clear maps[0], leaving maps[1:] intact.'
|
||||
self.maps[0].clear()
|
||||
|
||||
|
||||
try:
|
||||
from collections import ChainMap
|
||||
except ImportError:
|
||||
ChainMap = _ChainMap
|
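A short usage sketch of the `ChainMap` behaviour that the `_ChainMap` backport above reproduces (run against the standard-library `collections.ChainMap` on Python 3.3+; the sample dictionaries are invented):

```python
# Lookups search the maps left to right; writes only touch the first map.
from collections import ChainMap  # the backport above mirrors this class

defaults = {'debug': 'false', 'hidefilenames': 'false'}
overrides = {'debug': 'true'}
cfg = ChainMap(overrides, defaults)

print(cfg['debug'])           # 'true'  -> found in the first map
print(cfg['hidefilenames'])   # 'false' -> falls through to defaults
cfg['api_key'] = 'XXXX'       # written into `overrides` only
print(overrides)              # {'debug': 'true', 'api_key': 'XXXX'}
```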
@@ -148,7 +148,7 @@ LEXERS = {
'EvoqueLexer': ('pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), ('application/x-evoque',)),
'EvoqueXmlLexer': ('pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)),
'EzhilLexer': ('pygments.lexers.ezhil', 'Ezhil', ('ezhil',), ('*.n',), ('text/x-ezhil',)),
'FSharpLexer': ('pygments.lexers.dotnet', 'FSharp', ('fsharp',), ('*.fs', '*.fsi'), ('text/x-fsharp',)),
'FSharpLexer': ('pygments.lexers.dotnet', 'F#', ('fsharp',), ('*.fs', '*.fsi'), ('text/x-fsharp',)),
'FactorLexer': ('pygments.lexers.factor', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)),
'FancyLexer': ('pygments.lexers.ruby', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)),
'FantomLexer': ('pygments.lexers.fantom', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)),
@@ -549,7 +549,7 @@ class FSharpLexer(RegexLexer):
.. versionadded:: 1.5
"""

name = 'FSharp'
name = 'F#'
aliases = ['fsharp']
filenames = ['*.fs', '*.fsi']
mimetypes = ['text/x-fsharp']
@@ -450,7 +450,6 @@ class TypeScriptLexer(RegexLexer):
aliases = ['ts', 'typescript']
filenames = ['*.ts', '*.tsx']
mimetypes = ['text/x-typescript']
priority = 0.11

flags = re.DOTALL | re.MULTILINE
@@ -113,9 +113,6 @@ class TypoScriptLexer(RegexLexer):

flags = re.DOTALL | re.MULTILINE

# Slightly higher than TypeScript (which is 0).
priority = 0.1

tokens = {
'root': [
include('comment'),
packages/wakatime/packages/six.py (new file, 886 lines)

@@ -0,0 +1,886 @@
# Copyright (c) 2010-2017 Benjamin Peterson
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in all
|
||||
# copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
"""Utilities for writing code that runs on Python 2 and 3"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import functools
|
||||
import itertools
|
||||
import operator
|
||||
import sys
|
||||
import types
|
||||
|
||||
__author__ = "Benjamin Peterson <benjamin@python.org>"
|
||||
__version__ = "1.10.0"
|
||||
|
||||
|
||||
# Useful for very coarse version differentiation.
|
||||
PY2 = sys.version_info[0] == 2
|
||||
PY3 = sys.version_info[0] == 3
|
||||
PY34 = sys.version_info[0:2] >= (3, 4)
|
||||
|
||||
if PY3:
|
||||
string_types = str,
|
||||
integer_types = int,
|
||||
class_types = type,
|
||||
text_type = str
|
||||
binary_type = bytes
|
||||
|
||||
MAXSIZE = sys.maxsize
|
||||
else:
|
||||
string_types = basestring,
|
||||
integer_types = (int, long)
|
||||
class_types = (type, types.ClassType)
|
||||
text_type = unicode
|
||||
binary_type = str
|
||||
|
||||
if sys.platform.startswith("java"):
|
||||
# Jython always uses 32 bits.
|
||||
MAXSIZE = int((1 << 31) - 1)
|
||||
else:
|
||||
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
|
||||
class X(object):
|
||||
|
||||
def __len__(self):
|
||||
return 1 << 31
|
||||
try:
|
||||
len(X())
|
||||
except OverflowError:
|
||||
# 32-bit
|
||||
MAXSIZE = int((1 << 31) - 1)
|
||||
else:
|
||||
# 64-bit
|
||||
MAXSIZE = int((1 << 63) - 1)
|
||||
del X
|
||||
|
||||
|
||||
def _add_doc(func, doc):
|
||||
"""Add documentation to a function."""
|
||||
func.__doc__ = doc
|
||||
|
||||
|
||||
def _import_module(name):
|
||||
"""Import module, returning the module after the last dot."""
|
||||
__import__(name)
|
||||
return sys.modules[name]
|
||||
|
||||
|
||||
class _LazyDescr(object):
|
||||
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
|
||||
def __get__(self, obj, tp):
|
||||
result = self._resolve()
|
||||
setattr(obj, self.name, result) # Invokes __set__.
|
||||
try:
|
||||
# This is a bit ugly, but it avoids running this again by
|
||||
# removing this descriptor.
|
||||
delattr(obj.__class__, self.name)
|
||||
except AttributeError:
|
||||
pass
|
||||
return result
|
||||
|
||||
|
||||
class MovedModule(_LazyDescr):
|
||||
|
||||
def __init__(self, name, old, new=None):
|
||||
super(MovedModule, self).__init__(name)
|
||||
if PY3:
|
||||
if new is None:
|
||||
new = name
|
||||
self.mod = new
|
||||
else:
|
||||
self.mod = old
|
||||
|
||||
def _resolve(self):
|
||||
return _import_module(self.mod)
|
||||
|
||||
def __getattr__(self, attr):
|
||||
_module = self._resolve()
|
||||
value = getattr(_module, attr)
|
||||
setattr(self, attr, value)
|
||||
return value
|
||||
|
||||
|
||||
class _LazyModule(types.ModuleType):
|
||||
|
||||
def __init__(self, name):
|
||||
super(_LazyModule, self).__init__(name)
|
||||
self.__doc__ = self.__class__.__doc__
|
||||
|
||||
def __dir__(self):
|
||||
attrs = ["__doc__", "__name__"]
|
||||
attrs += [attr.name for attr in self._moved_attributes]
|
||||
return attrs
|
||||
|
||||
# Subclasses should override this
|
||||
_moved_attributes = []
|
||||
|
||||
|
||||
class MovedAttribute(_LazyDescr):
|
||||
|
||||
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
|
||||
super(MovedAttribute, self).__init__(name)
|
||||
if PY3:
|
||||
if new_mod is None:
|
||||
new_mod = name
|
||||
self.mod = new_mod
|
||||
if new_attr is None:
|
||||
if old_attr is None:
|
||||
new_attr = name
|
||||
else:
|
||||
new_attr = old_attr
|
||||
self.attr = new_attr
|
||||
else:
|
||||
self.mod = old_mod
|
||||
if old_attr is None:
|
||||
old_attr = name
|
||||
self.attr = old_attr
|
||||
|
||||
def _resolve(self):
|
||||
module = _import_module(self.mod)
|
||||
return getattr(module, self.attr)
|
||||
|
||||
|
||||
class _SixMetaPathImporter(object):
|
||||
|
||||
"""
|
||||
A meta path importer to import six.moves and its submodules.
|
||||
|
||||
This class implements a PEP302 finder and loader. It should be compatible
|
||||
with Python 2.5 and all existing versions of Python3
|
||||
"""
|
||||
|
||||
def __init__(self, six_module_name):
|
||||
self.name = six_module_name
|
||||
self.known_modules = {}
|
||||
|
||||
def _add_module(self, mod, *fullnames):
|
||||
for fullname in fullnames:
|
||||
self.known_modules[self.name + "." + fullname] = mod
|
||||
|
||||
def _get_module(self, fullname):
|
||||
return self.known_modules[self.name + "." + fullname]
|
||||
|
||||
def find_module(self, fullname, path=None):
|
||||
if fullname in self.known_modules:
|
||||
return self
|
||||
return None
|
||||
|
||||
def __get_module(self, fullname):
|
||||
try:
|
||||
return self.known_modules[fullname]
|
||||
except KeyError:
|
||||
raise ImportError("This loader does not know module " + fullname)
|
||||
|
||||
def load_module(self, fullname):
|
||||
try:
|
||||
# in case of a reload
|
||||
return sys.modules[fullname]
|
||||
except KeyError:
|
||||
pass
|
||||
mod = self.__get_module(fullname)
|
||||
if isinstance(mod, MovedModule):
|
||||
mod = mod._resolve()
|
||||
else:
|
||||
mod.__loader__ = self
|
||||
sys.modules[fullname] = mod
|
||||
return mod
|
||||
|
||||
def is_package(self, fullname):
|
||||
"""
|
||||
Return true, if the named module is a package.
|
||||
|
||||
We need this method to get correct spec objects with
|
||||
Python 3.4 (see PEP451)
|
||||
"""
|
||||
return hasattr(self.__get_module(fullname), "__path__")
|
||||
|
||||
def get_code(self, fullname):
|
||||
"""Return None
|
||||
|
||||
Required, if is_package is implemented"""
|
||||
self.__get_module(fullname) # eventually raises ImportError
|
||||
return None
|
||||
get_source = get_code # same as get_code
|
||||
|
||||
_importer = _SixMetaPathImporter(__name__)
|
||||
|
||||
|
||||
class _MovedItems(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects"""
|
||||
__path__ = [] # mark as package
|
||||
|
||||
|
||||
_moved_attributes = [
|
||||
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
|
||||
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
|
||||
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
|
||||
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
|
||||
MovedAttribute("intern", "__builtin__", "sys"),
|
||||
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
|
||||
MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
|
||||
MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
|
||||
MovedAttribute("getstatusoutput", "commands", "subprocess"),
|
||||
MovedAttribute("getoutput", "commands", "subprocess"),
|
||||
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
|
||||
MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
|
||||
MovedAttribute("reduce", "__builtin__", "functools"),
|
||||
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
|
||||
MovedAttribute("StringIO", "StringIO", "io"),
|
||||
MovedAttribute("UserDict", "UserDict", "collections"),
|
||||
MovedAttribute("UserList", "UserList", "collections"),
|
||||
MovedAttribute("UserString", "UserString", "collections"),
|
||||
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
|
||||
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
|
||||
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
|
||||
MovedModule("builtins", "__builtin__"),
|
||||
MovedModule("configparser", "ConfigParser"),
|
||||
MovedModule("copyreg", "copy_reg"),
|
||||
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
|
||||
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
|
||||
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
|
||||
MovedModule("http_cookies", "Cookie", "http.cookies"),
|
||||
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
|
||||
MovedModule("html_parser", "HTMLParser", "html.parser"),
|
||||
MovedModule("http_client", "httplib", "http.client"),
|
||||
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
|
||||
MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
|
||||
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
|
||||
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
|
||||
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
|
||||
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
|
||||
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
|
||||
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
|
||||
MovedModule("cPickle", "cPickle", "pickle"),
|
||||
MovedModule("queue", "Queue"),
|
||||
MovedModule("reprlib", "repr"),
|
||||
MovedModule("socketserver", "SocketServer"),
|
||||
MovedModule("_thread", "thread", "_thread"),
|
||||
MovedModule("tkinter", "Tkinter"),
|
||||
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
|
||||
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
|
||||
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
|
||||
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
|
||||
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
|
||||
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
|
||||
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
|
||||
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
|
||||
MovedModule("tkinter_colorchooser", "tkColorChooser",
|
||||
"tkinter.colorchooser"),
|
||||
MovedModule("tkinter_commondialog", "tkCommonDialog",
|
||||
"tkinter.commondialog"),
|
||||
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
|
||||
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
|
||||
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
|
||||
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
|
||||
"tkinter.simpledialog"),
|
||||
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
|
||||
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
|
||||
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
|
||||
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
|
||||
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
|
||||
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
|
||||
]
|
||||
# Add windows specific modules.
|
||||
if sys.platform == "win32":
|
||||
_moved_attributes += [
|
||||
MovedModule("winreg", "_winreg"),
|
||||
]
|
||||
|
||||
for attr in _moved_attributes:
|
||||
setattr(_MovedItems, attr.name, attr)
|
||||
if isinstance(attr, MovedModule):
|
||||
_importer._add_module(attr, "moves." + attr.name)
|
||||
del attr
|
||||
|
||||
_MovedItems._moved_attributes = _moved_attributes
|
||||
|
||||
moves = _MovedItems(__name__ + ".moves")
|
||||
_importer._add_module(moves, "moves")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_parse(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects in six.moves.urllib_parse"""
|
||||
|
||||
|
||||
_urllib_parse_moved_attributes = [
|
||||
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urljoin", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urlparse", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("quote", "urllib", "urllib.parse"),
|
||||
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
|
||||
MovedAttribute("unquote", "urllib", "urllib.parse"),
|
||||
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
|
||||
MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"),
|
||||
MovedAttribute("urlencode", "urllib", "urllib.parse"),
|
||||
MovedAttribute("splitquery", "urllib", "urllib.parse"),
|
||||
MovedAttribute("splittag", "urllib", "urllib.parse"),
|
||||
MovedAttribute("splituser", "urllib", "urllib.parse"),
|
||||
MovedAttribute("splitvalue", "urllib", "urllib.parse"),
|
||||
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("uses_query", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
|
||||
]
|
||||
for attr in _urllib_parse_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_parse, attr.name, attr)
|
||||
del attr
|
||||
|
||||
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
|
||||
"moves.urllib_parse", "moves.urllib.parse")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_error(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects in six.moves.urllib_error"""
|
||||
|
||||
|
||||
_urllib_error_moved_attributes = [
|
||||
MovedAttribute("URLError", "urllib2", "urllib.error"),
|
||||
MovedAttribute("HTTPError", "urllib2", "urllib.error"),
|
||||
MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
|
||||
]
|
||||
for attr in _urllib_error_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_error, attr.name, attr)
|
||||
del attr
|
||||
|
||||
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
|
||||
"moves.urllib_error", "moves.urllib.error")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_request(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects in six.moves.urllib_request"""
|
||||
|
||||
|
||||
_urllib_request_moved_attributes = [
|
||||
MovedAttribute("urlopen", "urllib2", "urllib.request"),
|
||||
MovedAttribute("install_opener", "urllib2", "urllib.request"),
|
||||
MovedAttribute("build_opener", "urllib2", "urllib.request"),
|
||||
MovedAttribute("pathname2url", "urllib", "urllib.request"),
|
||||
MovedAttribute("url2pathname", "urllib", "urllib.request"),
|
||||
MovedAttribute("getproxies", "urllib", "urllib.request"),
|
||||
MovedAttribute("Request", "urllib2", "urllib.request"),
|
||||
MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
|
||||
MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
|
||||
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("FileHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
|
||||
MovedAttribute("urlretrieve", "urllib", "urllib.request"),
|
||||
MovedAttribute("urlcleanup", "urllib", "urllib.request"),
|
||||
MovedAttribute("URLopener", "urllib", "urllib.request"),
|
||||
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
|
||||
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
|
||||
]
|
||||
for attr in _urllib_request_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_request, attr.name, attr)
|
||||
del attr
|
||||
|
||||
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
|
||||
"moves.urllib_request", "moves.urllib.request")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_response(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects in six.moves.urllib_response"""
|
||||
|
||||
|
||||
_urllib_response_moved_attributes = [
|
||||
MovedAttribute("addbase", "urllib", "urllib.response"),
|
||||
MovedAttribute("addclosehook", "urllib", "urllib.response"),
|
||||
MovedAttribute("addinfo", "urllib", "urllib.response"),
|
||||
MovedAttribute("addinfourl", "urllib", "urllib.response"),
|
||||
]
|
||||
for attr in _urllib_response_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_response, attr.name, attr)
|
||||
del attr
|
||||
|
||||
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
|
||||
"moves.urllib_response", "moves.urllib.response")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_robotparser(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
|
||||
|
||||
|
||||
_urllib_robotparser_moved_attributes = [
|
||||
MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
|
||||
]
|
||||
for attr in _urllib_robotparser_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
|
||||
del attr
|
||||
|
||||
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
|
||||
"moves.urllib_robotparser", "moves.urllib.robotparser")
|
||||
|
||||
|
||||
class Module_six_moves_urllib(types.ModuleType):
|
||||
|
||||
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
|
||||
__path__ = [] # mark as package
|
||||
parse = _importer._get_module("moves.urllib_parse")
|
||||
error = _importer._get_module("moves.urllib_error")
|
||||
request = _importer._get_module("moves.urllib_request")
|
||||
response = _importer._get_module("moves.urllib_response")
|
||||
robotparser = _importer._get_module("moves.urllib_robotparser")
|
||||
|
||||
def __dir__(self):
|
||||
return ['parse', 'error', 'request', 'response', 'robotparser']
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
|
||||
"moves.urllib")
|
||||
|
||||
|
||||
def add_move(move):
|
||||
"""Add an item to six.moves."""
|
||||
setattr(_MovedItems, move.name, move)
|
||||
|
||||
|
||||
def remove_move(name):
|
||||
"""Remove item from six.moves."""
|
||||
try:
|
||||
delattr(_MovedItems, name)
|
||||
except AttributeError:
|
||||
try:
|
||||
del moves.__dict__[name]
|
||||
except KeyError:
|
||||
raise AttributeError("no such move, %r" % (name,))
|
||||
|
||||
|
||||
if PY3:
|
||||
_meth_func = "__func__"
|
||||
_meth_self = "__self__"
|
||||
|
||||
_func_closure = "__closure__"
|
||||
_func_code = "__code__"
|
||||
_func_defaults = "__defaults__"
|
||||
_func_globals = "__globals__"
|
||||
else:
|
||||
_meth_func = "im_func"
|
||||
_meth_self = "im_self"
|
||||
|
||||
_func_closure = "func_closure"
|
||||
_func_code = "func_code"
|
||||
_func_defaults = "func_defaults"
|
||||
_func_globals = "func_globals"
|
||||
|
||||
|
||||
try:
|
||||
advance_iterator = next
|
||||
except NameError:
|
||||
def advance_iterator(it):
|
||||
return it.next()
|
||||
next = advance_iterator
|
||||
|
||||
|
||||
try:
|
||||
callable = callable
|
||||
except NameError:
|
||||
def callable(obj):
|
||||
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
|
||||
|
||||
|
||||
if PY3:
|
||||
def get_unbound_function(unbound):
|
||||
return unbound
|
||||
|
||||
create_bound_method = types.MethodType
|
||||
|
||||
def create_unbound_method(func, cls):
|
||||
return func
|
||||
|
||||
Iterator = object
|
||||
else:
|
||||
def get_unbound_function(unbound):
|
||||
return unbound.im_func
|
||||
|
||||
def create_bound_method(func, obj):
|
||||
return types.MethodType(func, obj, obj.__class__)
|
||||
|
||||
def create_unbound_method(func, cls):
|
||||
return types.MethodType(func, None, cls)
|
||||
|
||||
class Iterator(object):
|
||||
|
||||
def next(self):
|
||||
return type(self).__next__(self)
|
||||
|
||||
callable = callable
|
||||
_add_doc(get_unbound_function,
|
||||
"""Get the function out of a possibly unbound function""")
|
||||
|
||||
|
||||
get_method_function = operator.attrgetter(_meth_func)
|
||||
get_method_self = operator.attrgetter(_meth_self)
|
||||
get_function_closure = operator.attrgetter(_func_closure)
|
||||
get_function_code = operator.attrgetter(_func_code)
|
||||
get_function_defaults = operator.attrgetter(_func_defaults)
|
||||
get_function_globals = operator.attrgetter(_func_globals)
|
||||
|
||||
|
||||
if PY3:
|
||||
def iterkeys(d, **kw):
|
||||
return iter(d.keys(**kw))
|
||||
|
||||
def itervalues(d, **kw):
|
||||
return iter(d.values(**kw))
|
||||
|
||||
def iteritems(d, **kw):
|
||||
return iter(d.items(**kw))
|
||||
|
||||
def iterlists(d, **kw):
|
||||
return iter(d.lists(**kw))
|
||||
|
||||
viewkeys = operator.methodcaller("keys")
|
||||
|
||||
viewvalues = operator.methodcaller("values")
|
||||
|
||||
viewitems = operator.methodcaller("items")
|
||||
else:
|
||||
def iterkeys(d, **kw):
|
||||
return d.iterkeys(**kw)
|
||||
|
||||
def itervalues(d, **kw):
|
||||
return d.itervalues(**kw)
|
||||
|
||||
def iteritems(d, **kw):
|
||||
return d.iteritems(**kw)
|
||||
|
||||
def iterlists(d, **kw):
|
||||
return d.iterlists(**kw)
|
||||
|
||||
viewkeys = operator.methodcaller("viewkeys")
|
||||
|
||||
viewvalues = operator.methodcaller("viewvalues")
|
||||
|
||||
viewitems = operator.methodcaller("viewitems")
|
||||
|
||||
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
|
||||
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
|
||||
_add_doc(iteritems,
|
||||
"Return an iterator over the (key, value) pairs of a dictionary.")
|
||||
_add_doc(iterlists,
|
||||
"Return an iterator over the (key, [values]) pairs of a dictionary.")
|
||||
|
||||
|
||||
if PY3:
|
||||
def b(s):
|
||||
return s.encode("latin-1")
|
||||
|
||||
def u(s):
|
||||
return s
|
||||
unichr = chr
|
||||
import struct
|
||||
int2byte = struct.Struct(">B").pack
|
||||
del struct
|
||||
byte2int = operator.itemgetter(0)
|
||||
indexbytes = operator.getitem
|
||||
iterbytes = iter
|
||||
import io
|
||||
StringIO = io.StringIO
|
||||
BytesIO = io.BytesIO
|
||||
_assertCountEqual = "assertCountEqual"
|
||||
if sys.version_info[1] <= 1:
|
||||
_assertRaisesRegex = "assertRaisesRegexp"
|
||||
_assertRegex = "assertRegexpMatches"
|
||||
else:
|
||||
_assertRaisesRegex = "assertRaisesRegex"
|
||||
_assertRegex = "assertRegex"
|
||||
else:
|
||||
def b(s):
|
||||
return s
|
||||
# Workaround for standalone backslash
|
||||
|
||||
def u(s):
|
||||
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
|
||||
unichr = unichr
|
||||
int2byte = chr
|
||||
|
||||
def byte2int(bs):
|
||||
return ord(bs[0])
|
||||
|
||||
def indexbytes(buf, i):
|
||||
return ord(buf[i])
|
||||
iterbytes = functools.partial(itertools.imap, ord)
|
||||
import StringIO
|
||||
StringIO = BytesIO = StringIO.StringIO
|
||||
_assertCountEqual = "assertItemsEqual"
|
||||
_assertRaisesRegex = "assertRaisesRegexp"
|
||||
_assertRegex = "assertRegexpMatches"
|
||||
_add_doc(b, """Byte literal""")
|
||||
_add_doc(u, """Text literal""")
|
||||
|
||||
|
||||
def assertCountEqual(self, *args, **kwargs):
|
||||
return getattr(self, _assertCountEqual)(*args, **kwargs)
|
||||
|
||||
|
||||
def assertRaisesRegex(self, *args, **kwargs):
|
||||
return getattr(self, _assertRaisesRegex)(*args, **kwargs)
|
||||
|
||||
|
||||
def assertRegex(self, *args, **kwargs):
|
||||
return getattr(self, _assertRegex)(*args, **kwargs)
|
||||
|
||||
|
||||
if PY3:
|
||||
exec_ = getattr(moves.builtins, "exec")
|
||||
|
||||
def reraise(tp, value, tb=None):
|
||||
try:
|
||||
if value is None:
|
||||
value = tp()
|
||||
if value.__traceback__ is not tb:
|
||||
raise value.with_traceback(tb)
|
||||
raise value
|
||||
finally:
|
||||
value = None
|
||||
tb = None
|
||||
|
||||
else:
|
||||
def exec_(_code_, _globs_=None, _locs_=None):
|
||||
"""Execute code in a namespace."""
|
||||
if _globs_ is None:
|
||||
frame = sys._getframe(1)
|
||||
_globs_ = frame.f_globals
|
||||
if _locs_ is None:
|
||||
_locs_ = frame.f_locals
|
||||
del frame
|
||||
elif _locs_ is None:
|
||||
_locs_ = _globs_
|
||||
exec("""exec _code_ in _globs_, _locs_""")
|
||||
|
||||
exec_("""def reraise(tp, value, tb=None):
|
||||
try:
|
||||
raise tp, value, tb
|
||||
finally:
|
||||
tb = None
|
||||
""")
|
||||
|
||||
|
||||
if sys.version_info[:2] == (3, 2):
|
||||
exec_("""def raise_from(value, from_value):
|
||||
try:
|
||||
if from_value is None:
|
||||
raise value
|
||||
raise value from from_value
|
||||
finally:
|
||||
value = None
|
||||
""")
|
||||
elif sys.version_info[:2] > (3, 2):
|
||||
exec_("""def raise_from(value, from_value):
|
||||
try:
|
||||
raise value from from_value
|
||||
finally:
|
||||
value = None
|
||||
""")
|
||||
else:
|
||||
def raise_from(value, from_value):
|
||||
raise value
|
||||
|
||||
|
||||
print_ = getattr(moves.builtins, "print", None)
|
||||
if print_ is None:
|
||||
def print_(*args, **kwargs):
|
||||
"""The new-style print function for Python 2.4 and 2.5."""
|
||||
fp = kwargs.pop("file", sys.stdout)
|
||||
if fp is None:
|
||||
return
|
||||
|
||||
def write(data):
|
||||
if not isinstance(data, basestring):
|
||||
data = str(data)
|
||||
# If the file has an encoding, encode unicode with it.
|
||||
if (isinstance(fp, file) and
|
||||
isinstance(data, unicode) and
|
||||
fp.encoding is not None):
|
||||
errors = getattr(fp, "errors", None)
|
||||
if errors is None:
|
||||
errors = "strict"
|
||||
data = data.encode(fp.encoding, errors)
|
||||
fp.write(data)
|
||||
want_unicode = False
|
||||
sep = kwargs.pop("sep", None)
|
||||
if sep is not None:
|
||||
if isinstance(sep, unicode):
|
||||
want_unicode = True
|
||||
elif not isinstance(sep, str):
|
||||
raise TypeError("sep must be None or a string")
|
||||
end = kwargs.pop("end", None)
|
||||
if end is not None:
|
||||
if isinstance(end, unicode):
|
||||
want_unicode = True
|
||||
elif not isinstance(end, str):
|
||||
raise TypeError("end must be None or a string")
|
||||
if kwargs:
|
||||
raise TypeError("invalid keyword arguments to print()")
|
||||
if not want_unicode:
|
||||
for arg in args:
|
||||
if isinstance(arg, unicode):
|
||||
want_unicode = True
|
||||
break
|
||||
if want_unicode:
|
||||
newline = unicode("\n")
|
||||
space = unicode(" ")
|
||||
else:
|
||||
newline = "\n"
|
||||
space = " "
|
||||
if sep is None:
|
||||
sep = space
|
||||
if end is None:
|
||||
end = newline
|
||||
for i, arg in enumerate(args):
|
||||
if i:
|
||||
write(sep)
|
||||
write(arg)
|
||||
write(end)
|
||||
if sys.version_info[:2] < (3, 3):
|
||||
_print = print_
|
||||
|
||||
def print_(*args, **kwargs):
|
||||
fp = kwargs.get("file", sys.stdout)
|
||||
flush = kwargs.pop("flush", False)
|
||||
_print(*args, **kwargs)
|
||||
if flush and fp is not None:
|
||||
fp.flush()
|
||||
|
||||
_add_doc(reraise, """Reraise an exception.""")
|
||||
|
||||
if sys.version_info[0:2] < (3, 4):
|
||||
def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
|
||||
updated=functools.WRAPPER_UPDATES):
|
||||
def wrapper(f):
|
||||
f = functools.wraps(wrapped, assigned, updated)(f)
|
||||
f.__wrapped__ = wrapped
|
||||
return f
|
||||
return wrapper
|
||||
else:
|
||||
wraps = functools.wraps
|
||||
|
||||
|
||||
def with_metaclass(meta, *bases):
|
||||
"""Create a base class with a metaclass."""
|
||||
# This requires a bit of explanation: the basic idea is to make a dummy
|
||||
# metaclass for one level of class instantiation that replaces itself with
|
||||
# the actual metaclass.
|
||||
class metaclass(meta):
|
||||
|
||||
def __new__(cls, name, this_bases, d):
|
||||
return meta(name, bases, d)
|
||||
return type.__new__(metaclass, 'temporary_class', (), {})
|
||||
|
||||
|
||||
def add_metaclass(metaclass):
|
||||
"""Class decorator for creating a class with a metaclass."""
|
||||
def wrapper(cls):
|
||||
orig_vars = cls.__dict__.copy()
|
||||
slots = orig_vars.get('__slots__')
|
||||
if slots is not None:
|
||||
if isinstance(slots, str):
|
||||
slots = [slots]
|
||||
for slots_var in slots:
|
||||
orig_vars.pop(slots_var)
|
||||
orig_vars.pop('__dict__', None)
|
||||
orig_vars.pop('__weakref__', None)
|
||||
return metaclass(cls.__name__, cls.__bases__, orig_vars)
|
||||
return wrapper
|
||||
|
||||
|
||||
def python_2_unicode_compatible(klass):
|
||||
"""
|
||||
A decorator that defines __unicode__ and __str__ methods under Python 2.
|
||||
Under Python 3 it does nothing.
|
||||
|
||||
To support Python 2 and 3 with a single code base, define a __str__ method
|
||||
returning text and apply this decorator to the class.
|
||||
"""
|
||||
if PY2:
|
||||
if '__str__' not in klass.__dict__:
|
||||
raise ValueError("@python_2_unicode_compatible cannot be applied "
|
||||
"to %s because it doesn't define __str__()." %
|
||||
klass.__name__)
|
||||
klass.__unicode__ = klass.__str__
|
||||
klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
|
||||
return klass
|
||||
|
||||
|
||||
# Complete the moves implementation.
|
||||
# This code is at the end of this module to speed up module loading.
|
||||
# Turn this module into a package.
|
||||
__path__ = [] # required for PEP 302 and PEP 451
|
||||
__package__ = __name__ # see PEP 366 @ReservedAssignment
|
||||
if globals().get("__spec__") is not None:
|
||||
__spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
|
||||
# Remove other six meta path importers, since they cause problems. This can
|
||||
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
|
||||
# this for some reason.)
|
||||
if sys.meta_path:
|
||||
for i, importer in enumerate(sys.meta_path):
|
||||
# Here's some real nastiness: Another "instance" of the six module might
|
||||
# be floating around. Therefore, we can't use isinstance() to check for
|
||||
# the six meta path importer, since the other six instance will have
|
||||
# inserted an importer with different class.
|
||||
if (type(importer).__name__ == "_SixMetaPathImporter" and
|
||||
importer.name == __name__):
|
||||
del sys.meta_path[i]
|
||||
break
|
||||
del i, importer
|
||||
# Finally, add the importer to the meta path import hook.
|
||||
sys.meta_path.append(_importer)
|
@ -11,16 +11,20 @@
|
||||
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
from .compat import u, open
|
||||
from .dependencies import DependencyParser
|
||||
from .language_priorities import LANGUAGES
|
||||
|
||||
from .packages.pygments.lexers import (
|
||||
_iter_lexerclasses,
|
||||
_fn_matches,
|
||||
basename,
|
||||
ClassNotFound,
|
||||
find_lexer_class,
|
||||
get_lexer_by_name,
|
||||
guess_lexer_for_filename,
|
||||
)
|
||||
from .packages.pygments.modeline import get_filetype_from_buffer
|
||||
|
||||
@ -44,9 +48,7 @@ def get_file_stats(file_name, entity_type='file', lineno=None, cursorpos=None,
|
||||
'cursorpos': cursorpos,
|
||||
}
|
||||
else:
|
||||
language = standardize_language(language, plugin)
|
||||
lexer = get_lexer(language)
|
||||
|
||||
language, lexer = standardize_language(language, plugin)
|
||||
if not language:
|
||||
language, lexer = guess_language(file_name)
|
||||
|
||||
@ -63,29 +65,21 @@ def get_file_stats(file_name, entity_type='file', lineno=None, cursorpos=None,
|
||||
return stats
|
||||
|
||||
|
||||
def get_lexer(language):
|
||||
"""Return a Pygments Lexer object for the given language string."""
|
||||
|
||||
if not language:
|
||||
return None
|
||||
|
||||
lexer_cls = find_lexer_class(language)
|
||||
if lexer_cls:
|
||||
return lexer_cls()
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def guess_language(file_name):
|
||||
"""Guess lexer and language for a file.
|
||||
|
||||
Returns (language, lexer) tuple where language is a unicode string.
|
||||
Returns a tuple of (language_str, lexer_obj).
|
||||
"""
|
||||
|
||||
lexer = None
|
||||
|
||||
language = get_language_from_extension(file_name)
|
||||
lexer = smart_guess_lexer(file_name)
|
||||
if language is None and lexer is not None:
|
||||
language = u(lexer.name)
|
||||
if language:
|
||||
lexer = get_lexer(language)
|
||||
else:
|
||||
lexer = smart_guess_lexer(file_name)
|
||||
if lexer:
|
||||
language = u(lexer.name)
|
||||
|
||||
return language, lexer
|
||||
|
||||
@ -108,7 +102,7 @@ def smart_guess_lexer(file_name):
|
||||
lexer = lexer1
|
||||
if (lexer2 and accuracy2 and
|
||||
(not accuracy1 or accuracy2 > accuracy1)):
|
||||
lexer = lexer2 # pragma: nocover
|
||||
lexer = lexer2
|
||||
|
||||
return lexer
|
||||
|
||||
@ -122,15 +116,15 @@ def guess_lexer_using_filename(file_name, text):
|
||||
lexer, accuracy = None, None
|
||||
|
||||
try:
|
||||
lexer = guess_lexer_for_filename(file_name, text)
|
||||
lexer = custom_pygments_guess_lexer_for_filename(file_name, text)
|
||||
except:
|
||||
pass
|
||||
log.traceback(logging.DEBUG)
|
||||
|
||||
if lexer is not None:
|
||||
try:
|
||||
accuracy = lexer.analyse_text(text)
|
||||
except:
|
||||
pass
|
||||
log.traceback(logging.DEBUG)
|
||||
|
||||
return lexer, accuracy
|
||||
|
||||
@ -147,34 +141,36 @@ def guess_lexer_using_modeline(text):
|
||||
try:
|
||||
file_type = get_filetype_from_buffer(text)
|
||||
except: # pragma: nocover
|
||||
pass
|
||||
log.traceback(logging.DEBUG)
|
||||
|
||||
if file_type is not None:
|
||||
try:
|
||||
lexer = get_lexer_by_name(file_type)
|
||||
except ClassNotFound:
|
||||
pass
|
||||
log.traceback(logging.DEBUG)
|
||||
|
||||
if lexer is not None:
|
||||
try:
|
||||
accuracy = lexer.analyse_text(text)
|
||||
except: # pragma: nocover
|
||||
pass
|
||||
log.traceback(logging.DEBUG)
|
||||
|
||||
return lexer, accuracy
|
||||
|
||||
|
||||
def get_language_from_extension(file_name):
|
||||
"""Returns a matching language for the given file extension.
|
||||
|
||||
When guessed_language is 'C', does not restrict to known file extensions.
|
||||
"""
|
||||
|
||||
filepart, extension = os.path.splitext(file_name)
|
||||
|
||||
if os.path.exists(u('{0}{1}').format(u(filepart), u('.c'))) or os.path.exists(u('{0}{1}').format(u(filepart), u('.C'))):
|
||||
return 'C'
|
||||
if re.match(r'\.h.*', extension, re.IGNORECASE) or re.match(r'\.c.*', extension, re.IGNORECASE):
|
||||
|
||||
if os.path.exists(u('{0}{1}').format(u(filepart), u('.c'))) or os.path.exists(u('{0}{1}').format(u(filepart), u('.C'))):
|
||||
return 'C'
|
||||
|
||||
extension = extension.lower()
|
||||
if extension == '.h':
|
||||
directory = os.path.dirname(file_name)
|
||||
available_files = os.listdir(directory)
|
||||
available_extensions = list(zip(*map(os.path.splitext, available_files)))[1]
|
||||
@ -204,21 +200,37 @@ def number_lines_in_file(file_name):
|
||||
|
||||
|
||||
def standardize_language(language, plugin):
|
||||
"""Maps a string to the equivalent Pygments language."""
|
||||
"""Maps a string to the equivalent Pygments language.
|
||||
|
||||
Returns a tuple of (language_str, lexer_obj).
|
||||
"""
|
||||
|
||||
if not language:
|
||||
return None
|
||||
return None, None
|
||||
|
||||
# standardize language for this plugin
|
||||
if plugin:
|
||||
plugin = plugin.split(' ')[-1].split('/')[0].split('-')[0]
|
||||
standardized = get_language_from_json(language, plugin)
|
||||
if standardized is not None:
|
||||
return standardized
|
||||
return standardized, get_lexer(standardized)
|
||||
|
||||
# standardize language against default languages
|
||||
standardized = get_language_from_json(language, 'default')
|
||||
return standardized
|
||||
return standardized, get_lexer(standardized)
|
||||
|
||||
|
||||
def get_lexer(language):
|
||||
"""Return a Pygments Lexer object for the given language string."""
|
||||
|
||||
if not language:
|
||||
return None
|
||||
|
||||
lexer_cls = find_lexer_class(language)
|
||||
if lexer_cls:
|
||||
return lexer_cls()
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def get_language_from_json(language, key):
|
||||
@ -229,13 +241,14 @@ def get_language_from_json(language, key):
|
||||
'languages',
|
||||
'{0}.json').format(key.lower())
|
||||
|
||||
try:
|
||||
with open(file_name, 'r', encoding='utf-8') as fh:
|
||||
languages = json.loads(fh.read())
|
||||
if languages.get(language.lower()):
|
||||
return languages[language.lower()]
|
||||
except:
|
||||
pass
|
||||
if os.path.exists(file_name):
|
||||
try:
|
||||
with open(file_name, 'r', encoding='utf-8') as fh:
|
||||
languages = json.loads(fh.read())
|
||||
if languages.get(language.lower()):
|
||||
return languages[language.lower()]
|
||||
except:
|
||||
log.traceback(logging.DEBUG)
|
||||
|
||||
return None
|
||||
|
||||
@ -254,3 +267,51 @@ def get_file_head(file_name):
|
||||
except:
|
||||
log.traceback(logging.DEBUG)
|
||||
return text
|
||||
|
||||
|
||||
def custom_pygments_guess_lexer_for_filename(_fn, _text, **options):
|
||||
"""Overwrite pygments.lexers.guess_lexer_for_filename to customize the
|
||||
priority of different lexers based on popularity of languages."""
|
||||
|
||||
fn = basename(_fn)
|
||||
primary = {}
|
||||
matching_lexers = set()
|
||||
for lexer in _iter_lexerclasses():
|
||||
for filename in lexer.filenames:
|
||||
if _fn_matches(fn, filename):
|
||||
matching_lexers.add(lexer)
|
||||
primary[lexer] = True
|
||||
for filename in lexer.alias_filenames:
|
||||
if _fn_matches(fn, filename):
|
||||
matching_lexers.add(lexer)
|
||||
primary[lexer] = False
|
||||
if not matching_lexers:
|
||||
raise ClassNotFound('no lexer for filename %r found' % fn)
|
||||
if len(matching_lexers) == 1:
|
||||
return matching_lexers.pop()(**options)
|
||||
result = []
|
||||
for lexer in matching_lexers:
|
||||
rv = lexer.analyse_text(_text)
|
||||
if rv == 1.0:
|
||||
return lexer(**options)
|
||||
result.append((rv, customize_priority(lexer)))
|
||||
|
||||
def type_sort(t):
|
||||
# sort by:
|
||||
# - analyse score
|
||||
# - is primary filename pattern?
|
||||
# - priority
|
||||
# - last resort: class name
|
||||
return (t[0], primary[t[1]], t[1].priority, t[1].__name__)
|
||||
result.sort(key=type_sort)
|
||||
|
||||
return result[-1][1](**options)
|
||||
|
||||
|
||||
def customize_priority(lexer):
|
||||
"""Return an integer priority for the given lexer object."""
|
||||
|
||||
lexer_name = lexer.name.lower().replace('sharp', '#')
|
||||
if lexer_name in LANGUAGES:
|
||||
lexer.priority = LANGUAGES[lexer_name]
|
||||
return lexer
|