Mirror of https://github.com/wakatime/sublime-wakatime.git (synced 2023-08-10 21:13:02 +03:00)

Compare commits (42 commits)
Commits in this compare:

aba89d3948, 18d87118e1, fd91b9e032, 16b15773bf, f0b518862a, 7ee7de70d5,
fb479f8e84, 7d37193f65, 6bd62b95db, abf4a94a59, 9337e3173b, 57fa4d4d84,
9b5c59e677, 71ce25a326, f2f14207f5, ac2ec0e73c, 040a76b93c, dab0621b97,
675f9ecd69, a6f92b9c74, bfcc242d7e, 762027644f, 3c4ceb95fa, d6d8bceca0,
acaad2dc83, 23c5801080, 05a3bfbb53, 8faaa3b0e3, 4bcddf2a98, b51ae5c2c4,
5cd0061653, 651c84325e, 89368529cb, f1f408284b, 7053932731, b6c4956521,
68a2557884, c7ee7258fb, aaff2503fb, 00a1193bd3, 2371daac1b, 4395db2b2d
HISTORY.rst (98 lines changed)

```diff
@@ -3,6 +3,104 @@ History
 -------
 
+4.0.9 (2015-07-29)
+++++++++++++++++++
+
+- catch exceptions from pygments.modeline.get_filetype_from_buffer
+
+
+4.0.8 (2015-06-23)
+++++++++++++++++++
+
+- fix offline logging
+- limit language detection to known file extensions, unless file contents have a vim modeline
+- upgrade wakatime cli to v4.0.16
+
+
+4.0.7 (2015-06-21)
+++++++++++++++++++
+
+- allow customizing status bar message in sublime-settings file
+- guess language using multiple methods, then use most accurate guess
+- use entity and type for new heartbeats api resource schema
+- correctly log message from py.warnings module
+- upgrade wakatime cli to v4.0.15
+
+
+4.0.6 (2015-05-16)
+++++++++++++++++++
+
+- fix bug with auto detecting project name
+- upgrade wakatime cli to v4.0.13
+
+
+4.0.5 (2015-05-15)
+++++++++++++++++++
+
+- correctly display caller and lineno in log file when debug is true
+- project passed with --project argument will always be used
+- new --alternate-project argument
+- upgrade wakatime cli to v4.0.12
+
+
+4.0.4 (2015-05-12)
+++++++++++++++++++
+
+- reuse SSL connection over multiple processes for improved performance
+- upgrade wakatime cli to v4.0.11
+
+
+4.0.3 (2015-05-06)
+++++++++++++++++++
+
+- send cursorpos to wakatime cli
+- upgrade wakatime cli to v4.0.10
+
+
+4.0.2 (2015-05-06)
+++++++++++++++++++
+
+- only send heartbeats for the currently active buffer
+
+
+4.0.1 (2015-05-06)
+++++++++++++++++++
+
+- ignore git temporary files
+- don't send two write heartbeats within 2 seconds of each other
+
+
+4.0.0 (2015-04-12)
+++++++++++++++++++
+
+- listen for selection modified instead of buffer activated for better performance
+
+
+3.0.19 (2015-04-07)
++++++++++++++++++++
+
+- fix bug in project detection when folder not found
+
+
+3.0.18 (2015-04-04)
++++++++++++++++++++
+
+- upgrade wakatime cli to v4.0.8
+- added api_url config option to .wakatime.cfg file
+
+
+3.0.17 (2015-04-02)
++++++++++++++++++++
+
+- use open folder as current project when not using revision control
+
+
+3.0.16 (2015-04-02)
++++++++++++++++++++
+
+- copy list when obfuscating api key so original command is not modified
+
+
 3.0.15 (2015-04-01)
 +++++++++++++++++++
```
README.md (14 lines changed)

```diff
@@ -18,7 +18,7 @@ Heads Up! For Sublime Text 2 on Windows & Linux, WakaTime depends on [Python](ht
 
 c) Type `wakatime`, then press `enter` with the `WakaTime` plugin selected.
 
-3. Enter your [api key](https://wakatime.com/settings#apikey) from https://wakatime.com/settings#apikey, then press `enter`.
+3. Enter your [api key](https://wakatime.com/settings#apikey), then press `enter`.
 
 4. Use Sublime and your time will be tracked for you automatically.
 
@@ -29,3 +29,15 @@ Screen Shots
 
 [screenshot]
 
+Troubleshooting
+---------------
+
+First, turn on debug mode in your `WakaTime.sublime-settings` file.
+
+[screenshot]
+
+Add the line: `"debug": true`
+
+Then, open your Sublime Console with `View -> Show Console` to see the plugin executing the wakatime cli process when sending a heartbeat. Also, tail your `$HOME/.wakatime.log` file to debug wakatime cli problems.
+
+For more general troubleshooting information, see [wakatime/wakatime#troubleshooting](https://github.com/wakatime/wakatime#troubleshooting).
```
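The Troubleshooting section added above points users at `$HOME/.wakatime.log`. On platforms without a `tail` command, a small helper along these lines shows the same information; this is a hypothetical convenience script, not part of the plugin or this diff:

```python
import os


def tail_wakatime_log(num_lines=20):
    """Print the last num_lines lines of ~/.wakatime.log."""
    log_path = os.path.expanduser(os.path.join('~', '.wakatime.log'))
    if not os.path.exists(log_path):
        print('No log file found at %s' % log_path)
        return
    with open(log_path) as log_file:
        # Fine for a log of this size; a seek-based tail would scale better.
        for line in log_file.readlines()[-num_lines:]:
            print(line.rstrip())


if __name__ == '__main__':
    tail_wakatime_log()
```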
WakaTime.py (104 lines changed)

```diff
@@ -7,7 +7,7 @@ Website: https://wakatime.com/
 ==========================================================="""
 
 
-__version__ = '3.0.15'
+__version__ = '4.0.9'
 
 
 import sublime
@@ -25,13 +25,13 @@ from subprocess import Popen
 
 
 # globals
-ACTION_FREQUENCY = 2
+HEARTBEAT_FREQUENCY = 2
 ST_VERSION = int(sublime.version())
 PLUGIN_DIR = os.path.dirname(os.path.realpath(__file__))
 API_CLIENT = os.path.join(PLUGIN_DIR, 'packages', 'wakatime', 'cli.py')
 SETTINGS_FILE = 'WakaTime.sublime-settings'
 SETTINGS = {}
-LAST_ACTION = {
+LAST_HEARTBEAT = {
     'time': 0,
     'file': None,
     'is_write': False,
@@ -124,7 +124,8 @@ def python_binary():
     return None
 
 
-def obfuscate_apikey(cmd):
+def obfuscate_apikey(command_list):
+    cmd = list(command_list)
     apikey_index = None
     for num in range(len(cmd)):
         if cmd[num] == '--key':
@@ -135,34 +136,64 @@ def obfuscate_apikey(cmd):
     return cmd
 
 
-def enough_time_passed(now, last_time):
-    if now - last_time > ACTION_FREQUENCY * 60:
+def enough_time_passed(now, last_heartbeat, is_write):
+    if now - last_heartbeat['time'] > HEARTBEAT_FREQUENCY * 60:
         return True
+    if is_write and now - last_heartbeat['time'] > 2:
+        return True
     return False
 
 
-def find_project_name_from_folders(folders):
-    try:
-        for folder in folders:
-            for file_name in os.listdir(folder):
-                if file_name.endswith('.sublime-project'):
-                    return file_name.replace('.sublime-project', '', 1)
-    except:
-        pass
-    return None
+def find_folder_containing_file(folders, current_file):
+    """Returns absolute path to folder containing the file.
+    """
+
+    parent_folder = None
+
+    current_folder = current_file
+    while True:
+        for folder in folders:
+            if os.path.realpath(os.path.dirname(current_folder)) == os.path.realpath(folder):
+                parent_folder = folder
+                break
+        if parent_folder is not None:
+            break
+        if not current_folder or os.path.dirname(current_folder) == current_folder:
+            break
+        current_folder = os.path.dirname(current_folder)
+
+    return parent_folder
+
+
+def find_project_from_folders(folders, current_file):
+    """Find project name from open folders.
+    """
+
+    folder = find_folder_containing_file(folders, current_file)
+    return os.path.basename(folder) if folder else None
+
+
+def is_view_active(view):
+    if view:
+        active_window = sublime.active_window()
+        if active_window:
+            active_view = active_window.active_view()
+            if active_view:
+                return active_view.buffer_id() == view.buffer_id()
+    return False
 
 
-def handle_action(view, is_write=False):
+def handle_heartbeat(view, is_write=False):
     window = view.window()
     if window is not None:
         target_file = view.file_name()
-        project = window.project_file_name() if hasattr(window, 'project_file_name') else None
+        project = window.project_data() if hasattr(window, 'project_data') else None
         folders = window.folders()
-        thread = SendActionThread(target_file, view, is_write=is_write, project=project, folders=folders)
+        thread = SendHeartbeatThread(target_file, view, is_write=is_write, project=project, folders=folders)
         thread.start()
 
 
-class SendActionThread(threading.Thread):
+class SendHeartbeatThread(threading.Thread):
 
     def __init__(self, target_file, view, is_write=False, project=None, folders=None, force=False):
         threading.Thread.__init__(self)
@@ -175,14 +206,15 @@ class SendActionThread(threading.Thread):
         self.debug = SETTINGS.get('debug')
         self.api_key = SETTINGS.get('api_key', '')
         self.ignore = SETTINGS.get('ignore', [])
-        self.last_action = LAST_ACTION.copy()
+        self.last_heartbeat = LAST_HEARTBEAT.copy()
+        self.cursorpos = view.sel()[0].begin() if view.sel() else None
         self.view = view
 
     def run(self):
         with self.lock:
             if self.target_file:
                 self.timestamp = time.time()
-                if self.force or (self.is_write and not self.last_action['is_write']) or self.target_file != self.last_action['file'] or enough_time_passed(self.timestamp, self.last_action['time']):
+                if self.force or self.target_file != self.last_heartbeat['file'] or enough_time_passed(self.timestamp, self.last_heartbeat, self.is_write):
                     self.send_heartbeat()
 
     def send_heartbeat(self):
@@ -199,14 +231,14 @@ class SendActionThread(threading.Thread):
         ]
         if self.is_write:
             cmd.append('--write')
-        if self.project:
-            self.project = os.path.basename(self.project).replace('.sublime-project', '', 1)
-        if self.project:
-            cmd.extend(['--project', self.project])
+        if self.project and self.project.get('name'):
+            cmd.extend(['--alternate-project', self.project.get('name')])
         elif self.folders:
-            project_name = find_project_name_from_folders(self.folders)
+            project_name = find_project_from_folders(self.folders, self.target_file)
             if project_name:
-                cmd.extend(['--project', project_name])
+                cmd.extend(['--alternate-project', project_name])
+        if self.cursorpos is not None:
+            cmd.extend(['--cursorpos', '{0}'.format(self.cursorpos)])
         for pattern in self.ignore:
             cmd.extend(['--ignore', pattern])
         if self.debug:
@@ -226,15 +258,15 @@ class SendActionThread(threading.Thread):
 
     def sent(self):
         sublime.set_timeout(self.set_status_bar, 0)
-        sublime.set_timeout(self.set_last_action, 0)
+        sublime.set_timeout(self.set_last_heartbeat, 0)
 
     def set_status_bar(self):
        if SETTINGS.get('status_bar_message'):
-            self.view.set_status('wakatime', 'WakaTime active {0}'.format(datetime.now().strftime('%I:%M %p')))
+            self.view.set_status('wakatime', datetime.now().strftime(SETTINGS.get('status_bar_message_fmt')))
 
-    def set_last_action(self):
-        global LAST_ACTION
-        LAST_ACTION = {
+    def set_last_heartbeat(self):
+        global LAST_HEARTBEAT
+        LAST_HEARTBEAT = {
             'file': self.target_file,
             'time': self.timestamp,
             'is_write': self.is_write,
@@ -266,13 +298,15 @@ if ST_VERSION < 3000:
 class WakatimeListener(sublime_plugin.EventListener):
 
     def on_post_save(self, view):
-        handle_action(view, is_write=True)
+        handle_heartbeat(view, is_write=True)
 
-    def on_activated(self, view):
-        handle_action(view)
+    def on_selection_modified(self, view):
+        if is_view_active(view):
+            handle_heartbeat(view)
 
     def on_modified(self, view):
-        handle_action(view)
+        if is_view_active(view):
+            handle_heartbeat(view)
 
 
 class WakatimeDashboardCommand(sublime_plugin.ApplicationCommand):
```
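The `enough_time_passed` rewrite above is the heart of the new throttling: heartbeats for an unchanged file go out at most every `HEARTBEAT_FREQUENCY` (2) minutes, except that a save may follow the previous heartbeat after only 2 seconds, matching the 4.0.1 changelog entry about write heartbeats. A standalone sketch restating that rule from the diff:

```python
import time

HEARTBEAT_FREQUENCY = 2  # minutes, as in the diff above


def enough_time_passed(now, last_heartbeat, is_write):
    # Resend once HEARTBEAT_FREQUENCY minutes have passed...
    if now - last_heartbeat['time'] > HEARTBEAT_FREQUENCY * 60:
        return True
    # ...or after only 2 seconds when the buffer was just saved.
    if is_write and now - last_heartbeat['time'] > 2:
        return True
    return False


# Example: a save 5 seconds after the last heartbeat goes through,
# but an ordinary edit does not.
last = {'time': time.time() - 5}
assert enough_time_passed(time.time(), last, is_write=True)
assert not enough_time_passed(time.time(), last, is_write=False)
```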
WakaTime.sublime-settings

```diff
@@ -9,12 +9,15 @@
 
     // Ignore files; Files (including absolute paths) that match one of these
     // POSIX regular expressions will not be logged.
-    "ignore": ["^/tmp/", "^/etc/", "^/var/"],
+    "ignore": ["^/tmp/", "^/etc/", "^/var/", "COMMIT_EDITMSG$", "PULLREQ_EDITMSG$", "MERGE_MSG$", "TAG_EDITMSG$"],
 
     // Debug mode. Set to true for verbose logging. Defaults to false.
     "debug": false,
 
     // Status bar message. Set to false to hide status bar message.
     // Defaults to true.
-    "status_bar_message": true
+    "status_bar_message": true,
+
+    // Status bar message format.
+    "status_bar_message_fmt": "WakaTime active %I:%M %p"
 }
```
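The new `status_bar_message_fmt` setting is handed directly to `datetime.strftime` by the `set_status_bar` change in WakaTime.py above, so any strftime directives are valid. A quick sketch of what the default format renders:

```python
from datetime import datetime

# Default value from the settings diff above; %I:%M %p is 12-hour time.
status_bar_message_fmt = 'WakaTime active %I:%M %p'
print(datetime.now().strftime(status_bar_message_fmt))
# -> e.g. "WakaTime active 03:41 PM" (the time will vary)
```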
packages/wakatime/__about__.py

```diff
@@ -1,7 +1,7 @@
 __title__ = 'wakatime'
 __description__ = 'Common interface to the WakaTime api.'
 __url__ = 'https://github.com/wakatime/wakatime'
-__version_info__ = ('4', '0', '6')
+__version_info__ = ('4', '1', '0')
 __version__ = '.'.join(__version_info__)
 __author__ = 'Alan Hamlett'
 __author_email__ = 'alan@wakatime.com'
```
packages/wakatime/__init__.py

```diff
@@ -19,6 +19,7 @@ import re
 import sys
 import time
 import traceback
+import socket
 
 try:
     import ConfigParser as configparser
 except ImportError:
@@ -26,18 +27,17 @@ except ImportError:
 
 sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'packages'))
-sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'packages', 'requests', 'packages'))
 
 from .__about__ import __version__
 from .compat import u, open, is_py3
-from .log import setup_logging
+from .logger import setup_logging
 from .offlinequeue import Queue
-from .project import find_project
-from .stats import get_file_stats
 from .packages import argparse
 from .packages import simplejson as json
-from .packages import requests
 from .packages.requests.exceptions import RequestException
+from .project import get_project_info
+from .session_cache import SessionCache
+from .stats import get_file_stats
 try:
     from .packages import tzlocal
 except:
@@ -148,14 +148,21 @@ def parseArguments(argv):
                         type=float,
                         help='optional floating-point unix epoch timestamp; '+
                              'uses current time by default')
+    parser.add_argument('--lineno', dest='lineno',
+                        help='optional line number; current line being edited')
+    parser.add_argument('--cursorpos', dest='cursorpos',
+                        help='optional cursor position in the current file')
     parser.add_argument('--notfile', dest='notfile', action='store_true',
                         help='when set, will accept any value for the file. for example, '+
                              'a domain name or other item you want to log time towards.')
     parser.add_argument('--proxy', dest='proxy',
                         help='optional https proxy url; for example: '+
                              'https://user:pass@localhost:8080')
-    parser.add_argument('--project', dest='project_name',
-                        help='optional project name; will auto-discover by default')
+    parser.add_argument('--project', dest='project',
+                        help='optional project name')
+    parser.add_argument('--alternate-project', dest='alternate_project',
+                        help='optional alternate project name; auto-discovered project takes priority')
+    parser.add_argument('--hostname', dest='hostname', help='hostname of current machine.')
     parser.add_argument('--disableoffline', dest='offline',
                         action='store_false',
                         help='disables offline time logging instead of queuing logged time')
@@ -173,6 +180,8 @@ def parseArguments(argv):
                         help=argparse.SUPPRESS)
     parser.add_argument('--logfile', dest='logfile',
                         help='defaults to ~/.wakatime.log')
+    parser.add_argument('--apiurl', dest='api_url',
+                        help='heartbeats api url; for debugging with a local server')
     parser.add_argument('--config', dest='config',
                         help='defaults to ~/.wakatime.conf')
     parser.add_argument('--verbose', dest='verbose', action='store_true',
@@ -239,6 +248,8 @@ def parseArguments(argv):
        args.verbose = configs.getboolean('settings', 'debug')
     if not args.logfile and configs.has_option('settings', 'logfile'):
         args.logfile = configs.get('settings', 'logfile')
+    if not args.api_url and configs.has_option('settings', 'api_url'):
+        args.api_url = configs.get('settings', 'api_url')
 
     return args, configs
 
@@ -294,27 +305,36 @@ def get_user_agent(plugin):
     return user_agent
 
 
-def send_heartbeat(project=None, branch=None, stats={}, key=None, targetFile=None,
-                   timestamp=None, isWrite=None, plugin=None, offline=None,
-                   hidefilenames=None, notfile=False, proxy=None, **kwargs):
-    url = 'https://wakatime.com/api/v1/heartbeats'
-    log.debug('Sending heartbeat to api at %s' % url)
+def send_heartbeat(project=None, branch=None, hostname=None, stats={}, key=None, targetFile=None,
+                   timestamp=None, isWrite=None, plugin=None, offline=None, notfile=False,
+                   hidefilenames=None, proxy=None, api_url=None, **kwargs):
+    """Sends heartbeat as POST request to WakaTime api server.
+    """
+
+    if not api_url:
+        api_url = 'https://wakatime.com/api/v1/heartbeats'
+    log.debug('Sending heartbeat to api at %s' % api_url)
     data = {
         'time': timestamp,
-        'file': targetFile,
+        'entity': targetFile,
+        'type': 'file',
     }
     if hidefilenames and targetFile is not None and not notfile:
-        data['file'] = data['file'].rsplit('/', 1)[-1].rsplit('\\', 1)[-1]
-        if len(data['file'].strip('.').split('.', 1)) > 1:
-            data['file'] = u('HIDDEN.{ext}').format(ext=u(data['file'].strip('.').rsplit('.', 1)[-1]))
+        data['entity'] = data['entity'].rsplit('/', 1)[-1].rsplit('\\', 1)[-1]
+        if len(data['entity'].strip('.').split('.', 1)) > 1:
+            data['entity'] = u('HIDDEN.{ext}').format(ext=u(data['entity'].strip('.').rsplit('.', 1)[-1]))
         else:
-            data['file'] = u('HIDDEN')
+            data['entity'] = u('HIDDEN')
     if stats.get('lines'):
         data['lines'] = stats['lines']
     if stats.get('language'):
         data['language'] = stats['language']
     if stats.get('dependencies'):
         data['dependencies'] = stats['dependencies']
+    if stats.get('lineno'):
+        data['lineno'] = stats['lineno']
+    if stats.get('cursorpos'):
+        data['cursorpos'] = stats['cursorpos']
     if isWrite:
         data['is_write'] = isWrite
     if project:
@@ -333,6 +353,8 @@ def send_heartbeat(project=None, branch=None, stats={}, key=None, targetFile=Non
         'Accept': 'application/json',
         'Authorization': auth,
     }
+    if hostname:
+        headers['X-Machine-Name'] = hostname
     proxies = {}
     if proxy:
         proxies['https'] = proxy
@@ -345,10 +367,13 @@ def send_heartbeat(project=None, branch=None, stats={}, key=None, targetFile=Non
     if tz:
         headers['TimeZone'] = u(tz.zone)
 
+    session_cache = SessionCache()
+    session = session_cache.get()
+
     # log time to api
     response = None
     try:
-        response = requests.post(url, data=request_body, headers=headers,
+        response = session.post(api_url, data=request_body, headers=headers,
                                  proxies=proxies)
     except RequestException:
         exception_data = {
@@ -370,6 +395,7 @@ def send_heartbeat(project=None, branch=None, stats={}, key=None, targetFile=Non
         log.debug({
             'response_code': response_code,
         })
+        session_cache.save(session)
         return True
     if offline:
         if response_code != 400:
@@ -395,6 +421,7 @@ def send_heartbeat(project=None, branch=None, stats={}, key=None, targetFile=Non
             'response_code': response_code,
             'response_content': response_content,
         })
+    session_cache.delete()
     return False
 
 
@@ -417,40 +444,41 @@ def main(argv=None):
 
     if os.path.isfile(args.targetFile) or args.notfile:
 
-        stats = get_file_stats(args.targetFile, notfile=args.notfile)
+        stats = get_file_stats(args.targetFile, notfile=args.notfile,
+                               lineno=args.lineno, cursorpos=args.cursorpos)
 
-        project = None
+        project, branch = None, None
         if not args.notfile:
-            project = find_project(args.targetFile, configs=configs)
-        branch = None
-        project_name = args.project_name
-        if project:
-            branch = project.branch()
-            project_name = project.name()
+            project, branch = get_project_info(configs=configs, args=args)
 
-        if send_heartbeat(
-                project=project_name,
-                branch=branch,
-                stats=stats,
-                **vars(args)
-            ):
+        kwargs = vars(args)
+        kwargs['project'] = project
+        kwargs['branch'] = branch
+        kwargs['stats'] = stats
+        kwargs['hostname'] = args.hostname or socket.gethostname()
+
+        if send_heartbeat(**kwargs):
             queue = Queue()
             while True:
                 heartbeat = queue.pop()
                 if heartbeat is None:
                     break
-                sent = send_heartbeat(project=heartbeat['project'],
-                                      targetFile=heartbeat['file'],
-                                      timestamp=heartbeat['time'],
-                                      branch=heartbeat['branch'],
-                                      stats=json.loads(heartbeat['stats']),
-                                      key=args.key,
-                                      isWrite=heartbeat['is_write'],
-                                      plugin=heartbeat['plugin'],
-                                      offline=args.offline,
-                                      hidefilenames=args.hidefilenames,
-                                      notfile=args.notfile,
-                                      proxy=args.proxy)
+                sent = send_heartbeat(
+                    project=heartbeat['project'],
+                    targetFile=heartbeat['file'],
+                    timestamp=heartbeat['time'],
+                    branch=heartbeat['branch'],
+                    hostname=kwargs['hostname'],
+                    stats=json.loads(heartbeat['stats']),
+                    key=args.key,
+                    isWrite=heartbeat['is_write'],
+                    plugin=heartbeat['plugin'],
+                    offline=args.offline,
+                    hidefilenames=args.hidefilenames,
+                    notfile=args.notfile,
+                    proxy=args.proxy,
+                    api_url=args.api_url,
+                )
                 if not sent:
                     break
         return 0 # success
```
packages/wakatime/cli.py

```diff
@@ -11,8 +11,25 @@
 
 import os
 import sys
-sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
-import wakatime
+
+
+# get path to local wakatime package
+package_folder = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+
+# add local wakatime package to sys.path
+sys.path.insert(0, package_folder)
+
+# import local wakatime package
+try:
+    import wakatime
+except TypeError:
+    # on Windows, non-ASCII characters in import path can be fixed using
+    # the script path from sys.argv[0].
+    # More info at https://github.com/wakatime/wakatime/issues/32
+    package_folder = os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0])))
+    sys.path.insert(0, package_folder)
+    import wakatime
 
 
 if __name__ == '__main__':
     sys.exit(wakatime.main(sys.argv))
```
packages/wakatime/logger.py (renamed from log.py)

```diff
@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-
 """
-    wakatime.log
-    ~~~~~~~~~~~~
+    wakatime.logger
+    ~~~~~~~~~~~~~~~
 
     Provides the configured logger for writing JSON to the log file.
 
@@ -37,28 +37,34 @@ class CustomEncoder(json.JSONEncoder):
 
 class JsonFormatter(logging.Formatter):
 
-    def setup(self, timestamp, isWrite, targetFile, version, plugin):
+    def setup(self, timestamp, isWrite, targetFile, version, plugin, verbose,
+              warnings=False):
         self.timestamp = timestamp
         self.isWrite = isWrite
         self.targetFile = targetFile
         self.version = version
         self.plugin = plugin
+        self.verbose = verbose
+        self.warnings = warnings
 
-    def format(self, record):
+    def format(self, record, *args):
         data = OrderedDict([
             ('now', self.formatTime(record, self.datefmt)),
-            ('version', self.version),
-            ('plugin', self.plugin),
-            ('time', self.timestamp),
-            ('isWrite', self.isWrite),
-            ('file', self.targetFile),
-            ('level', record.levelname),
-            ('message', record.msg),
         ])
+        data['version'] = self.version
+        data['plugin'] = self.plugin
+        data['time'] = self.timestamp
+        if self.verbose:
+            data['caller'] = record.pathname
+            data['lineno'] = record.lineno
+        data['isWrite'] = self.isWrite
+        data['file'] = self.targetFile
+        if not self.isWrite:
+            del data['isWrite']
+        data['level'] = record.levelname
+        data['message'] = record.getMessage() if self.warnings else record.msg
         if not self.plugin:
             del data['plugin']
-        if not self.isWrite:
-            del data['isWrite']
         return CustomEncoder().encode(data)
 
     def formatException(self, exc_info):
@@ -83,6 +89,7 @@ def setup_logging(args, version):
         targetFile=args.targetFile,
         version=version,
         plugin=args.plugin,
+        verbose=args.verbose,
     )
     logger.handlers[0].setFormatter(formatter)
     return logger
@@ -97,7 +104,27 @@ def setup_logging(args, version):
         targetFile=args.targetFile,
         version=version,
         plugin=args.plugin,
+        verbose=args.verbose,
     )
     handler.setFormatter(formatter)
     logger.addHandler(handler)
+
+    warnings_formatter = JsonFormatter(datefmt='%Y/%m/%d %H:%M:%S %z')
+    warnings_formatter.setup(
+        timestamp=args.timestamp,
+        isWrite=args.isWrite,
+        targetFile=args.targetFile,
+        version=version,
+        plugin=args.plugin,
+        verbose=args.verbose,
+        warnings=True,
+    )
+    warnings_handler = logging.FileHandler(os.path.expanduser(logfile))
+    warnings_handler.setFormatter(warnings_formatter)
+    logging.getLogger('py.warnings').addHandler(warnings_handler)
+    try:
+        logging.captureWarnings(True)
+    except AttributeError:
+        pass  # Python >= 2.7 is needed to capture warnings
 
     return logger
```
packages/wakatime/offlinequeue.py

```diff
@@ -1,10 +1,9 @@
 # -*- coding: utf-8 -*-
 """
-    wakatime.queue
-    ~~~~~~~~~~~~~~
+    wakatime.offlinequeue
+    ~~~~~~~~~~~~~~~~~~~~~
 
-    Queue for offline time logging.
-    http://wakatime.com
+    Queue for saving heartbeats while offline.
 
     :copyright: (c) 2014 Alan Hamlett.
     :license: BSD, see LICENSE for more details.
@@ -51,7 +50,7 @@ class Queue(object):
         try:
             conn, c = self.connect()
             heartbeat = {
-                'file': data.get('file'),
+                'file': data.get('entity'),
                 'time': data.get('time'),
                 'project': data.get('project'),
                 'branch': data.get('branch'),
```
requests/__init__.py (requests 2.6.0 to 2.7.0)

```diff
@@ -6,7 +6,7 @@
 #  /
 
 """
-requests HTTP library
+Requests HTTP library
 ~~~~~~~~~~~~~~~~~~~~~
 
 Requests is an HTTP library, written in Python, for human beings. Basic GET
@@ -42,8 +42,8 @@ is at <http://python-requests.org>.
 """
 
 __title__ = 'requests'
-__version__ = '2.6.0'
-__build__ = 0x020503
+__version__ = '2.7.0'
+__build__ = 0x020700
 __author__ = 'Kenneth Reitz'
 __license__ = 'Apache 2.0'
 __copyright__ = 'Copyright 2015 Kenneth Reitz'
```
requests/adapters.py

```diff
@@ -35,6 +35,7 @@ from .auth import _basic_auth_str
 DEFAULT_POOLBLOCK = False
 DEFAULT_POOLSIZE = 10
 DEFAULT_RETRIES = 0
+DEFAULT_POOL_TIMEOUT = None
 
 
 class BaseAdapter(object):
@@ -375,7 +376,7 @@ class HTTPAdapter(BaseAdapter):
             if hasattr(conn, 'proxy_pool'):
                 conn = conn.proxy_pool
 
-            low_conn = conn._get_conn(timeout=timeout)
+            low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
 
             try:
                 low_conn.putrequest(request.method,
@@ -407,9 +408,6 @@ class HTTPAdapter(BaseAdapter):
                     # Then, reraise so that we can handle the actual exception.
                     low_conn.close()
                     raise
-                else:
-                    # All is well, return the connection to the pool.
-                    conn._put_conn(low_conn)
 
         except (ProtocolError, socket.error) as err:
             raise ConnectionError(err, request=request)
```
requests/api.py

```diff
@@ -55,17 +55,18 @@ def request(method, url, **kwargs):
     return response
 
 
-def get(url, **kwargs):
+def get(url, params=None, **kwargs):
     """Sends a GET request.
 
     :param url: URL for the new :class:`Request` object.
+    :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
     :param \*\*kwargs: Optional arguments that ``request`` takes.
     :return: :class:`Response <Response>` object
     :rtype: requests.Response
     """
 
     kwargs.setdefault('allow_redirects', True)
-    return request('get', url, **kwargs)
+    return request('get', url, params=params, **kwargs)
 
 
 def options(url, **kwargs):
```
requests/auth.py

```diff
@@ -103,7 +103,8 @@ class HTTPDigestAuth(AuthBase):
         # XXX not implemented yet
         entdig = None
         p_parsed = urlparse(url)
-        path = p_parsed.path
+        #: path is request-uri defined in RFC 2616 which should not be empty
+        path = p_parsed.path or "/"
         if p_parsed.query:
             path += '?' + p_parsed.query
 
@@ -178,7 +179,7 @@ class HTTPDigestAuth(AuthBase):
             # Consume content and release the original connection
             # to allow our new request to reuse the same one.
             r.content
-            r.raw.release_conn()
+            r.close()
             prep = r.request.copy()
             extract_cookies_to_jar(prep._cookies, r.request, r.raw)
             prep.prepare_cookies(prep._cookies)
```
requests/cookies.py

```diff
@@ -6,6 +6,7 @@ Compatibility code to be able to use `cookielib.CookieJar` with requests.
 requests.utils imports from here, so be careful with imports.
 """
 
+import copy
 import time
 import collections
 from .compat import cookielib, urlparse, urlunparse, Morsel
@@ -302,7 +303,7 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
         """Updates this jar with cookies from another CookieJar or dict-like"""
         if isinstance(other, cookielib.CookieJar):
             for cookie in other:
-                self.set_cookie(cookie)
+                self.set_cookie(copy.copy(cookie))
         else:
             super(RequestsCookieJar, self).update(other)
 
@@ -359,6 +360,21 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
         return new_cj
 
 
+def _copy_cookie_jar(jar):
+    if jar is None:
+        return None
+
+    if hasattr(jar, 'copy'):
+        # We're dealing with an instance of RequestsCookieJar
+        return jar.copy()
+
+    # We're dealing with a generic CookieJar instance
+    new_jar = copy.copy(jar)
+    new_jar.clear()
+    for cookie in jar:
+        new_jar.set_cookie(copy.copy(cookie))
+    return new_jar
+
+
 def create_cookie(name, value, **kwargs):
     """Make a cookie from underspecified parameters.
 
@@ -399,11 +415,14 @@ def morsel_to_cookie(morsel):
 
     expires = None
     if morsel['max-age']:
-        expires = time.time() + morsel['max-age']
+        try:
+            expires = int(time.time() + int(morsel['max-age']))
+        except ValueError:
+            raise TypeError('max-age: %s must be integer' % morsel['max-age'])
     elif morsel['expires']:
         time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
-        expires = time.mktime(
-            time.strptime(morsel['expires'], time_template)) - time.timezone
+        expires = int(time.mktime(
+            time.strptime(morsel['expires'], time_template)) - time.timezone)
     return create_cookie(
         comment=morsel['comment'],
         comment_url=bool(morsel['comment']),
```
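The `morsel_to_cookie` fix above coerces a cookie's `max-age` to an integer and turns junk values into an immediate `TypeError` instead of a later, harder-to-trace failure. A minimal illustration of the changed branch, extracted into a standalone helper for clarity:

```python
import time


def expires_from_max_age(max_age):
    # Mirrors the fixed max-age branch of morsel_to_cookie in the diff above.
    try:
        return int(time.time() + int(max_age))
    except ValueError:
        raise TypeError('max-age: %s must be integer' % max_age)


print(expires_from_max_age('3600'))   # epoch seconds one hour from now
try:
    expires_from_max_age('soon')
except TypeError as err:
    print(err)  # max-age: soon must be integer
```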
requests/models.py

```diff
@@ -15,7 +15,7 @@ from .hooks import default_hooks
 from .structures import CaseInsensitiveDict
 
 from .auth import HTTPBasicAuth
-from .cookies import cookiejar_from_dict, get_cookie_header
+from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
 from .packages.urllib3.fields import RequestField
 from .packages.urllib3.filepost import encode_multipart_formdata
 from .packages.urllib3.util import parse_url
@@ -30,7 +30,8 @@ from .utils import (
     iter_slices, guess_json_utf, super_len, to_native_string)
 from .compat import (
     cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO,
-    is_py2, chardet, json, builtin_str, basestring)
+    is_py2, chardet, builtin_str, basestring)
+from .compat import json as complexjson
 from .status_codes import codes
 
 #: The set of HTTP status codes that indicate an automatically
@@ -42,12 +43,11 @@ REDIRECT_STATI = (
     codes.temporary_redirect, # 307
     codes.permanent_redirect, # 308
 )
+
 DEFAULT_REDIRECT_LIMIT = 30
 CONTENT_CHUNK_SIZE = 10 * 1024
 ITER_CHUNK_SIZE = 512
 
-json_dumps = json.dumps
-
 
 class RequestEncodingMixin(object):
     @property
@@ -149,8 +149,7 @@ class RequestEncodingMixin(object):
             else:
                 fdata = fp.read()
 
-            rf = RequestField(name=k, data=fdata,
-                              filename=fn, headers=fh)
+            rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
             rf.make_multipart(content_type=ft)
             new_fields.append(rf)
 
@@ -207,17 +206,8 @@ class Request(RequestHooksMixin):
       <PreparedRequest [GET]>
 
     """
-    def __init__(self,
-        method=None,
-        url=None,
-        headers=None,
-        files=None,
-        data=None,
-        params=None,
-        auth=None,
-        cookies=None,
-        hooks=None,
-        json=None):
+    def __init__(self, method=None, url=None, headers=None, files=None,
+            data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
 
         # Default empty dicts for dict params.
         data = [] if data is None else data
@@ -296,8 +286,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
         self.hooks = default_hooks()
 
     def prepare(self, method=None, url=None, headers=None, files=None,
-                data=None, params=None, auth=None, cookies=None, hooks=None,
-                json=None):
+                data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
         """Prepares the entire request with the given parameters."""
 
         self.prepare_method(method)
@@ -306,6 +295,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
         self.prepare_cookies(cookies)
         self.prepare_body(data, files, json)
         self.prepare_auth(auth, url)
+
         # Note that prepare_auth must be last to enable authentication schemes
         # such as OAuth to work on a fully prepared request.
 
@@ -320,7 +310,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
         p.method = self.method
         p.url = self.url
         p.headers = self.headers.copy() if self.headers is not None else None
-        p._cookies = self._cookies.copy() if self._cookies is not None else None
+        p._cookies = _copy_cookie_jar(self._cookies)
         p.body = self.body
         p.hooks = self.hooks
         return p
@@ -357,8 +347,10 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
             raise InvalidURL(*e.args)
 
         if not scheme:
-            raise MissingSchema("Invalid URL {0!r}: No schema supplied. "
-                                "Perhaps you meant http://{0}?".format(url))
+            error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?")
+            error = error.format(to_native_string(url, 'utf8'))
+
+            raise MissingSchema(error)
 
         if not host:
             raise InvalidURL("Invalid URL %r: No host supplied" % url)
@@ -424,7 +416,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
 
         if json is not None:
             content_type = 'application/json'
-            body = json_dumps(json)
+            body = complexjson.dumps(json)
 
         is_stream = all([
             hasattr(data, '__iter__'),
@@ -501,7 +493,15 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
         self.prepare_content_length(self.body)
 
     def prepare_cookies(self, cookies):
-        """Prepares the given HTTP cookie data."""
+        """Prepares the given HTTP cookie data.
+
+        This function eventually generates a ``Cookie`` header from the
+        given cookies using cookielib. Due to cookielib's design, the header
+        will not be regenerated if it already exists, meaning this function
+        can only be called once for the life of the
+        :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
+        to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
+        header is removed beforehand."""
 
         if isinstance(cookies, cookielib.CookieJar):
             self._cookies = cookies
@@ -514,6 +514,10 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
 
     def prepare_hooks(self, hooks):
         """Prepares the given hooks."""
+        # hooks can be passed as None to the prepare method and to this
+        # method. To prevent iterating over None, simply use an empty list
+        # if hooks is False-y
+        hooks = hooks or []
         for event in hooks:
             self.register_hook(event, hooks[event])
 
@@ -524,16 +528,8 @@ class Response(object):
     """
 
     __attrs__ = [
-        '_content',
-        'status_code',
-        'headers',
-        'url',
-        'history',
-        'encoding',
-        'reason',
-        'cookies',
-        'elapsed',
-        'request',
+        '_content', 'status_code', 'headers', 'url', 'history',
+        'encoding', 'reason', 'cookies', 'elapsed', 'request'
     ]
 
     def __init__(self):
@@ -653,9 +649,10 @@ class Response(object):
         If decode_unicode is True, content will be decoded using the best
         available encoding based on the response.
         """
+
         def generate():
-            try:
-                # Special case for urllib3.
+            # Special case for urllib3.
+            if hasattr(self.raw, 'stream'):
                 try:
                     for chunk in self.raw.stream(chunk_size, decode_content=True):
                         yield chunk
@@ -665,7 +662,7 @@ class Response(object):
                     raise ContentDecodingError(e)
                 except ReadTimeoutError as e:
                     raise ConnectionError(e)
-            except AttributeError:
+            else:
                 # Standard file-like object.
                 while True:
                     chunk = self.raw.read(chunk_size)
@@ -796,14 +793,16 @@ class Response(object):
             encoding = guess_json_utf(self.content)
             if encoding is not None:
                 try:
-                    return json.loads(self.content.decode(encoding), **kwargs)
+                    return complexjson.loads(
+                        self.content.decode(encoding), **kwargs
+                    )
                 except UnicodeDecodeError:
                     # Wrong UTF codec detected; usually because it's not UTF-8
                     # but some other 8-bit codec. This is an RFC violation,
                     # and the server didn't bother to tell us what codec *was*
                     # used.
                     pass
-        return json.loads(self.text, **kwargs)
+        return complexjson.loads(self.text, **kwargs)
 
     @property
     def links(self):
@@ -829,10 +828,10 @@ class Response(object):
         http_error_msg = ''
 
         if 400 <= self.status_code < 500:
-            http_error_msg = '%s Client Error: %s' % (self.status_code, self.reason)
+            http_error_msg = '%s Client Error: %s for url: %s' % (self.status_code, self.reason, self.url)
 
         elif 500 <= self.status_code < 600:
-            http_error_msg = '%s Server Error: %s' % (self.status_code, self.reason)
+            http_error_msg = '%s Server Error: %s for url: %s' % (self.status_code, self.reason, self.url)
 
         if http_error_msg:
             raise HTTPError(http_error_msg, response=self)
@@ -843,4 +842,7 @@ class Response(object):
 
         *Note: Should not normally need to be called explicitly.*
         """
+        if not self._content_consumed:
+            return self.raw.close()
+
         return self.raw.release_conn()
```
requests/packages/__init__.py

```diff
@@ -1,107 +1,3 @@
-"""
-Copyright (c) Donald Stufft, pip, and individual contributors
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-"""
 from __future__ import absolute_import
 
-import sys
-
-
-class VendorAlias(object):
-
-    def __init__(self, package_names):
-        self._package_names = package_names
-        self._vendor_name = __name__
-        self._vendor_pkg = self._vendor_name + "."
-        self._vendor_pkgs = [
-            self._vendor_pkg + name for name in self._package_names
-        ]
-
-    def find_module(self, fullname, path=None):
-        if fullname.startswith(self._vendor_pkg):
-            return self
-
-    def load_module(self, name):
-        # Ensure that this only works for the vendored name
-        if not name.startswith(self._vendor_pkg):
-            raise ImportError(
-                "Cannot import %s, must be a subpackage of '%s'." % (
-                    name, self._vendor_name,
-                )
-            )
-
-        if not (name == self._vendor_name or
-                any(name.startswith(pkg) for pkg in self._vendor_pkgs)):
-            raise ImportError(
-                "Cannot import %s, must be one of %s." % (
-                    name, self._vendor_pkgs
-                )
-            )
-
-        # Check to see if we already have this item in sys.modules, if we do
-        # then simply return that.
-        if name in sys.modules:
-            return sys.modules[name]
-
-        # Check to see if we can import the vendor name
-        try:
-            # We do this dance here because we want to try and import this
-            # module without hitting a recursion error because of a bunch of
-            # VendorAlias instances on sys.meta_path
-            real_meta_path = sys.meta_path[:]
-            try:
-                sys.meta_path = [
-                    m for m in sys.meta_path
-                    if not isinstance(m, VendorAlias)
-                ]
-                __import__(name)
-                module = sys.modules[name]
-            finally:
-                # Re-add any additions to sys.meta_path that were made while
-                # during the import we just did, otherwise things like
-                # requests.packages.urllib3.poolmanager will fail.
-                for m in sys.meta_path:
-                    if m not in real_meta_path:
-                        real_meta_path.append(m)
-
-                # Restore sys.meta_path with any new items.
-                sys.meta_path = real_meta_path
-        except ImportError:
-            # We can't import the vendor name, so we'll try to import the
-            # "real" name.
-            real_name = name[len(self._vendor_pkg):]
-            try:
-                __import__(real_name)
-                module = sys.modules[real_name]
-            except ImportError:
-                raise ImportError("No module named '%s'" % (name,))
-
-        # If we've gotten here we've found the module we're looking for, either
-        # as part of our vendored package, or as the real name, so we'll add
-        # it to sys.modules as the vendored name so that we don't have to do
-        # the lookup again.
-        sys.modules[name] = module
-
-        # Finally, return the loaded module
-        return module
-
-
-sys.meta_path.append(VendorAlias(["urllib3", "chardet"]))
+from . import urllib3
```
requests/packages/urllib3/__init__.py (urllib3 1.10.2 to 1.10.4)

```diff
@@ -4,7 +4,7 @@ urllib3 - Thread-safe connection pooling and re-using.
 
 __author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
 __license__ = 'MIT'
-__version__ = '1.10.2'
+__version__ = '1.10.4'
 
 
 from .connectionpool import (
@@ -55,9 +55,12 @@ def add_stderr_logger(level=logging.DEBUG):
 del NullHandler
 
 
-# Set security warning to always go off by default.
 import warnings
-warnings.simplefilter('always', exceptions.SecurityWarning)
+# SecurityWarning's always go off by default.
+warnings.simplefilter('always', exceptions.SecurityWarning, append=True)
+# InsecurePlatformWarning's don't vary between requests, so we keep it default.
+warnings.simplefilter('default', exceptions.InsecurePlatformWarning,
+                      append=True)
 
 def disable_warnings(category=exceptions.HTTPWarning):
     """
```
requests/packages/urllib3/_collections.py

```diff
@@ -227,20 +227,20 @@ class HTTPHeaderDict(dict):
             # Need to convert the tuple to list for further extension
             _dict_setitem(self, key_lower, [vals[0], vals[1], val])
 
-    def extend(*args, **kwargs):
+    def extend(self, *args, **kwargs):
         """Generic import function for any type of header-like object.
         Adapted version of MutableMapping.update in order to insert items
         with self.add instead of self.__setitem__
         """
-        if len(args) > 2:
-            raise TypeError("update() takes at most 2 positional "
+        if len(args) > 1:
+            raise TypeError("extend() takes at most 1 positional "
                             "arguments ({} given)".format(len(args)))
-        elif not args:
-            raise TypeError("update() takes at least 1 argument (0 given)")
-        self = args[0]
-        other = args[1] if len(args) >= 2 else ()
+        other = args[0] if len(args) >= 1 else ()
 
-        if isinstance(other, Mapping):
+        if isinstance(other, HTTPHeaderDict):
+            for key, val in other.iteritems():
+                self.add(key, val)
+        elif isinstance(other, Mapping):
             for key in other:
                 self.add(key, other[key])
         elif hasattr(other, "keys"):
@@ -304,17 +304,20 @@ class HTTPHeaderDict(dict):
         return list(self.iteritems())
 
     @classmethod
-    def from_httplib(cls, message, duplicates=('set-cookie',)): # Python 2
+    def from_httplib(cls, message): # Python 2
         """Read headers from a Python 2 httplib message object."""
-        ret = cls(message.items())
-        # ret now contains only the last header line for each duplicate.
-        # Importing with all duplicates would be nice, but this would
-        # mean to repeat most of the raw parsing already done, when the
-        # message object was created. Extracting only the headers of interest
-        # separately, the cookies, should be faster and requires less
-        # extra code.
-        for key in duplicates:
-            ret.discard(key)
-            for val in message.getheaders(key):
-                ret.add(key, val)
-        return ret
+        # python2.7 does not expose a proper API for exporting multiheaders
+        # efficiently. This function re-reads raw lines from the message
+        # object and extracts the multiheaders properly.
+        headers = []
+
+        for line in message.headers:
+            if line.startswith((' ', '\t')):
+                key, value = headers[-1]
+                headers[-1] = (key, value + '\r\n' + line.rstrip())
+                continue
+
+            key, value = line.split(':', 1)
+            headers.append((key, value.strip()))
+
+        return cls(headers)
```
requests/packages/urllib3/connection.py

```diff
@@ -260,3 +260,5 @@ if ssl:
     # Make a copy for testing.
     UnverifiedHTTPSConnection = HTTPSConnection
     HTTPSConnection = VerifiedHTTPSConnection
+else:
+    HTTPSConnection = DummyConnection
```
requests/packages/urllib3/connectionpool.py

```diff
@@ -735,7 +735,6 @@ class HTTPSConnectionPool(HTTPConnectionPool):
                  % (self.num_connections, self.host))
 
        if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
-            # Platform-specific: Python without ssl
            raise SSLError("Can't connect to HTTPS URL because the SSL "
                           "module is not available.")
```
requests/packages/urllib3/contrib/pyopenssl.py

```diff
@@ -38,8 +38,6 @@ Module Variables
 ----------------
 
 :var DEFAULT_SSL_CIPHER_LIST: The list of supported SSL/TLS cipher suites.
-    Default: ``ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:
-    ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:!aNULL:!MD5:!DSS``
 
 .. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
 .. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
@@ -85,22 +83,7 @@ _openssl_verify = {
     + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
 }
 
-# A secure default.
-# Sources for more information on TLS ciphers:
-#
-# - https://wiki.mozilla.org/Security/Server_Side_TLS
-# - https://www.ssllabs.com/projects/best-practices/index.html
-# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
-#
-# The general intent is:
-# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
-# - prefer ECDHE over DHE for better performance,
-# - prefer any AES-GCM over any AES-CBC for better performance and security,
-# - use 3DES as fallback which is secure but slow,
-# - disable NULL authentication, MD5 MACs and DSS for security reasons.
-DEFAULT_SSL_CIPHER_LIST = "ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:" + \
-        "ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:" + \
-        "!aNULL:!MD5:!DSS"
+DEFAULT_SSL_CIPHER_LIST = util.ssl_.DEFAULT_CIPHERS
 
 
 orig_util_HAS_SNI = util.HAS_SNI
@@ -299,7 +282,9 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
     try:
         cnx.do_handshake()
     except OpenSSL.SSL.WantReadError:
-        select.select([sock], [], [])
+        rd, _, _ = select.select([sock], [], [], sock.gettimeout())
+        if not rd:
+            raise timeout('select timed out')
         continue
     except OpenSSL.SSL.Error as e:
         raise ssl.SSLError('bad handshake', e)
```
requests/packages/urllib3/exceptions.py

```diff
@@ -162,3 +162,8 @@ class SystemTimeWarning(SecurityWarning):
 class InsecurePlatformWarning(SecurityWarning):
     "Warned when certain SSL configuration is not available on a platform."
     pass
+
+
+class ResponseNotChunked(ProtocolError, ValueError):
+    "Response needs to be chunked in order to read it as chunks."
+    pass
```
@ -1,9 +1,15 @@
|
||||
try:
|
||||
import http.client as httplib
|
||||
except ImportError:
|
||||
import httplib
|
||||
import zlib
|
||||
import io
|
||||
from socket import timeout as SocketTimeout
|
||||
|
||||
from ._collections import HTTPHeaderDict
|
||||
from .exceptions import ProtocolError, DecodeError, ReadTimeoutError
|
||||
from .exceptions import (
|
||||
ProtocolError, DecodeError, ReadTimeoutError, ResponseNotChunked
|
||||
)
|
||||
from .packages.six import string_types as basestring, binary_type, PY3
|
||||
from .connection import HTTPException, BaseSSLError
|
||||
from .util.response import is_fp_closed
|
||||
@ -117,7 +123,17 @@ class HTTPResponse(io.IOBase):
|
||||
if hasattr(body, 'read'):
|
||||
self._fp = body
|
||||
|
||||
if preload_content and not self._body:
|
||||
# Are we using the chunked-style of transfer encoding?
|
||||
self.chunked = False
|
||||
self.chunk_left = None
|
||||
tr_enc = self.headers.get('transfer-encoding', '').lower()
|
||||
# Don't incur the penalty of creating a list and then discarding it
|
||||
encodings = (enc.strip() for enc in tr_enc.split(","))
|
||||
if "chunked" in encodings:
|
||||
self.chunked = True
|
||||
|
||||
# We certainly don't want to preload content when the response is chunked.
|
||||
if not self.chunked and preload_content and not self._body:
|
||||
self._body = self.read(decode_content=decode_content)
|
||||
|
||||
def get_redirect_location(self):
|
||||
@ -157,6 +173,35 @@ class HTTPResponse(io.IOBase):
|
||||
"""
|
||||
return self._fp_bytes_read
|
||||
|
||||
def _init_decoder(self):
|
||||
"""
|
||||
Set-up the _decoder attribute if necessar.
|
||||
"""
|
||||
# Note: content-encoding value should be case-insensitive, per RFC 7230
|
||||
# Section 3.2
|
||||
content_encoding = self.headers.get('content-encoding', '').lower()
|
||||
if self._decoder is None and content_encoding in self.CONTENT_DECODERS:
|
||||
self._decoder = _get_decoder(content_encoding)
|
||||
|
||||
def _decode(self, data, decode_content, flush_decoder):
|
||||
"""
|
||||
Decode the data passed in and potentially flush the decoder.
|
||||
"""
|
||||
try:
|
||||
if decode_content and self._decoder:
|
||||
data = self._decoder.decompress(data)
|
||||
except (IOError, zlib.error) as e:
|
||||
content_encoding = self.headers.get('content-encoding', '').lower()
|
||||
raise DecodeError(
|
||||
"Received response with content-encoding: %s, but "
|
||||
"failed to decode it." % content_encoding, e)
|
||||
|
||||
if flush_decoder and decode_content and self._decoder:
|
||||
buf = self._decoder.decompress(binary_type())
|
||||
data += buf + self._decoder.flush()
|
||||
|
||||
return data
|
||||
|
||||
def read(self, amt=None, decode_content=None, cache_content=False):
|
||||
"""
|
||||
Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
|
||||
@ -178,12 +223,7 @@ class HTTPResponse(io.IOBase):
|
||||
after having ``.read()`` the file object. (Overridden if ``amt`` is
|
||||
set.)
|
||||
"""
|
||||
# Note: content-encoding value should be case-insensitive, per RFC 7230
|
||||
# Section 3.2
|
||||
content_encoding = self.headers.get('content-encoding', '').lower()
|
||||
if self._decoder is None:
|
||||
if content_encoding in self.CONTENT_DECODERS:
|
||||
self._decoder = _get_decoder(content_encoding)
|
||||
self._init_decoder()
|
||||
if decode_content is None:
|
||||
decode_content = self.decode_content
|
||||
|
||||
@ -232,17 +272,7 @@ class HTTPResponse(io.IOBase):
|
||||
|
||||
self._fp_bytes_read += len(data)
|
||||
|
||||
try:
|
||||
if decode_content and self._decoder:
|
||||
data = self._decoder.decompress(data)
|
||||
except (IOError, zlib.error) as e:
|
||||
raise DecodeError(
|
||||
"Received response with content-encoding: %s, but "
|
||||
"failed to decode it." % content_encoding, e)
|
||||
|
||||
if flush_decoder and decode_content and self._decoder:
|
||||
buf = self._decoder.decompress(binary_type())
|
||||
data += buf + self._decoder.flush()
|
||||
data = self._decode(data, decode_content, flush_decoder)
|
||||
|
||||
if cache_content:
|
||||
self._body = data
|
||||
@ -269,11 +299,15 @@ class HTTPResponse(io.IOBase):
|
||||
If True, will attempt to decode the body based on the
|
||||
'content-encoding' header.
|
||||
"""
|
||||
while not is_fp_closed(self._fp):
|
||||
data = self.read(amt=amt, decode_content=decode_content)
|
||||
if self.chunked:
|
||||
for line in self.read_chunked(amt, decode_content=decode_content):
|
||||
yield line
|
||||
else:
|
||||
while not is_fp_closed(self._fp):
|
||||
data = self.read(amt=amt, decode_content=decode_content)
|
||||
|
||||
if data:
|
||||
yield data
|
||||
if data:
|
||||
yield data
|
||||
|
||||
@classmethod
|
||||
def from_httplib(ResponseCls, r, **response_kw):
|
||||
@@ -351,3 +385,82 @@ class HTTPResponse(io.IOBase):
         else:
             b[:len(temp)] = temp
             return len(temp)
+
+    def _update_chunk_length(self):
+        # First, we'll figure out length of a chunk and then
+        # we'll try to read it from socket.
+        if self.chunk_left is not None:
+            return
+        line = self._fp.fp.readline()
+        line = line.split(b';', 1)[0]
+        try:
+            self.chunk_left = int(line, 16)
+        except ValueError:
+            # Invalid chunked protocol response, abort.
+            self.close()
+            raise httplib.IncompleteRead(line)
+
+    def _handle_chunk(self, amt):
+        returned_chunk = None
+        if amt is None:
+            chunk = self._fp._safe_read(self.chunk_left)
+            returned_chunk = chunk
+            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
+            self.chunk_left = None
+        elif amt < self.chunk_left:
+            value = self._fp._safe_read(amt)
+            self.chunk_left = self.chunk_left - amt
+            returned_chunk = value
+        elif amt == self.chunk_left:
+            value = self._fp._safe_read(amt)
+            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
+            self.chunk_left = None
+            returned_chunk = value
+        else:  # amt > self.chunk_left
+            returned_chunk = self._fp._safe_read(self.chunk_left)
+            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
+            self.chunk_left = None
+        return returned_chunk
+
+    def read_chunked(self, amt=None, decode_content=None):
+        """
+        Similar to :meth:`HTTPResponse.read`, but with an additional
+        parameter: ``decode_content``.
+
+        :param decode_content:
+            If True, will attempt to decode the body based on the
+            'content-encoding' header.
+        """
+        self._init_decoder()
+        # FIXME: Rewrite this method and make it a class with a better structured logic.
+        if not self.chunked:
+            raise ResponseNotChunked("Response is not chunked. "
+                "Header 'transfer-encoding: chunked' is missing.")
+
+        if self._original_response and self._original_response._method.upper() == 'HEAD':
+            # Don't bother reading the body of a HEAD request.
+            # FIXME: Can we do this somehow without accessing private httplib _method?
+            self._original_response.close()
+            return
+
+        while True:
+            self._update_chunk_length()
+            if self.chunk_left == 0:
+                break
+            chunk = self._handle_chunk(amt)
+            yield self._decode(chunk, decode_content=decode_content,
+                               flush_decoder=True)
+
+        # Chunk content ends with \r\n: discard it.
+        while True:
+            line = self._fp.fp.readline()
+            if not line:
+                # Some sites may not end with '\r\n'.
+                break
+            if line == b'\r\n':
+                break
+
+        # We read everything; close the "file".
+        if self._original_response:
+            self._original_response.close()
+        self.release_conn()
@@ -9,10 +9,10 @@ HAS_SNI = False
 create_default_context = None

 import errno
-import ssl
 import warnings

 try:  # Test for SSL features
+    import ssl
     from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23
     from ssl import HAS_SNI  # Has SNI?
 except ImportError:
@@ -25,14 +25,24 @@ except ImportError:
     OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
     OP_NO_COMPRESSION = 0x20000

-try:
-    from ssl import _DEFAULT_CIPHERS
-except ImportError:
-    _DEFAULT_CIPHERS = (
-        'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:'
-        'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:'
-        '!eNULL:!MD5'
-    )
+# A secure default.
+# Sources for more information on TLS ciphers:
+#
+# - https://wiki.mozilla.org/Security/Server_Side_TLS
+# - https://www.ssllabs.com/projects/best-practices/index.html
+# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
+#
+# The general intent is:
+# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
+# - prefer ECDHE over DHE for better performance,
+# - prefer any AES-GCM over any AES-CBC for better performance and security,
+# - use 3DES as fallback which is secure but slow,
+# - disable NULL authentication, MD5 MACs and DSS for security reasons.
+DEFAULT_CIPHERS = (
+    'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:'
+    'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:'
+    '!eNULL:!MD5'
+)

 try:
     from ssl import SSLContext  # Modern SSL?
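DEFAULT_CIPHERS is now defined unconditionally instead of falling back from the private ssl._DEFAULT_CIPHERS. A hedged sketch of applying the same hardening with the stdlib ssl module (assumes Python 2.7.9+/3.2+, where ssl.SSLContext and the OP_NO_* flags exist):

import ssl

ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
ctx.options |= ssl.OP_NO_SSLv2   # SSLv2 is broken
ctx.options |= ssl.OP_NO_SSLv3   # SSLv3 is broken (POODLE)
ctx.set_ciphers(
    'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:'
    'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:'
    '!eNULL:!MD5'
)
ctx.verify_mode = ssl.CERT_REQUIRED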
@@ -40,7 +50,8 @@ except ImportError:
     import sys

     class SSLContext(object):  # Platform-specific: Python 2 & 3.1
-        supports_set_ciphers = sys.version_info >= (2, 7)
+        supports_set_ciphers = ((2, 7) <= sys.version_info < (3,) or
+                                (3, 2) <= sys.version_info)

         def __init__(self, protocol_version):
             self.protocol = protocol_version
@@ -167,7 +178,7 @@ def resolve_ssl_version(candidate):
     return candidate


-def create_urllib3_context(ssl_version=None, cert_reqs=ssl.CERT_REQUIRED,
+def create_urllib3_context(ssl_version=None, cert_reqs=None,
                            options=None, ciphers=None):
     """All arguments have the same meaning as ``ssl_wrap_socket``.

@@ -204,6 +215,9 @@ def create_urllib3_context(ssl_version=None, cert_reqs=ssl.CERT_REQUIRED,
     """
     context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23)

+    # Setting the default here, as we may have no ssl module on import
+    cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs
+
     if options is None:
         options = 0
         # SSLv2 is easily broken and is considered harmful and dangerous
@@ -217,7 +231,7 @@ def create_urllib3_context(ssl_version=None, cert_reqs=ssl.CERT_REQUIRED,
     context.options |= options

     if getattr(context, 'supports_set_ciphers', True):  # Platform-specific: Python 2.6
-        context.set_ciphers(ciphers or _DEFAULT_CIPHERS)
+        context.set_ciphers(ciphers or DEFAULT_CIPHERS)

     context.verify_mode = cert_reqs
     if getattr(context, 'check_hostname', None) is not None:  # Platform-specific: Python 3.2
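Moving the CERT_REQUIRED default inside the function means merely importing this module no longer touches ssl attributes, so platforms without an ssl module can still import it. A hedged usage sketch; the import path matches urllib3's util package but may differ in a vendored copy:

import ssl
from urllib3.util.ssl_ import create_urllib3_context

ctx = create_urllib3_context()                          # cert_reqs resolves to CERT_REQUIRED
assert ctx.verify_mode == ssl.CERT_REQUIRED
lax = create_urllib3_context(cert_reqs=ssl.CERT_NONE)   # an explicit opt-out still wins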
@@ -15,6 +15,8 @@ class Url(namedtuple('Url', url_attrs)):

     def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None,
                 query=None, fragment=None):
+        if path and not path.startswith('/'):
+            path = '/' + path
         return super(Url, cls).__new__(cls, scheme, auth, host, port, path,
                                        query, fragment)

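A hedged illustration of the new path normalization (import path may differ in the vendored copy): a relative path now gains a leading slash at construction time.

from urllib3.util.url import Url

assert Url(scheme='http', host='example.com', path='foo').path == '/foo'
assert Url(scheme='http', host='example.com', path='/bar').path == '/bar'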
@@ -90,7 +90,7 @@ def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):

 class SessionRedirectMixin(object):
     def resolve_redirects(self, resp, req, stream=False, timeout=None,
-                          verify=True, cert=None, proxies=None):
+                          verify=True, cert=None, proxies=None, **adapter_kwargs):
         """Receives a Response. Returns a generator of Responses."""

         i = 0
@@ -193,6 +193,7 @@ class SessionRedirectMixin(object):
                 cert=cert,
                 proxies=proxies,
                 allow_redirects=False,
+                **adapter_kwargs
             )

             extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)
@@ -560,10 +561,6 @@ class Session(SessionRedirectMixin):
         # Set up variables needed for resolve_redirects and dispatching of hooks
         allow_redirects = kwargs.pop('allow_redirects', True)
         stream = kwargs.get('stream')
-        timeout = kwargs.get('timeout')
-        verify = kwargs.get('verify')
-        cert = kwargs.get('cert')
-        proxies = kwargs.get('proxies')
         hooks = request.hooks

         # Get the appropriate adapter to use
@@ -591,12 +588,7 @@ class Session(SessionRedirectMixin):
         extract_cookies_to_jar(self.cookies, request, r.raw)

         # Redirect resolving generator.
-        gen = self.resolve_redirects(r, request,
-                                     stream=stream,
-                                     timeout=timeout,
-                                     verify=verify,
-                                     cert=cert,
-                                     proxies=proxies)
+        gen = self.resolve_redirects(r, request, **kwargs)

         # Resolve redirects if allowed.
         history = [resp for resp in gen] if allow_redirects else []
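The net effect: Session.send() now forwards its remaining keyword arguments wholesale, so every redirected request sees the same timeout/verify/cert/proxies as the first one instead of a hand-repacked subset. A self-contained sketch of that forwarding pattern (names are illustrative, not the requests API):

def fetch(url, **adapter_kwargs):
    # Stand-in for a single adapter send; records what it was called with.
    return {'url': url, 'kwargs': adapter_kwargs}

def resolve_redirects(history, **adapter_kwargs):
    # Every hop re-issues the request with the same **adapter_kwargs.
    return [fetch(url, **adapter_kwargs) for url in history]

responses = resolve_redirects(['http://a/', 'http://b/'], timeout=5, verify=False)
assert all(r['kwargs'] == {'timeout': 5, 'verify': False} for r in responses)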
@@ -67,7 +67,7 @@ def super_len(o):
         return len(o.getvalue())


-def get_netrc_auth(url):
+def get_netrc_auth(url, raise_errors=False):
     """Returns the Requests tuple auth for a given url from netrc."""

     try:
@@ -105,8 +105,9 @@ def get_netrc_auth(url):
                 return (_netrc[login_i], _netrc[2])
         except (NetrcParseError, IOError):
             # If there was a parsing error or a permissions issue reading the file,
-            # we'll just skip netrc auth
-            pass
+            # we'll just skip netrc auth unless explicitly asked to raise errors.
+            if raise_errors:
+                raise

     # AppEngine hackiness.
     except (ImportError, AttributeError):
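A hedged usage sketch of the new raise_errors flag (get_netrc_auth lives in requests' public utils module): by default a broken ~/.netrc is silently skipped, but callers can now surface the problem.

from requests.utils import get_netrc_auth

auth = get_netrc_auth('https://api.wakatime.com')  # None on any netrc error
try:
    auth = get_netrc_auth('https://api.wakatime.com', raise_errors=True)
except Exception as exc:  # e.g. netrc.NetrcParseError or IOError
    print('netrc problem: %s' % exc)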
@@ -15,30 +15,70 @@ from .projects.git import Git
 from .projects.mercurial import Mercurial
 from .projects.projectmap import ProjectMap
 from .projects.subversion import Subversion
-from .projects.wakatime import WakaTime
+from .projects.wakatime_project_file import WakaTimeProjectFile


 log = logging.getLogger('WakaTime')


 # List of plugin classes to find a project for the current file path.
 # Project plugins will be processed with priority in the order below.
-PLUGINS = [
-    WakaTime,
+CONFIG_PLUGINS = [
+    WakaTimeProjectFile,
     ProjectMap,
+]
+REV_CONTROL_PLUGINS = [
     Git,
     Mercurial,
     Subversion,
 ]


-def find_project(path, configs=None):
-    for plugin in PLUGINS:
-        plugin_name = plugin.__name__.lower()
-        plugin_configs = None
-        if configs and configs.has_section(plugin_name):
-            plugin_configs = dict(configs.items(plugin_name))
-        project = plugin(path, configs=plugin_configs)
+def get_project_info(configs=None, args=None):
+    """Find the current project and branch.
+
+    First looks for a .wakatime-project file. Second, uses the --project arg.
+    Third, uses the folder name from a revision control repository. Last, uses
+    the --alternate-project arg.
+
+    Returns a project, branch tuple.
+    """
+
+    project_name, branch_name = None, None
+
+    for plugin_cls in CONFIG_PLUGINS:
+
+        plugin_name = plugin_cls.__name__.lower()
+        plugin_configs = get_configs_for_plugin(plugin_name, configs)
+
+        project = plugin_cls(args.targetFile, configs=plugin_configs)
         if project.process():
-            return project
+            project_name = project.name()
+            branch_name = project.branch()
+            break
+
+    if project_name is None:
+        project_name = args.project
+
+    if project_name is None or branch_name is None:
+
+        for plugin_cls in REV_CONTROL_PLUGINS:
+
+            plugin_name = plugin_cls.__name__.lower()
+            plugin_configs = get_configs_for_plugin(plugin_name, configs)
+
+            project = plugin_cls(args.targetFile, configs=plugin_configs)
+            if project.process():
+                project_name = project_name or project.name()
+                branch_name = branch_name or project.branch()
+                break
+
+    if project_name is None:
+        project_name = args.alternate_project
+
+    return project_name, branch_name
+
+
+def get_configs_for_plugin(plugin_name, configs):
+    if configs and configs.has_section(plugin_name):
+        return dict(configs.items(plugin_name))
+    return None
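The docstring spells out the precedence; a tiny self-contained sketch of that ordering (plain values stand in for the real plugin classes and the argparse namespace):

def resolve_project_name(project_file, project_arg, repo_folder, alternate_arg):
    # .wakatime-project file first, then --project, then the repo, then
    # --alternate-project as the last resort.
    return project_file or project_arg or repo_folder or alternate_arg

assert resolve_project_name(None, None, 'my-repo', 'alt') == 'my-repo'
assert resolve_project_name('from-file', 'cli', 'my-repo', 'alt') == 'from-file'
assert resolve_project_name(None, None, None, 'alt') == 'alt'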
@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-
 """
-    wakatime.projects.wakatime
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~
+    wakatime.projects.wakatime_project_file
+    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

     Information from a .wakatime-project file about the project for
     a given file. First line of .wakatime-project sets the project
@@ -21,7 +21,7 @@ from ..compat import u, open
 log = logging.getLogger('WakaTime')


-class WakaTime(BaseProject):
+class WakaTimeProjectFile(BaseProject):

     def process(self):
         self.config = self._find_config(self.path)
packages/wakatime/session_cache.py (new file, 109 lines)
@@ -0,0 +1,109 @@
+# -*- coding: utf-8 -*-
+"""
+    wakatime.session_cache
+    ~~~~~~~~~~~~~~~~~~~~~~
+
+    Persist requests.Session for multiprocess SSL handshake pooling.
+
+    :copyright: (c) 2015 Alan Hamlett.
+    :license: BSD, see LICENSE for more details.
+"""
+
+
+import logging
+import os
+import pickle
+import sys
+import traceback
+
+try:
+    import sqlite3
+    HAS_SQL = True
+except ImportError:
+    HAS_SQL = False
+
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'packages'))
+
+from .packages import requests
+
+
+log = logging.getLogger('WakaTime')
+
+
+class SessionCache(object):
+    DB_FILE = os.path.join(os.path.expanduser('~'), '.wakatime.db')
+
+    def connect(self):
+        conn = sqlite3.connect(self.DB_FILE)
+        c = conn.cursor()
+        c.execute('''CREATE TABLE IF NOT EXISTS session (
+            value BLOB)
+        ''')
+        return (conn, c)
+
+
+    def save(self, session):
+        """Saves a requests.Session object for the next heartbeat process.
+        """
+
+        if not HAS_SQL:
+            return
+        try:
+            conn, c = self.connect()
+            c.execute('DELETE FROM session')
+            values = {
+                'value': pickle.dumps(session),
+            }
+            c.execute('INSERT INTO session VALUES (:value)', values)
+            conn.commit()
+            conn.close()
+        except:
+            log.error(traceback.format_exc())
+
+
+    def get(self):
+        """Returns a requests.Session object.
+
+        Gets Session from sqlite3 cache or creates a new Session.
+        """
+
+        if not HAS_SQL:
+            return requests.session()
+
+        try:
+            conn, c = self.connect()
+        except:
+            log.error(traceback.format_exc())
+            return requests.session()
+
+        session = None
+        try:
+            c.execute('BEGIN IMMEDIATE')
+            c.execute('SELECT value FROM session LIMIT 1')
+            row = c.fetchone()
+            if row is not None:
+                session = pickle.loads(row[0])
+        except:
+            log.error(traceback.format_exc())
+
+        try:
+            conn.close()
+        except:
+            log.error(traceback.format_exc())
+
+        return session if session is not None else requests.session()
+
+
+    def delete(self):
+        """Clears all cached Session objects.
+        """
+
+        if not HAS_SQL:
+            return
+        try:
+            conn, c = self.connect()
+            c.execute('DELETE FROM session')
+            conn.commit()
+            conn.close()
+        except:
+            log.error(traceback.format_exc())
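A hedged usage sketch of SessionCache as defined above: a short-lived heartbeat process reuses the previous process's requests.Session, and with it the pooled SSL connections, instead of paying for a fresh handshake every time. The wakatime CLI wires this around its API request; the flow here is illustrative.

from wakatime.session_cache import SessionCache

cache = SessionCache()
session = cache.get()          # cached requests.Session, or a fresh one
try:
    # ... perform the API request with `session` ...
    cache.save(session)        # persist for the next process
except Exception:
    cache.delete()             # drop a session that may be in a bad state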
@@ -20,13 +20,15 @@ if sys.version_info[0] == 2:
     sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'packages', 'pygments_py2'))
 else:
     sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), 'packages', 'pygments_py3'))
-from pygments.lexers import guess_lexer_for_filename
+from pygments.lexers import get_lexer_by_name, guess_lexer_for_filename
+from pygments.modeline import get_filetype_from_buffer
 from pygments.util import ClassNotFound


 log = logging.getLogger('WakaTime')


 # force file name extensions to be recognized as a certain language
+# extensions taking priority over lexer
 EXTENSIONS = {
     'j2': 'HTML',
     'markdown': 'Markdown',
@@ -34,6 +36,8 @@ EXTENSIONS = {
     'mdown': 'Markdown',
     'twig': 'Twig',
 }
+
+# lexers to human readable languages
 TRANSLATIONS = {
     'CSS+Genshi Text': 'CSS',
     'CSS+Lasso': 'CSS',
@@ -45,31 +49,132 @@ TRANSLATIONS = {
     'RHTML': 'HTML',
 }

+# extensions for when no lexer is found
+AUXILIARY_EXTENSIONS = {
+    'vb': 'VB.net',
+}
+

 def guess_language(file_name):
-    language, lexer = None, None
-    try:
-        with open(file_name, 'r', encoding='utf-8') as fh:
-            lexer = guess_lexer_for_filename(file_name, fh.read(512000))
-    except:
-        pass
+    """Guess lexer and language for a file.
+
+    Returns (language, lexer) tuple where language is a unicode string.
+    """
+
+    lexer = smart_guess_lexer(file_name)
+
     language = None

     # guess language from file extension
     if file_name:
-        language = guess_language_from_extension(file_name.rsplit('.', 1)[-1])
-    if lexer and language is None:
-        language = translate_language(u(lexer.name))
+        language = get_language_from_extension(file_name, EXTENSIONS)
+
+    # get language from lexer if we didn't have a hard-coded extension rule
+    if language is None and lexer:
+        language = u(lexer.name)
+
+    if language is None:
+        language = get_language_from_extension(file_name, AUXILIARY_EXTENSIONS)
+
+    if language is not None:
+        language = translate_language(language)

     return language, lexer


-def guess_language_from_extension(extension):
+def smart_guess_lexer(file_name):
+    """Guess Pygments lexer for a file.
+
+    Looks for a vim modeline in file contents, then compares the accuracy
+    of that lexer with a second guess. The second guess looks up all lexers
+    matching the file name, then runs a text analysis for the best choice.
+    """
+    lexer = None
+
+    text = get_file_contents(file_name)
+
+    lexer_1, accuracy_1 = guess_lexer_using_filename(file_name, text)
+    lexer_2, accuracy_2 = guess_lexer_using_modeline(text)
+
+    if lexer_1:
+        lexer = lexer_1
+    if (lexer_2 and accuracy_2 and
+            (not accuracy_1 or accuracy_2 > accuracy_1)):
+        lexer = lexer_2
+
+    return lexer
+
+
+def guess_lexer_using_filename(file_name, text):
+    """Guess lexer for given text, limited to lexers for this file's extension.
+
+    Returns a tuple of (lexer, accuracy).
+    """
+
+    lexer, accuracy = None, None
+
+    try:
+        lexer = guess_lexer_for_filename(file_name, text)
+    except:
+        pass
+
+    if lexer is not None:
+        try:
+            accuracy = lexer.analyse_text(text)
+        except:
+            pass
+
+    return lexer, accuracy
+
+
+def guess_lexer_using_modeline(text):
+    """Guess lexer for given text using Vim modeline.
+
+    Returns a tuple of (lexer, accuracy).
+    """
+
+    lexer, accuracy = None, None
+
+    file_type = None
+    try:
+        file_type = get_filetype_from_buffer(text)
+    except:
+        pass
+
+    if file_type is not None:
+        try:
+            lexer = get_lexer_by_name(file_type)
+        except ClassNotFound:
+            pass
+
+    if lexer is not None:
+        try:
+            accuracy = lexer.analyse_text(text)
+        except:
+            pass
+
+    return lexer, accuracy
+
+
+def get_language_from_extension(file_name, extension_map):
+    """Returns a matching language for the given file_name using extension_map.
+    """
+
+    extension = file_name.rsplit('.', 1)[-1] if len(file_name.rsplit('.', 1)) > 1 else None

     if extension:
-        if extension in EXTENSIONS:
-            return EXTENSIONS[extension]
-        if extension.lower() in EXTENSIONS:
-            return EXTENSIONS[extension.lower()]
+        if extension in extension_map:
+            return extension_map[extension]
+        if extension.lower() in extension_map:
+            return extension_map[extension.lower()]

     return None


 def translate_language(language):
+    """Turns Pygments lexer class name string into human-readable language.
+    """
+
     if language in TRANSLATIONS:
         language = TRANSLATIONS[language]
     return language
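guess_lexer_using_modeline() leans on Pygments' real helpers: get_filetype_from_buffer() extracts the filetype from a vim modeline near the top or bottom of the buffer, get_lexer_by_name() resolves it to a lexer, and analyse_text() scores how well that lexer fits the content. A short illustration:

from pygments.lexers import get_lexer_by_name
from pygments.modeline import get_filetype_from_buffer

text = '#!/bin/sh\necho hello\n# vim: set ft=sh:\n'
file_type = get_filetype_from_buffer(text)   # -> 'sh'
lexer = get_lexer_by_name(file_type)
print(lexer.name, lexer.analyse_text(text))  # e.g. 'Bash' and an accuracy score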
@@ -86,12 +191,14 @@ def number_lines_in_file(file_name):
     return lines


-def get_file_stats(file_name, notfile=False):
+def get_file_stats(file_name, notfile=False, lineno=None, cursorpos=None):
     if notfile:
         stats = {
             'language': None,
             'dependencies': [],
             'lines': None,
+            'lineno': lineno,
+            'cursorpos': cursorpos,
         }
     else:
         language, lexer = guess_language(file_name)
@@ -101,5 +208,20 @@ def get_file_stats(file_name, notfile=False):
             'language': language,
             'dependencies': dependencies,
             'lines': number_lines_in_file(file_name),
+            'lineno': lineno,
+            'cursorpos': cursorpos,
         }
     return stats
+
+
+def get_file_contents(file_name):
+    """Returns the first 512000 bytes of the file's contents.
+    """
+
+    text = None
+    try:
+        with open(file_name, 'r', encoding='utf-8') as fh:
+            text = fh.read(512000)
+    except:
+        pass
+    return text
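A hedged sketch of the resulting heartbeat stats payload with the new lineno/cursorpos fields (assumes wakatime.stats is importable from the plugin tree; values are illustrative):

from wakatime.stats import get_file_stats

stats = get_file_stats('main.py', lineno=42, cursorpos=1337)
# expected shape:
# {'language': 'Python', 'dependencies': [...], 'lines': 120,
#  'lineno': 42, 'cursorpos': 1337}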