# -*- coding: utf-8 -*-
"""
    wakatime.heartbeat
    ~~~~~~~~~~~~~~~~~~

    :copyright: (c) 2017 Alan Hamlett.
    :license: BSD, see LICENSE for more details.
"""


import os
import logging
import re

from subprocess import PIPE

from .compat import u, json, is_win, Popen
from .exceptions import SkipHeartbeat
from .project import get_project_info
from .stats import get_file_stats
from .utils import get_user_agent, should_exclude, format_file_path, find_project_file


log = logging.getLogger('WakaTime')


class Heartbeat(object):
    """Heartbeat data for sending to API or storing in offline cache."""

    skip = False
    args = None
    configs = None

    time = None
    entity = None
    type = None
    category = None
    is_write = None
    project = None
    branch = None
    language = None
    dependencies = None
    lines = None
    lineno = None
    cursorpos = None
    user_agent = None

    _sensitive_when_hiding_filename = (
        'dependencies',
        'lines',
        'lineno',
        'cursorpos',
    )
    _sensitive_when_hiding_branch = (
        'branch',
    )

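    # Note: __init__ normalizes the incoming plugin payload: it coerces the
    # entity type and category to known values, resolves project/branch and
    # file stats, and records any reason to drop the heartbeat in self.skip
    # instead of raising. The _clone flag bypasses those checks when copying
    # an already-validated Heartbeat (see update() below).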
    def __init__(self, data, args, configs, _clone=None):
        if not data:
            self.skip = u('Skipping because heartbeat data is missing.')
            return

        self.args = args
        self.configs = configs

        self.entity = data.get('entity')
        self.time = data.get('time', data.get('timestamp'))
        self.is_write = data.get('is_write')
        self.user_agent = data.get('user_agent') or get_user_agent(args.plugin)

        self.type = data.get('type', data.get('entity_type'))
        if self.type not in ['file', 'domain', 'app']:
            self.type = 'file'

        self.category = data.get('category')
        allowed_categories = [
            'coding',
            'building',
            'indexing',
            'debugging',
            'running tests',
            'manual testing',
            'writing tests',
            'browsing',
            'code reviewing',
            'designing',
        ]
        if self.category not in allowed_categories:
            self.category = None

        if not _clone:
            exclude = self._excluded_by_pattern()
            if exclude:
                self.skip = u('Skipping because matches exclude pattern: {pattern}').format(
                    pattern=u(exclude),
                )
                return

            if self.type == 'file':
                self.entity = format_file_path(self.entity)
                self._format_local_file()

                if not self._file_exists():
                    self.skip = u('File does not exist; ignoring this heartbeat.')
                    return

                if self._excluded_by_missing_project_file():
                    self.skip = u('Skipping because missing .wakatime-project file in parent path.')
                    return

            if args.local_file and not os.path.isfile(args.local_file):
                args.local_file = None

            project, branch = get_project_info(configs, self, data)
            self.project = project
            self.branch = branch

            if self._excluded_by_unknown_project():
                self.skip = u('Skipping because project unknown.')
                return

            try:
                stats = get_file_stats(self.entity,
                                       entity_type=self.type,
                                       lineno=data.get('lineno'),
                                       cursorpos=data.get('cursorpos'),
                                       plugin=args.plugin,
                                       language=data.get('language'),
                                       local_file=args.local_file)
            except SkipHeartbeat as ex:
                self.skip = u(ex) or 'Skipping'
                return

        else:
            self.project = data.get('project')
            self.branch = data.get('branch')
            stats = data

        for key in ['language', 'dependencies', 'lines', 'lineno', 'cursorpos']:
            if stats.get(key) is not None:
                setattr(self, key, stats[key])

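    # update() clones rather than mutates; the copy is built with _clone=True
    # so the exclude/project/stats checks in __init__ are not re-run.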
    def update(self, attrs):
        """Return a copy of the current Heartbeat with updated attributes."""

        data = self.dict()
        data.update(attrs)
        heartbeat = Heartbeat(data, self.args, self.configs, _clone=True)
        return heartbeat

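    # sanitize() applies the hide_file_names / hide_project_names /
    # hide_branch_names settings. Hiding a file name replaces the entity with
    # HIDDEN<extension>; hiding a project or branch only clears the related
    # metadata via _sanitize_metadata().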
    def sanitize(self):
        """Removes sensitive data including file names and dependencies.

        Returns a Heartbeat.
        """

        if self.entity is None:
            return self

        if self.type != 'file':
            return self

        if self._should_obfuscate_filename():
            self._sanitize_metadata(keys=self._sensitive_when_hiding_filename)
            if self._should_obfuscate_branch(default=True):
                self._sanitize_metadata(keys=self._sensitive_when_hiding_branch)
            extension = u(os.path.splitext(self.entity)[1])
            self.entity = u('HIDDEN{0}').format(extension)
        elif self.should_obfuscate_project():
            self._sanitize_metadata(keys=self._sensitive_when_hiding_filename)
            if self._should_obfuscate_branch(default=True):
                self._sanitize_metadata(keys=self._sensitive_when_hiding_branch)
        elif self._should_obfuscate_branch():
            self._sanitize_metadata(keys=self._sensitive_when_hiding_branch)

        return self

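    # dict() is the canonical serialized form (the keys below are what gets
    # sent to the API or stored in the offline cache, per the class
    # docstring); json() and items() are thin wrappers around it.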
    def json(self):
        return json.dumps(self.dict())

    def dict(self):
        return {
            'time': self.time,
            'entity': self._unicode(self.entity),
            'type': self.type,
            'category': self.category,
            'is_write': self.is_write,
            'project': self._unicode(self.project),
            'branch': self._unicode(self.branch),
            'language': self._unicode(self.language),
            'dependencies': self._unicode_list(self.dependencies),
            'lines': self.lines,
            'lineno': self.lineno,
            'cursorpos': self.cursorpos,
            'user_agent': self._unicode(self.user_agent),
        }

    def items(self):
        return self.dict().items()

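    # get_id() joins the identifying fields into a single string; it looks
    # intended as a stable key for spotting duplicate heartbeats, though
    # callers outside this module decide how it is actually used.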
    def get_id(self):
        return u('{time}-{type}-{category}-{project}-{branch}-{entity}-{is_write}').format(
            time=self.time,
            type=self.type,
            category=self.category,
            project=self._unicode(self.project),
            branch=self._unicode(self.branch),
            entity=self._unicode(self.entity),
            is_write=self.is_write,
        )

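    # The three predicates below decide what to hide. Each hide_* setting is
    # treated as an iterable of regex patterns matched case-insensitively
    # against the entity path (and, for branches, the branch name); invalid
    # patterns are logged as warnings and otherwise ignored.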
    def should_obfuscate_project(self):
        """Returns True if hide_project_names is true or the entity file path
        matches one in the list of obfuscated project paths."""

        for pattern in self.args.hide_project_names:
            try:
                compiled = re.compile(pattern, re.IGNORECASE)
                if compiled.search(self.entity):
                    return True
            except re.error as ex:
                log.warning(u('Regex error ({msg}) for hide_project_names pattern: {pattern}').format(
                    msg=u(ex),
                    pattern=u(pattern),
                ))

        return False

    def _should_obfuscate_filename(self):
        """Returns True if hide_file_names is true or the entity file path
        matches one in the list of obfuscated file paths."""

        for pattern in self.args.hide_file_names:
            try:
                compiled = re.compile(pattern, re.IGNORECASE)
                if compiled.search(self.entity):
                    return True
            except re.error as ex:
                log.warning(u('Regex error ({msg}) for hide_file_names pattern: {pattern}').format(
                    msg=u(ex),
                    pattern=u(pattern),
                ))

        return False

    def _should_obfuscate_branch(self, default=False):
        """Returns True if hide_branch_names is true or the entity file path
        or branch name matches one in the list of obfuscated branch patterns."""

        # when project names or file names are hidden and hide_branch_names is
        # not set, we default to hiding branch names along with file/project.
        if default and self.args.hide_branch_names is None:
            return True

        if not self.branch or not self.args.hide_branch_names:
            return False

        for pattern in self.args.hide_branch_names:
            try:
                compiled = re.compile(pattern, re.IGNORECASE)
                if compiled.search(self.entity) or compiled.search(self.branch):
                    return True
            except re.error as ex:
                log.warning(u('Regex error ({msg}) for hide_branch_names pattern: {pattern}').format(
                    msg=u(ex),
                    pattern=u(pattern),
                ))

        return False

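    # _unicode()/_unicode_list() coerce values to text via compat.u (the
    # project's text shim) while preserving None, so the serialized payload
    # is consistent across Python versions.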
    def _unicode(self, value):
        if value is None:
            return None
        return u(value)

    def _unicode_list(self, values):
        if values is None:
            return None
        return [self._unicode(value) for value in values]

    def _file_exists(self):
        return (self.entity and os.path.isfile(self.entity) or
                self.args.local_file and os.path.isfile(self.args.local_file))

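    # Windows-only fallback: when the entity path is not readable as given,
    # the helpers below try to translate its drive letter into the matching
    # UNC share reported by `net use`, presumably so the file can still be
    # read for stats via args.local_file.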
    def _format_local_file(self):
        """When args.local_file is empty on Windows, tries to map self.entity
        to a UNC path.

        Updates args.local_file in-place without returning anything.
        """

        if self.type != 'file':
            return

        if not self.entity:
            return

        if not is_win:
            return

        if self._file_exists():
            return

        self.args.local_file = self._to_unc_path(self.entity)

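    # _to_unc_path() parses the output of `net use`, which typically looks
    # roughly like the illustrative listing below; the header row defines the
    # column spans used to pick the Local drive and Remote share:
    #
    #   Status       Local     Remote                    Network
    #   ---------------------------------------------------------------------
    #   OK           Z:        \\server\share            Microsoft Windows Network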
    def _to_unc_path(self, filepath):
        drive, rest = self._splitdrive(filepath)
        if not drive:
            return filepath

        stdout = None
        try:
            stdout, stderr = Popen(['net', 'use'], stdout=PIPE, stderr=PIPE).communicate()
        except OSError:
            pass
        else:
            if stdout:
                cols = None
                for line in stdout.strip().splitlines()[1:]:
                    line = u(line)
                    if not line.strip():
                        continue
                    if not cols:
                        cols = self._unc_columns(line)
                        continue
                    start, end = cols.get('local', (0, 0))
                    if not start and not end:
                        break
                    local = line[start:end].strip().split(':')[0].upper()
                    if not local.isalpha():
                        continue
                    if local == drive:
                        start, end = cols.get('remote', (0, 0))
                        if not start and not end:
                            break
                        remote = line[start:end].strip()
                        return remote + rest

        return filepath

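    # _unc_columns() walks the header row character by character and records
    # each column name with its (start, end) character span, e.g. roughly
    # {'status': (0, 13), 'local': (13, 23), ...}; the last column gets an
    # open-ended span of (start, -1).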
    def _unc_columns(self, line):
        cols = {}
        current_col = u('')
        newcol = False
        start, end = 0, 0
        for char in line:
            if char.isalpha():
                if newcol:
                    cols[current_col.strip().lower()] = (start, end)
                    current_col = u('')
                    start = end
                    newcol = False
                current_col += u(char)
            else:
                newcol = True
            end += 1
        if start != end and current_col:
            cols[current_col.strip().lower()] = (start, -1)
        return cols

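    # _splitdrive(r'Z:\projects\x.py') returns ('Z', r'\projects\x.py');
    # paths without a drive letter come back as (None, filepath) unchanged.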
    def _splitdrive(self, filepath):
        if filepath[1:2] != ':' or not filepath[0].isalpha():
            return None, filepath
        return filepath[0].upper(), filepath[2:]

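    # The _excluded_by_* checks mirror the include/exclude,
    # exclude_unknown_project, and include_only_with_project_file settings
    # carried on self.args; each returns a truthy value when the heartbeat
    # should be skipped.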
    def _excluded_by_pattern(self):
        return should_exclude(self.entity, self.args.include, self.args.exclude)

    def _excluded_by_unknown_project(self):
        if self.project:
            return False
        return self.args.exclude_unknown_project

    def _excluded_by_missing_project_file(self):
        if not self.args.include_only_with_project_file:
            return False
        return find_project_file(self.entity) is None

    def _sanitize_metadata(self, keys=[]):
        for key in keys:
            setattr(self, key, None)

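    # A Heartbeat is truthy only when self.skip is falsy, so callers can test
    # the object directly instead of checking skip themselves; __nonzero__
    # keeps the same behavior under Python 2.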
    def __repr__(self):
        return self.json()

    def __bool__(self):
        return not self.skip

    def __nonzero__(self):
        return self.__bool__()

    def __getitem__(self, key):
        return self.dict()[key]