def _get_pattern(cls):
    if cls._match_pattern is None:
        # Comments and strings come first in the alternation so that a
        # ${...} inside them is consumed without reaching the named group.
        pattern = codeanalyze.get_comment_pattern() + '|' + \
            codeanalyze.get_string_pattern() + '|' + \
            r'(?P<wildcard>\$\{[^\s\$\}]*\})'
        cls._match_pattern = re.compile(pattern)
    return cls._match_pattern
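
# Minimal sketch of the technique above (not part of the snippet): ORing the
# comment/string patterns in front of the named 'wildcard' group means a
# ${...} inside a string or comment never matches that group.  The simplified
# comment/string patterns below are assumptions for illustration only.
import re

_demo = re.compile(r'#[^\n]*' r'|' r"'[^']*'" r'|'
                   r'(?P<wildcard>\$\{[^\s\$\}]*\})')

def demo_wildcards(source):
    # Keep only hits that matched via the named group, i.e. wildcards
    # occurring outside comments and strings.
    for match in _demo.finditer(source):
        if match.lastgroup == 'wildcard':
            yield match.group('wildcard')

# demo_wildcards("x = '${skipped}'  # ${skipped_too}\n${kept}")
# -> yields only '${kept}'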

def extract(self):
    extract_info = self._collect_info()
    content = codeanalyze.ChangeCollector(self.info.source)
    definition = extract_info.definition
    lineno, indents = extract_info.definition_location
    offset = self.info.lines.get_line_start(lineno)
    indented = sourceutils.fix_indentation(definition, indents)
    content.add_change(offset, offset, indented)
    self._replace_occurrences(content, extract_info)
    return content.get_changed()

def get_changes(self):
    changes = change.ChangeSet('Generate %s <%s>' %
                               (self._get_element_kind(), self.name))
    indents = self.info.get_scope_indents()
    blanks = self.info.get_blank_lines()
    base_definition = sourceutils.fix_indentation(self._get_element(),
                                                  indents)
    definition = '\n' * blanks[0] + base_definition + '\n' * blanks[1]
    resource = self.info.get_insertion_resource()
    start, end = self.info.get_insertion_offsets()
    collector = codeanalyze.ChangeCollector(resource.read())
    collector.add_change(start, end, definition)
    changes.add_change(change.ChangeContents(
        resource, collector.get_changed()))
    return changes
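
# Usage sketch for the ChangeCollector pattern used above (assumes the usual
# `from rope.base import codeanalyze` import; offsets and text are made up
# for illustration).  Changes are queued against offsets in the *original*
# text and applied in one pass by get_changed(), which returns None when
# nothing was queued.
src = 'def f():\n    pass\n'
collector = codeanalyze.ChangeCollector(src)
collector.add_change(4, 5, 'g')                      # replace src[4:5]: f -> g
collector.add_change(len(src), len(src), '\ng()\n')  # equal offsets insert text
result = collector.get_changed() or src              # fall back if unchanged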

    # (fragment: inside a loop that tracks open parentheses/brackets while
    # scanning `source` by offset `i` and character `c`)
        if c in ')}]':
            parens -= 1
        if c == '\n' and parens > 0:
            collector.add_change(i, i + 1, ' ')
    source = collector.get_changed() or source
    return source.replace('\\\n', ' ').replace('\t', ' ').replace(';', '\n')

@utils.cached(7)
def ignored_regions(source):
    """Return ignored regions like strings and comments in `source`"""
    return [(match.start(), match.end()) for match in _str.finditer(source)]

_str = re.compile('%s|%s' % (codeanalyze.get_comment_pattern(),
                             codeanalyze.get_string_pattern()))
_parens = re.compile(r'[\({\[\]}\)\n]')
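
# Sketch of how such (start, end) regions are typically consumed (the helper
# below is an illustration, not part of the library): an offset that falls
# inside any ignored region belongs to a string or comment and is skipped.
def _is_ignored(offset, regions):
    return any(start <= offset < end for start, end in regions)

# src = "x = 1  # x is one\n"
# _is_ignored(src.index('one'), ignored_regions(src))  -> True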

def consume_string(self, end=None):
    if _Source._string_pattern is None:
        original = codeanalyze.get_string_pattern()
        # Also swallow implicitly concatenated literals separated by
        # whitespace, line continuations, or comments.
        pattern = r'(%s)((\s|\\\n|#[^\n]*\n)*(%s))*' % \
            (original, original)
        _Source._string_pattern = re.compile(pattern)
    repattern = _Source._string_pattern
    return self._consume_pattern(repattern, end)

def make_pattern(code, variables):
    variables = set(variables)
    collector = codeanalyze.ChangeCollector(code)

    def does_match(node, name):
        return isinstance(node, ast.Name) and node.id == name

    finder = RawSimilarFinder(code, does_match=does_match)
    for variable in variables:
        for match in finder.get_matches('${%s}' % variable):
            start, end = match.get_region()
            collector.add_change(start, end, '${%s}' % variable)
    result = collector.get_changed()
    return result if result is not None else code
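
# Illustration (assumed helper, not from the library): once a pattern string
# such as 'a = ${b} + ${c}' has been built (the kind of output make_pattern
# above is meant to produce), the wildcard names can be pulled back out with
# the same \$\{...\} regex used in _get_pattern above.
import re

_wildcard = re.compile(r'\$\{([^\s\$\}]*)\}')

def wildcard_names(pattern):
    return _wildcard.findall(pattern)

# wildcard_names('a = ${b} + ${c}')  ->  ['b', 'c']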

def _get_return_pattern(cls):
    if not hasattr(cls, '_return_pattern'):
        def named_pattern(name, list_):
            return "(?P<%s>" % name + "|".join(list_) + ")"
        comment_pattern = named_pattern('comment', [r'#[^\n]*'])
        string_pattern = named_pattern('string',
                                       [codeanalyze.get_string_pattern()])
        return_pattern = r'\b(?P<return>return)\b'
        cls._return_pattern = re.compile(comment_pattern + "|" +
                                         string_pattern + "|" +
                                         return_pattern)
    return cls._return_pattern
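
# Sketch of how such a pattern is typically consumed (illustration, not the
# library's own code): keep only hits where the named 'return' group matched,
# so a 'return' inside a comment or a string literal is ignored.
def real_returns(pattern, source):
    return [m.start('return') for m in pattern.finditer(source)
            if m.group('return') is not None]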

def __init__(self, name, docs=False):
    self.name = name
    self.docs = docs
    self.comment_pattern = _TextualFinder.any('comment', [r'#[^\n]*'])
    self.string_pattern = _TextualFinder.any(
        'string', [codeanalyze.get_string_pattern()])
    self.f_string_pattern = _TextualFinder.any(
        'fstring', [codeanalyze.get_formatted_string_pattern()])
    self.pattern = self._get_occurrence_pattern(self.name)

def _check_nothing_after_return(self, source, offset):
    lines = codeanalyze.SourceLinesAdapter(source)
    lineno = lines.get_line_number(offset)
    logical_lines = codeanalyze.LogicalLineFinder(lines)
    lineno = logical_lines.logical_line_in(lineno)[1]
    if source[lines.get_line_end(lineno):len(source)].strip() != '':
        raise rope.base.exceptions.RefactoringError(
            'Cannot inline functions with statements ' +
            'after return statement.')