def traverse(tree):
    result = []
    for node in tree:
        if node[0] == 't':
            # split text nodes on usual separators
            result.extend([t for t in re.split(r'(\.|-|_)', node[1]) if t])
        elif node[0] == 'm':
            m = '%{{{}}}'.format(node[1])
            if MacroHelper.expand(m):
                result.append(m)
        elif node[0] == 'c':
            if MacroHelper.expand('%{{{}:1}}'.format(node[1])):
                result.extend(traverse(node[2]))
        elif node[0] == 's':
            # ignore shell expansions, push nonsensical value
            result.append('@')
    return result
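
# A minimal usage sketch, assuming tree nodes are tuples of the form
# ('t', text), ('m', macro name), ('c', condition, children) and ('s', shell code);
# only node kinds that need no rpm macro state are exercised here.
import re  # traverse() splits text nodes with re

sample_tree = [('t', 'foo-1.2_beta'), ('s', 'echo ignored')]
print(traverse(sample_tree))
# expected: ['foo', '-', '1', '.', '2', '_', 'beta', '@']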
"""
Update %setup or %autosetup dirname argument if needed
:param dirname: new dirname to be used
"""
parser = self._get_setup_parser()
prep = self.spec_content.section('%prep')
if not prep:
return
for index, line in enumerate(prep):
if line.startswith('%setup') or line.startswith('%autosetup'):
args = shlex.split(line)
macro = args[0]
args = [MacroHelper.expand(a, '') for a in args[1:]]
# parse macro arguments
try:
ns, unknown = parser.parse_known_args(args)
except ParseError:
continue
# check if this macro instance is extracting Source0
if ns.T and ns.a != 0 and ns.b != 0:
continue
# check if modification is really necessary
if dirname != ns.n:
new_dirname = dirname
# get %{name} and %{version} macros

def generate_patch(self):
    """
    Generates patch to the results_dir containing all needed changes for
    the rebased package version
    """
    # Delete removed patches from rebased_sources_dir from git
    removed_patches = self.rebase_spec_file.removed_patches
    if removed_patches:
        self.rebased_repo.index.remove(removed_patches, working_tree=True)
    self.rebase_spec_file.update_paths_to_sources_and_patches()
    # Generate patch
    self.rebased_repo.git.add(all=True)
    self.rebase_spec_file.update()
    self.rebased_repo.index.commit(MacroHelper.expand(self.conf.changelog_entry, self.conf.changelog_entry))
    patch = self.rebased_repo.git.format_patch('-1', stdout=True, stdout_as_string=False)
    with open(os.path.join(self.results_dir, 'changes.patch'), 'wb') as f:
        f.write(patch)
        f.write(b'\n')
    results_store.set_changes_patch('changes_patch', os.path.join(self.results_dir, 'changes.patch'))

def get_release(self) -> str:
    """Returns release string without %dist"""
    release = self.header.release
    dist = MacroHelper.expand('%{dist}')
    if dist and release.endswith(dist):
        release = release[:-len(dist)]
    return release
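
# A minimal sketch of the same dist-stripping logic with made-up values,
# assuming rpm expanded %{dist} to '.fc38':
release, dist = '3.fc38', '.fc38'
if dist and release.endswith(dist):
    release = release[:-len(dist)]
print(release)  # '3'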

    continue
split_line = original_line[:]
directives: List[str] = []
prepend_macro = None
for element in reversed(split_line):
    if element in cls.FILES_DIRECTIVES:
        if cls.FILES_DIRECTIVES[element]:
            prepend_macro = cls.FILES_DIRECTIVES[element]
        directives.insert(0, element)
        split_line.remove(element)
if prepend_macro:
    for j, path in enumerate(split_line):
        if not os.path.isabs(path):
            split_line[j] = os.path.join(prepend_macro, subpackage, os.path.basename(path))
split_line = [MacroHelper.expand(p) for p in split_line]
j = 0
while j < len(split_line) and files:
    file = split_line[j]
    for deleted_file in reversed(files):
        if not fnmatch.fnmatch(deleted_file, file):
            continue
        original_file = original_line[len(directives) + j]
        del split_line[j]
        del original_line[len(directives) + j]
        files.remove(deleted_file)
        result['removed'][sec_name].append(original_file)
        logger.info("Removed %s from '%s' section", original_file, sec_name)
        break
    else:
        # advance only when no deleted file matched this entry; on a match the
        # entry was removed, so the same index already points at the next one
        j += 1
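
# A minimal sketch of the fnmatch check used above, with made-up paths:
# a deleted file is matched against an expanded %files glob.
import fnmatch
print(fnmatch.fnmatch('/usr/share/doc/foo/README.md', '/usr/share/doc/foo/*'))  # True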

def _sync_macros(s):
    """Makes all macros present in a string up-to-date in rpm context"""
    _, macros = _expand_macros(s)
    for macro in macros:
        MacroHelper.purge_macro(macro)
        value = _get_macro_value(macro)
        if value and MacroHelper.expand(value):
            rpm.addMacro(macro, value)
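
# A minimal sketch of the underlying rpm calls, assuming the rpm Python
# bindings are available: defining a macro and expanding it afterwards.
import rpm
rpm.addMacro('myver', '1.2.3')
print(rpm.expandMacro('%{myver}'))  # '1.2.3'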

def _get_setup_parser(self):
    """
    Construct ArgumentParser for parsing %(auto)setup macro arguments
    :return: constructed ArgumentParser
    """
    parser = SilentArgumentParser()
    parser.add_argument('-n', default=MacroHelper.expand('%{name}-%{version}', '%{name}-%{version}'))
    parser.add_argument('-a', type=int, default=-1)
    parser.add_argument('-b', type=int, default=-1)
    parser.add_argument('-T', action='store_true')
    parser.add_argument('-q', action='store_true')
    parser.add_argument('-c', action='store_true')
    parser.add_argument('-D', action='store_true')
    parser.add_argument('-v', action='store_true')
    parser.add_argument('-N', action='store_true')
    parser.add_argument('-p', type=int, default=-1)
    parser.add_argument('-S', default='')
    return parser
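
# A minimal usage sketch with a plain argparse.ArgumentParser configured like
# the parser above (SilentArgumentParser and macro expansion are left out of
# this sketch), parsing a typical %autosetup line:
import argparse
import shlex

sketch_parser = argparse.ArgumentParser()
sketch_parser.add_argument('-n', default='%{name}-%{version}')
sketch_parser.add_argument('-T', action='store_true')
sketch_parser.add_argument('-p', type=int, default=-1)

args = shlex.split('%autosetup -n mypackage-1.0 -p1')
ns, unknown = sketch_parser.parse_known_args(args[1:])
print(ns.n, ns.T, ns.p)  # mypackage-1.0 False 1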

Returns:
    list: List of lines of the new entry.
"""
new_record = []
today = date.today()
evr = '{epoch}:{ver}-{rel}'.format(epoch=self.header.epochnum,
                                   ver=self.header.version,
                                   rel=self.get_release())
evr = evr[2:] if evr.startswith('0:') else evr
new_record.append('* {day} {name} <{email}> - {evr}'.format(day=today.strftime('%a %b %d %Y'),
                                                             name=GitHelper.get_user(),
                                                             email=GitHelper.get_email(),
                                                             evr=evr))
self.update()
new_record.append(MacroHelper.expand(changelog_entry, changelog_entry))
new_record.append('')
return new_record
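
# A sketch of the header line the code above produces, using made-up
# name, e-mail and EVR values:
from datetime import date

evr = '0:1.2.3-1'
evr = evr[2:] if evr.startswith('0:') else evr
print('* {day} {name} <{email}> - {evr}'.format(day=date(2024, 1, 1).strftime('%a %b %d %Y'),
                                                name='John Doe',
                                                email='jdoe@example.com',
                                                evr=evr))
# * Mon Jan 01 2024 John Doe <jdoe@example.com> - 1.2.3-1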

def _process_value(curval, newval):
    """
    Replaces non-redefinable-macro parts of curval with matching parts from newval
    and redefines values of macros accordingly
    """
    value, _ = _expand_macros(curval)
    _sync_macros(curval + newval)
    tokens = _tokenize(value)
    values = [None] * len(tokens)
    sm = SequenceMatcher(a=newval)
    i = 0
    # split newval to match tokens
    for index, token in enumerate(tokens):
        if token[0] == '%':
            # for macros, try both literal and expanded value
            for v in [token, MacroHelper.expand(token, token)]:
                sm.set_seq2(v)
                m = sm.find_longest_match(i, len(newval), 0, len(v))
                valid = m.size == len(v)  # only full match is valid
                if valid:
                    break
        else:
            sm.set_seq2(token)
            m = sm.find_longest_match(i, len(newval), 0, len(token))
            valid = m.size > 0
        if not valid:
            continue
        if token == sm.b:
            tokens[index] = token[m.b:m.b+m.size]
        if index > 0:
            values[index] = newval[m.a:m.a+m.size]
            if not values[index - 1]: