def test_to_filename(self):
m1 = MatchSpec(fn='foo-1.7-52.tar.bz2')
m2 = MatchSpec(name='foo', version='1.7', build='52')
m3 = MatchSpec(Dist('defaults::foo-1.7-52'))
assert m1._to_filename_do_not_use() == 'foo-1.7-52.tar.bz2'
assert m2._to_filename_do_not_use() == 'foo-1.7-52.tar.bz2'
assert m3._to_filename_do_not_use() == 'foo-1.7-52.tar.bz2'
for spec in 'bitarray', 'pycosat 0.6.0', 'numpy 1.6*':
ms = MatchSpec(spec)
assert ms._to_filename_do_not_use() is None
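# Hedged sketch (not conda's code): the idea behind _to_filename_do_not_use()
# is that a spec maps to a concrete tarball name only when name, version, and
# build are all pinned exactly; wildcards or missing fields cannot name a file.
def spec_to_filename(name, version=None, build=None):
    if not version or not build or any(c in version + build for c in '*|<>='):
        return None
    return '%s-%s-%s.tar.bz2' % (name, version, build)

assert spec_to_filename('foo', '1.7', '52') == 'foo-1.7-52.tar.bz2'
assert spec_to_filename('bitarray') is None
assert spec_to_filename('numpy', '1.6*') is None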
def test_channel(self):
d = Dist.from_string("conda-forge::spyder-app-2.3.8-py27_0.tar.bz2")
assert d.channel == 'conda-forge'
assert d.quad[0] == "spyder-app"
assert d.dist_name == "spyder-app-2.3.8-py27_0"
d = Dist.from_string("s3://some/bucket/name::spyder-app-2.3.8-py27_0.tar.bz2")
assert d.channel == 's3://some/bucket/name'
assert d.quad[0] == "spyder-app"
assert d.dist_name == "spyder-app-2.3.8-py27_0"
assert d.to_url() == join_url("s3://some/bucket/name", context.subdir,
"spyder-app-2.3.8-py27_0.tar.bz2")
'build_number': 0,
'depends': ['python 3.3*'],
'name': 'mypackage',
'version': '1.0',
'features': 'feature',
})
index2['feature-1.0-py33_0.tar.bz2'] = IndexRecord(**{
'build': 'py33_0',
'build_number': 0,
'depends': ['python 3.3*'],
'name': 'feature',
'version': '1.0',
'track_features': 'feature',
})
index2 = {Dist(key): value for key, value in iteritems(index2)}
r = Resolve(index2)
# It should not raise
r.install(['mypackage', 'feature 1.0'])
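# Hedged sketch of the Dist re-keying step above, with a namedtuple standing
# in for conda's Dist class (FakeDist and fake_dist are illustrative names,
# not part of conda).
from collections import namedtuple

FakeDist = namedtuple('FakeDist', ('name', 'version', 'build'))

def fake_dist(filename):
    name, version, build = filename[:-len('.tar.bz2')].rsplit('-', 2)
    return FakeDist(name, version, build)

index = {'feature-1.0-py33_0.tar.bz2': {'track_features': 'feature'}}
index = {fake_dist(fn): rec for fn, rec in index.items()}
assert FakeDist('feature', '1.0', 'py33_0') in index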
stdoutlog.info('\nWarning: %s possible package resolutions '
'(only showing differing packages):%s%s' %
('>10' if nsol > 10 else nsol,
dashlist(', '.join(diff) for diff in diffs),
'\n ... and others' if nsol > 10 else ''))
def stripfeat(sol):
return sol.split('[')[0]
if not context.quiet:
stdoutlog.info('\n')
if returnall:
return [sorted(Dist(stripfeat(dname)) for dname in psol) for psol in psolutions]
else:
return sorted(Dist(stripfeat(dname)) for dname in psolutions[0])
except:
stdoutlog.info('\n')
raise
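# The stripfeat() helper above drops a trailing bracketed feature annotation
# (e.g. '[mkl]') from a solved dist string before it is rebuilt into a Dist;
# a quick standalone check of that behavior:
def _stripfeat(sol):
    return sol.split('[')[0]

assert _stripfeat('mypackage-1.0-py33_0[feature]') == 'mypackage-1.0-py33_0'
assert _stripfeat('numpy-1.11.3-py36_0') == 'numpy-1.11.3-py36_0'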
def default_filter(self, features=None, filter=None):
if filter is None:
filter = {}
else:
filter.clear()
filter.update({Dist(fstr+'@'): False for fstr in iterkeys(self.trackers)})
if features:
filter.update({Dist(fstr+'@'): True for fstr in features})
return filter
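# Hedged standalone check of default_filter()'s in-place semantics: when a
# dict is passed in, it is cleared and refilled rather than replaced, so the
# caller's reference stays valid. Dist is dropped here and the tracker names
# are made up for the example.
trackers = {'mkl': [], 'debug': []}

def _default_filter(features=None, filter=None):
    if filter is None:
        filter = {}
    else:
        filter.clear()
    filter.update({fstr + '@': False for fstr in trackers})
    if features:
        filter.update({fstr + '@': True for fstr in features})
    return filter

shared = {}
assert _default_filter(features=['mkl'], filter=shared) is shared
assert shared == {'mkl@': True, 'debug@': False}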
meta_in_common = { # just need to make this once, then apply with .update()
'arch': repodata.get('info', {}).get('arch'),
'channel': channel_url,
'platform': repodata.get('info', {}).get('platform'),
'priority': priority,
'schannel': schannel,
}
packages = {}
for fn, info in iteritems(opackages):
info['fn'] = fn
info['url'] = join_url(channel_url, fn)
if add_pip and info['name'] == 'python' and info['version'].startswith(('2.', '3.')):
info['depends'].append('pip')
info.update(meta_in_common)
rec = IndexRecord(**info)
packages[Dist(rec)] = rec
repodata['packages'] = packages
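# Reduced sketch of the loop above using plain dicts: every repodata entry is
# stamped with its filename, full URL, and the channel-wide metadata, and
# python 2.x/3.x records gain an implicit "pip" dependency. Names and URLs
# below are illustrative only.
def supplement_repodata(opackages, channel_url, common, add_pip=True):
    out = {}
    for fn, info in opackages.items():
        info = dict(info, fn=fn, url=channel_url + '/' + fn)
        if add_pip and info['name'] == 'python' and info['version'].startswith(('2.', '3.')):
            info['depends'] = info['depends'] + ['pip']
        info.update(common)
        out[fn] = info
    return out

recs = supplement_repodata(
    {'python-3.6.0-0.tar.bz2': {'name': 'python', 'version': '3.6.0', 'depends': []}},
    'https://repo.example/linux-64', {'priority': 1, 'schannel': 'defaults'})
assert recs['python-3.6.0-0.tar.bz2']['depends'] == ['pip']
assert recs['python-3.6.0-0.tar.bz2']['priority'] == 1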
and should only be installed in the root environment: %s
These packages need to be removed before conda can proceed.""" % (' '.join(linked),))
raise InstallError("Error: 'conda' can only be installed into the "
"root environment")
smh = r.dependency_sort(must_have)
actions = ensure_linked_actions(
smh, prefix,
index=r.index,
force=force, always_copy=always_copy)
if actions[LINK]:
actions[SYMLINK_CONDA] = [context.root_prefix]
for dist in sorted(linked):
dist = Dist(dist)
name = r.package_name(dist)
replace_existing = name in must_have and dist != must_have[name]
prune_it = prune and dist not in smh
if replace_existing or prune_it:
add_unlink(actions, dist)
return actions
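# Hedged sketch of the unlink decision above, using plain strings in place of
# Dist objects: a linked package is removed when a different build of the
# same name is in must_have, or when pruning and it is not in the required
# (dependency-sorted) set.
def plan_unlinks(linked, must_have, required, prune=False):
    removals = []
    for dist in sorted(linked):
        name = dist.rsplit('-', 2)[0]
        replace_existing = name in must_have and dist != must_have[name]
        if replace_existing or (prune and dist not in required):
            removals.append(dist)
    return removals

assert plan_unlinks(['numpy-1.10.4-py35_0'],
                    {'numpy': 'numpy-1.11.3-py35_0'},
                    ['numpy-1.11.3-py35_0']) == ['numpy-1.10.4-py35_0']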
def dist_str_in_index(index, dist_str):
return Dist(dist_str) in index
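# Hedged usage note: with an index keyed by Dist objects (as built in the
# repodata snippet above), membership of a "channel::name-version-build"
# string reduces to a single dict lookup, e.g.
#     dist_str_in_index(index, 'defaults::mypackage-1.0-py33_0')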
solution = C.sat(constraints)
limit = xtra = None
if not solution or xtra:
def get_(name, snames):
if name not in snames:
snames.add(name)
for fn in self.groups.get(name, []):
for ms in self.ms_depends(fn):
get_(ms.name, snames)
# New addition: find the largest set of installed packages that
# are consistent with each other, and include those in the
# list of packages to maintain consistency with
snames = set()
eq_optional_c = r2.generate_removal_count(C, specs)
solution, _ = C.minimize(eq_optional_c, C.sat())
snames.update(dists[Dist(q)]['name']
for q in (C.from_index(s) for s in solution)
if q and q[0] != '!' and '@' not in q)
# Existing behavior: keep all specs and their dependencies
for spec in new_specs:
get_(MatchSpec(spec).name, snames)
if len(snames) < len(dists):
limit = snames
xtra = [dist for dist, rec in iteritems(dists) if rec['name'] not in snames]
log.debug('Limiting solver to the following packages: %s', ', '.join(limit))
if xtra:
log.debug('Packages to be preserved: %s', xtra)
return limit, xtra
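# Standalone sketch of the dependency closure that get_() computes above:
# starting from the requested names, collect every package name reachable
# through "depends" edges (plain dicts stand in for self.groups/ms_depends).
def dependency_closure(names, depends):
    seen = set()
    def get_(name):
        if name not in seen:
            seen.add(name)
            for dep in depends.get(name, ()):
                get_(dep)
    for name in names:
        get_(name)
    return seen

deps = {'pandas': ['numpy', 'python'], 'numpy': ['python'], 'python': []}
assert dependency_closure(['pandas'], deps) == {'pandas', 'numpy', 'python'}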
def _add_entry(__packages_map, pkgs_dir, package_filename):
if not package_filename.endswith(CONDA_TARBALL_EXTENSION):
package_filename += CONDA_TARBALL_EXTENSION
log.trace("adding to package cache %s", join(pkgs_dir, package_filename))
dist = first(self.urls_data, lambda x: basename(x) == package_filename,
apply=Dist)
if not dist:
dist = Dist.from_string(package_filename, channel_override=UNKNOWN_CHANNEL)
pc_entry = PackageCacheEntry.make_legacy(pkgs_dir, dist)
__packages_map[pc_entry.dist] = pc_entry
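# Hedged sketch of the lookup-with-fallback pattern in _add_entry(): prefer a
# match recovered from the recorded URLs, otherwise fall back to parsing the
# bare filename (the simplified first() below is illustrative, not conda's
# helper).
def _first(seq, predicate, apply=lambda x: x):
    return next((apply(x) for x in seq if predicate(x)), None)

urls = ['https://repo.example/linux-64/foo-1.0-0.tar.bz2']
hit = _first(urls, lambda u: u.endswith('/foo-1.0-0.tar.bz2'),
             apply=lambda u: u.rsplit('/', 1)[1])
assert hit == 'foo-1.0-0.tar.bz2'
assert _first(urls, lambda u: u.endswith('/bar-2.0-0.tar.bz2')) is None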