def sysexec(self, *argv, **popen_opts):
    """ return stdout text from executing a system child process,
        where the 'self' path points to executable.
        The process is directly invoked and not through a system shell.
    """
    from subprocess import Popen, PIPE
    argv = map_as_list(str, argv)
    popen_opts['stdout'] = popen_opts['stderr'] = PIPE
    proc = Popen([str(self)] + argv, **popen_opts)
    stdout, stderr = proc.communicate()
    ret = proc.wait()
    if py.builtin._isbytes(stdout):
        stdout = py.builtin._totext(stdout, sys.getdefaultencoding())
    if ret != 0:
        if py.builtin._isbytes(stderr):
            stderr = py.builtin._totext(stderr, sys.getdefaultencoding())
        raise py.process.cmdexec.Error(ret, ret, str(self),
                                       stdout, stderr,)
    return stdout
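
# A minimal usage sketch for sysexec() above, assuming the snippet is the
# py.path.local.sysexec method and that a POSIX /bin/echo is available.
# The child process is invoked directly (no shell); stdout comes back as text.
import py

echo = py.path.local("/bin/echo")
out = echo.sysexec("hello", "world")
assert out == "hello world\n"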
def _makefile(self, ext, args, kwargs):
    items = list(kwargs.items())
    if args:
        source = py.builtin._totext("\n").join(
            map(py.builtin._totext, args)) + py.builtin._totext("\n")
        basename = self.request.function.__name__
        items.insert(0, (basename, source))
    ret = None
    for name, value in items:
        p = self.tmpdir.join(name).new(ext=ext)
        source = Source(value)

        def my_totext(s, encoding="utf-8"):
            if py.builtin._isbytes(s):
                s = py.builtin._totext(s, encoding=encoding)
            return s

        source_unicode = "\n".join([my_totext(line) for line in source.lines])
        source = py.builtin._totext(source_unicode)
        content = source.strip().encode("utf-8")  # + "\n"
        # content = content.rstrip() + "\n"
        p.write(content, "wb")
        if ret is None:
            ret = p
    return ret
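
# A hedged usage sketch: _makefile above is the internal helper behind pytest's
# testdir.makefile()/makepyfile() fixtures. Keyword names become file basenames,
# values become file contents (stripped, utf-8 encoded), and the first created
# path is returned.
def test_makefile_example(testdir):
    p = testdir.makefile(".txt", hello="hello there")
    assert p.basename == "hello.txt"
    assert p.read() == "hello there"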
"""Utilities for assertion debugging"""
import pprint
import _pytest._code
import py
try:
    from collections import Sequence
except ImportError:
    Sequence = list
BuiltinAssertionError = py.builtin.builtins.AssertionError
u = py.builtin._totext
# The _reprcompare attribute on the util module is used by the new assertion
# interpretation code and assertion rewriter to detect this plugin was
# loaded and in turn call the hooks defined here as part of the
# DebugInterpreter.
_reprcompare = None
# the re-encoding is needed for python2 repr
# with non-ascii characters (see issue 877 and 1379)
def ecu(s):
    try:
        return u(s, 'utf-8', 'replace')
    except TypeError:
        return s
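
# A small sketch of the ecu() contract: byte strings are re-decoded as utf-8
# (with 'replace' for undecodable bytes), while text objects make u() raise
# TypeError and are returned unchanged.
assert ecu(b'hell\xc3\xb6') == b'hell\xc3\xb6'.decode('utf-8')
assert ecu('already text') == 'already text'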
def test_capturing_readouterr_decode_error_handling(self):
    cap = self.getcapture()
    # triggered an internal error in pytest
    print('\xa6')
    out, err = cap.readouterr()
    assert out == py.builtin._totext('\ufffd\n', 'unicode-escape')
def snap(self):
    f = self.tmpfile
    f.seek(0)
    res = f.read()
    if res:
        enc = getattr(f, "encoding", None)
        if enc and isinstance(res, bytes):
            res = py.builtin._totext(res, enc, "replace")
        f.truncate(0)
        f.seek(0)
        return res
    return ''
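
# A minimal sketch of the snap() contract, assuming a hypothetical stand-in
# object with a tmpfile attribute (the real class lives in pytest's capture
# module): snap() returns what accumulated since the last call and empties
# the buffer.
import io

class _FileCaptureStub(object):
    def __init__(self):
        self.tmpfile = io.StringIO()  # stand-in for the real temp file
    snap = snap  # reuse the method shown above

cap = _FileCaptureStub()
cap.tmpfile.write("captured output\n")
assert cap.snap() == "captured output\n"
assert cap.snap() == ''  # buffer was truncated by the first snap()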
def test_unicode_on_file_with_ascii_encoding(tmpdir, monkeypatch, encoding):
    msg = py.builtin._totext('hell\xf6', "latin1")
    # pytest.raises(UnicodeEncodeError, lambda: bytes(msg))
    f = py.std.codecs.open(str(tmpdir.join("x")), "w", encoding)
    tw = py.io.TerminalWriter(f)
    tw.line(msg)
    f.close()
    s = tmpdir.join("x").open("rb").read().strip()
    assert encoding == "ascii"
    assert s == msg.encode("unicode-escape")
def test_update(self, keyfs):
    key1 = keyfs.add_key("NAME1", "some1", dict)
    key2 = keyfs.add_key("NAME2", "some2", list)
    keyfs.restart_as_write_transaction()
    with key1.update() as d:
        with key2.update() as l:
            l.append(1)
            d[py.builtin._totext("hello")] = l
    assert key1.get()["hello"] == l
import base64
import os

def newsalt():
    return py.builtin._totext(base64.b64encode(os.urandom(16)), "ascii")
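
# Hedged usage sketch: each call yields a fresh random 16-byte salt,
# base64-encoded and returned as text rather than bytes.
salt = newsalt()
assert len(base64.b64decode(salt)) == 16
assert salt != newsalt()  # a new random salt on every call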