[work] C:\Users\-\GIT\asv>python setup.py test
running test
running egg_info
writing requirements to asv.egg-info\requires.txt
writing top-level names to asv.egg-info\top_level.txt
writing entry points to asv.egg-info\entry_points.txt
writing asv.egg-info\PKG-INFO
writing dependency_links to asv.egg-info\dependency_links.txt
reading manifest file 'asv.egg-info\SOURCES.txt'
reading manifest template 'MANIFEST.in'
warning: no files found matching 'astropy\tests\coveragerc'
no previously-included directories found matching 'docs\build'
no previously-included directories found matching 'build'
warning: no previously-included files matching '*.pyc' found anywhere in distribution
warning: no previously-included files matching '*.o' found anywhere in distribution
writing manifest file 'asv.egg-info\SOURCES.txt'
running build_ext
============================= test session starts =============================
platform win32 -- Python 3.5.1, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
rootdir: C:\Users\-\GIT\asv, inifile:
plugins: pep8-1.0.6
collected 80 items
test\test_benchmarks.py ....
test\test_compare.py .
test\test_conf.py ...
test\test_console.py ..
test\test_dev.py .....
test\test_environment.py ..X.......
test\test_graph.py .....
test\test_machine.py .
test\test_publish.py ..s.s.s.sFsFs.s
test\test_quickstart.py .
test\test_repo.py ..s.s....ssss
test\test_results.py ...
test\test_rm.py .
test\test_step_detect.py ....
test\test_subprocess.py ...
test\test_util.py ...
test\test_web.py ss
test\test_workflow.py ....
================================== FAILURES ===================================
_____________________ test_regression_first_commits[git] ______________________
generate_result_dir = <function generate_result_dir.<locals>._generate_result_dir at 0x00000198F0710620>
def test_regression_first_commits(generate_result_dir):
>   conf, repo, commits = generate_result_dir(5 * [1] + 10 * [10])
C:\Users\-\GIT\asv\test\test_publish.py:188:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
C:\Users\-\GIT\asv\test\test_publish.py:134: in _generate_result_dir
    tmpdir, dvcs_type, [("commit", i) for i in range(len(values))])
C:\Users\-\GIT\asv\test\tools.py:323: in generate_repo_from_ops
    "dummy_value": op[1],
C:\Users\-\GIT\asv\test\tools.py:238: in copy_template
    dvcs.add(dst_path)
C:\Users\-\GIT\asv\test\tools.py:116: in add
    self._run_git(['add', relpath(filename, self.path)])
C:\Users\-\GIT\asv\test\tools.py:95: in _run_git
    [self._git] + args, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
args = ['C:\\Program Files\\Git\\bin\\git.exe', 'add', 'setup.py']
valid_return_codes = (0,), timeout = 600, dots = True, display_error = True
shell = False, return_stderr = False, env = None
cwd = 'C:\\Users\\-\\AppData\\Local\\Temp\\pytest-of--\\pytest-1067\\test_regression_first_commits_0\\test_repo'
def check_output(args, valid_return_codes=(0,), timeout=600, dots=True,
                 display_error=True, shell=False, return_stderr=False,
                 env=None, cwd=None):
    """
    Runs the given command in a subprocess, raising ProcessError if it
    fails. Returns stdout as a string on success.
    Parameters
    ----------
    valid_return_codes : list, optional
        A list of return codes to ignore. Defaults to only ignoring zero.
        Setting to None ignores all return codes.
    timeout : number, optional
        Kill the process if it lasts longer than `timeout` seconds.
    dots : bool, optional
        If `True` (default) write a dot to the console to show
        progress as the subprocess outputs content. May also be
        a callback function to call (with no arguments) to indicate
        progress.
    display_error : bool, optional
        If `True` (default) display the stdout and stderr of the
        subprocess when the subprocess returns an error code.
    shell : bool, optional
        If `True`, run the command through the shell. Default is
        `False`.
    return_stderr : bool, optional
        If `True`, return both the (stdout, stderr, errcode) as a
        tuple.
    env : dict, optional
        Specify environment variables for the subprocess.
    cwd : str, optional
        Specify the current working directory to use when running the
        process.
    """
    def get_content(header=None):
        content = []
        if header is not None:
            content.append(header)
        content.extend([
            'STDOUT -------->',
            stdout[:-1],
            'STDERR -------->',
            stderr[:-1]
        ])
        return '\n'.join(content)
    if isinstance(args, six.string_types):
        args = [args]
    log.debug("Running '{0}'".format(' '.join(args)))
    posix = getattr(os, 'setpgid', None)
    if posix:
        # Run the subprocess in a separate process group, so that we
        # can kill it and all child processes it spawns e.g. on
        # timeouts. Note that subprocess.Popen will wait until exec()
        # before returning in parent process, so there is no race
        # condition in setting the process group vs. calls to os.killpg
        preexec_fn = lambda: os.setpgid(0, 0)
    else:
        preexec_fn = None
    proc = subprocess.Popen(
        args,
        close_fds=(not WIN),
        env=env,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        shell=shell,
        preexec_fn=preexec_fn,
        cwd=cwd)
    last_dot_time = time.time()
    stdout_chunks = []
    stderr_chunks = []
    is_timeout = False
    if WIN:
        start_time = [time.time()]
        was_timeout = [False]
        def stdout_reader_run():
            while True:
                c = proc.stdout.read(1)
                if not c:
                    break
                start_time[0] = time.time()
                stdout_chunks.append(c)
        def stderr_reader_run():
            while True:
                c = proc.stderr.read(1)
                if not c:
                    break
                start_time[0] = time.time()
                stderr_chunks.append(c)
        def watcher_run():
            while proc.returncode is None:
                time.sleep(0.1)
                if time.time() - start_time[0] > timeout:
                    was_timeout[0] = True
                    proc.terminate()
        watcher = threading.Thread(target=watcher_run)
        watcher.start()
        stdout_reader = threading.Thread(target=stdout_reader_run)
        stdout_reader.start()
        stderr_reader = threading.Thread(target=stderr_reader_run)
        stderr_reader.start()
        try:
            proc.wait()
        finally:
            if proc.returncode is None:
                proc.terminate()
                proc.wait()
            watcher.join()
            stderr_reader.join()
            stdout_reader.join()
        is_timeout = was_timeout[0]
    else:
        try:
            if posix:
                # Forward signals related to Ctrl-Z handling; the child
                # process is in a separate process group so it won't receive
                # these automatically from the terminal
                def sig_forward(signum, frame):
                    _killpg_safe(proc.pid, signum)
                    if signum == signal.SIGTSTP:
                        os.kill(os.getpid(), signal.SIGSTOP)
                signal.signal(signal.SIGTSTP, sig_forward)
                signal.signal(signal.SIGCONT, sig_forward)
            fds = {
                proc.stdout.fileno(): stdout_chunks,
                proc.stderr.fileno(): stderr_chunks
            }
            while proc.poll() is None:
                try:
                    rlist, wlist, xlist = select.select(
                        list(fds.keys()), [], [], timeout)
                except select.error as err:
                    if err.args[0] == errno.EINTR:
                        # interrupted by signal handler; try again
                        continue
                    raise
                if len(rlist) == 0:
                    # We got a timeout
                    is_timeout = True
                    break
                for f in rlist:
                    output = os.read(f, PIPE_BUF)
                    fds[f].append(output)
                if dots and time.time() - last_dot_time > 0.5:
                    if dots is True:
                        log.dot()
                    elif dots:
                        dots()
                    last_dot_time = time.time()
        finally:
            if posix:
                # Restore signal handlers
                signal.signal(signal.SIGTSTP, signal.SIG_DFL)
                signal.signal(signal.SIGCONT, signal.SIG_DFL)
    if proc.returncode is None:
        # Timeout or another exceptional condition occurred, and
        # the program is still running.
        if posix:
            # Terminate the whole process group
            _killpg_safe(proc.pid, signal.SIGTERM)
            for j in range(10):
                time.sleep(0.1)
                if proc.poll() is not None:
                    break
            else:
                # Didn't terminate within 1 sec, so kill it
                _killpg_safe(proc.pid, signal.SIGKILL)
        else:
            proc.terminate()
        proc.wait()
    proc.stdout.flush()
    proc.stderr.flush()
    stdout_chunks.append(proc.stdout.read())
    stderr_chunks.append(proc.stderr.read())
    stdout = b''.join(stdout_chunks)
    stderr = b''.join(stderr_chunks)
    stdout = stdout.decode('utf-8', 'replace')
    stderr = stderr.decode('utf-8', 'replace')
    if is_timeout:
        retcode = TIMEOUT_RETCODE
    else:
        retcode = proc.returncode
    if valid_return_codes is not None and retcode not in valid_return_codes:
        header = 'Error running {0}'.format(' '.join(args))
        if display_error:
            log.error(get_content(header))
        else:
            if log.is_debug_enabled():
                log.debug(get_content(header))
>       raise ProcessError(args, retcode, stdout, stderr)
E       asv.util.ProcessError: Command 'C:\Program Files\Git\bin\git.exe add setup.py' returned non-zero exit status 128
C:\Users\-\GIT\asv\asv\util.py:506: ProcessError
---------------------------- Captured stdout call -----------------------------
[2700.00%] · Error running C:\Program Files\Git\bin\git.exe add setup.py
STDOUT -------->
STDERR -------->
fatal: Unable to write new index file
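
(A minimal repro sketch, not part of the pytest output above: it re-runs the exact git command that fails here, outside the test suite, to check whether "fatal: Unable to write new index file" is specific to the pytest temp directory. The paths are copied from the traceback above and will differ on other machines.)

import subprocess

# Paths taken from the traceback above; adjust for your machine.
git = r"C:\Program Files\Git\bin\git.exe"
repo = (r"C:\Users\-\AppData\Local\Temp\pytest-of--"
        r"\pytest-1067\test_regression_first_commits_0\test_repo")

# Same command that asv.util.check_output ran; on the failing machine this
# returned exit code 128 with "fatal: Unable to write new index file" on stderr.
result = subprocess.run(
    [git, "add", "setup.py"],
    cwd=repo,
    stdout=subprocess.PIPE,
    stderr=subprocess.PIPE,
    universal_newlines=True,
)
print(result.returncode)
print(result.stderr)
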
_____________________ test_regression_parameterized[git] ______________________
generate_result_dir = <function generate_result_dir.<locals>._generate_result_dir at 0x00000198F1730D08>
def test_regression_parameterized(generate_result_dir):
    before = {"params": [["a", "b", "c", "d"]], "result": [5, 1, 1, 10]}
    after = {"params": [["a", "b", "c", "d"]], "result": [6, 1, 10, 1]}
>   conf, repo, commits = generate_result_dir(5 * [before] + 5 * [after])
C:\Users\-\GIT\asv\test\test_publish.py:214:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
C:\Users\-\GIT\asv\test\test_publish.py:134: in _generate_result_dir
    tmpdir, dvcs_type, [("commit", i) for i in range(len(values))])
C:\Users\-\GIT\asv\test\tools.py:323: in generate_repo_from_ops
    "dummy_value": op[1],
C:\Users\-\GIT\asv\test\tools.py:238: in copy_template
    dvcs.add(dst_path)
C:\Users\-\GIT\asv\test\tools.py:116: in add
    self._run_git(['add', relpath(filename, self.path)])
C:\Users\-\GIT\asv\test\tools.py:95: in _run_git
    [self._git] + args, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
args = ['C:\\Program Files\\Git\\bin\\git.exe', 'add', 'README']
valid_return_codes = (0,), timeout = 600, dots = True, display_error = True
shell = False, return_stderr = False, env = None
cwd = 'C:\\Users\\-\\AppData\\Local\\Temp\\pytest-of--\\pytest-1067\\test_regression_parameterized_0\\test_repo'
def check_output(args, valid_return_codes=(0,), timeout=600, dots=True,
                 display_error=True, shell=False, return_stderr=False,
                 env=None, cwd=None):
    """
    Runs the given command in a subprocess, raising ProcessError if it
    fails. Returns stdout as a string on success.
    Parameters
    ----------
    valid_return_codes : list, optional
        A list of return codes to ignore. Defaults to only ignoring zero.
        Setting to None ignores all return codes.
    timeout : number, optional
        Kill the process if it lasts longer than `timeout` seconds.
    dots : bool, optional
        If `True` (default) write a dot to the console to show
        progress as the subprocess outputs content. May also be
        a callback function to call (with no arguments) to indicate
        progress.
    display_error : bool, optional
        If `True` (default) display the stdout and stderr of the
        subprocess when the subprocess returns an error code.
    shell : bool, optional
        If `True`, run the command through the shell. Default is
        `False`.
    return_stderr : bool, optional
        If `True`, return both the (stdout, stderr, errcode) as a
        tuple.
    env : dict, optional
        Specify environment variables for the subprocess.
    cwd : str, optional
        Specify the current working directory to use when running the
        process.
    """
    def get_content(header=None):
        content = []
        if header is not None:
            content.append(header)
        content.extend([
            'STDOUT -------->',
            stdout[:-1],
            'STDERR -------->',
            stderr[:-1]
        ])
        return '\n'.join(content)
    if isinstance(args, six.string_types):
        args = [args]
    log.debug("Running '{0}'".format(' '.join(args)))
    posix = getattr(os, 'setpgid', None)
    if posix:
        # Run the subprocess in a separate process group, so that we
        # can kill it and all child processes it spawns e.g. on
        # timeouts. Note that subprocess.Popen will wait until exec()
        # before returning in parent process, so there is no race
        # condition in setting the process group vs. calls to os.killpg
        preexec_fn = lambda: os.setpgid(0, 0)
    else:
        preexec_fn = None
    proc = subprocess.Popen(
        args,
        close_fds=(not WIN),
        env=env,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        shell=shell,
        preexec_fn=preexec_fn,
        cwd=cwd)
    last_dot_time = time.time()
    stdout_chunks = []
    stderr_chunks = []
    is_timeout = False
    if WIN:
        start_time = [time.time()]
        was_timeout = [False]
        def stdout_reader_run():
            while True:
                c = proc.stdout.read(1)
                if not c:
                    break
                start_time[0] = time.time()
                stdout_chunks.append(c)
        def stderr_reader_run():
            while True:
                c = proc.stderr.read(1)
                if not c:
                    break
                start_time[0] = time.time()
                stderr_chunks.append(c)
        def watcher_run():
            while proc.returncode is None:
                time.sleep(0.1)
                if time.time() - start_time[0] > timeout:
                    was_timeout[0] = True
                    proc.terminate()
        watcher = threading.Thread(target=watcher_run)
        watcher.start()
        stdout_reader = threading.Thread(target=stdout_reader_run)
        stdout_reader.start()
        stderr_reader = threading.Thread(target=stderr_reader_run)
        stderr_reader.start()
        try:
            proc.wait()
        finally:
            if proc.returncode is None:
                proc.terminate()
                proc.wait()
            watcher.join()
            stderr_reader.join()
            stdout_reader.join()
        is_timeout = was_timeout[0]
    else:
        try:
            if posix:
                # Forward signals related to Ctrl-Z handling; the child
                # process is in a separate process group so it won't receive
                # these automatically from the terminal
                def sig_forward(signum, frame):
                    _killpg_safe(proc.pid, signum)
                    if signum == signal.SIGTSTP:
                        os.kill(os.getpid(), signal.SIGSTOP)
                signal.signal(signal.SIGTSTP, sig_forward)
                signal.signal(signal.SIGCONT, sig_forward)
            fds = {
                proc.stdout.fileno(): stdout_chunks,
                proc.stderr.fileno(): stderr_chunks
            }
            while proc.poll() is None:
                try:
                    rlist, wlist, xlist = select.select(
                        list(fds.keys()), [], [], timeout)
                except select.error as err:
                    if err.args[0] == errno.EINTR:
                        # interrupted by signal handler; try again
                        continue
                    raise
                if len(rlist) == 0:
                    # We got a timeout
                    is_timeout = True
                    break
                for f in rlist:
                    output = os.read(f, PIPE_BUF)
                    fds[f].append(output)
                if dots and time.time() - last_dot_time > 0.5:
                    if dots is True:
                        log.dot()
                    elif dots:
                        dots()
                    last_dot_time = time.time()
        finally:
            if posix:
                # Restore signal handlers
                signal.signal(signal.SIGTSTP, signal.SIG_DFL)
                signal.signal(signal.SIGCONT, signal.SIG_DFL)
    if proc.returncode is None:
        # Timeout or another exceptional condition occurred, and
        # the program is still running.
        if posix:
            # Terminate the whole process group
            _killpg_safe(proc.pid, signal.SIGTERM)
            for j in range(10):
                time.sleep(0.1)
                if proc.poll() is not None:
                    break
            else:
                # Didn't terminate within 1 sec, so kill it
                _killpg_safe(proc.pid, signal.SIGKILL)
        else:
            proc.terminate()
        proc.wait()
    proc.stdout.flush()
    proc.stderr.flush()
    stdout_chunks.append(proc.stdout.read())
    stderr_chunks.append(proc.stderr.read())
    stdout = b''.join(stdout_chunks)
    stderr = b''.join(stderr_chunks)
    stdout = stdout.decode('utf-8', 'replace')
    stderr = stderr.decode('utf-8', 'replace')
    if is_timeout:
        retcode = TIMEOUT_RETCODE
    else:
        retcode = proc.returncode
    if valid_return_codes is not None and retcode not in valid_return_codes:
        header = 'Error running {0}'.format(' '.join(args))
        if display_error:
            log.error(get_content(header))
        else:
            if log.is_debug_enabled():
                log.debug(get_content(header))
>       raise ProcessError(args, retcode, stdout, stderr)
E       asv.util.ProcessError: Command 'C:\Program Files\Git\bin\git.exe add README' returned non-zero exit status 128
C:\Users\-\GIT\asv\asv\util.py:506: ProcessError
---------------------------- Captured stdout call -----------------------------
[2700.00%] · Error running C:\Program Files\Git\bin\git.exe add README
STDOUT -------->
STDERR -------->
fatal: Unable to write new index file
======== 2 failed, 62 passed, 15 skipped, 1 xpassed in 2919.69 seconds ========
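
For context: the long source listing pytest repeats in both tracebacks is asv.util.check_output, asv's general subprocess wrapper; each failure is that wrapper raising ProcessError because "git add" exited with status 128. A minimal usage sketch based on the docstring shown above (the working-directory path is a placeholder, not taken from this run):

from asv import util

git = r"C:\Program Files\Git\bin\git.exe"

# Returns the captured stdout as a string on success; raises
# asv.util.ProcessError when the exit code is not in valid_return_codes,
# which is what both failing tests hit when 'git add' returned 128.
out = util.check_output(
    [git, "status", "--porcelain"],
    valid_return_codes=(0,),      # default: only exit code 0 counts as success
    timeout=600,                  # default: kill the process after 600 seconds
    cwd=r"C:\path\to\some\repo",  # placeholder working directory
)
print(out)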