Created
December 4, 2014 15:51
-
-
Save cdeil/45051376b76f9cf0c9f1 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
________________________________________________ ERROR collecting gammapy/detect/tests/test_test_statistic.py ________________________________________________ | |
gammapy/detect/tests/test_test_statistic.py:6: in <module> | |
> from ...detect import ts_image, TSMapCalculator | |
E ImportError: cannot import name 'ts_image' | |
========================================================================== FAILURES ========================================================================== | |
____________________________________________________________________ test_compute_ts_map _____________________________________________________________________ | |
@pytest.mark.skipif('not HAS_SCIPY') | |
def test_compute_ts_map(): | |
"""Minimal test of compute_ts_map""" | |
data = load_poisson_stats_image(extra_info=True) | |
kernel = Gaussian2DKernel(5).array | |
exposure = np.ones(data['counts'].shape) | |
> TS = compute_ts_map(data['counts'], data['background'], exposure, kernel) | |
gammapy/detect/tests/test_teststatistic.py:24: | |
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ | |
counts = array([[ 2., 1., 1., ..., 2., 3., 0.], | |
[ 1., 0., 1., ..., 0., 0., 1.], | |
[ 1., 1., 2., ..., 1... 3., 3., ..., 0., 1., 0.], | |
[ 0., 3., 3., ..., 0., 1., 0.], | |
[ 0., 0., 1., ..., 0., 3., 3.]]) | |
background = array([[ 1., 1., 1., ..., 1., 1., 1.], | |
[ 1., 1., 1., ..., 1., 1., 1.], | |
[ 1., 1., 1., ..., 1... 1., 1., ..., 1., 1., 1.], | |
[ 1., 1., 1., ..., 1., 1., 1.], | |
[ 1., 1., 1., ..., 1., 1., 1.]]) | |
exposure = array([[ 1., 1., 1., ..., 1., 1., 1.], | |
[ 1., 1., 1., ..., 1., 1., 1.], | |
[ 1., 1., 1., ..., 1... 1., 1., ..., 1., 1., 1.], | |
[ 1., 1., 1., ..., 1., 1., 1.], | |
[ 1., 1., 1., ..., 1., 1., 1.]]) | |
kernel = array([[ 7.16421173e-10, 1.56285292e-09, 3.27563894e-09, ..., | |
3.27563894e-09, 1.56285292e-09, 7.164....16421173e-10, 1.56285292e-09, 3.27563894e-09, ..., | |
3.27563894e-09, 1.56285292e-09, 7.16421173e-10]]) | |
flux = array([[ 8.00000000e+12, 3.00000000e+12, -1.00000000e+12, ..., | |
-8.00000000e+12, -7.00000000e+12, -7.000....10000000e+13, 7.00000000e+12, 7.00000000e+12, ..., | |
-6.00000000e+12, -7.00000000e+12, -6.00000000e+12]]) | |
method = 'root', optimizer = 'Brent', parallel = True, radius = 5, threshold = None, debug = False | |
def compute_ts_map(counts, background, exposure, kernel, flux=None, | |
method='root', optimizer='Brent', parallel=True, | |
radius=5, threshold=None, debug=False): | |
""" | |
Compute TS map using different methods. | |
Parameters | |
---------- | |
counts : array | |
Count map | |
background : array | |
Background map | |
exposure : array | |
Exposure map | |
kernel : astropy.convolution.core.Kernel2D | |
Source model kernel. | |
flux : float (None) | |
Flux map used as a starting value for the amplitude fit. | |
method : str ('root') | |
The following options are available: | |
* ``'root'`` (default) | |
Fit amplitude finding roots of the derivative of | |
the fit statistics. Described in Appendix A in Stewart (2009). | |
* ``'fit scipy'`` | |
Use scipy.optimize for fitting. | |
* ``'fit minuit'`` | |
Use minuit for fitting. | |
optimizer : str ('Brent') | |
Which optimizing algorithm to use from scipy. See | |
scipy.optimize.minimize_scalar for options. | |
parallel : bool (True) | |
Whether to use multiple cores for parallel processing. | |
radius : float (5) | |
Correlation radius of the tophat kernel for the initial | |
flux estimate. | |
threshold : float (None) | |
If the TS value corresponding to the initial flux estimate is not above | |
this threshold, the optimizing step is omitted to save computing time. | |
debug : bool (False) | |
Run function in debug mode which returns an additional fitted flux and | |
number of iterations map (see section `Returns`). | |
Returns | |
------- | |
TS : array | |
TS map. | |
amplitude : array (optional) | |
Flux amplitude map. | |
niter : array (optional) | |
Map of number of fit iterations. | |
""" | |
from scipy.ndimage.morphology import binary_erosion | |
assert counts.shape == background.shape | |
assert counts.shape == exposure.shape | |
if flux is None: | |
logging.info('Using correlation radius of {0:.1f} pix to estimate initial flux.'.format(radius)) | |
flux = disk_correlate((counts - background) / exposure, radius) / FLUX_FACTOR | |
else: | |
assert counts.shape == flux.shape | |
TS = np.zeros(counts.shape) | |
x_min, x_max = kernel.shape[1] // 2, counts.shape[1] - kernel.shape[1] // 2 | |
y_min, y_max = kernel.shape[0] // 2, counts.shape[0] - kernel.shape[0] // 2 | |
positions = product(range(x_min, x_max), range(y_min, y_max)) | |
# Positions where exposure == 0 and flux < 0 are not processed | |
mask = binary_erosion(exposure > 0, np.ones(kernel.shape)) | |
positions = [(i, j) for i, j in positions if mask[j][i] and flux[j][i] > 0] | |
wrap = partial(_ts_value, counts=counts, exposure=exposure, | |
background=background, kernel=kernel, flux=flux, | |
method=method, optimizer=optimizer, threshold=threshold, | |
debug=debug) | |
if parallel: | |
from multiprocessing import Pool, cpu_count | |
logging.info('Using {0} cores to compute TS map.'.format(cpu_count())) | |
pool = Pool() | |
> results = pool.map(wrap, positions) | |
gammapy/detect/teststatistics.py:174: | |
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ | |
self = <multiprocessing.pool.Pool object at 0x103765128> | |
func = functools.partial(<function _ts_value at 0x10772d2f0>, exposure=array([[ 1., 1., 1., ..., 1., 1., 1.], | |
[ 1... 1., 1., 1.], | |
[ 1., 1., 1., ..., 1., 1., 1.], | |
[ 1., 1., 1., ..., 1., 1., 1.]]), debug=False) | |
iterable = [(20, 47), (20, 48), (20, 49), (20, 50), (20, 51), (20, 52), ...], chunksize = None | |
def map(self, func, iterable, chunksize=None): | |
''' | |
Apply `func` to each element in `iterable`, collecting the results | |
in a list that is returned. | |
''' | |
> return self._map_async(func, iterable, mapstar, chunksize).get() | |
/opt/local/Library/Frameworks/Python.framework/Versions/3.4/lib/python3.4/multiprocessing/pool.py:260: | |
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ | |
self = <multiprocessing.pool.MapResult object at 0x10b112668>, timeout = None | |
def get(self, timeout=None): | |
self.wait(timeout) | |
if not self.ready(): | |
raise TimeoutError | |
if self._success: | |
return self._value | |
else: | |
> raise self._value | |
E AttributeError: 'numpy.ndarray' object has no attribute '_array' | |
/opt/local/Library/Frameworks/Python.framework/Versions/3.4/lib/python3.4/multiprocessing/pool.py:599: AttributeError | |
===================================== 1 failed, 228 passed, 30 skipped, 20 xfailed, 1 xpassed, 1 error in 31.69 seconds ====================================== |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment