Test executed on commit 4bc3c2ec610c16581782131d7b3723c7b0646b4b
Report generated on 19-Aug-2016 at 04:10:26
Platform | Linux-3.13.0-042stab111.12-x86_64-with-debian-jessie-sid
Python | 2.7.11
0 tests ran to completion in 121.02 seconds.
0 passed, 21 skipped, 0 failed, 6 errors (every error occurred in the class-scoped `setup` fixture).
0 expected failures, 0 unexpected passes.
Result | Test | Duration | Links
---|---|---|---
Skipped | lib.linux-x86_64-2.7/tardis/tests/integration_tests/test_integration.py::TestIntegration::()::test_j_estimators[/tmp/tardis-test-CQcka6/lib.linux-x86_64-2.7/tardis/tests/integration_tests/at]::setup | 0.00 | |
test_integration.py:88: Skipped: Introduction of HDF mechanism.
Skipped | lib.linux-x86_64-2.7/tardis/tests/integration_tests/test_integration.py::TestIntegration::()::test_j_blue_estimators[/tmp/tardis-test-CQcka6/lib.linux-x86_64-2.7/tardis/tests/integration_tests/at]::setup | 0.00 | |
test_integration.py:94: Skipped: Introduction of HDF mechanism.
Skipped | lib.linux-x86_64-2.7/tardis/tests/integration_tests/test_integration.py::TestIntegration::()::test_last_line_interactions[/tmp/tardis-test-CQcka6/lib.linux-x86_64-2.7/tardis/tests/integration_tests/at]::setup | 0.00 | |
test_integration.py:104: Skipped: Introduction of HDF mechanism.
Skipped | lib.linux-x86_64-2.7/tardis/tests/integration_tests/test_integration.py::TestIntegration::()::test_nubar_estimators[/tmp/tardis-test-CQcka6/lib.linux-x86_64-2.7/tardis/tests/integration_tests/at]::setup | 0.00 | |
test_integration.py:122: Skipped: Introduction of HDF mechanism.
Skipped | lib.linux-x86_64-2.7/tardis/tests/integration_tests/test_integration.py::TestIntegration::()::test_ws[/tmp/tardis-test-CQcka6/lib.linux-x86_64-2.7/tardis/tests/integration_tests/at]::setup | 0.00 | |
test_integration.py:128: Skipped: Introduction of HDF mechanism.
Skipped | lib.linux-x86_64-2.7/tardis/tests/integration_tests/test_integration.py::TestIntegration::()::test_luminosity_inner[/tmp/tardis-test-CQcka6/lib.linux-x86_64-2.7/tardis/tests/integration_tests/at]::setup | 0.00 | |
test_integration.py:134: Skipped: Introduction of HDF mechanism.
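The repeated skip entries above come from pytest skip markers in test_integration.py at the quoted line numbers (88, 94, 104, ...). A minimal sketch of the assumed form (the decorator placement and test body here are illustrative, not the actual TARDIS source):

```python
import pytest

class TestIntegration(object):
    # Assumed: each per-quantity comparison was disabled pending the new
    # HDF-based reference-data mechanism. pytest records the file and line
    # of the skip, which is what the report rows above show.
    @pytest.mark.skip(reason="Introduction of HDF mechanism.")
    def test_j_estimators(self):
        pass
```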
Error | lib.linux-x86_64-2.7/tardis/tests/integration_tests/test_integration.py::TestIntegration::()::test_spectrum[/tmp/tardis-test-CQcka6/lib.linux-x86_64-2.7/tardis/tests/integration_tests/at]::setup | 17.04 | |

self = <class 'tardis.tests.integration_tests.test_integration.TestIntegration'>
request = <SubRequest 'setup' for <Function 'test_spectrum[/tmp/tardis-test-CQcka6/lib.linux-x86_64-2.7/tardis/tests/integration_tests/at]'>>
reference = <class 'pandas.io.pytables.HDFStore'> File path: /home/karandesai96/slow-tests...rtual/wavelength series (shape->[10000])
data_path = {'atom_data_url': 'http://opensupernova.org/~karandesai96/atom_data', 'config_dirpath': '/tmp/tardis-test-CQcka6/lib.l.../home/karandesai96/slow-tests/bigref/4bc3c2e', 'reference_filepath': '/home/karandesai96/slow-tests/bigref/at.h5', ...}

    @classmethod
    @pytest.fixture(scope="class", autouse=True)
    def setup(self, request, reference, data_path):
        """
        This method does initial setup of creating configuration and
        performing a single run of integration test.
        """
        # The last component in dirpath can be extracted as name of setup.
        self.name = data_path['setup_name']
        self.config_file = os.path.join(data_path['config_dirpath'], "config.yml")

        # A quick hack to download and cache atom data according to requirement
        # of setup. This will be cleaned up after new atomic data is available.
        # Read the name of atomic data required:
        atom_data_name = yaml.load(open(self.config_file))['atom_data']

        # Download and cache the atom data file
        atom_data_filepath = download_file("{url}/{name}".format(
>           url=data_path['atom_data_url'], name=atom_data_name), cache=True
        )

tardis/tests/integration_tests/test_integration.py:43:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

remote_url = 'http://opensupernova.org/~karandesai96/atom_data/kurucz_cd23_chianti_H_He.h5'
cache = True, show_progress = True, timeout = 3.0

    def download_file(remote_url, cache=False, show_progress=True, timeout=None):
        """
        Accepts a URL, downloads and optionally caches the result returning
        the filename, with a name determined by the file's MD5 hash. If
        ``cache=True`` and the file is present in the cache, just returns
        the filename.
        """
        [... body of astropy's download_file elided (~100 lines); the request
         times out and control falls through to the final exception handler ...]
        except socket.timeout as e:
            # this isn't supposed to happen, but occasionally a socket.timeout gets
            # through. It's supposed to be caught in `urllib2` and raised in this
            # way, but for some reason in mysterious circumstances it doesn't. So
            # we'll just re-raise it here instead
>           raise urllib.error.URLError(e)
E           URLError: <urlopen error timed out>

/home/karandesai96/anaconda2/envs/tardis/lib/python2.7/site-packages/astropy/utils/data.py:1029: URLError
Skipped | lib.linux-x86_64-2.7/tardis/tests/integration_tests/test_integration.py::TestIntegration::()::test_montecarlo_properties[/tmp/tardis-test-CQcka6/lib.linux-x86_64-2.7/tardis/tests/integration_tests/at]::setup | 0.00 | |
test_integration.py:176: Skipped: Introduction of HDF mechanism.
Error | lib.linux-x86_64-2.7/tardis/tests/integration_tests/test_integration.py::TestIntegration::()::test_shell_temperature[/tmp/tardis-test-CQcka6/lib.linux-x86_64-2.7/tardis/tests/integration_tests/at]::setup | 0.00 | |
Identical `URLError: <urlopen error timed out>` traceback as for test_spectrum[...at] above: the class-scoped `setup` fixture fails again with remote_url = 'http://opensupernova.org/~karandesai96/atom_data/kurucz_cd23_chianti_H_He.h5'.
Skipped | lib.linux-x86_64-2.7/tardis/tests/integration_tests/test_integration.py::TestIntegration::()::test_j_estimators[/tmp/tardis-test-CQcka6/lib.linux-x86_64-2.7/tardis/tests/integration_tests/artis]::setup | 0.00 | |
test_integration.py:88: Skipped: Introduction of HDF mechanism.
Skipped | lib.linux-x86_64-2.7/tardis/tests/integration_tests/test_integration.py::TestIntegration::()::test_j_blue_estimators[/tmp/tardis-test-CQcka6/lib.linux-x86_64-2.7/tardis/tests/integration_tests/artis]::setup | 0.00 | |
test_integration.py:94: Skipped: Introduction of HDF mechanism.
Skipped | lib.linux-x86_64-2.7/tardis/tests/integration_tests/test_integration.py::TestIntegration::()::test_last_line_interactions[/tmp/tardis-test-CQcka6/lib.linux-x86_64-2.7/tardis/tests/integration_tests/artis]::setup | 0.00 | |
test_integration.py:104: Skipped: Introduction of HDF mechanism.
Skipped | lib.linux-x86_64-2.7/tardis/tests/integration_tests/test_integration.py::TestIntegration::()::test_nubar_estimators[/tmp/tardis-test-CQcka6/lib.linux-x86_64-2.7/tardis/tests/integration_tests/artis]::setup | 0.00 | |
test_integration.py:122: Skipped: Introduction of HDF mechanism.
Skipped | lib.linux-x86_64-2.7/tardis/tests/integration_tests/test_integration.py::TestIntegration::()::test_ws[/tmp/tardis-test-CQcka6/lib.linux-x86_64-2.7/tardis/tests/integration_tests/artis]::setup | 0.00 | |
test_integration.py:128: Skipped: Introduction of HDF mechanism.
Skipped | lib.linux-x86_64-2.7/tardis/tests/integration_tests/test_integration.py::TestIntegration::()::test_luminosity_inner[/tmp/tardis-test-CQcka6/lib.linux-x86_64-2.7/tardis/tests/integration_tests/artis]::setup | 0.00 | |
test_integration.py:134: Skipped: Introduction of HDF mechanism.
Error | lib.linux-x86_64-2.7/tardis/tests/integration_tests/test_integration.py::TestIntegration::()::test_spectrum[/tmp/tardis-test-CQcka6/lib.linux-x86_64-2.7/tardis/tests/integration_tests/artis]::setup | 37.06 | |
Same `URLError: <urlopen error timed out>` traceback as for the 'at' setup above, except remote_url = 'http://opensupernova.org/~karandesai96/atom_data/kurucz_atom_chianti_many.h5' and reference_filepath = '/home/karandesai96/slow-tests/bigref/artis.h5'.
Skipped | lib.linux-x86_64-2.7/tardis/tests/integration_tests/test_integration.py::TestIntegration::()::test_montecarlo_properties[/tmp/tardis-test-CQcka6/lib.linux-x86_64-2.7/tardis/tests/integration_tests/artis]::setup | 0.00 | |
test_integration.py:176: Skipped: Introduction of HDF mechanism.
Error | lib.linux-x86_64-2.7/tardis/tests/integration_tests/test_integration.py::TestIntegration::()::test_shell_temperature[/tmp/tardis-test-CQcka6/lib.linux-x86_64-2.7/tardis/tests/integration_tests/artis]::setup | 0.00 | |
Identical `URLError: <urlopen error timed out>` traceback as for test_spectrum[...artis] above (remote_url = 'http://opensupernova.org/~karandesai96/atom_data/kurucz_atom_chianti_many.h5').
Skipped | lib.linux-x86_64-2.7/tardis/tests/integration_tests/test_integration.py::TestIntegration::()::test_j_estimators[/tmp/tardis-test-CQcka6/lib.linux-x86_64-2.7/tardis/tests/integration_tests/w7]::setup | 0.00 | |
test_integration.py:88: Skipped: Introduction of HDF mechanism.
Skipped | lib.linux-x86_64-2.7/tardis/tests/integration_tests/test_integration.py::TestIntegration::()::test_j_blue_estimators[/tmp/tardis-test-CQcka6/lib.linux-x86_64-2.7/tardis/tests/integration_tests/w7]::setup | 0.00 | |
test_integration.py:94: Skipped: Introduction of HDF mechanism.
Skipped | lib.linux-x86_64-2.7/tardis/tests/integration_tests/test_integration.py::TestIntegration::()::test_last_line_interactions[/tmp/tardis-test-CQcka6/lib.linux-x86_64-2.7/tardis/tests/integration_tests/w7]::setup | 0.00 | |
test_integration.py:104: Skipped: Introduction of HDF mechanism.
Skipped | lib.linux-x86_64-2.7/tardis/tests/integration_tests/test_integration.py::TestIntegration::()::test_nubar_estimators[/tmp/tardis-test-CQcka6/lib.linux-x86_64-2.7/tardis/tests/integration_tests/w7]::setup | 0.00 | |
test_integration.py:122: Skipped: Introduction of HDF mechanism.
Skipped | lib.linux-x86_64-2.7/tardis/tests/integration_tests/test_integration.py::TestIntegration::()::test_ws[/tmp/tardis-test-CQcka6/lib.linux-x86_64-2.7/tardis/tests/integration_tests/w7]::setup | 0.00 | |
test_integration.py:128: Skipped: Introduction of HDF mechanism.
Skipped | lib.linux-x86_64-2.7/tardis/tests/integration_tests/test_integration.py::TestIntegration::()::test_luminosity_inner[/tmp/tardis-test-CQcka6/lib.linux-x86_64-2.7/tardis/tests/integration_tests/w7]::setup | 0.00 | |
test_integration.py:134: Skipped: Introduction of HDF mechanism.
Error | lib.linux-x86_64-2.7/tardis/tests/integration_tests/test_integration.py::TestIntegration::()::test_spectrum[/tmp/tardis-test-CQcka6/lib.linux-x86_64-2.7/tardis/tests/integration_tests/w7]::setup | 21.88 | |
Same `URLError: <urlopen error timed out>` traceback as for the 'at' setup above, with remote_url = 'http://opensupernova.org/~karandesai96/atom_data/kurucz_cd23_chianti_H_He.h5' and reference_filepath = '/home/karandesai96/slow-tests/bigref/w7.h5'.
Skipped | lib.linux-x86_64-2.7/tardis/tests/integration_tests/test_integration.py::TestIntegration::()::test_montecarlo_properties[/tmp/tardis-test-CQcka6/lib.linux-x86_64-2.7/tardis/tests/integration_tests/w7]::setup | 0.00 | |
test_integration.py:176: Skipped: Introduction of HDF mechanism.
Error | lib.linux-x86_64-2.7/tardis/tests/integration_tests/test_integration.py::TestIntegration::()::test_shell_temperature[/tmp/tardis-test-CQcka6/lib.linux-x86_64-2.7/tardis/tests/integration_tests/w7]::setup | 0.00 | |
Identical `URLError: <urlopen error timed out>` traceback as for test_spectrum[...w7] above.