Describe the bug
Just noticed when running the HAL Example Notebook from the threeML docs with pandas=3.0.0 (PyTables=3.10.2) that this results in a TypeError: a bytes-like object is required, not 'str'. This does not happen when enforcing pandas<3.0.0.
I used this conda env.
Is there a chance that this stems from HAL and can be fixed here? If not feel free to close this issue anytime :)
Thanks for looking into this!
To Reproduce
import requests
import shutil
import os
from hawc_hal import HAL, HealpixConeROI
def get_hawc_file(filename, odir="./", overwrite=False):
    """Download ``filename`` from the public HAWC Crab dataset into ``odir``.

    The download is skipped when the file already exists locally, unless
    ``overwrite`` is True.

    Parameters
    ----------
    filename : str
        Name of the file on the HAWC public data server.
    odir : str
        Output directory for the downloaded file (default: current dir).
    overwrite : bool
        Re-download even if the file already exists.

    Returns
    -------
    str
        Local path of the (possibly freshly downloaded) file.
    """
    local_path = os.path.join(odir, filename)
    if overwrite or not os.path.exists(local_path):
        url = "https://data.hawc-observatory.org/datasets/crab_data/public_data/crab_2017/"
        # NOTE(review): verify=False disables TLS certificate checking —
        # tolerable in a one-off reproduction script, not in production code.
        req = requests.get(url + filename, verify=False, stream=True)
        req.raise_for_status()  # fail loudly instead of saving an error page
        req.raw.decode_content = True  # transparently decompress gzip/deflate
        with open(local_path, "wb") as f:
            shutil.copyfileobj(req.raw, f)
    return local_path
# Fetch the public Crab maptree and detector-response files (cached locally).
odir = "./"
maptree = get_hawc_file("HAWC_9bin_507days_crab_data.hd5", odir)
response = get_hawc_file("HAWC_9bin_507days_crab_response.hd5", odir)

# Region of interest centered on the Crab nebula.
ra_crab, dec_crab = 83.63, 22.02
data_radius = 3.0  # in degree
model_radius = 8.0  # in degree
roi = HealpixConeROI(
    ra=ra_crab,
    dec=dec_crab,
    data_radius=data_radius,
    model_radius=model_radius,
)

# Instantiate the HAL plugin; this is the call that raises the reported
# TypeError under pandas>=3.0.0.
hawc = HAL("HAWC", maptree, response, roi, flat_sky_pixels_size=0.1, set_transits=None)
Log files
| TypeError Traceback (most recent call last)
| Cell In[3], line 23
| 18 roi = HealpixConeROI(
| 19 data_radius=data_radius, model_radius=model_radius, ra=ra_crab, dec=dec_crab
| 20 )
| 22 # Instance the plugin
| ---> 23 hawc = HAL("HAWC", maptree, response, roi, flat_sky_pixels_size=0.1, set_transits=None)
| 25 # Use from bin 1 to bin 9
| 26 hawc.set_active_measurements(1, 9)
|
| File ~/miniconda3/envs/test_env/lib/python3.11/site-packages/hawc_hal/HAL.py:92, in HAL.__init__(self, name, maptree, response_file, roi, flat_sky_pixels_size, n_workers, set_transits)
| 87 self._flat_sky_projection = self._roi.get_flat_sky_projection(
| 88 self.flat_sky_pixels_size
| 89 )
| 91 # Read map tree (data)
| ---> 92 self._maptree = map_tree_factory(
| 93 maptree, roi=self._roi, n_transits=n_transits, n_workers=self._n_workers
| 94 )
| 96 # Read detector response_file
| 97 self._response = hawc_response_factory(
| 98 response_file_name=response_file, n_workers=self._n_workers
| 99 )
|
| File ~/miniconda3/envs/test_env/lib/python3.11/site-packages/hawc_hal/maptree/map_tree.py:28, in map_tree_factory(map_tree_file, roi, n_workers, n_transits)
| 25 if os.path.splitext(map_tree_file)[-1] == ".root":
| 26 return MapTree.from_root_file(map_tree_file, roi, n_transits, n_workers)
| ---> 28 return MapTree.from_hdf5(map_tree_file, roi, n_transits)
|
| File ~/miniconda3/envs/test_env/lib/python3.11/site-packages/hawc_hal/maptree/map_tree.py:50, in MapTree.from_hdf5(cls, map_tree_file, roi, n_transits)
| 47 @classmethod
| 48 # def from_hdf5(cls, map_tree_file, roi):
| 49 def from_hdf5(cls, map_tree_file, roi, n_transits):
| ---> 50 data_analysis_bins, transits = from_hdf5_file(map_tree_file, roi, n_transits)
| 52 return cls(data_analysis_bins, roi, transits)
|
| File ~/miniconda3/envs/test_env/lib/python3.11/site-packages/hawc_hal/maptree/from_hdf5_file.py:30, in from_hdf5_file(map_tree_file, roi, transits)
| 27 # Read the data frames contained in the file
| 28 with Serialization(map_tree_file) as serializer:
| ---> 30 analysis_bins_df, _ = serializer.retrieve_pandas_object("/analysis_bins")
| 31 meta_df, _ = serializer.retrieve_pandas_object("/analysis_bins_meta")
| 32 roimap, roi_meta = serializer.retrieve_pandas_object("/ROI")
|
| File ~/miniconda3/envs/test_env/lib/python3.11/site-packages/hawc_hal/serialize.py:45, in Serialization.retrieve_pandas_object(self, path)
| 42 def retrieve_pandas_object(self, path):
| 43
| 44 # Get the metadata
| ---> 45 metadata = self._store.get_storer(path).attrs.metadata
| 47 # Get the object
| 48 obj = self._store.get(path)
|
| File ~/miniconda3/envs/test_env/lib/python3.11/site-packages/pandas/io/pytables.py:1684, in HDFStore.get_storer(self, key)
| 1681 if group is None:
| 1682 raise KeyError(f"No object named {key} in the file")
| -> 1684 s = self._create_storer(group)
| 1685 s.infer_axes()
| 1686 return s
|
| File ~/miniconda3/envs/test_env/lib/python3.11/site-packages/pandas/io/pytables.py:1866, in HDFStore._create_storer(self, group, format, value, encoding, errors)
| 1863 pt += "_table"
| 1865 # a storer node
| -> 1866 if "table" not in pt:
| 1867 _STORER_MAP = {"series": SeriesFixed, "frame": FrameFixed}
| 1868 try:
|
| TypeError: a bytes-like object is required, not 'str'
Desktop (please complete the following information):
Describe the bug
Just noticed when running the HAL Example Notebook from the
threeML docs with pandas=3.0.0 (PyTables=3.10.2) that this results in a TypeError: a bytes-like object is required, not 'str'. This does not happen when enforcing pandas<3.0.0. I used this conda env.
Is there a chance that this stems from
HAL and can be fixed here? If not feel free to close this issue anytime :) Thanks for looking into this!
To Reproduce
Log files
Desktop (please complete the following information):