Create an NWB DynamicTable with a column of object references
{
"cells": [
{
"cell_type": "markdown",
"id": "48be29fd-2005-4218-9901-39fc158352f7",
"metadata": {},
"source": [
"## Create an NWB file with a DynamicTable with a column that contains references to container objects elsewhere in the file."
]
},
{
"cell_type": "code",
"execution_count": 1,
"id": "bb08d841-3150-4c6b-ae0e-55d6166a3fdd",
"metadata": {
"tags": []
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
" test_column\n",
"id \n",
"0 test_ts1 pynwb.base.TimeSeries at 0x4357943120...\n",
"1 test_ts2 pynwb.base.TimeSeries at 0x5233163520...\n"
]
},
{
"data": {
"text/plain": [
"root pynwb.file.NWBFile at 0x5232280720\n",
"Fields:\n",
" acquisition: {\n",
" test_dt <class 'hdmf.common.table.DynamicTable'>,\n",
" test_ts1 <class 'pynwb.base.TimeSeries'>,\n",
" test_ts2 <class 'pynwb.base.TimeSeries'>\n",
" }\n",
" file_create_date: [datetime.datetime(2023, 5, 30, 13, 6, 11, 52009, tzinfo=tzlocal())]\n",
" identifier: identifier\n",
" session_description: session_description\n",
" session_start_time: 2023-05-30 20:06:11.051719+00:00\n",
" timestamps_reference_time: 2023-05-30 20:06:11.051719+00:00"
]
},
"execution_count": 1,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"from pynwb import NWBFile, NWBHDF5IO, TimeSeries\n",
"from hdmf.common import DynamicTable\n",
"import datetime\n",
"\n",
"# create a test NWB file\n",
"nwbfile = NWBFile(\n",
" session_description=\"session_description\",\n",
" identifier=\"identifier\",\n",
" session_start_time=datetime.datetime.now(datetime.timezone.utc),\n",
")\n",
"\n",
"# create two TimeSeries and add them to the file\n",
"ts1 = TimeSeries(\n",
" name=\"test_ts1\",\n",
" data=[1, 2, 3],\n",
" unit=\"unit\",\n",
" rate=1.0\n",
")\n",
"ts2 = TimeSeries(\n",
" name=\"test_ts2\",\n",
" data=[1, 2, 3],\n",
" unit=\"unit\",\n",
" rate=1.0\n",
")\n",
"nwbfile.add_acquisition(ts1)\n",
"nwbfile.add_acquisition(ts2)\n",
"\n",
"# create an empty DynamicTable and add a column\n",
"dt = DynamicTable(\n",
" name=\"test_dt\",\n",
" description=\"test\"\n",
")\n",
"dt.add_column(\n",
" name=\"test_column\",\n",
" description=\"test\"\n",
")\n",
"\n",
"# add two rows to the DynamicTable and set the values to be the two TimeSeries we created earlier\n",
"dt.add_row(test_column=ts1)\n",
"dt.add_row(test_column=ts2)\n",
"\n",
"print(dt.to_dataframe())\n",
"\n",
"# add the DynamicTable to the file\n",
"nwbfile.add_acquisition(dt)\n",
"\n",
"nwbfile"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "f85a41b5-0537-4740-b71c-64b5e3b45897",
"metadata": {
"tags": []
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"root pynwb.file.NWBFile at 0x5233397040\n",
"Fields:\n",
" acquisition: {\n",
" test_dt <class 'hdmf.common.table.DynamicTable'>,\n",
" test_ts1 <class 'pynwb.base.TimeSeries'>,\n",
" test_ts2 <class 'pynwb.base.TimeSeries'>\n",
" }\n",
" file_create_date: [datetime.datetime(2023, 5, 30, 13, 6, 11, 52009, tzinfo=tzoffset(None, -25200))]\n",
" identifier: identifier\n",
" session_description: session_description\n",
" session_start_time: 2023-05-30 20:06:11.051719+00:00\n",
" timestamps_reference_time: 2023-05-30 20:06:11.051719+00:00\n",
"\n",
"test_ts1 pynwb.base.TimeSeries at 0x5439904384\n",
"Fields:\n",
" comments: no comments\n",
" conversion: 1.0\n",
" data: <HDF5 dataset \"data\": shape (3,), type \"<i8\">\n",
" description: no description\n",
" offset: 0.0\n",
" rate: 1.0\n",
" resolution: -1.0\n",
" starting_time: 0.0\n",
" starting_time_unit: seconds\n",
" unit: unit\n",
"\n",
"test_ts2 pynwb.base.TimeSeries at 0x5439905392\n",
"Fields:\n",
" comments: no comments\n",
" conversion: 1.0\n",
" data: <HDF5 dataset \"data\": shape (3,), type \"<i8\">\n",
" description: no description\n",
" offset: 0.0\n",
" rate: 1.0\n",
" resolution: -1.0\n",
" starting_time: 0.0\n",
" starting_time_unit: seconds\n",
" unit: unit\n",
"\n",
" test_column\n",
"id \n",
"0 test_ts1 pynwb.base.TimeSeries at 0x5439904384...\n",
"1 test_ts2 pynwb.base.TimeSeries at 0x5439905392...\n"
]
}
],
"source": [
"filename = \"test_column_of_containers.nwb\"\n",
"\n",
"# write the file to disk\n",
"with NWBHDF5IO(filename, \"w\") as io:\n",
" io.write(nwbfile)\n",
"\n",
"# read the file from disk and inspect its contents\n",
"with NWBHDF5IO(filename, \"r\") as io:\n",
" nwbfile = io.read()\n",
" print(nwbfile)\n",
" print(nwbfile.acquisition[\"test_ts1\"])\n",
" print(nwbfile.acquisition[\"test_ts2\"])\n",
" print(nwbfile.acquisition[\"test_dt\"].to_dataframe())\n",
" \n",
" assert nwbfile.acquisition[\"test_ts1\"] is nwbfile.acquisition[\"test_dt\"][\"test_column\"][0]\n",
" assert nwbfile.acquisition[\"test_ts2\"] is nwbfile.acquisition[\"test_dt\"][\"test_column\"][1]"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "a2353bf4-7ceb-4e31-b5b8-e38463a0f3a2",
"metadata": {
"tags": []
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"<HDF5 object reference>\n"
]
}
],
"source": [
"# confirm using h5py that the HDF5 dataset contains object references\n",
"import h5py\n",
"f = h5py.File(filename, \"r\")\n",
"print(f[\"acquisition/test_dt/test_column\"][0])"
]
}
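{
"cell_type": "markdown",
"id": "deref-object-references-md",
"metadata": {},
"source": [
"On disk, the column is stored as an HDF5 dataset of object references. The cell below is a minimal follow-up sketch (not part of the original example): it dereferences those references with plain `h5py`, assuming the standard NWB layout in which the acquisition objects live under `/acquisition`."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "deref-object-references-code",
"metadata": {},
"outputs": [],
"source": [
"# minimal sketch (added): resolve each stored object reference back to its group\n",
"import h5py\n",
"\n",
"with h5py.File(filename, \"r\") as f:\n",
"    column = f[\"acquisition/test_dt/test_column\"]\n",
"    for ref in column:\n",
"        assert isinstance(ref, h5py.Reference)\n",
"        target = f[ref]  # dereference to the underlying HDF5 group\n",
"        print(target.name)  # expected: /acquisition/test_ts1 and /acquisition/test_ts2"
]
}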
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.9"
}
},
"nbformat": 4,
"nbformat_minor": 5
}