Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
33 commits
Select commit Hold shift + click to select a range
f88dc2d
First draft
h-mayorquin Apr 18, 2026
cd52ef3
second iteration
h-mayorquin Apr 18, 2026
f52c39c
using cache and consistent use of set_probe_groups
h-mayorquin Apr 19, 2026
594122c
using cache and consistent use of set_probe_groups
h-mayorquin Apr 19, 2026
8bdcdc9
recover cophy semantics
h-mayorquin Apr 19, 2026
d499bc0
add docstring
h-mayorquin Apr 19, 2026
f15e5c6
just copies
h-mayorquin Apr 19, 2026
e1b7103
remove comments
h-mayorquin Apr 19, 2026
5d1f57d
rename
h-mayorquin Apr 19, 2026
c02e760
more backwards compatibility
h-mayorquin Apr 19, 2026
591be63
testing
h-mayorquin Apr 19, 2026
ed22a73
behavior for 0 channel recording
h-mayorquin Apr 20, 2026
565d775
more fixes
h-mayorquin Apr 20, 2026
5ef0221
second fix
h-mayorquin Apr 20, 2026
07f4e9e
remove non-overlapping redundant check
h-mayorquin Apr 20, 2026
e6129bc
another fallback
h-mayorquin Apr 20, 2026
b3ab028
Merge branch 'main' into probe_refactoring
h-mayorquin Apr 20, 2026
6eb09a6
propagate to children
h-mayorquin Apr 20, 2026
1f4afee
fix tests
h-mayorquin Apr 20, 2026
eb07eea
fixes
h-mayorquin Apr 20, 2026
cc9e9b0
another numpy fix
h-mayorquin Apr 20, 2026
a7db1e3
remove cache
h-mayorquin Apr 21, 2026
4f96a31
Merge branch 'main' into probe_refactoring
h-mayorquin Apr 21, 2026
20b535f
Refactor with wiring
h-mayorquin Apr 21, 2026
b1de969
go further
h-mayorquin Apr 21, 2026
56761c8
drop deep cpy
h-mayorquin Apr 21, 2026
999a5c4
fix
h-mayorquin Apr 21, 2026
5119d53
refactor
h-mayorquin Apr 21, 2026
bdf9d24
serialization fix
h-mayorquin Apr 21, 2026
8b642d9
more fixes
h-mayorquin Apr 21, 2026
e801124
add to main properties
h-mayorquin Apr 22, 2026
ffee07c
cleanup
h-mayorquin Apr 22, 2026
f48b88d
fix docstring
h-mayorquin Apr 22, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -220,7 +220,7 @@ docs = [
"huggingface_hub", # For automated curation

# for release we need pypi, so this needs to be commented
"probeinterface @ git+https://github.com/SpikeInterface/probeinterface.git", # We always build from the latest version
"probeinterface @ git+https://github.com/SpikeInterface/probeinterface.git",
"neo @ git+https://github.com/NeuralEnsemble/python-neo.git", # We always build from the latest version
]

Expand Down
56 changes: 52 additions & 4 deletions src/spikeinterface/core/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -574,6 +574,9 @@ def to_dict(
folder_metadata = Path(folder_metadata).resolve().absolute().relative_to(relative_to)
dump_dict["folder_metadata"] = str(folder_metadata)

if getattr(self, "_probegroup", None) is not None:
dump_dict["probegroup"] = self._probegroup.to_dict(array_as_list=True)

return dump_dict

@staticmethod
Expand Down Expand Up @@ -607,12 +610,11 @@ def from_dict(dictionary: dict, base_folder: Path | str | None = None) -> "BaseE
return extractor

def load_metadata_from_folder(self, folder_metadata):
# hack to load probe for recording
folder_metadata = Path(folder_metadata)

self._extra_metadata_from_folder(folder_metadata)

# load properties
# load properties first so that `_extra_metadata_from_folder` can see
# restored state like the `wiring` property and skip re-running
# `set_probegroup` when the mapping is already in place.
prop_folder = folder_metadata / "properties"
if prop_folder.is_dir():
for prop_file in prop_folder.iterdir():
Expand All @@ -621,6 +623,8 @@ def load_metadata_from_folder(self, folder_metadata):
key = prop_file.stem
self.set_property(key, values)

self._extra_metadata_from_folder(folder_metadata)

def save_metadata_to_folder(self, folder_metadata):
self._extra_metadata_to_folder(folder_metadata)

Expand Down Expand Up @@ -1155,9 +1159,53 @@ def _load_extractor_from_dict(dic) -> "BaseExtractor":
for k, v in dic["properties"].items():
extractor.set_property(k, v)

if "probegroup" in dic:
from probeinterface import ProbeGroup

probegroup = ProbeGroup.from_dict(dic["probegroup"])
# The `wiring` per-channel property was restored above by the standard
# property-load loop; we just attach the probegroup object.
extractor._probegroup = probegroup
elif "contact_vector" in dic.get("properties", {}):
_restore_probegroup_from_legacy_contact_vector(extractor)

return extractor


def _restore_probegroup_from_legacy_contact_vector(extractor) -> None:
    """
    Rebuild a `ProbeGroup` for extractors saved with the legacy `contact_vector` layout.

    Recordings written before the probegroup refactor serialized the probe as a
    structured numpy array in the `contact_vector` property, kept probe-level
    annotations in a separate `probes_info` annotation, and stored each probe's
    planar contour under a `probe_{i}_planar_contour` annotation. This function
    rebuilds a `ProbeGroup` from those legacy pieces, attaches it through the
    canonical `set_probegroup` path when the extractor supports it, and drops
    the legacy property so the old and new representations never coexist on the
    loaded extractor.
    """
    from probeinterface import ProbeGroup

    legacy_array = extractor.get_property("contact_vector")
    group = ProbeGroup.from_numpy(legacy_array)

    # Restore per-probe annotation dicts, when the legacy annotation is present.
    if "probes_info" in extractor.get_annotation_keys():
        for probe, info in zip(group.probes, extractor.get_annotation("probes_info")):
            probe.annotations = info

    # Restore planar contours saved one-per-probe under indexed annotation keys.
    for index, probe in enumerate(group.probes):
        planar_contour = extractor._annotations.get(f"probe_{index}_planar_contour")
        if planar_contour is not None:
            probe.set_planar_contour(planar_contour)

    # Prefer the canonical setter; fall back to direct attachment for
    # extractor types that do not expose `set_probegroup`.
    if hasattr(extractor, "set_probegroup"):
        extractor.set_probegroup(group, in_place=True)
    else:
        extractor._probegroup = group

    extractor._properties.pop("contact_vector", None)


def _get_class_from_string(class_string):
class_name = class_string.split(".")[-1]
module = ".".join(class_string.split(".")[:-1])
Expand Down
23 changes: 18 additions & 5 deletions src/spikeinterface/core/baserecording.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ class BaseRecording(BaseRecordingSnippets, ChunkableMixin):
_main_properties = [
"group",
"location",
"wiring",
"gain_to_uV",
"offset_to_uV",
"gain_to_physical_unit",
Expand Down Expand Up @@ -392,9 +393,14 @@ def _save(self, format="binary", verbose: bool = False, **save_kwargs):
else:
raise ValueError(f"format {format} not supported")

if self.get_property("contact_vector") is not None:
probegroup = self.get_probegroup()
cached.set_probegroup(probegroup)
if self.has_probe() and not cached.has_probe():
# Share the probegroup by reference. We deliberately skip
# `set_probegroup` (which re-runs _set_probes and validates dci)
# because a child of `split_by` references the parent's full
# probegroup whose dci values can exceed the child's channel
# count. Wiring/location/group properties are carried over by
# the caller's `copy_metadata` step.
cached._probegroup = self._probegroup

return cached

Expand All @@ -403,7 +409,14 @@ def _extra_metadata_from_folder(self, folder):
folder = Path(folder)
if (folder / "probe.json").is_file():
probegroup = read_probeinterface(folder / "probe.json")
self.set_probegroup(probegroup, in_place=True)
if "wiring" in self.get_property_keys():
# wiring was restored via the property-load loop; the stored
# probegroup's dci refers to the parent's channel space, so
# re-running `_set_probes` would fail for sliced children.
# Attach the probegroup object directly.
self._probegroup = probegroup
else:
self.set_probegroup(probegroup, in_place=True)

# load time vector if any
for segment_index, rs in enumerate(self.segments):
Expand All @@ -414,7 +427,7 @@ def _extra_metadata_from_folder(self, folder):

def _extra_metadata_to_folder(self, folder):
# save probe
if self.get_property("contact_vector") is not None:
if self.has_probe():
probegroup = self.get_probegroup()
write_probeinterface(folder / "probe.json", probegroup)

Expand Down
Loading
Loading