Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
59 changes: 54 additions & 5 deletions python_code/batch_processing/full_pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,23 +17,30 @@
from python_code.batch_processing.postprocess_recording import process_recording
from python_code.cameras.postprocess import postprocess
from python_code.utilities.folder_utilities.recording_folder import RecordingFolder
from python_code.utilities.processing_metadata import write_step_metadata


HEAD_DLC_ITERATION = 17
EYE_DLC_ITERATION = 30
TOY_DLC_ITERATION = 10


def _dlc_metadata_is_outdated(dlc_output_folder: Path | None, required_iteration: int) -> bool:
"""Return True if skellyclicker_metadata.json exists and has a lower iteration than required."""
def _read_dlc_iteration(dlc_output_folder: Path | None) -> int | None:
"""Return the iteration from skellyclicker_metadata.json, or None if unavailable."""
if dlc_output_folder is None:
return True
return None
metadata_path = dlc_output_folder / "skellyclicker_metadata.json"
if not metadata_path.exists():
return True
return None
with open(metadata_path) as f:
metadata = json.load(f)
return metadata.get("iteration", 0) < required_iteration
return metadata.get("iteration")


def _dlc_metadata_is_outdated(dlc_output_folder: Path | None, required_iteration: int) -> bool:
    """Return True if the DLC output must be (re)generated.

    That is the case when skellyclicker_metadata.json is missing or has no
    iteration entry (``_read_dlc_iteration`` returns None), or when the
    recorded iteration is lower than *required_iteration*.
    """
    iteration = _read_dlc_iteration(dlc_output_folder)
    return iteration is None or iteration < required_iteration


def _run_subprocess_streaming(command_list: list, clean_env: dict, use_pty: bool = False) -> None:
Expand Down Expand Up @@ -198,6 +205,12 @@ def full_pipeline(
timings["Synchronization"] = None

recording_folder.check_synchronization()
if timings["Synchronization"] is not None:
write_step_metadata(
recording_folder.processing_metadata_path,
step="synchronization",
parameters={"include_eyes": include_eye},
)

# Calibration
if overwrite_calibration or not recording_folder.is_calibrated():
Expand All @@ -210,6 +223,15 @@ def full_pipeline(
timings["Calibration"] = None

recording_folder.check_calibration()
if timings["Calibration"] is not None:
write_step_metadata(
recording_folder.processing_metadata_path,
step="calibration",
parameters={
"venv_path": "/home/scholl-lab/anaconda3/envs/fmc/bin/python",
"script_path": "/home/scholl-lab/Documents/git_repos/freemocap/experimental/batch_process/headless_calibration.py",
},
)

# DLC — check each model independently
run_dlc_body = overwrite_dlc or _dlc_metadata_is_outdated(recording_folder.head_body_dlc_output, HEAD_DLC_ITERATION)
Expand Down Expand Up @@ -240,6 +262,23 @@ def full_pipeline(
print("Pose estimation: skipped")

recording_folder.check_dlc_output()
if timings["Pose estimation"] is not None:
write_step_metadata(
recording_folder.processing_metadata_path,
step="pose_estimation",
parameters={
"include_eye": run_dlc_eye,
"include_body": run_dlc_body,
"include_toy": run_dlc_toy,
},
extra={
"dlc_iterations": {
"body": _read_dlc_iteration(recording_folder.head_body_dlc_output),
"eye": _read_dlc_iteration(recording_folder.eye_dlc_output),
"toy": _read_dlc_iteration(recording_folder.toy_dlc_output),
}
},
)

# Propagate DLC results to downstream steps
if run_dlc_eye:
Expand All @@ -266,6 +305,16 @@ def full_pipeline(
timings["Triangulation"] = None

recording_folder.check_triangulation()
if timings["Triangulation"] is not None:
write_step_metadata(
recording_folder.processing_metadata_path,
step="triangulation",
parameters={
"skip_toy": not run_dlc_toy,
"venv_path": "/home/scholl-lab/Documents/git_repos/dlc_to_3d/.venv/bin/python",
"script_path": "/home/scholl-lab/Documents/git_repos/dlc_to_3d/dlc_reconstruction/dlc_to_3d.py",
},
)

eye_postprocessing = recording_folder.is_eye_postprocessed()
skull_postprocessing = recording_folder.is_skull_postprocessed()
Expand Down
26 changes: 25 additions & 1 deletion python_code/batch_processing/postprocess_recording.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
from python_code.rigid_body_solver.ferret_skull_solver import run_ferret_skull_solver_from_recording_folder
from python_code.utilities.find_bad_eye_data import bad_eye_data
from python_code.utilities.folder_utilities.recording_folder import RecordingFolder
from python_code.utilities.processing_metadata import write_step_metadata


def process_recording(
Expand All @@ -18,20 +19,43 @@ def process_recording(
if not skip_eye:
# process eye data
process_eye_session_from_recording_folder(recording_folder=recording_folder.folder_path)
write_step_metadata(
recording_folder.processing_metadata_path,
step="eye_processing",
parameters={},
)

# run eye confidence analysis
bad_eye_data(recording_folder=recording_folder)
write_step_metadata(
recording_folder.processing_metadata_path,
step="eye_quality",
parameters={},
)

if not skip_skull:
# process ceres solver
run_ferret_skull_solver_from_recording_folder(recording_folder=recording_folder, visualize=False)
run_ferret_skull_solver_from_recording_folder(recording_folder=recording_folder)
write_step_metadata(
recording_folder.processing_metadata_path,
step="skull_solving",
parameters={},
)

if not skip_gaze:
run_gaze_pipeline(
recording_path=recording_folder.folder_path,
resampling_strategy=ResamplingStrategy.FASTEST,
reprocess_all=True,
)
write_step_metadata(
recording_folder.processing_metadata_path,
step="gaze_pipeline",
parameters={
"resampling_strategy": "FASTEST",
"reprocess_all": True,
},
)

def pre_recording_validation(recording_folder: RecordingFolder):
recording_folder.check_triangulation(enforce_toy=False, enforce_annotated=False)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,11 +10,15 @@

Saved trajectories:
- orientation (quaternion wxyz)
- angular_velocity_global / angular_velocity_local
- angular_acceleration_global / angular_acceleration_local
- angular_velocity_local
- angular_acceleration_local
- keypoint__tear_duct / keypoint__outer_eye
- keypoint__pupil_center / keypoint__p1-p8

NOT saved (eye is in camera frame, not world frame):
- angular_velocity_global
- angular_acceleration_global

NOT saved (can be recomputed from quaternions + reference geometry):
- position (always [0,0,0] for eye)
- linear velocity/acceleration (always [0,0,0])
Expand Down Expand Up @@ -96,17 +100,7 @@ def ferret_eye_kinematics_to_tidy_dataframe(
units="quaternion",
))

# Angular velocity global
chunks.append(_build_vector_chunk(
frame_indices=frame_indices,
timestamps=timestamps,
values=kinematics.angular_velocity_global,
trajectory_name="angular_velocity_global",
component_names=["x", "y", "z"],
units="rad_s",
))

# Angular velocity local
# Angular velocity local (eye camera frame only — no global, eye is not in world frame)
chunks.append(_build_vector_chunk(
frame_indices=frame_indices,
timestamps=timestamps,
Expand All @@ -116,16 +110,6 @@ def ferret_eye_kinematics_to_tidy_dataframe(
units="rad_s",
))

# Angular acceleration global
chunks.append(_build_vector_chunk(
frame_indices=frame_indices,
timestamps=timestamps,
values=kinematics.angular_acceleration_global,
trajectory_name="angular_acceleration_global",
component_names=["x", "y", "z"],
units="rad_s2",
))

# Angular acceleration local
chunks.append(_build_vector_chunk(
frame_indices=frame_indices,
Expand Down
26 changes: 26 additions & 0 deletions python_code/ferret_gaze/run_gaze_pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,7 @@
"""
import logging
import re
import shutil
from dataclasses import dataclass
from pathlib import Path
from typing import Literal
Expand Down Expand Up @@ -499,6 +500,29 @@ def generate_blender_script(paths: ClipPaths) -> Path:
return paths.blender_script_path


def copy_analyzable_output(
    recording_folder: RecordingFolder,
    destination: Path = Path("/home/scholl-lab/Dropbox/projects/VisBehavDev/data/analyzable_outputs"),
) -> None:
    """Mirror the recording's analyzable_output folder into the shared destination.

    Any existing copy for this recording is removed first, so the destination
    always reflects the current pipeline output. After copying, the top-level
    file names of source and destination are compared and a warning is logged
    if they differ.

    NOTE(review): the completeness check compares only top-level entry names,
    not nested directories or file contents — confirm that is sufficient.

    Args:
        recording_folder: recording whose ``analyzable_output`` folder is copied.
        destination: parent folder receiving ``<recording_name>_analyzable_output``.
    """
    source = recording_folder.analyzable_output
    if source is None:
        logger.warning("analyzable_output folder not found — skipping Dropbox copy")
        return
    dest_folder = destination / f"{recording_folder.recording_name}_analyzable_output"
    # Replace rather than merge: stale files from a previous run must not linger.
    if dest_folder.exists():
        shutil.rmtree(dest_folder)
    shutil.copytree(source, dest_folder)

    source_files = {entry.name for entry in source.iterdir()}
    dest_files = {entry.name for entry in dest_folder.iterdir()}
    if source_files == dest_files:
        logger.info(f"Copied analyzable_output to {dest_folder} ({len(dest_files)} files)")
    else:
        missing = source_files - dest_files
        extra = dest_files - source_files
        logger.warning(f"Copy to {dest_folder} may be incomplete — missing: {missing}, extra: {extra}")


def run_gaze_pipeline(
recording_path: Path,
resampling_strategy: ResamplingStrategy = ResamplingStrategy.FASTEST,
Expand Down Expand Up @@ -615,6 +639,8 @@ def run_gaze_pipeline(
logger.info(" 3. Run with Alt+P")
logger.info(" 4. Press Spacebar to play animation")

copy_analyzable_output(recording_folder)

return paths.analyzable_output_dir


Expand Down
4 changes: 4 additions & 0 deletions python_code/utilities/folder_utilities/recording_folder.py
Original file line number Diff line number Diff line change
Expand Up @@ -533,6 +533,10 @@ def mocap_solver_output(self) -> Path | None:
else None
)

@property
def processing_metadata_path(self) -> Path:
    """Path of the JSON file recording per-step processing metadata."""
    metadata_filename = "processing_metadata.json"
    return self.folder_path / metadata_filename

@property
def analyzable_output(self) -> Path | None:
analyzable_output = self.folder_path / "analyzable_output"
Expand Down
43 changes: 43 additions & 0 deletions python_code/utilities/processing_metadata.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
import json
import subprocess
from datetime import datetime
from pathlib import Path


def _get_git_hash() -> str:
try:
result = subprocess.run(
["git", "rev-parse", "HEAD"],
capture_output=True,
text=True,
cwd=Path(__file__).parent,
)
return result.stdout.strip() if result.returncode == 0 else "unknown"
except Exception:
return "unknown"


def write_step_metadata(
metadata_path: Path,
step: str,
parameters: dict,
extra: dict | None = None,
) -> None:
"""Read existing metadata JSON (or start fresh), overwrite `step`, and save."""
metadata = {}
if metadata_path.exists():
with open(metadata_path) as f:
metadata = json.load(f)

step_data: dict = {
"timestamp": datetime.now().isoformat(),
"bs_git_hash": _get_git_hash(),
"parameters": parameters,
}
if extra:
step_data.update(extra)

metadata[step] = step_data

with open(metadata_path, "w") as f:
json.dump(metadata, f, indent=2)