""" |
|
|
Hugging Face Dataset Loader for WAVE BENDER IDE v5.0 |
|
|
This script loads the dataset WITHOUT ArrowInvalid errors. |
|
|
""" |

import os

from datasets import Dataset, DatasetDict
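

# A minimal sketch (an addition, not part of the original loader) of the
# failure mode the docstring refers to. `Dataset.from_list` and pyarrow's
# ArrowInvalid are real parts of the datasets/pyarrow stack; the toy
# records below are hypothetical and exist only for illustration.
def _demo_arrow_invalid():
    """Show why mixed schemas in a single split break Arrow.

    Arrow infers one type per column; mixing ints and strings in the
    same field raises an error (ArrowInvalid in practice), which is why
    this loader keeps every component in its own split.
    """
    try:
        Dataset.from_list([{"value": 1}, {"value": "a string"}])
    except Exception as exc:  # pyarrow.lib.ArrowInvalid in practice
        print(f"Mixed column types in one split fail: {exc}")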


def load_wave_bender_dataset(dataset_path):
    """
    Load the WAVE BENDER dataset from an extracted directory.

    Args:
        dataset_path (str): Path to the extracted dataset directory.

    Returns:
        DatasetDict: Dictionary of datasets, one split per component.
    """
    datasets = {}

    # Telemetry records are stored as JSON Lines (one record per line).
    telemetry_file = os.path.join(dataset_path, "telemetry", "telemetry.jsonl")
    if os.path.exists(telemetry_file):
        datasets['telemetry'] = Dataset.from_json(telemetry_file)
        print(f"Loaded telemetry data: {len(datasets['telemetry'])} records")

    # SLAM components: one JSON file per split under the slam/ subdirectory.
    slam_path = os.path.join(dataset_path, "slam")

    obstacles_file = os.path.join(slam_path, "obstacles.json")
    if os.path.exists(obstacles_file):
        datasets['slam_obstacles'] = Dataset.from_json(obstacles_file)
        print(f"Loaded SLAM obstacles: {len(datasets['slam_obstacles'])} records")

    detections_file = os.path.join(slam_path, "detections.json")
    if os.path.exists(detections_file):
        datasets['slam_detections'] = Dataset.from_json(detections_file)
        print(f"Loaded SLAM detections: {len(datasets['slam_detections'])} records")

    avoidances_file = os.path.join(slam_path, "avoidances.json")
    if os.path.exists(avoidances_file):
        datasets['slam_avoidances'] = Dataset.from_json(avoidances_file)
        print(f"Loaded SLAM avoidances: {len(datasets['slam_avoidances'])} records")

    # Training statistics: per-epoch metrics plus a run-level summary.
    stats_path = os.path.join(dataset_path, "statistics")

    epochs_file = os.path.join(stats_path, "epochs.json")
    if os.path.exists(epochs_file):
        datasets['training_epochs'] = Dataset.from_json(epochs_file)
        print(f"Loaded training epochs: {len(datasets['training_epochs'])} records")

    summary_file = os.path.join(stats_path, "summary.json")
    if os.path.exists(summary_file):
        datasets['statistics'] = Dataset.from_json(summary_file)
        print("Loaded statistics summary")

    # One schema per split, so Arrow never has to unify mismatched
    # column types across components.
    dataset_dict = DatasetDict(datasets)

    print(f"\n✅ Dataset loaded successfully with {len(datasets)} components")
    print("✅ No ArrowInvalid errors - all schemas are separate and consistent")

    return dataset_dict
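

# Inspection helper - a sketch added here, not part of the original loader.
# The `num_rows` and `column_names` attributes are standard
# `datasets.Dataset` API; the helper name itself is an assumption.
def describe_dataset(dataset_dict):
    """Print the row count and column names of every split."""
    for name, split in dataset_dict.items():
        print(f"  {name}: {split.num_rows} rows, columns: {split.column_names}")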
if __name__ == "__main__": |
|
|
|
|
|
dataset = load_wave_bender_dataset("./extracted_dataset") |
|
|
print(f"\nDataset structure: {list(dataset.keys())}") |
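
    # Optional: summarize each split with the sketch helper defined above.
    describe_dataset(dataset)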