Add scripts to run Voronoi profiles independently of streamlit
chraibi committed Mar 2, 2024
1 parent 0385456 commit d7dec8a
Showing 9 changed files with 250 additions and 150 deletions.
3 changes: 1 addition & 2 deletions app.py
@@ -1,7 +1,5 @@
"""Main entry point to the data visualisator for MADRAS project."""

import logging
from log_config import setup_logging
import streamlit as st

import analysis_tab
@@ -11,6 +9,7 @@
import map_tab
import traj_tab
import ui
from log_config import setup_logging

setup_logging()
if __name__ == "__main__":
117 changes: 117 additions & 0 deletions calculate_profiles.py
@@ -0,0 +1,117 @@
"""Input profile data and calculate the profiles."""

import logging
import pickle
import sys
from pathlib import Path
from typing import Any, Dict, List, Tuple, cast

import numpy as np
import numpy.typing as npt
import pandas as pd
import pedpy as pp
from tqdm import tqdm
from joblib import Parallel, delayed
from pedpy import (
DensityMethod,
SpeedMethod,
compute_grid_cell_polygon_intersection_area,
compute_speed_profile,
get_grid_cells,
)

from log_config import setup_logging
from profile_config_data import Config


def process_file(
file_: str,
grid_cells: List[Any],
profile_data: Dict[str, pd.DataFrame],
walkable_area: pp.WalkableArea,
grid_size: float,
) -> Tuple[str, List[npt.NDArray[np.float64]], List[npt.NDArray[np.float64]]]:
"""Calculate density profile and speed profile."""
logging.info(file_)
profile_data_file = profile_data[file_]
grid_cell_intersection_area, resorted_profile_data = (
compute_grid_cell_polygon_intersection_area(
data=profile_data_file, grid_cells=grid_cells
)
)
logging.info("Compute density profile")
density_profile = pp.compute_density_profile(
data=resorted_profile_data,
walkable_area=walkable_area,
grid_intersections_area=grid_cell_intersection_area,
grid_size=grid_size,
density_method=DensityMethod.VORONOI,
)
logging.info("Compute speed profile")
speed_profile = compute_speed_profile(
data=resorted_profile_data,
walkable_area=walkable_area,
grid_intersections_area=grid_cell_intersection_area,
grid_size=grid_size,
speed_method=SpeedMethod.VORONOI,
)
return (file_, density_profile, speed_profile)


def calculate(
result_file: str,
grid_size: float,
files: List[str],
walkable_area: pp.WalkableArea,
profile_data: Dict[str, pd.DataFrame],
) -> None:
"""Parallelize calculation of density and speed profile data and save a pickle file.
This function utilizes the process_file function to perform calculations in parallel across the given files.
The results are then serialized and saved to the specified result_file.
"""
logging.info("Compute_grid ...")
grid_cells, _, _ = get_grid_cells(walkable_area=walkable_area, grid_size=grid_size)
results = Parallel(n_jobs=-1)(
delayed(process_file)(file_, grid_cells, profile_data, walkable_area, grid_size)
for file_ in tqdm(files, desc="Processing files")
)
logging.info("Aggregate results")
density_profiles = {}
speed_profiles = {}
for file_, density_profile, speed_profile in results:
density_profiles[file_] = density_profile
speed_profiles[file_] = speed_profile

with open(result_file, "wb") as f:
pickle.dump((density_profiles, speed_profiles), f)

logging.info(f"Results in {result_file}")


def main(config: Config) -> None:
    """Run the main logic for the profile calculation."""
setup_logging()
grid_size = config.grid_size
result_file = config.result_file
walkable_area = config.walkable_area
logging.info("Read trajectories")
files = config.files
profile_data_file = config.profile_data_file
logging.info(f"Read file {profile_data_file}")
if Path(profile_data_file).exists():
with open(profile_data_file, "rb") as f:
profile_data = pickle.load(f)
profile_data = cast(Dict[str, pd.DataFrame], profile_data)
else:
logging.error(f"file: {profile_data_file} does not exist!")
sys.exit()
logging.info("Calculate profiles ...")
calculate(
result_file=result_file,
grid_size=grid_size,
files=files,
walkable_area=walkable_area,
profile_data=profile_data,
)
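
Once both scripts have run, the result file holds a (density_profiles, speed_profiles) tuple of dicts keyed by trajectory file name, each value a list of per-frame 2D grids. A minimal consumer might look like the following sketch; "profiles.pkl" is a placeholder for the configured result_file:

import pickle

import numpy as np

# Load the (density_profiles, speed_profiles) tuple written by calculate().
# "profiles.pkl" is a placeholder for config.result_file.
with open("profiles.pkl", "rb") as f:
    density_profiles, speed_profiles = pickle.load(f)

for name, frames in density_profiles.items():
    # Each entry is a list of per-frame 2D arrays; average over time.
    mean_density = np.nanmean(np.asarray(frames), axis=0)
    print(name, mean_density.shape)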
71 changes: 71 additions & 0 deletions create_profile_data.py
@@ -0,0 +1,71 @@
"""Create Voronoi profile density."""

import logging
import pickle
from pathlib import Path
from typing import Tuple

import pandas as pd
import pedpy as pp
from tqdm import tqdm
from joblib import Parallel, delayed
from pedpy.column_identifier import FRAME_COL, ID_COL

from log_config import setup_logging
from profile_config_data import Config


def process_trajectory(
file_: str, walkable_area: pp.WalkableArea, frame_step: int, fps: int = 30
) -> Tuple[str, pd.DataFrame]:
"""Calculate Voronoi polys, Individual speed, Voronoi density and merge all with traj data.
frame_step: steps forward and backward. See pedpy docu for compute_individual_speed.
fps: frames per second of the trajectory data
"""
traj = pp.load_trajectory(
trajectory_file=Path(file_),
default_frame_rate=fps,
default_unit=pp.TrajectoryUnit.METER,
)

voronoi_polygons = pp.compute_individual_voronoi_polygons(
traj_data=traj, walkable_area=walkable_area
)
density_voronoi, intersecting = pp.compute_voronoi_density(
individual_voronoi_data=voronoi_polygons,
measurement_area=walkable_area,
)
individual_speed = pp.compute_individual_speed(
traj_data=traj,
frame_step=frame_step,
compute_velocity=True,
speed_calculation=pp.SpeedCalculation.BORDER_SINGLE_SIDED,
)
merged_data = individual_speed.merge(voronoi_polygons, on=[ID_COL, FRAME_COL])
merged_data = merged_data.merge(traj.data, on=[ID_COL, FRAME_COL])

return (file_, merged_data)


def main(config: Config) -> None:
"""Process_trajectory in parallel and dump result in file."""
setup_logging()
files = config.files
profile_data_file = config.profile_data_file
frame_step = config.speed_frame_rate
fps = config.fps
walkable_area = config.walkable_area
# Parallel execution
logging.info("Process trajectories and create profile data ...")
results = Parallel(n_jobs=-1)(
delayed(process_trajectory)(file_, walkable_area, frame_step, fps)
for file_ in tqdm(files, desc="Processing files")
)
    # Aggregate the results into a dict keyed by trajectory file name
    profile_data = dict(results)
with open(profile_data_file, "wb") as f:
pickle.dump(profile_data, f)

logging.info(f"Profile data computed and saved to {profile_data_file}")
20 changes: 20 additions & 0 deletions profile_config_data.py
@@ -0,0 +1,20 @@
"""Manages config data for Voronoi profile calculations"""

from dataclasses import dataclass
from typing import List

import pedpy


@dataclass
class Config:
files: List[str]
profile_data_file: str
result_file: str
speed_frame_rate: int
fps: int
walkable_area: pedpy.WalkableArea
grid_size: float
rmax: int
vmax: int
jmax: int
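
Both scripts communicate only through this dataclass. A minimal end-to-end driver might look like the sketch below; every concrete value is a placeholder, and rmax, vmax, and jmax are presumably plot/color-scale limits for density, speed, and flow (this commit does not confirm their meaning):

from shapely.geometry import Polygon

import pedpy

import calculate_profiles
import create_profile_data
from profile_config_data import Config

config = Config(
    files=["trajectories/run_01.txt"],  # placeholder trajectory path
    profile_data_file="profile_data.pkl",
    result_file="profiles.pkl",
    speed_frame_rate=10,  # frame_step used for individual speed
    fps=30,
    walkable_area=pedpy.WalkableArea(
        Polygon([(0, 0), (10, 0), (10, 10), (0, 10)])  # placeholder geometry
    ),
    grid_size=0.4,
    rmax=8,
    vmax=2,
    jmax=4,
)

create_profile_data.main(config)  # stage 1: writes profile_data_file
calculate_profiles.main(config)   # stage 2: reads it, writes result_file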
9 changes: 1 addition & 8 deletions profiles.ipynb
@@ -55,14 +55,7 @@
"for idx, (filename, traj) in enumerate(trajectories.items()):\n",
" voronoi_polygons[filename] = pp.compute_individual_voronoi_polygons(\n",
" traj_data=traj, walkable_area=walkable_area\n",
" )\n",
"\n",
"\n",
"#individual_cutoff = pp.compute_individual_voronoi_polygons(\n",
"# traj_data=traj,\n",
"# walkable_area=walkable_area,\n",
"# cut_off=Cutoff(radius=0.8, quad_segments=3),\n",
"#)"
" )"
]
},
{
139 changes: 0 additions & 139 deletions profiles.py

This file was deleted.

1 change: 1 addition & 0 deletions requirements.txt
@@ -9,3 +9,4 @@ matplotlib
streamlit_drawable_canvas==0.8.0
streamlit-option-menu
gpxpy
tqdm