Skip to content

Commit

Permalink
continue refactoring
Browse files Browse the repository at this point in the history
  • Loading branch information
Khoyo committed Dec 19, 2023
1 parent 35a2737 commit 0ca56e4
Showing 1 changed file with 122 additions and 90 deletions.
212 changes: 122 additions & 90 deletions editoast/src/views/timetable/import.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ use crate::views::timetable::TimetableError;
use crate::{core::CoreClient, views::timetable::Path, DbPool};
use actix_web::{post, web::Data};
use chrono::{DateTime, Utc};
use diesel_async::AsyncPgConnection;
use serde::Deserialize;
use std::collections::HashMap;

Expand All @@ -12,7 +13,8 @@ use crate::core::AsCoreRequest;
use crate::error::Result;
use crate::models::infra_objects::operational_point::OperationalPointModel;
use crate::models::{
Create, Infra, Retrieve, RollingStockModel, ScheduledPoint, Timetable, TrainSchedule,
Create, Infra, Pathfinding, Retrieve, RollingStockModel, ScheduledPoint, Timetable,
TrainSchedule,
};
use crate::schema::rolling_stock::{RollingStock, RollingStockComfortType};
use crate::views::infra::{call_core_infra_state, InfraState};
Expand Down Expand Up @@ -129,31 +131,12 @@ pub async fn post_timetable(
let rolling_stock: RollingStock = rolling_stock_model.into();
pf_request.with_rolling_stocks(&mut vec![rolling_stock.clone()]);
// List operational points uic needed for this import
let mut ops_uic = import_item
.path
.iter()
.filter_map(|step| match &step.location {
TimetableImportPathLocation::OperationalPointLocation { uic } => Some(*uic),
_ => None,
})
.collect::<Vec<_>>();
// Remove duplicates
ops_uic.sort();
ops_uic.dedup();
let ops_uic = ops_uic_from_path(&import_item.path);
// Retrieve operational points
let ops =
OperationalPointModel::retrieve_from_uic(&mut conn, infra_id, ops_uic.clone()).await?;
let mut op_id_to_parts = HashMap::<_, Vec<_>>::new();
for op in ops {
op_id_to_parts
.entry(op.data.0.extensions.identifier.unwrap().uic)
.or_default()
.extend(op.data.0.parts);
}
// If we didn't find all the operational points, we can't run the pathfinding
if op_id_to_parts.len() != ops_uic.len() {
let Some(op_id_to_parts) = find_operation_points(&ops_uic, infra_id, &mut conn).await?
else {
continue;
}
};
// Create waypoints
let mut waypoints = waypoints_from_steps(&import_item.path, &op_id_to_parts);
pf_request.with_waypoints(&mut waypoints);
Expand Down Expand Up @@ -186,72 +169,13 @@ pub async fn post_timetable(

// TRAIN SCHEDULES

// Build the request
let mut train_schedules = vec![];
for train in import_item.trains.iter() {
assert_eq!(waypoint_offsets.len(), import_item.path.len());
let mut stops: Vec<_> = import_item
.path
.iter()
.zip(&waypoint_offsets)
.map(|(step, path_offset)| {
let duration = step.schedule.get(&train.name).map_or(0., |ps| {
(ps.departure_time - ps.arrival_time).num_seconds().max(0) as f64
});
TrainStop {
position: Some(*path_offset),
location: None,
duration,
}
})
.collect();

// Force the last stop to be at least 1s long.
// This is to avoid the train to stop with a non-zero speed.
let last_stop = stops.last_mut().unwrap();
last_stop.duration = last_stop.duration.max(1.);

let departure_time = train.departure_time.num_seconds_from_midnight() as f64;
let scheduled_points = import_item
.path
.iter()
.zip(&waypoint_offsets)
.filter_map(|(step, &path_offset)| {
if path_offset <= 0. {
None
} else {
step.schedule.get(&train.name).map(|ps| ScheduledPoint {
path_offset,
time: (ps.arrival_time.num_seconds_from_midnight() as f64
- departure_time)
.max(0.),
})
}
})
.collect();
let core_train = CoreTrainSchedule {
train_name: train.name.clone(),
rolling_stock: import_item.rolling_stock.clone(),
initial_speed: 0.,
scheduled_points,
allowances: vec![],
stops,
tag: None,
comfort: RollingStockComfortType::Standard,
power_restriction_ranges: None,
options: None,
};
train_schedules.push(core_train);
}

let request = SimulationRequest {
infra: infra_id,
rolling_stocks: vec![rolling_stock.clone()],
train_schedules,
electrical_profile_set: None,
trains_path: path_response.into(),
};

let request = build_simulation_request(
&import_item,
&waypoint_offsets,
&rolling_stock,
infra_id,
path_response,
);
// Run the simulation
let response_payload = match request.fetch(&core_client).await {
Ok(response_payload) => response_payload,
Expand Down Expand Up @@ -287,6 +211,41 @@ pub async fn post_timetable(
Ok(HttpResponse::NoContent().finish())
}

/// Fetches the operational points matching the given UIC codes on an infra and
/// groups their parts by UIC.
///
/// Returns `Ok(None)` when at least one UIC code has no matching operational
/// point — in that case the caller cannot run the pathfinding for this item.
///
/// Note: takes `&[i64]` rather than `&Vec<i64>` (clippy `ptr_arg`); existing
/// callers passing `&Vec<i64>` still compile via deref coercion.
async fn find_operation_points(
    ops_uic: &[i64],
    infra_id: i64,
    conn: &mut AsyncPgConnection,
) -> Result<Option<HashMap<i64, Vec<OperationalPointPart>>>> {
    // Retrieve operational points
    let ops = OperationalPointModel::retrieve_from_uic(conn, infra_id, ops_uic.to_vec()).await?;
    let mut op_id_to_parts = HashMap::<_, Vec<_>>::new();
    for op in ops {
        // NOTE(review): points were looked up by UIC, so the identifier
        // extension is presumably always present — confirm against
        // `retrieve_from_uic`'s query before relying on this.
        let uic = op
            .data
            .0
            .extensions
            .identifier
            .expect("operational point retrieved by UIC should carry an identifier extension")
            .uic;
        op_id_to_parts.entry(uic).or_default().extend(op.data.0.parts);
    }
    // If we didn't find all the operational points, we can't run the pathfinding
    if op_id_to_parts.len() != ops_uic.len() {
        return Ok(None);
    }
    Ok(Some(op_id_to_parts))
}

/// Collects the sorted, deduplicated UIC codes of every operational-point
/// location appearing in an import path.
///
/// Steps that are not `OperationalPointLocation`s are ignored.
///
/// Note: takes `&[TimetableImportPathStep]` rather than `&Vec<_>` (clippy
/// `ptr_arg`); existing callers still compile via deref coercion.
fn ops_uic_from_path(path: &[TimetableImportPathStep]) -> Vec<i64> {
    let mut ops_uic: Vec<_> = path
        .iter()
        .filter_map(|step| match &step.location {
            TimetableImportPathLocation::OperationalPointLocation { uic } => Some(*uic),
            _ => None,
        })
        .collect();
    // Remove duplicates: `dedup` only drops *consecutive* repeats, hence the
    // sort first. `sort_unstable` is fine (i64 keys, stability irrelevant).
    ops_uic.sort_unstable();
    ops_uic.dedup();
    ops_uic
}

fn waypoints_from_steps(
path: &Vec<TimetableImportPathStep>,
op_id_to_parts: &HashMap<i64, Vec<OperationalPointPart>>,
Expand All @@ -308,3 +267,76 @@ fn waypoints_from_steps(
}
res
}

/// Builds the core simulation request covering every train of an import item.
///
/// Each train yields one `CoreTrainSchedule` derived from the per-step
/// schedules; all trains share the same rolling stock and pathfinding result.
///
/// # Panics
/// Panics if `waypoint_offsets` does not contain exactly one offset per path
/// step, or if the import path is empty.
fn build_simulation_request(
    import_item: &TimetableImportItem,
    waypoint_offsets: &[f64],
    rolling_stock: &RollingStock,
    infra_id: i64,
    path_response: Pathfinding,
) -> SimulationRequest {
    // Loop-invariant precondition, hoisted out of the per-train loop:
    // one waypoint offset per path step.
    assert_eq!(waypoint_offsets.len(), import_item.path.len());
    let mut train_schedules = vec![];
    for train in import_item.trains.iter() {
        // One stop per path step; its duration is the dwell time
        // (departure - arrival, clamped to >= 0) from this train's schedule
        // at that step, or 0 when the step is not scheduled for this train.
        let mut stops: Vec<_> = import_item
            .path
            .iter()
            .zip(waypoint_offsets)
            .map(|(step, path_offset)| {
                let duration = step.schedule.get(&train.name).map_or(0., |ps| {
                    (ps.departure_time - ps.arrival_time).num_seconds().max(0) as f64
                });
                TrainStop {
                    position: Some(*path_offset),
                    location: None,
                    duration,
                }
            })
            .collect();

        // Force the last stop to be at least 1s long.
        // This is to avoid the train to stop with a non-zero speed.
        let last_stop = stops
            .last_mut()
            .expect("import path must contain at least one step");
        last_stop.duration = last_stop.duration.max(1.);

        // Scheduled points are expressed in seconds after the train's
        // departure time; steps with offset <= 0 are skipped (presumably the
        // departure point itself — verify against the pathfinding offsets).
        let departure_time = train.departure_time.num_seconds_from_midnight() as f64;
        let scheduled_points = import_item
            .path
            .iter()
            .zip(waypoint_offsets)
            .filter_map(|(step, &path_offset)| {
                if path_offset <= 0. {
                    None
                } else {
                    step.schedule.get(&train.name).map(|ps| ScheduledPoint {
                        path_offset,
                        time: (ps.arrival_time.num_seconds_from_midnight() as f64 - departure_time)
                            .max(0.),
                    })
                }
            })
            .collect();
        train_schedules.push(CoreTrainSchedule {
            train_name: train.name.clone(),
            rolling_stock: import_item.rolling_stock.clone(),
            initial_speed: 0.,
            scheduled_points,
            allowances: vec![],
            stops,
            tag: None,
            comfort: RollingStockComfortType::Standard,
            power_restriction_ranges: None,
            options: None,
        });
    }

    SimulationRequest {
        infra: infra_id,
        rolling_stocks: vec![rolling_stock.clone()],
        train_schedules,
        electrical_profile_set: None,
        trains_path: path_response.into(),
    }
}

0 comments on commit 0ca56e4

Please sign in to comment.