diff --git a/.gitignore b/.gitignore index ead9b2cc..2e556424 100644 --- a/.gitignore +++ b/.gitignore @@ -1217,3 +1217,10 @@ code\ meetings altrios/resources/networks/Scenic - ALTRIOS Confidential.yaml altrios/dispatch/*.json activate.sh +*.xlsx +altrios/*.txt +truck_work_log.txt +vehicle_average_times.txt +avg_time_per_train.txt +crane_work_log.txt +hostler_work_log.txt diff --git a/applications/calibration/zanzeff_bel_cal.py b/applications/calibration/zanzeff_bel_cal.py index c0be74c9..c0723b7f 100644 --- a/applications/calibration/zanzeff_bel_cal.py +++ b/applications/calibration/zanzeff_bel_cal.py @@ -55,7 +55,7 @@ def get_bel_trip_mods(df: pd.DataFrame) -> pd.DataFrame: """ # df['dt [s]'] = pd.to_datetime(df['PacificTime']).dt.tz_convert('UTC') df['timestamp'] = pd.to_datetime(df['PacificTime']).dt.to_pydatetime() - df['time [s]'] = cval.get_delta_seconds(df['timestamp']).cumsum() + df['time [s]'] = cval.get_delta_seconds(df['timestamp']).cum_sum() df3000 = df[['PacificTime', 'time [s]']].copy() df3000['Tractive Power [W]'] = ( diff --git a/applications/calibration/zanzeff_conv_loco_cal.py b/applications/calibration/zanzeff_conv_loco_cal.py index eb3505f8..7425a71e 100644 --- a/applications/calibration/zanzeff_conv_loco_cal.py +++ b/applications/calibration/zanzeff_conv_loco_cal.py @@ -33,7 +33,7 @@ def get_conv_trip_mods(df: pd.DataFrame) -> pd.DataFrame: df = df.drop_duplicates('PacificTime').copy() df['timestamp'] = pd.to_datetime( df['PacificTime']).dt.to_pydatetime() - df['time [s]'] = cval.get_delta_seconds(df['timestamp']).cumsum() + df['time [s]'] = cval.get_delta_seconds(df['timestamp']).cum_sum() @@ -69,7 +69,7 @@ def get_conv_trip_mods(df: pd.DataFrame) -> pd.DataFrame: df39xx["Fuel Energy [J]"] = (df39xx["Fuel Power [W]"] * cval.get_delta_seconds( df39xx['timestamp']) - ).cumsum().copy() + ).cum_sum().copy() df39xx["engine_on"] = df['Engine Speed (RPM) BNSF ' + str(trailing_loc)] > 100 return df39xx diff --git 
a/applications/calibration/zanzeff_set_speed_train_cal.py b/applications/calibration/zanzeff_set_speed_train_cal.py index e3b5fa01..b5ef7526 100644 --- a/applications/calibration/zanzeff_set_speed_train_cal.py +++ b/applications/calibration/zanzeff_set_speed_train_cal.py @@ -44,7 +44,7 @@ def get_train_sim_df_mods( df_train_sim.drop_duplicates(subset='PacificTime', inplace=True) df_train_sim['time [s]'] = cval.get_delta_seconds( - df_train_sim['PacificTime']).cumsum() + df_train_sim['PacificTime']).cum_sum() df_train_sim['Total Tractive Force [N]'] = df_train_sim[[ 'Tractive Effort Feedback BNSF 3940', 'Tractive Effort Feedback BNSF 3965', @@ -56,13 +56,13 @@ def get_train_sim_df_mods( df_train_sim['Total Cumu. Tractive Energy [J]'] = ( df_train_sim['Total Tractive Power [W]'] * df_train_sim['time [s]'].diff().fillna(0.0) - ).cumsum() + ).cum_sum() df_train_sim['Total Pos. Cumu. Tractive Energy [J]'] = ( (df_train_sim['Total Tractive Power [W]'] * df_train_sim['time [s]'].diff().fillna(0.0)) .where(df_train_sim['Total Tractive Power [W]'] > 0, 0.0) - .cumsum() + .cum_sum() ) speed = savgol_filter( diff --git a/pyproject.toml b/pyproject.toml index c6083162..813ae078 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,10 +43,11 @@ dependencies = [ "plotly", "typing_extensions", "pyyaml", - "polars==0.20.25", + "polars >= 1.20.0", "pyarrow", "requests", "PyYAML==6.0.2", + "simpy" ] [project.urls] diff --git a/python/altrios/altrios_pyo3.pyi b/python/altrios/altrios_pyo3.pyi index d16a7940..063b84fc 100644 --- a/python/altrios/altrios_pyo3.pyi +++ b/python/altrios/altrios_pyo3.pyi @@ -819,6 +819,7 @@ class SpeedLimitTrainSim(SerdeAPI): origs: List[Location] dests: List[Location] loco_con: Consist + n_cars_by_type: Dict[str, int] state: TrainState # train_res: TrainRes # not accessible in Python path_tpc: PathTpc @@ -833,6 +834,7 @@ class SpeedLimitTrainSim(SerdeAPI): def __init__( cls, loco_con: Consist, + n_cars_by_type: Dict[str, int], state: TrainState, 
train_res_file: Optional[str], path_tpc_file: Optional[str], @@ -931,6 +933,7 @@ class RailVehicle(SerdeAPI): braking_ratio_empty: float braking_ratio_loaded: float car_type: str + freight_type: str davis_b_seconds_per_meter: float cd_area_empty_square_meters: float cd_area_loaded_square_meters: float diff --git a/python/altrios/defaults.py b/python/altrios/defaults.py index 06f0cfdf..848787ce 100644 --- a/python/altrios/defaults.py +++ b/python/altrios/defaults.py @@ -33,6 +33,7 @@ LOCO_LIFESPAN = 20 ANNUAL_LOCO_TURNOVER = 1.0/LOCO_LIFESPAN +DEFAULT_GAP_SIZE = 0.604 DEMAND_FILE = alt.resources_root() / "Default Demand.csv" FUEL_EMISSIONS_FILE = alt.resources_root() / "metrics_inputs" / "GREET-CA_Emissions_Factors.csv" diff --git a/python/altrios/demos/rollout_demo.py b/python/altrios/demos/rollout_demo.py index 982bf1a3..9ec56682 100644 --- a/python/altrios/demos/rollout_demo.py +++ b/python/altrios/demos/rollout_demo.py @@ -1,7 +1,8 @@ # %% import altrios as alt -from altrios import rollout, defaults, train_planner +from altrios import rollout, defaults +from altrios.train_planner import planner, planner_config import numpy as np import matplotlib.pyplot as plt import seaborn as sns @@ -21,9 +22,9 @@ plot_dir.mkdir(exist_ok=True) File = defaults.DEMAND_FILE #targets = [0,0.05,0.1,0.15,0.2,0.25,0.3,0.35,0.4,0.45,0.5,0.55,0.6,0.65,0.7,0.75, 0.8] -train_planner_config = train_planner.TrainPlannerConfig( - cars_per_locomotive=50, - target_cars_per_train=90) +train_planner_config = planner_config.TrainPlannerConfig( + cars_per_locomotive={"Default": 50}, + target_cars_per_train={"Default": 90}) targets = [0.5] for target in targets: scenario_infos, metrics = rollout.simulate_prescribed_rollout( diff --git a/python/altrios/demos/sim_manager_demo.py b/python/altrios/demos/sim_manager_demo.py index 65d99b37..6229c36d 100644 --- a/python/altrios/demos/sim_manager_demo.py +++ b/python/altrios/demos/sim_manager_demo.py @@ -1,8 +1,11 @@ # %% from altrios import sim_manager 
-from altrios import utilities, defaults, train_planner +from altrios import utilities, defaults import altrios as alt +from altrios.lifts import lifts_simulator +from altrios.train_planner import planner_config import numpy as np +import polars as pl import matplotlib.pyplot as plt import time import seaborn as sns @@ -34,9 +37,9 @@ f"Elapsed time to import rail vehicles, locations, and network: {t1_import - t0_import:.3g} s" ) -train_planner_config = train_planner.TrainPlannerConfig( - cars_per_locomotive=50, - target_cars_per_train=90) +train_planner_config = planner_config.TrainPlannerConfig( + cars_per_locomotive={"Default": 50}, + target_cars_per_train={"Default": 90}) t0_main = time.perf_counter() @@ -56,6 +59,11 @@ debug=True, ) +train_consist_plan_csv = pl.read_csv('C:/Users/mbruchon/Downloads/simulation_operation/lifts/results/train_consist_plan.csv') +container_data = lifts_simulator.run_simulation( + train_consist_plan = train_consist_plan_csv, + terminal = "Allouez") + t1_main = time.perf_counter() print(f"Elapsed time to run `sim_manager.main()`: {t1_main-t0_main:.3g} s") diff --git a/python/altrios/lifts/__init__.py b/python/altrios/lifts/__init__.py new file mode 100644 index 00000000..ffc4016e --- /dev/null +++ b/python/altrios/lifts/__init__.py @@ -0,0 +1,2 @@ +from pkg_resources import get_distribution +__version__ = get_distribution("altrios").version diff --git a/python/altrios/lifts/demos/starter_demo/train_consist_plan.csv b/python/altrios/lifts/demos/starter_demo/train_consist_plan.csv new file mode 100644 index 00000000..3a9e728b --- /dev/null +++ b/python/altrios/lifts/demos/starter_demo/train_consist_plan.csv @@ -0,0 +1,163 @@ +Train_ID,Train_Type,Locomotive_ID,Locomotive_Type,Origin_ID,Destination_ID,Cars_Loaded,Cars_Empty,Departure_SOC_J,Departure_Time_Planned_Hr,Arrival_Time_Planned_Hr,Refuel_Start_Time_Planned_Hr,Refuel_End_Time_Planned_Hr,Departure_Time_Actual_Hr,Arrival_Time_Actual_Hr,TrainSimVec_Index 
+25,Unit,1000,Diesel_Large,Allouez,Hibbing,43.0,44.0,828691080000.0,168.0,176.814986740618,176.814986740618,176.8360199346937,168.0,174.83160599585378,0 +32,Unit,1000,Diesel_Large,Hibbing,Allouez,44.0,43.0,828691080000.0,196.0,204.66115190304077,204.66115190304077,204.66655794550903,196.0,200.7194033488388,7 +37,Unit,1000,Diesel_Large,Allouez,Hibbing,43.0,44.0,828691080000.0,224.0,232.814986740618,232.814986740618,232.8360199346937,224.0,230.83160599585378,12 +41,Intermodal,1000,Diesel_Large,Hibbing,Allouez,43.0,41.0,828691080000.0,237.1764705882353,245.8340364958852,245.8340364958852,245.83566794943235,237.1764705882353,242.1023859005848,16 +43,Unit,1000,Diesel_Large,Allouez,Hibbing,43.0,44.0,828691080000.0,252.0,260.814986740618,260.814986740618,260.8360199346937,252.0,258.83160599585375,18 +47,Intermodal,1000,Diesel_Large,Hibbing,Allouez,43.0,41.0,828691080000.0,266.8235294117647,275.4810953194146,275.4810953194146,275.4827267729617,266.8235294117647,271.7494447241142,22 +49,Unit,1000,Diesel_Large,Allouez,Hibbing,43.0,44.0,828691080000.0,280.0,288.814986740618,288.814986740618,288.8360199346937,280.0,286.83160599585375,24 +53,Intermodal,1000,Diesel_Large,Hibbing,Allouez,43.0,41.0,828691080000.0,296.47058823529414,305.1281541429441,305.1281541429441,305.1297855964912,296.4705882352942,301.39650354764365,28 +57,Intermodal,1000,Diesel_Large,Allouez,Hibbing,41.0,43.0,828691080000.0,326.11764705882354,334.74920712941383,334.74920712941383,334.7576192186083,326.11764705882354,334.9090139408118,32 +25,Unit,1001,BEL,Allouez,Hibbing,43.0,44.0,8208000000.0,168.0,176.814986740618,176.814986740618,177.69498904461798,168.0,174.83160599585378,0 +32,Unit,1001,BEL,Hibbing,Allouez,44.0,43.0,2808004319.996545,196.0,204.66115190304077,204.66115190304077,207.54115420704076,196.0,200.7194033488388,7 +37,Unit,1001,BEL,Allouez,Hibbing,43.0,44.0,8208000000.0,224.0,232.814986740618,232.814986740618,233.69498904461798,224.0,230.83160599585378,12 
+41,Intermodal,1001,BEL,Hibbing,Allouez,43.0,41.0,2808004319.996545,237.1764705882353,245.8340364958852,245.8340364958852,248.50232589744067,237.1764705882353,242.1023859005848,16 +43,Unit,1001,BEL,Allouez,Hibbing,43.0,44.0,8208000000.0,252.0,260.814986740618,260.814986740618,261.694989044618,252.0,258.83160599585375,18 +47,Intermodal,1001,BEL,Hibbing,Allouez,43.0,41.0,2808004319.996545,266.8235294117647,275.4810953194146,275.4810953194146,278.14938472097003,266.8235294117647,271.7494447241142,22 +49,Unit,1001,BEL,Allouez,Hibbing,43.0,44.0,8208000000.0,280.0,288.814986740618,288.814986740618,289.694989044618,280.0,286.83160599585375,24 +53,Intermodal,1001,BEL,Hibbing,Allouez,43.0,41.0,2808004319.996545,296.47058823529414,305.1281541429441,305.1281541429441,307.79644354449954,296.4705882352942,301.39650354764365,28 +57,Intermodal,1001,BEL,Allouez,Hibbing,41.0,43.0,8208000000.0,326.11764705882354,334.74920712941383,334.74920712941383,335.62920943341385,326.11764705882354,334.9090139408118,32 +25,Unit,1002,Diesel_Large,Allouez,Hibbing,43.0,44.0,828691080000.0,168.0,176.814986740618,176.814986740618,176.8360199346937,168.0,174.83160599585378,0 +32,Unit,1002,Diesel_Large,Hibbing,Allouez,44.0,43.0,828691080000.0,196.0,204.66115190304077,204.66115190304077,204.66655794550903,196.0,200.7194033488388,7 +37,Unit,1002,Diesel_Large,Allouez,Hibbing,43.0,44.0,828691080000.0,224.0,232.814986740618,232.814986740618,232.8360199346937,224.0,230.83160599585378,12 +41,Intermodal,1002,Diesel_Large,Hibbing,Allouez,43.0,41.0,828691080000.0,237.1764705882353,245.8340364958852,245.8340364958852,245.83566794943235,237.1764705882353,242.1023859005848,16 +43,Unit,1002,Diesel_Large,Allouez,Hibbing,43.0,44.0,828691080000.0,252.0,260.814986740618,260.814986740618,260.8360199346937,252.0,258.83160599585375,18 +47,Intermodal,1002,Diesel_Large,Hibbing,Allouez,43.0,41.0,828691080000.0,266.8235294117647,275.4810953194146,275.4810953194146,275.4827267729617,266.8235294117647,271.7494447241142,22 
+49,Unit,1002,Diesel_Large,Allouez,Hibbing,43.0,44.0,828691080000.0,280.0,288.814986740618,288.814986740618,288.8360199346937,280.0,286.83160599585375,24 +53,Intermodal,1002,Diesel_Large,Hibbing,Allouez,43.0,41.0,828691080000.0,296.47058823529414,305.1281541429441,305.1281541429441,305.1297855964912,296.4705882352942,301.39650354764365,28 +57,Intermodal,1002,Diesel_Large,Allouez,Hibbing,41.0,43.0,828691080000.0,326.11764705882354,334.74920712941383,334.74920712941383,334.7576192186083,326.11764705882354,334.9090139408118,32 +27,Intermodal,1003,BEL,Allouez,Hibbing,41.0,43.0,8208000000.0,177.88235294117646,186.51317428089578,186.51317428089578,187.39317658489577,177.88235294117646,186.72657255834858,2 +35,Intermodal,1003,BEL,Hibbing,Allouez,43.0,41.0,2808004319.996545,207.52941176470588,216.1864980113696,216.1864980113696,218.8240577708916,207.52941176470588,212.4549844475042,10 +39,Intermodal,1003,BEL,Allouez,Hibbing,41.0,43.0,8208000000.0,237.1764705882353,245.80729192795462,245.80729192795462,246.6872942319546,237.1764705882353,245.9679767894695,14 +44,Unit,1003,BEL,Hibbing,Allouez,44.0,43.0,2808004319.996545,252.0,260.67653137595784,260.67653137595784,263.55653367995785,252.0,256.73038868663673,19 +45,Intermodal,1003,BEL,Allouez,Hibbing,41.0,43.0,8208000000.0,266.8235294117647,275.454350751484,275.454350751484,276.33435305548403,266.8235294117647,275.6150356129989,20 +50,Unit,1003,BEL,Hibbing,Allouez,44.0,43.0,2808004319.996545,280.0,288.67653137595784,288.67653137595784,291.55653367995785,280.0,284.73038868663673,25 +51,Intermodal,1003,BEL,Allouez,Hibbing,41.0,43.0,8208000000.0,296.47058823529414,305.10140957501335,305.10140957501335,305.98141187901336,296.4705882352942,305.3148078524663,26 +59,Intermodal,1003,BEL,Hibbing,Allouez,43.0,41.0,2808004319.996545,326.11764705882354,334.7747333054872,334.7747333054872,337.4122930650092,326.11764705882354,331.0432197416219,34 
+27,Intermodal,1004,Diesel_Large,Allouez,Hibbing,41.0,43.0,828691080000.0,177.88235294117646,186.51317428089578,186.51317428089578,186.5212678320977,177.88235294117646,186.72657255834858,2 +35,Intermodal,1004,Diesel_Large,Hibbing,Allouez,43.0,41.0,828691080000.0,207.52941176470588,216.1864980113696,216.1864980113696,216.18800275139554,207.52941176470588,212.4549844475042,10 +39,Intermodal,1004,Diesel_Large,Allouez,Hibbing,41.0,43.0,828691080000.0,237.1764705882353,245.80729192795462,245.80729192795462,245.81538547915653,237.1764705882353,245.9679767894695,14 +44,Unit,1004,Diesel_Large,Hibbing,Allouez,44.0,43.0,828691080000.0,252.0,260.67653137595784,260.67653137595784,260.6824713988032,252.0,256.73038868663673,19 +45,Intermodal,1004,Diesel_Large,Allouez,Hibbing,41.0,43.0,828691080000.0,266.8235294117647,275.454350751484,275.454350751484,275.4624443026859,266.8235294117647,275.6150356129989,20 +50,Unit,1004,Diesel_Large,Hibbing,Allouez,44.0,43.0,828691080000.0,280.0,288.67653137595784,288.67653137595784,288.6824713988032,280.0,284.73038868663673,25 +51,Intermodal,1004,Diesel_Large,Allouez,Hibbing,41.0,43.0,828691080000.0,296.47058823529414,305.10140957501335,305.10140957501335,305.10950312621526,296.4705882352942,305.3148078524663,26 +59,Intermodal,1004,Diesel_Large,Hibbing,Allouez,43.0,41.0,828691080000.0,326.11764705882354,334.7747333054872,334.7747333054872,334.77623804551314,326.11764705882354,331.0432197416219,34 +27,Intermodal,1005,BEL,Allouez,Hibbing,41.0,43.0,8208000000.0,177.88235294117646,186.51317428089578,186.51317428089578,187.39317658489577,177.88235294117646,186.72657255834858,2 +35,Intermodal,1005,BEL,Hibbing,Allouez,43.0,41.0,2808004319.996545,207.52941176470588,216.1864980113696,216.1864980113696,218.8240577708916,207.52941176470588,212.4549844475042,10 +39,Intermodal,1005,BEL,Allouez,Hibbing,41.0,43.0,8208000000.0,237.1764705882353,245.80729192795462,245.80729192795462,246.6872942319546,237.1764705882353,245.9679767894695,14 
+44,Unit,1005,BEL,Hibbing,Allouez,44.0,43.0,2808004319.996545,252.0,260.67653137595784,260.67653137595784,263.55653367995785,252.0,256.73038868663673,19 +45,Intermodal,1005,BEL,Allouez,Hibbing,41.0,43.0,8208000000.0,266.8235294117647,275.454350751484,275.454350751484,276.33435305548403,266.8235294117647,275.6150356129989,20 +50,Unit,1005,BEL,Hibbing,Allouez,44.0,43.0,2808004319.996545,280.0,288.67653137595784,288.67653137595784,291.55653367995785,280.0,284.73038868663673,25 +51,Intermodal,1005,BEL,Allouez,Hibbing,41.0,43.0,8208000000.0,296.47058823529414,305.10140957501335,305.10140957501335,305.98141187901336,296.4705882352942,305.3148078524663,26 +59,Intermodal,1005,BEL,Hibbing,Allouez,43.0,41.0,2808004319.996545,326.11764705882354,334.7747333054872,334.7747333054872,337.4122930650092,326.11764705882354,331.0432197416219,34 +27,Intermodal,1006,Diesel_Large,Allouez,Hibbing,41.0,43.0,828691080000.0,177.88235294117646,186.51317428089578,186.51317428089578,186.5212678320977,177.88235294117646,186.72657255834858,2 +35,Intermodal,1006,Diesel_Large,Hibbing,Allouez,43.0,41.0,828691080000.0,207.52941176470588,216.1864980113696,216.1864980113696,216.18800275139554,207.52941176470588,212.4549844475042,10 +39,Intermodal,1006,Diesel_Large,Allouez,Hibbing,41.0,43.0,828691080000.0,237.1764705882353,245.80729192795462,245.80729192795462,245.81538547915653,237.1764705882353,245.9679767894695,14 +47,Intermodal,1006,Diesel_Large,Hibbing,Allouez,43.0,41.0,828691080000.0,266.8235294117647,275.4810953194146,275.4827267729617,275.48435822650885,266.8235294117647,271.7494447241142,22 +51,Intermodal,1006,Diesel_Large,Allouez,Hibbing,41.0,43.0,828691080000.0,296.47058823529414,305.10140957501335,305.10140957501335,305.10950312621526,296.4705882352942,305.3148078524663,26 +59,Intermodal,1006,Diesel_Large,Hibbing,Allouez,43.0,41.0,828691080000.0,326.11764705882354,334.7747333054872,334.7747333054872,334.77623804551314,326.11764705882354,331.0432197416219,34 
+27,Intermodal,1007,BEL,Allouez,Hibbing,41.0,43.0,8208000000.0,177.88235294117646,186.51317428089578,187.39317658489577,188.27317888889576,177.88235294117646,186.72657255834858,2 +35,Intermodal,1007,BEL,Hibbing,Allouez,43.0,41.0,2808004319.996545,207.52941176470588,216.1864980113696,218.8240577708916,221.46161753041363,207.52941176470588,212.4549844475042,10 +39,Intermodal,1007,BEL,Allouez,Hibbing,41.0,43.0,8208000000.0,237.1764705882353,245.80729192795462,246.6872942319546,247.5672965359546,237.1764705882353,245.9679767894695,14 +47,Intermodal,1007,BEL,Hibbing,Allouez,43.0,41.0,2808004319.996545,266.8235294117647,275.4810953194146,275.4810953194146,278.14938472097003,266.8235294117647,271.7494447241142,22 +51,Intermodal,1007,BEL,Allouez,Hibbing,41.0,43.0,8208000000.0,296.47058823529414,305.10140957501335,305.98141187901336,306.8614141830134,296.4705882352942,305.3148078524663,26 +59,Intermodal,1007,BEL,Hibbing,Allouez,43.0,41.0,2808004319.996545,326.11764705882354,334.7747333054872,337.4122930650092,340.0498528245312,326.11764705882354,331.0432197416219,34 +27,Intermodal,1008,Diesel_Large,Allouez,Hibbing,41.0,43.0,828691080000.0,177.88235294117646,186.51317428089578,186.5212678320977,186.5293613832996,177.88235294117646,186.72657255834858,2 +35,Intermodal,1008,Diesel_Large,Hibbing,Allouez,43.0,41.0,828691080000.0,207.52941176470588,216.1864980113696,216.18800275139554,216.1895074914215,207.52941176470588,212.4549844475042,10 +39,Intermodal,1008,Diesel_Large,Allouez,Hibbing,41.0,43.0,828691080000.0,237.1764705882353,245.80729192795462,245.81538547915653,245.82347903035844,237.1764705882353,245.9679767894695,14 +47,Intermodal,1008,Diesel_Large,Hibbing,Allouez,43.0,41.0,828691080000.0,266.8235294117647,275.4810953194146,275.4827267729617,275.48435822650885,266.8235294117647,271.7494447241142,22 
+51,Intermodal,1008,Diesel_Large,Allouez,Hibbing,41.0,43.0,828691080000.0,296.47058823529414,305.10140957501335,305.10950312621526,305.11759667741717,296.4705882352942,305.3148078524663,26 +59,Intermodal,1008,Diesel_Large,Hibbing,Allouez,43.0,41.0,828691080000.0,326.11764705882354,334.7747333054872,334.77623804551314,334.7777427855391,326.11764705882354,331.0432197416219,34 +27,Intermodal,1009,BEL,Allouez,Hibbing,41.0,43.0,8208000000.0,177.88235294117646,186.51317428089578,187.39317658489577,188.27317888889576,177.88235294117646,186.72657255834858,2 +35,Intermodal,1009,BEL,Hibbing,Allouez,43.0,41.0,2808004319.996545,207.52941176470588,216.1864980113696,218.8240577708916,221.46161753041363,207.52941176470588,212.4549844475042,10 +39,Intermodal,1009,BEL,Allouez,Hibbing,41.0,43.0,8208000000.0,237.1764705882353,245.80729192795462,246.6872942319546,247.5672965359546,237.1764705882353,245.9679767894695,14 +47,Intermodal,1009,BEL,Hibbing,Allouez,43.0,41.0,2808004319.996545,266.8235294117647,275.4810953194146,278.14938472097003,280.8176741225255,266.8235294117647,271.7494447241142,22 +51,Intermodal,1009,BEL,Allouez,Hibbing,41.0,43.0,8208000000.0,296.47058823529414,305.10140957501335,305.98141187901336,306.8614141830134,296.4705882352942,305.3148078524663,26 +59,Intermodal,1009,BEL,Hibbing,Allouez,43.0,41.0,2808004319.996545,326.11764705882354,334.7747333054872,337.4122930650092,340.0498528245312,326.11764705882354,331.0432197416219,34 +28,Manifest,1010,Diesel_Large,Allouez,Hibbing,63.0,30.0,828691080000.0,177.88235294117646,186.70233846857286,186.70233846857286,186.72458623428895,178.01568627450982,186.99131350414217,3 +36,Manifest,1010,Diesel_Large,Hibbing,Allouez,50.0,42.0,828691080000.0,207.52941176470588,216.2238677177506,216.2238677177506,216.23121043150283,207.6627450980392,212.67563028912116,11 
+40,Manifest,1010,Diesel_Large,Allouez,Hibbing,63.0,30.0,828691080000.0,237.1764705882353,246.1958618491161,246.1958618491161,246.22413019295246,237.30980392156863,246.36320507047125,15 +48,Manifest,1010,Diesel_Large,Hibbing,Allouez,50.0,42.0,828691080000.0,266.8235294117647,275.5179853648094,275.5179853648094,275.52532807856164,266.956862745098,271.96987082832175,23 +52,Manifest,1010,Diesel_Large,Allouez,Hibbing,63.0,30.0,828691080000.0,296.47058823529414,305.2905737626903,305.2905737626903,305.3128215284064,296.6039215686275,305.57954879825974,27 +60,Manifest,1010,Diesel_Large,Hibbing,Allouez,50.0,42.0,828691080000.0,326.11764705882354,334.81210301186815,334.81210301186815,334.81944572562037,326.2509803921569,331.26386558323884,35 +28,Manifest,1011,BEL,Allouez,Hibbing,63.0,30.0,8208000000.0,177.88235294117646,186.70233846857286,188.27317888889576,189.15318119289574,178.01568627450982,186.99131350414217,3 +36,Manifest,1011,BEL,Hibbing,Allouez,50.0,42.0,2808004319.996545,207.52941176470588,216.2238677177506,221.46161753041363,224.34161983441362,207.6627450980392,212.67563028912116,11 +40,Manifest,1011,BEL,Allouez,Hibbing,63.0,30.0,8208000000.0,237.1764705882353,246.1958618491161,247.5672965359546,248.44729883995458,237.30980392156863,246.36320507047125,15 +48,Manifest,1011,BEL,Hibbing,Allouez,50.0,42.0,2808004319.996545,266.8235294117647,275.5179853648094,278.14938472097003,281.02938702497005,266.956862745098,271.96987082832175,23 +52,Manifest,1011,BEL,Allouez,Hibbing,63.0,30.0,8208000000.0,296.47058823529414,305.2905737626903,306.8614141830134,307.7414164870134,296.6039215686275,305.57954879825974,27 +60,Manifest,1011,BEL,Hibbing,Allouez,50.0,42.0,2808004319.996545,326.11764705882354,334.81210301186815,340.0498528245312,342.9298551285312,326.2509803921569,331.26386558323884,35 +28,Manifest,1012,Diesel_Large,Allouez,Hibbing,63.0,30.0,828691080000.0,177.88235294117646,186.70233846857286,186.70233846857286,186.72458623428895,178.01568627450982,186.99131350414217,3 
+36,Manifest,1012,Diesel_Large,Hibbing,Allouez,50.0,42.0,828691080000.0,207.52941176470588,216.2238677177506,216.2238677177506,216.23121043150283,207.6627450980392,212.67563028912116,11 +40,Manifest,1012,Diesel_Large,Allouez,Hibbing,63.0,30.0,828691080000.0,237.1764705882353,246.1958618491161,246.1958618491161,246.22413019295246,237.30980392156863,246.36320507047125,15 +48,Manifest,1012,Diesel_Large,Hibbing,Allouez,50.0,42.0,828691080000.0,266.8235294117647,275.5179853648094,275.5179853648094,275.52532807856164,266.956862745098,271.96987082832175,23 +52,Manifest,1012,Diesel_Large,Allouez,Hibbing,63.0,30.0,828691080000.0,296.47058823529414,305.2905737626903,305.2905737626903,305.3128215284064,296.6039215686275,305.57954879825974,27 +60,Manifest,1012,Diesel_Large,Hibbing,Allouez,50.0,42.0,828691080000.0,326.11764705882354,334.81210301186815,334.81210301186815,334.81944572562037,326.2509803921569,331.26386558323884,35 +26,Unit,1013,BEL,Hibbing,Allouez,44.0,43.0,2808004319.996545,168.0,176.67653137595784,176.67653137595784,179.55653367995782,168.0,172.7303886866367,1 +31,Unit,1013,BEL,Allouez,Hibbing,43.0,44.0,8208000000.0,196.0,205.19727027176702,205.19727027176702,206.077272575767,196.0,202.9782388778005,6 +38,Unit,1013,BEL,Hibbing,Allouez,44.0,43.0,2808004319.996545,224.0,232.67653137595784,232.67653137595784,235.55653367995782,224.0,228.7303886866367,13 +40,Manifest,1013,BEL,Allouez,Hibbing,63.0,30.0,8208000000.0,237.1764705882353,246.1958618491161,247.5672965359546,248.44729883995458,237.30980392156863,246.36320507047125,15 +53,Intermodal,1013,BEL,Hibbing,Allouez,43.0,41.0,2808004319.996545,296.47058823529414,305.1281541429441,305.1281541429441,307.79644354449954,296.4705882352942,301.39650354764365,28 +57,Intermodal,1013,BEL,Allouez,Hibbing,41.0,43.0,8208000000.0,326.11764705882354,334.74920712941383,334.74920712941383,335.62920943341385,326.11764705882354,334.9090139408118,32 
+26,Unit,1014,Diesel_Large,Hibbing,Allouez,44.0,43.0,828691080000.0,168.0,176.67653137595784,176.67653137595784,176.6824713988032,168.0,172.7303886866367,1 +31,Unit,1014,Diesel_Large,Allouez,Hibbing,43.0,44.0,828691080000.0,196.0,205.19727027176702,205.19727027176702,205.22888927455517,196.0,202.9782388778005,6 +38,Unit,1014,Diesel_Large,Hibbing,Allouez,44.0,43.0,828691080000.0,224.0,232.67653137595784,232.67653137595784,232.6824713988032,224.0,228.7303886866367,13 +45,Intermodal,1014,Diesel_Large,Allouez,Hibbing,41.0,43.0,828691080000.0,266.8235294117647,275.454350751484,275.454350751484,275.4624443026859,266.8235294117647,275.6150356129989,20 +53,Intermodal,1014,Diesel_Large,Hibbing,Allouez,43.0,41.0,828691080000.0,296.47058823529414,305.1281541429441,305.1297855964912,305.13141705003835,296.4705882352942,301.39650354764365,28 +57,Intermodal,1014,Diesel_Large,Allouez,Hibbing,41.0,43.0,828691080000.0,326.11764705882354,334.74920712941383,334.7576192186083,334.76603130780273,326.11764705882354,334.9090139408118,32 +26,Unit,1015,BEL,Hibbing,Allouez,44.0,43.0,2808004319.996545,168.0,176.67653137595784,176.67653137595784,179.55653367995782,168.0,172.7303886866367,1 +31,Unit,1015,BEL,Allouez,Hibbing,43.0,44.0,8208000000.0,196.0,205.19727027176702,205.19727027176702,206.077272575767,196.0,202.9782388778005,6 +38,Unit,1015,BEL,Hibbing,Allouez,44.0,43.0,2808004319.996545,224.0,232.67653137595784,232.67653137595784,235.55653367995782,224.0,228.7303886866367,13 +45,Intermodal,1015,BEL,Allouez,Hibbing,41.0,43.0,8208000000.0,266.8235294117647,275.454350751484,276.33435305548403,277.21435535948405,266.8235294117647,275.6150356129989,20 +53,Intermodal,1015,BEL,Hibbing,Allouez,43.0,41.0,2808004319.996545,296.47058823529414,305.1281541429441,307.79644354449954,310.464732946055,296.4705882352942,301.39650354764365,28 
+57,Intermodal,1015,BEL,Allouez,Hibbing,41.0,43.0,8208000000.0,326.11764705882354,334.74920712941383,335.62920943341385,336.50921173741386,326.11764705882354,334.9090139408118,32 +29,Intermodal,1016,Diesel_Large,Hibbing,Allouez,43.0,41.0,828691080000.0,177.88235294117646,186.53991884882635,186.53991884882635,186.5415503023735,177.88235294117646,182.80826825352597,4 +33,Intermodal,1016,Diesel_Large,Allouez,Hibbing,41.0,43.0,828691080000.0,207.52941176470588,216.16097183529615,216.16097183529615,216.16938392449063,207.52941176470588,216.32077864669415,8 +41,Intermodal,1016,Diesel_Large,Hibbing,Allouez,43.0,41.0,828691080000.0,237.1764705882353,245.8340364958852,245.83566794943235,245.8372994029795,237.1764705882353,242.1023859005848,16 +45,Intermodal,1016,Diesel_Large,Allouez,Hibbing,41.0,43.0,828691080000.0,266.8235294117647,275.454350751484,275.4624443026859,275.47053785388783,266.8235294117647,275.6150356129989,20 +53,Intermodal,1016,Diesel_Large,Hibbing,Allouez,43.0,41.0,828691080000.0,296.47058823529414,305.1281541429441,305.1297855964912,305.13141705003835,296.4705882352942,301.39650354764365,28 +57,Intermodal,1016,Diesel_Large,Allouez,Hibbing,41.0,43.0,828691080000.0,326.11764705882354,334.74920712941383,334.7576192186083,334.76603130780273,326.11764705882354,334.9090139408118,32 +29,Intermodal,1017,BEL,Hibbing,Allouez,43.0,41.0,2808004319.996545,177.88235294117646,186.53991884882635,186.53991884882635,189.20820825038183,177.88235294117646,182.80826825352597,4 +33,Intermodal,1017,BEL,Allouez,Hibbing,41.0,43.0,8208000000.0,207.52941176470588,216.16097183529615,216.16097183529615,217.04097413929614,207.52941176470588,216.32077864669415,8 +41,Intermodal,1017,BEL,Hibbing,Allouez,43.0,41.0,2808004319.996545,237.1764705882353,245.8340364958852,245.8340364958852,248.50232589744067,237.1764705882353,242.1023859005848,16 
+45,Intermodal,1017,BEL,Allouez,Hibbing,41.0,43.0,8208000000.0,266.8235294117647,275.454350751484,276.33435305548403,277.21435535948405,266.8235294117647,275.6150356129989,20 +54,Manifest,1017,BEL,Hibbing,Allouez,50.0,42.0,2808004319.996545,296.47058823529414,305.23775179113795,307.79644354449954,310.67644584849955,296.6039215686275,301.6696430677891,29 +58,Manifest,1017,BEL,Allouez,Hibbing,63.0,30.0,8208000000.0,326.11764705882354,335.1370383197046,335.62920943341385,336.50921173741386,326.2509803921569,335.3042586489178,33 +29,Intermodal,1018,Diesel_Large,Hibbing,Allouez,43.0,41.0,828691080000.0,177.88235294117646,186.53991884882635,186.53991884882635,186.5415503023735,177.88235294117646,182.80826825352597,4 +33,Intermodal,1018,Diesel_Large,Allouez,Hibbing,41.0,43.0,828691080000.0,207.52941176470588,216.16097183529615,216.16097183529615,216.16938392449063,207.52941176470588,216.32077864669415,8 +41,Intermodal,1018,Diesel_Large,Hibbing,Allouez,43.0,41.0,828691080000.0,237.1764705882353,245.8340364958852,245.83566794943235,245.8372994029795,237.1764705882353,242.1023859005848,16 +46,Manifest,1018,Diesel_Large,Allouez,Hibbing,63.0,30.0,828691080000.0,266.8235294117647,275.8429206726455,275.8429206726455,275.8711890164819,266.956862745098,276.0102638940006,21 +54,Manifest,1018,Diesel_Large,Hibbing,Allouez,50.0,42.0,828691080000.0,296.47058823529414,305.23775179113795,305.23775179113795,305.2469049024242,296.6039215686275,301.6696430677891,29 +58,Manifest,1018,Diesel_Large,Allouez,Hibbing,63.0,30.0,828691080000.0,326.11764705882354,335.1370383197046,335.1370383197046,335.16530666354095,326.2509803921569,335.3042586489178,33 +29,Intermodal,1019,BEL,Hibbing,Allouez,43.0,41.0,2808004319.996545,177.88235294117646,186.53991884882635,186.53991884882635,189.20820825038183,177.88235294117646,182.80826825352597,4 
+33,Intermodal,1019,BEL,Allouez,Hibbing,41.0,43.0,8208000000.0,207.52941176470588,216.16097183529615,216.16097183529615,217.04097413929614,207.52941176470588,216.32077864669415,8 +41,Intermodal,1019,BEL,Hibbing,Allouez,43.0,41.0,2808004319.996545,237.1764705882353,245.8340364958852,248.50232589744067,251.17061529899615,237.1764705882353,242.1023859005848,16 +46,Manifest,1019,BEL,Allouez,Hibbing,63.0,30.0,8208000000.0,266.8235294117647,275.8429206726455,277.21435535948405,278.09435766348406,266.956862745098,276.0102638940006,21 +54,Manifest,1019,BEL,Hibbing,Allouez,50.0,42.0,2808004319.996545,296.47058823529414,305.23775179113795,310.464732946055,313.344735250055,296.6039215686275,301.6696430677891,29 +58,Manifest,1019,BEL,Allouez,Hibbing,63.0,30.0,8208000000.0,326.11764705882354,335.1370383197046,336.50921173741386,337.3892140414139,326.2509803921569,335.3042586489178,33 +29,Intermodal,1020,Diesel_Large,Hibbing,Allouez,43.0,41.0,828691080000.0,177.88235294117646,186.53991884882635,186.5415503023735,186.54318175592067,177.88235294117646,182.80826825352597,4 +33,Intermodal,1020,Diesel_Large,Allouez,Hibbing,41.0,43.0,828691080000.0,207.52941176470588,216.16097183529615,216.16938392449063,216.1777960136851,207.52941176470588,216.32077864669415,8 +42,Manifest,1020,Diesel_Large,Hibbing,Allouez,50.0,42.0,828691080000.0,237.1764705882353,245.87092654128003,245.87092654128003,245.87826925503225,237.30980392156863,242.32281200479235,17 +46,Manifest,1020,Diesel_Large,Allouez,Hibbing,63.0,30.0,828691080000.0,266.8235294117647,275.8429206726455,275.8429206726455,275.8711890164819,266.956862745098,276.0102638940006,21 +56,Unit,1020,Diesel_Large,Hibbing,Allouez,44.0,43.0,828691080000.0,308.0,316.67653137595795,316.67653137595795,316.68247139880333,308.0,312.7303886866368,31 +58,Manifest,1020,Diesel_Large,Allouez,Hibbing,63.0,30.0,828691080000.0,326.11764705882354,335.1370383197046,335.1370383197046,335.16530666354095,326.2509803921569,335.3042586489178,33 
+29,Intermodal,1021,BEL,Hibbing,Allouez,43.0,41.0,2808004319.996545,177.88235294117646,186.53991884882635,189.20820825038183,191.8764976519373,177.88235294117646,182.80826825352597,4 +33,Intermodal,1021,BEL,Allouez,Hibbing,41.0,43.0,8208000000.0,207.52941176470588,216.16097183529615,217.04097413929614,217.92097644329613,207.52941176470588,216.32077864669415,8 +42,Manifest,1021,BEL,Hibbing,Allouez,50.0,42.0,2808004319.996545,237.1764705882353,245.87092654128003,248.50232589744067,251.38232820144066,237.30980392156863,242.32281200479235,17 +46,Manifest,1021,BEL,Allouez,Hibbing,63.0,30.0,8208000000.0,266.8235294117647,275.8429206726455,277.21435535948405,278.09435766348406,266.956862745098,276.0102638940006,21 +56,Unit,1021,BEL,Hibbing,Allouez,44.0,43.0,2808004319.996545,308.0,316.67653137595795,316.67653137595795,319.55653367995797,308.0,312.7303886866368,31 +29,Intermodal,1022,Diesel_Large,Hibbing,Allouez,43.0,41.0,828691080000.0,177.88235294117646,186.53991884882635,186.5415503023735,186.54318175592067,177.88235294117646,182.80826825352597,4 +33,Intermodal,1022,Diesel_Large,Allouez,Hibbing,41.0,43.0,828691080000.0,207.52941176470588,216.16097183529615,216.16938392449063,216.1777960136851,207.52941176470588,216.32077864669415,8 +42,Manifest,1022,Diesel_Large,Hibbing,Allouez,50.0,42.0,828691080000.0,237.1764705882353,245.87092654128003,245.87092654128003,245.87826925503225,237.30980392156863,242.32281200479235,17 +52,Manifest,1022,Diesel_Large,Allouez,Hibbing,63.0,30.0,828691080000.0,296.47058823529414,305.2905737626903,305.3128215284064,305.3350692941225,296.6039215686275,305.57954879825974,27 +30,Manifest,1023,BEL,Hibbing,Allouez,50.0,42.0,2808004319.996545,177.88235294117646,186.64951649702033,189.20820825038183,192.08821055438182,178.01568627450982,183.0814077736714,5 +34,Manifest,1023,BEL,Allouez,Hibbing,63.0,30.0,8208000000.0,207.52941176470588,217.6086187865999,217.6086187865999,218.48862109059988,207.6627450980392,217.41810872754803,9 
+56,Unit,1023,BEL,Hibbing,Allouez,44.0,43.0,2808004319.996545,308.0,316.67653137595795,316.67653137595795,319.55653367995797,308.0,312.7303886866368,31 +30,Manifest,1024,Diesel_Large,Hibbing,Allouez,50.0,42.0,828691080000.0,177.88235294117646,186.64951649702033,186.64951649702033,186.65866960830658,178.01568627450982,183.0814077736714,5 +34,Manifest,1024,Diesel_Large,Allouez,Hibbing,63.0,30.0,828691080000.0,207.52941176470588,217.6086187865999,217.6086187865999,217.65401333240675,207.6627450980392,217.41810872754803,9 +30,Manifest,1025,BEL,Hibbing,Allouez,50.0,42.0,2808004319.996545,177.88235294117646,186.64951649702033,191.8764976519373,194.7564999559373,178.01568627450982,183.0814077736714,5 +34,Manifest,1025,BEL,Allouez,Hibbing,63.0,30.0,8208000000.0,207.52941176470588,217.6086187865999,217.92097644329613,218.8009787472961,207.6627450980392,217.41810872754803,9 +28,Manifest,1030,Diesel_Large,Allouez,Hibbing,63.0,30.0,828691080000.0,177.88235294117646,186.70233846857286,186.72458623428895,186.74683400000504,178.01568627450982,186.99131350414217,3 +34,Manifest,1031,BEL,Allouez,Hibbing,63.0,30.0,8208000000.0,207.52941176470588,217.6086187865999,218.48862109059988,219.36862339459987,207.6627450980392,217.41810872754803,9 +55,Unit,1032,Diesel_Large,Allouez,Hibbing,43.0,44.0,828691080000.0,308.0,316.814986740618,316.814986740618,316.8360199346937,308.0,314.8316059958538,30 +55,Unit,1033,BEL,Allouez,Hibbing,43.0,44.0,8208000000.0,308.0,316.814986740618,316.814986740618,317.694989044618,308.0,314.8316059958538,30 +55,Unit,1034,Diesel_Large,Allouez,Hibbing,43.0,44.0,828691080000.0,308.0,316.814986740618,316.814986740618,316.8360199346937,308.0,314.8316059958538,30 diff --git a/python/altrios/lifts/dictionary.py b/python/altrios/lifts/dictionary.py new file mode 100644 index 00000000..0c66b90f --- /dev/null +++ b/python/altrios/lifts/dictionary.py @@ -0,0 +1,22 @@ + +import polars as pl + +def calculate_oc_number() -> int: + return 40 + +def calculate_truck_number(df): + 
return (df + .with_columns( + pl.max_horizontal( + pl.col("full_cars"), + calculate_oc_number() + ).alias("truck_number")) + ) + +def truck_resource(df): + total_truck_number = df.select(pl.col("truck_number").sum()).item() + return total_truck_number + +#terminal = 'Allouez' +#train_data_as_dict = schedule.build_train_timetable(terminal, swap_arrive_depart = True, as_dicts = True) +#print(train_data_as_dict) \ No newline at end of file diff --git a/python/altrios/lifts/distances.py b/python/altrios/lifts/distances.py new file mode 100644 index 00000000..93895280 --- /dev/null +++ b/python/altrios/lifts/distances.py @@ -0,0 +1,70 @@ +import numpy as np +from scipy.stats import triang + +# Yard setting: optimal layout output +YARD_TYPE = 'parallel' # choose 'perpendicular' or 'parallel' +M = 2 # decide the number of rows of parking blocks in the layout +N = 2 # decide the number of columns of parking blocks in the layout +n_t = 2 # decide the numbers of train side aisles per group +n_p = 2 # decide the numbers of parking area aisles per group +n_r = 5 # decide the number of spots within each parking block (10 * n_r = BL_l, the length of each parking block) + +# Fixed yard parameters +P = 10 # fixed aisle width +BL_w = 80 # fixed block width + +A = M * 10 * n_r + (M+1) * n_p * P # the vertical width of the yard +B = N * 80 + (N+1) * n_p * P # the horizontal length of the yard + +def create_triang_distribution(min_val, avg_val, max_val): + c = (avg_val - min_val) / (max_val - min_val) + return triang(c, loc=min_val, scale=(max_val - min_val)) + +def ugly_sigma(x): + total_sum = 0 + for i in range(1, x): + total_sum += 2 * i * (x - i) + result = total_sum / (x ** 2) + return result + +def A(M, n_r, n_p): + return M * 10 * n_r + (M+1) * n_p * P + +def B(N, n_p): + return N * 80 + (N+1) * n_p * P + +# Distance estimation +if YARD_TYPE == 'parallel': + # d_h: hostler distance + d_h_min = n_t * P + 1.5 * n_p * P + d_h_max = n_t * P + A(M, n_r, n_p) + B(N, n_p) + d_h_avg = 
0.5 * (d_h_min + d_h_max) + + # d_r: repositioning distance + d_r_min = 0 + # d_r_avg = 5 * n_r + 40 + ugly_sigma(M) * (10 * n_r + n_p*P) + ugly_sigma(N) * (80 + n_p * P) + d_r_max = 10 * n_r + 80 + A(M, n_r, n_p) - n_p * P + B(N, n_p) - n_p * P + d_r_avg = 0.5 * (d_r_min + d_r_max) + + # d_t: truck distance + d_t_min = 1.5 * n_p * P + # d_t_avg = 0.5 * (B(N, n_p) + A(M, n_r, n_p) - 0.5 * n_p * P) + d_t_max = B(N, n_p) + A(M, n_r, n_p) - 2 * n_p * P + d_t_avg = 0.5 * (d_t_min + d_t_max) + + +elif YARD_TYPE == 'perpendicular': + # d_h + d_h_min = n_t * P + 1.5 * n_p * P + d_h_avg = 10 * n_r * M + 80 * N + (M + N + 1.5) * n_p * P + 2 * n_t * P + d_h_max = n_t * P + A(M, n_r, n_p) - n_p * P + B(N, n_p) - n_p * P + + # d_r + d_r_min = 0 + d_r_avg = 5 * n_r + 40 + ugly_sigma(M) * (10 * n_r + n_p*P) + ugly_sigma(N) * (80 + n_p * P) + d_r_max = 10 * n_r + 80 + A(M, n_r, n_p) - n_p * P + B(N, n_p) - n_p * P + + # d_t + d_t_min = 1.5 * n_p * P + d_t_avg = 0.5 * (B(N, n_p) + A(M, n_r, n_p) - 0.5 * n_p * P) + d_t_max = B(N, n_p) + A(M, n_r, n_p) - 2 * n_p * P \ No newline at end of file diff --git a/python/altrios/lifts/lifts_simulator.py b/python/altrios/lifts/lifts_simulator.py new file mode 100644 index 00000000..a477d36e --- /dev/null +++ b/python/altrios/lifts/lifts_simulator.py @@ -0,0 +1,632 @@ +import simpy +import random +import polars as pl +from altrios.lifts import utilities +from altrios.lifts.parameters import * +from altrios.lifts.distances import * +from altrios.lifts.dictionary import * +from altrios.lifts.schedule import * +from altrios.lifts.vehicle_performance import record_vehicle_event, save_average_times, save_vehicle_logs + +# import sys +# +# if len(sys.argv) < 3: +# raise ValueError("Not enough arguments. 
Please provide HOSTLER_NUMBER and CRANE_NUMBER.") +# +# HOSTLER_NUMBER = int(sys.argv[1]) +# CRANE_NUMBER = int(sys.argv[2]) + +def record_event(container_id, event_type, timestamp): + global state + if container_id is None: + x = 5 + if container_id not in state.container_events: + state.container_events[container_id] = {} + state.container_events[container_id][event_type] = timestamp + + +def handle_truck_arrivals(env, in_gate_resource): + global state + truck_id = 1 + state.TRUCK_ARRIVAL_MEAN = abs(state.TRAIN_ARRIVAL_HR - state.previous_train_departure) / max(state.INBOUND_CONTAINER_NUMBER, state.OUTBOUND_CONTAINER_NUMBER) + print(f"current time is {env.now}") + print(f"next TRAIN_ARRIVAL_HR:{state.TRAIN_ARRIVAL_HR}") + print(f"TRUCK_ARRIVAL_MEAN IS {state.TRUCK_ARRIVAL_MEAN}") + + while truck_id <= state.TRUCK_NUMBERS: + inter_arrival_time = random.expovariate(1 / state.TRUCK_ARRIVAL_MEAN) + yield env.timeout(inter_arrival_time) + state.truck_arrival_time.append(env.now) + + env.process(truck_through_gate(env, in_gate_resource, truck_id)) + truck_id += 1 + + if truck_id > state.TRUCK_NUMBERS: + # print(f"truck_id = {truck_id} vs TRUCK_NUM = {TRUCK_NUMBERS}") + if not state.all_trucks_ready_event.triggered: + state.all_trucks_ready_event.succeed() + # print(f"{env.now}: All trucks arrived for the {TRAIN_ID} train.") + + +def truck_through_gate(env, in_gate_resource, truck_id): + global state + + with in_gate_resource.request() as request: + yield request + wait_time = max(0, state.truck_arrival_time[truck_id - 1] - state.last_leave_time) + if wait_time <= 0: + wait_time = 0 # first arriving trucks + # print(f"Truck {truck_id} enters the gate without waiting") + else: + # print(f"Truck {truck_id} enters the gate and queued for {wait_time} hrs") + state.truck_waiting_time.append(wait_time) + + yield env.timeout(state.TRUCK_INGATE_TIME + random.uniform(0, state.TRUCK_INGATE_TIME_DEV)) + + # Case 1: Normal handling when OC >= IC (all trucks have containers) + if 
state.OUTBOUND_CONTAINER_NUMBER >= state.INBOUND_CONTAINER_NUMBER: + env.process(handle_container(env, truck_id)) + + # Case 2: OC < IC, extra empty trucks are needed + else: + if truck_id <= state.OUTBOUND_CONTAINER_NUMBER: + env.process(handle_container(env, truck_id)) # Loaded trucks + else: + env.process(empty_truck(env, truck_id)) # Empty trucks + + +def handle_container(env, truck_id): + global state + + container_id = state.outbound_container_id_counter + if container_id is None: + x = 5 + state.outbound_container_id_counter += 1 + record_event(container_id, 'truck_arrival', env.now) + + d_t_dist = create_triang_distribution(d_t_min, d_t_avg, d_t_max).rvs() + yield env.timeout(d_t_dist / (2 * state.TRUCK_SPEED_LIMIT)) + + record_event(container_id, 'truck_drop_off', env.now) + # print(f"{env.now}: Truck {truck_id} drops outbound container {container_id}.") + state.last_leave_time = env.now + + +def empty_truck(env, truck_id): + global state + + d_t_dist = create_triang_distribution(d_t_min, d_t_avg, d_t_max).rvs() + yield env.timeout(d_t_dist / (2 * state.TRUCK_SPEED_LIMIT)) + + # print(f"{env.now}: Empty truck {truck_id} arrives.") + state.last_leave_time = env.now + + +def train_arrival(env, train_timetable, train_processing, cranes, hostlers, chassis, in_gate_resource, outbound_containers_store, truck_store, out_gate_resource): + global state + + for i, train in enumerate(train_timetable): + state.TRAIN_ARRIVAL_HR = train['arrival_time'] + state.TRAIN_DEPARTURE_HR = train['departure_time'] + state.INBOUND_CONTAINER_NUMBER = train['full_cars'] + state.OUTBOUND_CONTAINER_NUMBER = train['oc_number'] + state.TRUCK_NUMBERS = train['truck_number'] + state.TRAIN_ID = train['train_id'] + + print(f"---------- Next Train {state.TRAIN_ID} Is On the Way ----------") + print(f"IC {state.INBOUND_CONTAINER_NUMBER}") + print(f"OC {state.OUTBOUND_CONTAINER_NUMBER}") + + outbound_containers_store.items.clear() + for oc in range(state.record_oc_label, state.record_oc_label 
+ state.OUTBOUND_CONTAINER_NUMBER): # from 10001 to 10001 + OC + # print("oc_number", oc) + outbound_containers_store.put(oc) + # yield outbound_containers_store.put(oc) + # print(f"Current store contents after putting {oc}: {outbound_containers_store.items}") + + # print("outbound_containers_store is:", outbound_containers_store.items) + + # Trucks enter until the precious train departs, if not the first truck + state.previous_train_departure = train_timetable[i-1]['departure_time'] if i > 0 else 0 + print(f"Schedule {state.TRUCK_NUMBERS} Trucks arriving between previous train departure at {state.previous_train_departure} and current train arrival at {state.TRAIN_ARRIVAL_HR}") + env.process(handle_truck_arrivals(env, in_gate_resource)) + + # Trains arrive according to the timetable, fix negative delay bug + delay = state.TRAIN_ARRIVAL_HR - env.now + if delay <= 0: + yield env.timeout(0) + else: + yield env.timeout(delay) + + train_id = state.train_id_counter + print(f"Train {state.TRAIN_ID} ({train_id} in the dictionary) arrives at {env.now}") + + # for container_id in range(inbound_container_id_counter, inbound_container_id_counter + INBOUND_CONTAINER_NUMBER): + for container_id in range(int(state.inbound_container_id_counter), int(state.inbound_container_id_counter) + int(state.INBOUND_CONTAINER_NUMBER)): # fix float error + + record_event(container_id, 'train_arrival', env.now) + + with train_processing.request() as request: + yield request + state.oc_chassis_filled_event = env.event() + yield env.process(process_train(env, train_id, cranes, hostlers, chassis, in_gate_resource, outbound_containers_store, truck_store, train_processing, state.oc_chassis_filled_event, out_gate_resource)) + state.train_id_counter += 1 + + state.record_oc_label += state.OUTBOUND_CONTAINER_NUMBER + # print("record_oc_label", record_oc_label) + # print("oc_variance in train_process:", oc_variance) + + +def process_train(env, train_id, cranes, hostlers, chassis, in_gate_resource, 
outbound_containers_store, truck_store, train_processing, oc_chassis_filled_event, out_gate_resource): + global state + + start_time = env.now + + # Cranes unload all IC + unload_processes = [] + chassis_inbound_ids = [] # To save chassis_id, current_inbound_id to hostler_transfer_IC_single_loop + + # if train_id < TRAIN_NUMBERS: + for chassis_id in range(1, int(state.INBOUND_CONTAINER_NUMBER) + 1): + unload_process = env.process(crane_and_chassis(env, train_id, 'unload', cranes, hostlers, chassis, truck_store, train_processing, outbound_containers_store, in_gate_resource, out_gate_resource, oc_chassis_filled_event)) + unload_processes.append(unload_process) + + # All IC are processed + # print("Unload process is:", unload_processes) + yield simpy.events.AllOf(env, unload_processes) + results = yield simpy.events.AllOf(env, unload_processes) + + # To pass chassis_id, current_inbound_id to hostler_transfer_IC_single_loop as a list from calling chassis_inbound_ids + for result in results.values(): + chassis_id, current_inbound_id = result + chassis_inbound_ids.append((chassis_id, current_inbound_id)) + env.process(hostler_transfer(env, hostlers, 'inbound', chassis, chassis_id, current_inbound_id, truck_store, cranes, + train_processing, outbound_containers_store, in_gate_resource, oc_chassis_filled_event, + out_gate_resource)) + + # # Once all OC are dropped by hostlers, crane start working + # print("Chassis are filled with OC (-1) now. 
") + # print(f"Chassis status after OC processed is: {chassis_status}, where ") + # print(f"there are {chassis_status.count(0)} chassis is filled with OC (0)") + # print(f"there are {chassis_status.count(-1)} chassis is filled with empty (-1)") + # print(f"there are {chassis_status.count(1)} chassis is filled with IC (1)") + + # Cranes move all OC to chassis + load_processes = [] + for chassis_id in range(1, state.OUTBOUND_CONTAINER_NUMBER + 1): + load_process = env.process(crane_and_chassis(env, train_id, 'load', cranes, hostlers, chassis, truck_store, train_processing, outbound_containers_store, in_gate_resource, oc_chassis_filled_event, out_gate_resource, chassis_id=chassis_id)) + load_processes.append(load_process) + yield simpy.events.AllOf(env, load_processes) + + # Check if all outbound containers are loaded (all chassis is empty 0), the train departs + if state.chassis_status.count(-1) == state.TRAIN_UNITS: + # oc_chassis_filled_event.succeed() + state.TRAIN_ID_FIXED = state.TRAIN_ID + print(f"Train {state.TRAIN_ID_FIXED} is ready to depart at {env.now}.") + env.process(train_departure(env, train_id)) + state.time_per_train.append(env.now - start_time) + + end_time = env.now + state.time_per_train.append(end_time - start_time) + state.train_series += 1 + state.oc_variance += state.OUTBOUND_CONTAINER_NUMBER + + +def crane_and_chassis(env, train_id, action, cranes, hostlers, chassis, truck_store, train_processing, outbound_containers_store, in_gate_resource, out_gate_resource, oc_chassis_filled_event, chassis_id=None): + global state + + # # Print before requesting crane resource + if action == 'unload': + crane_id = state.crane_id_counter + state.crane_id_counter = (state.crane_id_counter % state.CRANE_NUMBER) + 1 + # print("inbound_id_counter", inbound_container_id_counter) + for container_id in range(int(state.inbound_container_id_counter), int(state.inbound_container_id_counter) + int(state.INBOUND_CONTAINER_NUMBER)): # fix float error + # 
print("container_id now:", container_id) + yield env.timeout(state.CRANE_UNLOAD_CONTAINER_TIME_MEAN + random.uniform(0, state.CRANE_MOVE_DEV_TIME)) + record_event(container_id, 'crane_unload', env.now) + # print(f"Crane {crane_id} unloads inbound container {inbound_container_id_counter} from train {train_id} at {env.now}") + + # if action == 'load': + # for container_id in range(record_oc_label, record_oc_label + OUTBOUND_CONTAINER_NUMBER): + # yield env.timeout(CRANE_LOAD_CONTAINER_TIME_MEAN + random.uniform(0, CRANE_MOVE_DEV_TIME)) + # chassis_status[chassis_id - 1] = -1 + # # print(f"Crane {crane_id} loads outbound container {container_id} to train {TRAIN_ID} at {env.now}") + # record_event(container_id, 'crane_load', env.now) + + with cranes.request() as request: + yield request + + # # Print after acquiring crane resource + # print(f"[{env.now}] Crane {crane_id_counter} acquired crane resource. Available cranes: {cranes.count}/{cranes.capacity}") + + start_time = env.now + record_vehicle_event('crane', state.crane_id_counter, 'start', start_time) # performance record: starting + + if action == 'unload': + # crane_id = crane_id_counter + # crane_id_counter = (crane_id_counter % CRANE_NUMBER) + 1 + + chassis_id = ((state.inbound_container_id_counter - 1) % state.CHASSIS_NUMBER) + 1 + + current_inbound_id = state.inbound_container_id_counter + state.inbound_container_id_counter += 1 + # yield env.timeout(CRANE_UNLOAD_CONTAINER_TIME_MEAN + random.uniform(0, CRANE_MOVE_DEV_TIME)) + + # for chassis_id in range(int(inbound_container_id_counter), int(inbound_container_id_counter) + int(INBOUND_CONTAINER_NUMBER)): + state.chassis_status[chassis_id - 1] = 1 + + end_time = env.now + record_vehicle_event('crane', state.crane_id_counter, 'end', end_time) # performance record: ending + + # hostler picks up IC + env.process(hostler_transfer(env, hostlers, 'inbound', chassis, chassis_id, current_inbound_id, truck_store, cranes, train_processing, outbound_containers_store, 
in_gate_resource, oc_chassis_filled_event, out_gate_resource)) + + return chassis_id, current_inbound_id + + elif action == 'load': + if chassis_id not in state.outbound_containers_mapping: + print(f"Notice: No outbound container mapped to chassis {chassis_id} at {env.now}") + return + + container_id = state.outbound_containers_mapping[chassis_id] # Retrieve container ID from mapping + # print("outbound_containers_mapping in crane and chassis func:", outbound_containers_mapping) + # print("container_id in crane and chassis func:", container_id) + + if state.CRANE_NUMBER == 1: + crane_id = 1 + else: + crane_id = (chassis_id % state.CRANE_NUMBER) + 1 + + state.chassis_status[chassis_id - 1] = -1 + + # yield env.timeout(CRANE_LOAD_CONTAINER_TIME_MEAN + random.uniform(0, CRANE_MOVE_DEV_TIME)) + # chassis_status[chassis_id - 1] = -1 + # print(f"Crane {crane_id} loads outbound container {container_id} from chassis {chassis_id} to train {TRAIN_ID} at {env.now}") + # record_event(container_id, 'crane_load', env.now) + + for container_id in range(state.record_oc_label, state.record_oc_label + state.OUTBOUND_CONTAINER_NUMBER): + yield env.timeout(state.CRANE_LOAD_CONTAINER_TIME_MEAN + random.uniform(0, state.CRANE_MOVE_DEV_TIME)) + # chassis_status[chassis_id - 1] = -1 + # print(f"Crane {crane_id} loads outbound container {container_id} to train {TRAIN_ID} at {env.now}") + record_event(container_id, 'crane_load', env.now) + + # # At this point, the crane resource should be released + # print(f"[{env.now}] Crane {crane_id_counter} has released crane resource. 
Available cranes: {cranes.count}/{cranes.capacity}") + + +def hostler_transfer(env, hostlers, container_type, chassis, chassis_id, container_id, truck_store, cranes, train_processing, outbound_containers_store, in_gate_resource, oc_chassis_filled_event, out_gate_resource): + global state + + with hostlers.request() as request: + yield request + if container_id is None: + x =5 + start_time = env.now + record_vehicle_event('hostler', state.hostler_id_counter, 'start', start_time) # performance record + + hostler_id = state.hostler_id_counter + state.hostler_id_counter = (state.hostler_id_counter % state.HOSTLER_NUMBER) + 1 + + with chassis.request() as chassis_request: + yield chassis_request + + if container_type == "inbound": + x = 5 + + if container_type == 'inbound' and state.chassis_status[chassis_id - 1] == 1: + d_h_dist = create_triang_distribution(d_h_min, d_h_avg, d_h_max).rvs() + state.HOSTLER_TRANSPORT_CONTAINER_TIME = d_h_dist / (2 * state.HOSTLER_SPEED_LIMIT) + print(f"Hostler pick-up time is:{state.HOSTLER_TRANSPORT_CONTAINER_TIME}") + yield env.timeout(state.HOSTLER_TRANSPORT_CONTAINER_TIME) + record_event(container_id, 'hostler_pickup', env.now) + print(f"Hostler {hostler_id} picks up inbound container {container_id} from chassis {chassis_id} and heads to parking area at {env.now}") + + state.chassis_status[chassis_id - 1] = -1 + + # Hostler drop off: different route for picking-up and dropping-off + d_h_dist = create_triang_distribution(d_h_min, d_h_avg, d_h_max).rvs() + state.HOSTLER_TRANSPORT_CONTAINER_TIME = d_h_dist / (2 * state.HOSTLER_SPEED_LIMIT) + print(f"Hostler drop-off time is:{state.HOSTLER_TRANSPORT_CONTAINER_TIME}") + yield env.timeout(state.HOSTLER_TRANSPORT_CONTAINER_TIME) + if container_id is None: + x =5 + record_event(container_id, 'hostler_dropoff', env.now) + print(f"Hostler {hostler_id} drops off inbound container {container_id} from chassis {chassis_id} and moves toward the assigned outbound container at {env.now}") + + 
end_time = env.now + record_vehicle_event('hostler', state.hostler_id_counter, 'end', end_time) # performance record + + # Process functions of notify_truck and handle_outbound_container simultaneously + env.process(notify_truck(env, truck_store, container_id, out_gate_resource)) + + # Assign outbound container and chassis_id for the hostler which drops off an inbound container + chassis_id, state.outbound_container_id = yield env.process(outbound_container_decision_making( + env, hostlers, chassis, container_id, truck_store, cranes, train_processing, + outbound_containers_store, + in_gate_resource, oc_chassis_filled_event, out_gate_resource)) + + # Process outbound containers + if chassis_id is not None and state.outbound_container_id is not None: + env.process(handle_outbound_container(env, hostler_id, chassis_id, state.outbound_container_id, truck_store, + cranes, train_processing, outbound_containers_store, in_gate_resource)) + + +# When OC are fully processed, but IC are not +def hostler_transfer_IC_single_loop(env, hostlers, container_type, chassis, chassis_id, container_id, truck_store, cranes, train_processing, outbound_containers_store, in_gate_resource, oc_chassis_filled_event, out_gate_resource): + print(f"Starting single hostler transfer IC loop for chassis {chassis_id} at {env.now}") + global state + + print(f"Requesting hostler for IC at chassis {chassis_id} at {env.now}") + + with hostlers.request() as request: + print(f"Request available hostlers: {hostlers.count} vs total hostlers {state.HOSTLER_NUMBER}, Hostlers capacity: {hostlers.capacity} at {env.now}") + yield request + + start_time = env.now + record_vehicle_event('hostler', state.hostler_id_counter, 'start', start_time) # performance record + + hostler_id = state.hostler_id_counter + state.hostler_id_counter = (state.hostler_id_counter % state.HOSTLER_NUMBER) + 1 + + with chassis.request() as chassis_request: + yield chassis_request + + if container_type == 'inbound' and 
state.chassis_status[chassis_id - 1] == 1: + state.chassis_status[chassis_id - 1] = -1 + print(f"Single loop chassis status {state.chassis_status}") + print(f"There are {state.chassis_status.count(1)} IC") + print(f"There are {state.chassis_status.count(-1)} empty") + print(f"There are {state.chassis_status.count(0)} OC") + d_h_dist = create_triang_distribution(d_h_min, d_h_avg, d_h_max).rvs() + state.HOSTLER_TRANSPORT_CONTAINER_TIME = d_h_dist / (2 * state.HOSTLER_SPEED_LIMIT) + + yield env.timeout(state.HOSTLER_TRANSPORT_CONTAINER_TIME) + # hostler picks up the rest of IC from the chassis + # chassis_status[chassis_id - 1] = -1 + record_event(container_id, 'hostler_pickup', env.now) + print(f"Hostler {hostler_id} picks up inbound container {container_id} from chassis {chassis_id} to parking area at {env.now}") + + # hostler drops off the IC + d_h_dist = create_triang_distribution(d_h_min, d_h_avg, d_h_max).rvs() + state.HOSTLER_TRANSPORT_CONTAINER_TIME = d_h_dist / (2 * state.HOSTLER_SPEED_LIMIT) + yield env.timeout(state.HOSTLER_TRANSPORT_CONTAINER_TIME) + record_event(container_id, 'hostler_dropoff', env.now) + print(f"Hostler {hostler_id} drops off inbound container {container_id} from chassis {chassis_id} to parking area at {env.now}") + + # Check if all chassis filled + if state.chassis_status.count(0) == state.OUTBOUND_CONTAINER_NUMBER and state.chassis_status.count( + -1) == state.TRAIN_UNITS - state.OUTBOUND_CONTAINER_NUMBER and not oc_chassis_filled_event.triggered: + print(f"Chassis is fully filled with OC, and cranes start moving: {state.chassis_status}") + print(f"where there are {state.chassis_status.count(0)} chassis filled with OC (0)") + print(f"where there are {state.chassis_status.count(-1)} chassis filled with empty (-1)") + print(f"where there are {state.chassis_status.count(1)} chassis filled with IC (1)") + oc_chassis_filled_event.succeed() + return + else: + print(f"Chassis is not fully filled: {state.chassis_status}") + print(f"where there 
are {state.chassis_status.count(0)} chassis filled with OC (0)") + print(f"where there are {state.chassis_status.count(-1)} chassis filled with empty (-1)") + print(f"where there are {state.chassis_status.count(1)} chassis filled with IC (1)") + + end_time = env.now + record_vehicle_event('hostler', hostler_id, 'end', end_time) # performance record + + # trucks pick up IC + yield env.process(notify_truck(env, truck_store, container_id, out_gate_resource)) + + +def outbound_container_decision_making(env, hostlers, chassis, current_inbound_id, truck_store, cranes, train_processing, outbound_containers_store, in_gate_resource, oc_chassis_filled_event, out_gate_resource): + global state + # Check if outbound_containers_store has outbound container + if len(outbound_containers_store.items) > 0: + outbound_container_id = yield outbound_containers_store.get() + print(f"Outbound containers remaining: {len(outbound_containers_store.items)}") + + if -1 in state.chassis_status: + chassis_id = state.chassis_status.index(-1) + 1 # find the first chassis + # If chassis are not assigned with outbound container + if chassis_id not in state.outbound_containers_mapping: + # outbound_container_id += state.record_oc_label + state.outbound_containers_mapping[chassis_id] = outbound_container_id + state.chassis_status[chassis_id - 1] = 0 # already assigned outbound container + print(f"OC mapping created: outbound container {outbound_container_id} assigned to chassis {chassis_id}") + else: + print(f"Chassis {chassis_id} is already mapped to an outbound container.") + else: + print("No empty chassis available for outbound container assignment.") + + # if outbound_containers_store is null, check if we need operate single loop + else: + chassis_id = None + outbound_container_id = None + # chassis_status = 1: inbound containers are not loaded + if state.chassis_status.count(1) != 0: + print(f"Haven't finished all IC yet at {env.now}. 
Starting single loop.") + chassis_id = state.chassis_status.index(1) + 1 + state.chassis_status[chassis_id - 1] = 0 # assigned with IC + # single loop takes rest inbound container + yield env.process(hostler_transfer_IC_single_loop(env, hostlers, 'inbound', chassis, chassis_id, current_inbound_id, + truck_store, cranes, train_processing, + outbound_containers_store, in_gate_resource, oc_chassis_filled_event, out_gate_resource)) + else: + print("All inbound containers have been processed.") + + if outbound_container_id is None: + x = 5 + return chassis_id, outbound_container_id + + +def handle_outbound_container(env, hostler_id, chassis_id, outbound_container_id, truck_store, cranes, train_processing, outbound_containers_store, in_gate_resource): + global state + + d_h_dist = create_triang_distribution(d_h_min, d_h_avg, d_h_max).rvs() + state.HOSTLER_TRANSPORT_CONTAINER_TIME = d_h_dist / (2 * state.HOSTLER_SPEED_LIMIT) + + d_r_dist = create_triang_distribution(d_r_min, d_r_avg, d_r_max).rvs() + state.HOSTLER_FIND_CONTAINER_TIME = d_r_dist / (2 * state.TRUCK_SPEED_LIMIT) + yield env.timeout(state.HOSTLER_FIND_CONTAINER_TIME) + + record_event(outbound_container_id, 'hostler_pickup', env.now) + print(f"Hostler {hostler_id} picks up outbound container {outbound_container_id} from parking area to chassis {chassis_id} at {env.now}") + + yield env.timeout(state.HOSTLER_TRANSPORT_CONTAINER_TIME) + + record_event(outbound_container_id, 'hostler_dropoff', env.now) + print(f"Hostler {hostler_id} drops off outbound container {outbound_container_id} to chassis {chassis_id} at {env.now}") + + +# truck pick up IC +def notify_truck(env, truck_store, container_id, out_gate_resource): + global state + truck_id = yield truck_store.get() + yield env.timeout(state.TRUCK_INGATE_TIME) + print(f"Truck {truck_id} arrives at parking area and prepare to pick up inbound container {container_id} at {env.now}") + yield env.process(truck_transfer(env, truck_id, container_id, out_gate_resource)) + 
+ +def truck_transfer(env, truck_id, container_id, out_gate_resource): + global state + + start_time = env.now + record_vehicle_event('truck', truck_id, 'start', start_time) # performance record + + # Truck moves to the parking area + yield env.timeout(state.TRUCK_TO_PARKING) + record_event(container_id, 'truck_pickup', env.now) + print(f"Truck {truck_id} picks up inbound container {container_id} at {env.now}") + + # Calculate the transport time for the truck + d_t_dist = create_triang_distribution(d_t_min, d_t_avg, d_t_max).rvs() + state.TRUCK_TRANSPORT_CONTAINER_TIME = d_t_dist / (2 * state.TRUCK_SPEED_LIMIT) + yield env.timeout(state.TRUCK_TRANSPORT_CONTAINER_TIME) + + # Request out_gate_resource resource before the truck exits + with out_gate_resource.request() as request: + yield request + + # Simulate the time it takes for the truck to pass through the gate + yield env.timeout(state.TRUCK_OUTGATE_TIME + random.uniform(0,state.TRUCK_OUTGATE_TIME_DEV)) + record_event(container_id, 'truck_exit', env.now) + print(f"Truck {truck_id} exits gate with inbound container {container_id} at {env.now}") + + # End performance recording + end_time = env.now + record_vehicle_event('truck', truck_id, 'end', end_time) + + +def train_departure(env, train_id): + global state + + if env.now < state.TRAIN_DEPARTURE_HR: + yield env.timeout(state.TRAIN_DEPARTURE_HR - env.now) + yield env.timeout(state.TRAIN_INSPECTION_TIME) + print(f"Train {state.TRAIN_ID_FIXED} ({train_id} in the dictionary) departs at {env.now}") + + for container_id in range(state.record_oc_label - state.OUTBOUND_CONTAINER_NUMBER, state.record_oc_label): + record_event(container_id, 'train_depart', env.now) + + +def run_simulation( + train_consist_plan: pl.DataFrame, + terminal: str, + out_path = None): + global state + state.terminal = terminal + state.initialize_from_consist_plan(train_consist_plan) + + print(f"Starting simulation with No.{state.TRAIN_ID} trains, {state.HOSTLER_NUMBER} hostlers, 
{state.CRANE_NUMBER} cranes, and {state.TRUCK_NUMBERS} trucks.") + env = simpy.Environment() + + # Resources + train_processing = simpy.Resource(env, capacity=1) + cranes = simpy.Resource(env, capacity=state.CRANE_NUMBER) + chassis = simpy.Resource(env, capacity=state.CHASSIS_NUMBER) + hostlers = simpy.Resource(env, capacity=state.HOSTLER_NUMBER) + in_gate_resource = simpy.Resource(env, capacity=state.IN_GATE_NUMBERS) + out_gate_resource = simpy.Resource(env, capacity=state.OUT_GATE_NUMBERS) + outbound_containers_store = simpy.Store(env, capacity=100) + truck_store = simpy.Store(env, capacity=100) + + # Initialize trucks + truck_store.items.clear() + # print("TRUCK_NUMBERS:", TRUCK_NUMBERS) + for truck_id in range(1, 100 + 1): + truck_store.put(truck_id) + # print("TRUCK_STORE:", truck_store.items) + + state.all_trucks_ready_event = env.event() + + # # toy case + # train_timetable = [ + # {"train_id": 19, "arrival_time": 187, "departure_time": 200, "empty_cars": 3, "full_cars":7, "oc_number": 2, "truck_number":7 }, + # {"train_id": 25, "arrival_time": 250, "departure_time": 350, "empty_cars": 4, "full_cars":6, "oc_number": 2, "truck_number":6 }, + # {"train_id": 49, "arrival_time": 400, "departure_time": 600, "empty_cars": 5, "full_cars":5, "oc_number": 2, "truck_number":5 }, + # {"train_id": 60, "arrival_time": 650, "departure_time": 750, "empty_cars": 6, "full_cars":4, "oc_number": 2, "truck_number":4 }, + # {"train_id": 12, "arrival_time": 800, "departure_time": 1000, "empty_cars": 7, "full_cars":3, "oc_number": 4, "truck_number":4 }, + # ] + + # REAL TEST + train_timetable = build_train_timetable(train_consist_plan, terminal, swap_arrive_depart = True, as_dicts = True) + TRAIN_NUMBERS = len(train_timetable) + + # env.process(train_arrival(env, train_processing, cranes, in_gate_resource, outbound_containers_store, truck_store, train_timetable)) + env.process(train_arrival(env, train_timetable, train_processing, cranes, hostlers, chassis, in_gate_resource, + 
outbound_containers_store, truck_store, out_gate_resource)) + + env.run(until=state.sim_time) + + # Performance Matrix: train processing time + avg_time_per_train = sum(state.time_per_train) / len(state.time_per_train) + print(f"Average train processing time: {sum(state.time_per_train) / len(state.time_per_train) if state.time_per_train else 0:.2f}") + print("Simulation completed. ") + with open("avg_time_per_train.txt", "w") as f: + f.write(str(avg_time_per_train)) + + # Create DataFrame for container events + container_data = ( + pl.from_dicts( + [dict(event, **{'container_id': container_id}) for container_id, event in state.container_events.items()] + ) + .with_columns( + pl.when(pl.col("container_id") < 10001).then(pl.lit("inbound")).otherwise(pl.lit("outbound")).alias("container_type") + ) + .with_columns( + pl.when( + pl.col("container_type") == pl.lit("inbound"), + pl.col("truck_exit").is_not_null(), + pl.col("train_arrival").is_not_null() + ) + .then( + pl.col("truck_exit") - pl.col("train_arrival") + ) + .when( + pl.col("container_type") == pl.lit("outbound"), + pl.col("train_depart").is_not_null(), + pl.col("truck_drop_off").is_not_null() + ) + .then( + pl.col("train_depart") - pl.col("truck_drop_off") + ) + .otherwise(None) + .alias("container_processing_time") + ) + .sort("container_id") + .select(pl.col("container_id", "container_type"), pl.all().exclude("container_id", "container_type")) + ) + if out_path is not None: + container_data.write_excel(out_path / f"simulation_crane_{state.CRANE_NUMBER}_hostler_{state.HOSTLER_NUMBER}.xlsx") + + # Use save_average_times and save_vehicle_logs for vehicle related logs + save_average_times() + save_vehicle_logs() + + print("Done!") + return container_data + + +if __name__ == "__main__": + run_simulation( + train_consist_plan=pl.read_csv(utilities.package_root() / 'demos' / 'starter_demo' / 'train_consist_plan.csv'), + terminal = "Allouez", + out_path = utilities.package_root() / 'demos' / 'starter_demo' / 
'results' + ) \ No newline at end of file diff --git a/python/altrios/lifts/merged_baseline.py b/python/altrios/lifts/merged_baseline.py new file mode 100644 index 00000000..b5bcb455 --- /dev/null +++ b/python/altrios/lifts/merged_baseline.py @@ -0,0 +1,454 @@ +import simpy +import random +import pandas as pd +from parameters import * +from distances import * +from dictionary import * +from vehicle_performance import record_vehicle_event, save_average_times, save_vehicle_logs + + +CRANE_NUMBER = 2 +HOSTLER_NUMBER = 20 + +def record_event(container_id, event_type, timestamp): + if container_id not in container_events: + container_events[container_id] = {} + container_events[container_id][event_type] = timestamp + + +def handle_truck_arrivals(env, in_gate_resource, truck_numbers): + global all_trucks_ready_event, truck_processed + + truck_id = 1 + truck_processed = 0 + + while truck_id <= TRUCK_NUMBERS: + inter_arrival_time = random.expovariate(1 / TRUCK_ARRIVAL_MEAN) + yield env.timeout(inter_arrival_time) + truck_arrival_time.append(env.now) + + env.process(truck_through_gate(env, in_gate_resource, truck_id)) + truck_id += 1 + + if truck_id > TRUCK_NUMBERS: + # print(f"truck_id = {truck_id} vs TRUCK_NUM = {TRUCK_NUMBERS}") + if not all_trucks_ready_event.triggered: + all_trucks_ready_event.succeed() + # print(f"{env.now}: All trucks arrived for the {TRAIN_ID} train.") + + +def truck_through_gate(env, in_gate_resource, truck_id): + global last_leave_time, INBOUND_CONTAINER_NUMBER, OUTBOUND_CONTAINER_NUMBER + + with in_gate_resource.request() as request: + yield request + wait_time = max(0, truck_arrival_time[truck_id - 1] - last_leave_time) + if wait_time <= 0: + wait_time = 0 # first arriving trucks + print(f"Truck {truck_id} enters the gate without waiting") + else: + print(f"Truck {truck_id} enters the gate and queued for {wait_time} hrs") + truck_waiting_time.append(wait_time) + + yield env.timeout(TRUCK_INGATE_TIME + random.uniform(0, TRUCK_INGATE_TIME_DEV)) + 
+ # Case 1: Normal handling when OC >= IC (all trucks have containers) + if OUTBOUND_CONTAINER_NUMBER >= INBOUND_CONTAINER_NUMBER: + env.process(handle_container(env, truck_id)) + + # Case 2: OC < IC, extra empty trucks are needed + else: + if truck_id <= OUTBOUND_CONTAINER_NUMBER: + env.process(handle_container(env, truck_id)) # Loaded trucks + else: + env.process(empty_truck(env, truck_id)) # Empty trucks + + +def handle_container(env, truck_id): + global outbound_container_id_counter, last_leave_time + + container_id = outbound_container_id_counter + 1 + outbound_container_id_counter += 1 + record_event(container_id, 'truck_arrival', env.now) + + d_g_dist = create_triang_distribution(d_t_min, d_t_avg, d_t_max).rvs() + yield env.timeout(d_g_dist / (2 * TRUCK_SPEED_LIMIT)) + + record_event(container_id, 'truck_drop_off', env.now) + print(f"{env.now}: Truck {truck_id} drops outbound container {container_id}.") + last_leave_time = env.now + + +def empty_truck(env, truck_id): + global inbound_container_id_counter, last_leave_time + + container_id = inbound_container_id_counter + OUTBOUND_CONTAINER_NUMBER + 1 + record_event(container_id, 'truck_arrival', env.now) + + d_g_dist = create_triang_distribution(d_t_min, d_t_avg, d_t_max).rvs() + yield env.timeout(d_g_dist / (2 * TRUCK_SPEED_LIMIT)) + + record_event(container_id, 'truck_drop_off', env.now) + print(f"{env.now}: Empty truck {truck_id} arrives.") + last_leave_time = env.now + + +def train_arrival(env, train_timetable, train_processing, cranes, hostlers, chassis, in_gate_resource, outbound_containers_store, truck_store, out_gate_resource): +# def train_arrival(env, train_processing, cranes, in_gate_resource, outbound_containers_store, truck_store, train_timetable): + global train_id_counter, TRUCK_NUMBERS, INBOUND_CONTAINER_NUMBER, OUTBOUND_CONTAINER_NUMBER, TRAIN_DEPARTURE_HR + + + for i, train in enumerate(train_timetable): + TRAIN_ARRIVAL_HR = train['arrival_time'] # get the train's arrival time + TRAIN_DEPARTURE_HR = 
train['departure_time'] # get the train's departure time + INBOUND_CONTAINER_NUMBER = train['full_cars'] + OUTBOUND_CONTAINER_NUMBER = train['oc_number'] + TRUCK_NUMBERS = train['truck_number'] + TRAIN_ID = train['train_id'] + + print(f"---------- Next Train {TRAIN_ID} Is On the Way ----------") + print(f"IC {INBOUND_CONTAINER_NUMBER}") + print(f"OC {OUTBOUND_CONTAINER_NUMBER}") + + # if this is not the first train, schedule truck arrivals after the previous train departs + previous_train_departure = train_timetable[i-1]['departure_time'] + print(f"Schedule {TRUCK_NUMBERS} Trucks arriving between previous train departure at {previous_train_departure} and current train arrival at {TRAIN_ARRIVAL_HR}") + env.process(handle_truck_arrivals(env, in_gate_resource, outbound_containers_store)) + + # wait until the current train's arrival time + yield env.timeout(TRAIN_ARRIVAL_HR - env.now) + + train_id = train_id_counter + print(f"Train {train_id} arrives at {env.now}") + + with train_processing.request() as request: + yield request + oc_chassis_filled_event = env.event() + # yield env.process(process_train(env, train_id, cranes, TRAIN_DEPARTURE_HR)) + yield env.process(process_train(env, train_id, cranes, hostlers, chassis, in_gate_resource, outbound_containers_store, truck_store, train_processing, oc_chassis_filled_event, out_gate_resource)) + train_id_counter += 1 + + +def process_train(env, train_id, cranes, hostlers, chassis, in_gate_resource, outbound_containers_store, truck_store, train_processing, oc_chassis_filled_event, out_gate_resource): + global time_per_train, train_series, INBOUND_CONTAINER_NUMBER, OUTBOUND_CONTAINER_NUMBER + + start_time = env.now + + # Cranes unload all IC + unload_processes = [] + chassis_inbound_ids = [] # To save chassis_id, current_inbound_id to hostler_transfer_IC_single_loop + for _ in range(INBOUND_CONTAINER_NUMBER): + unload_process = env.process(crane_and_chassis(env, train_id, 'unload', cranes, hostlers, chassis, truck_store, train_processing, outbound_containers_store, in_gate_resource, out_gate_resource, oc_chassis_filled_event)) + 
unload_processes.append(unload_process) + # All IC are processed + results = yield simpy.events.AllOf(env, unload_processes) + + # To pass chassis_id, current_inbound_id to hostler_transfer_IC_single_loop as a list from calling chassis_inbound_ids + for result in results.values(): + chassis_id, current_inbound_id = result + chassis_inbound_ids.append((chassis_id, current_inbound_id)) + + # Are all chassis filled with OC? + # Once all OC are dropped by hostlers, crane start working + print("Check before cranes start: Chassis filled with OC (-1) ? ") + print(f"Chassis status after OC processed is: {chassis_status}, where ") + print(f"there are {chassis_status.count(0)} chassis is filled with OC (0)") + print(f"there are {chassis_status.count(-1)} chassis is filled with empty (-1)") + print(f"there are {chassis_status.count(1)} chassis is filled with IC (1)") + + if chassis_status.count(1) != 0: # IC is not fully processed + print("Haven't finished all IC yet") + # env.process(hostler_transfer_IC_single_loop(env, hostlers, 'inbound', chassis, chassis_id, current_inbound_id, truck_store, oc_chassis_filled_event, out_gate_resource)) + env.process(hostler_transfer_IC_single_loop(env, hostlers, 'inbound', chassis, chassis_id, current_inbound_id, truck_store, cranes, train_processing, + outbound_containers_store, in_gate_resource, oc_chassis_filled_event, out_gate_resource)) + + yield oc_chassis_filled_event + + # Cranes move all OC to chassis + load_processes = [] + for chassis_id in range(1, OUTBOUND_CONTAINER_NUMBER + 1): + load_process = env.process(crane_and_chassis(env, train_id, 'load', cranes, hostlers, chassis, truck_store, train_processing, outbound_containers_store, in_gate_resource, oc_chassis_filled_event, out_gate_resource, chassis_id=chassis_id)) + load_processes.append(load_process) + yield simpy.events.AllOf(env, load_processes) + + # Check if all outbound containers are loaded (all chassis is empty 0), the train departs + if chassis_status.count(-1) == 
TRAIN_UNITS: + oc_chassis_filled_event.succeed() + print(f"Train {TRAIN_ID} is ready to depart.") + env.process(train_departure(env, train_id)) + time_per_train.append(env.now - start_time) + + end_time = env.now + time_per_train.append(end_time - start_time) + train_series += 1 + + +def crane_and_chassis(env, train_id, action, cranes, hostlers, chassis, truck_store, train_processing, outbound_containers_store, in_gate_resource, out_gate_resource, oc_chassis_filled_event, chassis_id=None): + global crane_id_counter, chassis_status, inbound_container_id_counter, outbound_containers_mapping, outbound_container_id_counter, INBOUND_CONTAINER_NUMBER, OUTBOUND_CONTAINER_NUMBER + + with cranes.request() as request: + yield request + + start_time = env.now + record_vehicle_event('crane', crane_id_counter, 'start', start_time) # performance record: starting + + if action == 'unload': + crane_id = crane_id_counter + crane_id_counter = (crane_id_counter % CRANE_NUMBER) + 1 + + chassis_id = ((inbound_container_id_counter - 1) % CHASSIS_NUMBER) + 1 + + current_inbound_id = inbound_container_id_counter + inbound_container_id_counter += 1 + yield env.timeout(CRANE_UNLOAD_CONTAINER_TIME_MEAN + random.uniform(0, CRANE_MOVE_DEV_TIME)) + + end_time = env.now + record_vehicle_event('crane', crane_id_counter, 'end', end_time) # performance record: ending + + # print(f"length of chassis status: {len(chassis_status)}") + chassis_status[chassis_id - 1] = 1 + record_event(current_inbound_id, 'crane_unload', env.now) + print(f"Crane {crane_id} unloads inbound container {current_inbound_id} at chassis {chassis_id} from train {train_id} at {env.now}") + env.process(hostler_transfer(env, hostlers, 'inbound', chassis, chassis_id, current_inbound_id, truck_store, cranes, train_processing, outbound_containers_store, in_gate_resource, oc_chassis_filled_event, out_gate_resource)) + + return chassis_id, current_inbound_id + + elif action == 'load': + if chassis_id not in outbound_containers_mapping: 
+ print(f"Error: No outbound container mapped to chassis {chassis_id} at {env.now}") + return + + container_id = outbound_containers_mapping[chassis_id] # Retrieve container ID from mapping + + if CRANE_NUMBER == 1: + crane_id = 1 + else: + crane_id = (chassis_id % CRANE_NUMBER) + 1 + + yield env.timeout(CRANE_LOAD_CONTAINER_TIME_MEAN + random.uniform(0, CRANE_MOVE_DEV_TIME)) + chassis_status[chassis_id - 1] = -1 + record_event(container_id, 'crane_load', env.now) + print(f"Crane {crane_id} loads outbound container {container_id} from chassis {chassis_id} to train {TRAIN_ID} at {env.now}") + + +def hostler_transfer(env, hostlers, container_type, chassis, chassis_id, container_id, truck_store, cranes, train_processing, outbound_containers_store, in_gate_resource, oc_chassis_filled_event, out_gate_resource): + global hostler_id_counter + + with hostlers.request() as request: + yield request + + start_time = env.now + record_vehicle_event('hostler', hostler_id_counter, 'start', start_time) # performance record + + hostler_id = hostler_id_counter + hostler_id_counter = (hostler_id_counter % HOSTLER_NUMBER) + 1 + + with chassis.request() as chassis_request: + yield chassis_request + + if container_type == 'inbound' and chassis_status[chassis_id - 1] == 1: + d_t_dist = create_triang_distribution(d_t_min, d_t_avg, d_t_max).rvs() + d_y_dist = create_triang_distribution(d_y_min, d_y_avg, d_y_max).rvs() + HOSTLER_TRANSPORT_CONTAINER_TIME = (d_t_dist + d_y_dist) / (2 * HOSTLER_SPEED_LIMIT) + yield env.timeout(HOSTLER_TRANSPORT_CONTAINER_TIME) + chassis_status[chassis_id - 1] = -1 + record_event(container_id, 'hostler_pickup', env.now) + print(f"Hostler {hostler_id} picks up inbound container {container_id} from chassis {chassis_id} to parking area at {env.now}") + yield env.timeout(HOSTLER_TRANSPORT_CONTAINER_TIME) + record_event(container_id, 'hostler_dropoff', env.now) + print(f"Hostler {hostler_id} drops off inbound container {container_id} from chassis {chassis_id} to 
parking area at {env.now}") + + end_time = env.now + record_vehicle_event('hostler', hostler_id_counter, 'end', end_time) # performance record + + yield env.process(notify_truck(env, truck_store, container_id, out_gate_resource)) + + # Check and process outbound container + yield env.process(handle_outbound_container(env, hostlers, chassis, chassis_id, truck_store, cranes, + train_processing, outbound_containers_store, + in_gate_resource, oc_chassis_filled_event)) + + # When all chassis are either filled with outbound container or empty, the cranes start loading + if chassis_status.count(0) == OUTBOUND_CONTAINER_NUMBER and chassis_status.count( + -1) == TRAIN_UNITS - OUTBOUND_CONTAINER_NUMBER and not oc_chassis_filled_event.triggered: + print(f"Chassis is fully filled with OC, and cranes start moving: {chassis_status}") + print(f"where there are {chassis_status.count(0)} chassis is filled with OC (0)") + print(f"where there are {chassis_status.count(-1)} chassis is filled with empty (-1)") + print(f"where there are {chassis_status.count(1)} chassis is filled with IC (1)") + oc_chassis_filled_event.succeed() + else: + print(f"Chassis is not fully filled: {chassis_status}") + print(f"where there are {chassis_status.count(0)} chassis is filled with OC (0)") + print(f"where there are {chassis_status.count(-1)} chassis is filled with empty (-1)") + print(f"where there are {chassis_status.count(1)} chassis is filled with IC (1)") + return + + +# When OC are fully processed, but IC are not +def hostler_transfer_IC_single_loop(env, hostlers, container_type, chassis, chassis_id, container_id, truck_store, cranes, train_processing, outbound_containers_store, in_gate_resource, oc_chassis_filled_event, out_gate_resource): + print(f"Starting hostler_transfer_IC_single_loop for chassis {chassis_id} at {env.now}") + global hostler_id_counter + + print(f"Requesting hostler for chassis {chassis_id} at {env.now}") + + with hostlers.request() as request: + print(f"Request available 
hostlers: {hostlers.count} vs total hostlers {HOSTLER_NUMBER}, Hostlers capacity: {hostlers.capacity} at {env.now}") + yield request + + start_time = env.now + record_vehicle_event('hostler', hostler_id_counter, 'start', start_time) # performance record + + hostler_id = hostler_id_counter + hostler_id_counter = (hostler_id_counter % HOSTLER_NUMBER) + 1 + + with chassis.request() as chassis_request: + yield chassis_request + + if container_type == 'inbound' and chassis_status[chassis_id - 1] == 1: + d_t_dist = create_triang_distribution(d_t_min, d_t_avg, d_t_max).rvs() + d_y_dist = create_triang_distribution(d_y_min, d_y_avg, d_y_max).rvs() + HOSTLER_TRANSPORT_CONTAINER_TIME = (d_t_dist + d_y_dist) / (2 * HOSTLER_SPEED_LIMIT) + + yield env.timeout(HOSTLER_TRANSPORT_CONTAINER_TIME) + # hostler picks up the rest of IC from the chassis + chassis_status[chassis_id - 1] = -1 + record_event(container_id, 'hostler_pickup', env.now) + print(f"Hostler {hostler_id} picks up inbound container {container_id} from chassis {chassis_id} to parking area at {env.now}") + # hostler drops off the IC + yield env.timeout(HOSTLER_TRANSPORT_CONTAINER_TIME) + record_event(container_id, 'hostler_dropoff', env.now) + print(f"Hostler {hostler_id} drops off inbound container {container_id} from chassis {chassis_id} to parking area at {env.now}") + + # Check if all chassis filled + if chassis_status.count(0) == OUTBOUND_CONTAINER_NUMBER and chassis_status.count( + -1) == TRAIN_UNITS - OUTBOUND_CONTAINER_NUMBER and not oc_chassis_filled_event.triggered: + print(f"Chassis is fully filled with OC, and cranes start moving: {chassis_status}") + oc_chassis_filled_event.succeed() + return + else: + print(f"Chassis is not fully filled: {chassis_status}") + print(f"where there are {chassis_status.count(0)} chassis filled with OC (0)") + print(f"where there are {chassis_status.count(-1)} chassis filled with empty (-1)") + print(f"where there are {chassis_status.count(1)} chassis filled with IC (1)") + + 
end_time = env.now + record_vehicle_event('hostler', hostler_id, 'end', end_time) # performance record + + # trucks pick up IC + yield env.process(notify_truck(env, truck_store, container_id, out_gate_resource)) + + +def handle_outbound_container(env, hostlers, chassis, chassis_id, truck_store, cranes, train_processing, outbound_containers_store, in_gate_resource, oc_chassis_filled_event): + global HOSTLER_FIND_CONTAINER_TIME, HOSTLER_TRANSPORT_CONTAINER_TIME, chassis_status, hostler_id_counter, outbound_container_id_counter, outbound_containers_mapping + + hostler_id = hostler_id_counter + hostler_id_counter = (hostler_id_counter % HOSTLER_NUMBER) + 1 + + outbound_container_id = yield outbound_containers_store.get() + + if chassis_id not in outbound_containers_mapping: # New mapping from outbound containers to chassis + outbound_container_id = outbound_container_id + outbound_containers_mapping[chassis_id] = outbound_container_id + chassis_status[chassis_id - 1] = 0 + print(f"New mapping created: outbound container {outbound_container_id} to chassis {chassis_id} at {env.now}") + + outbound_container_id = outbound_containers_mapping[chassis_id] + d_find_dist = create_triang_distribution(0, 0.5*(A+B), (A+B)).rvs() + HOSTLER_FIND_CONTAINER_TIME = d_find_dist / (2 * TRUCK_SPEED_LIMIT) + yield env.timeout(HOSTLER_FIND_CONTAINER_TIME) + record_event(outbound_container_id, 'hostler_pickup', env.now) + print(f"Hostler {hostler_id} brings back outbound container {outbound_container_id} from parking area to chassis {chassis_id} at {env.now}") + yield env.timeout(HOSTLER_TRANSPORT_CONTAINER_TIME) + record_event(outbound_container_id, 'hostler_dropoff', env.now) + print(f"Hostler {hostler_id} drops off outbound container {outbound_container_id} from parking area to chassis {chassis_id} at {env.now}") + + +def notify_truck(env, truck_store, container_id, out_gate_resource): + + truck_id = yield truck_store.get() + yield env.timeout(TRUCK_INGATE_TIME) + print(f"Truck {truck_id} 
arrives at parking area at {env.now}") + yield env.process(truck_transfer(env, truck_id, container_id, out_gate_resource)) + + +def truck_transfer(env, truck_id, container_id, out_gate_resource): + global TRUCK_INGATE_TIME, TRUCK_TRANSPORT_CONTAINER_TIME, outbound_container_id_counter + + start_time = env.now + record_vehicle_event('truck', truck_id, 'start', start_time) # performance record + + # Truck moves to the parking area + yield env.timeout(TRUCK_TO_PARKING) + record_event(container_id, 'truck_pickup', env.now) + print(f"Truck {truck_id} picks up inbound container {container_id} at {env.now}") + + # Calculate the transport time for the truck + d_g_dist = create_triang_distribution(d_t_min, d_t_avg, d_t_max).rvs() + TRUCK_TRANSPORT_CONTAINER_TIME = d_g_dist / (2 * TRUCK_SPEED_LIMIT) + yield env.timeout(TRUCK_TRANSPORT_CONTAINER_TIME) + + # Request out_gate_resource resource before the truck exits + with out_gate_resource.request() as request: + yield request + + # Simulate the time it takes for the truck to pass through the gate + yield env.timeout(TRUCK_OUTGATE_TIME + random.uniform(0,TRUCK_OUTGATE_TIME_DEV)) + record_event(container_id, 'truck_exit', env.now) + print(f"Truck {truck_id} exits gate with inbound container {container_id} at {env.now}") + + # End performance recording + end_time = env.now + record_vehicle_event('truck', truck_id, 'end', end_time) + + +def train_departure(env, train_id): + if env.now < TRAIN_DEPARTURE_HR: + yield env.timeout(TRAIN_DEPARTURE_HR - env.now) + yield env.timeout(TRAIN_INSPECTION_TIME) + print(f"Train {train_id} departs at {env.now}") + + +def run_simulation(): + global all_trucks_ready_event + + print(f"Starting simulation with No.{TRAIN_ID} trains, {HOSTLER_NUMBER} hostlers, {CRANE_NUMBER} cranes, and {TRUCK_NUMBERS} trucks.") + env = simpy.Environment() + + # Resources + train_processing = simpy.Resource(env, capacity=1) + cranes = simpy.Resource(env, capacity=CRANE_NUMBER) + chassis = simpy.Resource(env, 
capacity=CHASSIS_NUMBER) + hostlers = simpy.Resource(env, capacity=HOSTLER_NUMBER) + in_gate_resource = simpy.Resource(env, capacity=IN_GATE_NUMBERS) + out_gate_resource = simpy.Resource(env, capacity=OUT_GATE_NUMBERS) + outbound_containers_store = simpy.Store(env, capacity=OUTBOUND_CONTAINER_NUMBER) + truck_store = simpy.Store(env, capacity=TRUCK_NUMBERS) + + # Initialize trucks + for truck_id in range(1, TRUCK_NUMBERS + 1): + truck_store.put(truck_id) + + all_trucks_ready_event = env.event() + + train_timetable = [ + {"train_id": 19, "arrival_time": 187, "departure_time": 200, "empty_cars": 3, "full_cars":7, "oc_number": 2, "truck_number":7 }, + {"train_id": 25, "arrival_time": 250, "departure_time": 350, "empty_cars": 4, "full_cars":6, "oc_number": 2, "truck_number":6 }, + {"train_id": 49, "arrival_time": 400, "departure_time": 600, "empty_cars": 5, "full_cars":5, "oc_number": 2, "truck_number":5 }, + {"train_id": 60, "arrival_time": 650, "departure_time": 750, "empty_cars": 6, "full_cars":4, "oc_number": 2, "truck_number":4 }, + {"train_id": 12, "arrival_time": 800, "departure_time": 1000, "empty_cars": 7, "full_cars":3, "oc_number": 4, "truck_number":4 }, + ] + + # # REAL TEST + # train_timetable = train_timetable(terminal) + + # env.process(train_arrival(env, train_processing, cranes, in_gate_resource, outbound_containers_store, truck_store, train_timetable)) + env.process(train_arrival(env, train_timetable, train_processing, cranes, hostlers, chassis, in_gate_resource, + outbound_containers_store, truck_store, out_gate_resource)) + + env.run(until=state.sim_time) + + print(f"Average train processing time: {sum(time_per_train) / len(time_per_train) if time_per_train else 0:.2f}") + print("Simulation completed. 
") + +if __name__ == "__main__": + run_simulation() \ No newline at end of file diff --git a/python/altrios/lifts/parameters.py b/python/altrios/lifts/parameters.py new file mode 100644 index 00000000..89f40f1c --- /dev/null +++ b/python/altrios/lifts/parameters.py @@ -0,0 +1,123 @@ +import polars +import simpy +from dataclasses import dataclass, field +from altrios.lifts.schedule import * + +def train_arrival_parameters(train_consist_plan, terminal, train_id_counter): + timetable = build_train_timetable(train_consist_plan, terminal, swap_arrive_depart = False, as_dicts = False) + TRAIN_TIMETABLE = timetable.iloc[train_id_counter-1] + + return TRAIN_TIMETABLE + +@dataclass +class LiftsState: + # Simulation parameters + random_seed: int = 42 + sim_time: int = 1100 + terminal: str = 'Allouez' # choose 'Hibbing' or 'Allouez' + # Counting vehicles + train_id_counter: int = 1 + crane_id_counter: int = 1 + hostler_id_counter: int = 1 + truck_id_counter: int = 1 + total_initial_oc_trucks: int = 1 + empty_truck_id_counter: int = 1 + # inbound container counting + inbound_containers_processed: int = 0 # trucks drop off OC to chassis + inbound_containers_hostler_processed: int = 0 # hostlers pick up IC from chassis + inbound_container_id_counter: int = 1 + # outbound container counting + outbound_container_id_counter: int = 10001 + outbound_containers_processed: int = 0 # trucks pick up IC from chassis + outbound_containers_hostler_processed: int = 0 # hostlers drop off OC to chassis + outbound_container_id: int = 0 # initialize OC id for chassis assignment + record_oc_label: int = 10001 # update outbound_containers_mapping among trains + oc_variance: int = 0 # record previous batch OC numbers cumulatively + # yield events or conditions + all_trucks_ready_event: simpy.events.Event = None # initialize trucks + train_has_arrived_event: simpy.events.Event = None # crane starts working after the train arrives + train_departure_event: simpy.events.Event = None # train arrives 
after the last train departs + oc_chassis_filled_event: simpy.events.Event = None # outbound containers fill available chassis before cranes load + # Trains + # TRAIN_UNITS = int(input("Enter the number of train units: ")) + # TRAIN_ARRIVAL_MEAN = 10 + TRAIN_INSPECTION_TIME: float = 10/60 # hr + previous_train_departure: float = 0 + train_series: int = 0 + time_per_train: list[float] = field(default_factory = lambda: []) + train_delay_time: list[float] = field(default_factory = lambda: []) + # Containers + CONTAINERS_PER_CAR: int = 1 + CONTAINER_LEN: float = 20 # 1 TEU = 20 ft long, 8 ft wide, and 8.6 ft tall + CONTAINER_WID: float = 8 + CONTAINER_TAL: float = 8.6 + container_events: dict = field(default_factory = lambda: {}) # Dictionary to store container event data + # Cranes + # CRANE_NUMBER = int(input("Enter the number of crane: ")) + CRANE_NUMBER: int = 1 + CONTAINERS_PER_CRANE_MOVE_MEAN: float = 600 # 10ft/min = 600 ft/hr, crane speed + CRANE_MOVE_DEV_TIME: float = 5/60 # hr + outbound_containers_mapping: dict = field(default_factory = lambda: {}) # To keep track of outbound containers ID mapped to chassis + # Hostlers + # HOSTLER_NUMBER = int(input("Enter the number of hostler: ")) + HOSTLER_NUMBER: int = 1 + CONTAINERS_PER_HOSTLER: int = 1 # hostler capacity + HOSTLER_SPEED_LIMIT: float = 20*5280 # ft/hr + HOSTLER_TRANSPORT_CONTAINER_TIME: float = 0 # hr, triangular distribution + HOSTLER_FIND_CONTAINER_TIME: float = 0 # hr, triangular distribution + # Trucks + TRUCK_ARRIVAL_MEAN: float = 40/60 # hr, calculate by + TRUCK_INGATE_TIME: float = 1/60 # hr + TRUCK_OUTGATE_TIME: float = 2/60 # hr + TRUCK_INGATE_TIME_DEV: float = 1/60 # hr + TRUCK_OUTGATE_TIME_DEV: float = 1/60 # hr + TRUCK_TO_PARKING: float = 2/60 # hr + TRUCK_SPEED_LIMIT: float = 20*5280 # ft/hr + TRUCK_TRANSPORT_CONTAINER_TIME: float = 0 # hr, triangular distribution + # Gate settings + IN_GATE_NUMBERS: int = 6 # test queuing module with 1; normal operations with 6 + OUT_GATE_NUMBERS: int = 6 
+ last_leave_time: float = 0 + truck_arrival_time: list[float] = field(default_factory = lambda: []) + truck_waiting_time: list[float] = field(default_factory = lambda: []) + train_consist_plan: pl.DataFrame = field(default_factory = lambda: pl.DataFrame()) + + def initialize_from_consist_plan(self, train_consist_plan): + self.train_consist_plan = train_consist_plan + self.TRAIN_TIMETABLE = train_arrival_parameters(self.train_consist_plan, self.terminal, self.train_id_counter) + self.TRAIN_ID = int(self.TRAIN_TIMETABLE['train_id']) + self.TRAIN_ID_FIXED = 0 + self.CARS_LOADED_ARRIVAL = int(float(self.TRAIN_TIMETABLE['full_cars'])) + self.CARS_EMPTY_ARRIVAL = int(float(self.TRAIN_TIMETABLE['empty_cars'])) + self.TRAIN_ARRIVAL_HR = self.TRAIN_TIMETABLE['arrival_time'] + self.TRAIN_DEPARTURE_HR = self.TRAIN_TIMETABLE['departure_time'] + self.TRAIN_UNITS = self.CARS_LOADED_ARRIVAL + self.CARS_EMPTY_ARRIVAL + self.TRAIN_SPOTS = self.TRAIN_UNITS + + # Containers + self.INBOUND_CONTAINER_NUMBER = self.CARS_LOADED_ARRIVAL + #df = outbound_containers() + # TODO: confirm expected source of Outbound_Num; expected input file not available + self.OUTBOUND_CONTAINER_NUMBER = self.INBOUND_CONTAINER_NUMBER#df.loc[df['Train_ID'] == TRAIN_ID, 'Outbound_Num'].values[0] + + # Chassis + self.CHASSIS_NUMBER = self.TRAIN_UNITS + self.chassis_status = [-1] * self.CHASSIS_NUMBER # -1 means empty, 1 means inbound container, 0 means outbound container + + # Trucks + self.TRUCK_NUMBERS = max(self.INBOUND_CONTAINER_NUMBER, self.OUTBOUND_CONTAINER_NUMBER) + self.IN_OUT_GAP = abs(self.INBOUND_CONTAINER_NUMBER - self.OUTBOUND_CONTAINER_NUMBER) + + def initialize(self): + self.CRANE_LOAD_CONTAINER_TIME_MEAN = (self.CONTAINERS_PER_CAR*(2*self.CONTAINER_TAL+self.CONTAINER_WID))/self.CONTAINERS_PER_CRANE_MOVE_MEAN # hr + self.CRANE_UNLOAD_CONTAINER_TIME_MEAN = (self.CONTAINERS_PER_CAR*(2*self.CONTAINER_TAL+self.CONTAINER_WID))/self.CONTAINERS_PER_CRANE_MOVE_MEAN # hr + self.hostler_status = [-1] * 
self.HOSTLER_NUMBER # 1 means trackside, 0 means parking side, -1 means hostler resources side + # Trains + if self.train_consist_plan.height > 0: + self.initialize_from_consist_plan() + + def __post_init__(self): + self.initialize() + +state = LiftsState() + diff --git a/python/altrios/lifts/schedule.py b/python/altrios/lifts/schedule.py new file mode 100644 index 00000000..4e796612 --- /dev/null +++ b/python/altrios/lifts/schedule.py @@ -0,0 +1,65 @@ +import pandas as pd +import polars as pl +import altrios.lifts as lifts + +def build_train_timetable(train_consist_plan, terminal, swap_arrive_depart, as_dicts): + df = (train_consist_plan + .filter( + pl.col("Destination_ID") == pl.lit(terminal), + pl.col("Train_Type").str.starts_with(pl.lit("Intermodal")) + ) + .rename({ + "Train_ID": "train_id", + "Departure_Time_Actual_Hr": "departure_time", + "Arrival_Time_Actual_Hr": "arrival_time", + "Cars_Empty": "empty_cars", + "Cars_Loaded": "full_cars" + }) + ) + + if swap_arrive_depart: + df = df.rename({"departure_time": "arrival_time", "arrival_time": "departure_time"}) + + df = (df + .group_by("train_id") + .agg(pl.col("full_cars", "empty_cars", "arrival_time", "departure_time").first()) + .sort("arrival_time", descending=False) + ) + + if as_dicts: + return (df + .with_columns( + pl.lit(lifts.dictionary.calculate_oc_number()).alias("oc_number"), + ) + .pipe(lifts.dictionary.calculate_truck_number) + .to_dicts() + ) + else: + return df.to_pandas() + + +def next_train_timetable(train_id, terminal): + df_terminal = build_train_timetable(terminal, swap_arrive_depart = False, as_dicts = False) + df_next_train = df_terminal.iloc[train_id] + return df_next_train + + +def outbound_containers(): + df = pd.read_csv('C:/Users/Irena Tong/PycharmProjects/simulation_test/data/outbound_plan.csv') + return df + + +def get_next_train_outbound_data(index): + outbound_df = outbound_containers() + outbound_num = outbound_df.iloc[index]['Outbound_Num'] + return outbound_num + +# # Test 
codes +# terminal = 'Allouez' +# print(train_timetable(terminal)) +# +# next_train = next_train_timetable(1, terminal) +# print(next_train) +# +# next_outbound_num = get_next_train_outbound_data(1) +# print(next_outbound_num) \ No newline at end of file diff --git a/python/altrios/lifts/test.py b/python/altrios/lifts/test.py new file mode 100644 index 00000000..33b09e24 --- /dev/null +++ b/python/altrios/lifts/test.py @@ -0,0 +1,603 @@ +import simpy +import random +from lifts.parameters import * +from lifts.distances import * +#from dictionary import * +from lifts.schedule import * +from lifts.vehicle_performance import record_vehicle_event, save_average_times, save_vehicle_logs + + +# Test input +CRANE_NUMBER = 1 +HOSTLER_NUMBER = 1 +TRUCK_NUMBERS = 1000 + +def record_event(container_id, event_type, timestamp): + if container_id not in container_events: + container_events[container_id] = {} + container_events[container_id][event_type] = timestamp + + +def handle_truck_arrivals(env, in_gate_resource, truck_numbers): + global all_trucks_ready_event, truck_processed, start_oc_container_id, end_oc_container_id, TRUCK_ARRIVAL_MEAN, TRAIN_ARRIVAL_HR + + truck_id = 1 + truck_processed = 0 + TRUCK_ARRIVAL_MEAN = abs(TRAIN_ARRIVAL_HR - previous_train_departure) / max(INBOUND_CONTAINER_NUMBER, OUTBOUND_CONTAINER_NUMBER) + print(f"current time is {env.now}") + print(f"next TRAIN_ARRIVAL_HR:{TRAIN_ARRIVAL_HR}") + print(f"TRUCK_ARRIVAL_MEAN IS {TRUCK_ARRIVAL_MEAN}") + + while truck_id <= TRUCK_NUMBERS: + inter_arrival_time = random.expovariate(1 / TRUCK_ARRIVAL_MEAN) + yield env.timeout(inter_arrival_time) + truck_arrival_time.append(env.now) + + env.process(truck_through_gate(env, in_gate_resource, truck_id)) + truck_id += 1 + + if truck_id > TRUCK_NUMBERS: + # print(f"truck_id = {truck_id} vs TRUCK_NUM = {TRUCK_NUMBERS}") + if not all_trucks_ready_event.triggered: + all_trucks_ready_event.succeed() + # print(f"{env.now}: All trucks arrived for the {TRAIN_ID} train.") + + 
+def truck_through_gate(env, in_gate_resource, truck_id): + global last_leave_time, INBOUND_CONTAINER_NUMBER, OUTBOUND_CONTAINER_NUMBER + + with in_gate_resource.request() as request: + yield request + wait_time = max(0, truck_arrival_time[truck_id - 1] - last_leave_time) + if wait_time <= 0: + wait_time = 0 # first arriving trucks + # print(f"Truck {truck_id} enters the gate without waiting") + else: + # print(f"Truck {truck_id} enters the gate and queued for {wait_time} hrs") + truck_waiting_time.append(wait_time) + + yield env.timeout(TRUCK_INGATE_TIME + random.uniform(0, TRUCK_INGATE_TIME_DEV)) + + # Case 1: Normal handling when OC >= IC (all trucks have containers) + if OUTBOUND_CONTAINER_NUMBER >= INBOUND_CONTAINER_NUMBER: + env.process(handle_container(env, truck_id)) + + # Case 2: OC < IC, extra empty trucks are needed + else: + if truck_id <= OUTBOUND_CONTAINER_NUMBER: + env.process(handle_container(env, truck_id)) # Loaded trucks + else: + env.process(empty_truck(env, truck_id)) # Empty trucks + + +def handle_container(env, truck_id): + global outbound_container_id_counter, last_leave_time + + container_id = outbound_container_id_counter + outbound_container_id_counter += 1 + record_event(container_id, 'truck_arrival', env.now) + + d_t_dist = create_triang_distribution(d_t_min, d_t_avg, d_t_max).rvs() + yield env.timeout(d_t_dist / (2 * TRUCK_SPEED_LIMIT)) + + record_event(container_id, 'truck_drop_off', env.now) + # print(f"{env.now}: Truck {truck_id} drops outbound container {container_id}.") + last_leave_time = env.now + + +def empty_truck(env, truck_id): + global inbound_container_id_counter, last_leave_time + + d_t_dist = create_triang_distribution(d_t_min, d_t_avg, d_t_max).rvs() + yield env.timeout(d_t_dist / (2 * TRUCK_SPEED_LIMIT)) + + # print(f"{env.now}: Empty truck {truck_id} arrives.") + last_leave_time = env.now + + +def train_arrival(env, train_timetable, train_processing, cranes, hostlers, chassis, in_gate_resource, 
outbound_containers_store, truck_store, out_gate_resource): + global record_oc_label, train_id_counter, TRUCK_NUMBERS, INBOUND_CONTAINER_NUMBER, OUTBOUND_CONTAINER_NUMBER, TRAIN_DEPARTURE_HR, oc_chassis_filled_event, TRAIN_ID, inbound_container_id_counter, previous_train_departure + + for i, train in enumerate(train_timetable): + TRAIN_ARRIVAL_HR = train['arrival_time'] + TRAIN_DEPARTURE_HR = train['departure_time'] + INBOUND_CONTAINER_NUMBER = train['full_cars'] + OUTBOUND_CONTAINER_NUMBER = train['oc_number'] + TRUCK_NUMBERS = train['truck_number'] + TRAIN_ID = train['train_id'] + + print(f"---------- Next Train {TRAIN_ID} Is On the Way ----------") + print(f"IC {INBOUND_CONTAINER_NUMBER}") + print(f"OC {OUTBOUND_CONTAINER_NUMBER}") + + outbound_containers_store.items.clear() + for oc in range(record_oc_label, record_oc_label + OUTBOUND_CONTAINER_NUMBER): # from 10001 to 10001 + OC + # print("oc_number", oc) + outbound_containers_store.put(oc) + # yield outbound_containers_store.put(oc) + # print(f"Current store contents after putting {oc}: {outbound_containers_store.items}") + + # print("outbound_containers_store is:", outbound_containers_store.items) + + # Trucks enter until the precious train departs, if not the first truck + previous_train_departure = train_timetable[i-1]['departure_time'] if i > 0 else 0 + print(f"Schedule {TRUCK_NUMBERS} Trucks arriving between previous train departure at {previous_train_departure} and current train arrival at {TRAIN_ARRIVAL_HR}") + env.process(handle_truck_arrivals(env, in_gate_resource, outbound_containers_store)) + + # Trains arrive according to the timetable, fix negative delay bug + delay = TRAIN_ARRIVAL_HR - env.now + if delay <= 0: + yield env.timeout(0) + else: + yield env.timeout(delay) + + train_id = train_id_counter + print(f"Train {TRAIN_ID} ({train_id} in the dictionary) arrives at {env.now}") + + # for container_id in range(inbound_container_id_counter, inbound_container_id_counter + INBOUND_CONTAINER_NUMBER): 
+ for container_id in range(int(inbound_container_id_counter), int(inbound_container_id_counter) + int(INBOUND_CONTAINER_NUMBER)): # fix float error + + record_event(container_id, 'train_arrival', env.now) + + with train_processing.request() as request: + yield request + oc_chassis_filled_event = env.event() + yield env.process(process_train(env, train_id, cranes, hostlers, chassis, in_gate_resource, outbound_containers_store, truck_store, train_processing, oc_chassis_filled_event, out_gate_resource)) + train_id_counter += 1 + + record_oc_label += OUTBOUND_CONTAINER_NUMBER + # print("record_oc_label", record_oc_label) + # print("oc_variance in train_process:", oc_variance) + + +def process_train(env, train_id, cranes, hostlers, chassis, in_gate_resource, outbound_containers_store, truck_store, train_processing, oc_chassis_filled_event, out_gate_resource): + global oc_variance, time_per_train, train_series, INBOUND_CONTAINER_NUMBER, OUTBOUND_CONTAINER_NUMBER, record_oc_label, TRAIN_ID_FIXED + + start_time = env.now + + # Cranes unload all IC + unload_processes = [] + chassis_inbound_ids = [] # To save chassis_id, current_inbound_id to hostler_transfer_IC_single_loop + + # if train_id < TRAIN_NUMBERS: + for chassis_id in range(1, int(INBOUND_CONTAINER_NUMBER) + 1): + unload_process = env.process(crane_and_chassis(env, train_id, 'unload', cranes, hostlers, chassis, truck_store, train_processing, outbound_containers_store, in_gate_resource, out_gate_resource, oc_chassis_filled_event)) + unload_processes.append(unload_process) + + # All IC are processed + # print("Unload process is:", unload_processes) + yield simpy.events.AllOf(env, unload_processes) + results = yield simpy.events.AllOf(env, unload_processes) + + for container_id in range(int(inbound_container_id_counter), int(inbound_container_id_counter) + int(INBOUND_CONTAINER_NUMBER)): # fix float error + env.process(crane_movement(env, container_id, 'unload')) + + + # To pass chassis_id, current_inbound_id to 
hostler_transfer_IC_single_loop as a list from calling chassis_inbound_ids + for result in results.values(): + chassis_id, current_inbound_id = result + chassis_inbound_ids.append((chassis_id, current_inbound_id)) + env.process(hostler_transfer(env, hostlers, 'inbound', chassis, chassis_id, current_inbound_id, truck_store, cranes, + train_processing, outbound_containers_store, in_gate_resource, oc_chassis_filled_event, + out_gate_resource)) + + # # Once all OC are dropped by hostlers, crane start working + # print("Chassis are filled with OC (-1) now. ") + # print(f"Chassis status after OC processed is: {chassis_status}, where ") + # print(f"there are {chassis_status.count(0)} chassis is filled with OC (0)") + # print(f"there are {chassis_status.count(-1)} chassis is filled with empty (-1)") + # print(f"there are {chassis_status.count(1)} chassis is filled with IC (1)") + + # Cranes move all OC to chassis + load_processes = [] + for chassis_id in range(1, OUTBOUND_CONTAINER_NUMBER + 1): + load_process = env.process(crane_and_chassis(env, train_id, 'load', cranes, hostlers, chassis, truck_store, train_processing, outbound_containers_store, in_gate_resource, oc_chassis_filled_event, out_gate_resource, chassis_id=chassis_id)) + load_processes.append(load_process) + + yield simpy.events.AllOf(env, load_processes) + + for container_id in range(int(inbound_container_id_counter), int(inbound_container_id_counter) + int(INBOUND_CONTAINER_NUMBER)): # fix float error + env.process(crane_movement(env, container_id, 'load')) + + # Check if all outbound containers are loaded (all chassis is empty 0), the train departs + if chassis_status.count(-1) == TRAIN_UNITS: + # oc_chassis_filled_event.succeed() + TRAIN_ID_FIXED = TRAIN_ID + print(f"Train {TRAIN_ID_FIXED} is ready to depart at {env.now}.") + env.process(train_departure(env, train_id)) + time_per_train.append(env.now - start_time) + + end_time = env.now + time_per_train.append(end_time - start_time) + train_series += 1 + 
oc_variance += OUTBOUND_CONTAINER_NUMBER + +def crane_movement(env, container_id, action): + global record_oc_label, crane_id_counter, chassis_status, inbound_container_id_counter, outbound_containers_mapping, outbound_container_id_counter, INBOUND_CONTAINER_NUMBER, OUTBOUND_CONTAINER_NUMBER + + # # Print before requesting crane resource + if action == 'unload': + crane_id = crane_id_counter + crane_id_counter = (crane_id_counter % CRANE_NUMBER) + 1 + yield env.timeout(CRANE_UNLOAD_CONTAINER_TIME_MEAN + random.uniform(0, CRANE_MOVE_DEV_TIME)) + record_event(container_id, 'crane_unload', env.now) + print(f"Crane unloads outbound container {container_id} to train {TRAIN_ID} at {env.now}") + + if action == 'load': + for container_id in range(record_oc_label, record_oc_label + OUTBOUND_CONTAINER_NUMBER): + yield env.timeout(CRANE_LOAD_CONTAINER_TIME_MEAN + random.uniform(0, CRANE_MOVE_DEV_TIME)) + # chassis_status[chassis_id - 1] = -1 + print(f"Crane loads outbound container {container_id} to train {TRAIN_ID} at {env.now}") + record_event(container_id, 'crane_load', env.now) + + +def crane_and_chassis(env, train_id, action, cranes, hostlers, chassis, truck_store, train_processing, outbound_containers_store, in_gate_resource, out_gate_resource, oc_chassis_filled_event, chassis_id=None): + global record_oc_label, crane_id_counter, chassis_status, inbound_container_id_counter, outbound_containers_mapping, outbound_container_id_counter, INBOUND_CONTAINER_NUMBER, OUTBOUND_CONTAINER_NUMBER + + with cranes.request() as request: + yield request + + # # Print after acquiring crane resource + # print(f"[{env.now}] Crane {crane_id_counter} acquired crane resource. 
Available cranes: {cranes.count}/{cranes.capacity}") + + start_time = env.now + record_vehicle_event('crane', crane_id_counter, 'start', start_time) # performance record: starting + + if action == 'unload': + # crane_id = crane_id_counter + # crane_id_counter = (crane_id_counter % CRANE_NUMBER) + 1 + + chassis_id = ((inbound_container_id_counter - 1) % CHASSIS_NUMBER) + 1 + + current_inbound_id = inbound_container_id_counter + inbound_container_id_counter += 1 + # yield env.timeout(CRANE_UNLOAD_CONTAINER_TIME_MEAN + random.uniform(0, CRANE_MOVE_DEV_TIME)) + + # for chassis_id in range(int(inbound_container_id_counter), int(inbound_container_id_counter) + int(INBOUND_CONTAINER_NUMBER)): + chassis_status[chassis_id - 1] = 1 + + end_time = env.now + record_vehicle_event('crane', crane_id_counter, 'end', end_time) # performance record: ending + + # hostler picks up IC + env.process(hostler_transfer(env, hostlers, 'inbound', chassis, chassis_id, current_inbound_id, truck_store, cranes, train_processing, outbound_containers_store, in_gate_resource, oc_chassis_filled_event, out_gate_resource)) + + return chassis_id, current_inbound_id + + elif action == 'load': + if chassis_id not in outbound_containers_mapping: + print(f"Notice: No outbound container mapped to chassis {chassis_id} at {env.now}") + return + + container_id = outbound_containers_mapping[chassis_id] # Retrieve container ID from mapping + # print("outbound_containers_mapping in crane and chassis func:", outbound_containers_mapping) + # print("container_id in crane and chassis func:", container_id) + + if CRANE_NUMBER == 1: + crane_id = 1 + else: + crane_id = (chassis_id % CRANE_NUMBER) + 1 + + chassis_status[chassis_id - 1] = -1 + + # for container_id in range(record_oc_label, record_oc_label + OUTBOUND_CONTAINER_NUMBER): + # yield env.timeout(CRANE_LOAD_CONTAINER_TIME_MEAN + random.uniform(0, CRANE_MOVE_DEV_TIME)) + # chassis_status[chassis_id - 1] = -1 + # print(f"Crane {crane_id} loads outbound container 
{container_id} to train {TRAIN_ID} at {env.now}") + # record_event(container_id, 'crane_load', env.now) + + +def hostler_transfer(env, hostlers, container_type, chassis, chassis_id, container_id, truck_store, cranes, train_processing, outbound_containers_store, in_gate_resource, oc_chassis_filled_event, out_gate_resource): + global hostler_id_counter, chassis_status, outbound_containers_mapping, outbound_container_id, record_oc_label, HOSTLER_NUMBER + + with hostlers.request() as request: + yield request + + start_time = env.now + record_vehicle_event('hostler', hostler_id_counter, 'start', start_time) # performance record + + hostler_id = hostler_id_counter + hostler_id_counter = (hostler_id_counter % HOSTLER_NUMBER) + 1 + + with chassis.request() as chassis_request: + yield chassis_request + + if container_type == 'inbound' and chassis_status[chassis_id - 1] == 1: + d_h_dist = create_triang_distribution(d_h_min, d_h_avg, d_h_max).rvs() + HOSTLER_TRANSPORT_CONTAINER_TIME = d_h_dist / (2 * HOSTLER_SPEED_LIMIT) + print(f"Hostler pick-up time is:{HOSTLER_TRANSPORT_CONTAINER_TIME}") + yield env.timeout(HOSTLER_TRANSPORT_CONTAINER_TIME) + record_event(container_id, 'hostler_pickup', env.now) + print(f"Hostler {hostler_id} picks up inbound container {container_id} from chassis {chassis_id} and heads to parking area at {env.now}") + + chassis_status[chassis_id - 1] = -1 + + # Hostler drop off: different route for picking-up and dropping-off + d_h_dist = create_triang_distribution(d_h_min, d_h_avg, d_h_max).rvs() + HOSTLER_TRANSPORT_CONTAINER_TIME = d_h_dist / (2 * HOSTLER_SPEED_LIMIT) + print(f"Hostler drop-off time is:{HOSTLER_TRANSPORT_CONTAINER_TIME}") + yield env.timeout(HOSTLER_TRANSPORT_CONTAINER_TIME) + record_event(container_id, 'hostler_dropoff', env.now) + print(f"Hostler {hostler_id} drops off inbound container {container_id} from chassis {chassis_id} and moves toward the assigned outbound container at {env.now}") + + end_time = env.now + 
record_vehicle_event('hostler', hostler_id_counter, 'end', end_time) # performance record + + # Process functions of notify_truck and handle_outbound_container simultaneously + env.process(notify_truck(env, truck_store, container_id, out_gate_resource)) + + # Assign outbound container and chassis_id for the hostler which drops off an inbound container + chassis_id, outbound_container_id = yield env.process(outbound_container_decision_making( + env, hostlers, chassis, container_id, truck_store, cranes, train_processing, + outbound_containers_store, + in_gate_resource, oc_chassis_filled_event, out_gate_resource, chassis_status, + outbound_containers_mapping, + record_oc_label, outbound_container_id + )) + + # Process outbound containers + if chassis_id is not None and outbound_container_id is not None: + env.process(handle_outbound_container(env, hostler_id, chassis_id, outbound_container_id, truck_store, + cranes, train_processing, outbound_containers_store, in_gate_resource, + oc_chassis_filled_event)) + + +# When OC are fully processed, but IC are not +def hostler_transfer_IC_single_loop(env, hostlers, container_type, chassis, chassis_id, container_id, truck_store, cranes, train_processing, outbound_containers_store, in_gate_resource, oc_chassis_filled_event, out_gate_resource): + print(f"Starting single hostler transfer IC loop for chassis {chassis_id} at {env.now}") + global hostler_id_counter + + print(f"Requesting hostler for IC at chassis {chassis_id} at {env.now}") + + with hostlers.request() as request: + print(f"Request available hostlers: {hostlers.count} vs total hostlers {HOSTLER_NUMBER}, Hostlers capacity: {hostlers.capacity} at {env.now}") + yield request + + start_time = env.now + record_vehicle_event('hostler', hostler_id_counter, 'start', start_time) # performance record + + hostler_id = hostler_id_counter + hostler_id_counter = (hostler_id_counter % HOSTLER_NUMBER) + 1 + + with chassis.request() as chassis_request: + yield chassis_request + + if 
container_type == 'inbound' and chassis_status[chassis_id - 1] == 1: + chassis_status[chassis_id - 1] = -1 + print(f"Single loop chassis status {chassis_status}") + print(f"There are {chassis_status.count(1)} IC") + print(f"There are {chassis_status.count(-1)} empty") + print(f"There are {chassis_status.count(0)} OC") + d_h_dist = create_triang_distribution(d_h_min, d_h_avg, d_h_max).rvs() + HOSTLER_TRANSPORT_CONTAINER_TIME = d_h_dist / (2 * HOSTLER_SPEED_LIMIT) + + yield env.timeout(HOSTLER_TRANSPORT_CONTAINER_TIME) + # hostler picks up the rest of IC from the chassis + # chassis_status[chassis_id - 1] = -1 + record_event(container_id, 'hostler_pickup', env.now) + print(f"Hostler {hostler_id} picks up inbound container {container_id} from chassis {chassis_id} to parking area at {env.now}") + # hostler drops off the IC + yield env.timeout(HOSTLER_TRANSPORT_CONTAINER_TIME) + record_event(container_id, 'hostler_dropoff', env.now) + print(f"Hostler {hostler_id} drops off inbound container {container_id} from chassis {chassis_id} to parking area at {env.now}") + + # Check if all chassis filled + if chassis_status.count(0) == OUTBOUND_CONTAINER_NUMBER and chassis_status.count( + -1) == TRAIN_UNITS - OUTBOUND_CONTAINER_NUMBER and not oc_chassis_filled_event.triggered: + print(f"Chassis is fully filled with OC, and cranes start moving: {chassis_status}") + print(f"where there are {chassis_status.count(0)} chassis filled with OC (0)") + print(f"where there are {chassis_status.count(-1)} chassis filled with empty (-1)") + print(f"where there are {chassis_status.count(1)} chassis filled with IC (1)") + oc_chassis_filled_event.succeed() + return + else: + print(f"Chassis is not fully filled: {chassis_status}") + print(f"where there are {chassis_status.count(0)} chassis filled with OC (0)") + print(f"where there are {chassis_status.count(-1)} chassis filled with empty (-1)") + print(f"where there are {chassis_status.count(1)} chassis filled with IC (1)") + + end_time = env.now 
+ record_vehicle_event('hostler', hostler_id, 'end', end_time) # performance record + + # trucks pick up IC + yield env.process(notify_truck(env, truck_store, container_id, out_gate_resource)) + + +def outbound_container_decision_making(env, hostlers, chassis, current_inbound_id, truck_store, cranes, train_processing, outbound_containers_store, in_gate_resource, oc_chassis_filled_event, out_gate_resource, chassis_status, outbound_containers_mapping, record_oc_label, outbound_container_id): + # Check if outbound_containers_store has outbound container + if len(outbound_containers_store.items) > 0: + outbound_container_id = yield outbound_containers_store.get() + print(f"Outbound containers remaining: {len(outbound_containers_store.items)}") + + if -1 in chassis_status: + chassis_id = chassis_status.index(-1) + 1 # find the first chassis + # If chassis are not assigned with outbound container + if chassis_id not in outbound_containers_mapping: + # outbound_container_id += record_oc_label + outbound_containers_mapping[chassis_id] = outbound_container_id + chassis_status[chassis_id - 1] = 0 # already assigned outbound container + print(f"OC mapping created: outbound container {outbound_container_id} assigned to chassis {chassis_id}") + else: + print(f"Chassis {chassis_id} is already mapped to an outbound container.") + else: + print("No empty chassis available for outbound container assignment.") + + # if outbound_containers_store is null, check if we need operate single loop + else: + chassis_id = None + outbound_container_id = None + # chassis_status = 1: inbound containers are not loaded + if chassis_status.count(1) != 0: + print(f"Haven't finished all IC yet at {env.now}. 
Starting single loop.") + chassis_id = chassis_status.index(1) + 1 + chassis_status[chassis_id - 1] = 0 # assigned with IC + # single loop takes rest inbound container + yield env.process(hostler_transfer_IC_single_loop(env, hostlers, 'inbound', chassis, chassis_id, current_inbound_id, + truck_store, cranes, train_processing, + outbound_containers_store, in_gate_resource, oc_chassis_filled_event, + out_gate_resource)) + else: + print("All inbound containers have been processed.") + + return chassis_id, outbound_container_id + + +def handle_outbound_container(env, hostler_id, chassis_id, outbound_container_id, truck_store, cranes, train_processing, outbound_containers_store, in_gate_resource, oc_chassis_filled_event): + global HOSTLER_FIND_CONTAINER_TIME + + d_h_dist = create_triang_distribution(d_h_min, d_h_avg, d_h_max).rvs() + HOSTLER_TRANSPORT_CONTAINER_TIME = d_h_dist / (2 * HOSTLER_SPEED_LIMIT) + + d_r_dist = create_triang_distribution(d_r_min, d_r_avg, d_r_max).rvs() + HOSTLER_FIND_CONTAINER_TIME = d_r_dist / (2 * TRUCK_SPEED_LIMIT) + yield env.timeout(HOSTLER_FIND_CONTAINER_TIME) + + record_event(outbound_container_id, 'hostler_pickup', env.now) + print(f"Hostler {hostler_id} picks up outbound container {outbound_container_id} from parking area to chassis {chassis_id} at {env.now}") + + yield env.timeout(HOSTLER_TRANSPORT_CONTAINER_TIME) + + record_event(outbound_container_id, 'hostler_dropoff', env.now) + print(f"Hostler {hostler_id} drops off outbound container {outbound_container_id} to chassis {chassis_id} at {env.now}") + + +# truck pick up IC +def notify_truck(env, truck_store, container_id, out_gate_resource): + truck_id = yield truck_store.get() + yield env.timeout(TRUCK_INGATE_TIME) + print(f"Truck {truck_id} arrives at parking area and prepare to pick up inbound container {container_id} at {env.now}") + yield env.process(truck_transfer(env, truck_id, container_id, out_gate_resource)) + + +def truck_transfer(env, truck_id, container_id, 
out_gate_resource): + global TRUCK_INGATE_TIME, TRUCK_TRANSPORT_CONTAINER_TIME, outbound_container_id_counter + + start_time = env.now + record_vehicle_event('truck', truck_id, 'start', start_time) # performance record + + # Truck moves to the parking area + yield env.timeout(TRUCK_TO_PARKING) + record_event(container_id, 'truck_pickup', env.now) + print(f"Truck {truck_id} picks up inbound container {container_id} at {env.now}") + + # Calculate the transport time for the truck + d_t_dist = create_triang_distribution(d_t_min, d_t_avg, d_t_max).rvs() + TRUCK_TRANSPORT_CONTAINER_TIME = d_t_dist / (2 * TRUCK_SPEED_LIMIT) + yield env.timeout(TRUCK_TRANSPORT_CONTAINER_TIME) + + # Request out_gate_resource resource before the truck exits + with out_gate_resource.request() as request: + yield request + + # Simulate the time it takes for the truck to pass through the gate + yield env.timeout(TRUCK_OUTGATE_TIME + random.uniform(0,TRUCK_OUTGATE_TIME_DEV)) + record_event(container_id, 'truck_exit', env.now) + print(f"Truck {truck_id} exits gate with inbound container {container_id} at {env.now}") + + # End performance recording + end_time = env.now + record_vehicle_event('truck', truck_id, 'end', end_time) + + +def train_departure(env, train_id): + global TRAIN_ID_FIXED, record_oc_label + + if env.now < TRAIN_DEPARTURE_HR: + yield env.timeout(TRAIN_DEPARTURE_HR - env.now) + yield env.timeout(TRAIN_INSPECTION_TIME) + print(f"Train {TRAIN_ID_FIXED} ({train_id} in the dictionary) departs at {env.now}") + + for container_id in range(record_oc_label - OUTBOUND_CONTAINER_NUMBER, record_oc_label): + record_event(container_id, 'train_depart', env.now) + + +def run_simulation(): + global all_trucks_ready_event, record_oc_label, TRUCK_NUMBERS, TRAIN_NUMBERS + + print(f"Starting simulation with No.{TRAIN_ID} trains, {HOSTLER_NUMBER} hostlers, {CRANE_NUMBER} cranes, and {TRUCK_NUMBERS} trucks.") + env = simpy.Environment() + + # Resources + train_processing = simpy.Resource(env, 
capacity=1) + cranes = simpy.Resource(env, capacity=CRANE_NUMBER) + chassis = simpy.Resource(env, capacity=CHASSIS_NUMBER) + hostlers = simpy.Resource(env, capacity=HOSTLER_NUMBER) + in_gate_resource = simpy.Resource(env, capacity=IN_GATE_NUMBERS) + out_gate_resource = simpy.Resource(env, capacity=OUT_GATE_NUMBERS) + outbound_containers_store = simpy.Store(env, capacity=100) + truck_store = simpy.Store(env, capacity=100) + + # Initialize trucks + truck_store.items.clear() + # print("TRUCK_NUMBERS:", TRUCK_NUMBERS) + for truck_id in range(1, TRUCK_NUMBERS + 1): + truck_store.put(truck_id) + # print("TRUCK_STORE:", truck_store.items) + + all_trucks_ready_event = env.event() + + # # toy case + # train_timetable = [ + # {"train_id": 19, "arrival_time": 187, "departure_time": 200, "empty_cars": 3, "full_cars":7, "oc_number": 2, "truck_number":7 }, + # {"train_id": 25, "arrival_time": 250, "departure_time": 350, "empty_cars": 4, "full_cars":6, "oc_number": 2, "truck_number":6 }, + # {"train_id": 49, "arrival_time": 400, "departure_time": 600, "empty_cars": 5, "full_cars":5, "oc_number": 2, "truck_number":5 }, + # {"train_id": 60, "arrival_time": 650, "departure_time": 750, "empty_cars": 6, "full_cars":4, "oc_number": 2, "truck_number":4 }, + # {"train_id": 12, "arrival_time": 800, "departure_time": 1000, "empty_cars": 7, "full_cars":3, "oc_number": 4, "truck_number":4 }, + # ] + + # REAL TEST + train_timetable = build_train_timetable(pl.read_csv(utilities.package_root() / 'demos' / 'starter_demo' / 'train_consist_plan.csv'), terminal, swap_arrive_depart = True, as_dicts = True) + TRAIN_NUMBERS = len(train_timetable) + + # env.process(train_arrival(env, train_processing, cranes, in_gate_resource, outbound_containers_store, truck_store, train_timetable)) + env.process(train_arrival(env, train_timetable, train_processing, cranes, hostlers, chassis, in_gate_resource, + outbound_containers_store, truck_store, out_gate_resource)) + + env.run(until=state.sim_time) + + # 
Performance Matrix: train processing time + avg_time_per_train = sum(time_per_train) / len(time_per_train) + print(f"Average train processing time: {sum(time_per_train) / len(time_per_train) if time_per_train else 0:.2f}") + print("Simulation completed. ") + with open("avg_time_per_train.txt", "w") as f: + f.write(str(avg_time_per_train)) + + # Create DataFrame for container events + container_data = [] + + for container_id, events in sorted(container_events.items()): + container_type = 'inbound' if container_id < 10001 else 'outbound' + if container_type == 'inbound': + container_process_time = events.get('truck_exit', '-') - events.get('train_arrival', '-') if 'truck_exit' in events and 'train_arrival' in events else '-' + else: + container_process_time = events.get('train_depart', '-') - events.get('truck_drop_off', '-') if 'train_depart' in events and 'truck_drop_off' in events else '-' + + container_data.append({ + 'container_id': container_id, + 'container_type': container_type, + 'train_arrival': events.get('train_arrival', '-'), + 'truck_arrival': events.get('truck_arrival', '-'), + 'crane_unload': events.get('crane_unload', '-'), + 'hostler_pickup': events.get('hostler_pickup', '-'), + 'hostler_dropoff': events.get('hostler_dropoff', '-'), + 'truck_drop_off': events.get('truck_drop_off', '-'), + 'truck_pickup': events.get('truck_pickup', '-'), + 'truck_exit': events.get('truck_exit', '-'), + 'crane_load': events.get('crane_load', '-'), + 'train_depart': events.get('train_depart', '-'), + 'container_processing_time': container_process_time + }) + + df = pd.DataFrame(container_data) + filename = f"C:/Users/Irena Tong/PycharmProjects/simulation_test/test/results/simulation_crane_{CRANE_NUMBER}_hostler_{HOSTLER_NUMBER}.xlsx" + df.to_excel(filename, index=False) + + # Use save_average_times and save_vehicle_logs for vehicle related logs + save_average_times() + save_vehicle_logs() + + print("Done!") + + +if __name__ == "__main__": + run_simulation() \ No 
newline at end of file diff --git a/python/altrios/lifts/utilities.py b/python/altrios/lifts/utilities.py new file mode 100644 index 00000000..428d4c8f --- /dev/null +++ b/python/altrios/lifts/utilities.py @@ -0,0 +1,17 @@ +"""Module for general functions, classes, and unit conversion factors.""" +from pathlib import Path + +def package_root() -> Path: + """ + Returns the package root directory. + """ + path = Path(__file__).parent + return path + + +def resources_root() -> Path: + """ + Returns the resources root directory. + """ + path = package_root() / "resources" + return path diff --git a/python/altrios/lifts/vehicle_performance.py b/python/altrios/lifts/vehicle_performance.py new file mode 100644 index 00000000..84f39f44 --- /dev/null +++ b/python/altrios/lifts/vehicle_performance.py @@ -0,0 +1,48 @@ +import pandas as pd + +vehicle_events = { + 'crane': [], + 'hostler': [], + 'truck': [] +} + +def record_vehicle_event(vehicle_type, vehicle_id, event_type, timestamp): + vehicle_events[vehicle_type].append({ + 'vehicle_id': vehicle_id, + 'event_type': event_type, + 'timestamp': timestamp + }) + +def calculate_average_times(): + averages = {} + for vehicle_type, events in vehicle_events.items(): + total_time = 0 + count = 0 + for event in events: + if event['event_type'] == 'start': + start_time = event['timestamp'] + elif event['event_type'] == 'end': + end_time = event['timestamp'] + total_time += (end_time - start_time) + count += 1 + if count > 0: + averages[vehicle_type] = total_time / count + else: + averages[vehicle_type] = 0 + return averages + +def save_average_times(): + averages = calculate_average_times() + with open("vehicle_average_times.txt", "w") as f: + for vehicle_type, avg_time in averages.items(): + f.write(f"{vehicle_type}: {avg_time}\n") + +def save_vehicle_logs(): + for vehicle_type, events in vehicle_events.items(): + log_file = f"{vehicle_type}_work_log.txt" + with open(log_file, "w") as f: + for event in events: + f.write(f"Vehicle ID: 
{event['vehicle_id']}, Event Type: {event['event_type']}, Timestamp: {event['timestamp']}\n") + +if __name__ == "__main__": + save_average_times() diff --git a/python/altrios/metric_calculator.py b/python/altrios/metric_calculator.py index bb15460f..32f7cfb0 100644 --- a/python/altrios/metric_calculator.py +++ b/python/altrios/metric_calculator.py @@ -8,6 +8,7 @@ import altrios as alt from altrios import utilities, defaults +from altrios.train_planner import planner_config MetricType = pl.DataFrame @@ -73,6 +74,7 @@ class ScenarioInfo: emissions_factors: pl.DataFrame = None nodal_energy_prices: pl.DataFrame = None count_unused_locomotives: bool = False + train_planner_config: planner_config = None def metric( name: str, @@ -117,11 +119,15 @@ def main( scenario_infos: Union[ScenarioInfo, List[ScenarioInfo]], annual_metrics: Union[Tuple[str, str], List[Tuple[str, str]]] = [ - ('Mt-km', 'million tonne-km'), + ('Freight_Moved', 'million tonne-mi'), + ('Freight_Moved', 'car-miles'), + ('Freight_Moved', 'cars'), + ('Freight_Moved', 'detailed'), ('GHG', 'tonne CO2-eq'), ('Count_Locomotives', 'assets'), ('Count_Refuelers', 'assets'), - ('Energy_Costs', 'USD') + ('Energy_Costs', 'USD'), + ('Energy_Per_Freight_Moved', 'kWh per car-mile') ], calculate_multiyear_metrics: bool = True ) -> pl.DataFrame: @@ -146,7 +152,7 @@ def main( for annual_metric in annual_metrics: for scenario_info in scenario_infos: annual_values.append(calculate_annual_metric(annual_metric[0], annual_metric[1], scenario_info)) - + values = pl.concat(annual_values, how="diagonal_relaxed").unique() if calculate_multiyear_metrics: @@ -157,7 +163,8 @@ def main( values = (values .filter(pl.col("Value").is_not_null()) .unique() - .sort(["Metric","Units","Year","Subset"], descending = [False, False, False, True]) + .sort(["Metric","Units","Year","Subset"], + descending = [False, False, False, True]) ) return values @@ -257,6 +264,47 @@ def calculate_rollout_lcotkm(values: MetricType) -> MetricType: 
metric("LCOTKM", "USD per million tonne-km (levelized)", lcotkm_all) ]) +def calculate_energy_per_freight(info: ScenarioInfo, + units: str) -> MetricType: + """ + Given a years' worth of simulation results, computes a single year energy usage per unit of freight moved. + Arguments: + ---------- + info: A scenario information object representing parameters and results for a single year + units: Requested units + Outputs: + ---------- + DataFrame of energy usage per freight moved (metric name, units, value, and scenario year) + """ + if "per car-mile" not in units and "per container-mile" not in units: + print(f"Units of {units} not supported for energy-per-freight calculation.") + return metric("Energy_Per_Freight_Moved", units, None) + + conversion_from_megajoule = 0 + if "MJ" in units: + conversion_from_megajoule = 1 + elif "MWh" in units: + conversion_from_megajoule = utilities.KWH_PER_MJ / 1e3 + elif "kWh" in units: + conversion_from_megajoule = utilities.KWH_PER_MJ + + diesel_mj = calculate_diesel_use(info, units="MJ") + electricity_mj = calculate_electricity_use(info, units="MJ") + total_mj = value_from_metrics(diesel_mj) + value_from_metrics(electricity_mj, subset="All") + total_energy = total_mj * conversion_from_megajoule + if "per car-mile" in units: + freight_moved = calculate_freight_moved(info, units="car-miles") + elif "per container-mile" in units: + freight_moved = calculate_freight_moved(info, units="container-miles") + freight_val = value_from_metrics(freight_moved) + return metrics_from_list([ + diesel_mj, + electricity_mj, + metric("Energy_Use", "MJ", total_mj), + metric("Energy_Per_Freight_Moved", units, total_energy / freight_val) + ]) + + def calculate_energy_cost(info: ScenarioInfo, units: str) -> MetricType: """ @@ -273,8 +321,7 @@ def calculate_energy_cost(info: ScenarioInfo, return metric("Cost_Energy", units, None) diesel_used = calculate_diesel_use(info, units="gallons") - electricity_used = (calculate_electricity_use(info, units="kWh") 
- ) + electricity_used = calculate_electricity_use(info, units="kWh") electricity_costs_disagg = (electricity_used .filter(pl.col("Subset") != "All") .join(info.nodal_energy_prices.filter(pl.col("Fuel")=="Electricity"), left_on="Subset", right_on="Node", how="left") @@ -292,11 +339,15 @@ def calculate_energy_cost(info: ScenarioInfo, # Diesel refueling is not yet tracked spatiotemporally; just use average price across the network. diesel_price = info.nodal_energy_prices.filter(pl.col("Fuel")=="Diesel").get_column("Price").mean() diesel_cost_value = value_from_metrics(diesel_used,"Diesel_Usage") * diesel_price - return metrics_from_list([diesel_used, - metric("Cost_Diesel", "USD", diesel_cost_value), - electricity_costs_disagg, - electricity_costs_agg, - metric("Cost_Energy", units, diesel_cost_value + electricity_cost_value)]) + if electricity_cost_value is None: electricity_cost_value = 0.0 + if diesel_cost_value is None: diesel_cost_value = 0 + return metrics_from_list([ + diesel_used, + electricity_used, + metric("Cost_Diesel", "USD", diesel_cost_value), + electricity_costs_disagg, + electricity_costs_agg, + metric("Cost_Energy", units, diesel_cost_value + electricity_cost_value)]) def calculate_diesel_use( info: ScenarioInfo, @@ -351,7 +402,7 @@ def calculate_electricity_use( print(f"Units of {units} not supported for electricity use calculation.") return metric("Electricity_Usage", units, None) - if info.refuel_sessions is None: + if (info.refuel_sessions is None) or (info.refuel_sessions.filter(pl.col("Fuel_Type")==pl.lit("Electricity")).height == 0): # No refueling session data: charging was not explicitly modeled, # so take total net energy at RES and apply charging efficiency factor return metric("Electricity_Usage", units, @@ -385,20 +436,79 @@ def calculate_electricity_use( disagg_energy]) -def calculate_freight( +def calculate_freight_moved( info: ScenarioInfo, units: str) -> MetricType: """ - Given a years' worth of simulation results, computes a single 
year gross million tonne-km of freight delivered + Given a years' worth of simulation results, computes a single year quantity of freight moved Arguments: ---------- info: A scenario information object representing parameters and results for a single year units: Requested units Outputs: ---------- - DataFrame of gross million tonne-km of freight (metric name, units, value, and scenario year) + DataFrame of quantity of freight (metric name, units, value, and scenario year) """ - return metric("Mt-km", units, info.sims.get_megagram_kilometers(annualize=info.annualize)/1.0e6) + if "-mi" in units: + conversion_from_km = utilities.MI_PER_KM + else: + conversion_from_km = 1.0 + + if units in ["million tonne-km", "million tonne-mi"]: + return metric("Freight_Moved", units, info.sims.get_megagram_kilometers(annualize=info.annualize) * conversion_from_km /1.0e6, year=info.scenario_year) + elif units in ["car-km", "car-miles"]: + return metric("Freight_Moved", units, info.sims.get_car_kilometers(annualize=info.annualize) * conversion_from_km, year=info.scenario_year) + elif units == "cars": + return metric("Freight_Moved", units, info.sims.get_cars_moved(annualize=info.annualize), year=info.scenario_year) + elif units in ["container-km", "container-miles"]: + assert info.consist_plan.filter(~pl.col("Train_Type").str.contains("Intermodal")).height == 0, "Can only count containers if the consist plan is all Intermodal" + car_distance = info.sims.get_car_kilometers(annualize=info.annualize) * conversion_from_km + return metric("Freight_Moved", units, car_distance * info.train_planner_config.containers_per_car, year=info.scenario_year) + + elif units == "containers": + container_counts = info.consist_plan.select("Train_ID", "Containers_Loaded", "Containers_Empty").unique().drop("Train_ID").sum() + if info.annualize: + annualizer = 365.25 / info.simulation_days + else: + annualizer = 1.0 + return metrics_from_list([ + metric("Freight_Moved", units, 
container_counts.get_column("Containers_Loaded").item() * annualizer, "Loaded", year=info.scenario_year), + metric("Freight_Moved", units, container_counts.get_column("Containers_Empty").item() * annualizer, "Loaded", year=info.scenario_year), + ]) + elif units == "detailed car counts": + kilometers = (pl.DataFrame(data = {"car-km": [sim.get_kilometers(annualize=info.annualize) for sim in info.sims.tolist()]}) + .with_row_index("idx") + .with_columns( + pl.col("car-km").mul(utilities.MI_PER_KM).alias("car-miles") + ) + ) + all_n_cars_by_type = [sim.n_cars_by_type for sim in info.sims.tolist()] + car_counts = ( + pl.concat([pl.from_dict(item)for item in all_n_cars_by_type], how="diagonal_relaxed") + .with_row_index("idx") + .melt(id_vars = "idx", value_name = "cars", variable_name = "Subset") + .filter(pl.col("cars").is_not_null()) + .join(kilometers, how="left", on="idx") + .drop("idx") + .group_by("Subset") + .agg(pl.col("*").sum()) + .sort("Subset") + .melt(id_vars = "Subset", variable_name = "Units", value_name = "Value") + .with_columns( + pl.lit(info.scenario_year).alias("Year"), + pl.lit("Freight_Moved").alias("Metric"), + pl.when( + info.annualize, + pl.col("Units") == pl.lit("cars")) + .then(pl.col("Value").mul(365.25 / info.simulation_days)) + .otherwise(pl.col("Value")) + .alias("Value") + ) + ) + return car_counts + else: + print(f"Units of {units} not supported for freight movement calculation.") + return metric("Freight_Moved", units, None) def calculate_ghg( info: ScenarioInfo, @@ -445,9 +555,6 @@ def calculate_ghg( to the desired region before passing the emissions factor dataframe into the metrics calculator.""") else: if electricity_MWh.filter(pl.col("Subset") != "All").height > 0: - print(diesel_MJ) - print(info.emissions_factors) - print(electricity_MWh) # Disaggregated results are available electricity_ghg_val = (electricity_MWh .filter(pl.col("Subset") != pl.lit("All")) @@ -714,7 +821,7 @@ def calculate_rollout_investments(values: MetricType) 
-> MetricType: .alias("Count")) .drop("Change") .join(early_retirements, on=item_id_cols, how="left") - .with_columns(pl.col("Count").cumsum().over(item_id_cols).alias("Retirements_Early_Cumsum")) + .with_columns(pl.col("Count").cum_sum().over(item_id_cols).alias("Retirements_Early_Cumsum")) .with_columns(pl.when(pl.col("Change") > 0) .then(pl.when(pl.col("Retirements_Early_Cumsum") <= pl.col("Change")) .then(pl.col("Count")) @@ -1011,7 +1118,8 @@ def add_battery_costs(loco_info: pd.DataFrame, year: int) -> pd.DataFrame: function_mappings = {'Energy_Costs': calculate_energy_cost, - 'Mt-km': calculate_freight, + 'Freight_Moved': calculate_freight_moved, + 'Energy_Per_Freight_Moved': calculate_energy_per_freight, 'GHG': calculate_ghg, 'Count_Locomotives': calculate_locomotive_counts, 'Count_Refuelers': calculate_refueler_counts diff --git a/python/altrios/resources/rolling_stock/Intermodal_Empty.yaml b/python/altrios/resources/rolling_stock/Intermodal_Empty.yaml index 2b140c0c..f257a251 100644 --- a/python/altrios/resources/rolling_stock/Intermodal_Empty.yaml +++ b/python/altrios/resources/rolling_stock/Intermodal_Empty.yaml @@ -1,5 +1,6 @@ --- car_type: Intermodal_Empty +freight_type: Intermodal_Empty length: 18.0 axle_count: 4 brake_count: 1 diff --git a/python/altrios/resources/rolling_stock/Intermodal_Loaded.yaml b/python/altrios/resources/rolling_stock/Intermodal_Loaded.yaml index 0054d89a..c0d0921f 100644 --- a/python/altrios/resources/rolling_stock/Intermodal_Loaded.yaml +++ b/python/altrios/resources/rolling_stock/Intermodal_Loaded.yaml @@ -1,5 +1,6 @@ --- car_type: Intermodal_Loaded +freight_type: Intermodal_Loaded length: 18.0 axle_count: 4 brake_count: 1 diff --git a/python/altrios/resources/rolling_stock/Manifest_Empty.yaml b/python/altrios/resources/rolling_stock/Manifest_Empty.yaml index a2c54e73..a304a398 100644 --- a/python/altrios/resources/rolling_stock/Manifest_Empty.yaml +++ b/python/altrios/resources/rolling_stock/Manifest_Empty.yaml @@ -1,5 +1,6 
@@ --- car_type: Manifest_Empty +freight_type: Manifest_Empty length: 18.0 axle_count: 4 brake_count: 1 diff --git a/python/altrios/resources/rolling_stock/Manifest_Loaded.yaml b/python/altrios/resources/rolling_stock/Manifest_Loaded.yaml index 80140aee..8e398ba4 100644 --- a/python/altrios/resources/rolling_stock/Manifest_Loaded.yaml +++ b/python/altrios/resources/rolling_stock/Manifest_Loaded.yaml @@ -1,5 +1,6 @@ --- car_type: Manifest_Loaded +freight_type: Manifest_Loaded length: 18.0 axle_count: 4 brake_count: 1 diff --git a/python/altrios/resources/rolling_stock/Unit_Empty.yaml b/python/altrios/resources/rolling_stock/Unit_Empty.yaml index caa39153..33ce94dd 100644 --- a/python/altrios/resources/rolling_stock/Unit_Empty.yaml +++ b/python/altrios/resources/rolling_stock/Unit_Empty.yaml @@ -1,5 +1,6 @@ --- car_type: Unit_Empty +freight_type: Unit_Empty length: 10.7 axle_count: 4 brake_count: 1 diff --git a/python/altrios/resources/rolling_stock/Unit_Loaded.yaml b/python/altrios/resources/rolling_stock/Unit_Loaded.yaml index 584c244f..cb23f308 100644 --- a/python/altrios/resources/rolling_stock/Unit_Loaded.yaml +++ b/python/altrios/resources/rolling_stock/Unit_Loaded.yaml @@ -1,5 +1,6 @@ --- car_type: Unit_Loaded +freight_type: Unit_Loaded length: 10.7 axle_count: 4 brake_count: 1 diff --git a/python/altrios/rollout.py b/python/altrios/rollout.py index 6e8b430b..5c1f3ff2 100644 --- a/python/altrios/rollout.py +++ b/python/altrios/rollout.py @@ -1,8 +1,9 @@ from altrios import sim_manager -from altrios import metric_calculator, train_planner, defaults +from altrios import metric_calculator, defaults from altrios.metric_calculator import ScenarioInfo import altrios as alt +from altrios.train_planner import planner, planner_config import numpy as np import time import pandas as pd @@ -23,7 +24,7 @@ def simulate_prescribed_rollout( save_interval: Optional[int] = None, freight_demand_percent_growth:float = 0.0, demand_file: Union[pl.DataFrame, Path, str] = 
defaults.DEMAND_FILE, - train_planner_config: train_planner.TrainPlannerConfig = train_planner.TrainPlannerConfig(), + train_planner_config: planner_config.TrainPlannerConfig = planner_config.TrainPlannerConfig(), count_unused_locomotives = False, write_complete_results: Optional[bool] = False, write_metrics: Optional[bool] = False, diff --git a/python/altrios/sim_manager.py b/python/altrios/sim_manager.py index c15fbee9..0163224a 100644 --- a/python/altrios/sim_manager.py +++ b/python/altrios/sim_manager.py @@ -3,13 +3,13 @@ """ import polars as pl -from typing import Any, Union, Dict, List, Optional, Tuple +from typing import Any, Union, Dict, List, Tuple from pathlib import Path import time from altrios import defaults import altrios as alt -from altrios import train_planner as planner +from altrios.train_planner import planner, planner_config from altrios import metric_calculator as metrics def main( @@ -25,7 +25,7 @@ def main( refuelers: pl.DataFrame = None, grid_emissions_factors: pl.DataFrame = None, nodal_energy_prices: pl.DataFrame = None, - train_planner_config: planner.TrainPlannerConfig = planner.TrainPlannerConfig(), + train_planner_config: planner_config.TrainPlannerConfig = planner_config.TrainPlannerConfig(), train_type: alt.TrainType = alt.TrainType.Freight, demand_file: Union[pl.DataFrame, Path, str] = str(defaults.DEMAND_FILE), network_charging_guidelines: pl.DataFrame = None @@ -64,13 +64,15 @@ def main( str(loc.link_idx.idx) + " is invalid for network!") train_planner_config.loco_info = metrics.add_battery_costs(train_planner_config.loco_info, scenario_year) + train_planner_config.simulation_days = simulation_days + 2 * warm_start_days - if loco_pool is None: loco_pool = planner.build_locopool( - config = train_planner_config, - method="shares_twoway", - shares=[1-target_bel_share, target_bel_share], - demand_file=demand_file - ) + # TODO mbruchon: un-comment this and move it out into rollout.py so rollouts still work + #if loco_pool is None: 
loco_pool = planner.data_prep.build_locopool( + # config = train_planner_config, + # method="shares_twoway", + # shares=[1-target_bel_share, target_bel_share], + # demand_file=demand_file + # ) t0_ptc = time.perf_counter() ( @@ -85,7 +87,6 @@ def main( network = network, loco_pool= loco_pool, refuelers = refuelers, - simulation_days=simulation_days + 2 * warm_start_days, scenario_year = scenario_year, config = train_planner_config, demand_file = demand_file, @@ -129,7 +130,7 @@ def main( ) train_times = pl.DataFrame( - {'Train_ID': pl.Series([sim.train_id for sim in speed_limit_train_sims], dtype=pl.Int32).cast(pl.UInt32), + {'Train_ID': pl.Series([int(sim.train_id) for sim in speed_limit_train_sims], dtype=pl.Int32).cast(pl.UInt32), 'Origin_ID': pl.Series([sim.origs[0].location_id for sim in speed_limit_train_sims], dtype=str), 'Destination_ID': pl.Series([sim.dests[0].location_id for sim in speed_limit_train_sims], dtype=str), 'Departure_Time_Actual_Hr': pl.Series([this[0].time_hours for this in timed_paths], dtype=pl.Float64), diff --git a/python/altrios/tests/test_metric_calculator.py b/python/altrios/tests/test_metric_calculator.py index 004b6efe..e669950b 100644 --- a/python/altrios/tests/test_metric_calculator.py +++ b/python/altrios/tests/test_metric_calculator.py @@ -23,6 +23,6 @@ def test_dummy_sim(self): False)) for info in scenario_infos: - tkm = metric_calculator.calculate_freight(info,'Million_Tonne-KM') - self.assertEqual(tkm.filter(pl.col("Metric") == 'Mt-km').get_column("Units").len(), 1) + tkm = metric_calculator.calculate_freight_moved(info,'million tonne-mi') + self.assertEqual(tkm.filter(pl.col("Metric") == pl.lit("Freight_Moved")).get_column("Units").len(), 1) diff --git a/python/altrios/tests/test_train_planner.py b/python/altrios/tests/test_train_planner.py index 6d0eb0a4..86b70321 100644 --- a/python/altrios/tests/test_train_planner.py +++ b/python/altrios/tests/test_train_planner.py @@ -1,7 +1,7 @@ import unittest -from altrios import 
train_planner +from altrios.train_planner import planner class TestTrainPlanner(unittest.TestCase): diff --git a/python/altrios/train_planner.py b/python/altrios/train_planner.py deleted file mode 100644 index e8165e6f..00000000 --- a/python/altrios/train_planner.py +++ /dev/null @@ -1,1158 +0,0 @@ -from pathlib import Path -from typing import Union -import numpy as np -from scipy.stats import rankdata -import pandas as pd -import polars as pl -import polars.selectors as cs -import math -from typing import Tuple, List, Dict -from itertools import repeat -import altrios as alt -from altrios import defaults, utilities - -pl.enable_string_cache() - -class TrainPlannerConfig: - def __init__(self, - single_train_mode: bool = False, - min_cars_per_train: int = 60, - target_cars_per_train: int = 180, - manifest_empty_return_ratio: float = 0.6, - #TODO single vs double stacked operations on the corridor - cars_per_locomotive: int = 70, - refuelers_per_incoming_corridor: int = 4, - drag_coeff_function: List = None, - hp_required_per_ton: Dict = { - "Default": { - "Unit": 2.0, - "Manifest": 1.5, - "Intermodal": 2.0 + 2.0, - "Unit_Empty": 2.0, - "Manifest_Empty": 1.5, - "Intermodal_Empty": 2.0 + 2.0, - } - }, - dispatch_scaling_dict: Dict = { - "time_mult_factor": 1.4, - "hours_add": 2, - "energy_mult_factor": 1.25 - }, - loco_info = pd.DataFrame({ - "Diesel_Large": { - "Capacity_Cars": 20, - "Fuel_Type": "Diesel", - "Min_Servicing_Time_Hr": 3.0, - "Rust_Loco": alt.Locomotive.default(), - "Cost_USD": defaults.DIESEL_LOCO_COST_USD, - "Lifespan_Years": defaults.LOCO_LIFESPAN - }, - "BEL": { - "Capacity_Cars": 20, - "Fuel_Type": "Electricity", - "Min_Servicing_Time_Hr": 3.0, - "Rust_Loco": alt.Locomotive.default_battery_electric_loco(), - "Cost_USD": defaults.BEL_MINUS_BATTERY_COST_USD, - "Lifespan_Years": defaults.LOCO_LIFESPAN - } - }).transpose().reset_index(names='Locomotive_Type'), - refueler_info = pd.DataFrame({ - "Diesel_Fueler": { - "Locomotive_Type": "Diesel_Large", - 
"Fuel_Type": "Diesel", - "Refueler_J_Per_Hr": defaults.DIESEL_REFUEL_RATE_J_PER_HR, - "Refueler_Efficiency": defaults.DIESEL_REFUELER_EFFICIENCY, - "Cost_USD": defaults.DIESEL_REFUELER_COST_USD, - "Lifespan_Years": defaults.LOCO_LIFESPAN - }, - "BEL_Charger": { - "Locomotive_Type": "BEL", - "Fuel_Type": "Electricity", - "Refueler_J_Per_Hr": defaults.BEL_CHARGE_RATE_J_PER_HR, - "Refueler_Efficiency": defaults.BEL_CHARGER_EFFICIENCY, - "Cost_USD": defaults.BEL_CHARGER_COST_USD, - "Lifespan_Years": defaults.LOCO_LIFESPAN - } - }).transpose().reset_index(names='Refueler_Type') - ): - """ - Constructor for train planner configuration objects - Arguments: - ---------- - min_cars_per_train: the minimum length in number of cars to form a train - target_cars_per_train: the standard train length in number of cars - manifest_empty_return_ratio: Desired railcar reuse ratio to calculate the empty manifest car demand, (E_ij+E_ji)/(L_ij+L_ji) - cars_per_locomotive: Heuristic scaling factor used to size number of locomotives needed based on demand. 
- refuelers_per_incoming_corridor: - hp_required_per_ton: - dispatch_scaling_dict: - loco_info: - refueler_info: - """ - self.single_train_mode = single_train_mode - self.min_cars_per_train = min_cars_per_train - self.target_cars_per_train = target_cars_per_train - self.manifest_empty_return_ratio = manifest_empty_return_ratio - self.cars_per_locomotive = cars_per_locomotive - self.refuelers_per_incoming_corridor = refuelers_per_incoming_corridor - self.hp_required_per_ton = hp_required_per_ton - self.dispatch_scaling_dict = dispatch_scaling_dict - self.loco_info = loco_info - self.refueler_info = refueler_info - self.drag_coeff_function = drag_coeff_function - -def demand_loader( - demand_table: Union[pl.DataFrame, Path, str] -) -> Tuple[pl.DataFrame, pl.Series, int]: - """ - Load the user input csv file into a dataframe for later processing - Arguments: - ---------- - user_input_file: path to the input csv file that user import to the module - Example Input: - Origin Destination Train_Type Number_of_Cars Number_of_Containers - Barstow Stockton Unit 2394 0 - Barstow Stockton Manifest 2588 0 - Barstow Stockton Intermodal 2221 2221 - - Outputs: - ---------- - df_annual_demand: dataframe with all pair information including: - origin, destination, train type, number of cars - node_list: List of origin or destination demand nodes - """ - if type(demand_table) is not pl.DataFrame: - demand_table = pl.read_csv(demand_table, dtypes = {"Number_of_Cars": pl.UInt32, "Number_of_Containers": pl.UInt32}) - - nodes = pl.concat( - [demand_table.get_column("Origin"), - demand_table.get_column("Destination")]).unique().sort() - return demand_table, nodes - - -def generate_return_demand( - demand: pl.DataFrame, - config: TrainPlannerConfig -) -> pl.DataFrame: - """ - Create a dataframe for additional demand needed for empty cars of the return trains - Arguments: - ---------- - df_annual_demand: The user_input file loaded by previous functions - that contains laoded demand for each 
demand pair. - config: Object storing train planner configuration paramaters - Outputs: - ---------- - df_return_demand: The demand generated by the need - of returning the empty cars to their original nodes - """ - return (demand - .rename({"Origin": "Destination", - "Destination": "Origin"}) - .drop("Number_Of_Containers") - .with_columns( - pl.concat_str([pl.col("Train_Type").str.strip_suffix("_Empty").str.strip_suffix("_Loaded") - ,pl.lit("_Empty")]).alias("Train_Type"), - pl.when(pl.col("Train_Type") == pl.lit("Manifest")) - .then((pl.col("Number_of_Cars") * config.manifest_empty_return_ratio).floor().cast(pl.UInt32)) - .otherwise(pl.col("Number_of_Cars")) - .alias("Number_of_Cars")) - ) - -def generate_origin_manifest_demand( - demand: pl.DataFrame, - node_list: List[str], - config: TrainPlannerConfig -) -> pl.DataFrame: - """ - Create a dataframe for summarized view of all origins' manifest demand - in number of cars and received cars, both with loaded and empty counts - Arguments: - ---------- - demand: The user_input file loaded by previous functions - that contains laoded demand for each demand pair. 
- node_list: A list containing all the names of nodes in the system - config: Object storing train planner configuration paramaters - - Outputs: - ---------- - origin_manifest_demand: The dataframe that summarized all the manifest demand - originated from each node by number of loaded and empty cars - with additional columns for checking the unbalance quantity and serve as check columns - for the manifest empty car rebalancing function - """ - manifest_demand = (demand - .filter(pl.col("Train_Type").str.strip_suffix("_Loaded") == "Manifest") - .select(["Origin", "Destination","Number_of_Cars"]) - .rename({"Number_of_Cars": "Manifest"}) - .unique()) - - origin_volume = manifest_demand.group_by("Origin").agg(pl.col("Manifest").sum()) - destination_volume = manifest_demand.group_by("Destination").agg(pl.col("Manifest").sum().alias("Manifest_Reverse")) - origin_manifest_demand = (pl.DataFrame({"Origin": node_list}) - .join(origin_volume, left_on="Origin", right_on="Origin", how="left") - .join(destination_volume, left_on="Origin", right_on="Destination", how="left") - .with_columns( - (pl.col("Manifest_Reverse") * config.manifest_empty_return_ratio).floor().cast(pl.UInt32).alias("Manifest_Empty")) - .with_columns( - (pl.col("Manifest") + pl.col("Manifest_Empty")).alias("Manifest_Dispatched"), - (pl.col("Manifest_Reverse") + pl.col("Manifest") * config.manifest_empty_return_ratio).floor().cast(pl.UInt32).alias("Manifest_Received")) - .drop("Manifest_Reverse") - .filter((pl.col("Manifest").is_not_null()) | (pl.col("Manifest_Empty").is_not_null())) - ) - - return origin_manifest_demand - - -def balance_trains( - demand_origin_manifest: pl.DataFrame -) -> pl.DataFrame: - """ - Update the manifest demand, especially the empty car demand to maintain equilibrium of number of - cars dispatched and received at each node for manifest - Arguments: - ---------- - demand_origin_manifest: Dataframe that summarizes empty and loaded - manifest demand dispatched and received for each 
node by number cars - Outputs: - ---------- - demand_origin_manifest: Updated demand_origin_manifest with additional - manifest empty car demand added to each node - df_balance_storage: Documented additional manifest demand pairs and corresponding quantity for - rebalancing process - """ - df_balance_storage = pd.DataFrame(np.zeros(shape=(0, 4))) - df_balance_storage = df_balance_storage.rename( - columns={0: "Origin", - 1: "Destination", - 2: "Train_Type", - 3: "Number_of_Cars"}) - - train_type = "Manifest_Empty" - demand = demand_origin_manifest.to_pandas()[ - ["Origin","Manifest_Received","Manifest_Dispatched","Manifest_Empty"]] - demand = demand.rename(columns={"Manifest_Received": "Received", - "Manifest_Dispatched": "Dispatched", - "Manifest_Empty": "Empty"}) - - step = 0 - # Calculate the number of iterations needed - max_iter = len(demand) * (len(demand)-1) / 2 - while (~np.isclose(demand["Received"], demand["Dispatched"])).any() and (step <= max_iter): - rows_def = demand[demand["Received"] < demand["Dispatched"]] - rows_sur = demand[demand["Received"] > demand["Dispatched"]] - if((len(rows_def) == 0) | (len(rows_sur) == 0)): - break - # Find the first node that is in deficit of cars because of the empty return - row_def = rows_def.index[0] - # Find the first node that is in surplus of cars - row_sur = rows_sur.index[0] - surplus = demand.loc[row_sur, "Received"] - demand.loc[row_sur, "Dispatched"] - df_balance_storage.loc[len(df_balance_storage.index)] = \ - [demand.loc[row_sur, "Origin"], - demand.loc[row_def, "Origin"], - train_type, - surplus] - demand.loc[row_def, "Received"] += surplus - demand.loc[row_sur, "Dispatched"] = demand.loc[row_sur, "Received"] - step += 1 - - if (~np.isclose(demand["Received"], demand["Dispatched"])).any(): - raise Exception("While loop didn't converge") - return pl.from_pandas(df_balance_storage) - -def generate_demand_trains( - demand: pl.DataFrame, - demand_returns: pl.DataFrame, - demand_rebalancing: pl.DataFrame, - 
rail_vehicles: List[alt.RailVehicle], - config: TrainPlannerConfig -) -> pl.DataFrame: - """ - Generate a tabulated demand pair to indicate the final demand - for each demand pair for each train type in number of trains - Arguments: - ---------- - demand: Tabulated demand for each demand pair for each train type in number of cars - - demand: The user_input file loaded and prepared by previous functions - that contains loaded car demand for each demand pair. - demand_returns: The demand generated by the need - of returning the empty cars to their original nodes - demand_rebalancing: Documented additional manifest demand pairs and corresponding quantity for - rebalancing process - - config: Object storing train planner configuration paramaters - Outputs: - ---------- - demand: Tabulated demand for each demand pair in terms of number of cars and number of trains - """ - - demand = pl.concat([ - demand.drop("Number_of_Containers"), - demand_returns.drop("Number_of_Containers"), - demand_rebalancing], - how="diagonal_relaxed") - # if rowx[first three columns] == rowy[first three columns]: - # rowx[fourth column] + rowy[fourth column] - # delete rowy - # combined_row = demand.slice(10,12).select - # demand = demand.group_by() - #Prepare hp_per_ton requirements to merge onto the demand DataFrame - hp_per_ton = ( - pl.DataFrame(pd.DataFrame(config.hp_required_per_ton).reset_index(names="Train_Type")) - .melt(id_vars="Train_Type",variable_name="O_D",value_name="HP_Required_Per_Ton") - .with_columns(pl.col("O_D").str.split("_").list.first().alias("Origin"), - pl.col("O_D").str.split("_").list.last().alias("Destination")) - ) - - #MPrepare ton_per_car requirements to merge onto the demand DataFrame - # TODO: simplify mass API here. Is there a function on the Rust side to get total mass (or should there be)? 
- def get_kg_empty(veh): - return veh.mass_static_base_kilograms + veh.axle_count * veh.mass_rot_per_axle_kilograms - def get_kg(veh): - return veh.mass_static_base_kilograms + veh.mass_freight_kilograms + veh.axle_count * veh.mass_rot_per_axle_kilograms - - # NOTE: don't need to use this for PS; just need to use target platoon size (# of rail vehicles) then convert to containers - # based on single vs. double stacked. Target # of rail vehicle Other intermodals may need to use weight - ton_per_car = ( - pl.DataFrame({"Train_Type": pl.Series([rv.car_type for rv in rail_vehicles]).str.strip_suffix("_Loaded"), - "KG_Empty": [get_kg_empty(rv) for rv in rail_vehicles], - "KG": [get_kg(rv) for rv in rail_vehicles]}) - .with_columns(pl.when(pl.col("Train_Type").str.contains("_Empty")) - .then(pl.col("KG_Empty") / utilities.KG_PER_TON) - .otherwise(pl.col("KG") / utilities.KG_PER_TON) - .alias("Tons_Per_Car")) - .drop(["KG_Empty","KG_Loaded"]) - ) - - demand = demand.join(ton_per_car, on="Train_Type", how="left") - # Merge on OD-specific hp_per_ton if the user specified any - demand = demand.join(hp_per_ton.drop("O_D"), - on=["Origin","Destination","Train_Type"], - how="left") - # Second, merge on defaults per train type - demand = demand.join(hp_per_ton.filter((pl.col("O_D") =="Default")).drop(["O_D","Origin","Destination"]), - on=["Train_Type"], - how="left", - suffix="_Default") - # Fill in defaults per train type wherever the user didn't specify OD-specific hp_per_ton - demand = demand.with_columns(pl.coalesce("HP_Required_Per_Ton", "HP_Required_Per_Ton_Default").alias("HP_Required_Per_Ton")) - demand = demand.drop("HP_Required_Per_Ton_Default") - # Replace nulls with zero - demand = demand.with_columns(cs.float().fill_null(0.0), cs.by_dtype(pl.UInt32).fill_null(pl.lit(0).cast(pl.UInt32))) - # Convert total number of cars to total number of trains - demand = demand.with_columns( - (pl.col("Number_of_Cars") * pl.col("Tons_Per_Car")).alias("Tons_Aggregate"), - 
pl.when(config.single_train_mode) - .then(1) - .when(pl.col("Number_of_Cars") == 0) - .then(0) - .otherwise( - pl.max_horizontal([1, - ((pl.col("Number_of_Cars").floordiv(pl.lit(config.target_cars_per_train)) + 1)) - ]) - ).cast(pl.UInt32).alias("Number_of_Trains")) - # Calculate per-train car counts and tonnage - demand = demand.with_columns( - pl.col("Tons_Aggregate").truediv(pl.col("Number_of_Trains")).alias("Tons_Per_Train")) - demand = demand.with_columns( - (pl.when(pl.col("Train_Type").str.ends_with("_Empty")) - .then(pl.col("Number_of_Cars")) - .otherwise(0)) - .cast(pl.UInt32) - .alias("Cars_Empty"), - (pl.when(pl.col("Train_Type").str.ends_with("_Empty")) - .then(0) - .otherwise(pl.col("Number_of_Cars"))) - .cast(pl.UInt32) - .alias("Cars_Loaded") - ) - return demand - - -def calculate_dispatch_times( - demand: pl.DataFrame, - hours: int -) -> pl.DataFrame: - """ - Generate a tabulated demand pair to indicate the expected dispatching interval - and actual dispatching timesteps after rounding - Arguments: - ---------- - config: Object storing train planner configuration paramaters - demand_train: Dataframe of demand (number of trains) for each OD pair for each train type - hours: Number of hours in the simulation time period - Outputs: - ---------- - dispatch_times: Tabulated dispatching time for each demand pair for each train type - in hours - """ - demand = demand \ - .filter(pl.col("Number_of_Trains") > 0) \ - .select(["Origin","Destination","Train_Type","Number_of_Trains", - "Number_of_Cars", - "Tons_Per_Train","HP_Required_Per_Ton", "Cars_Loaded", "Cars_Empty"]) \ - .with_columns( - (hours / pl.col("Number_of_Trains")).alias("Interval"), - pl.col("Number_of_Trains").cast(pl.Int32).alias("Number_of_Trains"), - pl.col("Number_of_Cars").floordiv(pl.col("Number_of_Trains")).alias("Number_of_Cars"), - pl.col("Cars_Empty").floordiv(pl.col("Number_of_Trains")).alias("Cars_Empty"), - 
pl.col("Cars_Loaded").floordiv(pl.col("Number_of_Trains")).alias("Cars_Loaded"), - ).select(pl.exclude("Number_of_Trains").repeat_by("Number_of_Trains").explode()) \ - .with_columns( - ((pl.col("Interval").cumcount().over(["Origin","Destination","Train_Type"])) \ - * pl.col("Interval")).alias("Hour") - ).drop("Interval") \ - .sort(["Hour","Origin","Destination","Train_Type"]) - - return demand -def build_locopool( - config: TrainPlannerConfig, - demand_file: Union[pl.DataFrame, Path, str], - method: str = "tile", - shares: List[float] = [], -) -> pl.DataFrame: - """ - Generate default locomotive pool - Arguments: - ---------- - demand_file: Path to a file with origin-destination demand - method: Method to determine each locomotive's type ("tile" or "shares_twoway" currently implemented) - shares: List of shares for each locomotive type in loco_info (implemented for two-way shares only) - Outputs: - ---------- - loco_pool: Locomotive pool containing all locomotives' information that are within the system - """ - config.loco_info = append_loco_info(config.loco_info) - loco_types = list(config.loco_info.loc[:,'Locomotive_Type']) - demand, node_list = demand_loader(demand_file) - - num_nodes = len(node_list) - num_ods = demand.height - cars_per_od = demand.get_column("Number_of_Cars").mean() - if config.single_train_mode: - initial_size = math.ceil(cars_per_od / config.cars_per_locomotive) - rows = initial_size - else: - num_destinations_per_node = num_ods*1.0 / num_nodes*1.0 - initial_size = math.ceil((cars_per_od / config.cars_per_locomotive) * - num_destinations_per_node) # number of locomotives per node - rows = initial_size * num_nodes # number of locomotives in total - - - if config.single_train_mode: - sorted_nodes = np.tile([demand.select(pl.col("Origin").first()).item()],rows).tolist() - engine_numbers = range(0, rows) - print(engine_numbers) - else: - sorted_nodes = np.sort(np.tile(node_list, initial_size)).tolist() - engine_numbers = rankdata(sorted_nodes, 
method="dense") * 1000 + \ - np.tile(range(0, initial_size), num_nodes) - - if method == "tile": - repetitions = math.ceil(rows/len(loco_types)) - types = np.tile(loco_types, repetitions).tolist()[0:rows] - elif method == "shares_twoway": - if((len(loco_types) != 2) | (len(shares) != 2)): - raise ValueError( - f"""2-way prescribed locopool requested but number of locomotive types is not 2.""") - - idx_1 = np.argmin(shares) - idx_2 = 1 - idx_1 - share_type_one = shares[idx_1] - label_type_one = loco_types[idx_1] - label_type_two = loco_types[idx_2] - - num_type_one = round(initial_size * share_type_one) - if 0 == num_type_one: - types = pd.Series([label_type_two] * initial_size) - elif initial_size == num_type_one: - types = pd.Series([label_type_one] * initial_size) - else: - # Arrange repeated sequences of type 1 + {type_two_per_type_one, type_two_per_type_one+1} type 2 - # so as to match the required total counts of each. - type_two_per_type_one = ( - initial_size - num_type_one) / num_type_one - # Number of type 1 + {type_two_per_bel+1} type 2 sequences needed - num_extra_type_two = round( - num_type_one * (type_two_per_type_one % 1.0)) - series_fewer_type_two = pd.Series( - [label_type_one] + [label_type_two] * math.floor(type_two_per_type_one)) - series_more_type_two = pd.Series( - [label_type_one] + [label_type_two] * math.ceil(type_two_per_type_one)) - types = np.concatenate(( - np.tile(series_more_type_two, num_extra_type_two), - np.tile(series_fewer_type_two, num_type_one-num_extra_type_two)), - axis=None) - types = np.tile(types, num_nodes).tolist() - else: - raise ValueError( - f"""Locopool build method '{method}' invalid or not implemented.""") - - loco_pool = pl.DataFrame( - {'Locomotive_ID': pl.Series(engine_numbers, dtype=pl.UInt32), - 'Locomotive_Type': pl.Series(types, dtype=pl.Categorical), - 'Node': pl.Series(sorted_nodes, dtype=pl.Categorical), - 'Arrival_Time': pl.Series(np.zeros(rows), dtype=pl.Float64), - 'Servicing_Done_Time': 
pl.Series(np.zeros(rows), dtype=pl.Float64), - 'Refueling_Done_Time': pl.Series(np.tile(0, rows), dtype=pl.Float64), - 'Status': pl.Series(np.tile("Ready", rows), dtype=pl.Categorical), - 'SOC_Target_J': pl.Series(np.zeros(rows), dtype=pl.Float64), - 'Refuel_Duration': pl.Series(np.zeros(rows), dtype=pl.Float64), - 'Refueler_J_Per_Hr': pl.Series(np.zeros(rows), dtype=pl.Float64), - 'Refueler_Efficiency': pl.Series(np.zeros(rows), dtype=pl.Float64), - 'Port_Count': pl.Series(np.zeros(rows), dtype=pl.UInt32)} - ) - - loco_info_pl = pl.from_pandas(config.loco_info.drop(labels='Rust_Loco',axis=1), - schema_overrides={'Locomotive_Type': pl.Categorical, - 'Fuel_Type': pl.Categorical} - ) - - loco_pool = loco_pool.join(loco_info_pl, on="Locomotive_Type") - return loco_pool - - -def build_refuelers( - node_list: pd.Series, - loco_pool: pl.DataFrame, - refueler_info: pd.DataFrame, - refuelers_per_incoming_corridor: int, -) -> pl.DataFrame: - """ - Build the default set of refueling facilities. - Arguments: - ---------- - node_list: List of origin or destination demand nodes - loco_pool: Locomotive pool - refueler_info: DataFrame with information for each type of refueling infrastructure to use - refuelers_per_incoming_corridor: Queue size per corridor arriving at each node. 
- Outputs: - ---------- - refuelers: Polars dataframe of facility county by node and type of fuel - """ - ports_per_node = (loco_pool - .group_by(pl.col("Locomotive_Type", "Fuel_Type").cast(pl.Utf8)) - .agg([(pl.lit(refuelers_per_incoming_corridor) * pl.len() / pl.lit(loco_pool.height)) - .ceil() - .alias("Ports_Per_Node")]) - .join(pl.from_pandas(refueler_info), - on=["Locomotive_Type", "Fuel_Type"], - how="left") - ) - - locations = pd.DataFrame(data={ - 'Node': np.tile(node_list, ports_per_node.height)}) - locations = locations.sort_values(by=['Node']).reset_index(drop=True) - - refuelers = pl.DataFrame({ - 'Node': pl.Series(locations['Node'], dtype=pl.Categorical).cast(pl.Categorical), - 'Refueler_Type': pl.Series(np.tile( - ports_per_node.get_column("Refueler_Type").to_list(), len(node_list)), - dtype=pl.Categorical).cast(pl.Categorical), - 'Locomotive_Type': pl.Series(np.tile( - ports_per_node.get_column("Locomotive_Type").to_list(), len(node_list)), - dtype=pl.Categorical).cast(pl.Categorical), - 'Fuel_Type': pl.Series(np.tile( - ports_per_node.get_column("Fuel_Type").to_list(), len(node_list)), - dtype=pl.Categorical).cast(pl.Categorical), - 'Refueler_J_Per_Hr': pl.Series(np.tile( - ports_per_node.get_column("Refueler_J_Per_Hr").to_list(), len(node_list)), - dtype=pl.Float64), - 'Refueler_Efficiency': pl.Series(np.tile( - ports_per_node.get_column("Refueler_Efficiency").to_list(), len(node_list)), - dtype=pl.Float64), - 'Lifespan_Years': pl.Series(np.tile( - ports_per_node.get_column("Lifespan_Years").to_list(), len(node_list)), - dtype=pl.Float64), - 'Cost_USD': pl.Series(np.tile( - ports_per_node.get_column("Cost_USD").to_list(), len(node_list)), - dtype=pl.Float64), - 'Port_Count': pl.Series(np.tile( - ports_per_node.get_column("Ports_Per_Node").to_list(), len(node_list)), - dtype=pl.UInt32)}) - return refuelers - -def append_charging_guidelines( - refuelers: pl.DataFrame, - loco_pool: pl.DataFrame, - demand: pl.DataFrame, - network_charging_guidelines: 
pl.DataFrame -) -> pl.DataFrame: - active_ods = demand.select(["Origin","Destination"]).unique() - network_charging_guidelines = (network_charging_guidelines - .join(active_ods, on=["Origin","Destination"], how="inner") - .group_by(pl.col("Origin")) - .agg(pl.col("Allowable_Battery_Headroom_MWh").min() * 1e6 / utilities.MWH_PER_MJ) - .rename({"Allowable_Battery_Headroom_MWh": "Battery_Headroom_J"}) - .with_columns(pl.col("Origin").cast(pl.Categorical))) - refuelers = (refuelers - .join(network_charging_guidelines, left_on="Node", right_on="Origin", how="left") - .with_columns(pl.when(pl.col("Fuel_Type")=="Electricity") - .then(pl.col("Battery_Headroom_J")) - .otherwise(0) - .fill_null(0) - .alias("Battery_Headroom_J") - )) - loco_pool = (loco_pool - .join(network_charging_guidelines, left_on="Node", right_on="Origin", how="left") - .with_columns(pl.when(pl.col("Fuel_Type")=="Electricity") - .then(pl.col("Battery_Headroom_J")) - .otherwise(0) - .fill_null(0) - .alias("Battery_Headroom_J")) - .with_columns(pl.max_horizontal([pl.col('SOC_Max_J')-pl.col('Battery_Headroom_J'), pl.col('SOC_Min_J')]).alias("SOC_J"))) - return refuelers, loco_pool - -def append_loco_info(loco_info: pd.DataFrame) -> pd.DataFrame: - if all(item in loco_info.columns for item in [ - 'HP','Loco_Mass_Tons','SOC_J','SOC_Min_J','SOC_Max_J','Capacity_J' - ] - ): return loco_info - get_hp = lambda loco: loco.pwr_rated_kilowatts * 1e3 / alt.utils.W_PER_HP - get_mass_ton = lambda loco: 0 if not loco.mass_kg else loco.mass_kg / alt.utils.KG_PER_TON - get_starting_soc = lambda loco: defaults.DIESEL_TANK_CAPACITY_J if not loco.res else loco.res.state.soc * loco.res.energy_capacity_joules - get_min_soc = lambda loco: 0 if not loco.res else loco.res.min_soc * loco.res.energy_capacity_joules - get_max_soc = lambda loco: defaults.DIESEL_TANK_CAPACITY_J if not loco.res else loco.res.max_soc * loco.res.energy_capacity_joules - get_capacity = lambda loco: defaults.DIESEL_TANK_CAPACITY_J if not loco.res else 
loco.res.energy_capacity_joules - loco_info.loc[:,'HP'] = loco_info.loc[:,'Rust_Loco'].apply(get_hp) - loco_info.loc[:,'Loco_Mass_Tons'] = loco_info.loc[:,'Rust_Loco'].apply(get_mass_ton) - loco_info.loc[:,'SOC_J'] = loco_info.loc[:,'Rust_Loco'].apply(get_starting_soc) - loco_info.loc[:,'SOC_Min_J'] = loco_info.loc[:,'Rust_Loco'].apply(get_min_soc) - loco_info.loc[:,'SOC_Max_J'] = loco_info.loc[:,'Rust_Loco'].apply(get_max_soc) - loco_info.loc[:,'Capacity_J'] = loco_info.loc[:,'Rust_Loco'].apply(get_capacity) - return loco_info - -def dispatch( - dispatch_time: int, - ton: float, - origin: str, - loco_pool: pl.DataFrame, - hp_per_ton: float, -) -> pl.Series: - """ - Update the locomotive pool by identifying the desired locomotive to dispatch and assign to the - new location (destination) with corresponding updated ready time - Arguments: - ---------- - dispatch_time: time that a train is due - ton: required tonnage in the train - origin: origin node name of the train - loco_pool: locomotive pool dataframe containing all locomotives in the network - hp_per_ton: Horsepower per ton required for this train type on this origin-destination corridor - Outputs: - ---------- - selected: Indices of selected locomotives - """ - hp_required = ton * hp_per_ton - - # Candidate locomotives at the right place that are ready - - candidates = loco_pool.select((pl.col("Node") == origin) & - (pl.col("Status") == "Ready")).to_series() - if not candidates.any(): - message = f"""No available locomotives at node {origin} at hour {dispatch_time}.""" - waiting_counts = (loco_pool - .filter( - pl.col("Status").is_in(["Servicing","Refuel_Queue"]), - pl.col("Node") == origin - ) - .group_by(['Locomotive_Type']).agg(pl.len()) - ) - if waiting_counts.height == 0: - message = message + f"""\nNo locomotives are currently located there. 
Instead, they are at:""" - locations = loco_pool.group_by("Node").agg(pl.len()) - for row in locations.iter_rows(named = True): - message = message + f""" - {row['Node']}: {row['count']}""" - else: - message = message + f"""Count of locomotives refueling or waiting to refuel at {origin} are:""" - for row in waiting_counts.iter_rows(named = True): - message = message + f"""\n{row['Locomotive_Type']}: {row['count']}""" - - raise ValueError(message) - - # Running list of downselected candidates - selected = candidates - # First available diesel (in order of loco_pool) will be moved from candidates to selected - # TODO gracefully handle cases when there is no diesel locomotive to be dispatched - # (ex: hold the train until enough diesels are present) - diesel_filter = pl.col("Fuel_Type").cast(pl.Utf8).str.contains("(?i)diesel") - diesel_candidates = loco_pool.select(pl.lit(candidates) & diesel_filter).to_series() - if not diesel_candidates.any(): - refueling_diesel_count = loco_pool.filter( - pl.col("Node") == origin, - pl.col("Status").is_in(["Servicing","Refuel_Queue"]), - diesel_filter - ).select(pl.len())[0, 0] - message = f"""No available diesel locomotives at node {origin} at hour {dispatch_time}, so - the one-diesel-per-consist rule cannot be satisfied. {refueling_diesel_count} diesel locomotives at - {origin} are servicing, refueling, or queueing.""" - if refueling_diesel_count > 0: - diesel_port_count = loco_pool.filter( - pl.col("Node") == origin, - diesel_filter - ).select(pl.col("Port_Count").min()).item() - message += f""" (queue capacity {diesel_port_count}).""" - else: - message += "." 
- raise ValueError(message) - - diesel_to_require = diesel_candidates.eq(True).cumsum().eq(1).arg_max() - diesel_to_require_hp = loco_pool.filter(diesel_filter).select(pl.first("HP")) - # Need to mask this so it's not double-counted on next step - candidates[diesel_to_require] = False - # Get running sum, including first diesel, of hp of the candidates (in order of loco_pool) - enough_hp = loco_pool.select(( - ( - (pl.col("HP") - (pl.col("Loco_Mass_Tons") * pl.lit(hp_per_ton))) * pl.lit(candidates) - ).cumsum() + pl.lit(diesel_to_require_hp)) >= hp_required).to_series() - if not enough_hp.any(): - available_hp = loco_pool.select( - ( - (pl.col("HP") - (pl.col("Loco_Mass_Tons") * pl.lit(hp_per_ton))) * pl.lit(candidates) - ).cumsum().max())[0, 0] - message = f"""Outbound horsepower needed ({hp_required}) at {origin} at hour {dispatch_time} - is more than the available horsepower ({available_hp}). - Count of locomotives servicing, refueling, or queueing at {origin} are:""" - # Hold the train until enough diesels are present (future development) - waiting_counts = loco_pool.filter( - pl.col("Node") == origin, - pl.col("Status").is_in(["Servicing","Refuel_Queue"]) - ).select("Locomotive_Type").group_by(['Locomotive_Type']).len() - for row in waiting_counts.iter_rows(named = True): - message = message + f""" - {row['Locomotive_Type']}: {row['count']}""" - # Hold the train until enough locomotives are present (future development) - raise ValueError(message) - - last_row_to_use = enough_hp.eq(True).cumsum().eq(1).arg_max() - # Set false all the locomotives that would add unnecessary hp - selected[np.arange(last_row_to_use+1, len(selected))] = False - # Add first diesel (which could come after last_row_to_use) to selection list - selected[diesel_to_require] = True - return selected - -def update_refuel_queue( - loco_pool: pl.DataFrame, - refuelers: pl.DataFrame, - current_time: float, - event_tracker: pl.DataFrame -) -> Tuple[pl.DataFrame, pl.DataFrame]: - """ - Update the 
locomotive pool by identifying the desired locomotive to dispatch and assign to the - new location (destination) with corresponding updated ready time - Arguments: - ---------- - loco_pool: locomotive pool dataframe containing all locomotives in the network - refuelers: refuelers dataframe containing all refueling ports in the network - current_time: - event_tracker: - hp_per_ton: Horsepower per ton required for this train type on this origin-destination corridor - Outputs: - ---------- - loco_pool: Locomotive pool with updates made to servicing, refueling, or queued locomotives - """ - - # If any trains arrived, add arrivals to the service queue - arrived = loco_pool.select((pl.col("Status") == "Dispatched") & - (pl.col("Arrival_Time") <= current_time)).to_series() - if(arrived.sum() > 0): - loco_pool = (loco_pool - .drop(['Refueler_J_Per_Hr','Port_Count','Battery_Headroom_J']) - .join( - refuelers.select(["Node","Locomotive_Type","Fuel_Type","Refueler_J_Per_Hr","Port_Count",'Battery_Headroom_J']), - on=["Node", "Locomotive_Type" ,"Fuel_Type"], - how="left") - .with_columns( - pl.when(arrived) - .then(pl.lit("Refuel_Queue")) - .otherwise(pl.col("Status")).alias("Status"), - pl.when(arrived) - .then(pl.max_horizontal([pl.col('SOC_Max_J')-pl.col('Battery_Headroom_J'), pl.col('SOC_J')])) - .otherwise(pl.col("SOC_Target_J")).alias("SOC_Target_J")) - .with_columns( - pl.when(arrived) - .then((pl.col("SOC_Target_J")-pl.col("SOC_J"))/pl.col("Refueler_J_Per_Hr")) - .otherwise(pl.col("Refuel_Duration")).alias("Refuel_Duration")) - .sort("Node", "Locomotive_Type", "Fuel_Type", "Arrival_Time", "Locomotive_ID", descending = False, nulls_last = True)) - charger_type_breakouts = (loco_pool - .filter( - pl.col("Status") == "Refuel_Queue", - (pl.col("Refueling_Done_Time") >= current_time) | (pl.col("Refueling_Done_Time").is_null()) - ) - .partition_by(["Node","Locomotive_Type"]) - ) - charger_type_list = [] - for charger_type in charger_type_breakouts: - loco_ids = 
charger_type.get_column("Locomotive_ID") - arrival_times = charger_type.get_column("Arrival_Time") - refueling_done_times = charger_type.get_column("Refueling_Done_Time") - refueling_durations = charger_type.get_column("Refuel_Duration") - port_counts = charger_type.get_column("Port_Count") - for i in range(0, refueling_done_times.len()): - if refueling_done_times[i] is not None: continue - next_done = refueling_done_times.filter( - (refueling_done_times.is_not_null()) & - (refueling_done_times.rank(method='ordinal', descending = True).eq(port_counts[i]))) - if next_done.len() == 0: next_done = arrival_times[i] - else: next_done = max(next_done[0], arrival_times[i]) - refueling_done_times[i] = next_done + refueling_durations[i] - charger_type_list.append(pl.DataFrame([loco_ids, refueling_done_times])) - all_queues = pl.concat(charger_type_list, how="diagonal") - loco_pool = (loco_pool - .join(all_queues, on="Locomotive_ID", how="left", suffix="_right") - .with_columns(pl.when(pl.col("Refueling_Done_Time_right").is_not_null()) - .then(pl.col("Refueling_Done_Time_right")) - .otherwise(pl.col("Refueling_Done_Time")) - .alias("Refueling_Done_Time")) - .drop("Refueling_Done_Time_right")) - - # Remove locomotives that are done refueling from the refuel queue - refueling_finished = loco_pool.select( - (pl.col("Status") == "Refuel_Queue") & (pl.col("Refueling_Done_Time") <= current_time) - ).to_series() - refueling_finished_count = refueling_finished.sum() - if(refueling_finished_count > 0): - # Record the refueling event - new_rows = pl.DataFrame([ - np.concatenate([ - np.tile('Refueling_Start', refueling_finished_count), - np.tile('Refueling_End', refueling_finished_count)]), - np.concatenate([ - loco_pool.filter(refueling_finished).select(pl.col('Refueling_Done_Time') - pl.col("Refuel_Duration")).to_series(), - loco_pool.filter(refueling_finished).get_column('Refueling_Done_Time')]), - np.tile(loco_pool.filter(refueling_finished).get_column('Locomotive_ID'), 2)], - 
schema=event_tracker.columns, - orient="col") - event_tracker = pl.concat([event_tracker, new_rows]) - - loco_pool = loco_pool.with_columns( - pl.when(refueling_finished) - .then(pl.col("SOC_Target_J")) - .otherwise(pl.col('SOC_J')) - .alias("SOC_J"), - pl.when(refueling_finished) - .then(pl.lit(None)) - .otherwise(pl.col('Refueling_Done_Time')) - .alias("Refueling_Done_Time"), - pl.when(pl.lit(refueling_finished) & (pl.col("Servicing_Done_Time") <= current_time)) - .then(pl.lit("Ready")) - .when(pl.lit(refueling_finished) & (pl.col("Servicing_Done_Time") > current_time)) - .then(pl.lit("Servicing")) - .otherwise(pl.col('Status')) - .alias("Status")) - - servicing_finished = loco_pool.select( - (pl.col("Status") == "Servicing") & (pl.col("Servicing_Done_Time") <= current_time)).to_series() - if(servicing_finished.sum() > 0): - loco_pool = loco_pool.with_columns( - pl.when(servicing_finished) - .then(pl.lit("Ready")) - .otherwise(pl.col('Status')) - .alias("Status"), - pl.when(servicing_finished) - .then(pl.lit(None)) - .otherwise(pl.col("Servicing_Done_Time")) - .alias("Servicing_Done_Time") - ) - return loco_pool.sort("Locomotive_ID"), event_tracker - -def run_train_planner( - rail_vehicles: List[alt.RailVehicle], - location_map: Dict[str, List[alt.Location]], - network: List[alt.Link], - loco_pool: pl.DataFrame, - refuelers: pl.DataFrame, - simulation_days: int, - scenario_year: int, - train_type: alt.TrainType = alt.TrainType.Freight, - config: TrainPlannerConfig = TrainPlannerConfig(), - demand_file: Union[pl.DataFrame, Path, str] = defaults.DEMAND_FILE, - network_charging_guidelines: pl.DataFrame = None, -) -> Tuple[ - pl.DataFrame, - pl.DataFrame, - pl.DataFrame, - List[alt.SpeedLimitTrainSim], - List[alt.EstTimeNet] -]: - """ - Run the train planner - Arguments: - ---------- - rail_vehicles: - location_map: - network: - loco_pool: - refuelers: - simulation_days: - config: Object storing train planner configuration paramaters - demand_file: - Outputs: - 
---------- - """ - config.loco_info = append_loco_info(config.loco_info) - demand, node_list = demand_loader(demand_file) - if refuelers is None: - refuelers = build_refuelers( - node_list, - loco_pool, - config.refueler_info, - config.refuelers_per_incoming_corridor) - - if network_charging_guidelines is None: - network_charging_guidelines = pl.read_csv(alt.resources_root() / "networks" / "network_charging_guidelines.csv") - - refuelers, loco_pool = append_charging_guidelines(refuelers, loco_pool, demand, network_charging_guidelines) - if config.single_train_mode: - demand = generate_demand_trains(demand, - demand_returns = pl.DataFrame(), - demand_rebalancing = pl.DataFrame(), - rail_vehicles = rail_vehicles, - config = config) - dispatch_times = (demand - .with_row_index(name="index") - .with_columns(pl.col("index").mul(24.0).alias("Hour")) - .drop("index") - ) - else: - demand_returns = generate_return_demand(demand, config) - demand_rebalancing = pl.DataFrame() - if demand.filter(pl.col("Train_Type").str.contains("Manifest")).height > 0: - demand_origin_manifest = generate_origin_manifest_demand(demand, node_list, config) - demand_rebalancing = balance_trains(demand_origin_manifest) - demand = generate_demand_trains(demand, demand_returns, demand_rebalancing, rail_vehicles, config) - dispatch_times = calculate_dispatch_times(demand, simulation_days * 24) - - #TODO eliminate the naming convention that rail vehicles (train types from demand file) must end in `_Loaded` or `_Empty` - dispatch_times = (dispatch_times.with_columns( - pl.when(pl.col("Train_Type").str.ends_with("_Empty")) - .then(pl.col("Train_Type")) - .otherwise(pl.concat_str(pl.col("Train_Type").str.strip_suffix("_Loaded"), - pl.lit("_Loaded"))) - .alias("Train_Type") - ) - ) - - final_departure = dispatch_times.get_column("Hour").max() - train_consist_plan = pl.DataFrame(schema= - {'Train_ID': pl.Int64, - 'Train_Type': pl.Utf8, - 'Locomotive_ID': pl.UInt32, - 'Locomotive_Type': pl.Categorical, - 
'Origin_ID': pl.Utf8, - 'Destination_ID': pl.Utf8, - 'Cars_Loaded': pl.Float64, - 'Cars_Empty': pl.Float64, - 'Departure_SOC_J': pl.Float64, - 'Departure_Time_Planned_Hr': pl.Float64, - 'Arrival_Time_Planned_Hr': pl.Float64}) - event_tracker = pl.DataFrame(schema=[ - ("Event_Type", pl.Utf8), - ("Time_Hr", pl.Float64), - ("Locomotive_ID", pl.UInt32)]) - - train_id_counter = 1 - speed_limit_train_sims = [] - est_time_nets = [] - - done = False - # start at first departure time - current_time = dispatch_times.get_column("Hour").min() - while not done: - # Dispatch new train consists - current_dispatches = dispatch_times.filter(pl.col("Hour") == current_time) - if(current_dispatches.height > 0): - loco_pool, event_tracker = update_refuel_queue(loco_pool, refuelers, current_time, event_tracker) - - for this_train in current_dispatches.iter_rows(named = True): - if this_train['Tons_Per_Train'] > 0: - train_id=str(train_id_counter) - if config.single_train_mode: - selected = loco_pool.select(pl.col("Locomotive_ID").is_not_null().alias("selected")).to_series() - dispatched = loco_pool - else: - selected = dispatch( - current_time, - this_train['Tons_Per_Train'], - this_train['Origin'], - loco_pool, - this_train['HP_Required_Per_Ton'] - ) - dispatched = loco_pool.filter(selected) - - train_config = alt.TrainConfig( - rail_vehicles = [vehicle for vehicle in rail_vehicles if vehicle.car_type==this_train['Train_Type']], - n_cars_by_type = { - this_train['Train_Type']: this_train['Number_of_Cars'] - }, - train_type = train_type, - cd_area_vec = config.drag_coeff_function - ) - - loco_start_soc_j = dispatched.get_column("SOC_J") - dispatch_order = (dispatched.select( - pl.col('Locomotive_ID') - .rank().alias('rank').cast(pl.UInt32) - ).with_row_count().sort('row_nr')) - dispatched = dispatched.sort('Locomotive_ID') - loco_start_soc_pct = dispatched.select(pl.col('SOC_J') / pl.col('Capacity_J')).to_series() - locos = [ - 
config.loco_info[config.loco_info['Locomotive_Type']==loco_type]['Rust_Loco'].to_list()[0].clone() - for loco_type in dispatched.get_column('Locomotive_Type') - ] - [alt.set_param_from_path( - locos[i], - "res.state.soc", - loco_start_soc_pct[i] - ) for i in range(len(locos)) if dispatched.get_column('Fuel_Type')[i] == 'Electricity'] - - loco_con = alt.Consist( - loco_vec=locos, - save_interval=None, - ) - - init_train_state = alt.InitTrainState( - time_seconds=current_time * 3600 - ) - tsb = alt.TrainSimBuilder( - train_id=train_id, - origin_id=this_train['Origin'], - destination_id=this_train['Destination'], - train_config=train_config, - loco_con=loco_con, - init_train_state=init_train_state, - ) - - slts = tsb.make_speed_limit_train_sim( - location_map=location_map, - save_interval=None, - simulation_days=simulation_days, - scenario_year=scenario_year - ) - - (est_time_net, loco_con_out) = alt.make_est_times(slts, network) - travel_time = ( - est_time_net.get_running_time_hours() - * config.dispatch_scaling_dict["time_mult_factor"] - + config.dispatch_scaling_dict["hours_add"] - ) - - locos = loco_con_out.loco_vec.tolist() - energy_use_locos = [loco.res.state.energy_out_chemical_joules if loco.res else loco.fc.state.energy_fuel_joules if loco.fc else 0 for loco in locos] - energy_use_j = np.zeros(len(loco_pool)) - energy_use_j[selected] = [energy_use_locos[i-1] for i in dispatch_order.get_column('rank').to_list()] - energy_use_j *= config.dispatch_scaling_dict["energy_mult_factor"] - energy_use_j = pl.Series(energy_use_j) - speed_limit_train_sims.append(slts) - est_time_nets.append(est_time_net) - loco_pool = loco_pool.with_columns( - pl.when(selected) - .then(pl.lit(this_train['Destination'])) - .otherwise(pl.col('Node')).alias("Node"), - pl.when(selected) - .then(pl.lit(current_time + travel_time)) - .otherwise(pl.col('Arrival_Time')).alias("Arrival_Time"), - pl.when(selected) - .then(pl.lit(current_time + travel_time) + pl.col('Min_Servicing_Time_Hr')) - 
.otherwise(pl.col('Servicing_Done_Time')).alias("Servicing_Done_Time"), - pl.when(selected) - .then(None) - .otherwise(pl.col('Refueling_Done_Time')).alias("Refueling_Done_Time"), - pl.when(selected) - .then(pl.lit("Dispatched")) - .otherwise(pl.col('Status')).alias("Status"), - pl.when(selected) - .then(pl.max_horizontal( - pl.col('SOC_Min_J'), - pl.min_horizontal( - pl.col('SOC_J') - pl.lit(energy_use_j), - pl.col('SOC_Max_J')))) - .otherwise(pl.col('SOC_J')).alias("SOC_J") - ) - - # Populate the output dataframe with the dispatched trains - new_row_count = selected.sum() - new_rows = pl.DataFrame([ - pl.Series(repeat(train_id_counter, new_row_count)), - pl.Series(repeat(this_train['Train_Type'], new_row_count)), - loco_pool.filter(selected).get_column('Locomotive_ID'), - loco_pool.filter(selected).get_column('Locomotive_Type'), - pl.Series(repeat(this_train['Origin'], new_row_count)), - pl.Series(repeat(this_train['Destination'], new_row_count)), - pl.Series(repeat(this_train['Cars_Loaded'], new_row_count)), - pl.Series(repeat(this_train['Cars_Empty'], new_row_count)), - # pl.Series(repeat(this_train['Number_of_Cars'], new_row_count)), - loco_start_soc_j, - pl.Series(repeat(current_time, new_row_count)), - pl.Series(repeat(current_time + travel_time, new_row_count))], - schema = train_consist_plan.columns, - orient="col") - train_consist_plan = pl.concat([train_consist_plan, new_rows], how="diagonal_relaxed") - train_id_counter += 1 - - if current_time >= final_departure: - current_time = float("inf") - loco_pool, event_tracker = update_refuel_queue(loco_pool, refuelers, current_time, event_tracker) - done = True - else: - current_time = dispatch_times.filter(pl.col("Hour").gt(current_time)).get_column("Hour").min() - - train_consist_plan = (train_consist_plan - .with_columns( - cs.categorical().cast(str), - pl.col("Train_ID", "Locomotive_ID").cast(pl.UInt32) - ) - .sort(["Locomotive_ID", "Train_ID"], descending=False) - ) - loco_pool = 
loco_pool.with_columns(cs.categorical().cast(str)) - refuelers = refuelers.with_columns(cs.categorical().cast(str)) - - event_tracker = event_tracker.sort(["Locomotive_ID","Time_Hr","Event_Type"]) - service_starts = (event_tracker - .filter(pl.col("Event_Type") == "Refueling_Start") - .get_column("Time_Hr") - .rename("Refuel_Start_Time_Planned_Hr")) - service_ends = (event_tracker - .filter(pl.col("Event_Type") == "Refueling_End") - .get_column("Time_Hr") - .rename("Refuel_End_Time_Planned_Hr")) - - train_consist_plan = train_consist_plan.with_columns( - service_starts, service_ends - ) - - return train_consist_plan, loco_pool, refuelers, speed_limit_train_sims, est_time_nets - - -if __name__ == "__main__": - - rail_vehicles=[alt.RailVehicle.from_file(vehicle_file) - for vehicle_file in Path(alt.resources_root() / "rolling_stock/").glob('*.yaml')] - - location_map = alt.import_locations( - str(alt.resources_root() / "networks/default_locations.csv") - ) - network = alt.Network.from_file( - str(alt.resources_root() / "networks/Taconite-NoBalloon.yaml") - ) - config = TrainPlannerConfig() - loco_pool = build_locopool(config, defaults.DEMAND_FILE) - demand, node_list = demand_loader(defaults.DEMAND_FILE) - refuelers = build_refuelers( - node_list, - loco_pool, - config.refueler_info, - config.refuelers_per_incoming_corridor) - - output = run_train_planner( - rail_vehicles=rail_vehicles, - location_map=location_map, - network=network, - loco_pool=loco_pool, - refuelers=refuelers, - simulation_days=defaults.SIMULATION_DAYS + 2 * defaults.WARM_START_DAYS, - scenario_year=defaults.BASE_ANALYSIS_YEAR, - config=config) diff --git a/python/altrios/train_planner/data_prep.py b/python/altrios/train_planner/data_prep.py new file mode 100644 index 00000000..a5f335c3 --- /dev/null +++ b/python/altrios/train_planner/data_prep.py @@ -0,0 +1,466 @@ +from typing import Union, List, Tuple, Dict +from pathlib import Path +import polars as pl +import polars.selectors as cs +import 
pandas as pd +import numpy as np +import math +from scipy.stats import rankdata +import altrios as alt +from altrios import defaults, utilities +from altrios.train_planner import planner_config + +day_order_map = { + "Mon": 1, + "Tue": 2, + "Wed": 3, + "Thu": 4, + "Fri": 5, + "Sat": 6, + "Sun": 7 +} + +def convert_demand_to_sim_days( + demand_table: Union[pl.DataFrame, pl.LazyFrame], + simulation_days: int +) -> Union[pl.DataFrame, pl.LazyFrame]: + if "Number_of_Days" in demand_table.collect_schema(): + return demand_table.with_columns( + cs.starts_with("Number_of_").truediv(pl.col("Number_of_Days").truediv(simulation_days)) + ) + + else: + print("`Number_of_Days` not specified in demand file. Assuming demand in the file is expressed per week.") + return demand_table.with_columns( + cs.starts_with("Number_of_").mul(simulation_days / 7.0) + ) + + +def load_freight_demand( + demand_table: Union[pl.DataFrame, pl.LazyFrame, Path, str], + config: planner_config.TrainPlannerConfig, +) -> Tuple[pl.DataFrame, pl.Series, int]: + """ + Load the user input csv file into a dataframe for later processing + Arguments: + ---------- + user_input_file: path to the input csv file that user import to the module + Example Input: + Origin Destination Train_Type Number_of_Cars Number_of_Containers + Barstow Stockton Unit 2394 0 + Barstow Stockton Manifest 2588 0 + Barstow Stockton Intermodal 2221 2221 + + Outputs: + ---------- + df_annual_demand: dataframe with all pair information including: + origin, destination, train type, number of cars + node_list: List of origin or destination demand nodes + """ + if isinstance(demand_table, (Path, str)): + demand_table = (pl.read_csv(demand_table) + .pipe(convert_demand_to_sim_days, simulation_days = config.simulation_days) + ) + elif "Hour" not in demand_table.collect_schema(): + demand_table = (demand_table + .pipe(convert_demand_to_sim_days, simulation_days = config.simulation_days) + ) + + nodes = pl.concat( + 
[demand_table.get_column("Origin"), + demand_table.get_column("Destination")]).unique().sort() + return demand_table, nodes + +def prep_hourly_demand( + total_demand: Union[pl.DataFrame, pl.LazyFrame], + hourly_demand_density: Union[pl.DataFrame, pl.LazyFrame], + daily_demand_density: Union[pl.DataFrame, pl.LazyFrame], + simulation_weeks = 1 +) -> Union[pl.DataFrame, pl.LazyFrame]: + if "Number_of_Containers" in total_demand.collect_schema(): + demand_col = "Number_of_Containers" + else: + demand_col = "Number_of_Cars" + + total_demand = total_demand.pipe(convert_demand_to_sim_days, simulation_days = simulation_weeks * 7) + + hourly_demand_density = (hourly_demand_density + .group_by("Terminal_Type", "Hour_Of_Day") + .agg(pl.col("Share").sum()) + .with_columns(pl.col("Share").truediv(pl.col("Share").sum().over("Terminal_Type"))) + ) + daily_demand_density = (daily_demand_density + .group_by("Terminal_Type", "Day_Of_Week") + .agg(pl.col("Share").sum()) + .with_columns(pl.col("Share").truediv(pl.col("Share").sum().over("Terminal_Type"))) + ) + one_week = (total_demand + .join(daily_demand_density, how="inner", on=["Terminal_Type"]) + .with_columns( + (pl.col(demand_col) * pl.col("Share")).alias(f'{demand_col}_Daily'), + pl.col("Day_Of_Week").replace_strict(day_order_map).alias("Day_Order") + ) + .pipe(utilities.allocateItems, grouping_vars=["Origin", "Destination", "Train_Type"], count_target=f'{demand_col}_Daily') + .drop(f'{demand_col}_Daily', "Share") + .rename({"Count": f'{demand_col}_Daily'}) + .join(hourly_demand_density, how="inner", on=["Terminal_Type"]) + .sort("Origin", "Destination", "Day_Order", "Hour_Of_Day") + .with_columns( + (pl.col(f'{demand_col}_Daily') * pl.col("Share")).alias(demand_col), + pl.concat_str(pl.col("Origin"), pl.lit("-"), pl.col("Destination")).alias("OD_Pair"), + pl.int_range(0, pl.len()).over("Origin", "Destination").alias("Hour") + ) + .pipe(utilities.allocateItems, grouping_vars=["Origin", "Destination", "Train_Type", 
"Day_Order"], count_target=demand_col) + .drop(demand_col) + .rename({"Count": demand_col}) + .select("Origin", "Destination", "Train_Type", "Hour", "Number_of_Days", demand_col) + ) + return ( + pl.concat([ + one_week, + one_week.with_columns(pl.col("Hour").add(24*7)), + one_week.with_columns(pl.col("Hour").add(24*7*2)) + ]) + .with_columns(pl.col("Number_of_Days").mul(3)) + .sort("Origin", "Destination", "Train_Type", "Hour") + ) + +def append_loco_info(loco_info: pd.DataFrame) -> pd.DataFrame: + if all(item in loco_info.columns for item in [ + 'HP','Loco_Mass_Tons','SOC_J','SOC_Min_J','SOC_Max_J','Capacity_J' + ] + ): return loco_info + get_hp = lambda loco: loco.pwr_rated_kilowatts * 1e3 / alt.utils.W_PER_HP + get_mass_ton = lambda loco: 0 if not loco.mass_kg else loco.mass_kg / alt.utils.KG_PER_TON + get_starting_soc = lambda loco: defaults.DIESEL_TANK_CAPACITY_J if not loco.res else loco.res.state.soc * loco.res.energy_capacity_joules + get_min_soc = lambda loco: 0 if not loco.res else loco.res.min_soc * loco.res.energy_capacity_joules + get_max_soc = lambda loco: defaults.DIESEL_TANK_CAPACITY_J if not loco.res else loco.res.max_soc * loco.res.energy_capacity_joules + get_capacity = lambda loco: defaults.DIESEL_TANK_CAPACITY_J if not loco.res else loco.res.energy_capacity_joules + loco_info.loc[:,'HP'] = loco_info.loc[:,'Rust_Loco'].apply(get_hp) + loco_info.loc[:,'Loco_Mass_Tons'] = loco_info.loc[:,'Rust_Loco'].apply(get_mass_ton) + loco_info.loc[:,'SOC_J'] = loco_info.loc[:,'Rust_Loco'].apply(get_starting_soc) + loco_info.loc[:,'SOC_Min_J'] = loco_info.loc[:,'Rust_Loco'].apply(get_min_soc) + loco_info.loc[:,'SOC_Max_J'] = loco_info.loc[:,'Rust_Loco'].apply(get_max_soc) + loco_info.loc[:,'Capacity_J'] = loco_info.loc[:,'Rust_Loco'].apply(get_capacity) + return loco_info + +def build_locopool( + config: planner_config.TrainPlannerConfig, + demand_file: Union[pl.DataFrame, pl.LazyFrame, Path, str], + dispatch_schedule: Union[pl.DataFrame, pl.LazyFrame] = None, 
+ method: str = "tile", + shares: List[float] = [], + locomotives_per_node: int = None +) -> pl.DataFrame: + """ + Generate default locomotive pool + Arguments: + ---------- + demand_file: Path to a file with origin-destination demand + method: Method to determine each locomotive's type ("tile" or "shares_twoway" currently implemented) + shares: List of shares for each locomotive type in loco_info (implemented for two-way shares only) + Outputs: + ---------- + loco_pool: Locomotive pool containing all locomotives' information that are within the system + """ + config.loco_info = append_loco_info(config.loco_info) + loco_types = list(config.loco_info.loc[:,'Locomotive_Type']) + demand, node_list = load_freight_demand(demand_file, config) + #TODO: handle different train types (or mixed train types?) + + num_nodes = len(node_list) + if locomotives_per_node is None: + num_ods = demand.select("Origin", "Destination").unique().height + if "Number_of_Cars" in demand.collect_schema(): + cars_per_od = (demand + .group_by("Origin","Destination") + .agg(pl.col("Number_of_Cars").sum()) + .get_column("Number_of_Cars").mean() + ) + elif "Number_of_Containers" in demand.collect_schema(): + cars_per_od = (demand + .group_by("Origin","Destination") + .agg(pl.col("Number_of_Containers").sum()) + .get_column("Number_of_Containers").mean() + ) / config.containers_per_car + else: + assert("No valid columns in demand DataFrame") + if config.single_train_mode: + initial_size = math.ceil(cars_per_od / min(config.cars_per_locomotive.values())) + rows = initial_size + else: + num_destinations_per_node = num_ods*1.0 / num_nodes*1.0 + initial_size_demand = math.ceil((cars_per_od / min(config.cars_per_locomotive.values())) * + num_destinations_per_node) # number of locomotives per node + initial_size_hp = 0 + if dispatch_schedule is not None: + # Compute the 24-hour window with the most total locomotives needed + # (assuming each loco is only dispatched once in a given day) + loco_mass = 
config.loco_info['Loco_Mass_Tons'].mean() + hp_per_ton = config.hp_required_per_ton['Default'][dispatch_schedule.select(pl.col("Train_Type").mode()).item()] + hp_per_loco = config.loco_info['HP'].mean() - loco_mass * hp_per_ton + initial_size_hp = (dispatch_schedule + .with_columns((pl.col("Hour") // 24).cast(pl.Int32).alias("Day"), + pl.col("HP_Required").truediv(hp_per_loco).ceil().mul(config.loco_pool_safety_factor).alias("Locos_Per_Dispatch")) + .group_by("Day", "Origin") + .agg(pl.col("Locos_Per_Dispatch").ceil().sum().alias("Locos_Per_Day_Per_Origin")) + .select(pl.col("Locos_Per_Day_Per_Origin").max().cast(pl.Int64)).item() + ) + initial_size = max(initial_size_demand, initial_size_hp) + rows = initial_size * num_nodes # number of locomotives in total + else: + initial_size = locomotives_per_node + rows = locomotives_per_node * num_nodes + + if config.single_train_mode: + sorted_nodes = np.tile([demand.select(pl.col("Origin").first()).item()],rows).tolist() + engine_numbers = range(0, rows) + else: + sorted_nodes = np.sort(np.tile(node_list, initial_size)).tolist() + engine_numbers = rankdata(sorted_nodes, method="dense") * 1000 + \ + np.tile(range(0, initial_size), num_nodes) + + if method == "tile": + repetitions = math.ceil(rows/len(loco_types)) + types = np.tile(loco_types, repetitions).tolist()[0:rows] + elif method == "shares_twoway": + # TODO: this logic can be replaced (and generalized to >2 types) using altrios.utilities.allocateItems + if((len(loco_types) != 2) | (len(shares) != 2)): + raise ValueError( + f"""2-way prescribed locopool requested but number of locomotive types is not 2.""") + + idx_1 = np.argmin(shares) + idx_2 = 1 - idx_1 + share_type_one = shares[idx_1] + label_type_one = loco_types[idx_1] + label_type_two = loco_types[idx_2] + + num_type_one = round(initial_size * share_type_one) + if 0 == num_type_one: + types = pd.Series([label_type_two] * initial_size) + elif initial_size == num_type_one: + types = pd.Series([label_type_one] * 
initial_size) + else: + # Arrange repeated sequences of type 1 + {type_two_per_type_one, type_two_per_type_one+1} type 2 + # so as to match the required total counts of each. + type_two_per_type_one = ( + initial_size - num_type_one) / num_type_one + # Number of type 1 + {type_two_per_bel+1} type 2 sequences needed + num_extra_type_two = round( + num_type_one * (type_two_per_type_one % 1.0)) + series_fewer_type_two = pd.Series( + [label_type_one] + [label_type_two] * math.floor(type_two_per_type_one)) + series_more_type_two = pd.Series( + [label_type_one] + [label_type_two] * math.ceil(type_two_per_type_one)) + types = np.concatenate(( + np.tile(series_more_type_two, num_extra_type_two), + np.tile(series_fewer_type_two, num_type_one-num_extra_type_two)), + axis=None) + types = np.tile(types, num_nodes).tolist() + else: + raise ValueError( + f"""Locopool build method '{method}' invalid or not implemented.""") + + loco_pool = pl.DataFrame( + {'Locomotive_ID': pl.Series(engine_numbers, dtype=pl.UInt32), + 'Locomotive_Type': pl.Series(types, dtype=pl.Categorical), + 'Node': pl.Series(sorted_nodes, dtype=pl.Categorical), + 'Arrival_Time': pl.Series(np.zeros(rows), dtype=pl.Float64), + 'Servicing_Done_Time': pl.Series(np.zeros(rows), dtype=pl.Float64), + 'Refueling_Done_Time': pl.Series(np.tile(0, rows), dtype=pl.Float64), + 'Status': pl.Series(np.tile("Ready", rows), dtype=pl.Categorical), + 'SOC_Target_J': pl.Series(np.zeros(rows), dtype=pl.Float64), + 'Refuel_Duration': pl.Series(np.zeros(rows), dtype=pl.Float64), + 'Refueler_J_Per_Hr': pl.Series(np.zeros(rows), dtype=pl.Float64), + 'Refueler_Efficiency': pl.Series(np.zeros(rows), dtype=pl.Float64), + 'Port_Count': pl.Series(np.zeros(rows), dtype=pl.UInt32)} + ) + + loco_info_pl = pl.from_pandas(config.loco_info.drop(labels='Rust_Loco',axis=1), + schema_overrides={'Locomotive_Type': pl.Categorical, + 'Fuel_Type': pl.Categorical} + ) + + loco_pool = loco_pool.join(loco_info_pl, on="Locomotive_Type") + return loco_pool 
+ +def build_refuelers( + node_list: pd.Series, + loco_pool: pl.DataFrame, + refueler_info: pd.DataFrame, + refuelers_per_incoming_corridor: int, +) -> pl.DataFrame: + """ + Build the default set of refueling facilities. + Arguments: + ---------- + node_list: List of origin or destination demand nodes + loco_pool: Locomotive pool + refueler_info: DataFrame with information for each type of refueling infrastructure to use + refuelers_per_incoming_corridor: Queue size per corridor arriving at each node. + Outputs: + ---------- + refuelers: Polars dataframe of facility county by node and type of fuel + """ + ports_per_node = (loco_pool + .group_by(pl.col("Locomotive_Type", "Fuel_Type").cast(pl.Utf8)) + .agg([(pl.lit(refuelers_per_incoming_corridor) * pl.len() / pl.lit(loco_pool.height)) + .ceil() + .alias("Ports_Per_Node")]) + .join(pl.from_pandas(refueler_info), + on=["Locomotive_Type", "Fuel_Type"], + how="left") + ) + + locations = pd.DataFrame(data={ + 'Node': np.tile(node_list, ports_per_node.height)}) + locations = locations.sort_values(by=['Node']).reset_index(drop=True) + + refuelers = pl.DataFrame({ + 'Node': pl.Series(locations['Node'], dtype=pl.Categorical).cast(pl.Categorical), + 'Refueler_Type': pl.Series(np.tile( + ports_per_node.get_column("Refueler_Type").to_list(), len(node_list)), + dtype=pl.Categorical).cast(pl.Categorical), + 'Locomotive_Type': pl.Series(np.tile( + ports_per_node.get_column("Locomotive_Type").to_list(), len(node_list)), + dtype=pl.Categorical).cast(pl.Categorical), + 'Fuel_Type': pl.Series(np.tile( + ports_per_node.get_column("Fuel_Type").to_list(), len(node_list)), + dtype=pl.Categorical).cast(pl.Categorical), + 'Refueler_J_Per_Hr': pl.Series(np.tile( + ports_per_node.get_column("Refueler_J_Per_Hr").to_list(), len(node_list)), + dtype=pl.Float64), + 'Refueler_Efficiency': pl.Series(np.tile( + ports_per_node.get_column("Refueler_Efficiency").to_list(), len(node_list)), + dtype=pl.Float64), + 'Lifespan_Years': pl.Series(np.tile( + 
ports_per_node.get_column("Lifespan_Years").to_list(), len(node_list)), + dtype=pl.Float64), + 'Cost_USD': pl.Series(np.tile( + ports_per_node.get_column("Cost_USD").to_list(), len(node_list)), + dtype=pl.Float64), + 'Port_Count': pl.Series(np.tile( + ports_per_node.get_column("Ports_Per_Node").to_list(), len(node_list)), + dtype=pl.UInt32)}) + return refuelers + +def append_charging_guidelines( + refuelers: pl.DataFrame, + loco_pool: pl.DataFrame, + demand: pl.DataFrame, + network_charging_guidelines: pl.DataFrame +) -> pl.DataFrame: + active_ods = demand.select(["Origin","Destination"]).unique() + network_charging_guidelines = (network_charging_guidelines + .join(active_ods, on=["Origin","Destination"], how="inner") + .group_by(pl.col("Origin")) + .agg(pl.col("Allowable_Battery_Headroom_MWh").min() * 1e6 / utilities.MWH_PER_MJ) + .rename({"Allowable_Battery_Headroom_MWh": "Battery_Headroom_J"}) + .with_columns(pl.col("Origin").cast(pl.Categorical))) + refuelers = (refuelers + .join(network_charging_guidelines, left_on="Node", right_on="Origin", how="left") + .with_columns(pl.when(pl.col("Fuel_Type")=="Electricity") + .then(pl.col("Battery_Headroom_J")) + .otherwise(0) + .fill_null(0) + .alias("Battery_Headroom_J") + )) + loco_pool = (loco_pool + .join(network_charging_guidelines, left_on="Node", right_on="Origin", how="left") + .with_columns(pl.when(pl.col("Fuel_Type")=="Electricity") + .then(pl.col("Battery_Headroom_J")) + .otherwise(0) + .fill_null(0) + .alias("Battery_Headroom_J")) + .with_columns(pl.max_horizontal([pl.col('SOC_Max_J')-pl.col('Battery_Headroom_J'), pl.col('SOC_Min_J')]).alias("SOC_J"))) + return refuelers, loco_pool + +def configure_rail_vehicles( + single_train_dispatch: Dict, + available_rail_vehicles: List[alt.RailVehicle], + freight_type_to_car_type: Dict +) -> (List[alt.RailVehicle], Dict[str, int]): + freight_types = [] + n_cars_by_type = {} + this_train_type = single_train_dispatch['Train_Type'] + if single_train_dispatch['Cars_Loaded'] 
> 0: + freight_type = f'{this_train_type}_Loaded' + freight_types.append(freight_type) + car_type = None + if freight_type in freight_type_to_car_type: + car_type = freight_type_to_car_type[freight_type] + else: + assert(f'Rail vehicle car type not found for freight type {freight_type}.') + n_cars_by_type[car_type] = int(single_train_dispatch['Cars_Loaded']) + if single_train_dispatch['Cars_Empty'] > 0: + freight_type = f'{this_train_type}_Empty' + freight_types.append(freight_type) + car_type = None + if freight_type in freight_type_to_car_type: + car_type = freight_type_to_car_type[freight_type] + else: + assert(f'Rail vehicle car type not found for freight type {freight_type}.') + n_cars_by_type[car_type] = int(single_train_dispatch['Cars_Empty']) + + rv_to_use = [vehicle for vehicle in available_rail_vehicles if vehicle.freight_type in freight_types] + return rv_to_use, n_cars_by_type + +def appendTonsAndHP( + df: Union[pl.DataFrame, pl.LazyFrame], + rail_vehicles, + freight_type_to_car_type, + config +) -> Union[pl.DataFrame, pl.LazyFrame]: + + hp_per_ton = pl.concat([ + (pl.DataFrame(this_dict) + .melt(variable_name="Train_Type", value_name="HP_Required_Per_Ton") + .with_columns(pl.lit(this_item).alias("O_D")) + .with_columns(pl.col("O_D").str.split("_").list.first().alias("Origin"), + pl.col("O_D").str.split("_").list.last().alias("Destination")) + ) + for this_item, this_dict in config.hp_required_per_ton.items() + ], how="horizontal_relaxed") + + def get_kg_empty(veh): + return veh.mass_static_base_kilograms + veh.axle_count * veh.mass_rot_per_axle_kilograms + def get_kg(veh): + return veh.mass_static_base_kilograms + veh.mass_freight_kilograms + veh.axle_count * veh.mass_rot_per_axle_kilograms + + tons_per_car = ( + pl.DataFrame({"Car_Type": pl.Series([rv.car_type for rv in rail_vehicles]), + "KG": [get_kg(rv) for rv in rail_vehicles], + "KG_Empty": [get_kg_empty(rv) for rv in rail_vehicles] + }) + .with_columns( + 
pl.when(pl.col("Car_Type").str.to_lowercase().str.contains("_empty")) + .then(pl.col("KG_Empty") / utilities.KG_PER_TON) + .otherwise(pl.col("KG") / utilities.KG_PER_TON) + .alias("Tons_Per_Car") + ) + .drop(["KG_Empty","KG"]) + ) + + return (df + .with_columns( + pl.when(pl.col("Train_Type").str.contains(pl.lit("_Empty"))) + .then(pl.col("Train_Type")) + .otherwise(pl.concat_str(pl.col("Train_Type").str.strip_suffix("_Loaded"), pl.lit("_Loaded"))) + .replace_strict(freight_type_to_car_type) + .alias("Car_Type") + ) + .join(tons_per_car, how="left", on="Car_Type") + # Merge on OD-specific hp_per_ton if the user specified any + .join(hp_per_ton.filter(pl.col("O_D") != pl.lit("Default")).drop("O_D"), + on=[pl.col("Origin"), pl.col("Destination"), pl.col("Train_Type").str.strip_suffix("_Empty").str.strip_suffix("_Loaded")], + how="left") + # Second, merge on defaults per train type + .join(hp_per_ton.filter((pl.col("O_D") =="Default")).drop(["O_D","Origin","Destination"]), + on=[pl.col("Train_Type").str.strip_suffix("_Empty").str.strip_suffix("_Loaded")], + how="left", + suffix="_Default") + .with_columns( + pl.coalesce("HP_Required_Per_Ton", "HP_Required_Per_Ton_Default").alias("HP_Required_Per_Ton") + ) + .drop(cs.ends_with("_Default") | cs.ends_with("_right")) + ) \ No newline at end of file diff --git a/python/altrios/train_planner/planner.py b/python/altrios/train_planner/planner.py new file mode 100644 index 00000000..1bfc94f6 --- /dev/null +++ b/python/altrios/train_planner/planner.py @@ -0,0 +1,597 @@ +from __future__ import annotations +from pathlib import Path +from altrios.train_planner import data_prep, schedulers, planner_config, train_demand_generators +import numpy as np +import polars as pl +import polars.selectors as cs +from typing import Union, Tuple, List, Dict +from itertools import repeat +import altrios as alt +from altrios import defaults + +def dispatch( + dispatch_time: int, + origin: str, + loco_pool: pl.DataFrame, + train_tonnage: float, + 
hp_required: float, + total_cars: float, + config: planner_config.TrainPlannerConfig, +) -> pl.Series: + """ + Update the locomotive pool by identifying the desired locomotive to dispatch and assign to the + new location (destination) with corresponding updated ready time + Arguments: + ---------- + dispatch_time: time that a train is due + origin: origin node name of the train + loco_pool: locomotive pool dataframe containing all locomotives in the network + hp_required: Horsepower required for this train type on this origin-destination corridor + total_cars: Total number of cars (loaded, empty, or otherwise) included on the train + config: TrainPlannerConfig object + Outputs: + ---------- + selected: Indices of selected locomotives + """ + hp_per_ton = hp_required / train_tonnage + # Candidate locomotives at the right place that are ready + candidates = loco_pool.select((pl.col("Node") == origin) & + (pl.col("Status") == "Ready")).to_series() + if not candidates.any(): + message = f"""No available locomotives at node {origin} at hour {dispatch_time}.""" + waiting_counts = (loco_pool + .filter( + pl.col("Status").is_in(["Servicing","Refuel_Queue"]), + pl.col("Node") == origin + ) + .group_by(['Locomotive_Type']).agg(pl.len()) + ) + if waiting_counts.height == 0: + message = message + f"""\nNo locomotives are currently located there. 
Instead, they are at:""" + locations = loco_pool.group_by("Node").agg(pl.len()) + for row in locations.iter_rows(named = True): + message = message + f""" + {row['Node']}: {row['count']}""" + else: + message = message + f"""Count of locomotives refueling or waiting to refuel at {origin} are:""" + for row in waiting_counts.iter_rows(named = True): + message = message + f"""\n{row['Locomotive_Type']}: {row['count']}""" + + raise ValueError(message) + + # Running list of downselected candidates + selected = candidates + diesel_to_require_hp = 0 + if config.require_diesel: + # First available diesel (in order of loco_pool) will be moved from candidates to selected + # TODO gracefully handle cases when there is no diesel locomotive to be dispatched + # (ex: hold the train until enough diesels are present) + diesel_filter = pl.col("Fuel_Type").cast(pl.Utf8).str.contains("(?i)diesel") + diesel_candidates = loco_pool.select(pl.lit(candidates) & diesel_filter).to_series() + if not diesel_candidates.any(): + refueling_diesel_count = loco_pool.filter( + pl.col("Node") == origin, + pl.col("Status").is_in(["Servicing","Refuel_Queue"]), + diesel_filter + ).select(pl.len())[0, 0] + message = f"""No available diesel locomotives at node {origin} at hour {dispatch_time}, so + the one-diesel-per-consist rule cannot be satisfied. {refueling_diesel_count} diesel locomotives at + {origin} are servicing, refueling, or queueing.""" + if refueling_diesel_count > 0: + diesel_port_count = loco_pool.filter( + pl.col("Node") == origin, + diesel_filter + ).select(pl.col("Port_Count").min()).item() + message += f""" (queue capacity {diesel_port_count}).""" + else: + message += "." 
+ raise ValueError(message) + + diesel_to_require = diesel_candidates.eq(True).cum_sum().eq(1).arg_max() + diesel_to_require_hp = loco_pool.filter(diesel_filter).select(pl.first("HP")) + # Need to mask this so it's not double-counted on next step + candidates[diesel_to_require] = False + + message = "" + if config.cars_per_locomotive_fixed: + # Get as many available locomotives as are needed (in order of loco_pool) + enough = loco_pool.select( + (pl.lit(1.0) * pl.lit(candidates)).cum_sum() >= total_cars).to_series() + if not enough.any(): + message = f"""Locomotives needed ({total_cars}) at {origin} at hour {dispatch_time} + is more than the available locomotives ({candidates.sum()}). + Count of locomotives servicing, refueling, or queueing at {origin} are:""" + else: + # Get running sum, including first diesel, of hp of the candidates (in order of loco_pool) + enough = loco_pool.select(( + ( + (pl.col("HP") - (pl.col("Loco_Mass_Tons") * pl.lit(hp_per_ton))) * pl.lit(candidates) + ).cum_sum() + pl.lit(diesel_to_require_hp)) >= hp_required).to_series() + if not enough.any(): + available_hp = loco_pool.select( + ( + (pl.col("HP") - (pl.col("Loco_Mass_Tons") * pl.lit(hp_per_ton))) * pl.lit(candidates) + ).cum_sum().max())[0, 0] + message = f"""Outbound horsepower needed ({hp_required}) at {origin} at hour {dispatch_time} + is more than the available horsepower ({available_hp}). 
+ Count of locomotives servicing, refueling, or queueing at {origin} are:""" + + if not enough.any(): + # Hold the train until enough diesels are present (future development) + waiting_counts = (loco_pool + .filter( + pl.col("Node") == origin, + pl.col("Status").is_in(["Servicing","Refuel_Queue"]) + ) + .group_by(['Locomotive_Type']) + .agg(pl.count().alias("count")) + ) + for row in waiting_counts.iter_rows(named = True): + message = message + f""" + {row['Locomotive_Type']}: {row['count']}""" + # Hold the train until enough locomotives are present (future development) + raise ValueError(message) + + last_row_to_use = enough.eq(True).cum_sum().eq(1).arg_max() + # Set false all the locomotives that would add unnecessary hp + selected[np.arange(last_row_to_use+1, len(selected))] = False + + if config.require_diesel: + # Add first diesel (which could come after last_row_to_use) to selection list + selected[diesel_to_require] = True + return selected + +def update_refuel_queue( + loco_pool: pl.DataFrame, + refuelers: pl.DataFrame, + current_time: float, + event_tracker: pl.DataFrame +) -> Tuple[pl.DataFrame, pl.DataFrame]: + """ + Update the locomotive pool by identifying the desired locomotive to dispatch and assign to the + new location (destination) with corresponding updated ready time + Arguments: + ---------- + loco_pool: locomotive pool dataframe containing all locomotives in the network + refuelers: refuelers dataframe containing all refueling ports in the network + current_time: + event_tracker: + hp_per_ton: Horsepower per ton required for this train type on this origin-destination corridor + Outputs: + ---------- + loco_pool: Locomotive pool with updates made to servicing, refueling, or queued locomotives + """ + + # If any trains arrived, add arrivals to the service queue + arrived = loco_pool.select((pl.col("Status") == "Dispatched") & + (pl.col("Arrival_Time") <= current_time)).to_series() + if(arrived.sum() > 0): + loco_pool = (loco_pool + 
.drop(['Refueler_J_Per_Hr','Port_Count','Battery_Headroom_J']) + .join( + refuelers.select(["Node","Locomotive_Type","Fuel_Type","Refueler_J_Per_Hr","Port_Count",'Battery_Headroom_J']), + on=["Node", "Locomotive_Type" ,"Fuel_Type"], + how="left") + .with_columns( + pl.when(arrived) + .then(pl.lit("Refuel_Queue")) + .otherwise(pl.col("Status")).alias("Status"), + pl.when(arrived) + .then(pl.max_horizontal([pl.col('SOC_Max_J')-pl.col('Battery_Headroom_J'), pl.col('SOC_J')])) + .otherwise(pl.col("SOC_Target_J")).alias("SOC_Target_J")) + .with_columns( + pl.when(arrived) + .then((pl.col("SOC_Target_J")-pl.col("SOC_J"))/pl.col("Refueler_J_Per_Hr")) + .otherwise(pl.col("Refuel_Duration")).alias("Refuel_Duration")) + .sort("Node", "Locomotive_Type", "Fuel_Type", "Arrival_Time", "Locomotive_ID", descending = False, nulls_last = True)) + charger_type_breakouts = (loco_pool + .filter( + pl.col("Status") == "Refuel_Queue", + (pl.col("Refueling_Done_Time") >= current_time) | (pl.col("Refueling_Done_Time").is_null()) + ) + .partition_by(["Node","Locomotive_Type"]) + ) + charger_type_list = [] + for charger_type in charger_type_breakouts: + loco_ids = charger_type.get_column("Locomotive_ID") + arrival_times = charger_type.get_column("Arrival_Time") + refueling_done_times = charger_type.get_column("Refueling_Done_Time") + refueling_durations = charger_type.get_column("Refuel_Duration") + port_counts = charger_type.get_column("Port_Count") + for i in range(0, refueling_done_times.len()): + if refueling_done_times[i] is not None: continue + next_done = refueling_done_times.filter( + (refueling_done_times.is_not_null()) & + (refueling_done_times.rank(method='ordinal', descending = True).eq(port_counts[i]))) + if next_done.len() == 0: next_done = arrival_times[i] + else: next_done = max(next_done[0], arrival_times[i]) + refueling_done_times[i] = next_done + refueling_durations[i] + charger_type_list.append(pl.DataFrame([loco_ids, refueling_done_times])) + all_queues = 
pl.concat(charger_type_list, how="diagonal") + loco_pool = (loco_pool + .join(all_queues, on="Locomotive_ID", how="left", suffix="_right") + .with_columns(pl.when(pl.col("Refueling_Done_Time_right").is_not_null()) + .then(pl.col("Refueling_Done_Time_right")) + .otherwise(pl.col("Refueling_Done_Time")) + .alias("Refueling_Done_Time")) + .drop("Refueling_Done_Time_right")) + + # Remove locomotives that are done refueling from the refuel queue + refueling_finished = loco_pool.select( + (pl.col("Status") == "Refuel_Queue") & (pl.col("Refueling_Done_Time") <= current_time) + ).to_series() + refueling_finished_count = refueling_finished.sum() + if(refueling_finished_count > 0): + # Record the refueling event + new_rows = pl.DataFrame([ + np.concatenate([ + np.tile('Refueling_Start', refueling_finished_count), + np.tile('Refueling_End', refueling_finished_count)]), + np.concatenate([ + loco_pool.filter(refueling_finished).select(pl.col('Refueling_Done_Time') - pl.col("Refuel_Duration")).to_series(), + loco_pool.filter(refueling_finished).get_column('Refueling_Done_Time')]), + np.tile(loco_pool.filter(refueling_finished).get_column('Locomotive_ID'), 2)], + schema=event_tracker.columns, + orient="col") + event_tracker = pl.concat([event_tracker, new_rows]) + + loco_pool = loco_pool.with_columns( + pl.when(refueling_finished) + .then(pl.col("SOC_Target_J")) + .otherwise(pl.col('SOC_J')) + .alias("SOC_J"), + pl.when(refueling_finished) + .then(pl.lit(None)) + .otherwise(pl.col('Refueling_Done_Time')) + .alias("Refueling_Done_Time"), + pl.when(pl.lit(refueling_finished) & (pl.col("Servicing_Done_Time") <= current_time)) + .then(pl.lit("Ready")) + .when(pl.lit(refueling_finished) & (pl.col("Servicing_Done_Time") > current_time)) + .then(pl.lit("Servicing")) + .otherwise(pl.col('Status')) + .alias("Status")) + + servicing_finished = loco_pool.select( + (pl.col("Status") == "Servicing") & (pl.col("Servicing_Done_Time") <= current_time)).to_series() + if(servicing_finished.sum() > 
0): + loco_pool = loco_pool.with_columns( + pl.when(servicing_finished) + .then(pl.lit("Ready")) + .otherwise(pl.col('Status')) + .alias("Status"), + pl.when(servicing_finished) + .then(pl.lit(None)) + .otherwise(pl.col("Servicing_Done_Time")) + .alias("Servicing_Done_Time") + ) + return loco_pool.sort("Locomotive_ID"), event_tracker + +def run_train_planner( + rail_vehicles: List[alt.RailVehicle], + location_map: Dict[str, List[alt.Location]], + network: List[alt.Link], + loco_pool: pl.DataFrame, + refuelers: pl.DataFrame, + scenario_year: int, + train_type: alt.TrainType = alt.TrainType.Freight, + config: planner_config.TrainPlannerConfig = planner_config.TrainPlannerConfig(), + demand_file: Union[pl.DataFrame, Path, str] = defaults.DEMAND_FILE, + network_charging_guidelines: pl.DataFrame = None, +) -> Tuple[ + pl.DataFrame, + pl.DataFrame, + pl.DataFrame, + List[alt.SpeedLimitTrainSim], + List[alt.EstTimeNet] +]: + """ + Run the train planner + Arguments: + ---------- + rail_vehicles: + location_map: + network: + loco_pool: + refuelers: + config: Object storing train planner configuration paramaters + demand_file: + Outputs: + ---------- + """ + config.loco_info = data_prep.append_loco_info(config.loco_info) + demand, node_list = data_prep.load_freight_demand(demand_file, config) + + if config.return_demand_generators is None: + config.return_demand_generators = train_demand_generators.get_default_return_demand_generators() + + freight_type_to_car_type = {} + for rv in rail_vehicles: + if rv.freight_type in freight_type_to_car_type: + assert(f'More than one rail vehicle car type for freight type {rv.freight_type}') + else: + freight_type_to_car_type[rv.freight_type] = rv.car_type + + if config.single_train_mode: + demand = train_demand_generators.generate_demand_trains(demand, + demand_returns = pl.DataFrame(), + demand_rebalancing = pl.DataFrame(), + rail_vehicles = rail_vehicles, + config = config) + dispatch_schedule = (demand + .with_row_index(name="index") 
+ .with_columns(pl.col("index").mul(24.0).alias("Hour")) + .drop("index") + ) + else: + demand_returns = pl.DataFrame() + demand_rebalancing = pl.DataFrame() + if (config.dispatch_scheduler is None) and ("Hour" in demand.collect_schema()): + if "Number_of_Containers" in demand.collect_schema(): + demand = (demand + .group_by("Origin", "Destination", "Number_of_Days", "Train_Type") + .agg(pl.col("Number_of_Containers").sum()) + .with_columns(pl.col("Number_of_Containers").truediv(config.containers_per_car).ceil().alias("Number_of_Cars")) + ) + else: + demand = (demand + .group_by("Origin", "Destination", "Number_of_Days", "Train_Type") + .agg(pl.col("Number_of_Cars").sum()) + ) + if "Hour" not in demand.schema: + demand_returns = train_demand_generators.generate_return_demand(demand, config) + if demand.filter(pl.col("Train_Type").str.contains("Manifest")).height > 0: + demand_rebalancing = train_demand_generators.generate_manifest_rebalancing_demand(demand, node_list, config) + + if config.dispatch_scheduler is None: + demand = train_demand_generators.generate_demand_trains(demand, demand_returns, demand_rebalancing, rail_vehicles, freight_type_to_car_type, config) + config.dispatch_scheduler = schedulers.dispatch_uniform_demand_uniform_departure + + dispatch_schedule = config.dispatch_scheduler(demand, rail_vehicles, freight_type_to_car_type, config) + + if loco_pool is None: + loco_pool = data_prep.build_locopool(config=config, demand_file=demand, dispatch_schedule=dispatch_schedule) + + if refuelers is None: + refuelers = data_prep.build_refuelers( + node_list, + loco_pool, + config.refueler_info, + config.refuelers_per_incoming_corridor) + + if network_charging_guidelines is None: + network_charging_guidelines = pl.read_csv(alt.resources_root() / "networks" / "network_charging_guidelines.csv") + + refuelers, loco_pool = data_prep.append_charging_guidelines(refuelers, loco_pool, demand, network_charging_guidelines) + + final_departure = 
dispatch_schedule.get_column("Hour").max() + train_consist_plan = pl.DataFrame(schema= + {'Train_ID': pl.Int64, + 'Train_Type': pl.Utf8, + 'Locomotive_ID': pl.UInt32, + 'Locomotive_Type': pl.Categorical, + 'Origin_ID': pl.Utf8, + 'Destination_ID': pl.Utf8, + 'Cars_Loaded': pl.Float64, + 'Cars_Empty': pl.Float64, + 'Containers_Loaded': pl.Float64, + 'Containers_Empty': pl.Float64, + 'Departure_SOC_J': pl.Float64, + 'Departure_Time_Planned_Hr': pl.Float64, + 'Arrival_Time_Planned_Hr': pl.Float64}) + event_tracker = pl.DataFrame(schema=[ + ("Event_Type", pl.Utf8), + ("Time_Hr", pl.Float64), + ("Locomotive_ID", pl.UInt32)]) + + train_id_counter = 1 + speed_limit_train_sims = [] + est_time_nets = [] + + done = False + # start at first departure time + current_time = dispatch_schedule.get_column("Hour").min() + while not done: + # Dispatch new train consists + current_dispatches = dispatch_schedule.filter(pl.col("Hour") == current_time) + if(current_dispatches.height > 0): + loco_pool, event_tracker = update_refuel_queue(loco_pool, refuelers, current_time, event_tracker) + + for this_train in current_dispatches.iter_rows(named = True): + if this_train['Tons_Per_Train'] > 0: + train_id=str(train_id_counter) + if config.single_train_mode: + selected = loco_pool.select(pl.col("Locomotive_ID").is_not_null().alias("selected")).to_series() + dispatched = loco_pool + else: + selected = dispatch( + current_time, + this_train['Origin'], + loco_pool, + this_train['Tons_Per_Train'], + this_train['HP_Required'], + this_train['Cars_Loaded'] + this_train['Cars_Empty'], + config + ) + dispatched = loco_pool.filter(selected) + + if config.drag_coeff_function is not None: + cd_area_vec = config.drag_coeff_function( + int(this_train['Number_of_Cars']) + ) + else: + cd_area_vec = None + + rv_to_use, n_cars_by_type = data_prep.configure_rail_vehicles(this_train, rail_vehicles, freight_type_to_car_type) + + train_config = alt.TrainConfig( + rail_vehicles = rv_to_use, + n_cars_by_type = 
n_cars_by_type, + train_type = train_type, + cd_area_vec = cd_area_vec + ) + + loco_start_soc_j = dispatched.get_column("SOC_J") + dispatch_order = (dispatched.select( + pl.col('Locomotive_ID') + .rank().alias('rank').cast(pl.UInt32) + ).with_row_count().sort('row_nr')) + dispatched = dispatched.sort('Locomotive_ID') + loco_start_soc_pct = dispatched.select(pl.col('SOC_J') / pl.col('Capacity_J')).to_series() + locos = [ + config.loco_info[config.loco_info['Locomotive_Type']==loco_type]['Rust_Loco'].to_list()[0].clone() + for loco_type in dispatched.get_column('Locomotive_Type') + ] + [alt.set_param_from_path( + locos[i], + "res.state.soc", + loco_start_soc_pct[i] + ) for i in range(len(locos)) if dispatched.get_column('Fuel_Type')[i] == 'Electricity'] + + loco_con = alt.Consist( + loco_vec=locos, + save_interval=None, + ) + + init_train_state = alt.InitTrainState( + time_seconds=current_time * 3600 + ) + tsb = alt.TrainSimBuilder( + train_id=train_id, + origin_id=this_train['Origin'], + destination_id=this_train['Destination'], + train_config=train_config, + loco_con=loco_con, + init_train_state=init_train_state, + ) + + slts = tsb.make_speed_limit_train_sim( + location_map=location_map, + save_interval=None, + simulation_days=config.simulation_days, + scenario_year=scenario_year + ) + + (est_time_net, loco_con_out) = alt.make_est_times(slts, network) + travel_time = ( + est_time_net.get_running_time_hours() + * config.dispatch_scaling_dict["time_mult_factor"] + + config.dispatch_scaling_dict["hours_add"] + ) + + locos = loco_con_out.loco_vec.tolist() + energy_use_locos = [loco.res.state.energy_out_chemical_joules if loco.res else loco.fc.state.energy_fuel_joules if loco.fc else 0 for loco in locos] + energy_use_j = np.zeros(len(loco_pool)) + energy_use_j[selected] = [energy_use_locos[i-1] for i in dispatch_order.get_column('rank').to_list()] + energy_use_j *= config.dispatch_scaling_dict["energy_mult_factor"] + energy_use_j = pl.Series(energy_use_j) + 
speed_limit_train_sims.append(slts) + est_time_nets.append(est_time_net) + loco_pool = loco_pool.with_columns( + pl.when(selected) + .then(pl.lit(this_train['Destination'])) + .otherwise(pl.col('Node')).alias("Node"), + pl.when(selected) + .then(pl.lit(current_time + travel_time)) + .otherwise(pl.col('Arrival_Time')).alias("Arrival_Time"), + pl.when(selected) + .then(pl.lit(current_time + travel_time) + pl.col('Min_Servicing_Time_Hr')) + .otherwise(pl.col('Servicing_Done_Time')).alias("Servicing_Done_Time"), + pl.when(selected) + .then(None) + .otherwise(pl.col('Refueling_Done_Time')).alias("Refueling_Done_Time"), + pl.when(selected) + .then(pl.lit("Dispatched")) + .otherwise(pl.col('Status')).alias("Status"), + pl.when(selected) + .then(pl.max_horizontal( + pl.col('SOC_Min_J'), + pl.min_horizontal( + pl.col('SOC_J') - pl.lit(energy_use_j), + pl.col('SOC_Max_J')))) + .otherwise(pl.col('SOC_J')).alias("SOC_J") + ) + + # Populate the output dataframe with the dispatched trains + new_row_count = selected.sum() + new_rows = pl.DataFrame([ + pl.Series(repeat(train_id_counter, new_row_count)), + pl.Series(repeat(this_train['Train_Type'], new_row_count)), + loco_pool.filter(selected).get_column('Locomotive_ID'), + loco_pool.filter(selected).get_column('Locomotive_Type'), + pl.Series(repeat(this_train['Origin'], new_row_count)), + pl.Series(repeat(this_train['Destination'], new_row_count)), + pl.Series(repeat(this_train['Cars_Loaded'], new_row_count)), + pl.Series(repeat(this_train['Cars_Empty'], new_row_count)), + pl.Series(repeat(this_train['Containers_Loaded'], new_row_count)), + pl.Series(repeat(this_train['Containers_Empty'], new_row_count)), + loco_start_soc_j, + pl.Series(repeat(current_time, new_row_count)), + pl.Series(repeat(current_time + travel_time, new_row_count))], + schema = train_consist_plan.columns, + orient="col") + train_consist_plan = pl.concat([train_consist_plan, new_rows], how="diagonal_relaxed") + train_id_counter += 1 + + if current_time >= 
final_departure: + current_time = float("inf") + loco_pool, event_tracker = update_refuel_queue(loco_pool, refuelers, current_time, event_tracker) + done = True + else: + current_time = dispatch_schedule.filter(pl.col("Hour").gt(current_time)).get_column("Hour").min() + + train_consist_plan = (train_consist_plan + .with_columns( + cs.categorical().cast(str), + pl.col("Train_ID", "Locomotive_ID").cast(pl.UInt32) + ) + .sort(["Locomotive_ID", "Train_ID"], descending=False) + ) + loco_pool = loco_pool.with_columns(cs.categorical().cast(str)) + refuelers = refuelers.with_columns(cs.categorical().cast(str)) + + event_tracker = event_tracker.sort(["Locomotive_ID","Time_Hr","Event_Type"]) + service_starts = (event_tracker + .filter(pl.col("Event_Type") == "Refueling_Start") + .get_column("Time_Hr") + .rename("Refuel_Start_Time_Planned_Hr")) + service_ends = (event_tracker + .filter(pl.col("Event_Type") == "Refueling_End") + .get_column("Time_Hr") + .rename("Refuel_End_Time_Planned_Hr")) + + train_consist_plan = train_consist_plan.with_columns( + service_starts, service_ends + ) + + return train_consist_plan, loco_pool, refuelers, speed_limit_train_sims, est_time_nets + + +if __name__ == "__main__": + + rail_vehicles=[alt.RailVehicle.from_file(vehicle_file) + for vehicle_file in Path(alt.resources_root() / "rolling_stock/").glob('*.yaml')] + + location_map = alt.import_locations( + str(alt.resources_root() / "networks/default_locations.csv") + ) + network = alt.Network.from_file( + str(alt.resources_root() / "networks/Taconite-NoBalloon.yaml") + ) + config = planner_config.TrainPlannerConfig() + config.simulation_days=defaults.SIMULATION_DAYS + 2 * defaults.WARM_START_DAYS + loco_pool = data_prep.build_locopool(config, defaults.DEMAND_FILE) + demand, node_list = data_prep.load_freight_demand(defaults.DEMAND_FILE) + refuelers = data_prep.build_refuelers( + node_list, + loco_pool, + config.refueler_info, + config.refuelers_per_incoming_corridor) + + output = 
run_train_planner( + rail_vehicles=rail_vehicles, + location_map=location_map, + network=network, + loco_pool=loco_pool, + refuelers=refuelers, + scenario_year=defaults.BASE_ANALYSIS_YEAR, + config=config) diff --git a/python/altrios/train_planner/planner_config.py b/python/altrios/train_planner/planner_config.py new file mode 100644 index 00000000..679047fd --- /dev/null +++ b/python/altrios/train_planner/planner_config.py @@ -0,0 +1,102 @@ +from __future__ import annotations +import polars as pl +import pandas as pd +from typing import Dict, Callable, Optional +from dataclasses import dataclass, field +import altrios as alt +from altrios import defaults + +pl.enable_string_cache() + +@dataclass +class TrainPlannerConfig: + """ + Dataclass class for train planner configuration parameters. + + Attributes: + ---------- + - `single_train_mode`: `True` to only run one round-trip train and schedule its charging; `False` to plan train consists + - `min_cars_per_train`: `Dict` of the minimum length in number of cars to form a train for each train type + - `target_cars_per_train`: `Dict` of the standard train length in number of cars for each train type + - `manifest_empty_return_ratio`: Desired railcar reuse ratio to calculate the empty manifest car demand, (E_ij+E_ji)/(L_ij+L_ji) + - `cars_per_locomotive`: Heuristic scaling factor used to size number of locomotives needed based on demand. + - `cars_per_locomotive_fixed`: If `True`, `cars_per_locomotive` overrides `hp_per_ton` calculations used for dispatching decisions. + - `refuelers_per_incoming_corridor`: Heuristic scaling factor used to scale number of refuelers needed at each node based on number of incoming corridors. + - `containers_per_car`: Containers stacked on each car (applicable only for intermodal containers) + - `require_diesel`: `True` to require each consist to have at least one diesel locomotive. 
+ - `manifest_empty_return_ratio`: `Dict` + - `drag_coeff_function`: `Dict` + - `hp_required_per_ton`: `Dict` + - `dispatch_scaling_dict`: `Dict` + - `loco_info`: `Dict` + - `refueler_info`: `Dict` + - `return_demand_generators`: `Dict` + """ + simulation_days: int = 21 + single_train_mode: bool = False + min_cars_per_train: Dict = field(default_factory = lambda: { + "Default": 60 + }) + target_cars_per_train: Dict = field(default_factory = lambda: { + "Default": 180 + }) + cars_per_locomotive: Dict = field(default_factory = lambda: { + "Default": 70 + }) + cars_per_locomotive_fixed: bool = False + refuelers_per_incoming_corridor: int = 4 + containers_per_car: int = 2 + require_diesel: bool = False + manifest_empty_return_ratio: float = 0.6 + loco_pool_safety_factor: float = 1.1 + hp_required_per_ton: Dict = field(default_factory = lambda: { + "Default": { + "Unit": 2.0, + "Manifest": 1.5, + "Intermodal": 4.0 + } + }) + dispatch_scaling_dict: Dict = field(default_factory = lambda: { + "time_mult_factor": 1.4, + "hours_add": 2, + "energy_mult_factor": 1.25 + }) + loco_info: pd.DataFrame = field(default_factory = lambda: pd.DataFrame({ + "Diesel_Large": { + "Capacity_Cars": 20, + "Fuel_Type": "Diesel", + "Min_Servicing_Time_Hr": 3.0, + "Rust_Loco": alt.Locomotive.default(), + "Cost_USD": defaults.DIESEL_LOCO_COST_USD, + "Lifespan_Years": defaults.LOCO_LIFESPAN + }, + "BEL": { + "Capacity_Cars": 20, + "Fuel_Type": "Electricity", + "Min_Servicing_Time_Hr": 3.0, + "Rust_Loco": alt.Locomotive.default_battery_electric_loco(), + "Cost_USD": defaults.BEL_MINUS_BATTERY_COST_USD, + "Lifespan_Years": defaults.LOCO_LIFESPAN + } + }).transpose().reset_index(names='Locomotive_Type')) + refueler_info: pd.DataFrame = field(default_factory = lambda: pd.DataFrame({ + "Diesel_Fueler": { + "Locomotive_Type": "Diesel_Large", + "Fuel_Type": "Diesel", + "Refueler_J_Per_Hr": defaults.DIESEL_REFUEL_RATE_J_PER_HR, + "Refueler_Efficiency": defaults.DIESEL_REFUELER_EFFICIENCY, + "Cost_USD": 
defaults.DIESEL_REFUELER_COST_USD, + "Lifespan_Years": defaults.LOCO_LIFESPAN + }, + "BEL_Charger": { + "Locomotive_Type": "BEL", + "Fuel_Type": "Electricity", + "Refueler_J_Per_Hr": defaults.BEL_CHARGE_RATE_J_PER_HR, + "Refueler_Efficiency": defaults.BEL_CHARGER_EFFICIENCY, + "Cost_USD": defaults.BEL_CHARGER_COST_USD, + "Lifespan_Years": defaults.LOCO_LIFESPAN + } + }).transpose().reset_index(names='Refueler_Type')) + drag_coeff_function: Optional[Callable]= None + dispatch_scheduler: Optional[Callable] = None + return_demand_generators: Optional[Dict] = None #default defined in train_demand_generators.py diff --git a/python/altrios/train_planner/schedulers.py b/python/altrios/train_planner/schedulers.py new file mode 100644 index 00000000..2d1d1e1f --- /dev/null +++ b/python/altrios/train_planner/schedulers.py @@ -0,0 +1,682 @@ +import math +from typing import List, Union, Dict +from collections import defaultdict +import polars as pl +import polars.selectors as cs +import altrios as alt +from altrios import utilities +from altrios.train_planner import planner_config, data_prep, train_demand_generators + +def calculate_waiting_time_single_dispatch( + cumulative_demand_control: int, + last_dispatch: int, + demand_hourly: pl.DataFrame, + dispatch_hour: int, + remaining_demand_list: pl.DataFrame, + remaining_demand_list_control: pl.DataFrame, + search_range: int, + od_pair_loop: str, + min_num_cars_per_train: int, + target_num_cars_per_train: int, + config: planner_config.TrainPlannerConfig +) -> tuple: + """ + Calculate the waiting time for a single dispatch using Polars DataFrames. 
+ """ + # Initialize variables + direction_demand = demand_hourly.filter(pl.col("OD_Pair") == od_pair_loop) + remaining_demand = 0 + total_waiting_time = 0 + total_waiting_time_before_dispatch = 0 + dispatched = 0 + cumulative_demand = cumulative_demand_control + remaining_demand_list = remaining_demand_list_control.clone() #Copy to avoid modifying original + remaining_demand_tem = [] + # Calculate cumulative demand up to the dispatch hour + end_hour = min(dispatch_hour + 1, direction_demand.get_column("Hour").max()) + hourly_demand = direction_demand.slice(last_dispatch, end_hour - last_dispatch) + cumulative_demand += hourly_demand["Number_of_Cars"].sum() + + if remaining_demand_list.is_empty(): + hourly_demand = hourly_demand.with_columns( + ((pl.col("Number_of_Cars") * (dispatch_hour - pl.col("Hour"))).alias("Waiting_Time")) + ) + total_waiting_time_before_dispatch = hourly_demand["Waiting_Time"].sum() + total_waiting_time += hourly_demand["Waiting_Time"].sum() + + # If there is remaining demand, calculate waiting time for new and remaining demand + else: + hourly_demand = hourly_demand.with_columns( + ((pl.col("Number_of_Cars") * (dispatch_hour - pl.col("Hour"))).alias("Waiting_Time")) + ) + total_waiting_time_before_dispatch = hourly_demand["Waiting_Time"].sum() + total_waiting_time += hourly_demand["Waiting_Time"].sum() + + # Calculate waiting time for each entry in the remaining demand list + remaining_waiting_times = remaining_demand_list.with_columns( + (pl.col("Remaining_Demand") * (dispatch_hour - pl.col("Hour"))).alias("Remaining_Waiting_Time") + ) + total_waiting_time_before_dispatch += remaining_waiting_times["Remaining_Waiting_Time"].sum() + total_waiting_time += remaining_waiting_times["Remaining_Waiting_Time"].sum() + + + # Handle remaining demands if cumulative demand exceeds thresholds + if cumulative_demand >= min_num_cars_per_train: + if cumulative_demand >= target_num_cars_per_train: + dispatched = target_num_cars_per_train + dispatched_split 
= target_num_cars_per_train + remaining_demand = cumulative_demand - dispatched_split # Carry over remaining demand + # Update remaining demand list if there's no prior remaining demand + if remaining_demand_list.height == 0: + #remaining_demand_tem = [] + for row in hourly_demand.iter_rows(named=True): + # Number_of_Containers is located at the 4th column + if row['Number_of_Cars'] > 0: + if dispatched_split >= row['Number_of_Cars']: + dispatched_split -= row['Number_of_Cars'] + else: + remaining_demand_for_hour = row['Number_of_Cars'] - dispatched_split + # Hour is located at the 5th column + remaining_demand_tem.append((remaining_demand_for_hour,row['Hour'])) + # dispatched_split stop working from this hour to the end of this loop behavior + dispatched_split = 0 + # Filter `remaining_demand_tem` to include only positive Remaining_Demand values + filtered_remaining_demand_tem = [(rd[0], rd[1]) for rd in remaining_demand_tem if rd[0] > 0] + + # Construct the DataFrame directly from the filtered list + remaining_demand_list = pl.DataFrame({ + "Remaining_Demand": [rd[0] for rd in filtered_remaining_demand_tem], + "Hour": [rd[1] for rd in filtered_remaining_demand_tem] + }) + #remaining_demand_list = pl.DataFrame({"Remaining_Demand": [rd[0] for rd in remaining_demand_tem],"Hour": [rd[1] for rd in remaining_demand_tem]}).filter(pl.col("Remaining_Demand") > 0) + cumulative_demand = remaining_demand + else: + # Prepare the cumulative transformation approach + dispatched_split -= min(dispatched_split, remaining_demand_list.get_column("Remaining_Demand").sum()) + # If there is still dispatched capacity left, apply it to new demand within the range + if dispatched_split > 0: + for row in hourly_demand.iter_rows(named=True): + if row['Number_of_Cars'] > 0: + if dispatched_split >= row['Number_of_Cars']: + dispatched_split -= row['Number_of_Cars'] + else: + remaining_demand_for_hour = row['Number_of_Cars'] - dispatched_split + 
remaining_demand_tem.append((remaining_demand_for_hour,row['Hour'])) + dispatched_split = 0 + remaining_demand_list = pl.DataFrame({"Remaining_Demand": [rd[0] for rd in remaining_demand_tem],"Hour": [rd[1] for rd in remaining_demand_tem]}).filter(pl.col("Remaining_Demand") > 0) + cumulative_demand = remaining_demand + else: + dispatched = cumulative_demand + cumulative_demand = 0 # Reset cumulative demand if all is dispatched + + # Accumulate demand if below minimum threshold + else: + cumulative_demand = cumulative_demand + + # Filter demand for future hours in the specified search range + future_demand = direction_demand.filter( + (pl.col("Hour") > dispatch_hour) & (pl.col("Hour") < min(last_dispatch + search_range, direction_demand.get_column("Hour").max())) + ) + + # Calculate waiting time for each future hour + future_demand = future_demand.with_columns( + ((last_dispatch + search_range - 1 - pl.col("Hour")) * pl.col("Number_of_Cars")).alias("Waiting_Time") + ) + + # Sum up all waiting times for future demand + total_waiting_time += future_demand["Waiting_Time"].sum() + + # Calculate waiting time for remaining demand from previous hours, if any + if not remaining_demand_list.is_empty(): + # Add waiting time for remaining demand entries + remaining_waiting = remaining_demand_list.with_columns( + ((last_dispatch + search_range - 1 - pl.col("Hour")) * pl.col("Remaining_Demand")).alias("Remaining_Waiting_Time") + ) + + # Sum up the waiting times from remaining demand list + total_waiting_time += remaining_waiting["Remaining_Waiting_Time"].sum() +# Return results + return total_waiting_time_before_dispatch, total_waiting_time, remaining_demand_list, cumulative_demand, dispatched + +def find_minimum_waiting_time( + num_iterations: int, + demand_hourly: pl.DataFrame, + border_time_list: list, + min_num_cars_per_train: int, + target_num_cars_per_train: int, + config: planner_config.TrainPlannerConfig +) -> pl.DataFrame: + """ + Find the minimum waiting time for 
dispatches using Polars DataFrame. + """ + group_cols = ["Origin", "Destination", "OD_Pair", "Train_Type"] + new_accumulated_carloads = get_new_accumulated_carloads(demand_hourly, group_cols, containers_per_car = config.containers_per_car).rename({"New_Carloads": "Number_of_Cars"}) + demand_hourly = (demand_hourly + .join(new_accumulated_carloads, how="left", on=["Origin", "Destination", "Train_Type", "Hour"]) + .with_columns(pl.col("Number_of_Cars").fill_null(0.0)) + .drop(cs.ends_with("_right") | cs.by_name("Number_of_Containers")) + ) + for i in range(len(border_time_list)): + od_pair_loop = border_time_list[i][0] + reverse_pair = "-".join(od_pair_loop.split("-")[::-1]) + directional_total_cars = new_accumulated_carloads.filter(pl.col("OD_Pair") == od_pair_loop)["Number_of_Cars"].sum() + reverse_total_cars = new_accumulated_carloads.filter(pl.col("OD_Pair") == reverse_pair)["Number_of_Cars"].sum() + if directional_total_cars > reverse_total_cars: + empty_cars = directional_total_cars - reverse_total_cars + empty_cars_o_d = reverse_pair + else: + empty_cars = reverse_total_cars - directional_total_cars + empty_cars_o_d = od_pair_loop + print(f"total cars for {od_pair_loop} is {directional_total_cars}") + print(f"reverse_total_cars for {reverse_pair} is {reverse_total_cars}") + #print(f"empty_containers for {empty_cars_o_d} is {empty_containers}") + print(f"empty_containers for {empty_cars_o_d} is {empty_cars}") + print(f"There are {len(border_time_list[0])-1} trains to dispatch") + final_dispatch_rows = [] + for j in range(len(border_time_list)): + start_hour = 0 + total_dispatched = 0 + dispatch_time = [] + waiting_time_total = 0 + waiting_time_total_before_dispatch = 0 + cumulative_demand_control = 0 + last_dispatch = 0 + remaining_demand_list_control = pl.DataFrame({ + "Remaining_Demand": pl.Series([], dtype=pl.Int64), + "Hour": pl.Series([], dtype=pl.Float64), + }) + dispatched_list = [] + #print(f"border_time_list[j][0] is {border_time_list[j][0]}") + 
od_pair_loop = border_time_list[j][0] + #print(f"od_pair_loop is {od_pair_loop}") + origin, destination = border_time_list[j][0].split('-') + #print(f"origin is {origin}") + #print(f"destination is {destination}") + #print(demand) + total_cars = new_accumulated_carloads.filter(pl.col("OD_Pair") == od_pair_loop)["Number_of_Cars"].sum() + for i in range(2, num_iterations): + if total_cars - total_dispatched == 0: + dispatched_list.append(0.0) + search_range = border_time_list[j][i] - start_hour + # DataFrame to accumulate dispatch hour info + total_waiting_time_demand_list = pl.DataFrame({ + "Dispatch_Hour": pl.Series([], dtype=pl.Int64), + "Waiting_Before_Dispatch": pl.Series([], dtype=pl.Float64), + "Total_Waiting": pl.Series([], dtype=pl.Float64), + "Remaining_Demand_List": pl.Series([], dtype=pl.Object), + "Cumulative_Demand": pl.Series([], dtype=pl.Float64), + "Dispatched": pl.Series([], dtype=pl.Float64) + }) + for dispatch_hour in range(start_hour, start_hour + search_range): + total_waiting_time_before_dispatch, total_waiting_time, remaining_demand_list, cumulative_demand, dispatched = calculate_waiting_time_single_dispatch( + cumulative_demand_control, last_dispatch, demand_hourly, dispatch_hour, remaining_demand_list_control, remaining_demand_list_control.clone(), search_range,od_pair_loop,min_num_cars_per_train, target_num_cars_per_train, config + ) + # Append data for each dispatch hour, ensuring consistent types + new_row = pl.DataFrame({ + "Dispatch_Hour": [dispatch_hour], + "Waiting_Before_Dispatch": [float(total_waiting_time_before_dispatch)], + "Total_Waiting": [float(total_waiting_time)], + "Remaining_Demand_List": [remaining_demand_list], + "Cumulative_Demand": [float(cumulative_demand)], + "Dispatched": [float(dispatched)] + }) + total_waiting_time_demand_list = total_waiting_time_demand_list.vstack(new_row) + + # Find the row with the minimum "Total_Waiting" + min_waiting_row = total_waiting_time_demand_list.sort("Total_Waiting").head(1) + 
min_waiting_time_hour = min_waiting_row[0, "Dispatch_Hour"] + min_waiting_time_before_dispatch = min_waiting_row[0, "Waiting_Before_Dispatch"] + min_waiting_time = min_waiting_row[0, "Total_Waiting"] + remaining_demand_list = min_waiting_row[0, "Remaining_Demand_List"] + cumulative_demand = min_waiting_row[0, "Cumulative_Demand"] + dispatched = min_waiting_row[0, "Dispatched"] + + + # Track dispatched containers for each dispatch hour + dispatched_list.append(dispatched) + # Reset remaining demand if cumulative demand is zero + if cumulative_demand == 0: + remaining_demand_list = pl.DataFrame(schema=["Remaining_Demand_List"]) + + # Update control values for the next iteration + remaining_demand_list_control = remaining_demand_list + cumulative_demand_control = cumulative_demand + last_dispatch = min_waiting_time_hour + 1 + start_hour = min_waiting_time_hour + 1 + total_dispatched += dispatched + waiting_time_total += min_waiting_time + + # Accumulate total waiting time for before dispatch and overall + if i == num_iterations - 1: + waiting_time_total_before_dispatch += min_waiting_time + else: + waiting_time_total_before_dispatch += min_waiting_time_before_dispatch + + # Add the dispatch hour to the list + dispatch_time.append(min_waiting_time_hour) + + remaining_to_dispatch = total_cars - total_dispatched + final_waiting_time = remaining_to_dispatch * (demand_hourly.get_column("Hour").max()+1 - start_hour) + waiting_time_total += final_waiting_time + dispatch_time.append(demand_hourly.get_column("Hour").max()) # Assuming final dispatch at the end of the period + dispatched_list.append(remaining_to_dispatch) + dispatch_df_row = [] + for i in range(len(dispatched_list)): + dispatch_df_row.append({ + "Origin": origin, + "Destination": destination, + "Train_Type": "Intermodal", + "Cars_Per_Train_Loaded": dispatched_list[i], + "Cars_Per_Train_Empty": 0.0, + "Target_Cars_Per_Train": float(target_num_cars_per_train), + "Number_of_Cars_Total": dispatched_list[i], + 
"Hour":float(dispatch_time[i]) + }) + if od_pair_loop == empty_cars_o_d: + dispatch_df_row = [] + for i in range(len(dispatched_list)): + dispatch_df_row.append({ + "Origin": origin, + "Destination": destination, + "Train_Type": "Intermodal", + "Cars_Per_Train_Loaded": dispatched_list[i], + "Cars_Per_Train_Empty": 0.0, + "Target_Cars_Per_Train": float(target_num_cars_per_train), + "Number_of_Cars_Total": dispatched_list[i], + "Hour":float(dispatch_time[i]) + }) + #print(f"dispatch_df_row is {dispatch_df_row}") + final_dispatch_rows.extend(dispatch_df_row) + #print(f"final_dispatch_rows is {final_dispatch_rows}") + dispatch_times = pl.DataFrame(final_dispatch_rows) + dispatch_times = dispatch_times.sort("Hour") + return dispatch_times + +def formatScheduleColumns( + df: Union[pl.DataFrame, pl.LazyFrame], + config: planner_config.TrainPlannerConfig +) -> Union[pl.DataFrame, pl.LazyFrame]: + return (df + .with_columns( + (pl.col("Tons_Per_Car_Loaded").mul("Number_of_Cars_Loaded") + pl.col("Tons_Per_Car_Empty").mul("Number_of_Cars_Empty")).alias("Tons_Per_Train"), + (pl.col("HP_Required_Per_Ton_Loaded").mul("Tons_Per_Car_Loaded").mul("Number_of_Cars_Loaded") + + pl.col("HP_Required_Per_Ton_Empty").mul("Tons_Per_Car_Empty").mul("Number_of_Cars_Empty") + ).alias("HP_Required"), + pl.when(pl.col("Train_Type").str.contains("Intermodal")) + .then(pl.col("Number_of_Cars_Loaded").mul(config.containers_per_car)) + .otherwise(0) + .alias("Containers_Loaded"), + pl.when(pl.col("Train_Type").str.contains("Intermodal")) + .then(pl.col("Number_of_Cars_Empty").mul(config.containers_per_car)) + .otherwise(0) + .alias("Containers_Empty"), + ) + .select("Hour", "Origin", "Destination", "Train_Type", + "Number_of_Cars", "Number_of_Cars_Loaded", "Number_of_Cars_Empty", "Containers_Empty", "Containers_Loaded", + "Tons_Per_Train", "HP_Required" + ) + .rename({"Number_of_Cars_Loaded": "Cars_Loaded", + "Number_of_Cars_Empty": "Cars_Empty"}) + 
.sort(["Hour","Origin","Destination","Train_Type"]) + ) + +def get_new_accumulated_carloads( + demand_hourly: Union[pl.DataFrame, pl.LazyFrame], + group_cols: List[str], + containers_per_car: int, + filter_zero: bool = True +) -> Union[pl.DataFrame, pl.LazyFrame]: + if group_cols is None: + df = (demand_hourly + .sort("Hour") + .with_columns(pl.col("Number_of_Containers").cum_sum().floordiv(containers_per_car).alias("Accumulated_Carloads")) + .with_columns((pl.col("Accumulated_Carloads") - pl.col("Accumulated_Carloads").shift(1).fill_null(0.0)).alias("New_Carloads")) + .select("Hour", "New_Carloads") + ) + else: + df = (demand_hourly + .sort(group_cols + ["Hour"]) + .with_columns(pl.col("Number_of_Containers").cum_sum().over(group_cols).floordiv(containers_per_car).alias("Accumulated_Carloads")) + .with_columns((pl.col("Accumulated_Carloads") - pl.col("Accumulated_Carloads").shift(1).over(group_cols).fill_null(0.0)).alias("New_Carloads")) + .select(group_cols + ["Hour", "New_Carloads"]) + ) + if filter_zero: + df = (df + .filter(pl.col("New_Carloads") > 0) + ) + return df + + +def calculate_dispatch_data( + total_containers, + target_num_cars_per_train, + od_pair, + demand_hourly, + max_min_trains, + containers_per_car): + remaining_containers = total_containers % (target_num_cars_per_train * containers_per_car) + num_trains = ( + total_containers // (target_num_cars_per_train * containers_per_car) + (1 if remaining_containers > 0 else 0) + ) + num_trains = int(max(num_trains, max_min_trains)) + + new_accumulated_carloads = get_new_accumulated_carloads(demand_hourly, group_cols = None, containers_per_car = containers_per_car) + + planned_train_lengths = ( + pl.DataFrame({ + "Group": [1] * num_trains, #Unused, just needed for allocateIntegerEvenly to work, + "Train_ID": list(range(num_trains)), + "Cars": [total_containers / containers_per_car] * num_trains + }) + # Divide containers into trains as evenly as possible + .pipe(utilities.allocateIntegerEvenly, 
target="Cars", grouping_vars = ["Group"]) + .sort("Train_ID") + .get_column("Cars") + .to_list() + ) + + train_dispatch_times = [] + dispatched_train_lengths = [] + accumulated_demand = 0 + p = 0 + for arrival in new_accumulated_carloads.iter_rows(named=True): + accumulated_demand += arrival['New_Carloads'] + if p == len(planned_train_lengths) - 1: + train_dispatch_times.append(new_accumulated_carloads.get_column("Hour").max()) + break + if accumulated_demand >= planned_train_lengths[p]: + train_dispatch_times.append(arrival['Hour']) + dispatched_train_lengths.append(accumulated_demand) + total_containers -= (accumulated_demand * containers_per_car) + accumulated_demand = 0 + p += 1 + if p >= len(planned_train_lengths): + break + + if len(train_dispatch_times) < len(planned_train_lengths): + for _ in range(len(planned_train_lengths) - len(train_dispatch_times)): + train_dispatch_times.append(train_dispatch_times[-1]+_+1) + dispatched_train_lengths.append(total_containers / containers_per_car) + if len(dispatched_train_lengths) < len(planned_train_lengths): + for _ in range(len(planned_train_lengths) - len(dispatched_train_lengths)): + dispatched_train_lengths.append(0) + + return ( + pl.DataFrame({ + "Dispatch_Time": train_dispatch_times, + "Number_of_Cars_Planned": planned_train_lengths, + "Number_of_Cars_Dispatched": dispatched_train_lengths + }) + .with_columns( + (pl.col("Number_of_Cars_Planned") * containers_per_car).alias("Number_of_Containers_Planned"), + (pl.col("Number_of_Cars_Dispatched") * containers_per_car).alias("Number_of_Containers_Dispatched") + ) + ) + +# Define the main function to generate demand trains with the updated rule +def generate_trains_deterministic_hourly( + demand_hourly: pl.DataFrame, + target_num_cars_per_train: int, + containers_per_car: int +) -> pl.DataFrame: + grouped_data = (demand_hourly + .group_by("Origin", "Destination", "OD_Pair") + .agg( + pl.col("Number_of_Containers").sum().alias("Total_Containers"), + 
pl.col("Number_of_Containers").sum().mod(target_num_cars_per_train * containers_per_car).alias("Remaining_Containers") + ) + .with_columns( + pl.col("Total_Containers").floordiv(target_num_cars_per_train * containers_per_car).add(pl.col("Remaining_Containers").gt(0)).alias("Min_Trains") + ) + .with_columns( + pl.col("Min_Trains").max().over(pl.concat_str( + pl.min_horizontal(pl.col("Origin","Destination")), + pl.lit("_"), + pl.max_horizontal(pl.col("Origin","Destination"))) + ).alias("Max_Min_Trains") + ) + ) + + # Prepare a list to collect the results for all OD pairs + all_dispatch_data = [] + # Step 4: Loop through each unique OD pair to calculate dispatch data + for row in grouped_data.iter_rows(named=True): + # Calculate dispatch data for the current OD pair with the updated rule + dispatch_data = calculate_dispatch_data( + total_containers = row['Total_Containers'], + target_num_cars_per_train = target_num_cars_per_train, + od_pair = row['OD_Pair'], + demand_hourly = demand_hourly.filter(pl.col("OD_Pair") == row['OD_Pair']).sort("Hour"), + max_min_trains =row['Max_Min_Trains'], + containers_per_car = containers_per_car) + # Append the result to the list + all_dispatch_data.append( + dispatch_data.with_columns( + pl.lit(row['Origin']).alias("Origin"), + pl.lit(row['Destination']).alias("Destination"), + pl.lit(row['OD_Pair']).alias("OD_Pair") + ) + ) + + return pl.concat(all_dispatch_data, how="diagonal_relaxed") + +def dispatch_hourly_demand_optimized_departure( + demand_hourly: pl.DataFrame, + rail_vehicles: List[alt.RailVehicle], + freight_type_to_car_type: Dict[str, str], + config: planner_config.TrainPlannerConfig +) -> pl.DataFrame: + """ + Converts a table of demand into a dispatch plan where trains depart from each origin in uniformly spaced intervals. + Arguments: + ---------- + demand: `DataFrame` or `LazyFrame` representing origin-destination demands (number of trains). + rail_vehicles: List of `altrios.RailVehicle` objects. 
+ config: `TrainPlannerConfig` object. + Outputs: + ---------- + Updated demand `DataFrame` or `LazyFrame` representing dispatches, each defined with an origin, destination, train type, number of (loaded and empty) cars, tonnage, and HP per ton requirement. + """ + min_num_cars_per_train=config.min_cars_per_train['Intermodal_Loaded'] #TODO make this flexible + target_num_cars_per_train=config.target_cars_per_train['Intermodal_Loaded'] #TODO make this flexible + demand_hourly = demand_hourly.with_columns((pl.col("Origin") + "-" + pl.col("Destination")).alias("OD_Pair")) + dispatch_df = generate_trains_deterministic_hourly(demand_hourly,target_num_cars_per_train, config.containers_per_car) + od_dispatch_times = [] + dispatch_times = dispatch_df["Dispatch_Time"].to_list() + od_pair_list = dispatch_df["OD_Pair"].to_list() + for i in range(len(od_pair_list)): + od_border_list_sub =[] + od_border_list_sub.append(od_pair_list[i]) + od_border_list_sub.append(dispatch_times[i]) + od_dispatch_times.append(od_border_list_sub) + grouped_data = defaultdict(list) + for od_pair, value in od_dispatch_times: + grouped_data[od_pair].append(value) + border_time_list= [[key] + values for key, values in grouped_data.items()] + num_iterations = len(border_time_list[0]) + + schedule = find_minimum_waiting_time(num_iterations=num_iterations, + demand_hourly=demand_hourly, + border_time_list=border_time_list, + min_num_cars_per_train=min_num_cars_per_train, + target_num_cars_per_train=target_num_cars_per_train, + config=config + ) + return (schedule + #TODO: this doesn't handle tons correctly for train type empty + .pipe(data_prep.appendTonsAndHP, rail_vehicles, freight_type_to_car_type, config) + .rename({"Cars_Per_Train_Loaded": "Cars_Loaded", + "Cars_Per_Train_Empty": "Cars_Empty"}) + .with_columns( + (pl.col("Cars_Loaded") + pl.col("Cars_Empty")).alias("Number_of_Cars"), + pl.col("Tons_Per_Car").mul("Cars_Loaded").alias("Tons_Per_Train"), + 
pl.col("Tons_Per_Car").mul("Cars_Loaded").mul("HP_Required_Per_Ton").alias("HP_Required"), + pl.when(pl.col("Train_Type").str.contains("Intermodal")) + .then(pl.col("Cars_Loaded").mul(config.containers_per_car)) + .otherwise(0) + .alias("Containers_Loaded"), + pl.when(pl.col("Train_Type").str.contains("Intermodal")) + .then(pl.col("Cars_Empty").mul(config.containers_per_car)) + .otherwise(0) + .alias("Containers_Empty"), + ) + .select("Hour", "Origin", "Destination", "Train_Type", + "Number_of_Cars", "Cars_Loaded", "Cars_Empty", "Containers_Empty", "Containers_Loaded", + "Tons_Per_Train", "HP_Required" + ) + .sort(["Hour","Origin","Destination","Train_Type"]) + ) + +def dispatch_uniform_demand_uniform_departure( + demand: pl.DataFrame, + rail_vehicles: List[alt.RailVehicle], + freight_type_to_car_type: Dict[str, str], + config: planner_config.TrainPlannerConfig +) -> pl.DataFrame: + """ + Generate a tabulated demand pair to indicate the expected dispatching interval + and actual dispatching timesteps after rounding, with departures from each terminal + spaced as evenly as possible + Arguments: + ---------- + demand: `DataFrame` or `LazyFrame` representing origin-destination demands (number of trains). + rail_vehicles: List of `altrios.RailVehicle` objects. + config: `TrainPlannerConfig` object. 
+ Outputs: + ---------- + schedule: Tabulated dispatching time for each demand pair for each train type + in hours + """ + hours = config.simulation_days * 24 + grouping_vars = ["Origin", "Destination", "Train_Type"] + return (demand + .select(pl.exclude("Number_of_Trains").repeat_by("Number_of_Trains").explode()) + .pipe(utilities.allocateIntegerEvenly, target = "Number_of_Cars_Loaded", grouping_vars = grouping_vars) + .drop("Percent_Within_Group_Cumulative") + .pipe(utilities.allocateIntegerEvenly, target = "Number_of_Cars_Empty", grouping_vars = grouping_vars) + .drop("Percent_Within_Group_Cumulative") + .group_by(pl.exclude("Number_of_Cars_Empty", "Number_of_Cars_Loaded")) + .agg(pl.col("Number_of_Cars_Empty", "Number_of_Cars_Loaded")) + .with_columns(pl.col("Number_of_Cars_Loaded").list.sort(descending=True), + pl.col("Number_of_Cars_Empty").list.sort(descending=False)) + .explode("Number_of_Cars_Empty", "Number_of_Cars_Loaded") + .with_columns((pl.col("Number_of_Cars_Empty") + pl.col("Number_of_Cars_Loaded")).alias("Number_of_Cars")) + #TODO: space out trains with slightly more vs less demand, instead of ordering randomly + .with_columns(pl.int_range(pl.len()).shuffle().alias("random_int")) + .sort("Origin", "Destination", "Train_Type", "random_int") + .drop("random_int") + .with_columns( + (hours * 1.0 / pl.len().over("Origin", "Destination")).alias("Interval") + ) + .with_columns( + ((pl.col("Interval").cum_count().over(["Origin","Destination"])) \ + * pl.col("Interval")).alias("Hour") + ) + .pipe(formatScheduleColumns, config = config) + ) + +def dispatch_hourly_demand_uniform_departure( + demand_hourly: pl.DataFrame, + rail_vehicles: List[alt.RailVehicle], + freight_type_to_car_type: Dict[str, str], + config: planner_config.TrainPlannerConfig +) -> pl.DataFrame: + + if "Number_of_Containers" in demand_hourly.collect_schema(): + demand_aggregate = (demand_hourly + .group_by("Origin", "Destination", "Number_of_Days", "Train_Type") + 
.agg(pl.col("Number_of_Containers").sum()) + .with_columns(pl.col("Number_of_Containers").truediv(config.containers_per_car).ceil().alias("Number_of_Cars")) + ) + else: + demand_aggregate = (demand_hourly + .group_by("Origin", "Destination", "Number_of_Days", "Train_Type") + .agg(pl.col("Number_of_Cars").sum()) + ) + + demand_returns = train_demand_generators.generate_return_demand(demand_aggregate, config) + demand_rebalancing = pl.DataFrame() + if demand_aggregate.filter(pl.col("Train_Type").str.contains("Manifest")).height > 0: + nodes = pl.concat( + [demand_aggregate.get_column("Origin"), + demand_aggregate.get_column("Destination")]).unique().sort() + demand_rebalancing = train_demand_generators.generate_manifest_rebalancing_demand(demand_aggregate, nodes, config) + + demand = train_demand_generators.generate_demand_trains(demand_aggregate, demand_returns, demand_rebalancing, rail_vehicles, freight_type_to_car_type, config) + + departure_schedule = ( + dispatch_uniform_demand_uniform_departure(demand, rail_vehicles, freight_type_to_car_type, config) + .select("Hour", "Origin", "Destination", "Train_Type") + ) + new_carloads = ( + get_new_accumulated_carloads(demand_hourly, group_cols=["Origin", "Destination", "Train_Type"], containers_per_car=config.containers_per_car, filter_zero=False) + .with_columns(pl.col("New_Carloads").cum_sum().over("Origin", "Destination", "Train_Type").alias("Cumulative_Carloads")) + .drop("New_Carloads") + .sort("Origin", "Destination", "Train_Type", "Hour") + ) + departure_schedule = (departure_schedule + .sort("Origin", "Destination", "Train_Type", "Hour") + .join_asof( + new_carloads.with_columns(pl.col("Hour").cast(pl.Float64)), + by=["Origin", "Destination", "Train_Type"], + on="Hour", + strategy="backward") + .sort("Origin", "Destination", "Train_Type", "Hour") + .with_columns((pl.col("Cumulative_Carloads") - pl.col("Cumulative_Carloads").shift(1).over("Origin", "Destination", 
"Train_Type").fill_null(0.0)).alias("New_Cumulative_Carloads")) + ) + + od_departures_revised = [] + max_train_length = math.floor(config.target_cars_per_train["Intermodal_Loaded"] * 1.1) + min_train_length = config.min_cars_per_train["Intermodal_Loaded"] + for od_departures in departure_schedule.partition_by(["Origin", "Destination", "Train_Type"], maintain_order=True): + train_lengths = od_departures.get_column("New_Cumulative_Carloads").to_list() + for i in range(len(train_lengths)): + if (train_lengths[i] > max_train_length) and (i < len(train_lengths) - 1): + train_lengths[i+1] += (train_lengths[i] - max_train_length) + train_lengths[i] = max_train_length + elif (train_lengths[i] < min_train_length) and (i < len(train_lengths) - 1): + train_lengths[i+1] += train_lengths[i] + train_lengths[i] = 0 + + if train_lengths[len(train_lengths) - 1] > max_train_length: + print(f'Unsupported case: final train too long ({train_lengths[len(train_lengths) - 1]} cars)') + elif train_lengths[len(train_lengths) - 1] < min_train_length: + if train_lengths[len(train_lengths) - 1] + train_lengths[len(train_lengths) - 2] <= max_train_length: + train_lengths[len(train_lengths) - 2] += train_lengths[len(train_lengths) - 1] + train_lengths[len(train_lengths) - 1] = 0 + else: + new_val = (train_lengths[len(train_lengths) - 2] + train_lengths[len(train_lengths) - 1]) / 2 + train_lengths[len(train_lengths) - 2] = math.ceil(new_val) + train_lengths[len(train_lengths) - 1] = math.floor(new_val) + + od_departures = od_departures.with_columns(pl.Series("Carloads", train_lengths, strict=False)) + od_departures_revised.append(od_departures) + + departure_schedule = (pl.concat(od_departures_revised, how="diagonal_relaxed") + .rename({"Carloads": "Cars_Per_Train_Loaded"}) + .filter(pl.col("Cars_Per_Train_Loaded") > 0) + .select("Hour", "Origin", "Destination", "Train_Type", "Cars_Per_Train_Loaded") + .with_columns(pl.lit(0).alias("Cars_Per_Train_Empty")) + .pipe(data_prep.appendTonsAndHP, 
rail_vehicles, freight_type_to_car_type, config) + .rename({"Cars_Per_Train_Loaded": "Cars_Loaded", + "Cars_Per_Train_Empty": "Cars_Empty"}) + .with_columns( + (pl.col("Cars_Loaded") + pl.col("Cars_Empty")).alias("Number_of_Cars"), + pl.col("Tons_Per_Car").mul("Cars_Loaded").alias("Tons_Per_Train"), + pl.col("Tons_Per_Car").mul("Cars_Loaded").mul("HP_Required_Per_Ton").alias("HP_Required"), + pl.when(pl.col("Train_Type").str.contains("Intermodal")) + .then(pl.col("Cars_Loaded").mul(config.containers_per_car)) + .otherwise(0) + .alias("Containers_Loaded"), + pl.when(pl.col("Train_Type").str.contains("Intermodal")) + .then(pl.col("Cars_Empty").mul(config.containers_per_car)) + .otherwise(0) + .alias("Containers_Empty"), + ) + .select("Hour", "Origin", "Destination", "Train_Type", + "Number_of_Cars", "Cars_Loaded", "Cars_Empty", "Containers_Empty", "Containers_Loaded", + "Tons_Per_Train", "HP_Required" + ) + .sort(["Hour","Origin","Destination","Train_Type"]) + ) + return departure_schedule \ No newline at end of file diff --git a/python/altrios/train_planner/train_demand_generators.py b/python/altrios/train_planner/train_demand_generators.py new file mode 100644 index 00000000..c0f461f0 --- /dev/null +++ b/python/altrios/train_planner/train_demand_generators.py @@ -0,0 +1,352 @@ +from typing import Union, List, Dict, Callable +import polars as pl +import polars.selectors as cs +import pandas as pd +import numpy as np +import altrios as alt +from altrios.train_planner import planner_config, data_prep + +def get_default_return_demand_generators() -> Dict[str, Callable]: + return { + 'Unit': generate_return_demand_unit, + 'Manifest': generate_return_demand_manifest, + 'Intermodal': generate_return_demand_intermodal + } + +def initialize_reverse_empties(demand: Union[pl.LazyFrame, pl.DataFrame]) -> Union[pl.LazyFrame, pl.DataFrame]: + """ + Swap `Origin` and `Destination` and append `_Empty` to `Train_Type`. 
+ Arguments: + ---------- + demand: `DataFrame` or `LazyFrame` representing origin-destination demand. + + Outputs: + ---------- + Updated demand `DataFrame` or `LazyFrame`. + """ + return (demand + .rename({"Origin": "Destination", "Destination": "Origin"}) + .with_columns((pl.concat_str(pl.col("Train_Type"),pl.lit("_Empty"))).alias("Train_Type")) + ) + +def generate_return_demand_unit(demand_subset: Union[pl.LazyFrame, pl.DataFrame], config: planner_config.TrainPlannerConfig) -> Union[pl.LazyFrame, pl.DataFrame]: + """ + Given a set of Unit train demand for one or more origin-destination pairs, generate demand in the reverse direction(s). + Arguments: + ---------- + demand: `DataFrame` or `LazyFrame` representing origin-destination demand for Unit trains. + + Outputs: + ---------- + Updated demand `DataFrame` or `LazyFrame` representing demand in the reverse direction(s) for each origin-destination pair. + """ + return (demand_subset + .pipe(initialize_reverse_empties) + ) + +def generate_return_demand_manifest(demand_subset: Union[pl.LazyFrame, pl.DataFrame], config: planner_config.TrainPlannerConfig) -> Union[pl.LazyFrame, pl.DataFrame]: + """ + Given a set of Manifest train demand for one or more origin-destination pairs, generate demand in the reverse direction(s). + Arguments: + ---------- + demand: `DataFrame` or `LazyFrame` representing origin-destination demand for Manifest trains. + + Outputs: + ---------- + Updated demand `DataFrame` or `LazyFrame` representing demand in the reverse direction(s) for each origin-destination pair.
+ """ + return(demand_subset + .pipe(initialize_reverse_empties) + .with_columns((pl.col("Number_of_Cars") * config.manifest_empty_return_ratio).floor().cast(pl.UInt32)) + ) + +def generate_return_demand_intermodal(demand_subset: Union[pl.LazyFrame, pl.DataFrame], config: planner_config.TrainPlannerConfig) -> Union[pl.LazyFrame, pl.DataFrame]: + """ + Given a set of Intermodal train demand for one or more origin-destination pairs, generate demand in the reverse direction(s). + Arguments: + ---------- + demand: `DataFrame` or `LazyFrame` representing origin-destination demand for Unit trains. + + Outputs: + ---------- + Updated demand `DataFrame` or `LazyFrame` representing demand in the reverse direction(s) for each origin-destination pair. + """ + return (demand_subset + .pipe(initialize_reverse_empties) + .with_columns( + pl.concat_str(pl.min_horizontal("Origin", "Destination"), pl.lit("_"), pl.max_horizontal("Origin", "Destination")).alias("OD") + ) + .with_columns( + pl.col("Number_of_Cars", "Number_of_Containers").range().over("OD").name.suffix("_Return") + ) + .filter( + pl.col("Number_of_Cars") == pl.col("Number_of_Cars").max().over("OD"), + pl.col("Number_of_Cars_Return") > 0 + ) + .drop("OD", "Number_of_Cars", "Number_of_Containers") + .rename({"Number_of_Cars_Return": "Number_of_Cars", + "Number_of_Containers_Return": "Number_of_Containers"}) + ) + +def generate_return_demand( + demand: pl.DataFrame, + config: planner_config.TrainPlannerConfig +) -> pl.DataFrame: + """ + Create a dataframe for additional demand needed for empty cars of the return trains + Arguments: + ---------- + df_annual_demand: The user_input file loaded by previous functions + that contains loaded demand for each demand pair. 
+ config: Object storing train planner configuration parameters + Outputs: + ---------- + df_return_demand: The demand generated by the need + of returning the empty cars to their original nodes + """ + demand_subsets = demand.partition_by("Train_Type", as_dict = True) + return_demands = [] + for train_type, demand_subset in demand_subsets.items(): + train_type_label = train_type[0] + if train_type_label in config.return_demand_generators: + return_demand_generator = config.return_demand_generators[train_type_label] + return_demand = return_demand_generator(demand_subset, config) + return_demands.append(return_demand) + else: + print(f'Return demand generator not implemented for train type: {train_type_label}') + + demand_return = (pl.concat(return_demands, how="diagonal_relaxed") + .filter(pl.col("Number_of_Cars") > 0) + ) + return demand_return + +def generate_manifest_rebalancing_demand( + demand: pl.DataFrame, + node_list: List[str], + config: planner_config.TrainPlannerConfig +) -> pl.DataFrame: + """ + Create a dataframe for summarized view of all origins' manifest demand + in number of cars and received cars, both with loaded and empty counts + Arguments: + ---------- + demand: The user_input file loaded by previous functions + that contains loaded demand for each demand pair.
+ node_list: A list containing all the names of nodes in the system + config: Object storing train planner configuration parameters + + Outputs: + ---------- + origin_manifest_demand: The dataframe that summarized all the manifest demand + originated from each node by number of loaded and empty cars + with additional columns for checking the unbalance quantity and serve as check columns + for the manifest empty car rebalancing function + """ + def balance_trains( + demand_origin_manifest: pl.DataFrame + ) -> pl.DataFrame: + """ + Update the manifest demand, especially the empty car demand to maintain equilibrium of number of + cars dispatched and received at each node for manifest + Arguments: + ---------- + demand_origin_manifest: Dataframe that summarizes empty and loaded + manifest demand dispatched and received for each node by number cars + Outputs: + ---------- + demand_origin_manifest: Updated demand_origin_manifest with additional + manifest empty car demand added to each node + df_balance_storage: Documented additional manifest demand pairs and corresponding quantity for + rebalancing process + """ + df_balance_storage = pd.DataFrame(np.zeros(shape=(0, 4))) + df_balance_storage = df_balance_storage.rename( + columns={0: "Origin", + 1: "Destination", + 2: "Train_Type", + 3: "Number_of_Cars"}) + + train_type = "Manifest_Empty" + demand = demand_origin_manifest.to_pandas()[ + ["Origin","Manifest_Received","Manifest_Dispatched","Manifest_Empty"]] + demand = demand.rename(columns={"Manifest_Received": "Received", + "Manifest_Dispatched": "Dispatched", + "Manifest_Empty": "Empty"}) + + step = 0 + # Calculate the number of iterations needed + max_iter = len(demand) * (len(demand)-1) / 2 + while (~np.isclose(demand["Received"], demand["Dispatched"])).any() and (step <= max_iter): + rows_def = demand[demand["Received"] < demand["Dispatched"]] + rows_sur = demand[demand["Received"] > demand["Dispatched"]] + if((len(rows_def) == 0) | (len(rows_sur) == 0)): + break +
# Find the first node that is in deficit of cars because of the empty return + row_def = rows_def.index[0] + # Find the first node that is in surplus of cars + row_sur = rows_sur.index[0] + surplus = demand.loc[row_sur, "Received"] - demand.loc[row_sur, "Dispatched"] + df_balance_storage.loc[len(df_balance_storage.index)] = \ + [demand.loc[row_sur, "Origin"], + demand.loc[row_def, "Origin"], + train_type, + surplus] + demand.loc[row_def, "Received"] += surplus + demand.loc[row_sur, "Dispatched"] = demand.loc[row_sur, "Received"] + step += 1 + + if (~np.isclose(demand["Received"], demand["Dispatched"])).any(): + raise Exception("While loop didn't converge") + return pl.from_pandas(df_balance_storage) + + manifest_demand = (demand + .filter(pl.col("Train_Type").str.strip_suffix("_Loaded") == "Manifest") + .select(["Origin", "Destination","Number_of_Cars"]) + .rename({"Number_of_Cars": "Manifest"}) + .unique()) + + origin_volume = manifest_demand.group_by("Origin").agg(pl.col("Manifest").sum()) + destination_volume = manifest_demand.group_by("Destination").agg(pl.col("Manifest").sum().alias("Manifest_Reverse")) + origin_manifest_demand = (pl.DataFrame({"Origin": node_list}) + .join(origin_volume, left_on="Origin", right_on="Origin", how="left") + .join(destination_volume, left_on="Origin", right_on="Destination", how="left") + .with_columns( + (pl.col("Manifest_Reverse") * config.manifest_empty_return_ratio).floor().cast(pl.UInt32).alias("Manifest_Empty")) + .with_columns( + (pl.col("Manifest") + pl.col("Manifest_Empty")).alias("Manifest_Dispatched"), + (pl.col("Manifest_Reverse") + pl.col("Manifest") * config.manifest_empty_return_ratio).floor().cast(pl.UInt32).alias("Manifest_Received")) + .drop("Manifest_Reverse") + .filter((pl.col("Manifest").is_not_null()) | (pl.col("Manifest_Empty").is_not_null())) + ) + + return balance_trains(origin_manifest_demand) + +def generate_demand_trains( + demand: pl.DataFrame, + demand_returns: pl.DataFrame, + demand_rebalancing: 
pl.DataFrame, + rail_vehicles: List[alt.RailVehicle], + freight_type_to_car_type: Dict[str, str], + config: planner_config.TrainPlannerConfig +) -> pl.DataFrame: + """ + Generate a tabulated demand pair to indicate the final demand + for each demand pair for each train type in number of trains + Arguments: + ---------- + demand: Tabulated demand for each demand pair for each train type in number of cars + + demand: The user_input file loaded and prepared by previous functions + that contains loaded car demand for each demand pair. + demand_returns: The demand generated by the need + of returning the empty cars to their original nodes + demand_rebalancing: Documented additional manifest demand pairs and corresponding quantity for + rebalancing process + + config: Object storing train planner configuration paramaters + Outputs: + ---------- + demand: Tabulated demand for each demand pair in terms of number of cars and number of trains + """ + cars_per_train_min = (pl.from_dict(config.min_cars_per_train) + .melt(variable_name="Train_Type", value_name="Cars_Per_Train_Min") + ) + cars_per_train_min_default = (cars_per_train_min + .filter(pl.col("Train_Type") == pl.lit("Default")) + .select("Cars_Per_Train_Min").item() + ) + cars_per_train_target = (pl.from_dict(config.target_cars_per_train) + .melt(variable_name="Train_Type", value_name="Cars_Per_Train_Target") + ) + cars_per_train_target_default = (cars_per_train_target + .filter(pl.col("Train_Type") == pl.lit("Default")) + .select("Cars_Per_Train_Target").item() + ) + + demand = (pl.concat([demand, demand_returns, demand_rebalancing], how="diagonal_relaxed") + .group_by("Origin","Destination", "Train_Type") + .agg(pl.col("Number_of_Cars").sum()) + .filter(pl.col("Number_of_Cars") > 0) + .pipe(data_prep.appendTonsAndHP, rail_vehicles, freight_type_to_car_type, config) + # Merge on cars_per_train_min if the user specified any + .join(cars_per_train_min, + left_on = 
pl.when(pl.col("Train_Type").str.contains(pl.lit("_Empty"))) + .then(pl.col("Train_Type")) + .otherwise(pl.concat_str(pl.col("Train_Type").str.strip_suffix("_Loaded"), pl.lit("_Loaded"))), + right_on=["Train_Type"], + how="left") + # Merge on cars_per_train_target if the user specified any + .join(cars_per_train_target, + left_on = pl.when(pl.col("Train_Type").str.contains(pl.lit("_Empty"))) + .then(pl.col("Train_Type")) + .otherwise(pl.concat_str(pl.col("Train_Type").str.strip_suffix("_Loaded"), pl.lit("_Loaded"))), + right_on=["Train_Type"], + how="left") + # Fill in defaults per train type wherever the user didn't specify OD-specific hp_per_ton + .with_columns( + pl.col("Cars_Per_Train_Min").fill_null(cars_per_train_min_default), + pl.col("Cars_Per_Train_Target").fill_null(cars_per_train_target_default), + ) + ) + loaded = (demand + .filter(~pl.col("Train_Type").str.contains("_Empty")) + .with_columns( + pl.col("Number_of_Cars", "Tons_Per_Car", "HP_Required_Per_Ton", "Cars_Per_Train_Min", "Cars_Per_Train_Target").name.suffix("_Loaded") + ) + ) + empty = (demand + .filter(pl.col("Train_Type").str.contains("_Empty")) + .with_columns( + pl.col("Number_of_Cars", "Tons_Per_Car", "HP_Required_Per_Ton", "Cars_Per_Train_Min", "Cars_Per_Train_Target").name.suffix("_Empty"), + pl.col("Train_Type").str.strip_suffix("_Empty") + ) + ) + demand = (demand + .select(pl.col("Origin", "Destination"), pl.col("Train_Type").str.strip_suffix("_Empty")) + .unique() + .join(loaded.select(cs.by_name("Origin", "Destination", "Train_Type") | cs.ends_with("_Loaded")), on=["Origin", "Destination", "Train_Type"], how="left") + .join(empty.select(cs.by_name("Origin", "Destination", "Train_Type") | cs.ends_with("_Empty")), on=["Origin", "Destination", "Train_Type"], how="left") + # Replace nulls with zero + .with_columns(cs.float().fill_null(0.0), + cs.by_dtype(pl.UInt32).fill_null(pl.lit(0).cast(pl.UInt32)), + cs.by_dtype(pl.Int64).fill_null(pl.lit(0).cast(pl.Int64)), + ) + 
.group_by("Origin", "Destination", "Train_Type") + .agg( + pl.col("Number_of_Cars_Loaded", "Number_of_Cars_Empty").sum(), + pl.col("Tons_Per_Car_Loaded", "Tons_Per_Car_Empty", + "HP_Required_Per_Ton_Loaded", "HP_Required_Per_Ton_Empty", + "Cars_Per_Train_Min_Loaded", "Cars_Per_Train_Min_Empty", + "Cars_Per_Train_Target_Loaded", "Cars_Per_Train_Target_Empty").mean(), + pl.sum_horizontal("Number_of_Cars_Loaded", "Number_of_Cars_Empty").sum().alias("Number_of_Cars") + ) + .with_columns( + # If Cars_Per_Train_Min and Cars_Per_Train_Target "disagree" for empty vs. loaded, take the average weighted by number of cars + ((pl.col("Cars_Per_Train_Min_Loaded").mul("Number_of_Cars_Loaded") + pl.col("Cars_Per_Train_Min_Empty").mul("Number_of_Cars_Empty")) / pl.col("Number_of_Cars")).alias("Cars_Per_Train_Min"), + ((pl.col("Cars_Per_Train_Target_Loaded").mul("Number_of_Cars_Loaded") + pl.col("Cars_Per_Train_Target_Empty").mul("Number_of_Cars_Empty")) / pl.col("Number_of_Cars")).alias("Cars_Per_Train_Target") + ) + .with_columns( + pl.when(config.single_train_mode) + .then(1) + .when(pl.col("Number_of_Cars") == 0) + .then(0) + .when(pl.col("Cars_Per_Train_Target") == pl.col("Number_of_Cars")) + .then(1) + .when(pl.col("Cars_Per_Train_Target") <= 1.0) + .then(pl.col("Number_of_Cars")) + .otherwise( + pl.max_horizontal([ + 1, + pl.min_horizontal([ + pl.col("Number_of_Cars").floordiv("Cars_Per_Train_Target") + 1, + pl.col("Number_of_Cars").floordiv("Cars_Per_Train_Min") + ]) + ]) + ).cast(pl.UInt32).alias("Number_of_Trains"), + pl.col("Number_of_Cars_Loaded").mul(config.containers_per_car).alias("Number_of_Containers_Loaded"), + pl.col("Number_of_Cars_Empty").mul(config.containers_per_car).alias("Number_of_Containers_Empty"), + pl.lit(config.simulation_days).alias("Number_of_Days") + ) + .drop("Cars_Per_Train_Target_Loaded", "Cars_Per_Train_Target_Empty", "Cars_Per_Train_Min_Empty", "Cars_Per_Train_Min_Loaded") + ) + return demand diff --git a/python/altrios/utilities.py 
b/python/altrios/utilities.py index 77072f81..d2b27645 100644 --- a/python/altrios/utilities.py +++ b/python/altrios/utilities.py @@ -3,8 +3,9 @@ from __future__ import annotations import re import numpy as np -from typing import Tuple, Union, Optional, Dict, Any, TYPE_CHECKING +from typing import Tuple, Union, Optional, List, Dict, Any, TYPE_CHECKING import pandas as pd +import polars as pl import datetime import numpy.typing as npt import logging @@ -177,6 +178,59 @@ def _get_list(path_elem, container): return model +def range_minmax(self) -> pl.Expr: + return self.max() - self.min() +pl.Expr.range=range_minmax +del range_minmax + +def cumPctWithinGroup( + df: Union[pl.DataFrame, pl.LazyFrame], + grouping_vars: List[str] +) -> Union[pl.DataFrame, pl.LazyFrame]: + return (df + .with_columns( + ((pl.int_range(pl.len(), dtype=pl.UInt32).over(grouping_vars).add(1)) / + pl.len().over(grouping_vars)) + .alias("Percent_Within_Group_Cumulative") + ) + ) + +def allocateIntegerEvenly( + df: Union[pl.DataFrame, pl.LazyFrame], + target: str, + grouping_vars: List[str] +) -> Union[pl.DataFrame, pl.LazyFrame]: + return (df + .sort(grouping_vars) + .pipe(cumPctWithinGroup, grouping_vars = grouping_vars) + .with_columns( + pl.col(target).mul("Percent_Within_Group_Cumulative").round().alias(f'{target}_Group_Cumulative') + ) + .with_columns( + (pl.col(f'{target}_Group_Cumulative') - pl.col(f'{target}_Group_Cumulative').shift(1).over(grouping_vars)) + .fill_null(pl.col(f'{target}_Group_Cumulative')) + .alias(f'{target}') + ) + .drop(f'{target}_Group_Cumulative') +) + +def allocateItems( + df: Union[pl.DataFrame, pl.LazyFrame], + grouping_vars: list[str], + count_target: str +) -> Union[pl.DataFrame, pl.LazyFrame]: + return (df + .sort(grouping_vars+ [count_target], descending = True) + .with_columns( + pl.col(count_target).sum().over(grouping_vars).round().alias(f'{count_target}_Group'), + (pl.col(count_target).sum().over(grouping_vars).round() * + ( +
pl.col(count_target).cum_sum().over(grouping_vars) / + pl.col(count_target).sum().over(grouping_vars) + ) + ).round().alias(f'{count_target}_Group_Cumulative')) + .with_columns((pl.col(f'{count_target}_Group_Cumulative') - pl.col(f'{count_target}_Group_Cumulative').shift(1).over(grouping_vars)).fill_null(pl.col(f'{count_target}_Group_Cumulative')).alias("Count")) + ) def resample( df: pd.DataFrame, @@ -206,7 +260,7 @@ def resample( for col in df.columns: if col in rate_vars: # calculate average value over time step - cumu_vals = (df[time_col].diff().fillna(0) * df[col]).cumsum() + cumu_vals = (df[time_col].diff().fillna(0) * df[col]).cumsum() new_dict[col] = ( np.diff( np.interp( diff --git a/rust/altrios-core/src/train/rail_vehicle.rs b/rust/altrios-core/src/train/rail_vehicle.rs index 5971db4b..dd29fe34 100644 --- a/rust/altrios-core/src/train/rail_vehicle.rs +++ b/rust/altrios-core/src/train/rail_vehicle.rs @@ -8,6 +8,10 @@ pub struct RailVehicle { #[serde(alias = "Car Type")] pub car_type: String, + /// Identifier for the freight type carried by this car type (e.g., Intermodal).
+ #[serde(alias = "Freight Type")] + pub freight_type: String, + /// Railcar length (between pulling-faces) #[serde(alias = "Length (m)")] pub length: si::Length, diff --git a/rust/altrios-core/src/train/set_speed_train_sim.rs b/rust/altrios-core/src/train/set_speed_train_sim.rs index f31289f8..515a70ba 100644 --- a/rust/altrios-core/src/train/set_speed_train_sim.rs +++ b/rust/altrios-core/src/train/set_speed_train_sim.rs @@ -185,6 +185,7 @@ pub struct SpeedTraceElement { #[new] fn __new__( loco_con: Consist, + n_cars_by_type: HashMap, state: TrainState, speed_trace: SpeedTrace, train_res_file: Option, @@ -200,7 +201,7 @@ pub struct SpeedTraceElement { None => TrainRes::valid() }; - Self::new(loco_con, state, speed_trace, train_res, path_tpc, save_interval) + Self::new(loco_con, n_cars_by_type, state, speed_trace, train_res, path_tpc, save_interval) } #[setter] @@ -266,6 +267,7 @@ pub struct SpeedTraceElement { /// should be less than 0.1%) pub struct SetSpeedTrainSim { pub loco_con: Consist, + pub n_cars_by_type: HashMap, #[serde(default)] #[serde(skip_serializing_if = "EqDefault::eq_default")] pub state: TrainState, @@ -285,6 +287,7 @@ pub struct SetSpeedTrainSim { impl SetSpeedTrainSim { pub fn new( loco_con: Consist, + n_cars_by_type: HashMap, state: TrainState, speed_trace: SpeedTrace, train_res: TrainRes, @@ -293,6 +296,7 @@ impl SetSpeedTrainSim { ) -> Self { let mut train_sim = Self { loco_con, + n_cars_by_type, state, train_res, path_tpc, @@ -462,6 +466,7 @@ impl Default for SetSpeedTrainSim { fn default() -> Self { Self { loco_con: Consist::default(), + n_cars_by_type: Default::default(), state: TrainState::valid(), train_res: TrainRes::valid(), path_tpc: PathTpc::valid(), diff --git a/rust/altrios-core/src/train/speed_limit_train_sim.rs b/rust/altrios-core/src/train/speed_limit_train_sim.rs index cebdddae..5a142d04 100644 --- a/rust/altrios-core/src/train/speed_limit_train_sim.rs +++ b/rust/altrios-core/src/train/speed_limit_train_sim.rs @@ -57,14 +57,24 
@@ impl From<&Vec> for TimedLinkPath { Ok(self.get_save_interval()) } + #[pyo3(name = "get_kilometers")] + pub fn get_kilometers_py(&self, annualize: bool) -> f64 { + self.get_kilometers(annualize) + } + #[pyo3(name = "get_megagram_kilometers")] pub fn get_megagram_kilometers_py(&self, annualize: bool) -> f64 { self.get_megagram_kilometers(annualize) } - #[pyo3(name = "get_kilometers")] - pub fn get_kilometers_py(&self, annualize: bool) -> f64 { - self.get_kilometers(annualize) + #[pyo3(name = "get_car_kilometers")] + pub fn get_car_kilometers_py(&self, annualize: bool) -> f64 { + self.get_car_kilometers(annualize) + } + + #[pyo3(name = "get_cars_moved")] + pub fn get_cars_moved_py(&self, annualize: bool) -> f64 { + self.get_cars_moved(annualize) } #[pyo3(name = "get_res_kilometers")] @@ -141,6 +151,8 @@ pub struct SpeedLimitTrainSim { pub origs: Vec, pub dests: Vec, pub loco_con: Consist, + /// Number of railcars by type on the train + pub n_cars_by_type: HashMap, #[serde(default)] #[serde(skip_serializing_if = "EqDefault::eq_default")] pub state: TrainState, @@ -167,6 +179,7 @@ impl SpeedLimitTrainSim { origs: &[Location], dests: &[Location], loco_con: Consist, + n_cars_by_type: HashMap, state: TrainState, train_res: TrainRes, path_tpc: PathTpc, @@ -180,6 +193,7 @@ impl SpeedLimitTrainSim { origs: origs.to_vec(), dests: dests.to_vec(), loco_con, + n_cars_by_type, state, train_res, path_tpc, @@ -208,14 +222,25 @@ impl SpeedLimitTrainSim { } } + pub fn get_kilometers(&self, annualize: bool) -> f64 { + self.state.total_dist.get::() * self.get_scaling_factor(annualize) + } + pub fn get_megagram_kilometers(&self, annualize: bool) -> f64 { self.state.mass_freight.get::() * self.state.total_dist.get::() * self.get_scaling_factor(annualize) } - pub fn get_kilometers(&self, annualize: bool) -> f64 { - self.state.total_dist.get::() * self.get_scaling_factor(annualize) + pub fn get_car_kilometers(&self, annualize: bool) -> f64 { + let n_cars = self.get_cars_moved(annualize) 
as f64; + // Note: n_cars already includes an annualization scaling factor; no need to multiply twice. + self.state.total_dist.get::() * n_cars + } + + pub fn get_cars_moved(&self, annualize: bool) -> f64 { + let n_cars: f64 = self.n_cars_by_type.values().fold(0, |acc, n| *n + acc) as f64; + n_cars * self.get_scaling_factor(annualize) } pub fn get_res_kilometers(&mut self, annualize: bool) -> f64 { @@ -684,6 +709,7 @@ impl Default for SpeedLimitTrainSim { origs: Default::default(), dests: Default::default(), loco_con: Default::default(), + n_cars_by_type: Default::default(), state: TrainState::valid(), train_res: TrainRes::valid(), path_tpc: PathTpc::default(), diff --git a/rust/altrios-core/src/train/train_config.rs b/rust/altrios-core/src/train/train_config.rs index 148563d9..77c880df 100644 --- a/rust/altrios-core/src/train/train_config.rs +++ b/rust/altrios-core/src/train/train_config.rs @@ -104,7 +104,8 @@ pub struct TrainConfig { /// Optional vector of drag areas (i.e. drag coeff. times frontal area) /// for each car. If provided, the total drag area (drag coefficient /// times frontal area) calculated from this vector is the sum of these - /// coefficients. + /// coefficients. Otherwise, each rail car's drag contribution based on its + /// drag coefficient and frontal area will be summed across the train. 
pub cd_area_vec: Option>, } @@ -572,6 +573,7 @@ impl TrainSimBuilder { path_tpc.extend(network, link_path)?; Ok(SetSpeedTrainSim::new( self.loco_con.clone(), + self.train_config.n_cars_by_type.clone(), state, speed_trace, train_res, @@ -601,6 +603,7 @@ impl TrainSimBuilder { Ok(( SetSpeedTrainSim::new( self.loco_con.clone(), + self.train_config.n_cars_by_type.clone(), state, speed_trace, train_res.clone(), @@ -657,6 +660,7 @@ impl TrainSimBuilder { )) })?, self.loco_con.clone(), + self.train_config.n_cars_by_type.clone(), state, train_res, path_tpc, @@ -710,6 +714,7 @@ impl TrainSimBuilder { )) })?, self.loco_con.clone(), + self.train_config.n_cars_by_type.clone(), state, train_res.clone(), path_tpc.clone(), @@ -1235,14 +1240,24 @@ pub fn run_speed_limit_train_sims( self.get_net_energy_res(annualize).get::() } + #[pyo3(name = "get_kilometers")] + pub fn get_kilometers_py(&self, annualize: bool) -> f64 { + self.get_kilometers(annualize) + } + #[pyo3(name = "get_megagram_kilometers")] pub fn get_megagram_kilometers_py(&self, annualize: bool) -> f64 { self.get_megagram_kilometers(annualize) } - #[pyo3(name = "get_kilometers")] - pub fn get_kilometers_py(&self, annualize: bool) -> f64 { - self.get_kilometers(annualize) + #[pyo3(name = "get_car_kilometers")] + pub fn get_car_kilometers_py(&self, annualize: bool) -> f64 { + self.get_car_kilometers(annualize) + } + + #[pyo3(name = "get_cars_moved")] + pub fn get_cars_moved_py(&self, annualize: bool) -> f64 { + self.get_cars_moved(annualize) } #[pyo3(name = "get_res_kilometers")] @@ -1278,6 +1293,10 @@ impl SpeedLimitTrainSimVec { .sum() } + pub fn get_kilometers(&self, annualize: bool) -> f64 { + self.0.iter().map(|sim| sim.get_kilometers(annualize)).sum() + } + pub fn get_megagram_kilometers(&self, annualize: bool) -> f64 { self.0 .iter() @@ -1285,8 +1304,18 @@ impl SpeedLimitTrainSimVec { .sum() } - pub fn get_kilometers(&self, annualize: bool) -> f64 { - self.0.iter().map(|sim| sim.get_kilometers(annualize)).sum() + 
pub fn get_car_kilometers(&self, annualize: bool) -> f64 { + self.0 + .iter() + .map(|sim| sim.get_car_kilometers(annualize)) + .sum() + } + + pub fn get_cars_moved(&self, annualize: bool) -> f64 { + self.0 + .iter() + .map(|sim| sim.get_cars_moved(annualize)) + .sum() } pub fn get_res_kilometers(&mut self, annualize: bool) -> f64 {