Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix intrazonal saving #97

Open
wants to merge 4 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions config/base.toml
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,11 @@ weight_max_dev = 0.2
weight_total_dev = 0.8
max_zones = 8 # maximum number of feasible zones to include in the optimization problem (fewer zones makes the problem smaller - so faster, but at the cost of a better solution)

[secondary_assignment]
# Probability of choosing a secondary zone. Same idea as a gravity model. We use floor_space / distance^n, where n is the power value used here
# See here to understand how this probability matrix is used https://github.com/arup-group/pam/blob/main/examples/17_advanced_discretionary_locations.ipynb
visit_probability_power = 2.0 # Default power value

[postprocessing]
pam_jitter = 30
pam_min_duration = 10
Expand Down
7 changes: 6 additions & 1 deletion config/base_all_msoa.toml
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ seed = 0
# this is used to query poi data from osm and to load in SPC data
region = "leeds"
# how many people from the SPC do we want to run the model for? Comment out if you want to run the analysis on the entire SPC population
number_of_households = 25000
number_of_households = 2500
# "OA21CD": OA level, "MSOA11CD": MSOA level
zone_id = "MSOA21CD"
# Only set to true if you have travel time matrix at the level specified in boundary_geography
Expand Down Expand Up @@ -61,6 +61,11 @@ weight_total_dev = 0.8
# maximum number of feasible zones to include in the optimization problem (fewer zones makes the problem smaller - so faster, but at the cost of a better solution)
max_zones = 10

[secondary_assignment]
# Probability of choosing a secondary zone. Same idea as a gravity model. We use floor_space / distance^n, where n is the power value used here
# See here to understand how this probability matrix is used https://github.com/arup-group/pam/blob/main/examples/17_advanced_discretionary_locations.ipynb
visit_probability_power = 2 # Default power value

[postprocessing]
pam_jitter = 30
pam_min_duration = 10
Expand Down
6 changes: 3 additions & 3 deletions scripts/3.1_assign_primary_feasible_zones.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,9 +82,6 @@ def main(config_file):
)
logger.info("Travel time estimates created")

# save travel_time_etstimates as parquet
travel_time_estimates.to_parquet(config.travel_times_estimates_filepath)

if config.parameters.travel_times:
logger.info("Loading travel time matrix")
try:
Expand Down Expand Up @@ -133,6 +130,9 @@ def main(config_file):

logger.info("Intrazonal travel times replaced")

# save travel_time_estimates as parquet
travel_times.to_parquet(config.travel_times_estimates_filepath)

# --- Activity locations (Facilities)
#
# Activity locations are obtained from OSM using the [osmox](https://github.com/arup-group/osmox)
Expand Down
5 changes: 4 additions & 1 deletion scripts/3.2.3_assign_secondary_zone.py
Original file line number Diff line number Diff line change
Expand Up @@ -354,7 +354,10 @@ def merge_columns_from_other(df: pd.DataFrame, other: pd.DataFrame) -> pd.DataFr
# Calculate the visit_probability: it is a function of floor_area and travel time
merged_df["visit_prob"] = np.where(
merged_df["time"] != 0, # avoid division by zero
round(merged_df["floor_area"] / np.sqrt(merged_df["time"])),
round(
merged_df["floor_area"]
/ (merged_df["time"] ** config.secondary_assignment.visit_probability_power)
),
round(merged_df["floor_area"]),
)

Expand Down
10 changes: 10 additions & 0 deletions src/acbm/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,11 @@ def validate_commute_level(commute_level: str) -> str:
return commute_level.upper()


@dataclass(frozen=True)
class SecondaryAssignmentParams(BaseModel):
visit_probability_power: float = 2.0


@dataclass(frozen=True)
class Postprocessing(BaseModel):
pam_jitter: int
Expand Down Expand Up @@ -109,6 +114,11 @@ class Config(BaseModel):
work_assignment: WorkAssignmentParams = Field(
description="Config: parameters for work assignment."
)

secondary_assignment: SecondaryAssignmentParams = Field(
description="Config: parameters for secondary assignment."
)

postprocessing: Postprocessing = Field(
description="Config: parameters for postprocessing."
)
Expand Down
2 changes: 1 addition & 1 deletion tests/test_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,4 +11,4 @@ def config():


def test_id(config):
assert config.id == "0ebb8c3ee7"
assert config.id == "0b1ab47552"
Loading