Commit: Add ptax fix logic
wagnerlmichael committed Jan 18, 2024
1 parent f94fa8b commit e445e93
Showing 3 changed files with 85 additions and 30 deletions.
62 changes: 48 additions & 14 deletions glue/sales_val_flagging.py
@@ -84,7 +84,7 @@ def add_rolling_window(df, num_months):
return df


def ptax_adjustment(df, groups, ptax_sd):
def ptax_adjustment(df, groups, ptax_sd, condos: bool):
"""
This function manually applies a ptax adjustment, keeping only
ptax flags that are outside of a certain standard deviation
@@ -102,12 +102,35 @@ def ptax_adjustment(df, groups, ptax_sd):

group_string = "_".join(groups)

df["ptax_flag_w_deviation"] = df["ptax_flag_original"] & (
(df[f"sv_price_deviation_{group_string}"] >= ptax_sd[1])
| (df[f"sv_price_deviation_{group_string}"] <= -ptax_sd[0])
| (df[f"sv_price_per_sqft_deviation_{group_string}"] >= ptax_sd[1])
| (df[f"sv_price_per_sqft_deviation_{group_string}"] <= -ptax_sd[0])
)
if condos == False:
df["ptax_flag_w_deviation"] = df["ptax_flag_original"] & (
(df[f"sv_price_deviation_{group_string}"] >= ptax_sd[1])
| (df[f"sv_price_deviation_{group_string}"] <= -ptax_sd[0])
| (df[f"sv_price_per_sqft_deviation_{group_string}"] >= ptax_sd[1])
| (df[f"sv_price_per_sqft_deviation_{group_string}"] <= -ptax_sd[0])
)

# Determine the ptax direction
conditions = [
(df[f"sv_price_deviation_{group_string}"] >= ptax_sd[1])
| (df[f"sv_price_per_sqft_deviation_{group_string}"] >= ptax_sd[1]),
(df[f"sv_price_deviation_{group_string}"] <= -ptax_sd[0])
| (df[f"sv_price_per_sqft_deviation_{group_string}"] <= -ptax_sd[0]),
]
else:
df["ptax_flag_w_deviation"] = df["ptax_flag_original"] & (
(df[f"sv_price_deviation_{group_string}"] >= ptax_sd[1])
| (df[f"sv_price_deviation_{group_string}"] <= -ptax_sd[0])
)

# Determine the ptax direction
conditions = [
(df[f"sv_price_deviation_{group_string}"] >= ptax_sd[1]),
(df[f"sv_price_deviation_{group_string}"] <= -ptax_sd[0]),
]

directions = ["High", "Low"]
df["ptax_direction"] = np.select(conditions, directions, default=np.nan)

return df
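To make the new `condos` branching concrete, here is a minimal standalone sketch of the residential (`condos=False`) path on a fabricated frame. The single stat group, the `ptax_sd = [2, 2]` thresholds, and the deviation values are hypothetical stand-ins; only the column naming and the `np.select` pattern mirror the function above.

```python
import numpy as np
import pandas as pd

# Hypothetical single stat group and symmetric 2-standard-deviation bounds.
groups = ["township"]
ptax_sd = [2, 2]
group_string = "_".join(groups)

df = pd.DataFrame(
    {
        "ptax_flag_original": [True, True, True, False],
        f"sv_price_deviation_{group_string}": [3.1, -2.5, 0.4, 5.0],
        f"sv_price_per_sqft_deviation_{group_string}": [0.2, -0.1, 2.6, 0.0],
    }
)

# Residential branch: keep a ptax flag only when the sale also sits outside
# the deviation band on either the price or the price-per-sqft metric.
df["ptax_flag_w_deviation"] = df["ptax_flag_original"] & (
    (df[f"sv_price_deviation_{group_string}"] >= ptax_sd[1])
    | (df[f"sv_price_deviation_{group_string}"] <= -ptax_sd[0])
    | (df[f"sv_price_per_sqft_deviation_{group_string}"] >= ptax_sd[1])
    | (df[f"sv_price_per_sqft_deviation_{group_string}"] <= -ptax_sd[0])
)

# Label the deviation direction; note it is computed independently of
# ptax_flag_original, so unflagged rows still receive a label.
conditions = [
    (df[f"sv_price_deviation_{group_string}"] >= ptax_sd[1])
    | (df[f"sv_price_per_sqft_deviation_{group_string}"] >= ptax_sd[1]),
    (df[f"sv_price_deviation_{group_string}"] <= -ptax_sd[0])
    | (df[f"sv_price_per_sqft_deviation_{group_string}"] <= -ptax_sd[0]),
]
df["ptax_direction"] = np.select(conditions, ["High", "Low"], default=np.nan)

print(df[["ptax_flag_w_deviation", "ptax_direction"]])
# Rows 0-2 keep the flag (row 2 via the $/sqft metric); row 3 was never
# ptax-flagged, so the flag stays False even though its direction reads "High".
```

The condo (`condos=True`) branch is the same except that only the `sv_price_deviation_*` column is consulted, both for the flag and for the direction conditions.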

@@ -846,18 +869,29 @@ def write_to_table(df, table_name, s3_warehouse_bucket_path, run_id):
condos=True,
)

df_flagged_merged = pd.concat(
[df_res_flagged_updated, df_condo_flagged_updated]
).reset_index(drop=True)
# Update the PTAX flag column with an additional std dev conditional
df_res_flagged_updated_ptax = ptax_adjustment(
df=df_res_flagged_updated,
groups=stat_groups_list,
ptax_sd=ptax_sd_list,
condos=False,
)

# Update the PTAX flag column with an additional std dev conditional
df_flagged_ptax = ptax_adjustment(
df=df_flagged_merged, groups=stat_groups_list, ptax_sd=ptax_sd_list
df_condo_flagged_updated_ptax = ptax_adjustment(
df=df_condo_flagged_updated,
groups=condo_stat_groups,
ptax_sd=ptax_sd_list,
condos=True,
)

# Finish flagging
df_flagged_ptax_merged = pd.concat(
[df_res_flagged_updated_ptax, df_condo_flagged_updated_ptax]
).reset_index(drop=True)

# Finish flagging and subset to write to flag table
df_flagged_final, run_id, timestamp = finish_flags(
df=df_flagged_ptax,
df=df_flagged_ptax_merged,
start_date=args["time_frame_start"],
manual_update=False,
)
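A plausible reason the adjustment now runs once per property class before the `pd.concat`, rather than once on the merged frame, is that the deviation column names `ptax_adjustment` looks up are derived from the group list via `"_".join(groups)`, so the residential and condo frames carry differently named columns. A tiny illustration, with hypothetical group names standing in for `stat_groups_list` and `condo_stat_groups`:

```python
# Hypothetical group lists; the real stat_groups_list and condo_stat_groups
# are defined earlier in the script.
stat_groups_list = ["rolling_window", "township_code", "class"]
condo_stat_groups = ["rolling_window", "township_code"]

for groups in (stat_groups_list, condo_stat_groups):
    group_string = "_".join(groups)
    print(f"sv_price_deviation_{group_string}")
# sv_price_deviation_rolling_window_township_code_class
# sv_price_deviation_rolling_window_township_code
```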
25 changes: 18 additions & 7 deletions manual_flagging/initial_flagging.py
@@ -175,18 +175,29 @@
condos=True,
)

df_flagged_merged = pd.concat(
[df_res_flagged_updated, df_condo_flagged_updated]
).reset_index(drop=True)
# Update the PTAX flag column with an additional std dev conditional w/ res groups
df_res_flagged_updated_ptax = flg.ptax_adjustment(
df=df_res_flagged_updated,
groups=inputs["stat_groups"],
ptax_sd=inputs["ptax_sd"],
condos=False,
)

# Update the PTAX flag column with an additional std dev conditional
df_flagged_ptax = flg.ptax_adjustment(
df=df_flagged_merged, groups=inputs["stat_groups"], ptax_sd=inputs["ptax_sd"]
# Update the PTAX flag column with an additional std dev conditional w/ condo groups
df_condo_flagged_updated_ptax = flg.ptax_adjustment(
df=df_condo_flagged_updated,
groups=condo_stat_groups,
ptax_sd=inputs["ptax_sd"],
condos=True,
)

df_flagged_ptax_merged = pd.concat(
[df_res_flagged_updated_ptax, df_condo_flagged_updated_ptax]
).reset_index(drop=True)

# Finish flagging and subset to write to flag table
df_to_write, run_id, timestamp = flg.finish_flags(
df=df_flagged_ptax,
df=df_flagged_ptax_merged,
start_date=inputs["time_frame"]["start"],
manual_update=False,
)
28 changes: 19 additions & 9 deletions manual_flagging/manual_update.py
@@ -186,23 +186,33 @@
condos=True,
)

df_flagged_merged = pd.concat(
[df_res_flagged_updated, df_condo_flagged_updated]
).reset_index(drop=True)
# Update the PTAX flag column with an additional std dev conditional w/ res groups
df_res_flagged_updated_ptax = flg.ptax_adjustment(
df=df_res_flagged_updated,
groups=inputs["stat_groups"],
ptax_sd=inputs["ptax_sd"],
condos=False,
)

# Update the PTAX flag column with an additional std dev conditional
df_flagged_ptax = flg.ptax_adjustment(
df=df_flagged_merged, groups=inputs["stat_groups"], ptax_sd=inputs["ptax_sd"]
# Update the PTAX flag column with an additional std dev conditional w/ condo groups
df_condo_flagged_updated_ptax = flg.ptax_adjustment(
df=df_condo_flagged_updated,
groups=condo_stat_groups,
ptax_sd=inputs["ptax_sd"],
condos=True,
)

df_flagged_ptax_merged = pd.concat(
[df_res_flagged_updated_ptax, df_condo_flagged_updated_ptax]
).reset_index(drop=True)

# Finish flagging and subset to write to flag table
df_flagged_final, run_id, timestamp = flg.finish_flags(
df=df_flagged_ptax,
df=df_flagged_ptax_merged,
start_date=inputs["time_frame"]["start"],
manual_update=True,
manual_update=False,
)


# -----------------------------------------------------------------------------
# Update version of re-flagged sales
# -----------------------------------------------------------------------------
