# result_validate.py
  1. import os
  2. import datetime
  3. import pandas as pd
  4. from data_loader import mongo_con_parse, validate_one_line, fill_hourly_crawl_date
  5. def validate_process(node, date):
  6. output_dir = f"./validate/{node}_{date}"
  7. os.makedirs(output_dir, exist_ok=True)
  8. object_dir = "./data_shards"
  9. csv_file = 'future_predictions.csv'
  10. csv_path = os.path.join(object_dir, csv_file)
  11. try:
  12. df_predict = pd.read_csv(csv_path)
  13. except Exception as e:
  14. print(f"read {csv_path} error: {str(e)}")
  15. df_predict = pd.DataFrame()
  16. if df_predict.empty:
  17. print(f"预测数据为空")
  18. return
  19. # fly_day = df_predict['flight_day'].unique()[0]
  20. client, db = mongo_con_parse()
  21. count = 0
  22. for idx, row in df_predict.iterrows():
  23. city_pair = row['city_pair']
  24. flight_day = row['flight_day']
  25. flight_number_1 = row['flight_number_1']
  26. flight_number_2 = row['flight_number_2']
  27. baggage = row['baggage']
  28. valid_begin_hour = row['valid_begin_hour']
  29. df_val= validate_one_line(db, city_pair, flight_day, flight_number_1, flight_number_2, baggage, valid_begin_hour)
  30. if not df_val.empty:
  31. df_val_f = fill_hourly_crawl_date(df_val, rear_fill=2)
  32. df_val_f = df_val_f[df_val_f['is_filled']==0] # 只要原始数据,不要补齐的
  33. if df_val_f.empty:
  34. drop_flag = 0
  35. first_drop_amount = pd.NA
  36. first_drop_hours = pd.NA
  37. last_hours_util = pd.NA
  38. last_update_hour = pd.NA
  39. list_change_price = []
  40. list_change_hours = []
  41. else:
  42. # 有效数据的最后一行
  43. last_row = df_val_f.iloc[-1]
  44. last_hours_util = last_row['hours_until_departure']
  45. last_update_hour = last_row['update_hour']
  46. # 价格变化过滤
  47. df_price_changes = df_val_f.loc[
  48. df_val_f["adult_total_price"].shift() != df_val_f["adult_total_price"]
  49. ].copy()
  50. # 价格变化幅度
  51. df_price_changes['change_amount'] = df_price_changes['adult_total_price'].diff().fillna(0)
  52. # 找到第一个 change_amount 小于 -10 的行
  53. first_negative_change = df_price_changes[df_price_changes['change_amount'] < -10].head(1)
  54. # 提取所需的值
  55. if not first_negative_change.empty:
  56. drop_flag = 1
  57. first_drop_amount = first_negative_change['change_amount'].iloc[0].round(2)
  58. first_drop_hours = first_negative_change['hours_until_departure'].iloc[0]
  59. else:
  60. drop_flag = 0
  61. first_drop_amount = pd.NA
  62. first_drop_hours = pd.NA
  63. list_change_price = df_price_changes['adult_total_price'].tolist()
  64. list_change_hours = df_price_changes['hours_until_departure'].tolist()
  65. else:
  66. drop_flag = 0
  67. first_drop_amount = pd.NA
  68. first_drop_hours = pd.NA
  69. last_hours_util = pd.NA
  70. last_update_hour = pd.NA
  71. list_change_price = []
  72. list_change_hours = []
  73. safe_sep = "; "
  74. df_predict.at[idx, 'change_prices'] = safe_sep.join(map(str, list_change_price))
  75. df_predict.at[idx, 'change_hours'] = safe_sep.join(map(str, list_change_hours))
  76. df_predict.at[idx, 'last_hours_util'] = last_hours_util
  77. df_predict.at[idx, 'last_update_hour'] = last_update_hour
  78. df_predict.at[idx, 'first_drop_amount'] = first_drop_amount * -1 # 负数转正数
  79. df_predict.at[idx, 'first_drop_hours'] = first_drop_hours
  80. df_predict.at[idx, 'drop_flag'] = drop_flag
  81. count += 1
  82. if count % 5 == 0:
  83. print(f"cal count: {count}")
  84. print(f"计算结束")
  85. client.close()
  86. timestamp_str = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
  87. save_scv = f"result_validate_{node}_{date}_{timestamp_str}.csv"
  88. output_path = os.path.join(output_dir, save_scv)
  89. df_predict.to_csv(output_path, index=False, encoding="utf-8-sig")
  90. print(f"保存完成: {output_path}")
  91. if __name__ == "__main__":
  92. node, date = "node0105", "0107"
  93. validate_process(node, date)