# data_process.py (8.7 KB)
  1. import pandas as pd
  2. import numpy as np
  3. import gc
  4. import os
  5. def preprocess_data_simple(df_input, is_train=False):
  6. print(">>> 开始数据预处理")
  7. # 城市码映射成数字(不用)
  8. # 更新日期是周几
  9. df_input['update_week'] = df_input['update_hour'].dt.dayofweek + 1
  10. # gid:基于指定字段的分组标记(整数)
  11. df_input['gid'] = (
  12. df_input
  13. .groupby(
  14. ['citypair', 'flight_numbers', 'from_date'], # 'baggage_weight' 先不进分组
  15. sort=False
  16. )
  17. .ngroup()
  18. )
  19. # 在 gid 与 baggage_weight 内按时间降序
  20. df_input = df_input.sort_values(
  21. by=['gid', 'baggage_weight', 'hours_until_departure'],
  22. ascending=[True, True, False]
  23. ).reset_index(drop=True)
  24. df_input = df_input[df_input['hours_until_departure'] <= 480]
  25. df_input = df_input[df_input['baggage_weight'] == 20] # 先保留20公斤行李的
  26. # 在hours_until_departure 的末尾 保留真实的而不是补齐的数据
  27. if not is_train:
  28. _tail_filled = df_input.groupby(['gid', 'baggage_weight'])['is_filled'].transform(
  29. lambda s: s.iloc[::-1].cummin().iloc[::-1]
  30. )
  31. df_input = df_input[~((df_input['is_filled'] == 1) & (_tail_filled == 1))]
  32. # 价格变化最小量阈值
  33. price_change_amount_threshold = 5
  34. df_input['_raw_price_diff'] = df_input.groupby(['gid', 'baggage_weight'], group_keys=False)['price_total'].diff()
  35. # 计算价格变化量
  36. df_input['price_change_amount'] = (
  37. df_input['_raw_price_diff']
  38. .mask(df_input['_raw_price_diff'].abs() < price_change_amount_threshold, 0)
  39. .replace(0, np.nan)
  40. .groupby([df_input['gid'], df_input['baggage_weight']], group_keys=False)
  41. .ffill()
  42. .fillna(0)
  43. .round(2)
  44. )
  45. # 计算价格变化百分比(相对于上一时间点的变化率)
  46. df_input['price_change_percent'] = (
  47. df_input.groupby(['gid', 'baggage_weight'], group_keys=False)['price_total']
  48. .pct_change()
  49. .mask(df_input['_raw_price_diff'].abs() < price_change_amount_threshold, 0)
  50. .replace(0, np.nan)
  51. .groupby([df_input['gid'], df_input['baggage_weight']], group_keys=False)
  52. .ffill()
  53. .fillna(0)
  54. .round(4)
  55. )
  56. # 第一步:标记价格变化段
  57. df_input['price_change_segment'] = (
  58. df_input.groupby(['gid', 'baggage_weight'], group_keys=False)['price_change_amount']
  59. .apply(lambda s: (s != s.shift()).cumsum())
  60. )
  61. # 第二步:计算每个变化段内的持续时间
  62. df_input['price_duration_hours'] = (
  63. df_input.groupby(['gid', 'baggage_weight', 'price_change_segment'], group_keys=False)
  64. .cumcount()
  65. .add(1)
  66. )
  67. # 可选:删除临时列
  68. df_input = df_input.drop(columns=['price_change_segment', '_raw_price_diff'])
  69. # 训练过程
  70. if is_train:
  71. df_target = df_input[(df_input['hours_until_departure'] >= 24) & (df_input['hours_until_departure'] <= 360)].copy()
  72. df_target = df_target.sort_values(
  73. by=['gid', 'baggage_weight', 'hours_until_departure'],
  74. ascending=[True, True, False]
  75. ).reset_index(drop=True)
  76. # 对于先升后降的分析
  77. prev_pct = df_target.groupby(['gid', 'baggage_weight'], group_keys=False)['price_change_percent'].shift(1)
  78. prev_amo = df_target.groupby(['gid', 'baggage_weight'], group_keys=False)['price_change_amount'].shift(1)
  79. prev_dur = df_target.groupby(['gid', 'baggage_weight'], group_keys=False)['price_duration_hours'].shift(1)
  80. prev_price = df_target.groupby(['gid', 'baggage_weight'], group_keys=False)['price_total'].shift(1)
  81. drop_mask = (prev_pct > 0) & (df_target['price_change_percent'] < 0)
  82. df_drop_nodes = df_target.loc[drop_mask, ['gid', 'baggage_weight', 'hours_until_departure', 'days_to_departure', 'update_hour', 'update_week']].copy()
  83. df_drop_nodes.rename(columns={'hours_until_departure': 'drop_hours_until_departure'}, inplace=True)
  84. df_drop_nodes.rename(columns={'days_to_departure': 'drop_days_to_departure'}, inplace=True)
  85. df_drop_nodes.rename(columns={'update_hour': 'drop_update_hour'}, inplace=True)
  86. df_drop_nodes.rename(columns={'update_week': 'drop_update_week'}, inplace=True)
  87. df_drop_nodes['drop_price_change_percent'] = df_target.loc[drop_mask, 'price_change_percent'].astype(float).round(4).to_numpy()
  88. df_drop_nodes['drop_price_change_amount'] = df_target.loc[drop_mask, 'price_change_amount'].astype(float).round(2).to_numpy()
  89. df_drop_nodes['high_price_duration_hours'] = prev_dur.loc[drop_mask].astype(float).to_numpy()
  90. df_drop_nodes['high_price_change_percent'] = prev_pct.loc[drop_mask].astype(float).round(4).to_numpy()
  91. df_drop_nodes['high_price_change_amount'] = prev_amo.loc[drop_mask].astype(float).round(2).to_numpy()
  92. df_drop_nodes['high_price_amount'] = prev_price.loc[drop_mask].astype(float).round(2).to_numpy()
  93. df_drop_nodes = df_drop_nodes.reset_index(drop=True)
  94. flight_info_cols = [
  95. 'citypair', 'flight_numbers', 'from_time', 'from_date', 'currency',
  96. ]
  97. flight_info_cols = [c for c in flight_info_cols if c in df_target.columns]
  98. df_gid_info = df_target[['gid', 'baggage_weight'] + flight_info_cols].drop_duplicates(subset=['gid', 'baggage_weight']).reset_index(drop=True)
  99. df_drop_nodes = df_drop_nodes.merge(df_gid_info, on=['gid', 'baggage_weight'], how='left')
  100. drop_info_cols = [
  101. 'drop_update_hour', 'drop_update_week',
  102. 'drop_days_to_departure', 'drop_hours_until_departure', 'drop_price_change_percent', 'drop_price_change_amount',
  103. 'high_price_duration_hours', 'high_price_change_percent', 'high_price_change_amount', 'high_price_amount',
  104. ]
  105. # 按顺序排列 去掉gid
  106. df_drop_nodes = df_drop_nodes[flight_info_cols + ['baggage_weight'] + drop_info_cols]
  107. # 对于“上涨后再次上涨”的分析(连续两个正向变价段)
  108. seg_start_mask = df_target['price_duration_hours'].eq(1)
  109. rise_mask = seg_start_mask & (prev_pct > 0) & (df_target['price_change_percent'] > 0)
  110. df_rise_nodes = df_target.loc[rise_mask, ['gid', 'baggage_weight', 'hours_until_departure', 'days_to_departure', 'update_hour', 'update_week']].copy()
  111. df_rise_nodes.rename(columns={'hours_until_departure': 'rise_hours_until_departure'}, inplace=True)
  112. df_rise_nodes.rename(columns={'days_to_departure': 'rise_days_to_departure'}, inplace=True)
  113. df_rise_nodes.rename(columns={'update_hour': 'rise_update_hour'}, inplace=True)
  114. df_rise_nodes.rename(columns={'update_week': 'rise_update_week'}, inplace=True)
  115. df_rise_nodes['rise_price_change_percent'] = df_target.loc[rise_mask, 'price_change_percent'].astype(float).round(4).to_numpy()
  116. df_rise_nodes['rise_price_change_amount'] = df_target.loc[rise_mask, 'price_change_amount'].astype(float).round(2).to_numpy()
  117. df_rise_nodes['prev_rise_duration_hours'] = prev_dur.loc[rise_mask].astype(float).to_numpy()
  118. df_rise_nodes['prev_rise_change_percent'] = prev_pct.loc[rise_mask].astype(float).round(4).to_numpy()
  119. df_rise_nodes['prev_rise_change_amount'] = prev_amo.loc[rise_mask].astype(float).round(2).to_numpy()
  120. df_rise_nodes['prev_rise_amount'] = prev_price.loc[rise_mask].astype(float).round(2).to_numpy()
  121. df_rise_nodes = df_rise_nodes.reset_index(drop=True)
  122. df_rise_nodes = df_rise_nodes.merge(df_gid_info, on=['gid', 'baggage_weight'], how='left')
  123. rise_info_cols = [
  124. 'rise_update_hour', 'rise_update_week',
  125. 'rise_days_to_departure', 'rise_hours_until_departure', 'rise_price_change_percent', 'rise_price_change_amount',
  126. 'prev_rise_duration_hours', 'prev_rise_change_percent', 'prev_rise_change_amount', 'prev_rise_amount',
  127. ]
  128. df_rise_nodes = df_rise_nodes[flight_info_cols + ['baggage_weight'] + rise_info_cols]
  129. # 制作历史包络线
  130. envelope_group = ['citypair', 'flight_numbers', 'from_date', 'baggage_weight']
  131. idx_peak = df_input.groupby(envelope_group)['price_total'].idxmax()
  132. df_envelope = df_input.loc[idx_peak, envelope_group + [
  133. 'from_time', 'price_total', 'hours_until_departure', 'days_to_departure', 'update_hour', 'update_week',
  134. ]].rename(columns={
  135. 'price_total': 'peak_price',
  136. 'hours_until_departure': 'peak_hours',
  137. 'days_to_departure': 'peak_days',
  138. 'update_hour': 'peak_time',
  139. 'update_week': 'peak_week',
  140. }).reset_index(drop=True)
  141. del df_gid_info
  142. del df_target
  143. return df_input, df_drop_nodes, df_rise_nodes, df_envelope
  144. return df_input, None, None, None