在pyspark中获取两个日期之间的开始和结束



我一直在尝试从2个给定日期获取月范围,但它没有按预期工作。

  • start_date (dd-mm-yyyy) = 12-01-2022
  • end_date (dd-mm-yyyy) = 03-06-2022预期输出:

<表类> Valid_From Valid_To tbody><<tr>2022-01-122022-01-312022-02-012022-02-282022-03-012022-03-312022-04-012022-04-302022-05-012022-05-312022-06-012022-06-03

尝试以下内容:

import findspark
from pyspark.sql import SparkSession, Window
from pyspark.sql import functions as F
findspark.init()
spark = SparkSession.builder.appName("local").getOrCreate()
columns = ["start_date", "end_date"]
data = [("12-01-2022", "03-06-2022")]
df = spark.createDataFrame(data).toDF(*columns)
df = (
df.withColumn(
"start_date", F.to_date(F.col("start_date"), "dd-MM-yyyy").cast("DATE")
)
.withColumn(
"end_date", F.to_date(F.col("end_date"), "dd-MM-yyyy").cast("DATE")
)
.withColumn(
"months_between",
F.round(
F.months_between(F.col("end_date"), F.col("start_date"), True)
).cast("Integer"),
)
.withColumn(
"months_between_seq", F.sequence(F.lit(1), F.col("months_between"))
)
.withColumn("months_between_seq", F.explode(F.col("months_between_seq")))
.withColumn(
"end_of_month",
F.expr(
"""
LAST_DAY(ADD_MONTHS(start_date, months_between_seq - 1))
"""
),
)
.withColumn(
"begin_of_month",
F.expr(
"""
LAST_DAY(ADD_MONTHS(start_date, months_between_seq - 1)) + 1
"""
),
)
)
start_window_agg = Window.partitionBy().orderBy("Valid_From")
start_union_sdf = (
df.select(
F.col("start_date").alias("Valid_From")
)
.unionByName(
df.select(
F.col("begin_of_month").alias("Valid_From")
)
)
.drop_duplicates()
.withColumn(
"row_number",
F.row_number().over(start_window_agg)
)
)
end_window_agg = Window.partitionBy().orderBy("Valid_To")
end_union_sdf = (
df.select(
F.col("end_date").alias("Valid_To")
)
.unionByName(
df.select(
F.col("end_of_month").alias("Valid_To")
)
)
.drop_duplicates()
.withColumn(
"row_number",
F.row_number().over(end_window_agg)
)
)
join_sdf = (
end_union_sdf
.join(
start_union_sdf,
how="inner",
on=["row_number"]
)
.drop("row_number")
.withColumn("Valid_To", F.col("Valid_To").cast("DATE"))
.withColumn("Valid_From", F.col("Valid_From").cast("DATE"))
.select("Valid_From", "Valid_To")
.orderBy("Valid_From")
)
join_sdf.show()

它返回:

+----------+----------+
|Valid_From|  Valid_To|
+----------+----------+
|2022-01-12|2022-01-31|
|2022-02-01|2022-02-28|
|2022-03-01|2022-03-31|
|2022-04-01|2022-04-30|
|2022-05-01|2022-05-31|
|2022-06-01|2022-06-03|
+----------+----------+

相关内容

  • 没有找到相关文章

最新更新