I have a function that is in main_file.py
def transform_table(table_name, start, end):
return sparkSession.table(table_name).filter(column.isBetween(start,end))
I wonder if it's possible to mock the sparkSession.table function call. This is what I have so far...I get Column not iterable error
@mock.patch("main_file.pyspark.sql.SparkSession")
@pytest.mark.usefixtures("spark")
def test(self,mocked,spark):
injected_df = spark.createDataFrame([(1, 100, "ketchup"), (2, 200, "mayo")], ["id", "qty", "condiment"])
expected_df = spark.createDataFrame([(1, 100, "ketchup"),["id", "qty", "condiment"])
mocked.table.return_value = injected_df
obj = Class(spark)
obj.transform_table("useless table name", 100, 150)
assert obj == expected_df