- Reading from Kafka — this part worked:
# NOTE(review): ".etc" is a placeholder left by the author — replace it with the
# real Kafka source chain (e.g. .format("kafka"), .option(...), .load()) before
# running. "spark" is assumed to be the active SparkSession provided by the
# Databricks runtime — confirm in context. Code kept verbatim.
raw_kafka_test = (spark.readStream
.etc
)
@dlt.table(
    table_properties={"pipelines.reset.allowed": "false"},
)
def raw_kafka():
    """Materialize the raw Kafka stream as a DLT table.

    The ``pipelines.reset.allowed`` property is set to ``"false"`` for this
    table (per its key name, this disallows resets of the table — confirm
    exact semantics against the DLT docs).
    """
    stream_df = raw_kafka_test
    return stream_df
- Reading from the Delta Live Table — this did not work:
@dlt.table(
    # Fixed typo in the user-visible table comment: "Kakfa" -> "Kafka".
    comment="real schema for Kafka payload",
    temporary=True,
)
def data_kafka():
    """Temporary DLT table that streams from the raw_kafka table.

    Reads the upstream table via ``dlt.read_stream("raw_kafka")`` — the
    argument matches the name of the producing function above, which is
    how DLT identifies tables within the same pipeline.
    """
    return dlt.read_stream("raw_kafka")
What should I do to make the read from the Delta Live Table work?