Trying to drop a nested column from a DataFrame in PySpark doesn't work. This is the snippet from my code:
```python
from pyspark.sql import functions as F
from pyspark.sql.types import IntegerType, BooleanType
from pyspark.sql.functions import udf, struct

# simple filter function
@F.udf(returnType=BooleanType())
def my_filter(x):
    if x != df_new.a.b:
        return True
    else:
        return False

# df.filter(my_filter('id', 'category')).show()

# rebuild the struct, keeping every child field except the one to drop
def drop_col(df, struct_nm, delete_struct_child_col_nm):
    # fields_to_keep = filter(lambda x: x != delete_struct_child_col_nm, df.select("{}.*".format(struct_nm)).columns)
    fields_to_keep = filter(lambda x: my_filter(x), df.select("{}.*".format(struct_nm)).columns)
    fields_to_keep = list(map(lambda x: "{}.{}".format(struct_nm, x), fields_to_keep))
    return df.withColumn(struct_nm, struct(fields_to_keep))

drop_col(df_new, "a", df_new.a.b)
```
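To be clear about the expectation: after the call above, struct `a` should keep every field except `b`. A small usage sketch of what I'm aiming for (the `result` name is just for illustration):

```python
# Expected outcome: struct "a" without the child field "b"
result = drop_col(df_new, "a", df_new.a.b)
result.printSchema()  # "b" should no longer appear under "a"
```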
I used a UDF because the following line (the commented-out one in `drop_col`) didn't work; neither the `!=` comparison nor the tilde (`~`) has any effect:

```python
fields_to_keep = filter(lambda x: x != delete_struct_child_col_nm, df.select("{}.*".format(struct_nm)).columns)
```
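The tilde variant was roughly the following line inside `drop_col` (reconstructed from memory, so treat it as approximate):

```python
# Approximate reconstruction of the "~" attempt, not the exact original line
fields_to_keep = filter(
    lambda x: ~(x == delete_struct_child_col_nm),
    df.select("{}.*".format(struct_nm)).columns,
)
```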
EDIT: Someone asked for the schema in the comments, so I am providing it:
```
root
 |-- a: struct (nullable = false)
 |    |-- rawEntity: string (nullable = true)
 |    |-- entityType: string (nullable = true)
 |    |    |-- element: struct (containsNull = true)
 |    |    |    |-- StoreId: string (nullable = true)
 |    |    |    |-- AppId: string (nullable = true)
 |    |-- Timestamps: array (nullable = true)
 |    |    |-- element: long (containsNull = true)
 |    |-- User: string (nullable = true)
 |    |-- b: array (nullable = true)            <-- trying to drop this
 |    |    |-- element: string (containsNull = true)
 |    |-- KeywordsFull: array (nullable = true)
 |    |    |-- element: struct (containsNull = true)
 |    |    |    |-- Keywords: string (nullable = true)
 |    |    |-- element: string (containsNull = true)
```
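In case a reproducible example helps, here is a rough way to build a DataFrame with a similar (much simplified) nested layout; the values are made up and most inner fields are omitted, since only the struct `a` containing the array field `b` matters here:

```python
from pyspark.sql import SparkSession

spark = SparkSession.builder.getOrCreate()

# Simplified stand-in for df_new: struct "a" with a few of the fields above,
# including the array field "b" that I am trying to drop.
df_new = spark.createDataFrame(
    [(("rawEntity1", "user1", ["b1", "b2"], [1622547800]),)],
    "a struct<rawEntity:string, User:string, b:array<string>, Timestamps:array<bigint>>",
)
df_new.printSchema()
```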