# Find Count of Null, None, NaN of All DataFrame Columns
from pyspark.sql.functions import col,isnan, when, count
# Count Null/None/NaN per column. ``isnan()`` is only valid on float/double
# columns — applying it to string/date columns raises an AnalysisException
# in Spark 3+ — so restrict the NaN check to floating-point columns and use
# a plain isNull() test everywhere else.
_dtypes = dict(df.dtypes)  # column name -> type string, e.g. {"number": "double"}
df.select([
    count(
        when(
            (isnan(c) | col(c).isNull()) if _dtypes[c] in ("float", "double")
            else col(c).isNull(),
            c,
        )
    ).alias(c)
    for c in df.columns
]).show()
# Expected output:
# +----+-----+------+
# |name|state|number|
# +----+-----+------+
# |   0|    1|     3|
# +----+-----+------+