package com.target.data_validator.validator

import com.target.data_validator.{JsonEncoders, ValidatorError, VarSubstitution}
import com.target.data_validator.JsonUtils.debugJson
import com.target.data_validator.validator.ValidatorBase._
import com.typesafe.scalalogging.LazyLogging
import io.circe.{DecodingFailure, HCursor, Json}
import io.circe.syntax._
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.catalyst.analysis.UnresolvedAttribute
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.types.{IntegerType, StringType, StructType}
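
/**
 * Row-based check that the length of the values in a string column falls within the
 * inclusive range [minValue, maxValue]; at least one of the two bounds must be given.
 *
 * Illustrative configuration entry, with field names taken from `fromJson`/`toJson`
 * below (the surrounding config file layout is an assumption, not shown in this file):
 * {{{
 *   { "type": "stringLengthCheck", "column": "name", "minValue": 1, "maxValue": 64 }
 * }}}
 */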
case class StringLengthCheck(
  column: String,
  minValue: Option[Json],
  maxValue: Option[Json]
) extends RowBased {
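
  /** Resolves variables in the column name and bounds, carrying any already-recorded events over to the new instance. */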
  override def substituteVariables(dict: VarSubstitution): ValidatorBase = {
    val ret = StringLengthCheck(
      getVarSub(column, "column", dict),
      minValue.map(getVarSubJson(_, "minValue", dict)),
      maxValue.map(getVarSubJson(_, "maxValue", dict))
    )
    getEvents.foreach(ret.addEvent)
    ret
  }
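
  /** Wraps an optional bound in `cmp(colExpr, bound)`, turning the bound into an integer literal or an unresolved attribute. */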
  private def cmpExpr(
    colExpr: Expression,
    value: Option[Json],
    cmp: (Expression, Expression) => Expression
  ): Option[Expression] = {
    value.map { v => cmp(colExpr, createLiteralOrUnresolvedAttribute(IntegerType, v)) }
  }
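
  /**
   * Catalyst expression that flags a row whose string length falls outside the configured bounds:
   * Length(column) < minValue or Length(column) > maxValue, for whichever bounds are defined.
   */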
  override def colTest(schema: StructType, dict: VarSubstitution): Expression = {
    val colExp = Length(UnresolvedAttribute(column))

    val minValueExpression = cmpExpr(colExp, minValue, LessThan)
    val maxValueExpression = cmpExpr(colExp, maxValue, GreaterThan)

    val ret = (minValueExpression, maxValueExpression) match {
      case (Some(x), None) => x
      case (None, Some(y)) => y
      case (Some(x), Some(y)) => Or(x, y)
      case _ => throw new RuntimeException("Must define min or max value.")
    }
    logger.debug(s"Expr: $ret")
    ret
  }
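
  /**
   * Config-time sanity check on the bounds: numeric bounds must satisfy min <= max,
   * identical string bounds (e.g. the same variable reference twice) are rejected,
   * and any other combination of types is reported as unsupported.
   */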
  private def checkMinLessThanOrEqualToMax(values: List[Json]): Unit = {
    if (values.forall(_.isNumber)) {
      values.flatMap(_.asNumber) match {
        case mv :: xv :: Nil if mv.toDouble > xv.toDouble =>
          addEvent(ValidatorError(s"min: ${minValue.get} must be less than or equal to max: ${maxValue.get}"))
        case _ =>
      }
    } else if (values.forall(_.isString)) {
      values.flatMap(_.asString) match {
        case mv :: xv :: Nil if mv == xv =>
          addEvent(ValidatorError(s"Min[String]: $mv must be less than max[String]: $xv"))
        case _ =>
      }
    } else {
      // Bounds are neither all numbers nor all strings.
      addEvent(ValidatorError(s"Unsupported type in ${values.map(debugJson).mkString(", ")}"))
    }
  }
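
  /** Validates the configuration against the DataFrame: at least one bound, min <= max, and the column must be a String column. */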
  override def configCheck(df: DataFrame): Boolean = {
    // Verify that at least one of minValue or maxValue is specified.
    val values = (minValue :: maxValue :: Nil).flatten
    if (values.isEmpty) {
      addEvent(ValidatorError("Must define minValue or maxValue or both."))
    }

    // Verify that min is less than or equal to max.
    checkMinLessThanOrEqualToMax(values)

    // Verify that the data type of the specified column is String.
    val colType = findColumnInDataFrame(df, column)
    if (colType.isDefined) {
      val dataType = colType.get.dataType
      if (!dataType.isInstanceOf[StringType]) {
        addEvent(ValidatorError(s"Data type of column '$column' must be String, but was found to be $dataType"))
      }
    }

    failed
  }
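
  /** Serializes the check back to JSON: the type tag, the column, whichever bounds are set, and any recorded events. */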
  override def toJson: Json = {
    import JsonEncoders.eventEncoder
    val fields = Seq(
      ("type", Json.fromString("stringLengthCheck")),
      ("column", Json.fromString(column))
    ) ++
      minValue.map(mv => ("minValue", mv)) ++
      maxValue.map(mv => ("maxValue", mv)) ++
      Seq(("events", getEvents.asJson))
    Json.obj(fields: _*)
  }
}
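
/** Companion decoder that builds a [[StringLengthCheck]] from its JSON configuration. */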
object StringLengthCheck extends LazyLogging {
  def fromJson(c: HCursor): Either[DecodingFailure, ValidatorBase] = {
    // "column" is required (this throws if it is missing); the bounds stay as raw Json so they may be numbers or strings.
    val column = c.downField("column").as[String].right.get
    val minValueJ = c.downField("minValue").as[Json].right.toOption
    val maxValueJ = c.downField("maxValue").as[Json].right.toOption

    logger.debug(s"column: $column")
    logger.debug(s"minValue: $minValueJ type: ${minValueJ.getClass.getCanonicalName}")
    logger.debug(s"maxValue: $maxValueJ type: ${maxValueJ.getClass.getCanonicalName}")

    c.focus.foreach { f => logger.info(s"StringLengthCheckJson: ${f.spaces2}") }
    scala.util.Right(StringLengthCheck(column, minValueJ, maxValueJ))
  }
}