1 /***********************************************************************
2  * Copyright (c) 2013-2025 General Atomics Integrated Intelligence, Inc.
3  * All rights reserved. This program and the accompanying materials
4  * are made available under the terms of the Apache License, Version 2.0
5  * which accompanies this distribution and is available at
6  * https://www.apache.org/licenses/LICENSE-2.0
7  ***********************************************************************/
8 
9 package org.locationtech.geomesa.spark.sql
10 
11 import com.typesafe.scalalogging.LazyLogging
12 import org.apache.spark.sql.catalyst.expressions._
13 import org.apache.spark.sql.catalyst.plans.logical._
14 import org.apache.spark.sql.catalyst.rules.Rule
15 import org.apache.spark.sql.execution.datasources.LogicalRelation
16 import org.apache.spark.sql.execution.{ProjectExec, SparkPlan}
17 import org.apache.spark.sql.sedona_sql.UDT.{GeometryUDT => Sedona_GeometryUDT}
18 import org.apache.spark.sql.sedona_sql.expressions.{ST_Predicate => Sedona_ST_Predicate}
19 import org.apache.spark.sql.types.DataTypes
20 import org.apache.spark.sql.{SQLContext, Strategy}
21 import org.geotools.api.filter.expression.{Expression => GTExpression, Literal => GTLiteral}
22 import org.geotools.api.filter.{FilterFactory, Filter => GTFilter}
23 import org.geotools.factory.CommonFactoryFinder
24 import org.locationtech.geomesa.filter.FilterHelper
25 import org.locationtech.geomesa.spark.haveSedona
26 import org.locationtech.geomesa.spark.jts.rules.GeometryLiteral
27 import org.locationtech.geomesa.spark.jts.udf.SpatialRelationFunctions._
28 import org.locationtech.geomesa.spark.sql.GeoMesaRelation.PartitionedIndexedRDD
29 import org.locationtech.jts.geom.{Envelope, Geometry}
30 
31 import java.time.{LocalDateTime, ZoneId, ZoneOffset}
32 import java.util.Date
33 
34 object SQLRules extends LazyLogging {
35   @transient
36   private val ff: FilterFactory = CommonFactoryFinder.getFilterFactory
37 
38   def scalaUDFtoGTFilter(udf: Expression): Option[GTFilter] = {
39     udf match {
40       case u: ScalaUDF if u.children.length == 2 => buildGTFilter(u.function, u.children.head, u.children.last)
41       case _ => None
42     }
43   }
44 
45   private def buildGTFilter(func: AnyRef, exprA: Expression, exprB: Expression): Option[GTFilter] =
46     for {
47       builder <- funcToFF(func)
48       gtExprA <- sparkExprToGTExpr(exprA)
49       gtExprB <- sparkExprToGTExpr(exprB)
50     } yield {
51       builder(gtExprA, gtExprB)
52     }
53 
54   def funcToFF(func: AnyRef): Option[(GTExpression, GTExpression) => GTFilter] = {
55     func match {
56       case ST_Contains => Some((expr1: GTExpression, expr2: GTExpression) =>
57         ff.contains(expr1, expr2))
58       case ST_Crosses => Some((expr1: GTExpression, expr2: GTExpression) =>
59         ff.crosses(expr1, expr2))
60       case ST_Disjoint => Some((expr1: GTExpression, expr2: GTExpression) =>
61         ff.disjoint(expr1, expr2))
62       case ST_Equals => Some((expr1: GTExpression, expr2: GTExpression) =>
63         ff.equal(expr1, expr2))
64       case ST_Intersects => Some((expr1: GTExpression, expr2: GTExpression) =>
65         ff.intersects(expr1, expr2))
66       case ST_Overlaps => Some((expr1: GTExpression, expr2: GTExpression) =>
67         ff.overlaps(expr1, expr2))
68       case ST_Touches => Some((expr1: GTExpression, expr2: GTExpression) =>
69         ff.touches(expr1, expr2))
70       case ST_Within => Some((expr1: GTExpression, expr2: GTExpression) =>
71         ff.within(expr1, expr2))
72       case _ => None
73     }
74   }
75 
76   def sedonaExprToGTFilter(pred: Sedona_ST_Predicate): Option[GTFilter] = {
77     sparkExprToGTExpr(pred.children.head).flatMap { expr1 =>
78       sparkExprToGTExpr(pred.children.last).flatMap { expr2 =>
79         // sedona classes are private, so we have to match on the class name instead of the class itself
80         pred.getClass.getSimpleName match {
81           case "ST_Contains"   => Some(ff.contains(expr1, expr2))
82           case "ST_Crosses"    => Some(ff.crosses(expr1, expr2))
83           case "ST_Overlaps"   => Some(ff.overlaps(expr1, expr2))
84           case "ST_Intersects" => Some(ff.intersects(expr1, expr2))
85           case "ST_Within"     => Some(ff.within(expr1, expr2))
86           case "ST_Touches"    => Some(ff.touches(expr1, expr2))
87           case "ST_Equals"     => Some(ff.equal(expr1, expr2))
88           case "ST_Disjoint"   => Some(ff.disjoint(expr1, expr2))
89           case _ => None
90         }
91       }
92     }
93   }
94 
95   def sparkFilterToGTFilter(expr: Expression): Option[GTFilter] = {
96     expr match {
97       case udf: ScalaUDF => scalaUDFtoGTFilter(udf)
98       case binaryComp@BinaryComparison(left, right) =>
99         val leftExpr = sparkExprToGTExpr(left)
100         val rightExpr = sparkExprToGTExpr(right)
101         if (leftExpr.isEmpty || rightExpr.isEmpty) {
102           None
103         } else {
104           binaryComp match {
105             case _: EqualTo            => Some(ff.equals(leftExpr.get, rightExpr.get))
106             case _: LessThan           => Some(ff.less(leftExpr.get, rightExpr.get))
107             case _: LessThanOrEqual    => Some(ff.lessOrEqual(leftExpr.get, rightExpr.get))
108             case _: GreaterThan        => Some(ff.greater(leftExpr.get, rightExpr.get))
109             case _: GreaterThanOrEqual => Some(ff.greaterOrEqual(leftExpr.get, rightExpr.get))
110             case _ => None
111           }
112         }
113       case unary: UnaryExpression =>
114         val sparkExpr = unary.child
115         val gtExpr = sparkExprToGTExpr(sparkExpr)
116         if (gtExpr.isEmpty)
117           None
118         else {
119           unary match {
120             case _: IsNotNull => Some(ff.not(ff.isNull(gtExpr.get)))
121             case _: IsNull => Some(ff.isNull(gtExpr.get))
122             case _ => None
123           }
124         }
125       case _ =>
126         if (haveSedona && expr.isInstanceOf[Sedona_ST_Predicate]) {
127           sedonaExprToGTFilter(expr.asInstanceOf[Sedona_ST_Predicate])
128         } else {
129           logger.debug(s"Got expr: $expr.  Don't know how to turn this into a GeoTools Expression.")
130           None
131         }
132     }
133   }
134 
  /**
   * Converts a spark catalyst expression into an equivalent GeoTools expression (a property
   * reference or a literal). Returns None if the expression can't be converted.
   *
   * @param expression the catalyst expression to convert
   * @return the equivalent GeoTools expression, if one could be built
   */
  def sparkExprToGTExpr(expression: Expression): Option[GTExpression] = expression match {
    case g: GeometryLiteral =>
      Some(ff.literal(g.geom))

    // __fid__ is excluded - it has no corresponding property to reference
    case a: AttributeReference if a.name != "__fid__" =>
      Some(ff.property(a.name))

    case c: Cast =>
      // the cast's session time zone, if present; used to adjust date literals below
      lazy val zone = c.timeZoneId.map(ZoneId.of).orNull
      sparkExprToGTExpr(c.child).map {
        case lit: GTLiteral if lit.getValue.isInstanceOf[Date] && zone != null =>
          // re-interprets the date's wall-clock time in the session zone as a UTC instant
          // NOTE(review): presumably aligns spark session-zoned dates with UTC-based
          // feature attributes - verify against callers
          val date = LocalDateTime.ofInstant(lit.getValue.asInstanceOf[Date].toInstant, zone)
          ff.literal(new Date(date.atZone(ZoneOffset.UTC).toInstant.toEpochMilli))
        case e => e
      }

    case lit: Literal if lit.dataType == DataTypes.StringType =>
      // the actual class is org.apache.spark.unsafe.types.UTF8String, we need to make it
      // a normal string so that geotools can handle it
      Some(ff.literal(Option(lit.value).map(_.toString).orNull))

    case lit: Literal if lit.dataType == DataTypes.TimestampType =>
      // timestamps are defined as microseconds
      Some(ff.literal(new Date(lit.value.asInstanceOf[Long] / 1000)))

    // must precede the generic literal case below; the haveSedona guard keeps sedona
    // classes from being loaded when sedona is not on the classpath
    case lit: Literal if haveSedona && lit.dataType.isInstanceOf[Sedona_GeometryUDT] =>
      Some(ff.literal(lit.dataType.asInstanceOf[Sedona_GeometryUDT].deserialize(lit.value)))

    case lit: Literal =>
      Some(ff.literal(lit.value))

    case _ =>
      logger.debug(s"Can't turn expression into geotools: $expression")
      None
  }
170 
171   // new optimizations rules
  /**
   * Catalyst optimizer rule that pushes spatial predicates and joins down into GeoMesa relations.
   *
   * Filters over a GeoMesa relation are converted to GeoTools filters and folded into the
   * relation; spatial joins between two GeoMesa relations are replaced with a pre-computed
   * join relation so catalyst doesn't do a full cartesian product.
   */
  object SpatialOptimizationsRule extends Rule[LogicalPlan] with PredicateHelper {

    // extracts the first geometry literal found in the expression tree, if any
    def extractGeometry(e: org.apache.spark.sql.catalyst.expressions.Expression): Option[Geometry] = e match {
      case GeometryLiteral(_, geom) => Some(geom)
      case And(l, r) => extractGeometry(l).orElse(extractGeometry(r))
      case u: ScalaUDF => u.children.collectFirst { case GeometryLiteral(_, geom) => geom }
      case _ => None
    }

    // maps the geometry in the expression (if any) to the ids of the partition envelopes it hits
    private def extractGridId(envelopes: List[Envelope],
                              e: org.apache.spark.sql.catalyst.expressions.Expression): Option[List[Int]] =
      extractGeometry(e).map(RelationUtils.gridIdMapper(_, envelopes))

    // Replace the relation in a join with a GeoMesaJoin Relation
    private def alterJoin(join: Join): LogicalPlan = {
      // NOTE(review): due to type erasure, this isInstanceOf only verifies that the UDF is
      // backed by *some* two-argument function - the (Geometry, Geometry) element types are
      // not actually checked at runtime
      val isSpatialUDF = join.condition.exists {
        case u: ScalaUDF if u.function.isInstanceOf[(Geometry, Geometry) => java.lang.Boolean] =>
          u.children.head.isInstanceOf[AttributeReference] && u.children(1).isInstanceOf[AttributeReference]
        case _ => false
      }

      (join.left, join.right) match {
        // join directly over two GeoMesa relations
        case (left: LogicalRelation, right: LogicalRelation) if isSpatialUDF =>
          (left.relation, right.relation) match {
            case (leftRel: GeoMesaRelation, rightRel: GeoMesaRelation) =>
              leftRel.join(rightRel, join.condition.get) match {
                case None => join // the relations couldn't be pre-joined; leave the plan as-is
                case Some(joinRelation) =>
                  // the join relation carries both sides' output, so replace the left leg only
                  val newLogicalRelLeft = SparkVersions.copy(left)(output = left.output ++ right.output, relation = joinRelation)
                  SparkVersions.copy(join)(left = newLogicalRelLeft)
              }

            case _ => join
          }

        // join over two projections of GeoMesa relations - same as above, but the combined
        // projection list is pushed onto the left leg as well
        case (leftProject @ Project(leftProjectList, left: LogicalRelation),
            Project(rightProjectList, right: LogicalRelation)) if isSpatialUDF =>
          (left.relation, right.relation) match {
            case (leftRel: GeoMesaRelation, rightRel: GeoMesaRelation) =>
              leftRel.join(rightRel, join.condition.get) match {
                case None => join
                case Some(joinRelation) =>
                  val newLogicalRelLeft = SparkVersions.copy(left)(output = left.output ++ right.output, relation = joinRelation)
                  val newProjectLeft = leftProject.copy(projectList = leftProjectList ++ rightProjectList, child = newLogicalRelLeft)
                  SparkVersions.copy(join)(left = newProjectLeft)
              }

            case _ => join
          }

        case _ => join
      }
    }

    override def apply(plan: LogicalPlan): LogicalPlan = {
      logger.debug(s"Optimizer sees $plan")

      // NOTE: The number of arguments in Aggregate constructor is 3 on community spark and 4 on DataBricks
      // so we cannot use pattern matching to unapply the constructor also
      // need to use reflection to safely create new Aggregate instance
      val optimizeAggregate: PartialFunction[LogicalPlan, LogicalPlan] =
        Function.unlift { plan: LogicalPlan =>
          plan match {
            case agg: Aggregate =>
              agg.child match {
                case Project(projectList, join: Join) =>
                  val alteredJoin = SpatialOptimizationsRule.alterJoin(join)
                  Some(Aggregates.instance(agg.groupingExpressions, agg.aggregateExpressions, Project(projectList, alteredJoin), None))
                case join: Join =>
                  val alteredJoin = SpatialOptimizationsRule.alterJoin(join)
                  Some(Aggregates.instance(agg.groupingExpressions, agg.aggregateExpressions, alteredJoin, None))
                case _ => None
              }
            case _ => None
          }
        }

      val optimizeRest: PartialFunction[LogicalPlan, LogicalPlan] = {
        case join: Join =>
          alterJoin(join)
        case sort @ Sort(_, _, _) => sort    // No-op.  Just realizing what we can do:)
        case filt @ Filter(f, lr: LogicalRelation) if lr.relation.isInstanceOf[GeoMesaRelation] =>
          // TODO: deal with `or`

          val gmRel = lr.relation.asInstanceOf[GeoMesaRelation]
          // split up conjunctive predicates and extract the st_contains variable
          val sparkFilters: Seq[Expression] =  splitConjunctivePredicates(f)

          // partition the conjuncts into those convertible to GeoTools filters (pushed down)
          // and those that must stay in the spark plan
          val (gtFilters: Seq[GTFilter], sFilters: Seq[Expression]) = sparkFilters.foldLeft((Seq[GTFilter](), Seq[Expression]())) {
            case ((gts: Seq[GTFilter], sfilters), expression: Expression) =>
              sparkFilterToGTFilter(expression) match {
                case Some(gtf) => (gts.+:(gtf), sfilters)
                case None      => (gts,         sfilters.+:(expression))
              }
          }

          if (gtFilters.nonEmpty) {
            // if we have a partitioned cache, exclude partitions that don't match the query filter
            val partitioned = gmRel.cached.map {
              case c: PartitionedIndexedRDD =>
                val hints = sparkFilters.flatMap(extractGridId(c.envelopes, _)).flatten
                if (hints.isEmpty) { c } else {
                  c.copy(rdd = c.rdd.filter { case (key, _) => hints.contains(key) })
                }

              case c => c
            }
            // merge the pushed-down filters with any filter already on the relation
            val filt = FilterHelper.filterListAsAnd(gmRel.filter.toSeq ++ gtFilters)
            val relation = gmRel.copy(filter = filt, cached = partitioned)
            val newrel = SparkVersions.copy(lr)(output = lr.output, relation = relation)
            if (sFilters.nonEmpty) {
              // Keep filters that couldn't be transformed at the top level
              Filter(sFilters.reduce(And), newrel)
            } else {
              // if all filters could be transformed to GeoTools filters, just return the new relation
              newrel
            }
          } else {
            filt
          }
      }
      plan.transform(optimizeAggregate orElse optimizeRest)
    }

  }
297 
298   // A catch for when we are able to precompute the join using the sweepline algorithm.
299   // Skips doing a full cartesian product with catalyst.
300   object SpatialJoinStrategy extends Strategy {
301 
302     import org.apache.spark.sql.catalyst.plans.logical._
303 
304     def alterJoin(logicalPlan: Join): Seq[SparkPlan] = {
305       logicalPlan.left match {
306         case Project(projectList, lr: LogicalRelation) if lr.relation.isInstanceOf[GeoMesaJoinRelation] =>
307            ProjectExec(projectList, planLater(lr)) :: Nil
308 
309         case lr: LogicalRelation if lr.relation.isInstanceOf[GeoMesaJoinRelation] => planLater(lr) :: Nil
310 
311         case _ => Nil
312       }
313     }
314 
315     override def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
316       //TODO: handle other kinds of joins
317       case Project(_, logicalPlan: Join) => alterJoin(logicalPlan)
318       case join: Join => alterJoin(join)
319       case _ => Nil
320     }
321   }
322 
323   def registerOptimizations(sqlContext: SQLContext): Unit = {
324 
325     Seq(SpatialOptimizationsRule).foreach { r =>
326       if(!sqlContext.experimental.extraOptimizations.contains(r))
327         sqlContext.experimental.extraOptimizations ++= Seq(r)
328     }
329 
330     Seq(SpatialJoinStrategy).foreach { s =>
331       if(!sqlContext.experimental.extraStrategies.contains(s))
332         sqlContext.experimental.extraStrategies ++= Seq(s)
333     }
334   }
335 }
Line Stmt Id Pos Tree Symbol Tests Code
36 904 1843 - 1879 Apply org.geotools.factory.CommonFactoryFinder.getFilterFactory org.geotools.factory.CommonFactoryFinder.getFilterFactory()
40 905 1987 - 2009 Apply scala.Int.== u.children.length.==(2)
40 906 2027 - 2037 Select org.apache.spark.sql.catalyst.expressions.ScalaUDF.function u.function
40 907 2039 - 2054 Select scala.collection.IterableLike.head u.children.head
40 908 2056 - 2071 Select scala.collection.TraversableLike.last u.children.last
40 909 2013 - 2072 Apply org.locationtech.geomesa.spark.sql.SQLRules.buildGTFilter SQLRules.this.buildGTFilter(u.function, u.children.head, u.children.last)
40 910 2013 - 2072 Block org.locationtech.geomesa.spark.sql.SQLRules.buildGTFilter SQLRules.this.buildGTFilter(u.function, u.children.head, u.children.last)
41 911 2089 - 2093 Select scala.None scala.None
41 912 2089 - 2093 Block scala.None scala.None
47 916 2209 - 2376 Apply scala.Option.flatMap SQLRules.this.funcToFF(func).flatMap[org.geotools.api.filter.Filter](((builder: (org.geotools.api.filter.expression.Expression, org.geotools.api.filter.expression.Expression) => org.geotools.api.filter.Filter) => SQLRules.this.sparkExprToGTExpr(exprA).flatMap[org.geotools.api.filter.Filter](((gtExprA: org.geotools.api.filter.expression.Expression) => SQLRules.this.sparkExprToGTExpr(exprB).map[org.geotools.api.filter.Filter](((gtExprB: org.geotools.api.filter.expression.Expression) => builder.apply(gtExprA, gtExprB)))))))
48 915 2253 - 2376 Apply scala.Option.flatMap SQLRules.this.sparkExprToGTExpr(exprA).flatMap[org.geotools.api.filter.Filter](((gtExprA: org.geotools.api.filter.expression.Expression) => SQLRules.this.sparkExprToGTExpr(exprB).map[org.geotools.api.filter.Filter](((gtExprB: org.geotools.api.filter.expression.Expression) => builder.apply(gtExprA, gtExprB)))))
49 914 2295 - 2376 Apply scala.Option.map SQLRules.this.sparkExprToGTExpr(exprB).map[org.geotools.api.filter.Filter](((gtExprB: org.geotools.api.filter.expression.Expression) => builder.apply(gtExprA, gtExprB)))
51 913 2351 - 2376 Apply scala.Function2.apply builder.apply(gtExprA, gtExprB)
56 918 2510 - 2595 Apply scala.Some.apply scala.Some.apply[(org.geotools.api.filter.expression.Expression, org.geotools.api.filter.expression.Expression) => org.geotools.api.filter.spatial.Contains](((expr1: org.geotools.api.filter.expression.Expression, expr2: org.geotools.api.filter.expression.Expression) => SQLRules.this.ff.contains(expr1, expr2)))
56 919 2510 - 2595 Block scala.Some.apply scala.Some.apply[(org.geotools.api.filter.expression.Expression, org.geotools.api.filter.expression.Expression) => org.geotools.api.filter.spatial.Contains](((expr1: org.geotools.api.filter.expression.Expression, expr2: org.geotools.api.filter.expression.Expression) => SQLRules.this.ff.contains(expr1, expr2)))
57 917 2569 - 2594 Apply org.geotools.api.filter.FilterFactory.contains SQLRules.this.ff.contains(expr1, expr2)
58 921 2621 - 2705 Apply scala.Some.apply scala.Some.apply[(org.geotools.api.filter.expression.Expression, org.geotools.api.filter.expression.Expression) => org.geotools.api.filter.spatial.Crosses](((expr1: org.geotools.api.filter.expression.Expression, expr2: org.geotools.api.filter.expression.Expression) => SQLRules.this.ff.crosses(expr1, expr2)))
58 922 2621 - 2705 Block scala.Some.apply scala.Some.apply[(org.geotools.api.filter.expression.Expression, org.geotools.api.filter.expression.Expression) => org.geotools.api.filter.spatial.Crosses](((expr1: org.geotools.api.filter.expression.Expression, expr2: org.geotools.api.filter.expression.Expression) => SQLRules.this.ff.crosses(expr1, expr2)))
59 920 2680 - 2704 Apply org.geotools.api.filter.FilterFactory.crosses SQLRules.this.ff.crosses(expr1, expr2)
60 924 2732 - 2817 Apply scala.Some.apply scala.Some.apply[(org.geotools.api.filter.expression.Expression, org.geotools.api.filter.expression.Expression) => org.geotools.api.filter.spatial.Disjoint](((expr1: org.geotools.api.filter.expression.Expression, expr2: org.geotools.api.filter.expression.Expression) => SQLRules.this.ff.disjoint(expr1, expr2)))
60 925 2732 - 2817 Block scala.Some.apply scala.Some.apply[(org.geotools.api.filter.expression.Expression, org.geotools.api.filter.expression.Expression) => org.geotools.api.filter.spatial.Disjoint](((expr1: org.geotools.api.filter.expression.Expression, expr2: org.geotools.api.filter.expression.Expression) => SQLRules.this.ff.disjoint(expr1, expr2)))
61 923 2791 - 2816 Apply org.geotools.api.filter.FilterFactory.disjoint SQLRules.this.ff.disjoint(expr1, expr2)
62 927 2842 - 2924 Apply scala.Some.apply scala.Some.apply[(org.geotools.api.filter.expression.Expression, org.geotools.api.filter.expression.Expression) => org.geotools.api.filter.spatial.Equals](((expr1: org.geotools.api.filter.expression.Expression, expr2: org.geotools.api.filter.expression.Expression) => SQLRules.this.ff.equal(expr1, expr2)))
62 928 2842 - 2924 Block scala.Some.apply scala.Some.apply[(org.geotools.api.filter.expression.Expression, org.geotools.api.filter.expression.Expression) => org.geotools.api.filter.spatial.Equals](((expr1: org.geotools.api.filter.expression.Expression, expr2: org.geotools.api.filter.expression.Expression) => SQLRules.this.ff.equal(expr1, expr2)))
63 926 2901 - 2923 Apply org.geotools.api.filter.FilterFactory.equal SQLRules.this.ff.equal(expr1, expr2)
64 930 2953 - 3040 Apply scala.Some.apply scala.Some.apply[(org.geotools.api.filter.expression.Expression, org.geotools.api.filter.expression.Expression) => org.geotools.api.filter.spatial.Intersects](((expr1: org.geotools.api.filter.expression.Expression, expr2: org.geotools.api.filter.expression.Expression) => SQLRules.this.ff.intersects(expr1, expr2)))
64 931 2953 - 3040 Block scala.Some.apply scala.Some.apply[(org.geotools.api.filter.expression.Expression, org.geotools.api.filter.expression.Expression) => org.geotools.api.filter.spatial.Intersects](((expr1: org.geotools.api.filter.expression.Expression, expr2: org.geotools.api.filter.expression.Expression) => SQLRules.this.ff.intersects(expr1, expr2)))
65 929 3012 - 3039 Apply org.geotools.api.filter.FilterFactory.intersects SQLRules.this.ff.intersects(expr1, expr2)
66 933 3067 - 3152 Apply scala.Some.apply scala.Some.apply[(org.geotools.api.filter.expression.Expression, org.geotools.api.filter.expression.Expression) => org.geotools.api.filter.spatial.Overlaps](((expr1: org.geotools.api.filter.expression.Expression, expr2: org.geotools.api.filter.expression.Expression) => SQLRules.this.ff.overlaps(expr1, expr2)))
66 934 3067 - 3152 Block scala.Some.apply scala.Some.apply[(org.geotools.api.filter.expression.Expression, org.geotools.api.filter.expression.Expression) => org.geotools.api.filter.spatial.Overlaps](((expr1: org.geotools.api.filter.expression.Expression, expr2: org.geotools.api.filter.expression.Expression) => SQLRules.this.ff.overlaps(expr1, expr2)))
67 932 3126 - 3151 Apply org.geotools.api.filter.FilterFactory.overlaps SQLRules.this.ff.overlaps(expr1, expr2)
68 936 3178 - 3262 Apply scala.Some.apply scala.Some.apply[(org.geotools.api.filter.expression.Expression, org.geotools.api.filter.expression.Expression) => org.geotools.api.filter.spatial.Touches](((expr1: org.geotools.api.filter.expression.Expression, expr2: org.geotools.api.filter.expression.Expression) => SQLRules.this.ff.touches(expr1, expr2)))
68 937 3178 - 3262 Block scala.Some.apply scala.Some.apply[(org.geotools.api.filter.expression.Expression, org.geotools.api.filter.expression.Expression) => org.geotools.api.filter.spatial.Touches](((expr1: org.geotools.api.filter.expression.Expression, expr2: org.geotools.api.filter.expression.Expression) => SQLRules.this.ff.touches(expr1, expr2)))
69 935 3237 - 3261 Apply org.geotools.api.filter.FilterFactory.touches SQLRules.this.ff.touches(expr1, expr2)
70 939 3287 - 3370 Apply scala.Some.apply scala.Some.apply[(org.geotools.api.filter.expression.Expression, org.geotools.api.filter.expression.Expression) => org.geotools.api.filter.spatial.Within](((expr1: org.geotools.api.filter.expression.Expression, expr2: org.geotools.api.filter.expression.Expression) => SQLRules.this.ff.within(expr1, expr2)))
70 940 3287 - 3370 Block scala.Some.apply scala.Some.apply[(org.geotools.api.filter.expression.Expression, org.geotools.api.filter.expression.Expression) => org.geotools.api.filter.spatial.Within](((expr1: org.geotools.api.filter.expression.Expression, expr2: org.geotools.api.filter.expression.Expression) => SQLRules.this.ff.within(expr1, expr2)))
71 938 3346 - 3369 Apply org.geotools.api.filter.FilterFactory.within SQLRules.this.ff.within(expr1, expr2)
72 941 3387 - 3391 Select scala.None scala.None
72 942 3387 - 3391 Block scala.None scala.None
77 943 3501 - 3519 Select scala.collection.IterableLike.head pred.children.head
77 972 3483 - 4323 Apply scala.Option.flatMap SQLRules.this.sparkExprToGTExpr(pred.children.head).flatMap[org.geotools.api.filter.Filter](((expr1: org.geotools.api.filter.expression.Expression) => SQLRules.this.sparkExprToGTExpr(pred.children.last).flatMap[org.geotools.api.filter.Filter](((expr2: org.geotools.api.filter.expression.Expression) => pred.getClass().getSimpleName() match { case "ST_Contains" => scala.Some.apply[org.geotools.api.filter.spatial.Contains](SQLRules.this.ff.contains(expr1, expr2)) case "ST_Crosses" => scala.Some.apply[org.geotools.api.filter.spatial.Crosses](SQLRules.this.ff.crosses(expr1, expr2)) case "ST_Overlaps" => scala.Some.apply[org.geotools.api.filter.spatial.Overlaps](SQLRules.this.ff.overlaps(expr1, expr2)) case "ST_Intersects" => scala.Some.apply[org.geotools.api.filter.spatial.Intersects](SQLRules.this.ff.intersects(expr1, expr2)) case "ST_Within" => scala.Some.apply[org.geotools.api.filter.spatial.Within](SQLRules.this.ff.within(expr1, expr2)) case "ST_Touches" => scala.Some.apply[org.geotools.api.filter.spatial.Touches](SQLRules.this.ff.touches(expr1, expr2)) case "ST_Equals" => scala.Some.apply[org.geotools.api.filter.spatial.Equals](SQLRules.this.ff.equal(expr1, expr2)) case "ST_Disjoint" => scala.Some.apply[org.geotools.api.filter.spatial.Disjoint](SQLRules.this.ff.disjoint(expr1, expr2)) case _ => scala.None }))))
78 944 3564 - 3582 Select scala.collection.TraversableLike.last pred.children.last
78 971 3546 - 4317 Apply scala.Option.flatMap SQLRules.this.sparkExprToGTExpr(pred.children.last).flatMap[org.geotools.api.filter.Filter](((expr2: org.geotools.api.filter.expression.Expression) => pred.getClass().getSimpleName() match { case "ST_Contains" => scala.Some.apply[org.geotools.api.filter.spatial.Contains](SQLRules.this.ff.contains(expr1, expr2)) case "ST_Crosses" => scala.Some.apply[org.geotools.api.filter.spatial.Crosses](SQLRules.this.ff.crosses(expr1, expr2)) case "ST_Overlaps" => scala.Some.apply[org.geotools.api.filter.spatial.Overlaps](SQLRules.this.ff.overlaps(expr1, expr2)) case "ST_Intersects" => scala.Some.apply[org.geotools.api.filter.spatial.Intersects](SQLRules.this.ff.intersects(expr1, expr2)) case "ST_Within" => scala.Some.apply[org.geotools.api.filter.spatial.Within](SQLRules.this.ff.within(expr1, expr2)) case "ST_Touches" => scala.Some.apply[org.geotools.api.filter.spatial.Touches](SQLRules.this.ff.touches(expr1, expr2)) case "ST_Equals" => scala.Some.apply[org.geotools.api.filter.spatial.Equals](SQLRules.this.ff.equal(expr1, expr2)) case "ST_Disjoint" => scala.Some.apply[org.geotools.api.filter.spatial.Disjoint](SQLRules.this.ff.disjoint(expr1, expr2)) case _ => scala.None }))
81 945 3791 - 3816 Apply org.geotools.api.filter.FilterFactory.contains SQLRules.this.ff.contains(expr1, expr2)
81 946 3786 - 3817 Apply scala.Some.apply scala.Some.apply[org.geotools.api.filter.spatial.Contains](SQLRules.this.ff.contains(expr1, expr2))
81 947 3786 - 3817 Block scala.Some.apply scala.Some.apply[org.geotools.api.filter.spatial.Contains](SQLRules.this.ff.contains(expr1, expr2))
82 948 3857 - 3881 Apply org.geotools.api.filter.FilterFactory.crosses SQLRules.this.ff.crosses(expr1, expr2)
82 949 3852 - 3882 Apply scala.Some.apply scala.Some.apply[org.geotools.api.filter.spatial.Crosses](SQLRules.this.ff.crosses(expr1, expr2))
82 950 3852 - 3882 Block scala.Some.apply scala.Some.apply[org.geotools.api.filter.spatial.Crosses](SQLRules.this.ff.crosses(expr1, expr2))
83 951 3922 - 3947 Apply org.geotools.api.filter.FilterFactory.overlaps SQLRules.this.ff.overlaps(expr1, expr2)
83 952 3917 - 3948 Apply scala.Some.apply scala.Some.apply[org.geotools.api.filter.spatial.Overlaps](SQLRules.this.ff.overlaps(expr1, expr2))
83 953 3917 - 3948 Block scala.Some.apply scala.Some.apply[org.geotools.api.filter.spatial.Overlaps](SQLRules.this.ff.overlaps(expr1, expr2))
84 954 3988 - 4015 Apply org.geotools.api.filter.FilterFactory.intersects SQLRules.this.ff.intersects(expr1, expr2)
84 955 3983 - 4016 Apply scala.Some.apply scala.Some.apply[org.geotools.api.filter.spatial.Intersects](SQLRules.this.ff.intersects(expr1, expr2))
84 956 3983 - 4016 Block scala.Some.apply scala.Some.apply[org.geotools.api.filter.spatial.Intersects](SQLRules.this.ff.intersects(expr1, expr2))
85 957 4056 - 4079 Apply org.geotools.api.filter.FilterFactory.within SQLRules.this.ff.within(expr1, expr2)
85 958 4051 - 4080 Apply scala.Some.apply scala.Some.apply[org.geotools.api.filter.spatial.Within](SQLRules.this.ff.within(expr1, expr2))
85 959 4051 - 4080 Block scala.Some.apply scala.Some.apply[org.geotools.api.filter.spatial.Within](SQLRules.this.ff.within(expr1, expr2))
86 960 4120 - 4144 Apply org.geotools.api.filter.FilterFactory.touches SQLRules.this.ff.touches(expr1, expr2)
86 961 4115 - 4145 Apply scala.Some.apply scala.Some.apply[org.geotools.api.filter.spatial.Touches](SQLRules.this.ff.touches(expr1, expr2))
86 962 4115 - 4145 Block scala.Some.apply scala.Some.apply[org.geotools.api.filter.spatial.Touches](SQLRules.this.ff.touches(expr1, expr2))
87 963 4185 - 4207 Apply org.geotools.api.filter.FilterFactory.equal SQLRules.this.ff.equal(expr1, expr2)
87 964 4180 - 4208 Apply scala.Some.apply scala.Some.apply[org.geotools.api.filter.spatial.Equals](SQLRules.this.ff.equal(expr1, expr2))
87 965 4180 - 4208 Block scala.Some.apply scala.Some.apply[org.geotools.api.filter.spatial.Equals](SQLRules.this.ff.equal(expr1, expr2))
88 966 4248 - 4273 Apply org.geotools.api.filter.FilterFactory.disjoint SQLRules.this.ff.disjoint(expr1, expr2)
88 967 4243 - 4274 Apply scala.Some.apply scala.Some.apply[org.geotools.api.filter.spatial.Disjoint](SQLRules.this.ff.disjoint(expr1, expr2))
88 968 4243 - 4274 Block scala.Some.apply scala.Some.apply[org.geotools.api.filter.spatial.Disjoint](SQLRules.this.ff.disjoint(expr1, expr2))
89 969 4295 - 4299 Select scala.None scala.None
89 970 4295 - 4299 Block scala.None scala.None
97 973 4442 - 4465 Apply org.locationtech.geomesa.spark.sql.SQLRules.scalaUDFtoGTFilter SQLRules.this.scalaUDFtoGTFilter(udf)
97 974 4442 - 4465 Block org.locationtech.geomesa.spark.sql.SQLRules.scalaUDFtoGTFilter SQLRules.this.scalaUDFtoGTFilter(udf)
98 1009 4518 - 5226 Block <nosymbol> { val leftExpr: Option[org.geotools.api.filter.expression.Expression] = SQLRules.this.sparkExprToGTExpr(left); val rightExpr: Option[org.geotools.api.filter.expression.Expression] = SQLRules.this.sparkExprToGTExpr(right); if (leftExpr.isEmpty.||(rightExpr.isEmpty)) scala.None else binaryComp match { case (_: org.apache.spark.sql.catalyst.expressions.EqualTo) => scala.Some.apply[org.geotools.api.filter.PropertyIsEqualTo](SQLRules.this.ff.equals(leftExpr.get, rightExpr.get)) case (_: org.apache.spark.sql.catalyst.expressions.LessThan) => scala.Some.apply[org.geotools.api.filter.PropertyIsLessThan](SQLRules.this.ff.less(leftExpr.get, rightExpr.get)) case (_: org.apache.spark.sql.catalyst.expressions.LessThanOrEqual) => scala.Some.apply[org.geotools.api.filter.PropertyIsLessThanOrEqualTo](SQLRules.this.ff.lessOrEqual(leftExpr.get, rightExpr.get)) case (_: org.apache.spark.sql.catalyst.expressions.GreaterThan) => scala.Some.apply[org.geotools.api.filter.PropertyIsGreaterThan](SQLRules.this.ff.greater(leftExpr.get, rightExpr.get)) case (_: org.apache.spark.sql.catalyst.expressions.GreaterThanOrEqual) => scala.Some.apply[org.geotools.api.filter.PropertyIsGreaterThanOrEqualTo](SQLRules.this.ff.greaterOrEqual(leftExpr.get, rightExpr.get)) case _ => scala.None } }
99 975 4544 - 4567 Apply org.locationtech.geomesa.spark.sql.SQLRules.sparkExprToGTExpr SQLRules.this.sparkExprToGTExpr(left)
100 976 4592 - 4616 Apply org.locationtech.geomesa.spark.sql.SQLRules.sparkExprToGTExpr SQLRules.this.sparkExprToGTExpr(right)
101 977 4649 - 4666 Select scala.Option.isEmpty rightExpr.isEmpty
101 978 4629 - 4666 Apply scala.Boolean.|| leftExpr.isEmpty.||(rightExpr.isEmpty)
102 979 4680 - 4684 Select scala.None scala.None
102 980 4680 - 4684 Block scala.None scala.None
104 1008 4712 - 5216 Match <nosymbol> binaryComp match { case (_: org.apache.spark.sql.catalyst.expressions.EqualTo) => scala.Some.apply[org.geotools.api.filter.PropertyIsEqualTo](SQLRules.this.ff.equals(leftExpr.get, rightExpr.get)) case (_: org.apache.spark.sql.catalyst.expressions.LessThan) => scala.Some.apply[org.geotools.api.filter.PropertyIsLessThan](SQLRules.this.ff.less(leftExpr.get, rightExpr.get)) case (_: org.apache.spark.sql.catalyst.expressions.LessThanOrEqual) => scala.Some.apply[org.geotools.api.filter.PropertyIsLessThanOrEqualTo](SQLRules.this.ff.lessOrEqual(leftExpr.get, rightExpr.get)) case (_: org.apache.spark.sql.catalyst.expressions.GreaterThan) => scala.Some.apply[org.geotools.api.filter.PropertyIsGreaterThan](SQLRules.this.ff.greater(leftExpr.get, rightExpr.get)) case (_: org.apache.spark.sql.catalyst.expressions.GreaterThanOrEqual) => scala.Some.apply[org.geotools.api.filter.PropertyIsGreaterThanOrEqualTo](SQLRules.this.ff.greaterOrEqual(leftExpr.get, rightExpr.get)) case _ => scala.None }
105 981 4788 - 4800 Select scala.Option.get leftExpr.get
105 982 4802 - 4815 Select scala.Option.get rightExpr.get
105 983 4778 - 4816 Apply org.geotools.api.filter.FilterFactory.equals SQLRules.this.ff.equals(leftExpr.get, rightExpr.get)
105 984 4773 - 4817 Apply scala.Some.apply scala.Some.apply[org.geotools.api.filter.PropertyIsEqualTo](SQLRules.this.ff.equals(leftExpr.get, rightExpr.get))
105 985 4773 - 4817 Block scala.Some.apply scala.Some.apply[org.geotools.api.filter.PropertyIsEqualTo](SQLRules.this.ff.equals(leftExpr.get, rightExpr.get))
106 986 4873 - 4885 Select scala.Option.get leftExpr.get
106 987 4887 - 4900 Select scala.Option.get rightExpr.get
106 988 4865 - 4901 Apply org.geotools.api.filter.FilterFactory.less SQLRules.this.ff.less(leftExpr.get, rightExpr.get)
106 989 4860 - 4902 Apply scala.Some.apply scala.Some.apply[org.geotools.api.filter.PropertyIsLessThan](SQLRules.this.ff.less(leftExpr.get, rightExpr.get))
106 990 4860 - 4902 Block scala.Some.apply scala.Some.apply[org.geotools.api.filter.PropertyIsLessThan](SQLRules.this.ff.less(leftExpr.get, rightExpr.get))
107 991 4965 - 4977 Select scala.Option.get leftExpr.get
107 992 4979 - 4992 Select scala.Option.get rightExpr.get
107 993 4950 - 4993 Apply org.geotools.api.filter.FilterFactory.lessOrEqual SQLRules.this.ff.lessOrEqual(leftExpr.get, rightExpr.get)
107 994 4945 - 4994 Apply scala.Some.apply scala.Some.apply[org.geotools.api.filter.PropertyIsLessThanOrEqualTo](SQLRules.this.ff.lessOrEqual(leftExpr.get, rightExpr.get))
107 995 4945 - 4994 Block scala.Some.apply scala.Some.apply[org.geotools.api.filter.PropertyIsLessThanOrEqualTo](SQLRules.this.ff.lessOrEqual(leftExpr.get, rightExpr.get))
108 996 5053 - 5065 Select scala.Option.get leftExpr.get
108 997 5067 - 5080 Select scala.Option.get rightExpr.get
108 998 5042 - 5081 Apply org.geotools.api.filter.FilterFactory.greater SQLRules.this.ff.greater(leftExpr.get, rightExpr.get)
108 999 5037 - 5082 Apply scala.Some.apply scala.Some.apply[org.geotools.api.filter.PropertyIsGreaterThan](SQLRules.this.ff.greater(leftExpr.get, rightExpr.get))
108 1000 5037 - 5082 Block scala.Some.apply scala.Some.apply[org.geotools.api.filter.PropertyIsGreaterThan](SQLRules.this.ff.greater(leftExpr.get, rightExpr.get))
109 1001 5148 - 5160 Select scala.Option.get leftExpr.get
109 1002 5162 - 5175 Select scala.Option.get rightExpr.get
109 1003 5130 - 5176 Apply org.geotools.api.filter.FilterFactory.greaterOrEqual SQLRules.this.ff.greaterOrEqual(leftExpr.get, rightExpr.get)
109 1004 5125 - 5177 Apply scala.Some.apply scala.Some.apply[org.geotools.api.filter.PropertyIsGreaterThanOrEqualTo](SQLRules.this.ff.greaterOrEqual(leftExpr.get, rightExpr.get))
109 1005 5125 - 5177 Block scala.Some.apply scala.Some.apply[org.geotools.api.filter.PropertyIsGreaterThanOrEqualTo](SQLRules.this.ff.greaterOrEqual(leftExpr.get, rightExpr.get))
110 1006 5200 - 5204 Select scala.None scala.None
110 1007 5200 - 5204 Block scala.None scala.None
113 1027 5261 - 5607 Block <nosymbol> { val sparkExpr: org.apache.spark.sql.catalyst.expressions.Expression = unary.child; val gtExpr: Option[org.geotools.api.filter.expression.Expression] = SQLRules.this.sparkExprToGTExpr(sparkExpr); if (gtExpr.isEmpty) scala.None else unary match { case (_: org.apache.spark.sql.catalyst.expressions.IsNotNull) => scala.Some.apply[org.geotools.api.filter.Not](SQLRules.this.ff.not(SQLRules.this.ff.isNull(gtExpr.get))) case (_: org.apache.spark.sql.catalyst.expressions.IsNull) => scala.Some.apply[org.geotools.api.filter.PropertyIsNull](SQLRules.this.ff.isNull(gtExpr.get)) case _ => scala.None } }
114 1010 5288 - 5299 Select org.apache.spark.sql.catalyst.trees.UnaryLike.child unary.child
115 1011 5321 - 5349 Apply org.locationtech.geomesa.spark.sql.SQLRules.sparkExprToGTExpr SQLRules.this.sparkExprToGTExpr(sparkExpr)
116 1012 5362 - 5376 Select scala.Option.isEmpty gtExpr.isEmpty
117 1013 5388 - 5392 Select scala.None scala.None
117 1014 5388 - 5392 Block scala.None scala.None
119 1026 5418 - 5597 Match <nosymbol> unary match { case (_: org.apache.spark.sql.catalyst.expressions.IsNotNull) => scala.Some.apply[org.geotools.api.filter.Not](SQLRules.this.ff.not(SQLRules.this.ff.isNull(gtExpr.get))) case (_: org.apache.spark.sql.catalyst.expressions.IsNull) => scala.Some.apply[org.geotools.api.filter.PropertyIsNull](SQLRules.this.ff.isNull(gtExpr.get)) case _ => scala.None }
120 1015 5487 - 5497 Select scala.Option.get gtExpr.get
120 1016 5477 - 5498 Apply org.geotools.api.filter.FilterFactory.isNull SQLRules.this.ff.isNull(gtExpr.get)
120 1017 5470 - 5499 Apply org.geotools.api.filter.FilterFactory.not SQLRules.this.ff.not(SQLRules.this.ff.isNull(gtExpr.get))
120 1018 5465 - 5500 Apply scala.Some.apply scala.Some.apply[org.geotools.api.filter.Not](SQLRules.this.ff.not(SQLRules.this.ff.isNull(gtExpr.get)))
120 1019 5465 - 5500 Block scala.Some.apply scala.Some.apply[org.geotools.api.filter.Not](SQLRules.this.ff.not(SQLRules.this.ff.isNull(gtExpr.get)))
121 1020 5546 - 5556 Select scala.Option.get gtExpr.get
121 1021 5536 - 5557 Apply org.geotools.api.filter.FilterFactory.isNull SQLRules.this.ff.isNull(gtExpr.get)
121 1022 5531 - 5558 Apply scala.Some.apply scala.Some.apply[org.geotools.api.filter.PropertyIsNull](SQLRules.this.ff.isNull(gtExpr.get))
121 1023 5531 - 5558 Block scala.Some.apply scala.Some.apply[org.geotools.api.filter.PropertyIsNull](SQLRules.this.ff.isNull(gtExpr.get))
122 1024 5581 - 5585 Select scala.None scala.None
122 1025 5581 - 5585 Block scala.None scala.None
126 1028 5650 - 5688 TypeApply scala.Any.isInstanceOf expr.isInstanceOf[org.apache.spark.sql.sedona_sql.expressions.ST_Predicate]
126 1029 5636 - 5688 Apply scala.Boolean.&& org.locationtech.geomesa.spark.`package`.haveSedona.&&(expr.isInstanceOf[org.apache.spark.sql.sedona_sql.expressions.ST_Predicate])
126 1035 5632 - 5905 If <nosymbol> if (org.locationtech.geomesa.spark.`package`.haveSedona.&&(expr.isInstanceOf[org.apache.spark.sql.sedona_sql.expressions.ST_Predicate])) SQLRules.this.sedonaExprToGTFilter(expr.asInstanceOf[org.apache.spark.sql.sedona_sql.expressions.ST_Predicate]) else { (if (SQLRules.this.logger.underlying.isDebugEnabled()) SQLRules.this.logger.underlying.debug("Got expr: {}. Don\'t know how to turn this into a GeoTools Expression.", (expr: AnyRef)) else (): Unit); scala.None }
127 1030 5723 - 5761 TypeApply scala.Any.asInstanceOf expr.asInstanceOf[org.apache.spark.sql.sedona_sql.expressions.ST_Predicate]
127 1031 5702 - 5762 Apply org.locationtech.geomesa.spark.sql.SQLRules.sedonaExprToGTFilter SQLRules.this.sedonaExprToGTFilter(expr.asInstanceOf[org.apache.spark.sql.sedona_sql.expressions.ST_Predicate])
127 1032 5702 - 5762 Block org.locationtech.geomesa.spark.sql.SQLRules.sedonaExprToGTFilter SQLRules.this.sedonaExprToGTFilter(expr.asInstanceOf[org.apache.spark.sql.sedona_sql.expressions.ST_Predicate])
128 1034 5778 - 5905 Block <nosymbol> { (if (SQLRules.this.logger.underlying.isDebugEnabled()) SQLRules.this.logger.underlying.debug("Got expr: {}. Don\'t know how to turn this into a GeoTools Expression.", (expr: AnyRef)) else (): Unit); scala.None }
130 1033 5891 - 5895 Select scala.None scala.None
137 1036 6061 - 6067 Select org.locationtech.geomesa.spark.jts.rules.GeometryLiteral.geom g.geom
137 1037 6050 - 6068 Apply org.geotools.api.filter.FilterFactory.literal SQLRules.this.ff.literal(g.geom)
137 1038 6045 - 6069 Apply scala.Some.apply scala.Some.apply[org.geotools.api.filter.expression.Literal](SQLRules.this.ff.literal(g.geom))
137 1039 6045 - 6069 Block scala.Some.apply scala.Some.apply[org.geotools.api.filter.expression.Literal](SQLRules.this.ff.literal(g.geom))
139 1040 6105 - 6124 Apply java.lang.Object.!= a.name.!=("__fid__")
140 1041 6151 - 6157 Select org.apache.spark.sql.catalyst.expressions.AttributeReference.name a.name
140 1042 6139 - 6158 Apply org.geotools.api.filter.FilterFactory.property SQLRules.this.ff.property(a.name)
140 1043 6134 - 6159 Apply scala.Some.apply scala.Some.apply[org.geotools.api.filter.expression.PropertyName](SQLRules.this.ff.property(a.name))
140 1044 6134 - 6159 Block scala.Some.apply scala.Some.apply[org.geotools.api.filter.expression.PropertyName](SQLRules.this.ff.property(a.name))
142 1056 6178 - 6563 Block <nosymbol> { <stable> <accessor> lazy val zone: java.time.ZoneId = c.timeZoneId.map[java.time.ZoneId]({ ((x$1: String) => java.time.ZoneId.of(x$1)) }).orNull[java.time.ZoneId](scala.Predef.$conforms[Null]); SQLRules.this.sparkExprToGTExpr(c.child).map[org.geotools.api.filter.expression.Expression](((x0$1: org.geotools.api.filter.expression.Expression) => x0$1 match { case (lit @ (_: org.geotools.api.filter.expression.Literal)) if lit.getValue().isInstanceOf[java.util.Date].&&(zone.!=(null)) => { val date: java.time.LocalDateTime = java.time.LocalDateTime.ofInstant(lit.getValue().asInstanceOf[java.util.Date].toInstant(), zone); SQLRules.this.ff.literal(new java.util.Date(date.atZone(java.time.ZoneOffset.UTC).toInstant().toEpochMilli())) } case (e @ _) => e })) }
144 1045 6262 - 6269 Select org.apache.spark.sql.catalyst.expressions.Cast.child c.child
144 1055 6244 - 6563 Apply scala.Option.map SQLRules.this.sparkExprToGTExpr(c.child).map[org.geotools.api.filter.expression.Expression](((x0$1: org.geotools.api.filter.expression.Expression) => x0$1 match { case (lit @ (_: org.geotools.api.filter.expression.Literal)) if lit.getValue().isInstanceOf[java.util.Date].&&(zone.!=(null)) => { val date: java.time.LocalDateTime = java.time.LocalDateTime.ofInstant(lit.getValue().asInstanceOf[java.util.Date].toInstant(), zone); SQLRules.this.ff.literal(new java.util.Date(date.atZone(java.time.ZoneOffset.UTC).toInstant().toEpochMilli())) } case (e @ _) => e }))
145 1046 6343 - 6355 Apply java.lang.Object.!= zone.!=(null)
145 1047 6308 - 6355 Apply scala.Boolean.&& lit.getValue().isInstanceOf[java.util.Date].&&(zone.!=(null))
145 1053 6356 - 6535 Block <nosymbol> { val date: java.time.LocalDateTime = java.time.LocalDateTime.ofInstant(lit.getValue().asInstanceOf[java.util.Date].toInstant(), zone); SQLRules.this.ff.literal(new java.util.Date(date.atZone(java.time.ZoneOffset.UTC).toInstant().toEpochMilli())) }
146 1048 6404 - 6445 Apply java.util.Date.toInstant lit.getValue().asInstanceOf[java.util.Date].toInstant()
146 1049 6380 - 6452 Apply java.time.LocalDateTime.ofInstant java.time.LocalDateTime.ofInstant(lit.getValue().asInstanceOf[java.util.Date].toInstant(), zone)
147 1050 6483 - 6533 Apply java.time.Instant.toEpochMilli date.atZone(java.time.ZoneOffset.UTC).toInstant().toEpochMilli()
147 1051 6474 - 6534 Apply java.util.Date.<init> new java.util.Date(date.atZone(java.time.ZoneOffset.UTC).toInstant().toEpochMilli())
147 1052 6463 - 6535 Apply org.geotools.api.filter.FilterFactory.literal SQLRules.this.ff.literal(new java.util.Date(date.atZone(java.time.ZoneOffset.UTC).toInstant().toEpochMilli()))
148 1054 6554 - 6555 Ident org.locationtech.geomesa.spark.sql.SQLRules.e e
151 1057 6606 - 6626 Select org.apache.spark.sql.types.DataTypes.StringType org.apache.spark.sql.types.DataTypes.StringType
151 1058 6590 - 6626 Apply java.lang.Object.== lit.dataType.==(org.apache.spark.sql.types.DataTypes.StringType)
154 1059 6805 - 6814 Select org.apache.spark.sql.catalyst.expressions.Literal.value lit.value
154 1060 6820 - 6830 Apply scala.Any.toString x$1.toString()
154 1061 6832 - 6832 TypeApply scala.Predef.$conforms scala.Predef.$conforms[Null]
154 1062 6798 - 6838 ApplyToImplicitArgs scala.Option.orNull scala.Option.apply[Any](lit.value).map[String](((x$1: Any) => x$1.toString())).orNull[String](scala.Predef.$conforms[Null])
154 1063 6787 - 6839 Apply org.geotools.api.filter.FilterFactory.literal SQLRules.this.ff.literal(scala.Option.apply[Any](lit.value).map[String](((x$1: Any) => x$1.toString())).orNull[String](scala.Predef.$conforms[Null]))
154 1064 6782 - 6840 Apply scala.Some.apply scala.Some.apply[org.geotools.api.filter.expression.Literal](SQLRules.this.ff.literal(scala.Option.apply[Any](lit.value).map[String](((x$1: Any) => x$1.toString())).orNull[String](scala.Predef.$conforms[Null])))
154 1065 6782 - 6840 Block scala.Some.apply scala.Some.apply[org.geotools.api.filter.expression.Literal](SQLRules.this.ff.literal(scala.Option.apply[Any](lit.value).map[String](((x$1: Any) => x$1.toString())).orNull[String](scala.Predef.$conforms[Null])))
156 1066 6883 - 6906 Select org.apache.spark.sql.types.DataTypes.TimestampType org.apache.spark.sql.types.DataTypes.TimestampType
156 1067 6867 - 6906 Apply java.lang.Object.== lit.dataType.==(org.apache.spark.sql.types.DataTypes.TimestampType)
158 1068 6989 - 7024 Apply scala.Long./ lit.value.asInstanceOf[Long]./(1000)
158 1069 6980 - 7025 Apply java.util.Date.<init> new java.util.Date(lit.value.asInstanceOf[Long]./(1000))
158 1070 6969 - 7026 Apply org.geotools.api.filter.FilterFactory.literal SQLRules.this.ff.literal(new java.util.Date(lit.value.asInstanceOf[Long]./(1000)))
158 1071 6964 - 7027 Apply scala.Some.apply scala.Some.apply[org.geotools.api.filter.expression.Literal](SQLRules.this.ff.literal(new java.util.Date(lit.value.asInstanceOf[Long]./(1000))))
158 1072 6964 - 7027 Block scala.Some.apply scala.Some.apply[org.geotools.api.filter.expression.Literal](SQLRules.this.ff.literal(new java.util.Date(lit.value.asInstanceOf[Long]./(1000))))
160 1073 7068 - 7113 TypeApply scala.Any.isInstanceOf lit.dataType.isInstanceOf[org.apache.spark.sql.sedona_sql.UDT.GeometryUDT]
160 1074 7054 - 7113 Apply scala.Boolean.&& org.locationtech.geomesa.spark.`package`.haveSedona.&&(lit.dataType.isInstanceOf[org.apache.spark.sql.sedona_sql.UDT.GeometryUDT])
161 1075 7197 - 7206 Select org.apache.spark.sql.catalyst.expressions.Literal.value lit.value
161 1076 7139 - 7207 Apply org.apache.spark.sql.sedona_sql.UDT.GeometryUDT.deserialize lit.dataType.asInstanceOf[org.apache.spark.sql.sedona_sql.UDT.GeometryUDT].deserialize(lit.value)
161 1077 7128 - 7208 Apply org.geotools.api.filter.FilterFactory.literal SQLRules.this.ff.literal(lit.dataType.asInstanceOf[org.apache.spark.sql.sedona_sql.UDT.GeometryUDT].deserialize(lit.value))
161 1078 7123 - 7209 Apply scala.Some.apply scala.Some.apply[org.geotools.api.filter.expression.Literal](SQLRules.this.ff.literal(lit.dataType.asInstanceOf[org.apache.spark.sql.sedona_sql.UDT.GeometryUDT].deserialize(lit.value)))
161 1079 7123 - 7209 Block scala.Some.apply scala.Some.apply[org.geotools.api.filter.expression.Literal](SQLRules.this.ff.literal(lit.dataType.asInstanceOf[org.apache.spark.sql.sedona_sql.UDT.GeometryUDT].deserialize(lit.value)))
164 1080 7258 - 7267 Select org.apache.spark.sql.catalyst.expressions.Literal.value lit.value
164 1081 7247 - 7268 Apply org.geotools.api.filter.FilterFactory.literal SQLRules.this.ff.literal(lit.value)
164 1082 7242 - 7269 Apply scala.Some.apply scala.Some.apply[org.geotools.api.filter.expression.Literal](SQLRules.this.ff.literal(lit.value))
164 1083 7242 - 7269 Block scala.Some.apply scala.Some.apply[org.geotools.api.filter.expression.Literal](SQLRules.this.ff.literal(lit.value))
166 1085 7282 - 7367 Block <nosymbol> { (if (SQLRules.this.logger.underlying.isDebugEnabled()) SQLRules.this.logger.underlying.debug("Can\'t turn expression into geotools: {}", (expression: AnyRef)) else (): Unit); scala.None }
168 1084 7363 - 7367 Select scala.None scala.None
175 1086 7636 - 7646 Apply scala.Some.apply scala.Some.apply[org.locationtech.jts.geom.Geometry](geom)
175 1087 7636 - 7646 Block scala.Some.apply scala.Some.apply[org.locationtech.jts.geom.Geometry](geom)
176 1088 7697 - 7715 Apply org.locationtech.geomesa.spark.sql.SQLRules.SpatialOptimizationsRule.extractGeometry SpatialOptimizationsRule.this.extractGeometry(r)
176 1089 7671 - 7716 Apply scala.Option.orElse SpatialOptimizationsRule.this.extractGeometry(l).orElse[org.locationtech.jts.geom.Geometry](SpatialOptimizationsRule.this.extractGeometry(r))
176 1090 7671 - 7716 Block scala.Option.orElse SpatialOptimizationsRule.this.extractGeometry(l).orElse[org.locationtech.jts.geom.Geometry](SpatialOptimizationsRule.this.extractGeometry(r))
177 1091 7802 - 7806 Ident org.locationtech.geomesa.spark.sql.SQLRules.SpatialOptimizationsRule.$anonfun.geom geom
177 1092 7767 - 7767 Apply org.locationtech.geomesa.spark.sql.SQLRules.SpatialOptimizationsRule.$anonfun.<init> new $anonfun()
177 1093 7743 - 7808 Apply scala.collection.TraversableOnce.collectFirst u.children.collectFirst[org.locationtech.jts.geom.Geometry](({ @SerialVersionUID(value = 0) final <synthetic> class $anonfun extends scala.runtime.AbstractPartialFunction[org.apache.spark.sql.catalyst.expressions.Expression,org.locationtech.jts.geom.Geometry] with Serializable { def <init>(): <$anon: org.apache.spark.sql.catalyst.expressions.Expression => org.locationtech.jts.geom.Geometry> = { $anonfun.super.<init>(); () }; final override def applyOrElse[A1 <: org.apache.spark.sql.catalyst.expressions.Expression, B1 >: org.locationtech.jts.geom.Geometry](x1: A1, default: A1 => B1): B1 = ((x1.asInstanceOf[org.apache.spark.sql.catalyst.expressions.Expression]: org.apache.spark.sql.catalyst.expressions.Expression): org.apache.spark.sql.catalyst.expressions.Expression @unchecked) match { case (repr: Any, geom: org.locationtech.jts.geom.Geometry)org.locationtech.geomesa.spark.jts.rules.GeometryLiteral(_, (geom @ _)) => geom case (defaultCase$ @ _) => default.apply(x1) }; final def isDefinedAt(x1: org.apache.spark.sql.catalyst.expressions.Expression): Boolean = ((x1.asInstanceOf[org.apache.spark.sql.catalyst.expressions.Expression]: org.apache.spark.sql.catalyst.expressions.Expression): org.apache.spark.sql.catalyst.expressions.Expression @unchecked) match { case (repr: Any, geom: org.locationtech.jts.geom.Geometry)org.locationtech.geomesa.spark.jts.rules.GeometryLiteral(_, (geom @ _)) => true case (defaultCase$ @ _) => false } }; new $anonfun() }: PartialFunction[org.apache.spark.sql.catalyst.expressions.Expression,org.locationtech.jts.geom.Geometry]))
177 1094 7743 - 7808 Block scala.collection.TraversableOnce.collectFirst u.children.collectFirst[org.locationtech.jts.geom.Geometry](({ @SerialVersionUID(value = 0) final <synthetic> class $anonfun extends scala.runtime.AbstractPartialFunction[org.apache.spark.sql.catalyst.expressions.Expression,org.locationtech.jts.geom.Geometry] with Serializable { def <init>(): <$anon: org.apache.spark.sql.catalyst.expressions.Expression => org.locationtech.jts.geom.Geometry> = { $anonfun.super.<init>(); () }; final override def applyOrElse[A1 <: org.apache.spark.sql.catalyst.expressions.Expression, B1 >: org.locationtech.jts.geom.Geometry](x1: A1, default: A1 => B1): B1 = ((x1.asInstanceOf[org.apache.spark.sql.catalyst.expressions.Expression]: org.apache.spark.sql.catalyst.expressions.Expression): org.apache.spark.sql.catalyst.expressions.Expression @unchecked) match { case (repr: Any, geom: org.locationtech.jts.geom.Geometry)org.locationtech.geomesa.spark.jts.rules.GeometryLiteral(_, (geom @ _)) => geom case (defaultCase$ @ _) => default.apply(x1) }; final def isDefinedAt(x1: org.apache.spark.sql.catalyst.expressions.Expression): Boolean = ((x1.asInstanceOf[org.apache.spark.sql.catalyst.expressions.Expression]: org.apache.spark.sql.catalyst.expressions.Expression): org.apache.spark.sql.catalyst.expressions.Expression @unchecked) match { case (repr: Any, geom: org.locationtech.jts.geom.Geometry)org.locationtech.geomesa.spark.jts.rules.GeometryLiteral(_, (geom @ _)) => true case (defaultCase$ @ _) => false } }; new $anonfun() }: PartialFunction[org.apache.spark.sql.catalyst.expressions.Expression,org.locationtech.jts.geom.Geometry]))
178 1095 7825 - 7829 Select scala.None scala.None
178 1096 7825 - 7829 Block scala.None scala.None
183 1097 8031 - 8071 Apply org.locationtech.geomesa.spark.sql.RelationUtils.gridIdMapper RelationUtils.gridIdMapper(x$2, envelopes)
183 1098 8008 - 8072 Apply scala.Option.map SpatialOptimizationsRule.this.extractGeometry(e).map[List[Int]](((x$2: org.locationtech.jts.geom.Geometry) => RelationUtils.gridIdMapper(x$2, envelopes)))
187 1106 8220 - 8482 Apply scala.Option.exists join.condition.exists(((x0$1: org.apache.spark.sql.catalyst.expressions.Expression) => x0$1 match { case (u @ (_: org.apache.spark.sql.catalyst.expressions.ScalaUDF)) if u.function.isInstanceOf[(org.locationtech.jts.geom.Geometry, org.locationtech.jts.geom.Geometry) => Boolean] => u.children.head.isInstanceOf[org.apache.spark.sql.catalyst.expressions.AttributeReference].&&(u.children.apply(1).isInstanceOf[org.apache.spark.sql.catalyst.expressions.AttributeReference]) case _ => false }))
188 1099 8272 - 8338 TypeApply scala.Any.isInstanceOf u.function.isInstanceOf[(org.locationtech.jts.geom.Geometry, org.locationtech.jts.geom.Geometry) => Boolean]
189 1100 8415 - 8416 Literal <nosymbol> 1
189 1101 8404 - 8450 TypeApply scala.Any.isInstanceOf u.children.apply(1).isInstanceOf[org.apache.spark.sql.catalyst.expressions.AttributeReference]
189 1102 8352 - 8450 Apply scala.Boolean.&& u.children.head.isInstanceOf[org.apache.spark.sql.catalyst.expressions.AttributeReference].&&(u.children.apply(1).isInstanceOf[org.apache.spark.sql.catalyst.expressions.AttributeReference])
189 1103 8352 - 8450 Block scala.Boolean.&& u.children.head.isInstanceOf[org.apache.spark.sql.catalyst.expressions.AttributeReference].&&(u.children.apply(1).isInstanceOf[org.apache.spark.sql.catalyst.expressions.AttributeReference])
190 1104 8469 - 8474 Literal <nosymbol> false
190 1105 8469 - 8474 Block <nosymbol> false
197 1107 8763 - 8781 Select scala.Option.get join.condition.get
197 1108 8740 - 8782 Apply org.locationtech.geomesa.spark.sql.GeoMesaRelation.join leftRel.join(rightRel, join.condition.get)
198 1109 8820 - 8824 Ident org.locationtech.geomesa.spark.sql.SQLRules.SpatialOptimizationsRule.join join
199 1117 8865 - 9066 Block <nosymbol> { val newLogicalRelLeft: org.apache.spark.sql.execution.datasources.LogicalRelation = { <artifact> val qual$1: org.locationtech.geomesa.spark.sql.SparkVersions.CopyLogicalRelation = SparkVersions.copy(left); <artifact> val x$1: Seq[org.apache.spark.sql.catalyst.expressions.AttributeReference] @scala.reflect.internal.annotations.uncheckedBounds = left.output.++[org.apache.spark.sql.catalyst.expressions.AttributeReference, Seq[org.apache.spark.sql.catalyst.expressions.AttributeReference]](right.output)(collection.this.Seq.canBuildFrom[org.apache.spark.sql.catalyst.expressions.AttributeReference]); <artifact> val x$2: org.locationtech.geomesa.spark.sql.GeoMesaJoinRelation = joinRelation; qual$1.apply(x$2, x$1) }; { <artifact> val qual$2: org.locationtech.geomesa.spark.sql.SparkVersions.CopyJoin = SparkVersions.copy(join); <artifact> val x$3: org.apache.spark.sql.execution.datasources.LogicalRelation = newLogicalRelLeft; <artifact> val x$4: org.apache.spark.sql.catalyst.plans.logical.LogicalPlan = qual$2.apply$default$2; <artifact> val x$5: org.apache.spark.sql.catalyst.plans.JoinType = qual$2.apply$default$3; <artifact> val x$6: Option[org.apache.spark.sql.catalyst.expressions.Expression] @scala.reflect.internal.annotations.uncheckedBounds = qual$2.apply$default$4; qual$2.apply(x$3, x$4, x$5, x$6) } }
200 1110 8910 - 8934 Apply org.locationtech.geomesa.spark.sql.SparkVersions.copy SparkVersions.copy(left)
200 1111 8959 - 8971 Select org.apache.spark.sql.execution.datasources.LogicalRelation.output right.output
200 1112 8956 - 8956 TypeApply scala.collection.Seq.canBuildFrom collection.this.Seq.canBuildFrom[org.apache.spark.sql.catalyst.expressions.AttributeReference]
200 1113 8944 - 8971 ApplyToImplicitArgs scala.collection.TraversableLike.++ left.output.++[org.apache.spark.sql.catalyst.expressions.AttributeReference, Seq[org.apache.spark.sql.catalyst.expressions.AttributeReference]](right.output)(collection.this.Seq.canBuildFrom[org.apache.spark.sql.catalyst.expressions.AttributeReference])
200 1114 8910 - 8997 Apply org.locationtech.geomesa.spark.sql.SparkVersions.CopyLogicalRelation.apply qual$1.apply(x$2, x$1)
201 1115 9016 - 9040 Apply org.locationtech.geomesa.spark.sql.SparkVersions.copy SparkVersions.copy(join)
201 1116 9016 - 9066 Apply org.locationtech.geomesa.spark.sql.SparkVersions.CopyJoin.apply qual$2.apply(x$3, x$4, x$5, x$6)
211 1118 9444 - 9462 Select scala.Option.get join.condition.get
211 1119 9421 - 9463 Apply org.locationtech.geomesa.spark.sql.GeoMesaRelation.join leftRel.join(rightRel, join.condition.get)
212 1120 9501 - 9505 Ident org.locationtech.geomesa.spark.sql.SQLRules.SpatialOptimizationsRule.join join
213 1131 9546 - 9878 Block <nosymbol> { val newLogicalRelLeft: org.apache.spark.sql.execution.datasources.LogicalRelation = { <artifact> val qual$3: org.locationtech.geomesa.spark.sql.SparkVersions.CopyLogicalRelation = SparkVersions.copy(left); <artifact> val x$7: Seq[org.apache.spark.sql.catalyst.expressions.AttributeReference] @scala.reflect.internal.annotations.uncheckedBounds = left.output.++[org.apache.spark.sql.catalyst.expressions.AttributeReference, Seq[org.apache.spark.sql.catalyst.expressions.AttributeReference]](right.output)(collection.this.Seq.canBuildFrom[org.apache.spark.sql.catalyst.expressions.AttributeReference]); <artifact> val x$8: org.locationtech.geomesa.spark.sql.GeoMesaJoinRelation = joinRelation; qual$3.apply(x$8, x$7) }; val newProjectLeft: org.apache.spark.sql.catalyst.plans.logical.Project = leftProject.copy(leftProjectList.++[org.apache.spark.sql.catalyst.expressions.NamedExpression, Seq[org.apache.spark.sql.catalyst.expressions.NamedExpression]](rightProjectList)(collection.this.Seq.canBuildFrom[org.apache.spark.sql.catalyst.expressions.NamedExpression]), newLogicalRelLeft); { <artifact> val qual$4: org.locationtech.geomesa.spark.sql.SparkVersions.CopyJoin = SparkVersions.copy(join); <artifact> val x$9: org.apache.spark.sql.catalyst.plans.logical.Project = newProjectLeft; <artifact> val x$10: org.apache.spark.sql.catalyst.plans.logical.LogicalPlan = qual$4.apply$default$2; <artifact> val x$11: org.apache.spark.sql.catalyst.plans.JoinType = qual$4.apply$default$3; <artifact> val x$12: Option[org.apache.spark.sql.catalyst.expressions.Expression] @scala.reflect.internal.annotations.uncheckedBounds = qual$4.apply$default$4; qual$4.apply(x$9, x$10, x$11, x$12) } }
214 1121 9591 - 9615 Apply org.locationtech.geomesa.spark.sql.SparkVersions.copy SparkVersions.copy(left)
214 1122 9640 - 9652 Select org.apache.spark.sql.execution.datasources.LogicalRelation.output right.output
214 1123 9637 - 9637 TypeApply scala.collection.Seq.canBuildFrom collection.this.Seq.canBuildFrom[org.apache.spark.sql.catalyst.expressions.AttributeReference]
214 1124 9625 - 9652 ApplyToImplicitArgs scala.collection.TraversableLike.++ left.output.++[org.apache.spark.sql.catalyst.expressions.AttributeReference, Seq[org.apache.spark.sql.catalyst.expressions.AttributeReference]](right.output)(collection.this.Seq.canBuildFrom[org.apache.spark.sql.catalyst.expressions.AttributeReference])
214 1125 9591 - 9678 Apply org.locationtech.geomesa.spark.sql.SparkVersions.CopyLogicalRelation.apply qual$3.apply(x$8, x$7)
215 1126 9765 - 9765 TypeApply scala.collection.Seq.canBuildFrom collection.this.Seq.canBuildFrom[org.apache.spark.sql.catalyst.expressions.NamedExpression]
215 1127 9749 - 9784 ApplyToImplicitArgs scala.collection.TraversableLike.++ leftProjectList.++[org.apache.spark.sql.catalyst.expressions.NamedExpression, Seq[org.apache.spark.sql.catalyst.expressions.NamedExpression]](rightProjectList)(collection.this.Seq.canBuildFrom[org.apache.spark.sql.catalyst.expressions.NamedExpression])
215 1128 9718 - 9812 Apply org.apache.spark.sql.catalyst.plans.logical.Project.copy leftProject.copy(leftProjectList.++[org.apache.spark.sql.catalyst.expressions.NamedExpression, Seq[org.apache.spark.sql.catalyst.expressions.NamedExpression]](rightProjectList)(collection.this.Seq.canBuildFrom[org.apache.spark.sql.catalyst.expressions.NamedExpression]), newLogicalRelLeft)
216 1129 9831 - 9855 Apply org.locationtech.geomesa.spark.sql.SparkVersions.copy SparkVersions.copy(join)
216 1130 9831 - 9878 Apply org.locationtech.geomesa.spark.sql.SparkVersions.CopyJoin.apply qual$4.apply(x$9, x$10, x$11, x$12)
233 1153 10414 - 11134 Apply scala.Function.unlift scala.Function.unlift[org.apache.spark.sql.catalyst.plans.logical.LogicalPlan, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan](((plan: org.apache.spark.sql.catalyst.plans.logical.LogicalPlan) => plan match { case (agg @ (_: org.apache.spark.sql.catalyst.plans.logical.Aggregate)) => agg.child match { case (projectList: Seq[org.apache.spark.sql.catalyst.expressions.NamedExpression], child: org.apache.spark.sql.catalyst.plans.logical.LogicalPlan)org.apache.spark.sql.catalyst.plans.logical.Project((projectList @ _), (join @ (_: org.apache.spark.sql.catalyst.plans.logical.Join))) => { val alteredJoin: org.apache.spark.sql.catalyst.plans.logical.LogicalPlan = SQLRules.this.SpatialOptimizationsRule.alterJoin(join); scala.Some.apply[org.apache.spark.sql.catalyst.plans.logical.Aggregate](Aggregates.instance(agg.groupingExpressions, agg.aggregateExpressions, org.apache.spark.sql.catalyst.plans.logical.Project.apply(projectList, alteredJoin), scala.None)) } case (join @ (_: org.apache.spark.sql.catalyst.plans.logical.Join)) => { val alteredJoin: org.apache.spark.sql.catalyst.plans.logical.LogicalPlan = SQLRules.this.SpatialOptimizationsRule.alterJoin(join); scala.Some.apply[org.apache.spark.sql.catalyst.plans.logical.Aggregate](Aggregates.instance(agg.groupingExpressions, agg.aggregateExpressions, alteredJoin, scala.None)) } case _ => scala.None } case _ => scala.None }))
236 1132 10525 - 10534 Select org.apache.spark.sql.catalyst.plans.logical.Aggregate.child agg.child
236 1150 10525 - 11085 Match <nosymbol> agg.child match { case (projectList: Seq[org.apache.spark.sql.catalyst.expressions.NamedExpression], child: org.apache.spark.sql.catalyst.plans.logical.LogicalPlan)org.apache.spark.sql.catalyst.plans.logical.Project((projectList @ _), (join @ (_: org.apache.spark.sql.catalyst.plans.logical.Join))) => { val alteredJoin: org.apache.spark.sql.catalyst.plans.logical.LogicalPlan = SQLRules.this.SpatialOptimizationsRule.alterJoin(join); scala.Some.apply[org.apache.spark.sql.catalyst.plans.logical.Aggregate](Aggregates.instance(agg.groupingExpressions, agg.aggregateExpressions, org.apache.spark.sql.catalyst.plans.logical.Project.apply(projectList, alteredJoin), scala.None)) } case (join @ (_: org.apache.spark.sql.catalyst.plans.logical.Join)) => { val alteredJoin: org.apache.spark.sql.catalyst.plans.logical.LogicalPlan = SQLRules.this.SpatialOptimizationsRule.alterJoin(join); scala.Some.apply[org.apache.spark.sql.catalyst.plans.logical.Aggregate](Aggregates.instance(agg.groupingExpressions, agg.aggregateExpressions, alteredJoin, scala.None)) } case _ => scala.None }
237 1140 10597 - 10812 Block <nosymbol> { val alteredJoin: org.apache.spark.sql.catalyst.plans.logical.LogicalPlan = SQLRules.this.SpatialOptimizationsRule.alterJoin(join); scala.Some.apply[org.apache.spark.sql.catalyst.plans.logical.Aggregate](Aggregates.instance(agg.groupingExpressions, agg.aggregateExpressions, org.apache.spark.sql.catalyst.plans.logical.Project.apply(projectList, alteredJoin), scala.None)) }
238 1133 10636 - 10676 Apply org.locationtech.geomesa.spark.sql.SQLRules.SpatialOptimizationsRule.alterJoin SQLRules.this.SpatialOptimizationsRule.alterJoin(join)
239 1134 10720 - 10743 Select org.apache.spark.sql.catalyst.plans.logical.Aggregate.groupingExpressions agg.groupingExpressions
239 1135 10745 - 10769 Select org.apache.spark.sql.catalyst.plans.logical.Aggregate.aggregateExpressions agg.aggregateExpressions
239 1136 10771 - 10804 Apply org.apache.spark.sql.catalyst.plans.logical.Project.apply org.apache.spark.sql.catalyst.plans.logical.Project.apply(projectList, alteredJoin)
239 1137 10806 - 10810 Select scala.None scala.None
239 1138 10700 - 10811 Apply org.locationtech.geomesa.spark.sql.Aggregates.instance Aggregates.instance(agg.groupingExpressions, agg.aggregateExpressions, org.apache.spark.sql.catalyst.plans.logical.Project.apply(projectList, alteredJoin), scala.None)
239 1139 10695 - 10812 Apply scala.Some.apply scala.Some.apply[org.apache.spark.sql.catalyst.plans.logical.Aggregate](Aggregates.instance(agg.groupingExpressions, agg.aggregateExpressions, org.apache.spark.sql.catalyst.plans.logical.Project.apply(projectList, alteredJoin), scala.None))
240 1147 10845 - 11038 Block <nosymbol> { val alteredJoin: org.apache.spark.sql.catalyst.plans.logical.LogicalPlan = SQLRules.this.SpatialOptimizationsRule.alterJoin(join); scala.Some.apply[org.apache.spark.sql.catalyst.plans.logical.Aggregate](Aggregates.instance(agg.groupingExpressions, agg.aggregateExpressions, alteredJoin, scala.None)) }
241 1141 10884 - 10924 Apply org.locationtech.geomesa.spark.sql.SQLRules.SpatialOptimizationsRule.alterJoin SQLRules.this.SpatialOptimizationsRule.alterJoin(join)
242 1142 10968 - 10991 Select org.apache.spark.sql.catalyst.plans.logical.Aggregate.groupingExpressions agg.groupingExpressions
242 1143 10993 - 11017 Select org.apache.spark.sql.catalyst.plans.logical.Aggregate.aggregateExpressions agg.aggregateExpressions
242 1144 11032 - 11036 Select scala.None scala.None
242 1145 10948 - 11037 Apply org.locationtech.geomesa.spark.sql.Aggregates.instance Aggregates.instance(agg.groupingExpressions, agg.aggregateExpressions, alteredJoin, scala.None)
242 1146 10943 - 11038 Apply scala.Some.apply scala.Some.apply[org.apache.spark.sql.catalyst.plans.logical.Aggregate](Aggregates.instance(agg.groupingExpressions, agg.aggregateExpressions, alteredJoin, scala.None))
243 1148 11065 - 11069 Select scala.None scala.None
243 1149 11065 - 11069 Block scala.None scala.None
245 1151 11108 - 11112 Select scala.None scala.None
245 1152 11108 - 11112 Block scala.None scala.None
249 1195 11204 - 11204 Apply org.locationtech.geomesa.spark.sql.SQLRules.SpatialOptimizationsRule.$anonfun.<init> new $anonfun()
251 1154 11243 - 11258 Apply org.locationtech.geomesa.spark.sql.SQLRules.SpatialOptimizationsRule.alterJoin SpatialOptimizationsRule.this.alterJoin(join)
251 1155 11243 - 11258 Block org.locationtech.geomesa.spark.sql.SQLRules.SpatialOptimizationsRule.alterJoin SpatialOptimizationsRule.this.alterJoin(join)
252 1156 11296 - 11300 Ident org.locationtech.geomesa.spark.sql.SQLRules.SpatialOptimizationsRule.$anonfun.sort sort
253 1157 11401 - 11442 TypeApply scala.Any.isInstanceOf lr.relation.isInstanceOf[org.locationtech.geomesa.spark.sql.GeoMesaRelation]
253 1194 11443 - 13262 Block <nosymbol> { val gmRel: org.locationtech.geomesa.spark.sql.GeoMesaRelation = lr.relation.asInstanceOf[org.locationtech.geomesa.spark.sql.GeoMesaRelation]; val sparkFilters: Seq[org.apache.spark.sql.catalyst.expressions.Expression] = SpatialOptimizationsRule.this.splitConjunctivePredicates(f); <synthetic> <artifact> private[this] val x$3: (Seq[org.geotools.api.filter.Filter], Seq[org.apache.spark.sql.catalyst.expressions.Expression]) = (sparkFilters.foldLeft[(Seq[org.geotools.api.filter.Filter], Seq[org.apache.spark.sql.catalyst.expressions.Expression])](scala.Tuple2.apply[Seq[org.geotools.api.filter.Filter], Seq[org.apache.spark.sql.catalyst.expressions.Expression]](scala.collection.Seq.apply[org.geotools.api.filter.Filter](), scala.collection.Seq.apply[org.apache.spark.sql.catalyst.expressions.Expression]()))(((x0$1: (Seq[org.geotools.api.filter.Filter], Seq[org.apache.spark.sql.catalyst.expressions.Expression]), x1$1: org.apache.spark.sql.catalyst.expressions.Expression) => scala.Tuple2.apply[(Seq[org.geotools.api.filter.Filter], Seq[org.apache.spark.sql.catalyst.expressions.Expression]), org.apache.spark.sql.catalyst.expressions.Expression](x0$1, x1$1) match { case (_1: (Seq[org.geotools.api.filter.Filter], Seq[org.apache.spark.sql.catalyst.expressions.Expression]), _2: org.apache.spark.sql.catalyst.expressions.Expression)((Seq[org.geotools.api.filter.Filter], Seq[org.apache.spark.sql.catalyst.expressions.Expression]), org.apache.spark.sql.catalyst.expressions.Expression)((_1: Seq[org.geotools.api.filter.Filter], _2: Seq[org.apache.spark.sql.catalyst.expressions.Expression])(Seq[org.geotools.api.filter.Filter], Seq[org.apache.spark.sql.catalyst.expressions.Expression])((gts @ (_: Seq[org.geotools.api.filter.Filter])), (sfilters @ _)), (expression @ (_: org.apache.spark.sql.catalyst.expressions.Expression))) => SQLRules.this.sparkFilterToGTFilter(expression) match { case (value: 
org.geotools.api.filter.Filter)Some[org.geotools.api.filter.Filter]((gtf @ _)) => scala.Tuple2.apply[Seq[org.geotools.api.filter.Filter], Seq[org.apache.spark.sql.catalyst.expressions.Expression]](gts.+:[org.geotools.api.filter.Filter, Seq[org.geotools.api.filter.Filter]](gtf)(collection.this.Seq.canBuildFrom[org.geotools.api.filter.Filter]), sfilters) case scala.None => scala.Tuple2.apply[Seq[org.geotools.api.filter.Filter], Seq[org.apache.spark.sql.catalyst.expressions.Expression]](gts, sfilters.+:[org.apache.spark.sql.catalyst.expressions.Expression, Seq[org.apache.spark.sql.catalyst.expressions.Expression]](expression)(collection.this.Seq.canBuildFrom[org.apache.spark.sql.catalyst.expressions.Expression])) } })): (Seq[org.geotools.api.filter.Filter], Seq[org.apache.spark.sql.catalyst.expressions.Expression]) @unchecked) match { case (_1: Seq[org.geotools.api.filter.Filter], _2: Seq[org.apache.spark.sql.catalyst.expressions.Expression])(Seq[org.geotools.api.filter.Filter], Seq[org.apache.spark.sql.catalyst.expressions.Expression])((gtFilters @ (_: Seq[org.geotools.api.filter.Filter])), (sFilters @ (_: Seq[org.apache.spark.sql.catalyst.expressions.Expression]))) => scala.Tuple2.apply[Seq[org.geotools.api.filter.Filter], Seq[org.apache.spark.sql.catalyst.expressions.Expression]](gtFilters, sFilters) }; val gtFilters: Seq[org.geotools.api.filter.Filter] = x$3._1; val sFilters: Seq[org.apache.spark.sql.catalyst.expressions.Expression] = x$3._2; if (gtFilters.nonEmpty) { val partitioned: Option[org.locationtech.geomesa.spark.sql.GeoMesaRelation.CachedRDD] = gmRel.cached.map[org.locationtech.geomesa.spark.sql.GeoMesaRelation.CachedRDD](((x0$2: org.locationtech.geomesa.spark.sql.GeoMesaRelation.CachedRDD) => x0$2 match { case (c @ (_: org.locationtech.geomesa.spark.sql.GeoMesaRelation.PartitionedIndexedRDD)) => { val hints: Seq[Int] = sparkFilters.flatMap[List[Int], Seq[List[Int]]](((x$4: org.apache.spark.sql.catalyst.expressions.Expression) => 
scala.this.Option.option2Iterable[List[Int]](SpatialOptimizationsRule.this.extractGridId(c.envelopes, x$4))))(collection.this.Seq.canBuildFrom[List[Int]]).flatten[Int](scala.Predef.$conforms[List[Int]]); if (hints.isEmpty) c else c.copy(c.rdd.filter(((x0$3: (Int, org.locationtech.geomesa.memory.cqengine.datastore.GeoCQEngineDataStore)) => x0$3 match { case (_1: Int, _2: org.locationtech.geomesa.memory.cqengine.datastore.GeoCQEngineDataStore)(Int, org.locationtech.geomesa.memory.cqengine.datastore.GeoCQEngineDataStore)((key @ _), _) => hints.contains[Int](key) })), c.copy$default$2) } case (c @ _) => c })); val filt: Option[org.geotools.api.filter.Filter] = org.locationtech.geomesa.filter.FilterHelper.filterListAsAnd(scala.this.Option.option2Iterable[org.geotools.api.filter.Filter](gmRel.filter).toSeq.++[org.geotools.api.filter.Filter, Seq[org.geotools.api.filter.Filter]](gtFilters)(collection.this.Seq.canBuildFrom[org.geotools.api.filter.Filter])); val relation: org.locationtech.geomesa.spark.sql.GeoMesaRelation = { <artifact> val x$1: Option[org.geotools.api.filter.Filter] @scala.reflect.internal.annotations.uncheckedBounds = filt; <artifact> val x$2: Option[org.locationtech.geomesa.spark.sql.GeoMesaRelation.CachedRDD] @scala.reflect.internal.annotations.uncheckedBounds = partitioned; <artifact> val x$3: org.apache.spark.sql.SQLContext = gmRel.copy$default$1; <artifact> val x$4: org.geotools.api.feature.simple.SimpleFeatureType = gmRel.copy$default$2; <artifact> val x$5: org.apache.spark.sql.types.StructType = gmRel.copy$default$3; <artifact> val x$6: Map[String,String] @scala.reflect.internal.annotations.uncheckedBounds = gmRel.copy$default$4; <artifact> val x$7: Option[org.locationtech.geomesa.spark.sql.GeoMesaRelation.PartitionedRDD] @scala.reflect.internal.annotations.uncheckedBounds = gmRel.copy$default$7; gmRel.copy(x$3, x$4, x$5, x$6, x$1, x$2, x$7) }; val newrel: org.apache.spark.sql.execution.datasources.LogicalRelation = { <artifact> val qual$1: 
org.locationtech.geomesa.spark.sql.SparkVersions.CopyLogicalRelation = SparkVersions.copy(lr); <artifact> val x$8: Seq[org.apache.spark.sql.catalyst.expressions.AttributeReference] @scala.reflect.internal.annotations.uncheckedBounds = lr.output; <artifact> val x$9: org.locationtech.geomesa.spark.sql.GeoMesaRelation = relation; qual$1.apply(x$9, x$8) }; if (sFilters.nonEmpty) org.apache.spark.sql.catalyst.plans.logical.Filter.apply(sFilters.reduce[org.apache.spark.sql.catalyst.expressions.Expression](org.apache.spark.sql.catalyst.expressions.And), newrel) else newrel } else filt }
256 1158 11503 - 11544 TypeApply scala.Any.asInstanceOf lr.relation.asInstanceOf[org.locationtech.geomesa.spark.sql.GeoMesaRelation]
258 1159 11674 - 11703 Apply org.apache.spark.sql.catalyst.expressions.PredicateHelper.splitConjunctivePredicates SpatialOptimizationsRule.this.splitConjunctivePredicates(f)
260 1160 11720 - 11720 Select scala.Tuple2._1 x$3._1
260 1161 11746 - 11746 Select scala.Tuple2._2 x$3._2
268 1162 12144 - 12162 Select scala.collection.TraversableOnce.nonEmpty gtFilters.nonEmpty
268 1192 12164 - 13226 Block <nosymbol> { val partitioned: Option[org.locationtech.geomesa.spark.sql.GeoMesaRelation.CachedRDD] = gmRel.cached.map[org.locationtech.geomesa.spark.sql.GeoMesaRelation.CachedRDD](((x0$2: org.locationtech.geomesa.spark.sql.GeoMesaRelation.CachedRDD) => x0$2 match { case (c @ (_: org.locationtech.geomesa.spark.sql.GeoMesaRelation.PartitionedIndexedRDD)) => { val hints: Seq[Int] = sparkFilters.flatMap[List[Int], Seq[List[Int]]](((x$4: org.apache.spark.sql.catalyst.expressions.Expression) => scala.this.Option.option2Iterable[List[Int]](SpatialOptimizationsRule.this.extractGridId(c.envelopes, x$4))))(collection.this.Seq.canBuildFrom[List[Int]]).flatten[Int](scala.Predef.$conforms[List[Int]]); if (hints.isEmpty) c else c.copy(c.rdd.filter(((x0$3: (Int, org.locationtech.geomesa.memory.cqengine.datastore.GeoCQEngineDataStore)) => x0$3 match { case (_1: Int, _2: org.locationtech.geomesa.memory.cqengine.datastore.GeoCQEngineDataStore)(Int, org.locationtech.geomesa.memory.cqengine.datastore.GeoCQEngineDataStore)((key @ _), _) => hints.contains[Int](key) })), c.copy$default$2) } case (c @ _) => c })); val filt: Option[org.geotools.api.filter.Filter] = org.locationtech.geomesa.filter.FilterHelper.filterListAsAnd(scala.this.Option.option2Iterable[org.geotools.api.filter.Filter](gmRel.filter).toSeq.++[org.geotools.api.filter.Filter, Seq[org.geotools.api.filter.Filter]](gtFilters)(collection.this.Seq.canBuildFrom[org.geotools.api.filter.Filter])); val relation: org.locationtech.geomesa.spark.sql.GeoMesaRelation = { <artifact> val x$1: Option[org.geotools.api.filter.Filter] @scala.reflect.internal.annotations.uncheckedBounds = filt; <artifact> val x$2: Option[org.locationtech.geomesa.spark.sql.GeoMesaRelation.CachedRDD] @scala.reflect.internal.annotations.uncheckedBounds = partitioned; <artifact> val x$3: org.apache.spark.sql.SQLContext = gmRel.copy$default$1; <artifact> val x$4: org.geotools.api.feature.simple.SimpleFeatureType = gmRel.copy$default$2; 
<artifact> val x$5: org.apache.spark.sql.types.StructType = gmRel.copy$default$3; <artifact> val x$6: Map[String,String] @scala.reflect.internal.annotations.uncheckedBounds = gmRel.copy$default$4; <artifact> val x$7: Option[org.locationtech.geomesa.spark.sql.GeoMesaRelation.PartitionedRDD] @scala.reflect.internal.annotations.uncheckedBounds = gmRel.copy$default$7; gmRel.copy(x$3, x$4, x$5, x$6, x$1, x$2, x$7) }; val newrel: org.apache.spark.sql.execution.datasources.LogicalRelation = { <artifact> val qual$1: org.locationtech.geomesa.spark.sql.SparkVersions.CopyLogicalRelation = SparkVersions.copy(lr); <artifact> val x$8: Seq[org.apache.spark.sql.catalyst.expressions.AttributeReference] @scala.reflect.internal.annotations.uncheckedBounds = lr.output; <artifact> val x$9: org.locationtech.geomesa.spark.sql.GeoMesaRelation = relation; qual$1.apply(x$9, x$8) }; if (sFilters.nonEmpty) org.apache.spark.sql.catalyst.plans.logical.Filter.apply(sFilters.reduce[org.apache.spark.sql.catalyst.expressions.Expression](org.apache.spark.sql.catalyst.expressions.And), newrel) else newrel }
270 1178 12296 - 12642 Apply scala.Option.map gmRel.cached.map[org.locationtech.geomesa.spark.sql.GeoMesaRelation.CachedRDD](((x0$2: org.locationtech.geomesa.spark.sql.GeoMesaRelation.CachedRDD) => x0$2 match { case (c @ (_: org.locationtech.geomesa.spark.sql.GeoMesaRelation.PartitionedIndexedRDD)) => { val hints: Seq[Int] = sparkFilters.flatMap[List[Int], Seq[List[Int]]](((x$4: org.apache.spark.sql.catalyst.expressions.Expression) => scala.this.Option.option2Iterable[List[Int]](SpatialOptimizationsRule.this.extractGridId(c.envelopes, x$4))))(collection.this.Seq.canBuildFrom[List[Int]]).flatten[Int](scala.Predef.$conforms[List[Int]]); if (hints.isEmpty) c else c.copy(c.rdd.filter(((x0$3: (Int, org.locationtech.geomesa.memory.cqengine.datastore.GeoCQEngineDataStore)) => x0$3 match { case (_1: Int, _2: org.locationtech.geomesa.memory.cqengine.datastore.GeoCQEngineDataStore)(Int, org.locationtech.geomesa.memory.cqengine.datastore.GeoCQEngineDataStore)((key @ _), _) => hints.contains[Int](key) })), c.copy$default$2) } case (c @ _) => c }))
271 1176 12359 - 12601 Block <nosymbol> { val hints: Seq[Int] = sparkFilters.flatMap[List[Int], Seq[List[Int]]](((x$4: org.apache.spark.sql.catalyst.expressions.Expression) => scala.this.Option.option2Iterable[List[Int]](SpatialOptimizationsRule.this.extractGridId(c.envelopes, x$4))))(collection.this.Seq.canBuildFrom[List[Int]]).flatten[Int](scala.Predef.$conforms[List[Int]]); if (hints.isEmpty) c else c.copy(c.rdd.filter(((x0$3: (Int, org.locationtech.geomesa.memory.cqengine.datastore.GeoCQEngineDataStore)) => x0$3 match { case (_1: Int, _2: org.locationtech.geomesa.memory.cqengine.datastore.GeoCQEngineDataStore)(Int, org.locationtech.geomesa.memory.cqengine.datastore.GeoCQEngineDataStore)((key @ _), _) => hints.contains[Int](key) })), c.copy$default$2) }
272 1163 12425 - 12436 Select org.locationtech.geomesa.spark.sql.GeoMesaRelation.PartitionedIndexedRDD.envelopes c.envelopes
272 1164 12411 - 12440 Apply org.locationtech.geomesa.spark.sql.SQLRules.SpatialOptimizationsRule.extractGridId SpatialOptimizationsRule.this.extractGridId(c.envelopes, x$4)
272 1165 12411 - 12440 ApplyImplicitView scala.Option.option2Iterable scala.this.Option.option2Iterable[List[Int]](SpatialOptimizationsRule.this.extractGridId(c.envelopes, x$4))
272 1166 12410 - 12410 TypeApply scala.collection.Seq.canBuildFrom collection.this.Seq.canBuildFrom[List[Int]]
272 1167 12442 - 12442 TypeApply scala.Predef.$conforms scala.Predef.$conforms[List[Int]]
272 1168 12390 - 12449 ApplyToImplicitArgs scala.collection.generic.GenericTraversableTemplate.flatten sparkFilters.flatMap[List[Int], Seq[List[Int]]](((x$4: org.apache.spark.sql.catalyst.expressions.Expression) => scala.this.Option.option2Iterable[List[Int]](SpatialOptimizationsRule.this.extractGridId(c.envelopes, x$4))))(collection.this.Seq.canBuildFrom[List[Int]]).flatten[Int](scala.Predef.$conforms[List[Int]])
273 1169 12470 - 12483 Select scala.collection.SeqLike.isEmpty hints.isEmpty
273 1170 12487 - 12488 Ident org.locationtech.geomesa.spark.sql.SQLRules.SpatialOptimizationsRule.$anonfun.c c
274 1171 12561 - 12580 Apply scala.collection.SeqLike.contains hints.contains[Int](key)
274 1172 12561 - 12580 Block scala.collection.SeqLike.contains hints.contains[Int](key)
274 1173 12529 - 12582 Apply org.apache.spark.rdd.RDD.filter c.rdd.filter(((x0$3: (Int, org.locationtech.geomesa.memory.cqengine.datastore.GeoCQEngineDataStore)) => x0$3 match { case (_1: Int, _2: org.locationtech.geomesa.memory.cqengine.datastore.GeoCQEngineDataStore)(Int, org.locationtech.geomesa.memory.cqengine.datastore.GeoCQEngineDataStore)((key @ _), _) => hints.contains[Int](key) }))
274 1174 12516 - 12583 Apply org.locationtech.geomesa.spark.sql.GeoMesaRelation.PartitionedIndexedRDD.copy c.copy(c.rdd.filter(((x0$3: (Int, org.locationtech.geomesa.memory.cqengine.datastore.GeoCQEngineDataStore)) => x0$3 match { case (_1: Int, _2: org.locationtech.geomesa.memory.cqengine.datastore.GeoCQEngineDataStore)(Int, org.locationtech.geomesa.memory.cqengine.datastore.GeoCQEngineDataStore)((key @ _), _) => hints.contains[Int](key) })), c.copy$default$2)
274 1175 12516 - 12583 Block org.locationtech.geomesa.spark.sql.GeoMesaRelation.PartitionedIndexedRDD.copy c.copy(c.rdd.filter(((x0$3: (Int, org.locationtech.geomesa.memory.cqengine.datastore.GeoCQEngineDataStore)) => x0$3 match { case (_1: Int, _2: org.locationtech.geomesa.memory.cqengine.datastore.GeoCQEngineDataStore)(Int, org.locationtech.geomesa.memory.cqengine.datastore.GeoCQEngineDataStore)((key @ _), _) => hints.contains[Int](key) })), c.copy$default$2)
277 1177 12627 - 12628 Ident org.locationtech.geomesa.spark.sql.SQLRules.SpatialOptimizationsRule.$anonfun.c c
279 1179 12695 - 12707 Select org.locationtech.geomesa.spark.sql.GeoMesaRelation.filter gmRel.filter
279 1180 12714 - 12714 TypeApply scala.collection.Seq.canBuildFrom collection.this.Seq.canBuildFrom[org.geotools.api.filter.Filter]
279 1181 12695 - 12726 ApplyToImplicitArgs scala.collection.TraversableLike.++ scala.this.Option.option2Iterable[org.geotools.api.filter.Filter](gmRel.filter).toSeq.++[org.geotools.api.filter.Filter, Seq[org.geotools.api.filter.Filter]](gtFilters)(collection.this.Seq.canBuildFrom[org.geotools.api.filter.Filter])
279 1182 12666 - 12727 Apply org.locationtech.geomesa.filter.FilterHelper.filterListAsAnd org.locationtech.geomesa.filter.FilterHelper.filterListAsAnd(scala.this.Option.option2Iterable[org.geotools.api.filter.Filter](gmRel.filter).toSeq.++[org.geotools.api.filter.Filter, Seq[org.geotools.api.filter.Filter]](gtFilters)(collection.this.Seq.canBuildFrom[org.geotools.api.filter.Filter]))
280 1183 12755 - 12802 Apply org.locationtech.geomesa.spark.sql.GeoMesaRelation.copy gmRel.copy(x$3, x$4, x$5, x$6, x$1, x$2, x$7)
281 1184 12828 - 12850 Apply org.locationtech.geomesa.spark.sql.SparkVersions.copy SparkVersions.copy(lr)
281 1185 12860 - 12869 Select org.apache.spark.sql.execution.datasources.LogicalRelation.output lr.output
281 1186 12828 - 12891 Apply org.locationtech.geomesa.spark.sql.SparkVersions.CopyLogicalRelation.apply qual$1.apply(x$9, x$8)
282 1187 12908 - 12925 Select scala.collection.TraversableOnce.nonEmpty sFilters.nonEmpty
284 1188 13026 - 13046 Apply scala.collection.TraversableOnce.reduce sFilters.reduce[org.apache.spark.sql.catalyst.expressions.Expression](org.apache.spark.sql.catalyst.expressions.And)
284 1189 13019 - 13055 Apply org.apache.spark.sql.catalyst.plans.logical.Filter.apply org.apache.spark.sql.catalyst.plans.logical.Filter.apply(sFilters.reduce[org.apache.spark.sql.catalyst.expressions.Expression](org.apache.spark.sql.catalyst.expressions.And), newrel)
284 1190 13019 - 13055 Block org.apache.spark.sql.catalyst.plans.logical.Filter.apply org.apache.spark.sql.catalyst.plans.logical.Filter.apply(sFilters.reduce[org.apache.spark.sql.catalyst.expressions.Expression](org.apache.spark.sql.catalyst.expressions.And), newrel)
287 1191 13194 - 13200 Ident org.locationtech.geomesa.spark.sql.SQLRules.SpatialOptimizationsRule.$anonfun.newrel newrel
290 1193 13246 - 13250 Ident org.locationtech.geomesa.spark.sql.SQLRules.SpatialOptimizationsRule.$anonfun.filt filt
293 1196 13292 - 13329 Apply scala.PartialFunction.orElse optimizeAggregate.orElse[org.apache.spark.sql.catalyst.plans.logical.LogicalPlan, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan](optimizeRest)
293 1197 13277 - 13330 Apply org.apache.spark.sql.catalyst.trees.TreeNode.transform plan.transform(optimizeAggregate.orElse[org.apache.spark.sql.catalyst.plans.logical.LogicalPlan, org.apache.spark.sql.catalyst.plans.logical.LogicalPlan](optimizeRest))
305 1198 13658 - 13674 Select org.apache.spark.sql.catalyst.plans.logical.Join.left logicalPlan.left
306 1199 13741 - 13786 TypeApply scala.Any.isInstanceOf lr.relation.isInstanceOf[org.locationtech.geomesa.spark.sql.GeoMesaJoinRelation]
307 1200 13801 - 13847 Apply scala.collection.immutable.List.:: scala.collection.immutable.Nil.::[org.apache.spark.sql.execution.ProjectExec](x$5)
307 1201 13801 - 13847 Block <nosymbol> { <synthetic> <artifact> val x$5: org.apache.spark.sql.execution.ProjectExec = org.apache.spark.sql.execution.ProjectExec.apply(projectList, SpatialJoinStrategy.this.planLater(lr)); scala.collection.immutable.Nil.::[org.apache.spark.sql.execution.ProjectExec](x$5) }
309 1202 13885 - 13930 TypeApply scala.Any.isInstanceOf lr.relation.isInstanceOf[org.locationtech.geomesa.spark.sql.GeoMesaJoinRelation]
309 1203 13934 - 13954 Apply scala.collection.immutable.List.:: scala.collection.immutable.Nil.::[org.apache.spark.sql.execution.SparkPlan](x$6)
309 1204 13934 - 13954 Block <nosymbol> { <synthetic> <artifact> val x$6: org.apache.spark.sql.execution.SparkPlan = SpatialJoinStrategy.this.planLater(lr); scala.collection.immutable.Nil.::[org.apache.spark.sql.execution.SparkPlan](x$6) }
311 1205 13974 - 13977 Select scala.collection.immutable.Nil scala.collection.immutable.Nil
311 1206 13974 - 13977 Block scala.collection.immutable.Nil scala.collection.immutable.Nil
317 1207 14152 - 14174 Apply org.locationtech.geomesa.spark.sql.SQLRules.SpatialJoinStrategy.alterJoin SpatialJoinStrategy.this.alterJoin(logicalPlan)
317 1208 14152 - 14174 Block org.locationtech.geomesa.spark.sql.SQLRules.SpatialJoinStrategy.alterJoin SpatialJoinStrategy.this.alterJoin(logicalPlan)
318 1209 14200 - 14215 Apply org.locationtech.geomesa.spark.sql.SQLRules.SpatialJoinStrategy.alterJoin SpatialJoinStrategy.this.alterJoin(join)
318 1210 14200 - 14215 Block org.locationtech.geomesa.spark.sql.SQLRules.SpatialJoinStrategy.alterJoin SpatialJoinStrategy.this.alterJoin(join)
319 1211 14232 - 14235 Select scala.collection.immutable.Nil scala.collection.immutable.Nil
319 1212 14232 - 14235 Block scala.collection.immutable.Nil scala.collection.immutable.Nil
325 1213 14318 - 14342 Select org.locationtech.geomesa.spark.sql.SQLRules.SpatialOptimizationsRule SQLRules.this.SpatialOptimizationsRule
325 1222 14314 - 14492 Apply scala.collection.IterableLike.foreach scala.collection.Seq.apply[org.locationtech.geomesa.spark.sql.SQLRules.SpatialOptimizationsRule.type](SQLRules.this.SpatialOptimizationsRule).foreach[Unit](((r: org.locationtech.geomesa.spark.sql.SQLRules.SpatialOptimizationsRule.type) => if (sqlContext.experimental.extraOptimizations.contains[org.apache.spark.sql.catalyst.rules.Rule[org.apache.spark.sql.catalyst.plans.logical.LogicalPlan]](r).unary_!) { <synthetic> val ev$1: org.apache.spark.sql.ExperimentalMethods = sqlContext.experimental; ev$1.extraOptimizations_=(ev$1.extraOptimizations.++[org.apache.spark.sql.catalyst.rules.Rule[org.apache.spark.sql.catalyst.plans.logical.LogicalPlan], Seq[org.apache.spark.sql.catalyst.rules.Rule[org.apache.spark.sql.catalyst.plans.logical.LogicalPlan]]](scala.collection.Seq.apply[org.locationtech.geomesa.spark.sql.SQLRules.SpatialOptimizationsRule.type](r))(collection.this.Seq.canBuildFrom[org.apache.spark.sql.catalyst.rules.Rule[org.apache.spark.sql.catalyst.plans.logical.LogicalPlan]])) } else ()))
326 1214 14368 - 14423 Select scala.Boolean.unary_! sqlContext.experimental.extraOptimizations.contains[org.apache.spark.sql.catalyst.rules.Rule[org.apache.spark.sql.catalyst.plans.logical.LogicalPlan]](r).unary_!
326 1220 14365 - 14365 Literal <nosymbol> ()
326 1221 14365 - 14365 Block <nosymbol> ()
327 1215 14480 - 14486 Apply scala.collection.generic.GenericCompanion.apply scala.collection.Seq.apply[org.locationtech.geomesa.spark.sql.SQLRules.SpatialOptimizationsRule.type](r)
327 1216 14476 - 14476 TypeApply scala.collection.Seq.canBuildFrom collection.this.Seq.canBuildFrom[org.apache.spark.sql.catalyst.rules.Rule[org.apache.spark.sql.catalyst.plans.logical.LogicalPlan]]
327 1217 14433 - 14486 ApplyToImplicitArgs scala.collection.TraversableLike.++ ev$1.extraOptimizations.++[org.apache.spark.sql.catalyst.rules.Rule[org.apache.spark.sql.catalyst.plans.logical.LogicalPlan], Seq[org.apache.spark.sql.catalyst.rules.Rule[org.apache.spark.sql.catalyst.plans.logical.LogicalPlan]]](scala.collection.Seq.apply[org.locationtech.geomesa.spark.sql.SQLRules.SpatialOptimizationsRule.type](r))(collection.this.Seq.canBuildFrom[org.apache.spark.sql.catalyst.rules.Rule[org.apache.spark.sql.catalyst.plans.logical.LogicalPlan]])
327 1218 14433 - 14486 Apply org.apache.spark.sql.ExperimentalMethods.extraOptimizations_= ev$1.extraOptimizations_=(ev$1.extraOptimizations.++[org.apache.spark.sql.catalyst.rules.Rule[org.apache.spark.sql.catalyst.plans.logical.LogicalPlan], Seq[org.apache.spark.sql.catalyst.rules.Rule[org.apache.spark.sql.catalyst.plans.logical.LogicalPlan]]](scala.collection.Seq.apply[org.locationtech.geomesa.spark.sql.SQLRules.SpatialOptimizationsRule.type](r))(collection.this.Seq.canBuildFrom[org.apache.spark.sql.catalyst.rules.Rule[org.apache.spark.sql.catalyst.plans.logical.LogicalPlan]]))
327 1219 14433 - 14486 Block <nosymbol> { <synthetic> val ev$1: org.apache.spark.sql.ExperimentalMethods = sqlContext.experimental; ev$1.extraOptimizations_=(ev$1.extraOptimizations.++[org.apache.spark.sql.catalyst.rules.Rule[org.apache.spark.sql.catalyst.plans.logical.LogicalPlan], Seq[org.apache.spark.sql.catalyst.rules.Rule[org.apache.spark.sql.catalyst.plans.logical.LogicalPlan]]](scala.collection.Seq.apply[org.locationtech.geomesa.spark.sql.SQLRules.SpatialOptimizationsRule.type](r))(collection.this.Seq.canBuildFrom[org.apache.spark.sql.catalyst.rules.Rule[org.apache.spark.sql.catalyst.plans.logical.LogicalPlan]])) }
330 1223 14502 - 14521 Select org.locationtech.geomesa.spark.sql.SQLRules.SpatialJoinStrategy SQLRules.this.SpatialJoinStrategy
330 1232 14498 - 14665 Apply scala.collection.IterableLike.foreach scala.collection.Seq.apply[org.locationtech.geomesa.spark.sql.SQLRules.SpatialJoinStrategy.type](SQLRules.this.SpatialJoinStrategy).foreach[Unit](((s: org.locationtech.geomesa.spark.sql.SQLRules.SpatialJoinStrategy.type) => if (sqlContext.experimental.extraStrategies.contains[org.apache.spark.sql.Strategy](s).unary_!) { <synthetic> val ev$2: org.apache.spark.sql.ExperimentalMethods = sqlContext.experimental; ev$2.extraStrategies_=(ev$2.extraStrategies.++[org.apache.spark.sql.Strategy, Seq[org.apache.spark.sql.Strategy]](scala.collection.Seq.apply[org.locationtech.geomesa.spark.sql.SQLRules.SpatialJoinStrategy.type](s))(collection.this.Seq.canBuildFrom[org.apache.spark.sql.Strategy])) } else ()))
331 1224 14547 - 14599 Select scala.Boolean.unary_! sqlContext.experimental.extraStrategies.contains[org.apache.spark.sql.Strategy](s).unary_!
331 1230 14544 - 14544 Literal <nosymbol> ()
331 1231 14544 - 14544 Block <nosymbol> ()
332 1225 14653 - 14659 Apply scala.collection.generic.GenericCompanion.apply scala.collection.Seq.apply[org.locationtech.geomesa.spark.sql.SQLRules.SpatialJoinStrategy.type](s)
332 1226 14649 - 14649 TypeApply scala.collection.Seq.canBuildFrom collection.this.Seq.canBuildFrom[org.apache.spark.sql.Strategy]
332 1227 14609 - 14659 ApplyToImplicitArgs scala.collection.TraversableLike.++ ev$2.extraStrategies.++[org.apache.spark.sql.Strategy, Seq[org.apache.spark.sql.Strategy]](scala.collection.Seq.apply[org.locationtech.geomesa.spark.sql.SQLRules.SpatialJoinStrategy.type](s))(collection.this.Seq.canBuildFrom[org.apache.spark.sql.Strategy])
332 1228 14609 - 14659 Apply org.apache.spark.sql.ExperimentalMethods.extraStrategies_= ev$2.extraStrategies_=(ev$2.extraStrategies.++[org.apache.spark.sql.Strategy, Seq[org.apache.spark.sql.Strategy]](scala.collection.Seq.apply[org.locationtech.geomesa.spark.sql.SQLRules.SpatialJoinStrategy.type](s))(collection.this.Seq.canBuildFrom[org.apache.spark.sql.Strategy]))
332 1229 14609 - 14659 Block <nosymbol> { <synthetic> val ev$2: org.apache.spark.sql.ExperimentalMethods = sqlContext.experimental; ev$2.extraStrategies_=(ev$2.extraStrategies.++[org.apache.spark.sql.Strategy, Seq[org.apache.spark.sql.Strategy]](scala.collection.Seq.apply[org.locationtech.geomesa.spark.sql.SQLRules.SpatialJoinStrategy.type](s))(collection.this.Seq.canBuildFrom[org.apache.spark.sql.Strategy])) }