Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions docs/sql-ref-ansi-compliance.md
Original file line number Diff line number Diff line change
Expand Up @@ -429,6 +429,7 @@ Below is a list of all the keywords in Spark SQL.
|ATOMIC|non-reserved|non-reserved|non-reserved|
|AUTHORIZATION|reserved|non-reserved|reserved|
|BEGIN|non-reserved|non-reserved|non-reserved|
|BERNOULLI|non-reserved|non-reserved|non-reserved|
|BETWEEN|non-reserved|non-reserved|reserved|
|BIGINT|non-reserved|non-reserved|reserved|
|BINARY|non-reserved|non-reserved|reserved|
Expand Down Expand Up @@ -753,6 +754,7 @@ Below is a list of all the keywords in Spark SQL.
|SUBSTR|non-reserved|non-reserved|non-reserved|
|SUBSTRING|non-reserved|non-reserved|non-reserved|
|SYNC|non-reserved|non-reserved|non-reserved|
|SYSTEM|non-reserved|non-reserved|reserved|
|SYSTEM_TIME|non-reserved|non-reserved|non-reserved|
|SYSTEM_VERSION|non-reserved|non-reserved|non-reserved|
|TABLE|reserved|non-reserved|reserved|
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -148,6 +148,7 @@ AT: 'AT';
ATOMIC: 'ATOMIC';
AUTHORIZATION: 'AUTHORIZATION';
BEGIN: 'BEGIN';
BERNOULLI: 'BERNOULLI';
BETWEEN: 'BETWEEN';
BIGINT: 'BIGINT';
BINARY: 'BINARY';
Expand Down Expand Up @@ -471,6 +472,7 @@ STRUCT: 'STRUCT' {incComplexTypeLevelCounter();};
SUBSTR: 'SUBSTR';
SUBSTRING: 'SUBSTRING';
SYNC: 'SYNC';
SYSTEM: 'SYSTEM';
SYSTEM_TIME: 'SYSTEM_TIME';
SYSTEM_VERSION: 'SYSTEM_VERSION';
TABLE: 'TABLE';
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1049,7 +1049,9 @@ joinCriteria
;

sample
: TABLESAMPLE LEFT_PAREN sampleMethod? RIGHT_PAREN (REPEATABLE LEFT_PAREN seed=integerValue RIGHT_PAREN)?
: TABLESAMPLE (sampleType=(SYSTEM | BERNOULLI))?
LEFT_PAREN sampleMethod? RIGHT_PAREN
(REPEATABLE LEFT_PAREN seed=integerValue RIGHT_PAREN)?
;

sampleMethod
Expand Down Expand Up @@ -1917,6 +1919,7 @@ ansiNonReserved
| AT
| ATOMIC
| BEGIN
| BERNOULLI
| BETWEEN
| BIGINT
| BINARY
Expand Down Expand Up @@ -2183,6 +2186,7 @@ ansiNonReserved
| SUBSTR
| SUBSTRING
| SYNC
| SYSTEM
| SYSTEM_TIME
| SYSTEM_VERSION
| TABLES
Expand Down Expand Up @@ -2287,6 +2291,7 @@ nonReserved
| ATOMIC
| AUTHORIZATION
| BEGIN
| BERNOULLI
| BETWEEN
| BIGINT
| BINARY
Expand Down Expand Up @@ -2599,6 +2604,7 @@ nonReserved
| SUBSTR
| SUBSTRING
| SYNC
| SYSTEM
| SYSTEM_TIME
| SYSTEM_VERSION
| TABLE
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,4 +36,24 @@ boolean pushTableSample(
double upperBound,
boolean withReplacement,
long seed);

/**
 * Pushes down SAMPLE to the data source with awareness of the sampling method.
 * Data sources override this to distinguish SYSTEM (block/split-level) sampling from
 * BERNOULLI (row-level) sampling. The default implementation rejects SYSTEM sampling
 * for backward compatibility and forwards BERNOULLI requests to the 4-parameter
 * overload that pre-dates this method.
 */
default boolean pushTableSample(
    double lowerBound,
    double upperBound,
    boolean withReplacement,
    long seed,
    boolean isSystemSampling) {
  // BERNOULLI (row-level) sampling: delegate to the pre-existing 4-argument overload so
  // that sources written before this method existed keep working unchanged.
  if (!isSystemSampling) {
    return pushTableSample(lowerBound, upperBound, withReplacement, seed);
  }
  // A source that has not overridden this method cannot have added support for SYSTEM
  // sampling, so decline the pushdown and let Spark perform the sampling itself.
  return false;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -530,7 +530,7 @@ object UnsupportedOperationChecker extends Logging {
throwError("Sorting is not supported on streaming DataFrames/Datasets, unless it is on " +
"aggregated DataFrame/Dataset in Complete output mode")

case Sample(_, _, _, _, child) if child.isStreaming =>
case Sample(_, _, _, _, child, _) if child.isStreaming =>
throwError("Sampling is not supported on streaming DataFrames/Datasets")

case Window(windowExpression, _, _, child, _) if child.isStreaming =>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1288,7 +1288,7 @@ object CollapseProject extends Rule[LogicalPlan] with AliasHelper {
limit.copy(child = p2.copy(projectList = newProjectList))
case Project(l1, r @ Repartition(_, _, p @ Project(l2, _))) if isRenaming(l1, l2) =>
r.copy(child = p.copy(projectList = buildCleanedProjectList(l1, p.projectList)))
case Project(l1, s @ Sample(_, _, _, _, p2 @ Project(l2, _))) if isRenaming(l1, l2) =>
case Project(l1, s @ Sample(_, _, _, _, p2 @ Project(l2, _), _)) if isRenaming(l1, l2) =>
s.copy(child = p2.copy(projectList = buildCleanedProjectList(l1, p2.projectList)))
case o => o
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2250,10 +2250,14 @@ class AstBuilder extends DataTypeAstBuilder
* - TABLESAMPLE(x ROWS): Sample the table down to the given number of rows.
* - TABLESAMPLE(x PERCENT) [REPEATABLE (y)]: Sample the table down to the given percentage with
* seed 'y'. Note that percentages are defined as a number between 0 and 100.
* - TABLESAMPLE SYSTEM(x PERCENT): Sample by data source dependent blocks or file splits.
* - TABLESAMPLE(BUCKET x OUT OF y) [REPEATABLE (z)]: Sample the table down to a 'x' divided by
* 'y' fraction with seed 'z'.
*/
private def withSample(ctx: SampleContext, query: LogicalPlan): LogicalPlan = withOrigin(ctx) {
val isSystem = ctx.sampleType != null &&
ctx.sampleType.getType == SqlBaseParser.SYSTEM

// Create a sampled plan if we need one.
def sample(fraction: Double, seed: Long): Sample = {
// The range of fraction accepted by Sample is [0, 1]. Because Hive's block sampling
Expand All @@ -2263,13 +2267,18 @@ class AstBuilder extends DataTypeAstBuilder
validate(fraction >= 0.0 - eps && fraction <= 1.0 + eps,
s"Sampling fraction ($fraction) must be on interval [0, 1]",
ctx)
Sample(0.0, fraction, withReplacement = false, seed, query)
val method = if (isSystem) SampleMethod.System else SampleMethod.Bernoulli
Sample(0.0, fraction, withReplacement = false, seed, query, method)
}

if (ctx.sampleMethod() == null) {
throw QueryParsingErrors.emptyInputForTableSampleError(ctx)
}

if (isSystem && ctx.seed != null) {
operationNotAllowed("TABLESAMPLE SYSTEM does not support REPEATABLE", ctx)
}

val seed = if (ctx.seed != null) {
ctx.seed.getText.toLong
} else {
Expand All @@ -2278,6 +2287,9 @@ class AstBuilder extends DataTypeAstBuilder

ctx.sampleMethod() match {
case ctx: SampleByRowsContext =>
if (isSystem) {
operationNotAllowed("TABLESAMPLE SYSTEM only supports PERCENT sampling", ctx)
}
Limit(expression(ctx.expression), query)

case ctx: SampleByPercentileContext =>
Expand All @@ -2289,6 +2301,9 @@ class AstBuilder extends DataTypeAstBuilder
sample(sign * fraction / 100.0d, seed)

case ctx: SampleByBytesContext =>
if (isSystem) {
operationNotAllowed("TABLESAMPLE SYSTEM only supports PERCENT sampling", ctx)
}
val bytesStr = ctx.bytes.getText
if (bytesStr.matches("[0-9]+[bBkKmMgG]")) {
throw QueryParsingErrors.tableSampleByBytesUnsupportedError("byteLengthLiteral", ctx)
Expand All @@ -2297,6 +2312,9 @@ class AstBuilder extends DataTypeAstBuilder
}

case ctx: SampleByBucketContext if ctx.ON() != null =>
if (isSystem) {
operationNotAllowed("TABLESAMPLE SYSTEM only supports PERCENT sampling", ctx)
}
if (ctx.identifier != null) {
throw QueryParsingErrors.tableSampleByBytesUnsupportedError(
"BUCKET x OUT OF y ON colname", ctx)
Expand All @@ -2306,6 +2324,9 @@ class AstBuilder extends DataTypeAstBuilder
}

case ctx: SampleByBucketContext =>
if (isSystem) {
operationNotAllowed("TABLESAMPLE SYSTEM only supports PERCENT sampling", ctx)
}
sample(ctx.numerator.getText.toDouble / ctx.denominator.getText.toDouble, seed)
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1912,6 +1912,14 @@ object SubqueryAlias {
}
}

/**
 * The sampling strategy requested by a TABLESAMPLE clause.
 *
 * Marked [[Serializable]] because instances are carried inside `Sample` logical plan
 * nodes. Sealed so that matches over the method are checked for exhaustiveness.
 */
sealed trait SampleMethod extends Serializable
object SampleMethod {
  /** Row-level sampling (BERNOULLI). Each row independently selected. No I/O savings. */
  case object Bernoulli extends SampleMethod
  /** System-level sampling (SYSTEM). Entire partitions/splits included or skipped. */
  case object System extends SampleMethod
}

/**
* Sample the dataset.
*
Expand All @@ -1921,13 +1929,15 @@ object SubqueryAlias {
* @param withReplacement Whether to sample with replacement.
* @param seed the random seed
* @param child the LogicalPlan
* @param sampleMethod the sampling method (Bernoulli or System)
*/
case class Sample(
lowerBound: Double,
upperBound: Double,
withReplacement: Boolean,
seed: Long,
child: LogicalPlan) extends UnaryNode {
child: LogicalPlan,
sampleMethod: SampleMethod = SampleMethod.Bernoulli) extends UnaryNode {

val eps = RandomSampler.roundingEpsilon
val fraction = upperBound - lowerBound
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -870,6 +870,160 @@ class PlanParserSuite extends AnalysisTest {
stop = 65))
}

test("SPARK-55978: TABLESAMPLE SYSTEM and BERNOULLI - basic parsing") {
  // Seed 10L matches this suite's fixed test seed convention for Sample comparisons.
  val sql = "select * from t"
  // SYSTEM produces SampleMethod.System
  assertEqual(
    s"$sql tablesample system (43 percent) as x",
    Sample(0, .43d, withReplacement = false, 10L,
      table("t").as("x"), SampleMethod.System).select(star()))
  // BERNOULLI produces SampleMethod.Bernoulli
  assertEqual(
    s"$sql tablesample bernoulli (43 percent) as x",
    Sample(0, .43d, withReplacement = false, 10L,
      table("t").as("x"), SampleMethod.Bernoulli).select(star()))
  // No qualifier defaults to Bernoulli (backward compat): the sampleMethod
  // argument is omitted here on purpose to exercise the default parameter.
  assertEqual(
    s"$sql tablesample(43 percent) as x",
    Sample(0, .43d, withReplacement = false, 10L,
      table("t").as("x")).select(star()))
}

test("SPARK-55978: TABLESAMPLE SYSTEM - case insensitivity") {
  val sql = "select * from t"
  // Keywords are case-insensitive: upper, mixed, and lower case must all parse
  // to the same plan.
  assertEqual(
    s"$sql TABLESAMPLE SYSTEM (43 PERCENT) as x",
    Sample(0, .43d, withReplacement = false, 10L,
      table("t").as("x"), SampleMethod.System).select(star()))
  assertEqual(
    s"$sql TabLeSaMpLe SyStEm (43 PeRcEnT) as x",
    Sample(0, .43d, withReplacement = false, 10L,
      table("t").as("x"), SampleMethod.System).select(star()))
  assertEqual(
    s"$sql TABLESAMPLE BERNOULLI (43 PERCENT) as x",
    Sample(0, .43d, withReplacement = false, 10L,
      table("t").as("x"), SampleMethod.Bernoulli).select(star()))
}

test("SPARK-55978: TABLESAMPLE SYSTEM - boundary fractions") {
  val sql = "select * from t"
  // Percentages are divided by 100 by the parser; 0 and 100 are the inclusive
  // bounds of the accepted [0, 1] fraction interval.
  // 0 PERCENT
  assertEqual(
    s"$sql tablesample system (0 percent) as x",
    Sample(0, 0d, withReplacement = false, 10L,
      table("t").as("x"), SampleMethod.System).select(star()))
  // 100 PERCENT
  assertEqual(
    s"$sql tablesample system (100 percent) as x",
    Sample(0, 1d, withReplacement = false, 10L,
      table("t").as("x"), SampleMethod.System).select(star()))
  // Fractional percent: 0.1 PERCENT -> fraction 0.001
  assertEqual(
    s"$sql tablesample system (0.1 percent) as x",
    Sample(0, 0.001d, withReplacement = false, 10L,
      table("t").as("x"), SampleMethod.System).select(star()))
}

test("SPARK-55978: TABLESAMPLE SYSTEM - unsupported sample methods") {
  // SYSTEM only combines with PERCENT; ROWS, byte-length, and BUCKET forms must fail.
  // NOTE(review): the expected fragments below span the full TABLESAMPLE clause, but
  // AstBuilder raises these errors with the pattern-shadowed sampleMethod ctx
  // (e.g. `case ctx: SampleByRowsContext`), whose text is only the inner method —
  // confirm the reported fragment/start/stop really match these expectations.
  val sql = "select * from t"
  // SYSTEM + ROWS -> error
  checkError(
    exception = parseException(s"$sql tablesample system (100 rows)"),
    condition = "_LEGACY_ERROR_TEMP_0035",
    parameters = Map(
      "message" -> "TABLESAMPLE SYSTEM only supports PERCENT sampling"),
    context = ExpectedContext(
      fragment = "tablesample system (100 rows)",
      start = 16,
      stop = 44))
  // SYSTEM + BYTES -> error
  checkError(
    exception = parseException(s"$sql tablesample system (300M)"),
    condition = "_LEGACY_ERROR_TEMP_0035",
    parameters = Map(
      "message" -> "TABLESAMPLE SYSTEM only supports PERCENT sampling"),
    context = ExpectedContext(
      fragment = "tablesample system (300M)",
      start = 16,
      stop = 40))
  // SYSTEM + BUCKET -> error
  checkError(
    exception = parseException(s"$sql tablesample system (bucket 4 out of 10)"),
    condition = "_LEGACY_ERROR_TEMP_0035",
    parameters = Map(
      "message" -> "TABLESAMPLE SYSTEM only supports PERCENT sampling"),
    context = ExpectedContext(
      fragment = "tablesample system (bucket 4 out of 10)",
      start = 16,
      stop = 54))
}

test("SPARK-55978: TABLESAMPLE BERNOULLI - REPEATABLE is supported") {
  // REPEATABLE (123) supplies the explicit seed; BERNOULLI keeps supporting it.
  assertEqual(
    "select * from t tablesample bernoulli (43 percent) repeatable (123)",
    Sample(0, .43d, withReplacement = false, 123L,
      table("t"), SampleMethod.Bernoulli).select(star()))
}

test("SPARK-55978: TABLESAMPLE SYSTEM - REPEATABLE not supported") {
  // SYSTEM sampling is delegated to the data source, so a repeatable seed
  // cannot be honored; the parser rejects the combination up front.
  val sql = "select * from t"
  checkError(
    exception = parseException(s"$sql tablesample system (43 percent) repeatable (123)"),
    condition = "_LEGACY_ERROR_TEMP_0035",
    parameters = Map(
      "message" -> "TABLESAMPLE SYSTEM does not support REPEATABLE"),
    context = ExpectedContext(
      fragment = "tablesample system (43 percent) repeatable (123)",
      start = 16,
      stop = 63))
}

test("SPARK-55978: TABLESAMPLE SYSTEM - fraction out of range") {
  // The fraction validation ([0, 1]) applies to SYSTEM just as it does to the
  // default row sampling; 150% -> 1.5 and -10% -> -0.1 both fall outside it.
  val sql = "select * from t"
  // > 100 PERCENT
  checkError(
    exception = parseException(s"$sql tablesample system (150 percent) as x"),
    condition = "_LEGACY_ERROR_TEMP_0064",
    parameters = Map("msg" -> "Sampling fraction (1.5) must be on interval [0, 1]"),
    context = ExpectedContext(
      fragment = "tablesample system (150 percent)",
      start = 16,
      stop = 47))
  // Negative PERCENT
  checkError(
    exception = parseException(s"$sql tablesample system (-10 percent) as x"),
    condition = "_LEGACY_ERROR_TEMP_0064",
    parameters = Map("msg" -> "Sampling fraction (-0.1) must be on interval [0, 1]"),
    context = ExpectedContext(
      fragment = "tablesample system (-10 percent)",
      start = 16,
      stop = 47))
}

test("SPARK-55978: TABLESAMPLE SYSTEM and BERNOULLI as identifiers") {
  // Both new keywords are nonReserved in the grammar, so existing schemas that
  // use them as column or alias names must keep parsing.
  // SYSTEM usable as column name (nonReserved)
  assertEqual("SELECT system FROM t",
    table("t").select($"system"))
  // BERNOULLI usable as column name
  assertEqual("SELECT bernoulli FROM t",
    table("t").select($"bernoulli"))
  // Usable as table alias
  assertEqual("SELECT * FROM t system",
    table("t").as("system").select(star()))
  assertEqual("SELECT * FROM t bernoulli",
    table("t").as("bernoulli").select(star()))
}

test("SPARK-55978: TABLESAMPLE SYSTEM - subquery and join contexts") {
  // SYSTEM sample in subquery: the Sample node stays attached to the inner
  // relation, below the subquery alias.
  assertEqual(
    "SELECT * FROM (SELECT * FROM t TABLESAMPLE SYSTEM (50 PERCENT)) sub",
    Sample(0, .5d, withReplacement = false, 10L,
      table("t"), SampleMethod.System)
      .select(star()).as("sub").select(star()))
}

test("sub-query") {
val plan = table("t0").select($"id")
assertEqual("select id from (t0)", plan)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -209,7 +209,7 @@ class SparkConnectDatabaseMetaDataSuite extends ConnectFunSuite with RemoteSpark
withConnection { conn =>
val metadata = conn.getMetaData
// scalastyle:off line.size.limit
assert(metadata.getSQLKeywords === "ADD,AFTER,AGGREGATE,ALWAYS,ANALYZE,ANTI,ANY_VALUE,ARCHIVE,ASC,BINDING,BUCKET,BUCKETS,BYTE,CACHE,CASCADE,CATALOG,CATALOGS,CHANGE,CHANGES,CLEAR,CLUSTER,CLUSTERED,CODEGEN,COLLATION,COLLECTION,COLUMNS,COMMENT,COMPACT,COMPACTIONS,COMPENSATION,COMPUTE,CONCATENATE,CONTAINS,CONTINUE,COST,DATA,DATABASE,DATABASES,DATEADD,DATEDIFF,DATE_ADD,DATE_DIFF,DAYOFYEAR,DAYS,DBPROPERTIES,DEFINED,DEFINER,DELAY,DELIMITED,DESC,DFS,DIRECTORIES,DIRECTORY,DISTRIBUTE,DIV,DO,ELSEIF,ENFORCED,ESCAPED,EVOLUTION,EXCHANGE,EXCLUDE,EXCLUSIVE,EXIT,EXPLAIN,EXPORT,EXTEND,EXTENDED,FIELDS,FILEFORMAT,FIRST,FLOW,FOLLOWING,FORMAT,FORMATTED,FOUND,FUNCTIONS,GENERATED,GEOGRAPHY,GEOMETRY,HANDLER,HOURS,IDENTIFIED,IDENTIFIER,IF,IGNORE,ILIKE,IMMEDIATE,INCLUDE,INCLUSIVE,INCREMENT,INDEX,INDEXES,INPATH,INPUT,INPUTFORMAT,INVOKER,ITEMS,ITERATE,JSON,KEY,KEYS,LAST,LAZY,LEAVE,LEVEL,LIMIT,LINES,LIST,LOAD,LOCATION,LOCK,LOCKS,LOGICAL,LONG,LOOP,MACRO,MAP,MATCHED,MATERIALIZED,MEASURE,METRICS,MICROSECOND,MICROSECONDS,MILLISECOND,MILLISECONDS,MINUS,MINUTES,MONTHS,MSCK,NAME,NAMESPACE,NAMESPACES,NANOSECOND,NANOSECONDS,NORELY,NULLS,OFFSET,OPTION,OPTIONS,OUTPUTFORMAT,OVERWRITE,PARTITIONED,PARTITIONS,PERCENT,PIVOT,PLACING,PRECEDING,PRINCIPALS,PROCEDURES,PROPERTIES,PURGE,QUARTER,QUERY,RECORDREADER,RECORDWRITER,RECOVER,RECURSION,REDUCE,REFRESH,RELY,RENAME,REPAIR,REPEAT,REPEATABLE,REPLACE,RESET,RESPECT,RESTRICT,ROLE,ROLES,SCHEMA,SCHEMAS,SECONDS,SECURITY,SEMI,SEPARATED,SERDE,SERDEPROPERTIES,SETS,SHORT,SHOW,SINGLE,SKEWED,SORT,SORTED,SOURCE,STATISTICS,STORED,STRATIFY,STREAM,STREAMING,STRING,STRUCT,SUBSTR,SYNC,SYSTEM_TIME,SYSTEM_VERSION,TABLES,TARGET,TBLPROPERTIES,TERMINATED,TIMEDIFF,TIMESTAMPADD,TIMESTAMPDIFF,TIMESTAMP_LTZ,TIMESTAMP_NTZ,TINYINT,TOUCH,TRANSACTION,TRANSACTIONS,TRANSFORM,TRUNCATE,TRY_CAST,TYPE,UNARCHIVE,UNBOUNDED,UNCACHE,UNLOCK,UNPIVOT,UNSET,UNTIL,USE,VAR,VARIABLE,VARIANT,VERSION,VIEW,VIEWS,VOID,WATERMARK,WEEK,WEEKS,WHILE,X,YEARS,ZONE")
assert(metadata.getSQLKeywords === "ADD,AFTER,AGGREGATE,ALWAYS,ANALYZE,ANTI,ANY_VALUE,ARCHIVE,ASC,BERNOULLI,BINDING,BUCKET,BUCKETS,BYTE,CACHE,CASCADE,CATALOG,CATALOGS,CHANGE,CHANGES,CLEAR,CLUSTER,CLUSTERED,CODEGEN,COLLATION,COLLECTION,COLUMNS,COMMENT,COMPACT,COMPACTIONS,COMPENSATION,COMPUTE,CONCATENATE,CONTAINS,CONTINUE,COST,DATA,DATABASE,DATABASES,DATEADD,DATEDIFF,DATE_ADD,DATE_DIFF,DAYOFYEAR,DAYS,DBPROPERTIES,DEFINED,DEFINER,DELAY,DELIMITED,DESC,DFS,DIRECTORIES,DIRECTORY,DISTRIBUTE,DIV,DO,ELSEIF,ENFORCED,ESCAPED,EVOLUTION,EXCHANGE,EXCLUDE,EXCLUSIVE,EXIT,EXPLAIN,EXPORT,EXTEND,EXTENDED,FIELDS,FILEFORMAT,FIRST,FLOW,FOLLOWING,FORMAT,FORMATTED,FOUND,FUNCTIONS,GENERATED,GEOGRAPHY,GEOMETRY,HANDLER,HOURS,IDENTIFIED,IDENTIFIER,IF,IGNORE,ILIKE,IMMEDIATE,INCLUDE,INCLUSIVE,INCREMENT,INDEX,INDEXES,INPATH,INPUT,INPUTFORMAT,INVOKER,ITEMS,ITERATE,JSON,KEY,KEYS,LAST,LAZY,LEAVE,LEVEL,LIMIT,LINES,LIST,LOAD,LOCATION,LOCK,LOCKS,LOGICAL,LONG,LOOP,MACRO,MAP,MATCHED,MATERIALIZED,MEASURE,METRICS,MICROSECOND,MICROSECONDS,MILLISECOND,MILLISECONDS,MINUS,MINUTES,MONTHS,MSCK,NAME,NAMESPACE,NAMESPACES,NANOSECOND,NANOSECONDS,NORELY,NULLS,OFFSET,OPTION,OPTIONS,OUTPUTFORMAT,OVERWRITE,PARTITIONED,PARTITIONS,PERCENT,PIVOT,PLACING,PRECEDING,PRINCIPALS,PROCEDURES,PROPERTIES,PURGE,QUARTER,QUERY,RECORDREADER,RECORDWRITER,RECOVER,RECURSION,REDUCE,REFRESH,RELY,RENAME,REPAIR,REPEAT,REPEATABLE,REPLACE,RESET,RESPECT,RESTRICT,ROLE,ROLES,SCHEMA,SCHEMAS,SECONDS,SECURITY,SEMI,SEPARATED,SERDE,SERDEPROPERTIES,SETS,SHORT,SHOW,SINGLE,SKEWED,SORT,SORTED,SOURCE,STATISTICS,STORED,STRATIFY,STREAM,STREAMING,STRING,STRUCT,SUBSTR,SYNC,SYSTEM_TIME,SYSTEM_VERSION,TABLES,TARGET,TBLPROPERTIES,TERMINATED,TIMEDIFF,TIMESTAMPADD,TIMESTAMPDIFF,TIMESTAMP_LTZ,TIMESTAMP_NTZ,TINYINT,TOUCH,TRANSACTION,TRANSACTIONS,TRANSFORM,TRUNCATE,TRY_CAST,TYPE,UNARCHIVE,UNBOUNDED,UNCACHE,UNLOCK,UNPIVOT,UNSET,UNTIL,USE,VAR,VARIABLE,VARIANT,VERSION,VIEW,VIEWS,VOID,WATERMARK,WEEK,WEEKS,WHILE,X,YEARS,ZONE")
// scalastyle:on line.size.limit
}
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
Sample 0.0, 0.43, false, 9890823
Sample 0.0, 0.43, false, 9890823, Bernoulli
+- LocalRelation <empty>, [id#0L, a#0, b#0]
Loading