Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 0 additions & 19 deletions python/pyspark/sql/connect/expressions.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,6 @@
import decimal
import datetime
import warnings
from threading import Lock

import numpy as np

Expand Down Expand Up @@ -686,10 +685,6 @@ def __repr__(self) -> str:


class UnresolvedNamedLambdaVariable(Expression):

_lock: Lock = Lock()
_nextVarNameId: int = 0

def __init__(
self,
name_parts: Sequence[str],
Expand All @@ -712,20 +707,6 @@ def to_plan(self, session: "SparkConnectClient") -> proto.Expression:
def __repr__(self) -> str:
    """Return a readable representation listing the variable's name parts.

    Bug fix: the original opened two parentheses (the leading literal
    ``(`` and the call-style ``(``) but closed only one, yielding an
    unbalanced string like ``(UnresolvedNamedLambdaVariable(a, b)``.
    """
    return f"(UnresolvedNamedLambdaVariable({', '.join(self._name_parts)}))"

@staticmethod
def fresh_var_name(name: str) -> str:
    """Return a process-unique lambda-variable name of the form ``<name>_<id>``.

    A class-level counter is read and incremented while holding the
    class-level lock, so concurrent callers never receive the same id.

    Parameters
    ----------
    name : str
        Non-empty base name for the variable.
    """
    # Bug fix: the original asserted ``str != ""`` — comparing the
    # builtin *type* ``str`` to the empty string, which is always true —
    # instead of validating the ``name`` argument itself.
    assert isinstance(name, str) and name != ""

    # Increment under the lock; the id is bound inside the critical
    # section, so the earlier ``Optional[int]`` sentinel and follow-up
    # assert were dead weight and are removed.
    with UnresolvedNamedLambdaVariable._lock:
        _id = UnresolvedNamedLambdaVariable._nextVarNameId
        UnresolvedNamedLambdaVariable._nextVarNameId += 1

    return f"{name}_{_id}"


class LambdaFunction(Expression):
def __init__(
Expand Down
5 changes: 1 addition & 4 deletions python/pyspark/sql/connect/functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -160,10 +160,7 @@ def _create_lambda(f: Callable) -> LambdaFunction:
parameters = _get_lambda_parameters(f)

arg_names = ["x", "y", "z"][: len(parameters)]
arg_exprs = [
UnresolvedNamedLambdaVariable([UnresolvedNamedLambdaVariable.fresh_var_name(arg_name)])
for arg_name in arg_names
]
arg_exprs = [UnresolvedNamedLambdaVariable([arg_name]) for arg_name in arg_names]
arg_cols = [Column(arg_expr) for arg_expr in arg_exprs]

result = f(*arg_cols)
Expand Down