PySpark local unittest - py4j.protocol.Py4JJavaError: An error occurred while calling o55.collectToPython

I am experimenting with how to get started with Spark unit testing, and I have the following scenario.
The folder structure is as follows:
src/
    transformers.py
tests/
    unit/
        test_transformers.py
The code in the transformers.py file looks like this (very basic):
from pyspark.sql import DataFrame
import pyspark.sql.functions as F

def convert_sales_cent_to_euro(df: DataFrame) -> DataFrame:
    df = df.withColumn('sales_in_euro', F.round(F.col('sales') / 100., 2))
    return df

def groupBy_sum(df: DataFrame) -> DataFrame:
    df = df.groupBy('store').agg({'sales_in_euro': 'sum'})
    return df
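(For illustration: the two transformers are meant to be chained, since groupBy_sum consumes the sales_in_euro column that convert_sales_cent_to_euro creates. A minimal usage sketch via the standard DataFrame.transform method, with a made-up session and input row:)

from pyspark.sql import SparkSession
import src.transformers as tr

spark = SparkSession.builder.master("local[1]").getOrCreate()
df = spark.createDataFrame([('Madrid', 10301.75)], ['store', 'sales'])

# convert the cent amounts to euros, then aggregate per store
result = df.transform(tr.convert_sales_cent_to_euro).transform(tr.groupBy_sum)
result.show()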
So far I have only written a unit test for the convert_sales_cent_to_euro function. The test_transformers.py file looks like this:
from pyspark.sql import SparkSession
import pyspark.sql.types as T
import src.transformers as tr

def get_spark():
    spark = (SparkSession.builder.master("local[1]")
             .appName('unittest')
             .getOrCreate())
    return spark

def test_convert_sales_to_euro():
    spark = get_spark()
    input_schema = T.StructType([
        T.StructField('store', T.StringType(), True),
        T.StructField('sales', T.FloatType(), True),
    ])
    input_data = [('Madrid', 10301.75),
                  ('London', 0.),
                  ('Paris', 103013.)]
    input_df = spark.createDataFrame(data=input_data, schema=input_schema)
    expected_schema = T.StructType([
        T.StructField('store', T.StringType(), True),
        T.StructField('sales', T.FloatType(), True),
        T.StructField('sales_in_euro', T.FloatType(), True),
    ])
    expected_data = [('Madrid', 10301.75, 103.02),
                     ('London', 0., 0.),
                     ('Paris', 103013., 1030.13)]
    expected_df = spark.createDataFrame(data=expected_data, schema=expected_schema)
    transformed_df = tr.convert_sales_cent_to_euro(input_df)
    assert set(expected_df.columns) == set(transformed_df.columns)
    assert sorted(expected_df.collect()) == sorted(transformed_df.collect())
When I execute python -m pytest tests/unit/, I get the following error:
FAILED tests/unit/test_transformers.py::test_convert_sales_to_euro - py4j.protocol.Py4JJavaError: An error occurred while calling o53.collectToPython.
I also get this warning (but I am not sure whether it is related):
RuntimeWarning: numpy.ufunc size changed, may indicate binary incompatibility. Expected 112 from C header, got 124 from PyObject
return f(*args, **kwds)
Does anyone know what the problem is? I executed the same script in Databricks and it ran without problems there, so I assume the issue is related to my local Spark session.
Edit: Full error below
================================================== FAILURES ==================================================
_________________________________________ test_convert_sales_to_euro _________________________________________
    def test_convert_sales_to_euro():
        spark = get_spark()
        input_schema = T.StructType([
            T.StructField('store', T.StringType(), True),
            T.StructField('sales', T.FloatType(), True),
        ])
        input_data = [('Madrid', 10301.75),
                      ('London', 0.),
                      ('Paris', 103013.)]
        input_df = spark.createDataFrame(data=input_data, schema=input_schema)
        expected_schema = T.StructType([
            T.StructField('store', T.StringType(), True),
            T.StructField('sales', T.FloatType(), True),
            T.StructField('sales_in_euro', T.FloatType(), True),
        ])
        expected_data = [('Madrid', 10301.75, 103.02),
                         ('London', 0., 0.),
                         ('Paris', 103013., 1030.13)]
        expected_df = spark.createDataFrame(data=expected_data, schema=expected_schema)
        transformed_df = tr.convert_sales_cent_to_euro(input_df)
        assert set(expected_df.columns) == set(transformed_df.columns)
>       assert sorted(expected_df.collect()) == sorted(transformed_df.collect())
tests\unit\test_transformers.py:35:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
..\..\..\..\AppData\Roaming\Python\Python37\site-packages\pyspark\sql\dataframe.py:817: in collect
sock_info = self._jdf.collectToPython()
..\..\..\..\AppData\Roaming\Python\Python37\site-packages\py4j\java_gateway.py:1322: in __call__
answer, self.gateway_client, self.target_id, self.name)
..\..\..\..\AppData\Roaming\Python\Python37\site-packages\pyspark\sql\utils.py:190: in deco
return f(*a, **kw)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
answer = 'xro59', gateway_client = <py4j.clientserver.JavaClient object at 0x110512F0>, target_id = 'o53'
name = 'collectToPython'
    def get_return_value(answer, gateway_client, target_id=None, name=None):
        """Converts an answer received from the Java gateway into a Python object.

        For example, string representation of integers are converted to Python
        integer, string representation of objects are converted to JavaObject
        instances, etc.

        :param answer: the string returned by the Java gateway
        :param gateway_client: the gateway client used to communicate with the Java
            Gateway. Only necessary if the answer is a reference (e.g., object,
            list, map)
        :param target_id: the name of the object from which the answer comes from
            (e.g., *object1* in `object1.hello()`). Optional.
        :param name: the name of the member from which the answer comes from
            (e.g., *hello* in `object1.hello()`). Optional.
        """
        if is_error(answer)[0]:
            if len(answer) > 1:
                type = answer[1]
                value = OUTPUT_CONVERTER[type](answer[2:], gateway_client)
                if answer[1] == REFERENCE_TYPE:
                    raise Py4JJavaError(
                        "An error occurred while calling {0}{1}{2}.\n".
>                       format(target_id, ".", name), value)
E py4j.protocol.Py4JJavaError: An error occurred while calling o53.collectToPython.
E : org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 0.0 failed 1 times, most recent failure: Lost task 0.0 in stage 0.0 (TID 0) (PC6607.dc.tess.elex.be executor driver): org.apache.spark.SparkException: Python worker failed to connect back.
E at org.apache.spark.api.python.PythonWorkerFactory.createSimpleWorker(PythonWorkerFactory.scala:189)
E at org.apache.spark.api.python.PythonWorkerFactory.create(PythonWorkerFactory.scala:109)
E at org.apache.spark.SparkEnv.createPythonWorker(SparkEnv.scala:124)
E at org.apache.spark.api.python.BasePythonRunner.compute(PythonRunner.scala:164)
E at org.apache.spark.api.python.PythonRDD.compute(PythonRDD.scala:65)
E at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:365)
E at org.apache.spark.rdd.RDD.iterator(RDD.scala:329)
E at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
E at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:365)
E at org.apache.spark.rdd.RDD.iterator(RDD.scala:329)
E at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
E at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:365)
E at org.apache.spark.rdd.RDD.iterator(RDD.scala:329)
E at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
E at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:365)
E at org.apache.spark.rdd.RDD.iterator(RDD.scala:329)
E at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
E at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:365)
E at org.apache.spark.rdd.RDD.iterator(RDD.scala:329)
E at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
E at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:365)
E at org.apache.spark.rdd.RDD.iterator(RDD.scala:329)
E at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
E at org.apache.spark.scheduler.Task.run(Task.scala:136)
E at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:548)
E at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1504)
E at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:551)
E at java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
E at java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
E at java.lang.Thread.run(Unknown Source)
E Caused by: java.net.SocketTimeoutException: Accept timed out
E at java.net.DualStackPlainSocketImpl.waitForNewConnection(Native Method)
E at java.net.DualStackPlainSocketImpl.socketAccept(Unknown Source)
E at java.net.AbstractPlainSocketImpl.accept(Unknown Source)
E at java.net.PlainSocketImpl.accept(Unknown Source)
E at java.net.ServerSocket.implAccept(Unknown Source)
E at java.net.ServerSocket.accept(Unknown Source)
E at org.apache.spark.api.python.PythonWorkerFactory.createSimpleWorker(PythonWorkerFactory.scala:176)
E ... 29 more
E
E Driver stacktrace:
E at org.apache.spark.scheduler.DAGScheduler.failJobAndIndependentStages(DAGScheduler.scala:2672)
E at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2(DAGScheduler.scala:2608)
E at org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2$adapted(DAGScheduler.scala:2607)
E at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
E at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
E at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
E at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:2607)
E at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1(DAGScheduler.scala:1182)
E at org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1$adapted(DAGScheduler.scala:1182)
E at scala.Option.foreach(Option.scala:407)
E at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:1182)
E at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:2860)
E at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2802)
E at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2791)
E at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)
E at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:952)
E at org.apache.spark.SparkContext.runJob(SparkContext.scala:2228)
E at org.apache.spark.SparkContext.runJob(SparkContext.scala:2249)
E at org.apache.spark.SparkContext.runJob(SparkContext.scala:2268)
E at org.apache.spark.SparkContext.runJob(SparkContext.scala:2293)
E at org.apache.spark.rdd.RDD.$anonfun$collect$1(RDD.scala:1021)
E at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
E at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
E at org.apache.spark.rdd.RDD.withScope(RDD.scala:406)
E at org.apache.spark.rdd.RDD.collect(RDD.scala:1020)
E at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:424)
E at org.apache.spark.sql.Dataset.$anonfun$collectToPython$1(Dataset.scala:3688)
E at org.apache.spark.sql.Dataset.$anonfun$withAction$2(Dataset.scala:3858)
E at org.apache.spark.sql.execution.QueryExecution$.withInternalError(QueryExecution.scala:510)
E at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3856)
E at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:109)
E at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:169)
E at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:95)
E at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
E at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
E at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3856)
E at org.apache.spark.sql.Dataset.collectToPython(Dataset.scala:3685)
E at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
E at sun.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
E at sun.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
E at java.lang.reflect.Method.invoke(Unknown Source)
E at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
E at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
E at py4j.Gateway.invoke(Gateway.java:282)
E at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
E at py4j.commands.CallCommand.execute(CallCommand.java:79)
E at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:182)
E at py4j.ClientServerConnection.run(ClientServerConnection.java:106)
E at java.lang.Thread.run(Unknown Source)
E Caused by: org.apache.spark.SparkException: Python worker failed to connect back.
E at org.apache.spark.api.python.PythonWorkerFactory.createSimpleWorker(PythonWorkerFactory.scala:189)
E at org.apache.spark.api.python.PythonWorkerFactory.create(PythonWorkerFactory.scala:109)
E at org.apache.spark.SparkEnv.createPythonWorker(SparkEnv.scala:124)
E at org.apache.spark.api.python.BasePythonRunner.compute(PythonRunner.scala:164)
E at org.apache.spark.api.python.PythonRDD.compute(PythonRDD.scala:65)
E at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:365)
E at org.apache.spark.rdd.RDD.iterator(RDD.scala:329)
E at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
E at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:365)
E at org.apache.spark.rdd.RDD.iterator(RDD.scala:329)
E at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
E at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:365)
E at org.apache.spark.rdd.RDD.iterator(RDD.scala:329)
E at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
E at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:365)
E at org.apache.spark.rdd.RDD.iterator(RDD.scala:329)
E at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
E at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:365)
E at org.apache.spark.rdd.RDD.iterator(RDD.scala:329)
E at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
E at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:365)
E at org.apache.spark.rdd.RDD.iterator(RDD.scala:329)
E at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
E at org.apache.spark.scheduler.Task.run(Task.scala:136)
E at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:548)
E at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1504)
E at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:551)
E at java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
E at java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
E ... 1 more
E Caused by: java.net.SocketTimeoutException: Accept timed out
E at java.net.DualStackPlainSocketImpl.waitForNewConnection(Native Method)
E at java.net.DualStackPlainSocketImpl.socketAccept(Unknown Source)
E at java.net.AbstractPlainSocketImpl.accept(Unknown Source)
E at java.net.PlainSocketImpl.accept(Unknown Source)
E at java.net.ServerSocket.implAccept(Unknown Source)
E at java.net.ServerSocket.accept(Unknown Source)
E at org.apache.spark.api.python.PythonWorkerFactory.createSimpleWorker(PythonWorkerFactory.scala:176)
E ... 29 more
..\..\..\..\AppData\Roaming\Python\Python37\site-packages\py4j\protocol.py:328: Py4JJavaError
-------------------------------------------- Captured stdout call --------------------------------------------
23/01/19 13:23:47 WARN Shell: Did not find winutils.exe: java.io.FileNotFoundException: java.io.FileNotFoundException: HADOOP_HOME and hadoop.home.dir are unset. -see https://wiki.apache.org/hadoop/WindowsProblems
23/01/19 13:23:48 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
23/01/19 13:23:59 WARN SizeEstimator: Failed to check whether UseCompressedOops is set; assuming yes
23/01/19 13:24:09 ERROR Executor: Exception in task 0.0 in stage 0.0 (TID 0)
org.apache.spark.SparkException: Python worker failed to connect back.
at org.apache.spark.api.python.PythonWorkerFactory.createSimpleWorker(PythonWorkerFactory.scala:189)
at org.apache.spark.api.python.PythonWorkerFactory.create(PythonWorkerFactory.scala:109)
at org.apache.spark.SparkEnv.createPythonWorker(SparkEnv.scala:124)
at org.apache.spark.api.python.BasePythonRunner.compute(PythonRunner.scala:164)
at org.apache.spark.api.python.PythonRDD.compute(PythonRDD.scala:65)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:365)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:329)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:365)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:329)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:365)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:329)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:365)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:329)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:365)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:329)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:365)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:329)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
at org.apache.spark.scheduler.Task.run(Task.scala:136)
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:548)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1504)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:551)
at java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
at java.lang.Thread.run(Unknown Source)
Caused by: java.net.SocketTimeoutException: Accept timed out
at java.net.DualStackPlainSocketImpl.waitForNewConnection(Native Method)
at java.net.DualStackPlainSocketImpl.socketAccept(Unknown Source)
at java.net.AbstractPlainSocketImpl.accept(Unknown Source)
at java.net.PlainSocketImpl.accept(Unknown Source)
at java.net.ServerSocket.implAccept(Unknown Source)
at java.net.ServerSocket.accept(Unknown Source)
at org.apache.spark.api.python.PythonWorkerFactory.createSimpleWorker(PythonWorkerFactory.scala:176)
... 29 more
23/01/19 13:24:09 WARN TaskSetManager: Lost task 0.0 in stage 0.0 (TID 0) (PC6607.dc.tess.elex.be executor driver): org.apache.spark.SparkException: Python worker failed to connect back.
at org.apache.spark.api.python.PythonWorkerFactory.createSimpleWorker(PythonWorkerFactory.scala:189)
at org.apache.spark.api.python.PythonWorkerFactory.create(PythonWorkerFactory.scala:109)
at org.apache.spark.SparkEnv.createPythonWorker(SparkEnv.scala:124)
at org.apache.spark.api.python.BasePythonRunner.compute(PythonRunner.scala:164)
at org.apache.spark.api.python.PythonRDD.compute(PythonRDD.scala:65)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:365)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:329)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:365)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:329)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:365)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:329)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:365)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:329)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:365)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:329)
at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)
at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:365)
at org.apache.spark.rdd.RDD.iterator(RDD.scala:329)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
at org.apache.spark.scheduler.Task.run(Task.scala:136)
at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:548)
at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1504)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:551)
at java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
at java.lang.Thread.run(Unknown Source)
Caused by: java.net.SocketTimeoutException: Accept timed out
at java.net.DualStackPlainSocketImpl.waitForNewConnection(Native Method)
at java.net.DualStackPlainSocketImpl.socketAccept(Unknown Source)
at java.net.AbstractPlainSocketImpl.accept(Unknown Source)
at java.net.PlainSocketImpl.accept(Unknown Source)
at java.net.ServerSocket.implAccept(Unknown Source)
at java.net.ServerSocket.accept(Unknown Source)
at org.apache.spark.api.python.PythonWorkerFactory.createSimpleWorker(PythonWorkerFactory.scala:176)
... 29 more
23/01/19 13:24:09 ERROR TaskSetManager: Task 0 in stage 0.0 failed 1 times; aborting job
-------------------------------------------- Captured stderr call --------------------------------------------
Setting default log level to "WARN".
To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).
Python was not found; run without arguments to install from the Microsoft Store, or disable this shortcut from
Settings > Manage App Execution Aliases.
============================================== warnings summary ==============================================
tests/unit/test_transformers.py::test_convert_sales_to_euro
C:\Program Files (x86)\Python3\lib\importlib\_bootstrap.py:219: RuntimeWarning: numpy.ufunc size changed, may indicate binary incompatibility. Expected 112 from C header, got 124 from PyObject
return f(*args, **kwds)
-- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html
========================================== short test summary info ===========================================
FAILED tests/unit/test_transformers.py::test_convert_sales_to_euro - py4j.protocol.Py4JJavaError: An error occurred while calling o53.collectToPython.
======================================= 1 failed, 1 warning in 28.84s ========================================
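Note: the captured stderr above contains the real hint: "Python was not found; run without arguments to install from the Microsoft Store". The Spark executor is apparently spawning "python" via the Windows Store alias instead of the interpreter that runs pytest, which matches the "Python worker failed to connect back" error. A sketch of a possible fix, assuming the workers should use the same interpreter as pytest, is to pin the worker and driver Python before the session is created:

import os
import sys

from pyspark.sql import SparkSession

# Point Spark's Python workers at the interpreter running the tests, so the
# Microsoft Store "python" alias is never consulted when workers are spawned.
os.environ['PYSPARK_PYTHON'] = sys.executable
os.environ['PYSPARK_DRIVER_PYTHON'] = sys.executable

def get_spark():
    return (SparkSession.builder.master("local[1]")
            .appName('unittest')
            .getOrCreate())

Alternatively, disabling the python.exe alias under Settings > Manage App Execution Aliases (as the stderr message itself suggests) should have the same effect.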

Related

EMR date string NoViableAltException

I have a table in hive whose structure is like:
id             int
app_code       string
platform_code  string
date           string
downloads      bigint
Whenever I use the string-typed column "date" in my WHERE condition, I get a NoViableAltException. For example, when running select * from my_table where date = '2022-06-28';, the trace is:
NoViableAltException(18#[412:1: atomExpression : ( constant | ( intervalExpression )=> intervalExpression | castExpression | extractExpression | floorExpression | caseExpression | whenExpression | ( subQueryExpression )=> ( subQueryExpression ) -> ^( TOK_SUBQUERY_EXPR TOK_SUBQUERY_OP subQueryExpression ) | ( functionName LPAREN )=> function | tableOrColumn | expressionsInParenthesis[true] );])
at org.apache.hadoop.hive.ql.parse.HiveParser_IdentifiersParser$DFA36.specialStateTransition(HiveParser_IdentifiersParser.java:31810)
at org.antlr.runtime.DFA.predict(DFA.java:80)
at org.apache.hadoop.hive.ql.parse.HiveParser_IdentifiersParser.atomExpression(HiveParser_IdentifiersParser.java:6746)
at org.apache.hadoop.hive.ql.parse.HiveParser_IdentifiersParser.precedenceFieldExpression(HiveParser_IdentifiersParser.java:6988)
at org.apache.hadoop.hive.ql.parse.HiveParser_IdentifiersParser.precedenceUnaryPrefixExpression(HiveParser_IdentifiersParser.java:7324)
at org.apache.hadoop.hive.ql.parse.HiveParser_IdentifiersParser.precedenceUnarySuffixExpression(HiveParser_IdentifiersParser.java:7380)
at org.apache.hadoop.hive.ql.parse.HiveParser_IdentifiersParser.precedenceBitwiseXorExpression(HiveParser_IdentifiersParser.java:7542)
at org.apache.hadoop.hive.ql.parse.HiveParser_IdentifiersParser.precedenceStarExpression(HiveParser_IdentifiersParser.java:7685)
at org.apache.hadoop.hive.ql.parse.HiveParser_IdentifiersParser.precedencePlusExpression(HiveParser_IdentifiersParser.java:7828)
at org.apache.hadoop.hive.ql.parse.HiveParser_IdentifiersParser.precedenceConcatenateExpression(HiveParser_IdentifiersParser.java:7967)
at org.apache.hadoop.hive.ql.parse.HiveParser_IdentifiersParser.precedenceAmpersandExpression(HiveParser_IdentifiersParser.java:8177)
at org.apache.hadoop.hive.ql.parse.HiveParser_IdentifiersParser.precedenceBitwiseOrExpression(HiveParser_IdentifiersParser.java:8314)
at org.apache.hadoop.hive.ql.parse.HiveParser_IdentifiersParser.precedenceSimilarExpressionMain(HiveParser_IdentifiersParser.java:8801)
at org.apache.hadoop.hive.ql.parse.HiveParser_IdentifiersParser.precedenceSimilarExpression(HiveParser_IdentifiersParser.java:8697)
at org.apache.hadoop.hive.ql.parse.HiveParser_IdentifiersParser.precedenceEqualExpression(HiveParser_IdentifiersParser.java:9537)
at org.apache.hadoop.hive.ql.parse.HiveParser_IdentifiersParser.precedenceNotExpression(HiveParser_IdentifiersParser.java:9703)
at org.apache.hadoop.hive.ql.parse.HiveParser_IdentifiersParser.precedenceAndExpression(HiveParser_IdentifiersParser.java:9812)
at org.apache.hadoop.hive.ql.parse.HiveParser_IdentifiersParser.precedenceOrExpression(HiveParser_IdentifiersParser.java:9953)
at org.apache.hadoop.hive.ql.parse.HiveParser_IdentifiersParser.expression(HiveParser_IdentifiersParser.java:6686)
at org.apache.hadoop.hive.ql.parse.HiveParser.expression(HiveParser.java:41926)
at org.apache.hadoop.hive.ql.parse.HiveParser_FromClauseParser.searchCondition(HiveParser_FromClauseParser.java:6446)
at org.apache.hadoop.hive.ql.parse.HiveParser_FromClauseParser.whereClause(HiveParser_FromClauseParser.java:6364)
at org.apache.hadoop.hive.ql.parse.HiveParser.whereClause(HiveParser.java:42108)
at org.apache.hadoop.hive.ql.parse.HiveParser.atomSelectStatement(HiveParser.java:36755)
at org.apache.hadoop.hive.ql.parse.HiveParser.selectStatement(HiveParser.java:36987)
at org.apache.hadoop.hive.ql.parse.HiveParser.regularBody(HiveParser.java:36633)
at org.apache.hadoop.hive.ql.parse.HiveParser.queryStatementExpressionBody(HiveParser.java:35822)
at org.apache.hadoop.hive.ql.parse.HiveParser.queryStatementExpression(HiveParser.java:35710)
at org.apache.hadoop.hive.ql.parse.HiveParser.execStatement(HiveParser.java:2284)
at org.apache.hadoop.hive.ql.parse.HiveParser.statement(HiveParser.java:1333)
at org.apache.hadoop.hive.ql.parse.ParseDriver.parse(ParseDriver.java:208)
at org.apache.hadoop.hive.ql.parse.ParseUtils.parse(ParseUtils.java:77)
at org.apache.hadoop.hive.ql.parse.ParseUtils.parse(ParseUtils.java:70)
at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:468)
at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:1317)
at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1457)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1237)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1227)
at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:233)
at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:184)
at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:403)
at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:821)
at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:759)
at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:686)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.util.RunJar.run(RunJar.java:239)
at org.apache.hadoop.util.RunJar.main(RunJar.java:153)
FAILED: ParseException line 1:42 cannot recognize input near 'date' '=' ''2022-06-28'' in expression specification
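(Side note: date is a reserved keyword in recent Hive grammars, which is why the parser chokes on the bare column name; quoting the identifier in backticks normally parses. A sketch, expressed through PySpark for consistency with the rest of this page; the same backtick quoting works verbatim in the Hive CLI:)

from pyspark.sql import SparkSession

spark = SparkSession.builder.enableHiveSupport().getOrCreate()

# Backticks mark `date` as a column identifier rather than the keyword.
spark.sql("SELECT * FROM my_table WHERE `date` = '2022-06-28'").show()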

How to create a dictionary from different dataframes with different lengths and a common column?

I need to create a dictionary from 2 dataframes.
DF1:
A            C     D     E
10.12.13.16  604   2345  4572
10.12.13.16  3300  3456  5664
10.24.35.14  200   9753  75498
10.24.35.14  360   9874  56789
10.24.35.14  740   4563  7643
10.24.35.14  634   0867  8754
16.22.27.11  212   7653  7543
16.22.27.11  786   2356  7644
16.22.27.11  345   9765  3467
DF2 (B is the number of times each value of A appears in DF1):
A            B
10.12.13.16  2
10.24.35.14  4
16.22.27.11  3
I need to create a dictionary that, for each value i in A, records the number of appearances of i together with its C, D and E values.
For example:
10.12.13.16 appears 2 times, with
[C: 604, D: 2345, E: 4572] and [C: 3300, D: 3456, E: 5664]
10.24.35.14 appears 4 times, with
[C: 200, D: 9753, E: 75498] and [C: 360, D: 9874, E: 56789] and
[C: 740, D: 4563, E: 7643] and [C: 634, D: 0867, E: 8754]
16.22.27.11 appears 3 times, with
[C: 212, D: 7653, E: 7543] and [C: 786, D: 2356, E: 7644] and
[C: 345, D: 9765, E: 3467]
My attempt so far:
unique_A = DF1.groupby("A").size().to_frame().rename(columns={0: 'count'})
dictionary = {identifier: {"total_appearances": count['count'], "observations": []}
              for identifier, count in unique_A.iterrows()}
for index, series in DF1.iterrows():  # was "myFrame" in the original; DF1 is the frame shown above
    nested_dict = dictionary[series['A']]
    nested_dict["observations"].append(dict(series.iloc[1:]))
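A possibly shorter route, sketched with a plain pandas groupby over DF1 (untested against the real data; assumes the column names shown above):

# One pass over DF1: group by A, then turn each group's remaining
# columns into a list of {column: value} records.
dictionary = {
    a: {
        "total_appearances": len(group),
        "observations": group[['C', 'D', 'E']].to_dict('records'),
    }
    for a, group in DF1.groupby('A')
}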

Test exceptions using espec in Elixir app

I've just started learning Elixir.
I'm trying to test an exception with espec (https://github.com/antonmi/espec) and I'm stuck.
Here are my functions:
defp call_xml(request_body) do
  resp = HTTPotion.post("http://foo.bar", [body: request_body, headers: ["Content-Type": "text/xml"]])

  if resp.status_code in [200, 201] do
    {:ok, elem(XMLRPC.decode(resp.body), 1).param}
  else
    raise AbpError, message: "Message body"
  end
end

def create_some_stuff(a, b, c) do
  req = %XMLRPC.MethodCall{method_name: "Foo.Bar",
                           params: [a, b, c]} |> XMLRPC.encode!
  call_xml(req)
end
# tests
use ESpec
use HyperMock
import :meck

context "when exception raised" do
  it "returns err message" do
    # stubbed with :meck
    expect(MyModule, :create_some_stuff, fn("foo", "bar", "baz") -> raise CustomError end)
    expect(MyModule.create_some_stuff("foo", "bar", "baz")).to eq("???")
  end # it
end
In that case, the error is raised inside my expectation:
** (AbpError) Error occured!
spec/lib/ex_abpclient_spec.exs:135: anonymous fn/7 in ExAbpclientSpec.example_returns_created_payback_eqcpjlrszudikwyovtmxbgfnha/1
(ex_abpclient) ExAbpclient.create_payment_payback("tr-TR", 10966, 10, "R", 495, 10, "DESC")
spec/lib/ex_abpclient_spec.exs:136: ExAbpclientSpec.example_returns_created_payback_eqcpjlrszudikwyovtmxbgfnha/1
(espec) lib/espec/example_runner.ex:33: ESpec.ExampleRunner.run_example/2
(elixir) lib/enum.ex:1088: Enum."-map/2-lists^map/1-0-"/2
(elixir) lib/enum.ex:1088: Enum."-map/2-lists^map/1-0-"/2
(espec) lib/espec/runner.ex:70: ESpec.Runner.run_examples/1
(espec) lib/espec/runner.ex:43: ESpec.Runner.do_run/2
How can I test the stubbed exception?
Thanks in advance.
UPDATE
I tried to use HyperMock (https://github.com/stevegraham/hypermock) to stub the request, but with no luck either:
context "when payback created" do
it "returns created payback" do
HyperMock.intercept do
request = %Request{body: "<?xml version=\"1.0\" encoding=\"UTF-8\"?><methodCall>.....",
headers: ["Content-Type": "text/xml"],
method: :post,
uri: "http://foo.bar/webApiXmlRpcServlet"}
response = %Response{body: "fooooo", status: 500}
stub_request request, response
expect MyModule.create_some_stuff("a", "b", "c") |> to(raise_exception AbpError, "fooooo")
end
end # it
end # exception
Here is the result
/Users/retgoat/workspace/offside/ex_abpclient/spec/lib/ex_abpclient_spec.exs:135
** (AbpError) Error: "fooooo"
(ex_abpclient) lib/ex_abpclient.ex:55: ExAbpclient.call_xml/1
spec/lib/ex_abpclient_spec.exs:143: ExAbpclientSpec.example_returns_created_payback_nqfwohpurlvtzskdjxigeybamc/1
(espec) lib/espec/example_runner.ex:33: ESpec.ExampleRunner.run_example/2
(elixir) lib/enum.ex:1088: Enum."-map/2-lists^map/1-0-"/2
(elixir) lib/enum.ex:1088: Enum."-map/2-lists^map/1-0-"/2
(espec) lib/espec/runner.ex:70: ESpec.Runner.run_examples/1
(espec) lib/espec/runner.ex:43: ESpec.Runner.do_run/2
(espec) lib/espec/runner.ex:28: ESpec.Runner.handle_call/3
10 examples, 1 failures
Finished in 1.28 seconds (0.14s on load, 1.14s on specs)
The exception is raised, but I can't test it.
Roman!
You must pass a function to expect, not the result of a function call.
So just wrap MyModule.create_some_stuff("a", "b", "c") in fn -> ... end, like you do in the ExUnit example:
it "raises exception" do
  expect(fn -> MyModule.create_some_stuff("a", "b", "c") end)
  |> to(raise_exception AbpError, "fooooo")
end

Grails Gant script to run test cases first and then run the application, but it is giving a ScriptExitException

I am writing a Gant script in Grails that runs the unit tests first and then runs the application if the tests pass.
My script is:
includeTargets << grailsScript("_GrailsInit")
includeTargets << grailsScript("_GrailsSettings")
includeTargets << grailsScript("_GrailsClean")
includeTargets << grailsScript("_GrailsTest")
includeTargets << grailsScript("_GrailsRun")

import org.codehaus.groovy.grails.cli.GrailsScriptRunner as GSR
//includeTargets << new File ( "${grailsHome}/scripts/RunApp.groovy" )

/*target(customRun: "The description of the script goes here!") {
    // TODO: Implement script here
    depends(checkVersion, configureProxy, parseArguments, cleanTestReports)
    runApp()
    //runApp()
}*/

target(customRun: "The description of the script goes here!") {
    // TODO: Implement script here
    depends(checkVersion, configureProxy, parseArguments, cleanTestReports)
    System.setProperty("grails.env", "test")
    allTests()
    System.setProperty("grails.env", "development")
    runApp()
}

setDefaultTarget(customRun)
But I am getting a ScriptExitException once the test report is generated. I am not sure what is wrong in my script.
The exception is:
Error |
org.codehaus.groovy.grails.cli.ScriptExitException
Error |
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
Error |
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
Error |
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
Error |
at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
Error |
at org.codehaus.groovy.reflection.CachedConstructor.invoke(CachedConstructor.java:77)
Error |
at org.codehaus.groovy.runtime.callsite.ConstructorSite$ConstructorSiteNoUnwrapNoCoerce.callConstructor(ConstructorSite.java:102)
Error |
at org.codehaus.groovy.runtime.callsite.AbstractCallSite.callConstructor(AbstractCallSite.java:194)
Error |
at gant.Gant$_dispatch_closure5.doCall(Gant.groovy:391)
Error |
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
Error |
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
Error |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
Error |
at java.lang.reflect.Method.invoke(Method.java:606)
Error |
at org.codehaus.groovy.runtime.callsite.PogoMetaMethodSite$PogoCachedMethodSiteNoUnwrapNoCoerce.invoke(PogoMetaMethodSite.java:207)
Error |
at org.codehaus.groovy.runtime.callsite.PogoMetaMethodSite.call(PogoMetaMethodSite.java:68)
Error |
at org.codehaus.groovy.runtime.callsite.AbstractCallSite.call(AbstractCallSite.java:116)
Error |
at gant.Gant$_dispatch_closure7.doCall(Gant.groovy:415)
Error |
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
Error |
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
Error |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
Error |
at java.lang.reflect.Method.invoke(Method.java:606)
Error |
at org.codehaus.groovy.runtime.callsite.PogoMetaMethodSite$PogoCachedMethodSite.invoke(PogoMetaMethodSite.java:166)
Error |
at org.codehaus.groovy.runtime.callsite.PogoMetaMethodSite.callCurrent(PogoMetaMethodSite.java:56)
Error |
at org.codehaus.groovy.runtime.callsite.AbstractCallSite.callCurrent(AbstractCallSite.java:141)
Error |
at gant.Gant$_dispatch_closure7.doCall(Gant.groovy)
Error |
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
Error |
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
Error |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
Error |
at java.lang.reflect.Method.invoke(Method.java:606)
Error |
at org.codehaus.groovy.runtime.callsite.PogoMetaMethodSite$PogoCachedMethodSiteNoUnwrapNoCoerce.invoke(PogoMetaMethodSite.java:207)
Error |
at org.codehaus.groovy.runtime.callsite.PogoMetaMethodSite.call(PogoMetaMethodSite.java:68)
Error |
at org.codehaus.groovy.runtime.callsite.AbstractCallSite.call(AbstractCallSite.java:112)
Error |
at gant.Gant.withBuildListeners(Gant.groovy:427)
Error |
at gant.Gant.this$2$withBuildListeners(Gant.groovy)
Error |
at gant.Gant$this$2$withBuildListeners$0.callCurrent(Unknown Source)
Error |
at org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCallCurrent(CallSiteArray.java:49)
Error |
at gant.Gant$this$2$withBuildListeners$0.callCurrent(Unknown Source)
Error |
at gant.Gant.dispatch(Gant.groovy:415)
Error |
at gant.Gant.this$2$dispatch(Gant.groovy)
Error |
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
Error |
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
Error |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
Error |
at java.lang.reflect.Method.invoke(Method.java:606)
Error |
at org.codehaus.groovy.reflection.CachedMethod.invoke(CachedMethod.java:90)
Error |
at groovy.lang.MetaMethod.doMethodInvoke(MetaMethod.java:233)
Error |
at groovy.lang.MetaClassImpl.invokeMethod(MetaClassImpl.java:1085)
Error |
at groovy.lang.ExpandoMetaClass.invokeMethod(ExpandoMetaClass.java:1110)
Error |
at groovy.lang.MetaClassImpl.invokeMethod(MetaClassImpl.java:909)
Error |
at groovy.lang.MetaClassImpl.invokeMethod(MetaClassImpl.java:732)
Error |
at gant.Gant.invokeMethod(Gant.groovy)
Error |
at groovy.lang.GroovyObject$invokeMethod.callCurrent(Unknown Source)
Error |
at org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCallCurrent(CallSiteArray.java:49)
Error |
at groovy.lang.GroovyObject$invokeMethod.callCurrent(Unknown Source)
Error |
at gant.Gant.executeTargets(Gant.groovy:591)
Error |
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
Error |
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
Error |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
Error |
at java.lang.reflect.Method.invoke(Method.java:606)
Error |
at org.codehaus.groovy.runtime.callsite.PogoMetaMethodSite$PogoCachedMethodSite.invoke(PogoMetaMethodSite.java:166)
Error |
at org.codehaus.groovy.runtime.callsite.PogoMetaMethodSite.callCurrent(PogoMetaMethodSite.java:56)
Error |
at org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCallCurrent(CallSiteArray.java:49)
Error |
at org.codehaus.groovy.runtime.callsite.PogoMetaMethodSite.callCurrent(PogoMetaMethodSite.java:61)

Drupal 8 Additional uncaught exception thrown while handling exception

Additional uncaught exception thrown while handling exception.
Original
Drupal\Core\Database\DatabaseExceptionWrapper: SQLSTATE[42S02]: Base table or view not found: 1146 Table 'drupal8new.users' doesn't exist: SELECT u.*, s.* FROM {users} u INNER JOIN {sessions} s ON u.uid = s.uid WHERE s.sid = :sid; Array ( [:sid] => jTk-DdIj4XlzQsnH86R22ktqje2PUFx7RcfnngiQJJg ) in Drupal\Core\Database\Connection->query() (line 569 of /var/www/drupal8new/core/lib/Drupal/Core/Database/Connection.php).
Drupal\Core\Database\Connection->query('SELECT u.*, s.* FROM {users} u INNER JOIN {sessions} s ON u.uid = s.uid WHERE s.sid = :sid', Array)
Drupal\Core\Session\SessionHandler->read('IIBgOUpsx2qnDOPFcwY_1VKaZrjxC48SVzaOOoKEi78')
Symfony\Component\HttpFoundation\Session\Storage\Handler\WriteCheckSessionHandler->read('IIBgOUpsx2qnDOPFcwY_1VKaZrjxC48SVzaOOoKEi78')
Symfony\Component\HttpFoundation\Session\Storage\Proxy\SessionHandlerProxy->read('IIBgOUpsx2qnDOPFcwY_1VKaZrjxC48SVzaOOoKEi78')
session_start()
Symfony\Component\HttpFoundation\Session\Storage\NativeSessionStorage->start()
Drupal\Core\Session\SessionManager->start()
Drupal\Core\Session\SessionManager->initialize()
Drupal\Core\Authentication\Provider\Cookie->authenticate(Object)
Drupal\Core\Authentication\AuthenticationManager->authenticate(Object)
Drupal\Core\Session\AccountProxy->getAccount()
Drupal\Core\Session\AccountProxy->id()
Drupal\Core\Logger\LoggerChannel->log(6, '%module module installed.', Array)
watchdog('system', '%module module installed.', Array, 6)
Drupal\Core\Extension\ModuleHandler->install(Array, )
drupal_install_system(Array)
install_base_system(Array)
install_run_task(Array, Array)
install_run_tasks(Array)
install_drupal()
Additional
RuntimeException: Failed to start the session: already started by PHP. in Symfony\Component\HttpFoundation\Session\Storage\NativeSessionStorage->start() (line 134 of /var/www/drupal8new/core/vendor/symfony/http-foundation/Symfony/Component/HttpFoundation/Session/Storage/NativeSessionStorage.php).
Symfony\Component\HttpFoundation\Session\Storage\NativeSessionStorage->start()
Drupal\Core\Session\SessionManager->start()
Drupal\Core\Session\SessionManager->initialize()
Drupal\Core\Authentication\Provider\Cookie->authenticate(Object)
Drupal\Core\Authentication\AuthenticationManager->authenticate(Object)
Drupal\Core\Session\AccountProxy->getAccount()
Drupal\Core\Session\AccountProxy->id()
Drupal\Core\Logger\LoggerChannel->log(3, '%type: !message in %function (line %line of %file).', Array)
watchdog('php', '%type: !message in %function (line %line of %file).', Array, 3)
_drupal_log_error(Array, 1)
_drupal_exception_handler(Object)
How can I fix this error?