Nate Loker
11/20/2020, 9:46 PM
py4j.protocol.Py4JJavaError: An error occurred while calling o103.jdbc.
: org.postgresql.util.PSQLException: FATAL: role "test" does not exist
at org.postgresql.core.v3.QueryExecutorImpl.receiveErrorResponse(QueryExecutorImpl.java:2440)
at org.postgresql.core.v3.QueryExecutorImpl.readStartupMessages(QueryExecutorImpl.java:2559)
at org.postgresql.core.v3.QueryExecutorImpl.<init>(QueryExecutorImpl.java:133)
at org.postgresql.core.v3.ConnectionFactoryImpl.openConnectionImpl(ConnectionFactoryImpl.java:250)
at org.postgresql.core.ConnectionFactory.openConnection(ConnectionFactory.java:49)
at org.postgresql.jdbc.PgConnection.<init>(PgConnection.java:195)
at org.postgresql.Driver.makeConnection(Driver.java:454)
at org.postgresql.Driver.connect(Driver.java:256)
at org.apache.spark.sql.execution.datasources.jdbc.DriverWrapper.connect(DriverWrapper.scala:45)
at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$.$anonfun$createConnectionFactory$1(JdbcUtils.scala:64)
at org.apache.spark.sql.execution.datasources.jdbc.JdbcRelationProvider.createRelation(JdbcRelationProvider.scala:48)
at org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand.run(SaveIntoDataSourceCommand.scala:46)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:90)
at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:175)
at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:213)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:210)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:171)
at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:122)
at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:121)
at org.apache.spark.sql.DataFrameWriter.$anonfun$runCommand$1(DataFrameWriter.scala:963)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100)
at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764)
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:963)
at org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:415)
at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:399)
at org.apache.spark.sql.DataFrameWriter.jdbc(DataFrameWriter.scala:791)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:64)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:564)
at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
at py4j.Gateway.invoke(Gateway.java:282)
at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
at py4j.commands.CallCommand.execute(CallCommand.java:79)
at py4j.GatewayConnection.run(GatewayConnection.java:238)
at java.base/java.lang.Thread.run(Thread.java:832)
File "/Users/nateloker/anaconda3/lib/python3.7/site-packages/dagster/core/errors.py", line 180, in user_code_error_boundary
yield
File "/Users/nateloker/anaconda3/lib/python3.7/site-packages/dagster/core/execution/plan/execute_step.py", line 479, in _user_event_sequence_for_step_compute_fn
for event in iterate_with_context(raise_interrupts_immediately, gen):
File "/Users/nateloker/anaconda3/lib/python3.7/site-packages/dagster/utils/__init__.py", line 443, in iterate_with_context
next_output = next(iterator)
File "/Users/nateloker/anaconda3/lib/python3.7/site-packages/dagster/core/execution/plan/compute.py", line 107, in _execute_core_compute
for step_output in _yield_compute_results(compute_context, inputs, compute_fn):
File "/Users/nateloker/anaconda3/lib/python3.7/site-packages/dagster/core/execution/plan/compute.py", line 78, in _yield_compute_results
for event in user_event_sequence:
File "/Users/nateloker/anaconda3/lib/python3.7/site-packages/dagster/core/definitions/decorators/solid.py", line 230, in compute
for item in result:
File "/Users/nateloker/dev/dagster/examples/airline_demo/airline_demo/solids.py", line 228, in load_data_to_database_from_spark
context.resources.db_info.load_table(data_frame, context.solid_config["table_name"])
File "/Users/nateloker/dev/dagster/examples/airline_demo/airline_demo/resources.py", line 131, in _do_load
db_url_jdbc, table_name
File "/Users/nateloker/anaconda3/lib/python3.7/site-packages/pyspark/sql/readwriter.py", line 1082, in jdbc
self.mode(mode)._jwrite.jdbc(url, table, jprop)
File "/Users/nateloker/anaconda3/lib/python3.7/site-packages/py4j/java_gateway.py", line 1305, in __call__
answer, self.gateway_client, self.target_id, self.name)
File "/Users/nateloker/anaconda3/lib/python3.7/site-packages/pyspark/sql/utils.py", line 128, in deco
return f(*a, **kw)
File "/Users/nateloker/anaconda3/lib/python3.7/site-packages/py4j/protocol.py", line 328, in get_return_value
format(target_id, ".", name), value)
sandy
11/21/2020, 1:10 AM
Nate Loker
11/21/2020, 1:36 AM