# announcements
nate:
Hey, I am working through the airline_demo and I keep getting an error when I execute load_q2_sfo_weather.compute. I will add the error output in the thread. Any help is appreciated.
py4j.protocol.Py4JJavaError: An error occurred while calling o103.jdbc.
: org.postgresql.util.PSQLException: FATAL: role "test" does not exist
	at org.postgresql.core.v3.QueryExecutorImpl.receiveErrorResponse(QueryExecutorImpl.java:2440)
	at org.postgresql.core.v3.QueryExecutorImpl.readStartupMessages(QueryExecutorImpl.java:2559)
	at org.postgresql.core.v3.QueryExecutorImpl.<init>(QueryExecutorImpl.java:133)
	at org.postgresql.core.v3.ConnectionFactoryImpl.openConnectionImpl(ConnectionFactoryImpl.java:250)
	at org.postgresql.core.ConnectionFactory.openConnection(ConnectionFactory.java:49)
	at org.postgresql.jdbc.PgConnection.<init>(PgConnection.java:195)
	at org.postgresql.Driver.makeConnection(Driver.java:454)
	at org.postgresql.Driver.connect(Driver.java:256)
	at org.apache.spark.sql.execution.datasources.jdbc.DriverWrapper.connect(DriverWrapper.scala:45)
	at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$.$anonfun$createConnectionFactory$1(JdbcUtils.scala:64)
	at org.apache.spark.sql.execution.datasources.jdbc.JdbcRelationProvider.createRelation(JdbcRelationProvider.scala:48)
	at org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand.run(SaveIntoDataSourceCommand.scala:46)
	at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
	at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
	at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:90)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:175)
	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:213)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:210)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:171)
	at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:122)
	at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:121)
	at org.apache.spark.sql.DataFrameWriter.$anonfun$runCommand$1(DataFrameWriter.scala:963)
	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:100)
	at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:160)
	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:87)
	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764)
	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
	at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:963)
	at org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:415)
	at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:399)
	at org.apache.spark.sql.DataFrameWriter.jdbc(DataFrameWriter.scala:791)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:64)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:564)
	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
	at py4j.Gateway.invoke(Gateway.java:282)
	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
	at py4j.commands.CallCommand.execute(CallCommand.java:79)
	at py4j.GatewayConnection.run(GatewayConnection.java:238)
	at java.base/java.lang.Thread.run(Thread.java:832)

  File "/Users/nateloker/anaconda3/lib/python3.7/site-packages/dagster/core/errors.py", line 180, in user_code_error_boundary
    yield
  File "/Users/nateloker/anaconda3/lib/python3.7/site-packages/dagster/core/execution/plan/execute_step.py", line 479, in _user_event_sequence_for_step_compute_fn
    for event in iterate_with_context(raise_interrupts_immediately, gen):
  File "/Users/nateloker/anaconda3/lib/python3.7/site-packages/dagster/utils/__init__.py", line 443, in iterate_with_context
    next_output = next(iterator)
  File "/Users/nateloker/anaconda3/lib/python3.7/site-packages/dagster/core/execution/plan/compute.py", line 107, in _execute_core_compute
    for step_output in _yield_compute_results(compute_context, inputs, compute_fn):
  File "/Users/nateloker/anaconda3/lib/python3.7/site-packages/dagster/core/execution/plan/compute.py", line 78, in _yield_compute_results
    for event in user_event_sequence:
  File "/Users/nateloker/anaconda3/lib/python3.7/site-packages/dagster/core/definitions/decorators/solid.py", line 230, in compute
    for item in result:
  File "/Users/nateloker/dev/dagster/examples/airline_demo/airline_demo/solids.py", line 228, in load_data_to_database_from_spark
    context.resources.db_info.load_table(data_frame, context.solid_config["table_name"])
  File "/Users/nateloker/dev/dagster/examples/airline_demo/airline_demo/resources.py", line 131, in _do_load
    db_url_jdbc, table_name
  File "/Users/nateloker/anaconda3/lib/python3.7/site-packages/pyspark/sql/readwriter.py", line 1082, in jdbc
    self.mode(mode)._jwrite.jdbc(url, table, jprop)
  File "/Users/nateloker/anaconda3/lib/python3.7/site-packages/py4j/java_gateway.py", line 1305, in __call__
    answer, self.gateway_client, self.target_id, self.name)
  File "/Users/nateloker/anaconda3/lib/python3.7/site-packages/pyspark/sql/utils.py", line 128, in deco
    return f(*a, **kw)
  File "/Users/nateloker/anaconda3/lib/python3.7/site-packages/py4j/protocol.py", line 328, in get_return_value
    format(target_id, ".", name), value)
It says role "test" does not exist, but I have my Docker container up and running, so I am not sure why this is happening.
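The PSQLException above boils down to the Postgres role named in the JDBC credentials ("test") not existing in the database the connection actually reaches, so the write fails while opening the connection, before Spark moves any data. A quick way to rule Spark out is to probe the same connection directly. This is only a sketch: it assumes psycopg2 is installed, and every connection parameter below is a placeholder to replace with whatever the airline_demo run config and Docker setup actually use.

```python
import psycopg2

# Standalone connection probe, independent of Spark and Dagster. All of the
# values below are placeholders -- substitute the host, port, user, password,
# and database from your own run config / docker setup.
try:
    conn = psycopg2.connect(
        host="localhost",   # assumed: the Postgres container is published on localhost
        port=5432,          # assumed default Postgres port
        user="test",        # the role named in the error message
        password="test",    # placeholder
        dbname="test",      # placeholder
    )
    print("connected as:", conn.get_dsn_parameters().get("user"))
    conn.close()
except psycopg2.OperationalError as exc:
    # Seeing the same 'role "test" does not exist' here suggests the container
    # was not initialized with that user, or that a different Postgres instance
    # (for example a local install on the same port) is answering the connection.
    print("connection failed:", exc)
```

If this probe fails with the same message, the problem is in the database setup or in which Postgres instance the port reaches, not in the Spark or Dagster code.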
sandy:
hey Nate - the airline demo isn't super well supported anymore. are there particular things you're looking to learn how to do?
nate:
@sandy ok no problem, thanks for letting me know. I am just trying to get a good overview of Dagster. I will take a look at some of the other examples.
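For getting that overview from the other examples, a minimal pipeline may help for orientation. This is only a sketch, assuming a pre-1.0 Dagster release that still ships the solid/pipeline APIs (the same era as the traceback above); the solid names here are made up for illustration.

```python
from dagster import execute_pipeline, pipeline, solid

# A toy pipeline in the solid/pipeline style used by the older examples.
@solid
def get_name(context):
    # Produce a value that downstream solids consume as an input.
    return "dagster"

@solid
def hello(context, name: str):
    # Log a message using the solid's execution context.
    context.log.info(f"Hello, {name}!")

@pipeline
def hello_pipeline():
    # Wire the output of get_name into hello's input.
    hello(get_name())

if __name__ == "__main__":
    # Run the pipeline in-process, same as executing it from Dagit.
    execute_pipeline(hello_pipeline)
```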