Qwame
02/10/2023, 2:28 PM

load_assets_from_dbt_cloud_job and it affected my schedules. Deployment errors because of this:
dagster._core.definitions.events.Failure: Exceeded max number of retries.
File "/usr/local/lib/python3.8/site-packages/dagster/_grpc/server.py", line 242, in __init__
self._loaded_repositories: Optional[LoadedRepositories] = LoadedRepositories(
File "/usr/local/lib/python3.8/site-packages/dagster/_grpc/server.py", line 120, in __init__
repo_def = recon_repo.get_definition()
File "/usr/local/lib/python3.8/site-packages/dagster/_core/definitions/reconstruct.py", line 117, in get_definition
return repository_def_from_pointer(self.pointer, self.repository_load_data)
File "/usr/local/lib/python3.8/site-packages/dagster/_core/definitions/reconstruct.py", line 787, in repository_def_from_pointer
repo_def = repository_def_from_target_def(target, repository_load_data)
File "/usr/local/lib/python3.8/site-packages/dagster/_core/definitions/reconstruct.py", line 776, in repository_def_from_target_def
return target.compute_repository_definition()
File "/usr/local/lib/python3.8/site-packages/dagster/_core/definitions/repository_definition.py", line 1548, in compute_repository_definition
repository_load_data = self._compute_repository_load_data()
File "/usr/local/lib/python3.8/site-packages/dagster/_core/definitions/repository_definition.py", line 1496, in _compute_repository_load_data
cached_data_by_key={
File "/usr/local/lib/python3.8/site-packages/dagster/_core/definitions/repository_definition.py", line 1497, in <dictcomp>
defn.unique_id: defn.compute_cacheable_data()
File "/usr/local/lib/python3.8/site-packages/dagster/_core/definitions/cacheable_assets.py", line 171, in compute_cacheable_data
return self._wrapped.compute_cacheable_data()
File "/usr/local/lib/python3.8/site-packages/dagster_dbt/cloud/asset_defs.py", line 80, in compute_cacheable_data
dbt_nodes, dbt_dependencies = self._get_dbt_nodes_and_dependencies()
File "/usr/local/lib/python3.8/site-packages/dagster_dbt/cloud/asset_defs.py", line 179, in _get_dbt_nodes_and_dependencies
compile_run_dbt_output = self._dbt_cloud.run_job_and_poll(
File "/usr/local/lib/python3.8/site-packages/dagster_dbt/cloud/resources.py", line 490, in run_job_and_poll
final_run_details = self.poll_run(
File "/usr/local/lib/python3.8/site-packages/dagster_dbt/cloud/resources.py", line 451, in poll_run
self.cancel_run(run_id)
File "/usr/local/lib/python3.8/site-packages/dagster_dbt/cloud/resources.py", line 296, in cancel_run
return self.make_request("POST", f"{self._account_id}/runs/{run_id}/cancel/")
File "/usr/local/lib/python3.8/site-packages/dagster_dbt/cloud/resources.py", line 129, in make_request
raise Failure("Exceeded max number of retries.")
I see in the new release that
Does this fix these errors?

rex
02/10/2023, 2:41 PM

rex
02/10/2023, 2:41 PM

Qwame
02/10/2023, 3:12 PM

rex
02/10/2023, 3:57 PM

rex
02/10/2023, 7:37 PM

rex
02/10/2023, 7:37 PM