Hi! I am trying to install the azure-identity pack...
# ask-community
Hi! I am trying to install the azure-identity package. VS Code recognizes the module and finds no error in the code editor when I import something from it (e.g.
from azure.identity import ClientSecretCredential
). However, when I run a job, it breaks with the following error:
Traceback (most recent call last):
File "/databricks/python/lib/python3.8/site-packages/dagster/core/code_pointer.py", line 125, in load_python_module
return importlib.import_module(module_name)
File "/usr/local/lib/python3.8/importlib/__init__.py", line 127, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 1014, in _gcd_import
File "<frozen importlib._bootstrap>", line 991, in _find_and_load
File "<frozen importlib._bootstrap>", line 975, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 671, in _load_unlocked
File "<frozen importlib._bootstrap_external>", line 783, in exec_module
File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
File "/tmp/tmpwdrf892f/cashback_spark_tests/utilities/update_test_delta_tables.py", line 3, in <module>
from cashback_spark.utilities.resources import prod_adls2
File "/databricks/python_shell/dbruntime/PythonPackageImportsInstrumentation/__init__.py", line 158, in import_patch
original_result = python_builtin_import(name, globals, locals, fromlist, level)
File "/tmp/tmpwdrf892f/cashback_spark/__init__.py", line 1, in <module>
from .repository import cashback_spark
File "/databricks/python_shell/dbruntime/PythonPackageImportsInstrumentation/__init__.py", line 158, in import_patch
original_result = python_builtin_import(name, globals, locals, fromlist, level)
File "/tmp/tmpwdrf892f/cashback_spark/repository.py", line 3, in <module>
from cashback_spark.jobs.accounting_balances_job import accounting_balances_job
File "/databricks/python_shell/dbruntime/PythonPackageImportsInstrumentation/__init__.py", line 158, in import_patch
original_result = python_builtin_import(name, globals, locals, fromlist, level)
File "/tmp/tmpwdrf892f/cashback_spark/jobs/accounting_balances_job.py", line 2, in <module>
from cashback_spark.utilities.resources import prod_adls2
File "/databricks/python_shell/dbruntime/PythonPackageImportsInstrumentation/__init__.py", line 158, in import_patch
original_result = python_builtin_import(name, globals, locals, fromlist, level)
File "/tmp/tmpwdrf892f/cashback_spark/utilities/resources.py", line 5, in <module>
from cashback_spark.resources.adls_txt_resource import adls_txt_resource
File "/databricks/python_shell/dbruntime/PythonPackageImportsInstrumentation/__init__.py", line 158, in import_patch
original_result = python_builtin_import(name, globals, locals, fromlist, level)
File "/tmp/tmpwdrf892f/cashback_spark/resources/adls_txt_resource.py", line 5, in <module>
from cashback_spark.utilities.sensors import convert_date_str
File "/databricks/python_shell/dbruntime/PythonPackageImportsInstrumentation/__init__.py", line 158, in import_patch
original_result = python_builtin_import(name, globals, locals, fromlist, level)
File "/tmp/tmpwdrf892f/cashback_spark/utilities/sensors.py", line 5, in <module>
from cashback_spark.utilities.service_client import \
File "/databricks/python_shell/dbruntime/PythonPackageImportsInstrumentation/__init__.py", line 158, in import_patch
original_result = python_builtin_import(name, globals, locals, fromlist, level)
File "/tmp/tmpwdrf892f/cashback_spark/utilities/service_client.py", line 3, in <module>
from azure.identity import ClientSecretCredential
File "/databricks/python_shell/dbruntime/PythonPackageImportsInstrumentation/__init__.py", line 158, in import_patch
original_result = python_builtin_import(name, globals, locals, fromlist, level)
ModuleNotFoundError: No module named 'azure.identity'
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/tmp/tmpempoz_5u.py", line 117, in main
list(run_step_from_ref(step_run_ref, instance))
File "/databricks/python/lib/python3.8/site-packages/dagster/core/execution/plan/external_step.py", line 237, in run_step_from_ref
step_context = step_run_ref_to_step_context(step_run_ref, instance)
File "/databricks/python/lib/python3.8/site-packages/dagster/core/execution/plan/external_step.py", line 201, in step_run_ref_to_step_context
execution_plan = create_execution_plan(
File "/databricks/python/lib/python3.8/site-packages/dagster/core/execution/api.py", line 745, in create_execution_plan
pipeline_def = pipeline.get_definition()
File "/databricks/python/lib/python3.8/site-packages/dagster/core/definitions/reconstructable.py", line 152, in get_definition
defn = self.repository.get_definition().get_pipeline(self.pipeline_name)
File "/databricks/python/lib/python3.8/site-packages/dagster/core/definitions/reconstructable.py", line 74, in get_definition
return repository_def_from_pointer(self.pointer)
File "/databricks/python/lib/python3.8/site-packages/dagster/core/definitions/reconstructable.py", line 628, in repository_def_from_pointer
target = def_from_pointer(pointer)
File "/databricks/python/lib/python3.8/site-packages/dagster/core/definitions/reconstructable.py", line 549, in def_from_pointer
target = pointer.load_target()
File "/databricks/python/lib/python3.8/site-packages/dagster/core/code_pointer.py", line 201, in load_target
module = load_python_module(self.module, self.working_directory)
File "/databricks/python/lib/python3.8/site-packages/dagster/core/code_pointer.py", line 130, in load_python_module
raise DagsterImportError(
dagster.core.errors.DagsterImportError: Encountered ImportError:
No module named 'azure.identity'
while importing module cashback_spark_tests.utilities.update_test_delta_tables. Local modules were resolved using the working directory
/home/cortez/cashback_spark
. If another working directory should be used, please explicitly specify the appropriate path using the
-d
or
--working-directory
for CLI based targets or the
working_directory
configuration option for workspace targets.
I have tried with both conda and pip, and I have uninstalled and re-installed my virtual environments, but the error remains. Has anyone faced this issue before?