
issue with running the notebook related to impact functions #899

Closed

@sunt05

Description

Hi, I'm trying to run the notebook associated with #692 but can't move forward.

I followed this guide to install CLIMADA. Version info:

CLIMADA version: 4.1.0
Python: 3.9.18

The issue occurs while importing climada:

import logging
import climada

logging.getLogger("climada").setLevel("WARNING")
The issue seems to be related to an upstream package (dask's pandas compatibility); see the environment check after the traceback.

Error message:

{
	"name": "AttributeError",
	"message": "module 'pandas.core.strings' has no attribute 'StringMethods'",
	"stack": "---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
Cell In[1], line 2
      1 import logging
----> 2 import climada
      4 logging.getLogger(\"climada\").setLevel(\"WARNING\")

File ~/micromamba/envs/climada/lib/python3.9/site-packages/climada/__init__.py:24
     21 from shutil import copyfile
     22 from pathlib import Path
---> 24 from .util.config import CONFIG
     25 from .util.constants import *
     28 GSDP_DIR = SYSTEM_DIR.joinpath('GSDP')

File ~/micromamba/envs/climada/lib/python3.9/site-packages/climada/util/__init__.py:26
     24 from .config import *
     25 from .constants import *
---> 26 from .coordinates import *
     27 from .save import *
     29 ureg = UnitRegistry()

File ~/micromamba/envs/climada/lib/python3.9/site-packages/climada/util/coordinates.py:33
     30 import zipfile
     32 from cartopy.io import shapereader
---> 33 import dask.dataframe as dd
     34 import geopandas as gpd
     35 import numba

File ~/.local/lib/python3.9/site-packages/dask/dataframe/__init__.py:4
      2 import dask.dataframe._pyarrow_compat
      3 from dask.base import compute
----> 4 from dask.dataframe import backends, dispatch, rolling
      5 from dask.dataframe.core import (
      6     DataFrame,
      7     Index,
   (...)
     13     to_timedelta,
     14 )
     15 from dask.dataframe.groupby import Aggregation

File ~/.local/lib/python3.9/site-packages/dask/dataframe/backends.py:21
     19 from dask.array.percentile import _percentile
     20 from dask.backends import CreationDispatch, DaskBackendEntrypoint
---> 21 from dask.dataframe.core import DataFrame, Index, Scalar, Series, _Frame
     22 from dask.dataframe.dispatch import (
     23     categorical_dtype_dispatch,
     24     concat,
   (...)
     36     union_categoricals_dispatch,
     37 )
     38 from dask.dataframe.extensions import make_array_nonempty, make_scalar

File ~/.local/lib/python3.9/site-packages/dask/dataframe/core.py:35
     33 from dask.blockwise import Blockwise, BlockwiseDep, BlockwiseDepDict, blockwise
     34 from dask.context import globalmethod
---> 35 from dask.dataframe import methods
     36 from dask.dataframe._compat import (
     37     PANDAS_GT_140,
     38     PANDAS_GT_150,
     39     check_numeric_only_deprecation,
     40 )
     41 from dask.dataframe.accessor import CachedAccessor, DatetimeAccessor, StringAccessor

File ~/.local/lib/python3.9/site-packages/dask/dataframe/methods.py:22
     10 #  preserve compatibility while moving dispatch objects
     11 from dask.dataframe.dispatch import (  # noqa: F401
     12     concat,
     13     concat_dispatch,
   (...)
     20     union_categoricals,
     21 )
---> 22 from dask.dataframe.utils import is_dataframe_like, is_index_like, is_series_like
     24 # cuDF may try to import old dispatch functions
     25 hash_df = hash_object_dispatch

File ~/.local/lib/python3.9/site-packages/dask/dataframe/utils.py:19
     17 from dask.base import get_scheduler, is_dask_collection
     18 from dask.core import get_deps
---> 19 from dask.dataframe import (  # noqa: F401 register pandas extension types
     20     _dtypes,
     21     methods,
     22 )
     23 from dask.dataframe._compat import PANDAS_GT_110, PANDAS_GT_120, tm  # noqa: F401
     24 from dask.dataframe.dispatch import (  # noqa : F401
     25     make_meta,
     26     make_meta_obj,
     27     meta_nonempty,
     28 )

File ~/.local/lib/python3.9/site-packages/dask/dataframe/_dtypes.py:3
      1 import pandas as pd
----> 3 from dask.dataframe.extensions import make_array_nonempty, make_scalar
      6 @make_array_nonempty.register(pd.DatetimeTZDtype)
      7 def _(dtype):
      8     return pd.array([pd.Timestamp(1), pd.NaT], dtype=dtype)

File ~/.local/lib/python3.9/site-packages/dask/dataframe/extensions.py:6
      1 \"\"\"
      2 Support for pandas ExtensionArray in dask.dataframe.
      3 
      4 See :ref:`extensionarrays` for more.
      5 \"\"\"
----> 6 from dask.dataframe.accessor import (
      7     register_dataframe_accessor,
      8     register_index_accessor,
      9     register_series_accessor,
     10 )
     11 from dask.utils import Dispatch
     13 make_array_nonempty = Dispatch(\"make_array_nonempty\")

File ~/.local/lib/python3.9/site-packages/dask/dataframe/accessor.py:190
    129     _accessor_methods = (
    130         \"asfreq\",
    131         \"ceil\",
   (...)
    145         \"tz_localize\",
    146     )
    148     _accessor_properties = (
    149         \"components\",
    150         \"date\",
   (...)
    186         \"year\",
    187     )
--> 190 class StringAccessor(Accessor):
    191     \"\"\"Accessor object for string properties of the Series values.
    192 
    193     Examples
   (...)
    196     >>> s.str.lower()  # doctest: +SKIP
    197     \"\"\"
    199     _accessor_name = \"str\"

File ~/.local/lib/python3.9/site-packages/dask/dataframe/accessor.py:276, in StringAccessor()
    272         meta = (self._series.name, object)
    273     return self._function_map(method, pat=pat, n=n, expand=expand, meta=meta)
    275 @derived_from(
--> 276     pd.core.strings.StringMethods,
    277     inconsistencies=\"``expand=True`` with unknown ``n`` will raise a ``NotImplementedError``\",
    278 )
    279 def split(self, pat=None, n=-1, expand=False):
    280     \"\"\"Known inconsistencies: ``expand=True`` with unknown ``n`` will raise a ``NotImplementedError``.\"\"\"
    281     return self._split(\"split\", pat=pat, n=n, expand=expand)

AttributeError: module 'pandas.core.strings' has no attribute 'StringMethods'"
}
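From the paths in the traceback, climada is loaded from the micromamba environment (~/micromamba/envs/climada/lib/python3.9/site-packages) while dask is loaded from the user site-packages (~/.local/lib/python3.9/site-packages). A likely explanation, though unconfirmed, is that an older user-site dask (one that still expects pandas.core.strings.StringMethods to exist) is shadowing the dask and pandas pinned by the environment. A minimal check, assuming that is the cause:

import sys

import dask
import pandas as pd

# Show which installs are actually resolved, and their versions.
print("pandas", pd.__version__, "->", pd.__file__)
print("dask  ", dask.__version__, "->", dask.__file__)

# True means packages in ~/.local/lib/python3.9/site-packages can shadow the env.
print("user site-packages enabled:", not sys.flags.no_user_site)

If dask indeed resolves to ~/.local, starting the notebook with PYTHONNOUSERSITE=1 set (or removing the stale dask/pandas from ~/.local/lib/python3.9/site-packages) should let Python fall back to the environment's own packages. This is a guess based on the paths above, not a verified fix.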
