Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Implement a few more ruff fixes #6469

Open
wants to merge 7 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -496,6 +496,7 @@
" time in seconds between two reads of the temperature\n",
" n_reads:\n",
" total number of reads to perform\n",
"\n",
" \"\"\"\n",
"\n",
    "    # Make a widget for a text display that is constantly being updated\n",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -487,6 +487,7 @@
" time in seconds between two reads of the temperature\n",
" n_reads:\n",
" total number of reads to perform\n",
"\n",
" \"\"\"\n",
"\n",
    "    # Make a widget for a text display that is constantly being updated\n",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -304,6 +304,7 @@
" channel: The name used by the Keithley, i.e. either\n",
" 'smua' or 'smub'\n",
" **kwargs: Forwarded to base class.\n",
"\n",
" \"\"\"\n",
"\n",
" if channel not in [\"smua\", \"smub\"]:\n",
Expand Down Expand Up @@ -383,6 +384,7 @@
" name: Name to use internally in QCoDeS\n",
    "            address: VISA resource address\n",
" **kwargs: kwargs are forwarded to the base class.\n",
"\n",
" \"\"\"\n",
" super().__init__(name, address, **kwargs)\n",
"\n",
Expand Down Expand Up @@ -556,6 +558,7 @@
" maintain the present settings of the real Ithaco amp.\n",
" name: the name of the current output. Default 'curr'.\n",
" Also used as the name of the whole parameter.\n",
"\n",
" \"\"\"\n",
"\n",
" def __init__(\n",
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -243,7 +243,7 @@ extend-exclude = ["typings"]
# TID253 banned-module-level-imports
# W pydocstyle
# PLx pylint
select = ["E", "F", "PT025", "UP", "RUF010", "RUF012", "RUF100", "RUF101", "RUF200", "I", "G", "ISC", "TID253", "NPY", "PLE", "PLR", "PLC", "PLW", "W", "D417", "D416", "D410", "D411", "D412", "D405", "D214", "TCH"]
select = ["E", "F", "PT025", "UP","RUF008", "RUF010", "RUF012", "RUF100", "RUF101", "RUF200", "I", "G", "ISC", "TID253", "NPY", "PLE", "PLR", "PLC", "PLW", "W","D214", "D300", "D402", "D403", "D405", "D410", "D411", "D412", "D413", "D414", "D416", "D417", "D418", "D419", "TCH"]
# G004 We have a lot of use of f strings in log messages
# so disable that lint for now
# NPY002 We have a lot of use of the legacy
Expand Down
9 changes: 9 additions & 0 deletions src/qcodes/configuration/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,7 @@ def __init__(self, path: str | None = None) -> None:
Args:
path: Optional path to directory containing
a `qcodesrc.json` config file

"""
self._loaded_config_files = [self.default_file_name]
self._diff_config: dict[str, Any] = {}
Expand Down Expand Up @@ -125,6 +126,7 @@ def update_config(self, path: str | None = None) -> dict[str, Any]:
Args:
path: Optional path to directory containing a `qcodesrc.json`
config file

"""
config = copy.deepcopy(self.defaults)
self.current_schema = copy.deepcopy(self.defaults_schema)
Expand Down Expand Up @@ -166,6 +168,7 @@ def _update_config_from_file(
file_path: Path to `qcodesrc.json` config file
schema: Path to `qcodesrc_schema.json` to be used
config: Config dictionary to be updated.

"""
if os.path.isfile(file_path):
self._loaded_config_files.append(file_path)
Expand All @@ -190,6 +193,7 @@ def validate(
schema: schema dictionary
extra_schema_path: schema path that contains extra validators to be
added to schema dictionary

"""
if schema is None:
if self.current_schema is None:
Expand Down Expand Up @@ -266,6 +270,7 @@ def add(
Todo:
- Add enum support for value_type
- finish _diffing

"""
if self.current_config is None:
raise RuntimeError("Cannot add value to empty config")
Expand Down Expand Up @@ -323,6 +328,7 @@ def load_config(path: str) -> DotDict:
a dot accessible dictionary config object
Raises:
FileNotFoundError: if config is missing

"""
with open(path) as fp:
config = json.load(fp)
Expand All @@ -338,6 +344,7 @@ def save_config(self, path: str) -> None:

Args:
path: path of new file

"""
with open(path, "w") as fp:
json.dump(self.current_config, fp, indent=4)
Expand All @@ -348,6 +355,7 @@ def save_schema(self, path: str) -> None:

Args:
path: path of new file

"""
with open(path, "w") as fp:
json.dump(self.current_schema, fp, indent=4)
Expand All @@ -374,6 +382,7 @@ def describe(self, name: str) -> str:
Args:
name: name of entry to describe in 'dotdict' notation,
e.g. name="user.scriptfolder"

"""
val = self.current_config
if val is None:
Expand Down
8 changes: 8 additions & 0 deletions src/qcodes/dataset/data_export.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,7 @@ def _all_steps_multiples_of_min_step(rows: np.ndarray) -> bool:

Returns:
The answer to the question

"""

steps_list: list[np.ndarray] = []
Expand Down Expand Up @@ -101,6 +102,7 @@ def _rows_from_datapoints(inputsetpoints: np.ndarray) -> np.ndarray:

Returns:
A ndarray of the rows

"""

rows = []
Expand Down Expand Up @@ -140,6 +142,7 @@ def _all_in_group_or_subgroup(rows: np.ndarray) -> bool:
Returns:
A boolean indicating whether the setpoints meet the
criterion

"""

groups = 1
Expand Down Expand Up @@ -178,6 +181,7 @@ def _strings_as_ints(inputarray: np.ndarray) -> np.ndarray:

Args:
inputarray: A 1D array of strings

"""
newdata = np.zeros(len(inputarray))
for n, word in enumerate(np.unique(inputarray)):
Expand All @@ -200,6 +204,7 @@ def get_1D_plottype(xpoints: np.ndarray, ypoints: np.ndarray) -> str:

Returns:
Determined plot type as a string

"""

if isinstance(xpoints[0], str) and not isinstance(ypoints[0], str):
Expand Down Expand Up @@ -227,6 +232,7 @@ def datatype_from_setpoints_1d(setpoints: np.ndarray) -> str:

Returns:
A string representing the plot type as described above

"""
if np.allclose(setpoints, setpoints[0]):
return "1D_point"
Expand Down Expand Up @@ -254,6 +260,7 @@ def get_2D_plottype(

Returns:
Determined plot type as a string

"""

plottype = datatype_from_setpoints_2d(xpoints, ypoints)
Expand All @@ -279,6 +286,7 @@ def datatype_from_setpoints_2d(xpoints: np.ndarray, ypoints: np.ndarray) -> str:

Returns:
A string with the name of the determined plot type

"""
# We represent categorical data as integer-valued data
if isinstance(xpoints[0], str):
Expand Down
19 changes: 19 additions & 0 deletions src/qcodes/dataset/data_set.py
Original file line number Diff line number Diff line change
Expand Up @@ -495,6 +495,7 @@ def parent_dataset_links(self, links: list[Link]) -> None:

Args:
links: The links to assign to this dataset

"""
if not self.pristine:
raise RuntimeError(
Expand Down Expand Up @@ -576,6 +577,7 @@ def add_metadata(self, tag: str, metadata: Any) -> None:
Args:
tag: represents the key in the metadata dictionary
metadata: actual metadata

"""

self._metadata[tag] = metadata
Expand All @@ -593,6 +595,7 @@ def add_snapshot(self, snapshot: str, overwrite: bool = False) -> None:
Args:
snapshot: the raw JSON dump of the snapshot
overwrite: force overwrite an existing snapshot

"""
if self.snapshot is None or overwrite:
with atomic(self.conn) as conn:
Expand Down Expand Up @@ -652,6 +655,7 @@ def mark_started(self, start_bg_writer: bool = False) -> None:
Args:
start_bg_writer: If True, the add_results method will write to the
database in a separate thread.

"""
if not self._started:
self._perform_start_actions(start_bg_writer=start_bg_writer)
Expand Down Expand Up @@ -740,6 +744,7 @@ def add_results(self, results: Sequence[Mapping[str, VALUE]]) -> None:
the name of a parameter in this :class:`.DataSet`.

It is an error to add results to a completed :class:`.DataSet`.

"""

self._raise_if_not_writable()
Expand Down Expand Up @@ -840,6 +845,7 @@ def get_parameter_data(
Dictionary from requested parameters to Dict of parameter names
to numpy arrays containing the data points of type numeric,
array or string.

"""
if len(params) == 0:
valid_param_names = [
Expand Down Expand Up @@ -892,6 +898,7 @@ def to_pandas_dataframe_dict(
:py:class:`pandas.DataFrame` s with the requested parameter as
a column and a indexed by a :py:class:`pandas.MultiIndex` formed
by the dependencies.

"""
datadict = self.get_parameter_data(*params, start=start, end=end)
dfs_dict = load_to_dataframe_dict(datadict)
Expand Down Expand Up @@ -939,6 +946,7 @@ def to_pandas_dataframe(
Example:
Return a pandas DataFrame with
df = ds.to_pandas_dataframe()

"""
datadict = self.get_parameter_data(*params, start=start, end=end)
return load_to_concatenated_dataframe(datadict)
Expand Down Expand Up @@ -1001,6 +1009,7 @@ def to_xarray_dataarray_dict(
Return a dict of xr.DataArray with

dataarray_dict = ds.to_xarray_dataarray_dict()

"""
data = self.get_parameter_data(*params, start=start, end=end)
datadict = load_to_xarray_dataarray_dict(
Expand Down Expand Up @@ -1064,6 +1073,7 @@ def to_xarray_dataset(
Return a concatenated xr.Dataset with

xds = ds.to_xarray_dataset()

"""
data = self.get_parameter_data(*params, start=start, end=end)

Expand Down Expand Up @@ -1106,6 +1116,7 @@ def write_data_to_text_file(
length and wanted to be merged in a single file.
DataPathException: If the data of multiple parameters are wanted to be merged
in a single file but no filename provided.

"""
dfdict = self.to_pandas_dataframe_dict()
dataframe_to_csv(
Expand Down Expand Up @@ -1140,6 +1151,7 @@ def subscribe_from_config(self, name: str) -> str:
Args:
name: identifier of the subscriber. Equal to the key of the entry
in ``qcodesrc.json::subscription.subscribers``.

"""
subscribers = qcodes.config.subscription.subscribers
try:
Expand Down Expand Up @@ -1580,6 +1592,7 @@ def load_by_run_spec(
:class:`qcodes.dataset.data_set.DataSet` or
:class:`.DataSetInMemory` matching the provided
specification.

"""
internal_conn = conn or connect(get_DB_location())
d: DataSetProtocol | None = None
Expand Down Expand Up @@ -1645,6 +1658,7 @@ def get_guids_by_run_spec(

Returns:
List of guids matching the run spec.

"""
internal_conn = conn or connect(get_DB_location())
try:
Expand Down Expand Up @@ -1689,6 +1703,7 @@ def load_by_id(run_id: int, conn: ConnectionPlus | None = None) -> DataSetProtoc
Returns:
:class:`qcodes.dataset.data_set.DataSet` or
:class:`.DataSetInMemory` with the given run id

"""
if run_id is None:
raise ValueError("run_id has to be a positive integer, not None.")
Expand Down Expand Up @@ -1731,6 +1746,7 @@ def load_by_guid(guid: str, conn: ConnectionPlus | None = None) -> DataSetProtoc
Raises:
NameError: if no run with the given GUID exists in the database
RuntimeError: if several runs with the given GUID are found

"""
internal_conn = conn or connect(get_DB_location())
d: DataSetProtocol | None = None
Expand Down Expand Up @@ -1774,6 +1790,7 @@ def load_by_counter(
:class:`DataSet` or
:class:`.DataSetInMemory` of the given counter in
the given experiment

"""
internal_conn = conn or connect(get_DB_location())
d: DataSetProtocol | None = None
Expand Down Expand Up @@ -1855,6 +1872,7 @@ def new_data_set(

Return:
the newly created :class:`qcodes.dataset.data_set.DataSet`

"""
# note that passing `conn` is a secret feature that is unfortunately used
# in `Runner` to pass a connection from an existing `Experiment`.
Expand Down Expand Up @@ -1885,6 +1903,7 @@ def generate_dataset_table(
conn: A ConnectionPlus object with a connection to the database.

Returns: ASCII art table of information about the supplied guids.

"""
from tabulate import tabulate

Expand Down
4 changes: 4 additions & 0 deletions src/qcodes/dataset/data_set_cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,6 +83,7 @@ def data(self) -> ParameterData:

Returns:
The cached dataset.

"""
if not self.live:
self.load_data_from_db()
Expand Down Expand Up @@ -140,6 +141,7 @@ def to_pandas_dataframe_dict(self) -> dict[str, pd.DataFrame]:
Returns:
A dict from parameter name to Pandas Dataframes. Each dataframe
represents one parameter tree.

"""
data = self.data()
return load_to_dataframe_dict(data)
Expand All @@ -152,6 +154,7 @@ def to_pandas_dataframe(self) -> pd.DataFrame:
Returns:
A dict from parameter name to Pandas Dataframes. Each dataframe
represents one parameter tree.

"""
data = self.data()
return load_to_concatenated_dataframe(data)
Expand Down Expand Up @@ -260,6 +263,7 @@ def append_shaped_parameter_data_to_existing_arrays(

Returns:
Updated write and read status, and the updated ``data``

"""
parameters = tuple(ps.name for ps in rundescriber.interdeps.non_dependencies)
merged_data = {}
Expand Down
Loading
Loading