Commit
Merge pull request #4408 from jenshnielsen/build/add_data_init_files
Add __init__.py files to data directories
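The commit message does not spell out the motivation, but a common reason for adding __init__.py to data and fixture directories is to turn them into regular importable packages, so their files can be located through the package machinery rather than hand-built filesystem paths. A minimal sketch under that assumption; the package name qcodes.tests.dataset.fixtures is illustrative and not taken from the diff:

# Hedged sketch: the package path below is illustrative, not taken from this diff.
# With an __init__.py present, a data directory resolves as a regular package,
# so its contents can be listed via importlib.resources instead of Path arithmetic.
from importlib.resources import files

def list_packaged_fixtures(package: str = "qcodes.tests.dataset.fixtures") -> list[str]:
    """Return the names of the files shipped inside a packaged data directory."""
    return [entry.name for entry in files(package).iterdir() if entry.is_file()]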
Showing 17 changed files with 89 additions and 65 deletions.
docs/examples/DataSet/import-data-from-legacy-dat-files.ipynb: 18 changes (9 additions, 9 deletions). Large diffs are not rendered by default.
8 empty files added (the __init__.py files this pull request introduces) and 4 files renamed without changes.
@@ -0,0 +1,79 @@
import json
from pathlib import Path

import pytest

from qcodes.dataset import import_dat_file, load_by_id
from qcodes.dataset.data_set import DataSet


@pytest.mark.usefixtures("experiment")
def test_load_legacy_files_2d():
    full_location = (
        Path(__file__).parent.parent
        / "fixtures"
        / "data_2018_01_17"
        / "data_002_2D_test_15_43_14"
    )
    run_ids = import_dat_file(str(full_location))
    run_id = run_ids[0]
    data = load_by_id(run_id)
    assert isinstance(data, DataSet)
    assert data.parameters == "dac_ch1_set,dac_ch2_set,dmm_voltage"
    assert data.number_of_results == 36
    expected_names = ["dac_ch1_set", "dac_ch2_set", "dmm_voltage"]
    expected_labels = ["Gate ch1", "Gate ch2", "Gate voltage"]
    expected_units = ["V", "V", "V"]
    expected_depends_on = ["", "", "dac_ch1_set, dac_ch2_set"]
    for i, parameter in enumerate(data.get_parameters()):
        assert parameter.name == expected_names[i]
        assert parameter.label == expected_labels[i]
        assert parameter.unit == expected_units[i]
        assert parameter.depends_on == expected_depends_on[i]
        assert parameter.type == "numeric"
    snapshot = json.loads(data.get_metadata("snapshot"))
    assert sorted(list(snapshot.keys())) == [
        "__class__",
        "arrays",
        "formatter",
        "io",
        "location",
        "loop",
        "station",
    ]


@pytest.mark.usefixtures("experiment")
def test_load_legacy_files_1d():
    full_location = (
        Path(__file__).parent.parent
        / "fixtures"
        / "data_2018_01_17"
        / "data_001_testsweep_15_42_57"
    )
    run_ids = import_dat_file(str(full_location))
    run_id = run_ids[0]
    data = load_by_id(run_id)
    assert isinstance(data, DataSet)
    assert data.parameters == "dac_ch1_set,dmm_voltage"
    assert data.number_of_results == 201
    expected_names = ["dac_ch1_set", "dmm_voltage"]
    expected_labels = ["Gate ch1", "Gate voltage"]
    expected_units = ["V", "V"]
    expected_depends_on = ["", "dac_ch1_set"]
    for i, parameter in enumerate(data.get_parameters()):
        assert parameter.name == expected_names[i]
        assert parameter.label == expected_labels[i]
        assert parameter.unit == expected_units[i]
        assert parameter.depends_on == expected_depends_on[i]
        assert parameter.type == "numeric"
    snapshot = json.loads(data.get_metadata("snapshot"))
    assert sorted(list(snapshot.keys())) == [
        "__class__",
        "arrays",
        "formatter",
        "io",
        "location",
        "loop",
        "station",
    ]
Empty file.
The remaining changed files' diffs are not rendered.
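For context, a minimal sketch of how the import helpers exercised by the new tests are used outside the pytest fixtures. It assumes a writable database location and an experiment created up front (both names below are illustrative); the .dat directory is one of the fixtures referenced in the tests above:

from qcodes.dataset import (
    import_dat_file,
    initialise_or_create_database_at,
    load_by_id,
    load_or_create_experiment,
)

# Point QCoDeS at a database and make sure an experiment exists
# (the tests above get this from the "experiment" fixture instead).
initialise_or_create_database_at("./legacy_import.db")
load_or_create_experiment("legacy_import")

# Import a legacy qcodes .dat measurement directory and inspect the resulting run.
run_ids = import_dat_file("fixtures/data_2018_01_17/data_001_testsweep_15_42_57")
dataset = load_by_id(run_ids[0])
print(dataset.parameters, dataset.number_of_results)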