{ "cells": [ { "cell_type": "code", "execution_count": 1, "id": "fa33d6d8", "metadata": { "execution": { "iopub.execute_input": "2023-09-26T17:43:39.605913Z", "iopub.status.busy": "2023-09-26T17:43:39.605733Z", "iopub.status.idle": "2023-09-26T17:43:40.784239Z", "shell.execute_reply": "2023-09-26T17:43:40.783572Z" } }, "outputs": [], "source": [ "from pathlib import Path\n", "\n", "import matplotlib.pyplot as plt\n", "import xarray as xr\n", "from rich import pretty\n", "\n", "import quantify_core.data.dataset_adapters as dadapters\n", "import quantify_core.data.dataset_attrs as dattrs\n", "from quantify_core.data import handling as dh\n", "from quantify_core.utilities import dataset_examples\n", "from quantify_core.utilities.examples_support import round_trip_dataset\n", "from quantify_core.utilities.inspect_utils import display_source_code\n", "\n", "pretty.install()\n", "\n", "dh.set_datadir(Path.home() / \"quantify-data\") # change me!" ] }, { "cell_type": "code", "execution_count": 2, "id": "95aa4f0e", "metadata": { "execution": { "iopub.execute_input": "2023-09-26T17:43:40.787086Z", "iopub.status.busy": "2023-09-26T17:43:40.786758Z", "iopub.status.idle": "2023-09-26T17:43:40.843101Z", "shell.execute_reply": "2023-09-26T17:43:40.842370Z" } }, "outputs": [ { "data": { "text/html": [ "
def mk_two_qubit_chevron_dataset(**kwargs) -> xr.Dataset:\n",
" """\n",
"    Generates a dataset that looks similar to a two-qubit Chevron experiment.\n",
"\n",
" Parameters\n",
" ----------\n",
" **kwargs\n",
" Keyword arguments passed to :func:`~.mk_two_qubit_chevron_data`.\n",
"\n",
" Returns\n",
" -------\n",
" :\n",
" A mock Quantify dataset.\n",
" """\n",
" amp_values, time_values, pop_q0, pop_q1 = mk_two_qubit_chevron_data(**kwargs)\n",
"\n",
" dims_q0 = dims_q1 = ("repetitions", "main_dim")\n",
" pop_q0_attrs = mk_main_var_attrs(\n",
" long_name="Population Q0", unit="", has_repetitions=True\n",
" )\n",
" pop_q1_attrs = mk_main_var_attrs(\n",
" long_name="Population Q1", unit="", has_repetitions=True\n",
" )\n",
" data_vars = dict(\n",
" pop_q0=(dims_q0, pop_q0, pop_q0_attrs),\n",
" pop_q1=(dims_q1, pop_q1, pop_q1_attrs),\n",
" )\n",
"\n",
" dims_amp = dims_time = ("main_dim",)\n",
" amp_attrs = mk_main_coord_attrs(long_name="Amplitude", unit="V")\n",
" time_attrs = mk_main_coord_attrs(long_name="Time", unit="s")\n",
" coords = dict(\n",
" amp=(dims_amp, amp_values, amp_attrs),\n",
" time=(dims_time, time_values, time_attrs),\n",
" )\n",
"\n",
" dataset_attrs = mk_dataset_attrs()\n",
" dataset = xr.Dataset(data_vars=data_vars, coords=coords, attrs=dataset_attrs)\n",
"\n",
" return dataset\n",
"
<xarray.Dataset>\n", "Dimensions: (repetitions: 5, main_dim: 1200)\n", "Coordinates:\n", " amp (main_dim) float64 0.45 0.4534 0.4569 0.4603 ... 0.5431 0.5466 0.55\n", " time (main_dim) float64 0.0 0.0 0.0 0.0 0.0 ... 1e-07 1e-07 1e-07 1e-07\n", "Dimensions without coordinates: repetitions, main_dim\n", "Data variables:\n", " pop_q0 (repetitions, main_dim) float64 0.5 0.5 0.5 ... 0.4886 0.4818 0.5\n", " pop_q1 (repetitions, main_dim) float64 0.5 0.5 0.5 ... 0.5243 0.5371 0.5\n", "Attributes:\n", " tuid: 20230926-194340-851-8a0f58\n", " dataset_name: \n", " dataset_state: None\n", " timestamp_start: None\n", " timestamp_end: None\n", " quantify_dataset_version: 2.0.0\n", " software_versions: {}\n", " relationships: []\n", " json_serialize_exclude: []
<xarray.Dataset>\n", "Dimensions: (repetitions: 5)\n", "Coordinates:\n", " * repetitions (repetitions) <U1 'A' 'B' 'C' 'D' 'E'\n", "Data variables:\n", " *empty*
<xarray.Dataset>\n", "Dimensions: (repetitions: 5, main_dim: 1200)\n", "Coordinates:\n", " amp (main_dim) float64 0.45 0.4534 0.4569 ... 0.5431 0.5466 0.55\n", " time (main_dim) float64 0.0 0.0 0.0 0.0 ... 1e-07 1e-07 1e-07 1e-07\n", " * repetitions (repetitions) <U1 'A' 'B' 'C' 'D' 'E'\n", "Dimensions without coordinates: main_dim\n", "Data variables:\n", " pop_q0 (repetitions, main_dim) float64 0.5 0.5 0.5 ... 0.4818 0.5\n", " pop_q1 (repetitions, main_dim) float64 0.5 0.5 0.5 ... 0.5371 0.5\n", "Attributes:\n", " tuid: 20230926-194340-851-8a0f58\n", " dataset_name: \n", " dataset_state: None\n", " timestamp_start: None\n", " timestamp_end: None\n", " quantify_dataset_version: 2.0.0\n", " software_versions: {}\n", " relationships: []\n", " json_serialize_exclude: []
<xarray.Dataset>\n", "Dimensions: (amp: 30, time: 40, repetitions: 5)\n", "Coordinates:\n", " * amp (amp) float64 0.45 0.4534 0.4569 0.4603 ... 0.5431 0.5466 0.55\n", " * time (time) float64 0.0 2.564e-09 5.128e-09 ... 9.744e-08 1e-07\n", " * repetitions (repetitions) <U1 'A' 'B' 'C' 'D' 'E'\n", "Data variables:\n", " pop_q0 (repetitions, amp, time) float64 0.5 0.5 0.5 ... 0.5 0.5 0.5\n", " pop_q1 (repetitions, amp, time) float64 0.5 0.5 0.5 ... 0.5 0.5 0.5\n", "Attributes:\n", " tuid: 20230926-194340-851-8a0f58\n", " dataset_name: \n", " dataset_state: None\n", " timestamp_start: None\n", " timestamp_end: None\n", " quantify_dataset_version: 2.0.0\n", " software_versions: {}\n", " relationships: []\n", " json_serialize_exclude: []
\n", "{\n", " 'tuid': '20230926-194343-406-e5bcea',\n", " 'dataset_name': 'Bias scan',\n", " 'dataset_state': 'done',\n", " 'timestamp_start': '2023-09-26T19:43:43.406197+02:00',\n", " 'timestamp_end': '2023-09-26T19:45:43.406238+02:00',\n", " 'quantify_dataset_version': '2.0.0',\n", " 'software_versions': {},\n", " 'relationships': [],\n", " 'json_serialize_exclude': []\n", "}\n", "\n" ], "text/plain": [ "\n", "\u001b[1m{\u001b[0m\n", " \u001b[32m'tuid'\u001b[0m: \u001b[32m'20230926-194343-406-e5bcea'\u001b[0m,\n", " \u001b[32m'dataset_name'\u001b[0m: \u001b[32m'Bias scan'\u001b[0m,\n", " \u001b[32m'dataset_state'\u001b[0m: \u001b[32m'done'\u001b[0m,\n", " \u001b[32m'timestamp_start'\u001b[0m: \u001b[32m'2023-09-26T19:43:43.406197+02:00'\u001b[0m,\n", " \u001b[32m'timestamp_end'\u001b[0m: \u001b[32m'2023-09-26T19:45:43.406238+02:00'\u001b[0m,\n", " \u001b[32m'quantify_dataset_version'\u001b[0m: \u001b[32m'2.0.0'\u001b[0m,\n", " \u001b[32m'software_versions'\u001b[0m: \u001b[1m{\u001b[0m\u001b[1m}\u001b[0m,\n", " \u001b[32m'relationships'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", " \u001b[32m'json_serialize_exclude'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m\n", "\u001b[1m}\u001b[0m\n" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "import pendulum\n", "\n", "from quantify_core.utilities import examples_support\n", "\n", "examples_support.mk_dataset_attrs(\n", " dataset_name=\"Bias scan\",\n", " timestamp_start=pendulum.now().to_iso8601_string(),\n", " timestamp_end=pendulum.now().add(minutes=2).to_iso8601_string(),\n", " dataset_state=\"done\",\n", ")" ] }, { "cell_type": "code", "execution_count": 13, "id": "08efb357", "metadata": { "execution": { "iopub.execute_input": "2023-09-26T17:43:43.417293Z", "iopub.status.busy": "2023-09-26T17:43:43.416609Z", "iopub.status.idle": "2023-09-26T17:43:43.419971Z", "shell.execute_reply": "2023-09-26T17:43:43.419348Z" } }, "outputs": [], "source": [ "# pylint: disable=line-too-long\n", "# 
pylint: disable=wrong-import-order\n", "# pylint: disable=wrong-import-position\n", "# pylint: disable=pointless-string-statement\n", "# pylint: disable=duplicate-code" ] }, { "cell_type": "code", "execution_count": 14, "id": "b5875ec8", "metadata": { "execution": { "iopub.execute_input": "2023-09-26T17:43:43.422218Z", "iopub.status.busy": "2023-09-26T17:43:43.422030Z", "iopub.status.idle": "2023-09-26T17:43:43.430051Z", "shell.execute_reply": "2023-09-26T17:43:43.429347Z" } }, "outputs": [ { "data": { "text/html": [ "
\n", "{\n", " 'tuid': '20230926-194343-423-7ca250',\n", " 'dataset_name': 'My experiment',\n", " 'dataset_state': None,\n", " 'timestamp_start': '2023-09-26T19:43:43.423773+02:00',\n", " 'timestamp_end': '2023-09-26T19:45:43.423819+02:00',\n", " 'quantify_dataset_version': '2.0.0',\n", " 'software_versions': {\n", " 'lab_fridge_magnet_driver': 'v1.4.2',\n", " 'my_lab_repo': '9d8acf63f48c469c1b9fa9f2c3cf230845f67b18'\n", " },\n", " 'relationships': [],\n", " 'json_serialize_exclude': []\n", "}\n", "\n" ], "text/plain": [ "\n", "\u001b[1m{\u001b[0m\n", " \u001b[32m'tuid'\u001b[0m: \u001b[32m'20230926-194343-423-7ca250'\u001b[0m,\n", " \u001b[32m'dataset_name'\u001b[0m: \u001b[32m'My experiment'\u001b[0m,\n", " \u001b[32m'dataset_state'\u001b[0m: \u001b[3;35mNone\u001b[0m,\n", " \u001b[32m'timestamp_start'\u001b[0m: \u001b[32m'2023-09-26T19:43:43.423773+02:00'\u001b[0m,\n", " \u001b[32m'timestamp_end'\u001b[0m: \u001b[32m'2023-09-26T19:45:43.423819+02:00'\u001b[0m,\n", " \u001b[32m'quantify_dataset_version'\u001b[0m: \u001b[32m'2.0.0'\u001b[0m,\n", " \u001b[32m'software_versions'\u001b[0m: \u001b[1m{\u001b[0m\n", " \u001b[32m'lab_fridge_magnet_driver'\u001b[0m: \u001b[32m'v1.4.2'\u001b[0m,\n", " \u001b[32m'my_lab_repo'\u001b[0m: \u001b[32m'9d8acf63f48c469c1b9fa9f2c3cf230845f67b18'\u001b[0m\n", " \u001b[1m}\u001b[0m,\n", " \u001b[32m'relationships'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", " \u001b[32m'json_serialize_exclude'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m\n", "\u001b[1m}\u001b[0m\n" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "import pendulum\n", "\n", "from quantify_core.utilities import examples_support\n", "\n", "examples_support.mk_dataset_attrs(\n", " dataset_name=\"My experiment\",\n", " timestamp_start=pendulum.now().to_iso8601_string(),\n", " timestamp_end=pendulum.now().add(minutes=2).to_iso8601_string(),\n", " software_versions={\n", " \"lab_fridge_magnet_driver\": \"v1.4.2\", # software version/tag\n", " 
\"my_lab_repo\": \"9d8acf63f48c469c1b9fa9f2c3cf230845f67b18\", # git commit hash\n", " },\n", ")" ] }, { "cell_type": "code", "execution_count": 15, "id": "ea04d5a6", "metadata": { "execution": { "iopub.execute_input": "2023-09-26T17:43:43.432596Z", "iopub.status.busy": "2023-09-26T17:43:43.432183Z", "iopub.status.idle": "2023-09-26T17:43:43.435128Z", "shell.execute_reply": "2023-09-26T17:43:43.434423Z" } }, "outputs": [], "source": [ "# pylint: disable=duplicate-code\n", "# pylint: disable=wrong-import-position" ] }, { "cell_type": "code", "execution_count": 16, "id": "5de80ea5", "metadata": { "execution": { "iopub.execute_input": "2023-09-26T17:43:43.437429Z", "iopub.status.busy": "2023-09-26T17:43:43.437239Z", "iopub.status.idle": "2023-09-26T17:43:43.441210Z", "shell.execute_reply": "2023-09-26T17:43:43.440608Z" } }, "outputs": [], "source": [ "from quantify_core.data.dataset_attrs import QDatasetIntraRelationship\n", "from quantify_core.utilities import examples_support\n", "\n", "attrs = examples_support.mk_dataset_attrs(\n", " relationships=[\n", " QDatasetIntraRelationship(\n", " item_name=\"q0\",\n", " relation_type=\"calibration\",\n", " related_names=[\"q0_cal\"],\n", " ).to_dict()\n", " ]\n", ")" ] }, { "cell_type": "code", "execution_count": 17, "id": "fa12dcbf", "metadata": { "execution": { "iopub.execute_input": "2023-09-26T17:43:43.443538Z", "iopub.status.busy": "2023-09-26T17:43:43.443344Z", "iopub.status.idle": "2023-09-26T17:43:43.449935Z", "shell.execute_reply": "2023-09-26T17:43:43.449320Z" } }, "outputs": [ { "data": { "text/html": [ "
\n", "{\n", " 'tuid': None,\n", " 'dataset_name': '',\n", " 'dataset_state': None,\n", " 'timestamp_start': None,\n", " 'timestamp_end': None,\n", " 'quantify_dataset_version': '2.0.0',\n", " 'software_versions': {},\n", " 'relationships': [],\n", " 'json_serialize_exclude': []\n", "}\n", "\n" ], "text/plain": [ "\n", "\u001b[1m{\u001b[0m\n", " \u001b[32m'tuid'\u001b[0m: \u001b[3;35mNone\u001b[0m,\n", " \u001b[32m'dataset_name'\u001b[0m: \u001b[32m''\u001b[0m,\n", " \u001b[32m'dataset_state'\u001b[0m: \u001b[3;35mNone\u001b[0m,\n", " \u001b[32m'timestamp_start'\u001b[0m: \u001b[3;35mNone\u001b[0m,\n", " \u001b[32m'timestamp_end'\u001b[0m: \u001b[3;35mNone\u001b[0m,\n", " \u001b[32m'quantify_dataset_version'\u001b[0m: \u001b[32m'2.0.0'\u001b[0m,\n", " \u001b[32m'software_versions'\u001b[0m: \u001b[1m{\u001b[0m\u001b[1m}\u001b[0m,\n", " \u001b[32m'relationships'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", " \u001b[32m'json_serialize_exclude'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m\n", "\u001b[1m}\u001b[0m\n" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "from quantify_core.data.dataset_attrs import QDatasetAttrs\n", "\n", "# tip: to_json and from_dict, from_json are also available\n", "dataset.attrs = QDatasetAttrs().to_dict()\n", "dataset.attrs" ] }, { "cell_type": "code", "execution_count": 18, "id": "372c9538", "metadata": { "execution": { "iopub.execute_input": "2023-09-26T17:43:43.452223Z", "iopub.status.busy": "2023-09-26T17:43:43.452032Z", "iopub.status.idle": "2023-09-26T17:43:43.457522Z", "shell.execute_reply": "2023-09-26T17:43:43.456898Z" } }, "outputs": [ { "data": { "text/html": [ "
('2.0.0', None)\n", "\n" ], "text/plain": [ "\u001b[1m(\u001b[0m\u001b[32m'2.0.0'\u001b[0m, \u001b[3;35mNone\u001b[0m\u001b[1m)\u001b[0m\n" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "dataset.quantify_dataset_version, dataset.tuid" ] }, { "cell_type": "code", "execution_count": 19, "id": "f1aec9ff", "metadata": { "execution": { "iopub.execute_input": "2023-09-26T17:43:43.459790Z", "iopub.status.busy": "2023-09-26T17:43:43.459600Z", "iopub.status.idle": "2023-09-26T17:43:43.462430Z", "shell.execute_reply": "2023-09-26T17:43:43.461872Z" } }, "outputs": [], "source": [ "# pylint: disable=line-too-long\n", "# pylint: disable=wrong-import-order\n", "# pylint: disable=wrong-import-position\n", "# pylint: disable=pointless-string-statement\n", "# pylint: disable=duplicate-code" ] }, { "cell_type": "code", "execution_count": 20, "id": "e31b4558", "metadata": { "execution": { "iopub.execute_input": "2023-09-26T17:43:43.464608Z", "iopub.status.busy": "2023-09-26T17:43:43.464417Z", "iopub.status.idle": "2023-09-26T17:43:43.470662Z", "shell.execute_reply": "2023-09-26T17:43:43.470032Z" } }, "outputs": [ { "data": { "text/html": [ "
\n", "{\n", " 'unit': '',\n", " 'long_name': '',\n", " 'is_main_coord': True,\n", " 'uniformly_spaced': True,\n", " 'is_dataset_ref': False,\n", " 'json_serialize_exclude': []\n", "}\n", "\n" ], "text/plain": [ "\n", "\u001b[1m{\u001b[0m\n", " \u001b[32m'unit'\u001b[0m: \u001b[32m''\u001b[0m,\n", " \u001b[32m'long_name'\u001b[0m: \u001b[32m''\u001b[0m,\n", " \u001b[32m'is_main_coord'\u001b[0m: \u001b[3;92mTrue\u001b[0m,\n", " \u001b[32m'uniformly_spaced'\u001b[0m: \u001b[3;92mTrue\u001b[0m,\n", " \u001b[32m'is_dataset_ref'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", " \u001b[32m'json_serialize_exclude'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m\n", "\u001b[1m}\u001b[0m\n" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "from quantify_core.utilities import examples_support\n", "\n", "examples_support.mk_main_coord_attrs()" ] }, { "cell_type": "code", "execution_count": 21, "id": "cd9db098", "metadata": { "execution": { "iopub.execute_input": "2023-09-26T17:43:43.472949Z", "iopub.status.busy": "2023-09-26T17:43:43.472759Z", "iopub.status.idle": "2023-09-26T17:43:43.478776Z", "shell.execute_reply": "2023-09-26T17:43:43.478137Z" } }, "outputs": [ { "data": { "text/html": [ "
\n", "{\n", " 'unit': '',\n", " 'long_name': '',\n", " 'is_main_coord': False,\n", " 'uniformly_spaced': True,\n", " 'is_dataset_ref': False,\n", " 'json_serialize_exclude': []\n", "}\n", "\n" ], "text/plain": [ "\n", "\u001b[1m{\u001b[0m\n", " \u001b[32m'unit'\u001b[0m: \u001b[32m''\u001b[0m,\n", " \u001b[32m'long_name'\u001b[0m: \u001b[32m''\u001b[0m,\n", " \u001b[32m'is_main_coord'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", " \u001b[32m'uniformly_spaced'\u001b[0m: \u001b[3;92mTrue\u001b[0m,\n", " \u001b[32m'is_dataset_ref'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", " \u001b[32m'json_serialize_exclude'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m\n", "\u001b[1m}\u001b[0m\n" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "examples_support.mk_secondary_coord_attrs()" ] }, { "cell_type": "code", "execution_count": 22, "id": "c4f8dd4b", "metadata": { "execution": { "iopub.execute_input": "2023-09-26T17:43:43.481091Z", "iopub.status.busy": "2023-09-26T17:43:43.480903Z", "iopub.status.idle": "2023-09-26T17:43:43.486850Z", "shell.execute_reply": "2023-09-26T17:43:43.486169Z" } }, "outputs": [ { "data": { "text/html": [ "
\n", "{\n", " 'unit': 'V',\n", " 'long_name': 'Amplitude',\n", " 'is_main_coord': True,\n", " 'uniformly_spaced': True,\n", " 'is_dataset_ref': False,\n", " 'json_serialize_exclude': []\n", "}\n", "\n" ], "text/plain": [ "\n", "\u001b[1m{\u001b[0m\n", " \u001b[32m'unit'\u001b[0m: \u001b[32m'V'\u001b[0m,\n", " \u001b[32m'long_name'\u001b[0m: \u001b[32m'Amplitude'\u001b[0m,\n", " \u001b[32m'is_main_coord'\u001b[0m: \u001b[3;92mTrue\u001b[0m,\n", " \u001b[32m'uniformly_spaced'\u001b[0m: \u001b[3;92mTrue\u001b[0m,\n", " \u001b[32m'is_dataset_ref'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", " \u001b[32m'json_serialize_exclude'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m\n", "\u001b[1m}\u001b[0m\n" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "dataset.amp.attrs" ] }, { "cell_type": "code", "execution_count": 23, "id": "f27f5caa", "metadata": { "execution": { "iopub.execute_input": "2023-09-26T17:43:43.489112Z", "iopub.status.busy": "2023-09-26T17:43:43.488923Z", "iopub.status.idle": "2023-09-26T17:43:43.491665Z", "shell.execute_reply": "2023-09-26T17:43:43.491152Z" } }, "outputs": [], "source": [ "# pylint: disable=line-too-long\n", "# pylint: disable=wrong-import-order\n", "# pylint: disable=wrong-import-position\n", "# pylint: disable=pointless-string-statement\n", "# pylint: disable=duplicate-code" ] }, { "cell_type": "code", "execution_count": 24, "id": "60a7875c", "metadata": { "execution": { "iopub.execute_input": "2023-09-26T17:43:43.493922Z", "iopub.status.busy": "2023-09-26T17:43:43.493735Z", "iopub.status.idle": "2023-09-26T17:43:43.499767Z", "shell.execute_reply": "2023-09-26T17:43:43.499312Z" } }, "outputs": [ { "data": { "text/html": [ "
\n", "{\n", " 'unit': '',\n", " 'long_name': '',\n", " 'is_main_var': True,\n", " 'uniformly_spaced': True,\n", " 'grid': True,\n", " 'is_dataset_ref': False,\n", " 'has_repetitions': False,\n", " 'json_serialize_exclude': [],\n", " 'coords': ['time']\n", "}\n", "\n" ], "text/plain": [ "\n", "\u001b[1m{\u001b[0m\n", " \u001b[32m'unit'\u001b[0m: \u001b[32m''\u001b[0m,\n", " \u001b[32m'long_name'\u001b[0m: \u001b[32m''\u001b[0m,\n", " \u001b[32m'is_main_var'\u001b[0m: \u001b[3;92mTrue\u001b[0m,\n", " \u001b[32m'uniformly_spaced'\u001b[0m: \u001b[3;92mTrue\u001b[0m,\n", " \u001b[32m'grid'\u001b[0m: \u001b[3;92mTrue\u001b[0m,\n", " \u001b[32m'is_dataset_ref'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", " \u001b[32m'has_repetitions'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", " \u001b[32m'json_serialize_exclude'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", " \u001b[32m'coords'\u001b[0m: \u001b[1m[\u001b[0m\u001b[32m'time'\u001b[0m\u001b[1m]\u001b[0m\n", "\u001b[1m}\u001b[0m\n" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "from quantify_core.utilities import examples_support\n", "\n", "examples_support.mk_main_var_attrs(coords=[\"time\"])" ] }, { "cell_type": "code", "execution_count": 25, "id": "fa0c44fc", "metadata": { "execution": { "iopub.execute_input": "2023-09-26T17:43:43.501995Z", "iopub.status.busy": "2023-09-26T17:43:43.501806Z", "iopub.status.idle": "2023-09-26T17:43:43.508140Z", "shell.execute_reply": "2023-09-26T17:43:43.507522Z" } }, "outputs": [ { "data": { "text/html": [ "
\n", "{\n", " 'unit': '',\n", " 'long_name': '',\n", " 'is_main_var': False,\n", " 'uniformly_spaced': True,\n", " 'grid': True,\n", " 'is_dataset_ref': False,\n", " 'has_repetitions': False,\n", " 'json_serialize_exclude': [],\n", " 'coords': ['cal']\n", "}\n", "\n" ], "text/plain": [ "\n", "\u001b[1m{\u001b[0m\n", " \u001b[32m'unit'\u001b[0m: \u001b[32m''\u001b[0m,\n", " \u001b[32m'long_name'\u001b[0m: \u001b[32m''\u001b[0m,\n", " \u001b[32m'is_main_var'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", " \u001b[32m'uniformly_spaced'\u001b[0m: \u001b[3;92mTrue\u001b[0m,\n", " \u001b[32m'grid'\u001b[0m: \u001b[3;92mTrue\u001b[0m,\n", " \u001b[32m'is_dataset_ref'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", " \u001b[32m'has_repetitions'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", " \u001b[32m'json_serialize_exclude'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m,\n", " \u001b[32m'coords'\u001b[0m: \u001b[1m[\u001b[0m\u001b[32m'cal'\u001b[0m\u001b[1m]\u001b[0m\n", "\u001b[1m}\u001b[0m\n" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "examples_support.mk_secondary_var_attrs(coords=[\"cal\"])" ] }, { "cell_type": "code", "execution_count": 26, "id": "1b1ee042", "metadata": { "execution": { "iopub.execute_input": "2023-09-26T17:43:43.510395Z", "iopub.status.busy": "2023-09-26T17:43:43.510204Z", "iopub.status.idle": "2023-09-26T17:43:43.516228Z", "shell.execute_reply": "2023-09-26T17:43:43.515620Z" } }, "outputs": [ { "data": { "text/html": [ "
\n", "{\n", " 'unit': '',\n", " 'long_name': 'Population Q0',\n", " 'is_main_var': True,\n", " 'uniformly_spaced': True,\n", " 'grid': True,\n", " 'is_dataset_ref': False,\n", " 'has_repetitions': True,\n", " 'json_serialize_exclude': []\n", "}\n", "\n" ], "text/plain": [ "\n", "\u001b[1m{\u001b[0m\n", " \u001b[32m'unit'\u001b[0m: \u001b[32m''\u001b[0m,\n", " \u001b[32m'long_name'\u001b[0m: \u001b[32m'Population Q0'\u001b[0m,\n", " \u001b[32m'is_main_var'\u001b[0m: \u001b[3;92mTrue\u001b[0m,\n", " \u001b[32m'uniformly_spaced'\u001b[0m: \u001b[3;92mTrue\u001b[0m,\n", " \u001b[32m'grid'\u001b[0m: \u001b[3;92mTrue\u001b[0m,\n", " \u001b[32m'is_dataset_ref'\u001b[0m: \u001b[3;91mFalse\u001b[0m,\n", " \u001b[32m'has_repetitions'\u001b[0m: \u001b[3;92mTrue\u001b[0m,\n", " \u001b[32m'json_serialize_exclude'\u001b[0m: \u001b[1m[\u001b[0m\u001b[1m]\u001b[0m\n", "\u001b[1m}\u001b[0m\n" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "dataset.pop_q0.attrs" ] }, { "cell_type": "code", "execution_count": 27, "id": "95537e41", "metadata": { "execution": { "iopub.execute_input": "2023-09-26T17:43:43.518517Z", "iopub.status.busy": "2023-09-26T17:43:43.518322Z", "iopub.status.idle": "2023-09-26T17:43:43.534408Z", "shell.execute_reply": "2023-09-26T17:43:43.533693Z" } }, "outputs": [ { "data": { "text/html": [ "
def write_dataset(path: Union[Path, str], dataset: xr.Dataset) -> None:\n",
" """\n",
" Writes a :class:`~xarray.Dataset` to a file with the `h5netcdf` engine.\n",
"\n",
" Before writing the\n",
" :meth:`AdapterH5NetCDF.adapt() <quantify_core.data.dataset_adapters.AdapterH5NetCDF.adapt>`\n",
" is applied.\n",
"\n",
"    To accommodate complex-type numbers and arrays, ``invalid_netcdf=True`` is used.\n",
"\n",
" Parameters\n",
" ----------\n",
" path\n",
" Path to the file including filename and extension\n",
" dataset\n",
" The :class:`~xarray.Dataset` to be written to file.\n",
" """ # pylint: disable=line-too-long\n",
" _xarray_numpy_bool_patch(dataset) # See issue #161 in quantify-core\n",
"    # Only quantify_dataset_version>=2.0.0 requires the adapter\n",
" if "quantify_dataset_version" in dataset.attrs:\n",
" dataset = da.AdapterH5NetCDF.adapt(dataset)\n",
" dataset.to_netcdf(path, engine="h5netcdf", invalid_netcdf=True)\n",
"
def load_dataset(\n",
" tuid: TUID, datadir: str = None, name: str = DATASET_NAME\n",
") -> xr.Dataset:\n",
" """\n",
" Loads a dataset specified by a tuid.\n",
"\n",
" .. tip::\n",
"\n",
" This method also works when specifying only the first part of a\n",
" :class:`~quantify_core.data.types.TUID`.\n",
"\n",
" .. note::\n",
"\n",
" This method uses :func:`~.load_dataset` to ensure the file is closed after\n",
" loading as datasets are intended to be immutable after performing the initial\n",
" experiment.\n",
"\n",
" Parameters\n",
" ----------\n",
" tuid\n",
" A :class:`~quantify_core.data.types.TUID` string. It is also possible to specify\n",
" only the first part of a tuid.\n",
" datadir\n",
" Path of the data directory. If ``None``, uses :meth:`~get_datadir` to determine\n",
" the data directory.\n",
"\n",
"    Returns\n",
" -------\n",
" :\n",
" The dataset.\n",
"\n",
"    Raises\n",
" ------\n",
" FileNotFoundError\n",
" No data found for specified date.\n",
" """\n",
" return load_dataset_from_path(_locate_experiment_file(tuid, datadir, name))\n",
"
class AdapterH5NetCDF(DatasetAdapterBase):\n",
" """\n",
" Quantify dataset adapter for the ``h5netcdf`` engine.\n",
"\n",
" It has the functionality of adapting the Quantify dataset to a format compatible\n",
" with the ``h5netcdf`` xarray backend engine that is used to write and load the\n",
" dataset to/from disk.\n",
"\n",
" .. warning::\n",
"\n",
" The ``h5netcdf`` engine has minor issues when performing a two-way trip of the\n",
" dataset. The ``type`` of some attributes are not preserved. E.g., list- and\n",
" tuple-like objects are loaded as numpy arrays of ``dtype=object``.\n",
" """\n",
"\n",
" @classmethod\n",
" def adapt(cls, dataset: xr.Dataset) -> xr.Dataset:\n",
" """\n",
" Serializes to JSON the dataset and variables attributes.\n",
"\n",
" To prevent the JSON serialization for specific items, their names should be\n",
" listed under the attribute named ``json_serialize_exclude`` (for each ``attrs``\n",
" dictionary).\n",
"\n",
" Parameters\n",
" ----------\n",
" dataset\n",
" Dataset that needs to be adapted.\n",
"\n",
" Returns\n",
" -------\n",
" :\n",
" Dataset in which the attributes have been replaced with their JSON strings\n",
" version.\n",
" """\n",
"\n",
" return cls._transform(dataset, vals_converter=json.dumps)\n",
"\n",
" @classmethod\n",
" def recover(cls, dataset: xr.Dataset) -> xr.Dataset:\n",
" """\n",
" Reverts the action of ``.adapt()``.\n",
"\n",
" To prevent the JSON de-serialization for specific items, their names should be\n",
" listed under the attribute named ``json_serialize_exclude``\n",
" (for each ``attrs`` dictionary).\n",
"\n",
" Parameters\n",
" ----------\n",
" dataset\n",
" Dataset from which to recover the original format.\n",
"\n",
" Returns\n",
" -------\n",
" :\n",
" Dataset in which the attributes have been replaced with their python objects\n",
" version.\n",
" """\n",
"\n",
" return cls._transform(dataset, vals_converter=json.loads)\n",
"\n",
" @staticmethod\n",
" def attrs_convert(\n",
" attrs: dict,\n",
" inplace: bool = False,\n",
" vals_converter: Callable[Any, Any] = json.dumps,\n",
" ) -> dict:\n",
" """\n",
" Converts to/from JSON string the values of the keys which are not listed in the\n",
" ``json_serialize_exclude`` list.\n",
"\n",
" Parameters\n",
" ----------\n",
" attrs\n",
" The input dictionary.\n",
" inplace\n",
" If ``True`` the values are replaced in place, otherwise a deepcopy of\n",
" ``attrs`` is performed first.\n",
" """\n",
" json_serialize_exclude = attrs.get("json_serialize_exclude", [])\n",
"\n",
" attrs = attrs if inplace else deepcopy(attrs)\n",
" for attr_name, attr_val in attrs.items():\n",
" if attr_name not in json_serialize_exclude:\n",
" attrs[attr_name] = vals_converter(attr_val)\n",
" return attrs\n",
"\n",
" @classmethod\n",
" def _transform(\n",
" cls, dataset: xr.Dataset, vals_converter: Callable[Any, Any] = json.dumps\n",
" ) -> xr.Dataset:\n",
" dataset = xr.Dataset(\n",
" dataset,\n",
" attrs=cls.attrs_convert(\n",
" dataset.attrs, inplace=False, vals_converter=vals_converter\n",
" ),\n",
" )\n",
"\n",
" for var_name in dataset.variables.keys():\n",
" # The new dataset generated above has already a deepcopy of the attributes.\n",
" _ = cls.attrs_convert(\n",
" dataset[var_name].attrs, inplace=True, vals_converter=vals_converter\n",
" )\n",
"\n",
" return dataset\n",
"