diff --git "a/models/spabert/notebooks/SpaBertEmbeddingTest1.ipynb" "b/models/spabert/notebooks/SpaBertEmbeddingTest1.ipynb" deleted file mode 100644--- "a/models/spabert/notebooks/SpaBertEmbeddingTest1.ipynb" +++ /dev/null @@ -1 +0,0 @@ -{"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"provenance":[],"gpuType":"T4","collapsed_sections":["HcIS1Il6F0qk","Uj7jIEwwofAQ","3s2fSL9hgQCJ","CTZqYlpCZ0rK","a_nf_V2sZ5kl","JCWtE8CGVNWz"],"machine_shape":"hm","authorship_tag":"ABX9TyPpRCAG5fax3ZcoYr5Oi+ic"},"kernelspec":{"name":"python3","display_name":"Python 3"},"language_info":{"name":"python"},"accelerator":"GPU"},"cells":[{"cell_type":"markdown","source":["##Install Spacy and Other Modules\n","We do this first because we need to restart the runtime after installation"],"metadata":{"id":"HcIS1Il6F0qk"}},{"cell_type":"code","source":["#Install Spacy and Download the Transformer model\n","!pip install spacy\n","!pip install cupy # Using cupy-cuda112 for compatibility\n","!pip install thinc-gpu-ops\n","!python -m spacy download en_core_web_trf"],"metadata":{"id":"Qk9k9OEOo4i5","collapsed":true,"colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"status":"ok","timestamp":1725586957473,"user_tz":420,"elapsed":54499,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"c49af6e0-a8cc-47cf-a329-ef4902aef5a7"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["Requirement already satisfied: spacy in /usr/local/lib/python3.10/dist-packages (3.7.6)\n","Requirement already satisfied: spacy-legacy<3.1.0,>=3.0.11 in /usr/local/lib/python3.10/dist-packages (from spacy) (3.0.12)\n","Requirement already satisfied: spacy-loggers<2.0.0,>=1.0.0 in /usr/local/lib/python3.10/dist-packages (from spacy) (1.0.5)\n","Requirement already satisfied: murmurhash<1.1.0,>=0.28.0 in /usr/local/lib/python3.10/dist-packages (from spacy) (1.0.10)\n","Requirement already satisfied: cymem<2.1.0,>=2.0.2 in /usr/local/lib/python3.10/dist-packages (from spacy) (2.0.8)\n","Requirement already satisfied: preshed<3.1.0,>=3.0.2 in /usr/local/lib/python3.10/dist-packages (from spacy) (3.0.9)\n","Requirement already satisfied: thinc<8.3.0,>=8.2.2 in /usr/local/lib/python3.10/dist-packages (from spacy) (8.2.5)\n","Requirement already satisfied: wasabi<1.2.0,>=0.9.1 in /usr/local/lib/python3.10/dist-packages (from spacy) (1.1.3)\n","Requirement already satisfied: srsly<3.0.0,>=2.4.3 in /usr/local/lib/python3.10/dist-packages (from spacy) (2.4.8)\n","Requirement already satisfied: catalogue<2.1.0,>=2.0.6 in /usr/local/lib/python3.10/dist-packages (from spacy) (2.0.10)\n","Requirement already satisfied: weasel<0.5.0,>=0.1.0 in /usr/local/lib/python3.10/dist-packages (from spacy) (0.4.1)\n","Requirement already satisfied: typer<1.0.0,>=0.3.0 in /usr/local/lib/python3.10/dist-packages (from spacy) (0.12.5)\n","Requirement already satisfied: tqdm<5.0.0,>=4.38.0 in /usr/local/lib/python3.10/dist-packages (from spacy) (4.66.5)\n","Requirement already satisfied: requests<3.0.0,>=2.13.0 in /usr/local/lib/python3.10/dist-packages (from spacy) (2.32.3)\n","Requirement already satisfied: pydantic!=1.8,!=1.8.1,<3.0.0,>=1.7.4 in /usr/local/lib/python3.10/dist-packages (from spacy) (2.8.2)\n","Requirement already satisfied: jinja2 in /usr/local/lib/python3.10/dist-packages (from spacy) (3.1.4)\n","Requirement already satisfied: setuptools in /usr/local/lib/python3.10/dist-packages (from spacy) (71.0.4)\n","Requirement already satisfied: packaging>=20.0 in 
/usr/local/lib/python3.10/dist-packages (from spacy) (24.1)\n","Requirement already satisfied: langcodes<4.0.0,>=3.2.0 in /usr/local/lib/python3.10/dist-packages (from spacy) (3.4.0)\n","Requirement already satisfied: numpy>=1.19.0 in /usr/local/lib/python3.10/dist-packages (from spacy) (1.26.4)\n","Requirement already satisfied: language-data>=1.2 in /usr/local/lib/python3.10/dist-packages (from langcodes<4.0.0,>=3.2.0->spacy) (1.2.0)\n","Requirement already satisfied: annotated-types>=0.4.0 in /usr/local/lib/python3.10/dist-packages (from pydantic!=1.8,!=1.8.1,<3.0.0,>=1.7.4->spacy) (0.7.0)\n","Requirement already satisfied: pydantic-core==2.20.1 in /usr/local/lib/python3.10/dist-packages (from pydantic!=1.8,!=1.8.1,<3.0.0,>=1.7.4->spacy) (2.20.1)\n","Requirement already satisfied: typing-extensions>=4.6.1 in /usr/local/lib/python3.10/dist-packages (from pydantic!=1.8,!=1.8.1,<3.0.0,>=1.7.4->spacy) (4.12.2)\n","Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests<3.0.0,>=2.13.0->spacy) (3.3.2)\n","Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests<3.0.0,>=2.13.0->spacy) (3.8)\n","Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests<3.0.0,>=2.13.0->spacy) (2.0.7)\n","Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests<3.0.0,>=2.13.0->spacy) (2024.8.30)\n","Requirement already satisfied: blis<0.8.0,>=0.7.8 in /usr/local/lib/python3.10/dist-packages (from thinc<8.3.0,>=8.2.2->spacy) (0.7.11)\n","Requirement already satisfied: confection<1.0.0,>=0.0.1 in /usr/local/lib/python3.10/dist-packages (from thinc<8.3.0,>=8.2.2->spacy) (0.1.5)\n","Requirement already satisfied: click>=8.0.0 in /usr/local/lib/python3.10/dist-packages (from typer<1.0.0,>=0.3.0->spacy) (8.1.7)\n","Requirement already satisfied: shellingham>=1.3.0 in /usr/local/lib/python3.10/dist-packages (from typer<1.0.0,>=0.3.0->spacy) (1.5.4)\n","Requirement already satisfied: rich>=10.11.0 in /usr/local/lib/python3.10/dist-packages (from typer<1.0.0,>=0.3.0->spacy) (13.8.0)\n","Requirement already satisfied: cloudpathlib<1.0.0,>=0.7.0 in /usr/local/lib/python3.10/dist-packages (from weasel<0.5.0,>=0.1.0->spacy) (0.19.0)\n","Requirement already satisfied: smart-open<8.0.0,>=5.2.1 in /usr/local/lib/python3.10/dist-packages (from weasel<0.5.0,>=0.1.0->spacy) (7.0.4)\n","Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.10/dist-packages (from jinja2->spacy) (2.1.5)\n","Requirement already satisfied: marisa-trie>=0.7.7 in /usr/local/lib/python3.10/dist-packages (from language-data>=1.2->langcodes<4.0.0,>=3.2.0->spacy) (1.2.0)\n","Requirement already satisfied: markdown-it-py>=2.2.0 in /usr/local/lib/python3.10/dist-packages (from rich>=10.11.0->typer<1.0.0,>=0.3.0->spacy) (3.0.0)\n","Requirement already satisfied: pygments<3.0.0,>=2.13.0 in /usr/local/lib/python3.10/dist-packages (from rich>=10.11.0->typer<1.0.0,>=0.3.0->spacy) (2.16.1)\n","Requirement already satisfied: wrapt in /usr/local/lib/python3.10/dist-packages (from smart-open<8.0.0,>=5.2.1->weasel<0.5.0,>=0.1.0->spacy) (1.16.0)\n","Requirement already satisfied: mdurl~=0.1 in /usr/local/lib/python3.10/dist-packages (from markdown-it-py>=2.2.0->rich>=10.11.0->typer<1.0.0,>=0.3.0->spacy) (0.1.2)\n","Collecting cupy\n"," Downloading cupy-13.3.0.tar.gz (3.4 MB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m 
\u001b[32m3.4/3.4 MB\u001b[0m \u001b[31m50.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25h Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n","Requirement already satisfied: numpy<2.3,>=1.22 in /usr/local/lib/python3.10/dist-packages (from cupy) (1.26.4)\n","Requirement already satisfied: fastrlock>=0.5 in /usr/local/lib/python3.10/dist-packages (from cupy) (0.8.2)\n","Building wheels for collected packages: cupy\n"," \u001b[1;31merror\u001b[0m: \u001b[1msubprocess-exited-with-error\u001b[0m\n"," \n"," \u001b[31m×\u001b[0m \u001b[32mpython setup.py bdist_wheel\u001b[0m did not run successfully.\n"," \u001b[31m│\u001b[0m exit code: \u001b[1;36m1\u001b[0m\n"," \u001b[31m╰─>\u001b[0m See above for output.\n"," \n"," \u001b[1;35mnote\u001b[0m: This error originates from a subprocess, and is likely not a problem with pip.\n"," Building wheel for cupy (setup.py) ... \u001b[?25lerror\n","\u001b[31m ERROR: Failed building wheel for cupy\u001b[0m\u001b[31m\n","\u001b[0m\u001b[?25h Running setup.py clean for cupy\n","Failed to build cupy\n","\u001b[31mERROR: ERROR: Failed to build installable wheels for some pyproject.toml based projects (cupy)\u001b[0m\u001b[31m\n","\u001b[0mCollecting thinc-gpu-ops\n"," Downloading thinc_gpu_ops-0.0.4.tar.gz (483 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m483.5/483.5 kB\u001b[0m \u001b[31m17.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25h Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n","Requirement already satisfied: numpy>=1.7.0 in /usr/local/lib/python3.10/dist-packages (from thinc-gpu-ops) (1.26.4)\n","Building wheels for collected packages: thinc-gpu-ops\n"," \u001b[1;31merror\u001b[0m: \u001b[1msubprocess-exited-with-error\u001b[0m\n"," \n"," \u001b[31m×\u001b[0m \u001b[32mpython setup.py bdist_wheel\u001b[0m did not run successfully.\n"," \u001b[31m│\u001b[0m exit code: \u001b[1;36m1\u001b[0m\n"," \u001b[31m╰─>\u001b[0m See above for output.\n"," \n"," \u001b[1;35mnote\u001b[0m: This error originates from a subprocess, and is likely not a problem with pip.\n"," Building wheel for thinc-gpu-ops (setup.py) ... 
\u001b[?25lerror\n","\u001b[31m ERROR: Failed building wheel for thinc-gpu-ops\u001b[0m\u001b[31m\n","\u001b[0m\u001b[?25h Running setup.py clean for thinc-gpu-ops\n","Failed to build thinc-gpu-ops\n","\u001b[31mERROR: ERROR: Failed to build installable wheels for some pyproject.toml based projects (thinc-gpu-ops)\u001b[0m\u001b[31m\n","\u001b[0mCollecting en-core-web-trf==3.7.3\n"," Downloading https://github.com/explosion/spacy-models/releases/download/en_core_web_trf-3.7.3/en_core_web_trf-3.7.3-py3-none-any.whl (457.4 MB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m457.4/457.4 MB\u001b[0m \u001b[31m2.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hRequirement already satisfied: spacy<3.8.0,>=3.7.2 in /usr/local/lib/python3.10/dist-packages (from en-core-web-trf==3.7.3) (3.7.6)\n","Collecting spacy-curated-transformers<0.3.0,>=0.2.0 (from en-core-web-trf==3.7.3)\n"," Downloading spacy_curated_transformers-0.2.2-py2.py3-none-any.whl.metadata (2.7 kB)\n","Requirement already satisfied: spacy-legacy<3.1.0,>=3.0.11 in /usr/local/lib/python3.10/dist-packages (from spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (3.0.12)\n","Requirement already satisfied: spacy-loggers<2.0.0,>=1.0.0 in /usr/local/lib/python3.10/dist-packages (from spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (1.0.5)\n","Requirement already satisfied: murmurhash<1.1.0,>=0.28.0 in /usr/local/lib/python3.10/dist-packages (from spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (1.0.10)\n","Requirement already satisfied: cymem<2.1.0,>=2.0.2 in /usr/local/lib/python3.10/dist-packages (from spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (2.0.8)\n","Requirement already satisfied: preshed<3.1.0,>=3.0.2 in /usr/local/lib/python3.10/dist-packages (from spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (3.0.9)\n","Requirement already satisfied: thinc<8.3.0,>=8.2.2 in /usr/local/lib/python3.10/dist-packages (from spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (8.2.5)\n","Requirement already satisfied: wasabi<1.2.0,>=0.9.1 in /usr/local/lib/python3.10/dist-packages (from spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (1.1.3)\n","Requirement already satisfied: srsly<3.0.0,>=2.4.3 in /usr/local/lib/python3.10/dist-packages (from spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (2.4.8)\n","Requirement already satisfied: catalogue<2.1.0,>=2.0.6 in /usr/local/lib/python3.10/dist-packages (from spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (2.0.10)\n","Requirement already satisfied: weasel<0.5.0,>=0.1.0 in /usr/local/lib/python3.10/dist-packages (from spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (0.4.1)\n","Requirement already satisfied: typer<1.0.0,>=0.3.0 in /usr/local/lib/python3.10/dist-packages (from spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (0.12.5)\n","Requirement already satisfied: tqdm<5.0.0,>=4.38.0 in /usr/local/lib/python3.10/dist-packages (from spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (4.66.5)\n","Requirement already satisfied: requests<3.0.0,>=2.13.0 in /usr/local/lib/python3.10/dist-packages (from spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (2.32.3)\n","Requirement already satisfied: pydantic!=1.8,!=1.8.1,<3.0.0,>=1.7.4 in /usr/local/lib/python3.10/dist-packages (from spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (2.8.2)\n","Requirement already satisfied: jinja2 in /usr/local/lib/python3.10/dist-packages (from spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (3.1.4)\n","Requirement already satisfied: setuptools in /usr/local/lib/python3.10/dist-packages (from 
spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (71.0.4)\n","Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.10/dist-packages (from spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (24.1)\n","Requirement already satisfied: langcodes<4.0.0,>=3.2.0 in /usr/local/lib/python3.10/dist-packages (from spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (3.4.0)\n","Requirement already satisfied: numpy>=1.19.0 in /usr/local/lib/python3.10/dist-packages (from spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (1.26.4)\n","Collecting curated-transformers<0.2.0,>=0.1.0 (from spacy-curated-transformers<0.3.0,>=0.2.0->en-core-web-trf==3.7.3)\n"," Downloading curated_transformers-0.1.1-py2.py3-none-any.whl.metadata (965 bytes)\n","Collecting curated-tokenizers<0.1.0,>=0.0.9 (from spacy-curated-transformers<0.3.0,>=0.2.0->en-core-web-trf==3.7.3)\n"," Downloading curated_tokenizers-0.0.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (1.9 kB)\n","Requirement already satisfied: torch>=1.12.0 in /usr/local/lib/python3.10/dist-packages (from spacy-curated-transformers<0.3.0,>=0.2.0->en-core-web-trf==3.7.3) (2.4.0+cu121)\n","Requirement already satisfied: regex>=2022 in /usr/local/lib/python3.10/dist-packages (from curated-tokenizers<0.1.0,>=0.0.9->spacy-curated-transformers<0.3.0,>=0.2.0->en-core-web-trf==3.7.3) (2024.5.15)\n","Requirement already satisfied: language-data>=1.2 in /usr/local/lib/python3.10/dist-packages (from langcodes<4.0.0,>=3.2.0->spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (1.2.0)\n","Requirement already satisfied: annotated-types>=0.4.0 in /usr/local/lib/python3.10/dist-packages (from pydantic!=1.8,!=1.8.1,<3.0.0,>=1.7.4->spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (0.7.0)\n","Requirement already satisfied: pydantic-core==2.20.1 in /usr/local/lib/python3.10/dist-packages (from pydantic!=1.8,!=1.8.1,<3.0.0,>=1.7.4->spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (2.20.1)\n","Requirement already satisfied: typing-extensions>=4.6.1 in /usr/local/lib/python3.10/dist-packages (from pydantic!=1.8,!=1.8.1,<3.0.0,>=1.7.4->spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (4.12.2)\n","Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests<3.0.0,>=2.13.0->spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (3.3.2)\n","Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests<3.0.0,>=2.13.0->spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (3.8)\n","Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests<3.0.0,>=2.13.0->spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (2.0.7)\n","Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests<3.0.0,>=2.13.0->spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (2024.8.30)\n","Requirement already satisfied: blis<0.8.0,>=0.7.8 in /usr/local/lib/python3.10/dist-packages (from thinc<8.3.0,>=8.2.2->spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (0.7.11)\n","Requirement already satisfied: confection<1.0.0,>=0.0.1 in /usr/local/lib/python3.10/dist-packages (from thinc<8.3.0,>=8.2.2->spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (0.1.5)\n","Requirement already satisfied: filelock in /usr/local/lib/python3.10/dist-packages (from torch>=1.12.0->spacy-curated-transformers<0.3.0,>=0.2.0->en-core-web-trf==3.7.3) (3.15.4)\n","Requirement already satisfied: sympy in /usr/local/lib/python3.10/dist-packages (from 
torch>=1.12.0->spacy-curated-transformers<0.3.0,>=0.2.0->en-core-web-trf==3.7.3) (1.13.2)\n","Requirement already satisfied: networkx in /usr/local/lib/python3.10/dist-packages (from torch>=1.12.0->spacy-curated-transformers<0.3.0,>=0.2.0->en-core-web-trf==3.7.3) (3.3)\n","Requirement already satisfied: fsspec in /usr/local/lib/python3.10/dist-packages (from torch>=1.12.0->spacy-curated-transformers<0.3.0,>=0.2.0->en-core-web-trf==3.7.3) (2024.6.1)\n","Requirement already satisfied: click>=8.0.0 in /usr/local/lib/python3.10/dist-packages (from typer<1.0.0,>=0.3.0->spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (8.1.7)\n","Requirement already satisfied: shellingham>=1.3.0 in /usr/local/lib/python3.10/dist-packages (from typer<1.0.0,>=0.3.0->spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (1.5.4)\n","Requirement already satisfied: rich>=10.11.0 in /usr/local/lib/python3.10/dist-packages (from typer<1.0.0,>=0.3.0->spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (13.8.0)\n","Requirement already satisfied: cloudpathlib<1.0.0,>=0.7.0 in /usr/local/lib/python3.10/dist-packages (from weasel<0.5.0,>=0.1.0->spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (0.19.0)\n","Requirement already satisfied: smart-open<8.0.0,>=5.2.1 in /usr/local/lib/python3.10/dist-packages (from weasel<0.5.0,>=0.1.0->spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (7.0.4)\n","Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.10/dist-packages (from jinja2->spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (2.1.5)\n","Requirement already satisfied: marisa-trie>=0.7.7 in /usr/local/lib/python3.10/dist-packages (from language-data>=1.2->langcodes<4.0.0,>=3.2.0->spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (1.2.0)\n","Requirement already satisfied: markdown-it-py>=2.2.0 in /usr/local/lib/python3.10/dist-packages (from rich>=10.11.0->typer<1.0.0,>=0.3.0->spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (3.0.0)\n","Requirement already satisfied: pygments<3.0.0,>=2.13.0 in /usr/local/lib/python3.10/dist-packages (from rich>=10.11.0->typer<1.0.0,>=0.3.0->spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (2.16.1)\n","Requirement already satisfied: wrapt in /usr/local/lib/python3.10/dist-packages (from smart-open<8.0.0,>=5.2.1->weasel<0.5.0,>=0.1.0->spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (1.16.0)\n","Requirement already satisfied: mpmath<1.4,>=1.1.0 in /usr/local/lib/python3.10/dist-packages (from sympy->torch>=1.12.0->spacy-curated-transformers<0.3.0,>=0.2.0->en-core-web-trf==3.7.3) (1.3.0)\n","Requirement already satisfied: mdurl~=0.1 in /usr/local/lib/python3.10/dist-packages (from markdown-it-py>=2.2.0->rich>=10.11.0->typer<1.0.0,>=0.3.0->spacy<3.8.0,>=3.7.2->en-core-web-trf==3.7.3) (0.1.2)\n","Downloading spacy_curated_transformers-0.2.2-py2.py3-none-any.whl (236 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m236.3/236.3 kB\u001b[0m \u001b[31m9.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hDownloading curated_tokenizers-0.0.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (731 kB)\n","\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m731.6/731.6 kB\u001b[0m \u001b[31m44.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n","\u001b[?25hDownloading curated_transformers-0.1.1-py2.py3-none-any.whl (25 kB)\n","Installing collected packages: curated-tokenizers, curated-transformers, spacy-curated-transformers, en-core-web-trf\n","Successfully installed curated-tokenizers-0.0.9 curated-transformers-0.1.1 en-core-web-trf-3.7.3 
spacy-curated-transformers-0.2.2\n","\u001b[38;5;2m✔ Download and installation successful\u001b[0m\n","You can now load the package via spacy.load('en_core_web_trf')\n","\u001b[38;5;3m⚠ Restart to reload dependencies\u001b[0m\n","If you are in a Jupyter or Colab notebook, you may need to restart Python in\n","order to load all the package's dependencies. You can do this by selecting the\n","'Restart kernel' or 'Restart runtime' option.\n"]}]},{"cell_type":"code","source":["!sudo update-alternatives --config python3"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"kB-c21Vwh9T6","executionInfo":{"status":"ok","timestamp":1725586957473,"user_tz":420,"elapsed":4,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"c4b6ad5a-82c2-45b3-b534-2bf6ab0bd161"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["update-alternatives: error: no alternatives for python3\n"]}]},{"cell_type":"markdown","source":["##Mount and Import Google Drive\n"],"metadata":{"id":"Uj7jIEwwofAQ"}},{"cell_type":"code","execution_count":2,"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"collapsed":true,"id":"li_B2uR4oWB0","executionInfo":{"status":"ok","timestamp":1726182724901,"user_tz":420,"elapsed":18281,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"4f819bc6-a3e0-4a24-9be2-45f296101c5a"},"outputs":[{"output_type":"stream","name":"stdout","text":["Mounted at /content/drive\n","/content/drive\n"]}],"source":["#Mount Google Drive\n","from google.colab import drive\n","drive.mount('/content/drive')\n","%cd '/content/drive'"]},{"cell_type":"code","source":["#Import sys and append the google drive paths\n","import sys\n","models_path = '/content/drive/MyDrive/spaBERT/spabert'\n","sys.path.append(models_path)\n","sys.path.append('/content/drive/MyDrive/spaBERT/spabert/datasets')\n","sys.path.append(\"../\")\n","print(sys.path)"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"collapsed":true,"id":"tDfdRYnVovVH","executionInfo":{"status":"ok","timestamp":1726182734348,"user_tz":420,"elapsed":386,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"f486fa16-a593-4cc2-aeb4-687e15655f7a"},"execution_count":3,"outputs":[{"output_type":"stream","name":"stdout","text":["['/content', '/env/python', '/usr/lib/python310.zip', '/usr/lib/python3.10', '/usr/lib/python3.10/lib-dynload', '', '/usr/local/lib/python3.10/dist-packages', '/usr/lib/python3/dist-packages', '/usr/local/lib/python3.10/dist-packages/IPython/extensions', '/usr/local/lib/python3.10/dist-packages/setuptools/_vendor', '/root/.ipython', '/content/drive/MyDrive/spaBERT/spabert', '/content/drive/MyDrive/spaBERT/spabert/datasets', '../']\n"]}]},{"cell_type":"markdown","source":["##Import Spacy and Load the Transformer Model\n"],"metadata":{"id":"zp5hj1ZaGV1x"}},{"cell_type":"code","source":["#Import Spacy\n","import spacy\n","from spacy import displacy\n","\n","if spacy.prefer_gpu():\n"," print(\"GPU is enabled for spaCy\")\n","else:\n"," print(\"GPU is not enabled for spaCy\")\n","#Load the Transformer Model\n","nlp = spacy.load('en_core_web_trf')\n","\n","#Print out the Pipeline\n","print(nlp.pipe_names)"],"metadata":{"id":"H21mzrGPGLui"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["##EXAMPLE: Import sample sentence to test out the model and ensure spacy is working.\n","#from spacy.lang.en.examples import sentences\n","#\n","#spacy.require_gpu()\n","#doc = nlp(sentences[0] + 
\"\\n\")\n","#\n","## Display Entities\n","#from IPython.core.display import display, HTML\n","#display(HTML(displacy.render(doc, style=\"ent\")))\n","#\n","#\n","## document level\n","#ents = [(e.text, e.start_char, e.end_char, e.label_, e.kb_id_) for e in doc.ents]\n","#print(ents)"],"metadata":{"id":"8S3etakKGhbq"},"execution_count":null,"outputs":[]},{"cell_type":"markdown","source":["##Define Some Functions and Import some Packages\n","Various user defined functions and pacakages that will be used throughout the notebook"],"metadata":{"id":"duk6hSa_Gyvi"}},{"cell_type":"code","source":["# install pip\n","!curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py\n","!python3 get-pip.py --force-reinstall\n","\n","#install colab's dependencies\n","!python3 -m pip install ipython ipython_genutils ipykernel jupyter_console prompt_toolkit httplib2 astor\n","\n","# link to the old google package\n","!ln -s /usr/local/lib/python3.9/dist-packages/google \\\n"," /usr/local/lib/python3.8/dist-packages/google\n","\n","# There has got to be a better way to do this...but there's a bad import in some of the colab files\n","# IPython no longer exposes traitlets like this, it's a separate package now\n","!sed -i \"s/from IPython.utils import traitlets as _traitlets/import traitlets as _traitlets/\" /usr/local/lib/python3.8/dist-packages/google/colab/*.py\n","!sed -i \"s/from IPython.utils import traitlets/import traitlets/\" /usr/local/lib/python3.8/dist-packages/google/colab/*.py"],"metadata":{"collapsed":true,"id":"mxvQr-YeG6EW","colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"status":"ok","timestamp":1726182771762,"user_tz":420,"elapsed":4747,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"49e15d8b-58ab-4084-f464-2b501b8ae769"},"execution_count":4,"outputs":[{"output_type":"stream","name":"stdout","text":[" % Total % Received % Xferd Average Speed Time Time Time Current\n"," Dload Upload Total Spent Left Speed\n","\r 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0\r 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0Warning: Failed to create the file get-pip.py: Operation not supported\n","\r 0 2213k 0 16384 0 0 206k 0 0:00:10 --:--:-- 0:00:10 205k\n","curl: (23) Failure writing output to destination\n","python3: can't open file '/content/drive/get-pip.py': [Errno 2] No such file or directory\n","Requirement already satisfied: ipython in /usr/local/lib/python3.10/dist-packages (7.34.0)\n","Requirement already satisfied: ipython_genutils in /usr/local/lib/python3.10/dist-packages (0.2.0)\n","Requirement already satisfied: ipykernel in /usr/local/lib/python3.10/dist-packages (5.5.6)\n","Requirement already satisfied: jupyter_console in /usr/local/lib/python3.10/dist-packages (6.1.0)\n","Requirement already satisfied: prompt_toolkit in /usr/local/lib/python3.10/dist-packages (3.0.47)\n","Requirement already satisfied: httplib2 in /usr/local/lib/python3.10/dist-packages (0.22.0)\n","Collecting astor\n"," Downloading astor-0.8.1-py2.py3-none-any.whl.metadata (4.2 kB)\n","Requirement already satisfied: setuptools>=18.5 in /usr/local/lib/python3.10/dist-packages (from ipython) (71.0.4)\n","Collecting jedi>=0.16 (from ipython)\n"," Using cached jedi-0.19.1-py2.py3-none-any.whl.metadata (22 kB)\n","Requirement already satisfied: decorator in /usr/local/lib/python3.10/dist-packages (from ipython) (4.4.2)\n","Requirement already satisfied: pickleshare in /usr/local/lib/python3.10/dist-packages (from ipython) (0.7.5)\n","Requirement already satisfied: traitlets>=4.2 in 
/usr/local/lib/python3.10/dist-packages (from ipython) (5.7.1)\n","Requirement already satisfied: pygments in /usr/local/lib/python3.10/dist-packages (from ipython) (2.16.1)\n","Requirement already satisfied: backcall in /usr/local/lib/python3.10/dist-packages (from ipython) (0.2.0)\n","Requirement already satisfied: matplotlib-inline in /usr/local/lib/python3.10/dist-packages (from ipython) (0.1.7)\n","Requirement already satisfied: pexpect>4.3 in /usr/local/lib/python3.10/dist-packages (from ipython) (4.9.0)\n","Requirement already satisfied: jupyter-client in /usr/local/lib/python3.10/dist-packages (from ipykernel) (6.1.12)\n","Requirement already satisfied: tornado>=4.2 in /usr/local/lib/python3.10/dist-packages (from ipykernel) (6.3.3)\n","Requirement already satisfied: wcwidth in /usr/local/lib/python3.10/dist-packages (from prompt_toolkit) (0.2.13)\n","Requirement already satisfied: pyparsing!=3.0.0,!=3.0.1,!=3.0.2,!=3.0.3,<4,>=2.4.2 in /usr/local/lib/python3.10/dist-packages (from httplib2) (3.1.4)\n","Requirement already satisfied: parso<0.9.0,>=0.8.3 in /usr/local/lib/python3.10/dist-packages (from jedi>=0.16->ipython) (0.8.4)\n","Requirement already satisfied: ptyprocess>=0.5 in /usr/local/lib/python3.10/dist-packages (from pexpect>4.3->ipython) (0.7.0)\n","Requirement already satisfied: jupyter-core>=4.6.0 in /usr/local/lib/python3.10/dist-packages (from jupyter-client->ipykernel) (5.7.2)\n","Requirement already satisfied: pyzmq>=13 in /usr/local/lib/python3.10/dist-packages (from jupyter-client->ipykernel) (24.0.1)\n","Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.10/dist-packages (from jupyter-client->ipykernel) (2.8.2)\n","Requirement already satisfied: platformdirs>=2.5 in /usr/local/lib/python3.10/dist-packages (from jupyter-core>=4.6.0->jupyter-client->ipykernel) (4.3.2)\n","Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.10/dist-packages (from python-dateutil>=2.1->jupyter-client->ipykernel) (1.16.0)\n","Downloading astor-0.8.1-py2.py3-none-any.whl (27 kB)\n","Using cached jedi-0.19.1-py2.py3-none-any.whl (1.6 MB)\n","Installing collected packages: jedi, astor\n","Successfully installed astor-0.8.1 jedi-0.19.1\n","ln: failed to create symbolic link '/usr/local/lib/python3.8/dist-packages/google': No such file or directory\n","sed: can't read /usr/local/lib/python3.8/dist-packages/google/colab/*.py: No such file or directory\n","sed: can't read /usr/local/lib/python3.8/dist-packages/google/colab/*.py: No such file or directory\n"]}]},{"cell_type":"code","source":["#check python version\n","import sys\n","print(sys.version)\n","!python3 --version\n","!python --version"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"3lVfNBakjQTK","executionInfo":{"status":"ok","timestamp":1726182772144,"user_tz":420,"elapsed":391,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"c111277d-022c-47ff-dcd3-4a91fcfe5bdf","collapsed":true},"execution_count":5,"outputs":[{"output_type":"stream","name":"stdout","text":["3.10.12 (main, Jul 29 2024, 16:56:48) [GCC 11.4.0]\n","Python 3.10.12\n","Python 3.10.12\n"]}]},{"cell_type":"code","source":["!pip list | grep packaging"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"PiMBgYI1jS4R","executionInfo":{"status":"ok","timestamp":1726182772955,"user_tz":420,"elapsed":812,"user":{"displayName":"Jason 
Phillips","userId":"10136472498761089328"}},"outputId":"f93f3ad1-0e13-44df-e2a4-d494e4a48bf1","collapsed":true},"execution_count":6,"outputs":[{"output_type":"stream","name":"stdout","text":["packaging 24.1\n"]}]},{"cell_type":"code","source":["pip install packaging==21.3"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"rGOv-UN7jZMG","executionInfo":{"status":"ok","timestamp":1726182824830,"user_tz":420,"elapsed":2277,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"02924b0b-5e25-4718-cf37-462ee403903d","collapsed":true},"execution_count":1,"outputs":[{"output_type":"stream","name":"stdout","text":["Requirement already satisfied: packaging==21.3 in /usr/local/lib/python3.10/dist-packages (21.3)\n","Requirement already satisfied: pyparsing!=3.0.5,>=2.0.2 in /usr/local/lib/python3.10/dist-packages (from packaging==21.3) (3.1.4)\n"]}]},{"cell_type":"code","source":["!pip install transformers==4.3.2\n","!pip install torch==1.7.1+cu101 torchvision==0.8.2+cu101 -f https://download.pytorch.org/whl/torch_stable.html\n","!pip install sentencepiece\n","!pip install pandas\n","\n","import torch\n","import io\n","import torch.nn.functional as F\n","import random\n","import numpy as np\n","import time\n","import math\n","import datetime\n","import torch.nn as nn\n","from transformers import *\n","from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n","import random\n","import pandas as pd\n","import pickle\n","import csv\n","\n","\n","##Set random values\n","seed_val = 42\n","random.seed(seed_val)\n","np.random.seed(seed_val)\n","torch.manual_seed(seed_val)\n","if torch.cuda.is_available():\n"," torch.cuda.manual_seed_all(seed_val)"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"TyI1oVu9jimT","executionInfo":{"status":"ok","timestamp":1726184032494,"user_tz":420,"elapsed":15354,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"232a2a63-bf15-48e5-cca4-063d4155d17f","collapsed":true},"execution_count":28,"outputs":[{"output_type":"stream","name":"stdout","text":["Collecting transformers==4.3.2\n"," Using cached transformers-4.3.2-py3-none-any.whl.metadata (36 kB)\n","Requirement already satisfied: filelock in /usr/local/lib/python3.10/dist-packages (from transformers==4.3.2) (3.16.0)\n","Requirement already satisfied: numpy>=1.17 in /usr/local/lib/python3.10/dist-packages (from transformers==4.3.2) (1.26.4)\n","Requirement already satisfied: packaging in /usr/local/lib/python3.10/dist-packages (from transformers==4.3.2) (21.3)\n","Requirement already satisfied: regex!=2019.12.17 in /usr/local/lib/python3.10/dist-packages (from transformers==4.3.2) (2024.5.15)\n","Requirement already satisfied: requests in /usr/local/lib/python3.10/dist-packages (from transformers==4.3.2) (2.32.3)\n","Collecting sacremoses (from transformers==4.3.2)\n"," Using cached sacremoses-0.1.1-py3-none-any.whl.metadata (8.3 kB)\n","Collecting tokenizers<0.11,>=0.10.1 (from transformers==4.3.2)\n"," Using cached tokenizers-0.10.3.tar.gz (212 kB)\n"," Installing build dependencies ... \u001b[?25l\u001b[?25hdone\n"," Getting requirements to build wheel ... \u001b[?25l\u001b[?25hdone\n"," Preparing metadata (pyproject.toml) ... 
\u001b[?25l\u001b[?25hdone\n","Requirement already satisfied: tqdm>=4.27 in /usr/local/lib/python3.10/dist-packages (from transformers==4.3.2) (4.66.5)\n","Requirement already satisfied: pyparsing!=3.0.5,>=2.0.2 in /usr/local/lib/python3.10/dist-packages (from packaging->transformers==4.3.2) (3.1.4)\n","Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests->transformers==4.3.2) (3.3.2)\n","Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests->transformers==4.3.2) (3.8)\n","Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests->transformers==4.3.2) (2.0.7)\n","Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests->transformers==4.3.2) (2024.8.30)\n","Requirement already satisfied: click in /usr/local/lib/python3.10/dist-packages (from sacremoses->transformers==4.3.2) (8.1.7)\n","Requirement already satisfied: joblib in /usr/local/lib/python3.10/dist-packages (from sacremoses->transformers==4.3.2) (1.4.2)\n","Using cached transformers-4.3.2-py3-none-any.whl (1.8 MB)\n","Using cached sacremoses-0.1.1-py3-none-any.whl (897 kB)\n","Building wheels for collected packages: tokenizers\n"," \u001b[1;31merror\u001b[0m: \u001b[1msubprocess-exited-with-error\u001b[0m\n"," \n"," \u001b[31m×\u001b[0m \u001b[32mBuilding wheel for tokenizers \u001b[0m\u001b[1;32m(\u001b[0m\u001b[32mpyproject.toml\u001b[0m\u001b[1;32m)\u001b[0m did not run successfully.\n"," \u001b[31m│\u001b[0m exit code: \u001b[1;36m1\u001b[0m\n"," \u001b[31m╰─>\u001b[0m See above for output.\n"," \n"," \u001b[1;35mnote\u001b[0m: This error originates from a subprocess, and is likely not a problem with pip.\n"," Building wheel for tokenizers (pyproject.toml) ... 
\u001b[?25l\u001b[?25herror\n","\u001b[31m ERROR: Failed building wheel for tokenizers\u001b[0m\u001b[31m\n","\u001b[0mFailed to build tokenizers\n","\u001b[31mERROR: ERROR: Failed to build installable wheels for some pyproject.toml based projects (tokenizers)\u001b[0m\u001b[31m\n","\u001b[0mLooking in links: https://download.pytorch.org/whl/torch_stable.html\n","\u001b[31mERROR: Could not find a version that satisfies the requirement torch==1.7.1+cu101 (from versions: 1.11.0, 1.11.0+cpu, 1.11.0+cu102, 1.11.0+cu113, 1.11.0+cu115, 1.11.0+rocm4.3.1, 1.11.0+rocm4.5.2, 1.12.0, 1.12.0+cpu, 1.12.0+cu102, 1.12.0+cu113, 1.12.0+cu116, 1.12.0+rocm5.0, 1.12.0+rocm5.1.1, 1.12.1, 1.12.1+cpu, 1.12.1+cu102, 1.12.1+cu113, 1.12.1+cu116, 1.12.1+rocm5.0, 1.12.1+rocm5.1.1, 1.13.0, 1.13.0+cpu, 1.13.0+cu116, 1.13.0+cu117, 1.13.0+cu117.with.pypi.cudnn, 1.13.0+rocm5.1.1, 1.13.0+rocm5.2, 1.13.1, 1.13.1+cpu, 1.13.1+cu116, 1.13.1+cu117, 1.13.1+cu117.with.pypi.cudnn, 1.13.1+rocm5.1.1, 1.13.1+rocm5.2, 2.0.0, 2.0.0+cpu, 2.0.0+cpu.cxx11.abi, 2.0.0+cu117, 2.0.0+cu117.with.pypi.cudnn, 2.0.0+cu118, 2.0.0+rocm5.3, 2.0.0+rocm5.4.2, 2.0.1, 2.0.1+cpu, 2.0.1+cpu.cxx11.abi, 2.0.1+cu117, 2.0.1+cu117.with.pypi.cudnn, 2.0.1+cu118, 2.0.1+rocm5.3, 2.0.1+rocm5.4.2, 2.1.0, 2.1.0+cpu, 2.1.0+cpu.cxx11.abi, 2.1.0+cu118, 2.1.0+cu121, 2.1.0+cu121.with.pypi.cudnn, 2.1.0+rocm5.5, 2.1.0+rocm5.6, 2.1.1, 2.1.1+cpu, 2.1.1+cpu.cxx11.abi, 2.1.1+cu118, 2.1.1+cu121, 2.1.1+cu121.with.pypi.cudnn, 2.1.1+rocm5.5, 2.1.1+rocm5.6, 2.1.2, 2.1.2+cpu, 2.1.2+cpu.cxx11.abi, 2.1.2+cu118, 2.1.2+cu121, 2.1.2+cu121.with.pypi.cudnn, 2.1.2+rocm5.5, 2.1.2+rocm5.6, 2.2.0, 2.2.0+cpu, 2.2.0+cpu.cxx11.abi, 2.2.0+cu118, 2.2.0+cu121, 2.2.0+rocm5.6, 2.2.0+rocm5.7, 2.2.1, 2.2.1+cpu, 2.2.1+cpu.cxx11.abi, 2.2.1+cu118, 2.2.1+cu121, 2.2.1+rocm5.6, 2.2.1+rocm5.7, 2.2.2, 2.2.2+cpu, 2.2.2+cpu.cxx11.abi, 2.2.2+cu118, 2.2.2+cu121, 2.2.2+rocm5.6, 2.2.2+rocm5.7, 2.3.0, 2.3.0+cpu, 2.3.0+cpu.cxx11.abi, 2.3.0+cu118, 2.3.0+cu121, 2.3.0+rocm5.7, 2.3.0+rocm6.0, 2.3.1, 2.3.1+cpu, 2.3.1+cpu.cxx11.abi, 2.3.1+cu118, 2.3.1+cu121, 2.3.1+rocm5.7, 2.3.1+rocm6.0, 2.4.0, 2.4.1)\u001b[0m\u001b[31m\n","\u001b[0m\u001b[31mERROR: No matching distribution found for torch==1.7.1+cu101\u001b[0m\u001b[31m\n","\u001b[0mRequirement already satisfied: sentencepiece in /usr/local/lib/python3.10/dist-packages (0.1.99)\n","Requirement already satisfied: pandas in /usr/local/lib/python3.10/dist-packages (2.1.4)\n","Requirement already satisfied: numpy<2,>=1.22.4 in /usr/local/lib/python3.10/dist-packages (from pandas) (1.26.4)\n","Requirement already satisfied: python-dateutil>=2.8.2 in /usr/local/lib/python3.10/dist-packages (from pandas) (2.8.2)\n","Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas) (2024.1)\n","Requirement already satisfied: tzdata>=2022.1 in /usr/local/lib/python3.10/dist-packages (from pandas) (2024.1)\n","Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.10/dist-packages (from python-dateutil>=2.8.2->pandas) (1.16.0)\n"]}]},{"cell_type":"code","source":["# If there's a GPU available...\n","if torch.cuda.is_available():\n"," # Tell PyTorch to use the GPU.\n"," device = torch.device(\"cuda\")\n"," print('There are %d GPU(s) available.' 
% torch.cuda.device_count())\n"," print('We will use the GPU:', torch.cuda.get_device_name(0))\n","# If not...\n","else:\n"," print('No GPU available, using the CPU instead.')\n"," device = torch.device(\"cpu\")"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"AX5bDlixjtoj","executionInfo":{"status":"ok","timestamp":1726182808505,"user_tz":420,"elapsed":368,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"724f3a0e-9b75-4467-ad6f-88db9eb5976a"},"execution_count":9,"outputs":[{"output_type":"stream","name":"stdout","text":["There are 1 GPU(s) available.\n","We will use the GPU: Tesla T4\n"]}]},{"cell_type":"code","source":["#--------------------------------\n","# Transformer parameters\n","#--------------------------------\n","max_seq_length = 64\n","batch_size = 64\n","\n","#--------------------------------\n","# GAN-BERT specific parameters\n","#--------------------------------\n","# number of hidden layers in the generator,\n","# each of the size of the output space\n","num_hidden_layers_g = 1\n","# number of hidden layers in the discriminator,\n","# each of the size of the input space\n","num_hidden_layers_d = 1\n","# size of the generator's input noisy vectors\n","noise_size = 100\n","# dropout to be applied to discriminator's input vectors\n","out_dropout_rate = 0.2\n","\n","# Replicate labeled data to balance poorly represented datasets,\n","# e.g., less than 1% of labeled material\n","apply_balance = True\n","\n","#--------------------------------\n","# Optimization parameters\n","#--------------------------------\n","learning_rate_discriminator = 5e-5\n","learning_rate_generator = 5e-5\n","epsilon = 1e-8\n","num_train_epochs = 10\n","multi_gpu = True\n","# Scheduler\n","apply_scheduler = False\n","warmup_proportion = 0.1\n","# Print\n","print_each_n_step = 10\n","\n","#--------------------------------\n","# Adopted Transformer model\n","#--------------------------------\n","# Since this version is compatible with Hugging Face transformers, you can uncomment\n","# (or add) transformer models compatible with GAN-BERT\n","\n","#model_name = \"bert-base-cased\"\n","model_name = \"bert-base-uncased\"\n","#model_name = \"roberta-base\"\n","#model_name = \"albert-base-v2\"\n","#model_name = \"xlm-roberta-base\"\n","#model_name = \"amazon/bort\"\n","\n","#--------------------------------\n","# Retrieve the TREC QC Dataset\n","#--------------------------------\n","#! 
git clone https://github.com/crux82/ganbert\n","\n","# NOTE: in this setting 50 classes are involved\n","labeled_file = \"/content/ganbert/data/labeled.tsv\"\n","unlabeled_file = \"/content/ganbert/data/unlabeled.tsv\"\n","test_filename = \"/content/ganbert/data/test.tsv\"\n","opspam = \"./data/opspam.txt\" #Don't have\n","chicago_unlab = \"./data/chicago_unlab.txt\" #Don't Have\n","opspam_test = \"./data/opspam_test.txt\" #Don't have\n","review_content = \"/content/drive/MyDrive/Master_Project_2024_JP/yelpZip/YelpZip/reviewContent\"\n","metadata = \"/content/drive/MyDrive/Master_Project_2024_JP/yelpZip/YelpZip/metadata\"\n","outlabsp = \"/content/drive/MyDrive/Master_Project_2024_JP/CSC502 Thomas Project/Yelp_dataset_Sirish/outlabsp.txt\"\n","outrevsp = \"/content/drive/MyDrive/Master_Project_2024_JP/CSC502 Thomas Project/Yelp_dataset_Sirish/outrevsp.txt\"\n","#review_content = \"./Yelp/YelpNYC/reviewContent\"\n","#metadata = \"./Yelp/YelpNYC/metadata\""],"metadata":{"id":"zL9XFZNmjxOA","executionInfo":{"status":"ok","timestamp":1726182970577,"user_tz":420,"elapsed":402,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}}},"execution_count":6,"outputs":[]},{"cell_type":"code","source":["transformer = AutoModel.from_pretrained(model_name)\n","tokenizer = AutoTokenizer.from_pretrained(model_name)"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"j2PYlfekj0Vt","executionInfo":{"status":"ok","timestamp":1726184040066,"user_tz":420,"elapsed":1407,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"e27e42d1-4e84-438a-fc07-f1b8d4173fe1","collapsed":true},"execution_count":29,"outputs":[{"output_type":"stream","name":"stderr","text":["loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--bert-base-uncased/snapshots/86b5e0934494bd15c9632b12f734a8a67f723594/config.json\n","Model config BertConfig {\n"," \"_name_or_path\": \"bert-base-uncased\",\n"," \"architectures\": [\n"," \"BertForMaskedLM\"\n"," ],\n"," \"attention_probs_dropout_prob\": 0.1,\n"," \"classifier_dropout\": null,\n"," \"gradient_checkpointing\": false,\n"," \"hidden_act\": \"gelu\",\n"," \"hidden_dropout_prob\": 0.1,\n"," \"hidden_size\": 768,\n"," \"initializer_range\": 0.02,\n"," \"intermediate_size\": 3072,\n"," \"layer_norm_eps\": 1e-12,\n"," \"max_position_embeddings\": 512,\n"," \"model_type\": \"bert\",\n"," \"num_attention_heads\": 12,\n"," \"num_hidden_layers\": 12,\n"," \"pad_token_id\": 0,\n"," \"position_embedding_type\": \"absolute\",\n"," \"transformers_version\": \"4.44.2\",\n"," \"type_vocab_size\": 2,\n"," \"use_cache\": true,\n"," \"vocab_size\": 30522\n","}\n","\n","loading weights file model.safetensors from cache at /root/.cache/huggingface/hub/models--bert-base-uncased/snapshots/86b5e0934494bd15c9632b12f734a8a67f723594/model.safetensors\n","Some weights of the model checkpoint at bert-base-uncased were not used when initializing BertModel: ['cls.predictions.bias', 'cls.predictions.transform.LayerNorm.bias', 'cls.predictions.transform.LayerNorm.weight', 'cls.predictions.transform.dense.bias', 'cls.predictions.transform.dense.weight', 'cls.seq_relationship.bias', 'cls.seq_relationship.weight']\n","- This IS expected if you are initializing BertModel from the checkpoint of a model trained on another task or with another architecture (e.g. 
initializing a BertForSequenceClassification model from a BertForPreTraining model).\n","- This IS NOT expected if you are initializing BertModel from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n","All the weights of BertModel were initialized from the model checkpoint at bert-base-uncased.\n","If your task is similar to the task the model of the checkpoint was trained on, you can already use BertModel for predictions without further training.\n","loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--bert-base-uncased/snapshots/86b5e0934494bd15c9632b12f734a8a67f723594/config.json\n","[... Model config BertConfig dump repeated (identical to the one above) ...]\n","loading file vocab.txt from cache at /root/.cache/huggingface/hub/models--bert-base-uncased/snapshots/86b5e0934494bd15c9632b12f734a8a67f723594/vocab.txt\n","loading file tokenizer.json from cache at /root/.cache/huggingface/hub/models--bert-base-uncased/snapshots/86b5e0934494bd15c9632b12f734a8a67f723594/tokenizer.json\n","loading file added_tokens.json from cache at None\n","loading file special_tokens_map.json from cache at None\n","loading file tokenizer_config.json from cache at /root/.cache/huggingface/hub/models--bert-base-uncased/snapshots/86b5e0934494bd15c9632b12f734a8a67f723594/tokenizer_config.json\n","loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--bert-base-uncased/snapshots/86b5e0934494bd15c9632b12f734a8a67f723594/config.json\n","[... Model config BertConfig dump repeated ...]\n","/usr/local/lib/python3.10/dist-packages/transformers/tokenization_utils_base.py:1601: FutureWarning: `clean_up_tokenization_spaces` was not set. It will be set to `True` by default. This behavior will be depracted in transformers v4.45, and will be then set to `False` by default. 
For more details check this issue: https://github.com/huggingface/transformers/issues/31884\n"," warnings.warn(\n"]}]},{"cell_type":"code","source":["import random\n","import pickle\n","import csv\n","import json\n","import pandas as pd\n","\n","# Function to convert a list to a string\n","def listToString(s):\n"," # initialize an empty string\n"," str1 = \"\"\n"," # traverse in the string\n"," for ele in s:\n"," str1 += ele\n"," # return string\n"," return str1\n","\n","\n","# Function to create a list from the dataset\n","def get_lines(input_file):\n"," \"\"\"Creates examples for the training and dev sets.\"\"\"\n"," data_list = []\n","\n"," with open(input_file, 'r', encoding=\"utf-8\") as f:\n"," contents = f.read()\n"," file_as_list = contents.splitlines()\n"," for line in file_as_list[:]:\n"," data = listToString(line)\n"," data_list.append(data)\n"," f.close()\n","\n"," return data_list\n","\n","def format_time(elapsed):\n"," '''\n"," Takes a time in seconds and returns a string hh:mm:ss\n"," '''\n"," # Round to the nearest second.\n"," elapsed_rounded = int(round((elapsed)))\n"," # Format as hh:mm:ss\n"," return str(datetime.timedelta(seconds=elapsed_rounded))"],"metadata":{"id":"LdXcUiCBG-LX","executionInfo":{"status":"ok","timestamp":1726182812262,"user_tz":420,"elapsed":2,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}}},"execution_count":12,"outputs":[]},{"cell_type":"markdown","source":["##Import Data\n","Test Dataset\n","\n","Real Dataset\n","\n","Fake Dataset\n","\n"],"metadata":{"id":"R18blulvElkA"}},{"cell_type":"code","source":["label_list = [\"1\", \"0\"]\n","label_list.append('UNL')\n","label_list"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"AT3IUZeam857","executionInfo":{"status":"ok","timestamp":1726182896397,"user_tz":420,"elapsed":388,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"7edda63b-ca64-429f-8ae3-4991b024cf6b"},"execution_count":3,"outputs":[{"output_type":"execute_result","data":{"text/plain":["['1', '0', 'UNL']"]},"metadata":{},"execution_count":3}]},{"cell_type":"code","source":["#For now, use the same dataset from the GAN Bert Notebook for consistency.\n","with open('/content/drive/MyDrive/Master_Project_2024_JP/CSC502 Thomas Project/models/GANBERT/data/yelp_test_ns_400.pkl', 'rb') as f:\n"," test_examples = pickle.load(f)\n","\n","with open('/content/drive/MyDrive/Master_Project_2024_JP/CSC502 Thomas Project/models/GANBERT/data/yelp_unlabeled_90_ns_400.pkl', 'rb') as f:\n"," unlabeled_examples = pickle.load(f)\n","\n","with open('/content/drive/MyDrive/Master_Project_2024_JP/CSC502 Thomas Project/models/GANBERT/data/yelp_labeled_90_ns_400.pkl', 'rb') as f:\n"," labeled_examples = pickle.load(f)"],"metadata":{"id":"SOc9N67rGDGe","executionInfo":{"status":"ok","timestamp":1726182903099,"user_tz":420,"elapsed":5443,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}}},"execution_count":4,"outputs":[]},{"cell_type":"code","source":["#For now, use the same dataset from the GAN Bert Notebook for consistency.\n","#with open('/content/drive/MyDrive/Master_Project_2024_JP/CSC502 Thomas Project/models/GANBERT/data/yelp_test_s_50.pkl', 'rb') as f:\n","# test_examples = pickle.load(f)\n","#\n","#with open('/content/drive/MyDrive/Master_Project_2024_JP/CSC502 Thomas Project/models/GANBERT/data/yelp_unlabeled_90_s_50.pkl', 'rb') as f:\n","# unlabeled_examples = pickle.load(f)\n","#\n","#with open('/content/drive/MyDrive/Master_Project_2024_JP/CSC502 Thomas 
Project/models/GANBERT/data/yelp_labeled_90_s_50.pkl', 'rb') as f:\n","# labeled_examples = pickle.load(f)"],"metadata":{"id":"PDyXiAxa2Gp3"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["len(test_examples)"],"metadata":{"id":"-pFwphLWHfTJ","colab":{"base_uri":"https://localhost:8080/"},"executionInfo":{"status":"ok","timestamp":1725587168153,"user_tz":420,"elapsed":264,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"1e0f3279-4f43-4b8b-ab41-549ca14959e4","collapsed":true},"execution_count":null,"outputs":[{"output_type":"execute_result","data":{"text/plain":["160"]},"metadata":{},"execution_count":7}]},{"cell_type":"code","source":["test_examples[1]"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"Nxk9xAzjHhLQ","executionInfo":{"status":"ok","timestamp":1725587169442,"user_tz":420,"elapsed":1,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"db26d153-fefd-4c85-e79b-c89d15ca8b88","collapsed":true},"execution_count":null,"outputs":[{"output_type":"execute_result","data":{"text/plain":["(\"We chose to stay at the Hilton Chicago because it was in such a centralized location- everything that our family wanted to do in town was located so close! What I didn't expect was for the beds to be so comfortable. I can't remember when I got a better night's sleep. The staff was very friendly and the hotel grounds were impeccably kept. We'll be returning to the Hilton Chicago the next time we're in town!\",\n"," '1')"]},"metadata":{},"execution_count":8}]},{"cell_type":"code","source":["len(labeled_examples)"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"_tWEmFp7Hu3s","executionInfo":{"status":"ok","timestamp":1725587171404,"user_tz":420,"elapsed":278,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"4d124b36-d6fb-409c-f359-f0e18bc72c1c","collapsed":true},"execution_count":null,"outputs":[{"output_type":"execute_result","data":{"text/plain":["320"]},"metadata":{},"execution_count":9}]},{"cell_type":"code","source":["labeled_examples[1]"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"HTGjXV7NHz2f","executionInfo":{"status":"ok","timestamp":1725587173405,"user_tz":420,"elapsed":435,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"d417da27-fa33-4887-9de6-0a81d9469bb3","collapsed":true},"execution_count":null,"outputs":[{"output_type":"execute_result","data":{"text/plain":["(\"The Omni Chicago really delivers on all fronts, from the spaciousness of the rooms to the helpful staff to the prized location on Michigan Avenue. While this address in Chicago requires a high level of quality, the Omni delivers. Check in for myself and a whole group of people with me was under 3 minutes, the staff had plentiful recommendations for dining and events, and the rooms are some of the largest you'll find at this price range in Chicago. Even the 'standard' room has a separate living area and work desk. The fitness center has free weights, weight machines, and two rows of cardio equipment. I shared the room with 7 others and did not feel cramped in any way! All in all, a great property! 
\",\n"," '0')"]},"metadata":{},"execution_count":10}]},{"cell_type":"code","source":["len(unlabeled_examples)"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"-VLJlrHhHtm1","executionInfo":{"status":"ok","timestamp":1725587175226,"user_tz":420,"elapsed":291,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"7ef32f29-6bcc-484f-fa34-7011b7bd9a2d","collapsed":true},"execution_count":null,"outputs":[{"output_type":"execute_result","data":{"text/plain":["320"]},"metadata":{},"execution_count":11}]},{"cell_type":"code","source":["unlabeled_examples[1]"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"SoRgl8D7HxOh","executionInfo":{"status":"ok","timestamp":1725587177029,"user_tz":420,"elapsed":560,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"28bab758-a3c2-4697-8d12-dc9e5ddff369","collapsed":true},"execution_count":null,"outputs":[{"output_type":"execute_result","data":{"text/plain":["(\"The Conrad Chicago was by far the best experience I have ever had in travel leisure. Not only was the room beautifully decorated and very comfortable, but the food in the restaurant was exquisite. Not to mention the perfect service from the resort's employees! I was very impressed by how quick and caring the staff was of my own personal needs. I'm naturally a fitness freak, so I was pleased to learn of their massive fitness area on the 11th floor. Best of all, it was available to guests at any time, day or night. The resort was conveniently located by a large shopping area, so it wasn't a far walk to scope out the shops. The most memorable part of my stay was looking out at the city after dark and seeing how gorgeous Chicago looks all lit up. It was so relaxing just watching the city come to life. I was very disappointed when I had to pack and leave. Any time that I make my way back to Chicago, you better believe I will be staying at the Conrad Chicago. I recommend this resort to everyone I know who does a lot of traveling.\",\n"," 'UNL')"]},"metadata":{},"execution_count":12}]},{"cell_type":"markdown","source":["##Just for testing the Geo-entities in each dataset.\n","Lets us view each geo entity from the sentence."],"metadata":{"id":"qur3E2VtPwj5"}},{"cell_type":"code","source":["#MODIFIED VERSION. 
{"cell_type":"markdown","source":["##Define function to get BERT embeddings\n"],"metadata":{"id":"hRw87KJ-NFxb"}},{"cell_type":"code","source":["#ORIGINAL VERSION: mean-pools the last hidden state into one 768-dim vector per text.\n","#from transformers import BertTokenizer, BertModel\n","#import torch\n","#\n","## Load the BERT tokenizer and model\n","#bertTokenizer = BertTokenizer.from_pretrained('bert-base-uncased')\n","#bertModel = BertModel.from_pretrained('bert-base-uncased')\n","#\n","## Initialize list for ordered embeddings\n","#ordered_embeddings = []\n","#\n","## Function to get BERT embeddings\n","#def get_bert_embedding(text):\n","# inputs = bertTokenizer(text, return_tensors='pt', truncation=True, max_length=512)\n","# outputs = bertModel(**inputs)\n","# return outputs.last_hidden_state.mean(dim=1).detach().numpy() # shape (1, 768)\n"],"metadata":{"collapsed":true,"id":"Aksb3Wh3PRWP"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["#TESTING NEW METHODOLOGY FOR RICHER CONTEXT: keeps one embedding per token instead of mean-pooling.\n","#from transformers import BertTokenizer, BertModel\n","#import torch\n","#\n","## Load the BERT tokenizer and model\n","#bertTokenizer = BertTokenizer.from_pretrained('bert-base-uncased')\n","#bertModel = BertModel.from_pretrained('bert-base-uncased')\n","#\n","## Initialize list for ordered embeddings\n","#ordered_embeddings = []\n","#\n","## Function to get BERT embeddings for an entire review\n","#def get_bert_embedding(review):\n","# with torch.no_grad(): # Disable gradient tracking\n","# inputs = bertTokenizer(review, return_tensors='pt', padding='max_length', truncation=True, max_length=512)\n","#\n","# # Extract the attention mask and input IDs\n","# input_ids = inputs['input_ids']\n","# attention_mask = inputs['attention_mask']\n","#\n","# # Get the BERT embeddings\n","# outputs = bertModel(input_ids=input_ids, attention_mask=attention_mask)\n","#\n","# # Extract the last hidden state (embeddings) for all tokens\n","# return outputs.last_hidden_state.squeeze(0).detach().numpy() # shape (512, 768)"],"metadata":{"id":"BQYfhYjYW2CX"},"execution_count":null,"outputs":[]},
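{"cell_type":"markdown","source":["A quick, hedged sketch (added for clarity; not from the original run) contrasting the two commented-out `get_bert_embedding` variants above: mean-pooling yields one 768-dim vector per review, while keeping the full last hidden state yields one 768-dim vector per token. Model and tokenizer names follow the cells above."],"metadata":{}},{"cell_type":"code","source":["#Hedged sketch comparing the two embedding strategies above.\n","from transformers import BertTokenizer, BertModel\n","import torch\n","\n","tok = BertTokenizer.from_pretrained('bert-base-uncased')\n","mdl = BertModel.from_pretrained('bert-base-uncased')\n","\n","with torch.no_grad():\n","    hidden = mdl(**tok(\"The Omni Chicago really delivers.\", return_tensors='pt')).last_hidden_state\n","\n","print(hidden.mean(dim=1).shape) # mean-pooled review vector: torch.Size([1, 768])\n","print(hidden.squeeze(0).shape)  # per-token vectors: torch.Size([num_tokens, 768])\n"],"metadata":{},"execution_count":null,"outputs":[]},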
Phillips","userId":"10136472498761089328"}},"outputId":"a1f29aab-d55c-483f-d96f-c27b84da97bf","collapsed":true},"execution_count":7,"outputs":[{"output_type":"stream","name":"stderr","text":["loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--bert-base-uncased/snapshots/86b5e0934494bd15c9632b12f734a8a67f723594/config.json\n","Model config BertConfig {\n"," \"_name_or_path\": \"bert-base-uncased\",\n"," \"architectures\": [\n"," \"BertForMaskedLM\"\n"," ],\n"," \"attention_probs_dropout_prob\": 0.1,\n"," \"classifier_dropout\": null,\n"," \"gradient_checkpointing\": false,\n"," \"hidden_act\": \"gelu\",\n"," \"hidden_dropout_prob\": 0.1,\n"," \"hidden_size\": 768,\n"," \"initializer_range\": 0.02,\n"," \"intermediate_size\": 3072,\n"," \"layer_norm_eps\": 1e-12,\n"," \"max_position_embeddings\": 512,\n"," \"model_type\": \"bert\",\n"," \"num_attention_heads\": 12,\n"," \"num_hidden_layers\": 12,\n"," \"pad_token_id\": 0,\n"," \"position_embedding_type\": \"absolute\",\n"," \"transformers_version\": \"4.44.2\",\n"," \"type_vocab_size\": 2,\n"," \"use_cache\": true,\n"," \"vocab_size\": 30522\n","}\n","\n","loading weights file model.safetensors from cache at /root/.cache/huggingface/hub/models--bert-base-uncased/snapshots/86b5e0934494bd15c9632b12f734a8a67f723594/model.safetensors\n","A pretrained model of type `BertModel` contains parameters that have been renamed internally (a few are listed below but more are present in the model):\n","* `bert.embeddings.LayerNorm.gamma` -> `bert.embeddings.LayerNorm.weight`\n","* `bert.encoder.layer.0.attention.output.LayerNorm.gamma` -> `{'bert.embeddings.LayerNorm.gamma': 'bert.embeddings.LayerNorm.weight', 'bert.encoder.layer.0.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.0.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.output.LayerNorm.gamma': {...}, 'cls.predictions.transform.LayerNorm.gamma': {...}}`\n","* `bert.encoder.layer.0.output.LayerNorm.gamma` -> `{'bert.embeddings.LayerNorm.gamma': 'bert.embeddings.LayerNorm.weight', 'bert.encoder.layer.0.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.0.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.output.LayerNorm.gamma': {...}, 
'bert.encoder.layer.10.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.output.LayerNorm.gamma': {...}, 'cls.predictions.transform.LayerNorm.gamma': {...}}`\n","* `bert.encoder.layer.1.attention.output.LayerNorm.gamma` -> `{'bert.embeddings.LayerNorm.gamma': 'bert.embeddings.LayerNorm.weight', 'bert.encoder.layer.0.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.0.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.output.LayerNorm.gamma': {...}, 'cls.predictions.transform.LayerNorm.gamma': {...}}`\n","* `bert.encoder.layer.1.output.LayerNorm.gamma` -> `{'bert.embeddings.LayerNorm.gamma': 'bert.embeddings.LayerNorm.weight', 'bert.encoder.layer.0.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.0.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.gamma': {...}, 
'bert.encoder.layer.2.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.output.LayerNorm.gamma': {...}, 'cls.predictions.transform.LayerNorm.gamma': {...}}`\n","* `bert.encoder.layer.10.attention.output.LayerNorm.gamma` -> `{'bert.embeddings.LayerNorm.gamma': 'bert.embeddings.LayerNorm.weight', 'bert.encoder.layer.0.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.0.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.output.LayerNorm.gamma': {...}, 'cls.predictions.transform.LayerNorm.gamma': {...}}`\n","* `bert.encoder.layer.10.output.LayerNorm.gamma` -> `{'bert.embeddings.LayerNorm.gamma': 'bert.embeddings.LayerNorm.weight', 'bert.encoder.layer.0.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.0.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.output.LayerNorm.gamma': {...}, 
'bert.encoder.layer.5.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.output.LayerNorm.gamma': {...}, 'cls.predictions.transform.LayerNorm.gamma': {...}}`\n","* `bert.encoder.layer.11.attention.output.LayerNorm.gamma` -> `{'bert.embeddings.LayerNorm.gamma': 'bert.embeddings.LayerNorm.weight', 'bert.encoder.layer.0.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.0.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.output.LayerNorm.gamma': {...}, 'cls.predictions.transform.LayerNorm.gamma': {...}}`\n","* `bert.encoder.layer.11.output.LayerNorm.gamma` -> `{'bert.embeddings.LayerNorm.gamma': 'bert.embeddings.LayerNorm.weight', 'bert.encoder.layer.0.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.0.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.gamma': {...}, 
'bert.encoder.layer.7.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.output.LayerNorm.gamma': {...}, 'cls.predictions.transform.LayerNorm.gamma': {...}}`\n","* `bert.encoder.layer.2.attention.output.LayerNorm.gamma` -> `{'bert.embeddings.LayerNorm.gamma': 'bert.embeddings.LayerNorm.weight', 'bert.encoder.layer.0.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.0.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.output.LayerNorm.gamma': {...}, 'cls.predictions.transform.LayerNorm.gamma': {...}}`\n","* `bert.encoder.layer.2.output.LayerNorm.gamma` -> `{'bert.embeddings.LayerNorm.gamma': 'bert.embeddings.LayerNorm.weight', 'bert.encoder.layer.0.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.0.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.output.LayerNorm.gamma': {...}, 'cls.predictions.transform.LayerNorm.gamma': 
{...}}`\n","* `bert.encoder.layer.3.attention.output.LayerNorm.gamma` -> `{'bert.embeddings.LayerNorm.gamma': 'bert.embeddings.LayerNorm.weight', 'bert.encoder.layer.0.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.0.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.output.LayerNorm.gamma': {...}, 'cls.predictions.transform.LayerNorm.gamma': {...}}`\n","* `bert.encoder.layer.3.output.LayerNorm.gamma` -> `{'bert.embeddings.LayerNorm.gamma': 'bert.embeddings.LayerNorm.weight', 'bert.encoder.layer.0.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.0.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.output.LayerNorm.gamma': {...}, 'cls.predictions.transform.LayerNorm.gamma': {...}}`\n","* `bert.encoder.layer.4.attention.output.LayerNorm.gamma` -> `{'bert.embeddings.LayerNorm.gamma': 'bert.embeddings.LayerNorm.weight', 'bert.encoder.layer.0.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.0.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.gamma': {...}, 
'bert.encoder.layer.1.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.output.LayerNorm.gamma': {...}, 'cls.predictions.transform.LayerNorm.gamma': {...}}`\n","* `bert.encoder.layer.4.output.LayerNorm.gamma` -> `{'bert.embeddings.LayerNorm.gamma': 'bert.embeddings.LayerNorm.weight', 'bert.encoder.layer.0.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.0.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.output.LayerNorm.gamma': {...}, 'cls.predictions.transform.LayerNorm.gamma': {...}}`\n","* `bert.encoder.layer.5.attention.output.LayerNorm.gamma` -> `{'bert.embeddings.LayerNorm.gamma': 'bert.embeddings.LayerNorm.weight', 'bert.encoder.layer.0.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.0.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.output.LayerNorm.gamma': {...}, 
'bert.encoder.layer.2.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.output.LayerNorm.gamma': {...}, 'cls.predictions.transform.LayerNorm.gamma': {...}}`\n","* `bert.encoder.layer.5.output.LayerNorm.gamma` -> `{'bert.embeddings.LayerNorm.gamma': 'bert.embeddings.LayerNorm.weight', 'bert.encoder.layer.0.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.0.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.output.LayerNorm.gamma': {...}, 'cls.predictions.transform.LayerNorm.gamma': {...}}`\n","* `bert.encoder.layer.6.attention.output.LayerNorm.gamma` -> `{'bert.embeddings.LayerNorm.gamma': 'bert.embeddings.LayerNorm.weight', 'bert.encoder.layer.0.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.0.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.gamma': {...}, 
'bert.encoder.layer.4.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.output.LayerNorm.gamma': {...}, 'cls.predictions.transform.LayerNorm.gamma': {...}}`\n","* `bert.encoder.layer.6.output.LayerNorm.gamma` -> `{'bert.embeddings.LayerNorm.gamma': 'bert.embeddings.LayerNorm.weight', 'bert.encoder.layer.0.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.0.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.output.LayerNorm.gamma': {...}, 'cls.predictions.transform.LayerNorm.gamma': {...}}`\n","* `bert.encoder.layer.7.attention.output.LayerNorm.gamma` -> `{'bert.embeddings.LayerNorm.gamma': 'bert.embeddings.LayerNorm.weight', 'bert.encoder.layer.0.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.0.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.output.LayerNorm.gamma': {...}, 
'bert.encoder.layer.7.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.output.LayerNorm.gamma': {...}, 'cls.predictions.transform.LayerNorm.gamma': {...}}`\n","* `bert.encoder.layer.7.output.LayerNorm.gamma` -> `{'bert.embeddings.LayerNorm.gamma': 'bert.embeddings.LayerNorm.weight', 'bert.encoder.layer.0.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.0.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.output.LayerNorm.gamma': {...}, 'cls.predictions.transform.LayerNorm.gamma': {...}}`\n","* `bert.encoder.layer.8.attention.output.LayerNorm.gamma` -> `{'bert.embeddings.LayerNorm.gamma': 'bert.embeddings.LayerNorm.weight', 'bert.encoder.layer.0.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.0.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.gamma': {...}, 
'bert.encoder.layer.9.output.LayerNorm.gamma': {...}, 'cls.predictions.transform.LayerNorm.gamma': {...}}`\n","* `bert.encoder.layer.8.output.LayerNorm.gamma` -> `{'bert.embeddings.LayerNorm.gamma': 'bert.embeddings.LayerNorm.weight', 'bert.encoder.layer.0.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.0.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.output.LayerNorm.gamma': {...}, 'cls.predictions.transform.LayerNorm.gamma': {...}}`\n","* `bert.encoder.layer.9.attention.output.LayerNorm.gamma` -> `{'bert.embeddings.LayerNorm.gamma': 'bert.embeddings.LayerNorm.weight', 'bert.encoder.layer.0.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.0.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.output.LayerNorm.gamma': {...}, 'cls.predictions.transform.LayerNorm.gamma': {...}}`\n","* `bert.encoder.layer.9.output.LayerNorm.gamma` -> `{'bert.embeddings.LayerNorm.gamma': 'bert.embeddings.LayerNorm.weight', 'bert.encoder.layer.0.attention.output.LayerNorm.gamma': {...}, 
'bert.encoder.layer.0.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.output.LayerNorm.gamma': {...}, 'cls.predictions.transform.LayerNorm.gamma': {...}}`\n","* `cls.predictions.transform.LayerNorm.gamma` -> `{'bert.embeddings.LayerNorm.gamma': 'bert.embeddings.LayerNorm.weight', 'bert.encoder.layer.0.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.0.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.1.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.10.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.11.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.2.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.3.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.4.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.5.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.6.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.7.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.8.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.gamma': {...}, 'bert.encoder.layer.9.output.LayerNorm.gamma': {...}, 'cls.predictions.transform.LayerNorm.gamma': {...}}`\n","* `bert.embeddings.LayerNorm.beta` -> `bert.embeddings.LayerNorm.bias`\n","* `bert.encoder.layer.0.attention.output.LayerNorm.beta` -> `{'bert.embeddings.LayerNorm.beta': 'bert.embeddings.LayerNorm.bias', 'bert.encoder.layer.0.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.0.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.beta': {...}, 
'bert.encoder.layer.10.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.output.LayerNorm.beta': {...}, 'cls.predictions.transform.LayerNorm.beta': {...}}`\n","* `bert.encoder.layer.0.output.LayerNorm.beta` -> `{'bert.embeddings.LayerNorm.beta': 'bert.embeddings.LayerNorm.bias', 'bert.encoder.layer.0.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.0.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.output.LayerNorm.beta': {...}, 'cls.predictions.transform.LayerNorm.beta': {...}}`\n","* `bert.encoder.layer.1.attention.output.LayerNorm.beta` -> `{'bert.embeddings.LayerNorm.beta': 'bert.embeddings.LayerNorm.bias', 'bert.encoder.layer.0.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.0.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.output.LayerNorm.beta': 
{...}, 'bert.encoder.layer.4.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.output.LayerNorm.beta': {...}, 'cls.predictions.transform.LayerNorm.beta': {...}}`\n","* `bert.encoder.layer.1.output.LayerNorm.beta` -> `{'bert.embeddings.LayerNorm.beta': 'bert.embeddings.LayerNorm.bias', 'bert.encoder.layer.0.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.0.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.output.LayerNorm.beta': {...}, 'cls.predictions.transform.LayerNorm.beta': {...}}`\n","* `bert.encoder.layer.10.attention.output.LayerNorm.beta` -> `{'bert.embeddings.LayerNorm.beta': 'bert.embeddings.LayerNorm.bias', 'bert.encoder.layer.0.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.0.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.output.LayerNorm.beta': {...}, 
'bert.encoder.layer.7.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.output.LayerNorm.beta': {...}, 'cls.predictions.transform.LayerNorm.beta': {...}}`\n","* `bert.encoder.layer.10.output.LayerNorm.beta` -> `{'bert.embeddings.LayerNorm.beta': 'bert.embeddings.LayerNorm.bias', 'bert.encoder.layer.0.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.0.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.output.LayerNorm.beta': {...}, 'cls.predictions.transform.LayerNorm.beta': {...}}`\n","* `bert.encoder.layer.11.attention.output.LayerNorm.beta` -> `{'bert.embeddings.LayerNorm.beta': 'bert.embeddings.LayerNorm.bias', 'bert.encoder.layer.0.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.0.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.output.LayerNorm.beta': {...}, 'cls.predictions.transform.LayerNorm.beta': 
{...}}`\n","* `bert.encoder.layer.11.output.LayerNorm.beta` -> `{'bert.embeddings.LayerNorm.beta': 'bert.embeddings.LayerNorm.bias', 'bert.encoder.layer.0.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.0.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.output.LayerNorm.beta': {...}, 'cls.predictions.transform.LayerNorm.beta': {...}}`\n","* `bert.encoder.layer.2.attention.output.LayerNorm.beta` -> `{'bert.embeddings.LayerNorm.beta': 'bert.embeddings.LayerNorm.bias', 'bert.encoder.layer.0.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.0.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.output.LayerNorm.beta': {...}, 'cls.predictions.transform.LayerNorm.beta': {...}}`\n","* `bert.encoder.layer.2.output.LayerNorm.beta` -> `{'bert.embeddings.LayerNorm.beta': 'bert.embeddings.LayerNorm.bias', 'bert.encoder.layer.0.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.0.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.output.LayerNorm.beta': {...}, 
'bert.encoder.layer.10.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.output.LayerNorm.beta': {...}, 'cls.predictions.transform.LayerNorm.beta': {...}}`\n","* `bert.encoder.layer.3.attention.output.LayerNorm.beta` -> `{'bert.embeddings.LayerNorm.beta': 'bert.embeddings.LayerNorm.bias', 'bert.encoder.layer.0.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.0.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.output.LayerNorm.beta': {...}, 'cls.predictions.transform.LayerNorm.beta': {...}}`\n","* `bert.encoder.layer.3.output.LayerNorm.beta` -> `{'bert.embeddings.LayerNorm.beta': 'bert.embeddings.LayerNorm.bias', 'bert.encoder.layer.0.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.0.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.output.LayerNorm.beta': {...}, 
'bert.encoder.layer.3.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.output.LayerNorm.beta': {...}, 'cls.predictions.transform.LayerNorm.beta': {...}}`\n","* `bert.encoder.layer.4.attention.output.LayerNorm.beta` -> `{'bert.embeddings.LayerNorm.beta': 'bert.embeddings.LayerNorm.bias', 'bert.encoder.layer.0.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.0.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.output.LayerNorm.beta': {...}, 'cls.predictions.transform.LayerNorm.beta': {...}}`\n","* `bert.encoder.layer.4.output.LayerNorm.beta` -> `{'bert.embeddings.LayerNorm.beta': 'bert.embeddings.LayerNorm.bias', 'bert.encoder.layer.0.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.0.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.output.LayerNorm.beta': {...}, 
'bert.encoder.layer.6.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.output.LayerNorm.beta': {...}, 'cls.predictions.transform.LayerNorm.beta': {...}}`\n","* `bert.encoder.layer.5.attention.output.LayerNorm.beta` -> `{'bert.embeddings.LayerNorm.beta': 'bert.embeddings.LayerNorm.bias', 'bert.encoder.layer.0.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.0.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.output.LayerNorm.beta': {...}, 'cls.predictions.transform.LayerNorm.beta': {...}}`\n","* `bert.encoder.layer.5.output.LayerNorm.beta` -> `{'bert.embeddings.LayerNorm.beta': 'bert.embeddings.LayerNorm.bias', 'bert.encoder.layer.0.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.0.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.output.LayerNorm.beta': {...}, 
'bert.encoder.layer.9.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.output.LayerNorm.beta': {...}, 'cls.predictions.transform.LayerNorm.beta': {...}}`\n","* `bert.encoder.layer.6.attention.output.LayerNorm.beta` -> `{'bert.embeddings.LayerNorm.beta': 'bert.embeddings.LayerNorm.bias', 'bert.encoder.layer.0.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.0.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.output.LayerNorm.beta': {...}, 'cls.predictions.transform.LayerNorm.beta': {...}}`\n","* `bert.encoder.layer.6.output.LayerNorm.beta` -> `{'bert.embeddings.LayerNorm.beta': 'bert.embeddings.LayerNorm.bias', 'bert.encoder.layer.0.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.0.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.output.LayerNorm.beta': {...}, 'cls.predictions.transform.LayerNorm.beta': {...}}`\n","* `bert.encoder.layer.7.attention.output.LayerNorm.beta` -> `{'bert.embeddings.LayerNorm.beta': 'bert.embeddings.LayerNorm.bias', 'bert.encoder.layer.0.attention.output.LayerNorm.beta': {...}, 
'bert.encoder.layer.0.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.output.LayerNorm.beta': {...}, 'cls.predictions.transform.LayerNorm.beta': {...}}`\n","* `bert.encoder.layer.7.output.LayerNorm.beta` -> `{'bert.embeddings.LayerNorm.beta': 'bert.embeddings.LayerNorm.bias', 'bert.encoder.layer.0.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.0.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.output.LayerNorm.beta': {...}, 'cls.predictions.transform.LayerNorm.beta': {...}}`\n","* `bert.encoder.layer.8.attention.output.LayerNorm.beta` -> `{'bert.embeddings.LayerNorm.beta': 'bert.embeddings.LayerNorm.bias', 'bert.encoder.layer.0.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.0.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.output.LayerNorm.beta': 
{...}, 'bert.encoder.layer.2.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.output.LayerNorm.beta': {...}, 'cls.predictions.transform.LayerNorm.beta': {...}}`\n","* `bert.encoder.layer.8.output.LayerNorm.beta` -> `{'bert.embeddings.LayerNorm.beta': 'bert.embeddings.LayerNorm.bias', 'bert.encoder.layer.0.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.0.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.output.LayerNorm.beta': {...}, 'cls.predictions.transform.LayerNorm.beta': {...}}`\n","* `bert.encoder.layer.9.attention.output.LayerNorm.beta` -> `{'bert.embeddings.LayerNorm.beta': 'bert.embeddings.LayerNorm.bias', 'bert.encoder.layer.0.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.0.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.output.LayerNorm.beta': {...}, 
'bert.encoder.layer.5.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.output.LayerNorm.beta': {...}, 'cls.predictions.transform.LayerNorm.beta': {...}}`\n","* `bert.encoder.layer.9.output.LayerNorm.beta` -> `{'bert.embeddings.LayerNorm.beta': 'bert.embeddings.LayerNorm.bias', 'bert.encoder.layer.0.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.0.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.output.LayerNorm.beta': {...}, 'cls.predictions.transform.LayerNorm.beta': {...}}`\n","* `cls.predictions.transform.LayerNorm.beta` -> `{'bert.embeddings.LayerNorm.beta': 'bert.embeddings.LayerNorm.bias', 'bert.encoder.layer.0.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.0.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.1.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.10.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.11.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.2.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.3.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.4.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.5.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.6.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.7.output.LayerNorm.beta': {...}, 'bert.encoder.layer.8.attention.output.LayerNorm.beta': {...}, 
'bert.encoder.layer.8.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.attention.output.LayerNorm.beta': {...}, 'bert.encoder.layer.9.output.LayerNorm.beta': {...}, 'cls.predictions.transform.LayerNorm.beta': {...}}`\n","If you are using a model from the Hub, consider submitting a PR to adjust these weights and help future users.\n","Some weights of the model checkpoint at bert-base-uncased were not used when initializing BertModel: ['cls.predictions.bias', 'cls.predictions.transform.LayerNorm.bias', 'cls.predictions.transform.LayerNorm.weight', 'cls.predictions.transform.dense.bias', 'cls.predictions.transform.dense.weight', 'cls.seq_relationship.bias', 'cls.seq_relationship.weight']\n","- This IS expected if you are initializing BertModel from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n","- This IS NOT expected if you are initializing BertModel from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n","All the weights of BertModel were initialized from the model checkpoint at bert-base-uncased.\n","If your task is similar to the task the model of the checkpoint was trained on, you can already use BertModel for predictions without further training.\n","loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--bert-base-uncased/snapshots/86b5e0934494bd15c9632b12f734a8a67f723594/config.json\n","Model config BertConfig {\n"," \"_name_or_path\": \"bert-base-uncased\",\n"," \"architectures\": [\n"," \"BertForMaskedLM\"\n"," ],\n"," \"attention_probs_dropout_prob\": 0.1,\n"," \"classifier_dropout\": null,\n"," \"gradient_checkpointing\": false,\n"," \"hidden_act\": \"gelu\",\n"," \"hidden_dropout_prob\": 0.1,\n"," \"hidden_size\": 768,\n"," \"initializer_range\": 0.02,\n"," \"intermediate_size\": 3072,\n"," \"layer_norm_eps\": 1e-12,\n"," \"max_position_embeddings\": 512,\n"," \"model_type\": \"bert\",\n"," \"num_attention_heads\": 12,\n"," \"num_hidden_layers\": 12,\n"," \"pad_token_id\": 0,\n"," \"position_embedding_type\": \"absolute\",\n"," \"transformers_version\": \"4.44.2\",\n"," \"type_vocab_size\": 2,\n"," \"use_cache\": true,\n"," \"vocab_size\": 30522\n","}\n","\n","loading file vocab.txt from cache at /root/.cache/huggingface/hub/models--bert-base-uncased/snapshots/86b5e0934494bd15c9632b12f734a8a67f723594/vocab.txt\n","loading file tokenizer.json from cache at /root/.cache/huggingface/hub/models--bert-base-uncased/snapshots/86b5e0934494bd15c9632b12f734a8a67f723594/tokenizer.json\n","loading file added_tokens.json from cache at None\n","loading file special_tokens_map.json from cache at None\n","loading file tokenizer_config.json from cache at /root/.cache/huggingface/hub/models--bert-base-uncased/snapshots/86b5e0934494bd15c9632b12f734a8a67f723594/tokenizer_config.json\n","loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--bert-base-uncased/snapshots/86b5e0934494bd15c9632b12f734a8a67f723594/config.json\n","Model config BertConfig {\n"," \"_name_or_path\": \"bert-base-uncased\",\n"," \"architectures\": [\n"," \"BertForMaskedLM\"\n"," ],\n"," \"attention_probs_dropout_prob\": 0.1,\n"," \"classifier_dropout\": null,\n"," \"gradient_checkpointing\": false,\n"," \"hidden_act\": \"gelu\",\n"," \"hidden_dropout_prob\": 0.1,\n"," \"hidden_size\": 768,\n"," \"initializer_range\": 
"/usr/local/lib/python3.10/dist-packages/transformers/tokenization_utils_base.py:1601: FutureWarning: `clean_up_tokenization_spaces` was not set. It will be set to `True` by default. This behavior will be depracted in transformers v4.45, and will be then set to `False` by default. For more details check this issue: https://github.com/huggingface/transformers/issues/31884\n","  warnings.warn(\n"]}]},
{"cell_type":"code","source":["# Load the BERT tokenizer and model\n","#bertTokenizer = BertTokenizer.from_pretrained('bert-base-uncased')\n","#bertModel = BertModel.from_pretrained('bert-base-uncased')\n","#\n","#max_seq_length = 64\n","#batch_size = 64"],"metadata":{"id":"CAgH50xq56KV"},"execution_count":null,"outputs":[]},
{"cell_type":"code","source":["#Returns the tokenized reviews from the BERT tokenizer as a DataLoader.\n","#Unpack the DataLoader and feed it to the model to get one embedding per review.\n","import math  # needed for the log-based balancing below\n","\n","def generate_Bert_data_loader(input_examples, label_masks, label_map, do_shuffle = False, balance_label_examples = False):\n","  '''\n","  Generate a DataLoader given the input examples, optionally masked if they are\n","  to be considered NOT labeled.\n","  '''\n","  bertModel.eval()\n","  examples = []\n","\n","  # Count the percentage of labeled examples\n","  num_labeled_examples = 0\n","  for label_mask in label_masks:\n","    if label_mask:\n","      num_labeled_examples += 1\n","  label_mask_rate = num_labeled_examples/len(input_examples)\n","\n","  # If required, balance the labeled and unlabeled examples\n","  for index, ex in enumerate(input_examples):\n","    if label_mask_rate == 1 or not balance_label_examples:\n","      examples.append((ex, label_masks[index]))\n","    else:\n","      # Oversample each labeled example (simulates additional labeled examples)\n","      if label_masks[index]:\n","        balance = int(1/label_mask_rate)\n","        balance = int(math.log(balance,2))\n","        if balance < 1:\n","          balance = 1\n","        for b in range(0, int(balance)):\n","          examples.append((ex, label_masks[index]))\n","      else:\n","        examples.append((ex, label_masks[index]))\n","\n","  #-----------------------------------------------\n","  # Generate input examples for the Transformer\n","  #-----------------------------------------------\n","  input_ids = []\n","  input_mask_array = []\n","  label_mask_array = []\n","  label_id_array = []\n","\n","  # Tokenization\n","  for (text, label_mask) in examples:\n","    encoded_sent = bertTokenizer.encode(text[0], add_special_tokens=True, max_length=max_seq_length, padding=\"max_length\", truncation=True)\n","    input_ids.append(encoded_sent)\n","    label_id_array.append(label_map[text[1]])\n","    label_mask_array.append(label_mask)\n","\n","  # Attention masks (to ignore padded input wordpieces)\n","  for sent in input_ids:\n","    att_mask = [int(token_id > 0) for token_id in sent]\n","    input_mask_array.append(att_mask)\n","  # Conversion to tensors\n","  input_ids = torch.tensor(input_ids)\n","  input_mask_array = torch.tensor(input_mask_array)\n","  label_id_array = torch.tensor(label_id_array, dtype=torch.long)\n","  label_mask_array = torch.tensor(label_mask_array)\n","\n","  # Building the TensorDataset\n","  dataset = TensorDataset(input_ids, input_mask_array, label_id_array, label_mask_array)\n","\n","  if do_shuffle:\n","    sampler = RandomSampler\n","  else:\n","    sampler = SequentialSampler\n","\n","  # Building the DataLoader\n","  return DataLoader(\n","              dataset,  # The training samples.\n","              sampler = sampler(dataset),\n","              batch_size = 64)  # Train with this batch size."],"metadata":{"id":"-Ad6TxZx1Rnd","executionInfo":{"status":"ok","timestamp":1726184688612,"user_tz":420,"elapsed":366,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}}},"execution_count":38,"outputs":[]},
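{"cell_type":"markdown","source":["A quick way to sanity-check the loader above (a hypothetical smoke test, not part of the original run): feed it a couple of toy (text, label) pairs and confirm the four tensors in a batch have the expected shapes. It assumes the tokenizer/model cells above have been run."],"metadata":{}},
{"cell_type":"code","source":["# Hypothetical smoke test for generate_Bert_data_loader (toy data; names match the cells above)\n","toy_examples = [(\"great product\", \"1\"), (\"terrible service\", \"0\")]\n","toy_masks = np.ones(len(toy_examples), dtype=bool)\n","toy_loader = generate_Bert_data_loader(toy_examples, toy_masks, {\"1\": 0, \"0\": 1}, do_shuffle=False)\n","input_ids, attention_mask, label_ids, label_mask = next(iter(toy_loader))\n","print(input_ids.shape, attention_mask.shape, label_ids.shape, label_mask.shape)\n","# expected: torch.Size([2, 64]) torch.Size([2, 64]) torch.Size([2]) torch.Size([2])"],"metadata":{},"execution_count":null,"outputs":[]},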
{"cell_type":"code","source":["from transformers import BertTokenizer, BertModel\n","import torch\n","import numpy as np\n","from tqdm import tqdm\n","\n","label_map = {}\n","for (i, label) in enumerate(label_list):\n","  label_map[label] = i\n","#------------------------------\n","# Load the train dataset\n","#------------------------------\n","train_examples = labeled_examples\n","#The labeled (train) dataset is assigned a mask set to True\n","train_label_masks = np.ones(len(labeled_examples), dtype=bool)\n","#If unlabeled examples are available\n","if unlabeled_examples:\n","  train_examples = train_examples + unlabeled_examples\n","  #The unlabeled (train) dataset is assigned a mask set to False\n","  tmp_masks = np.zeros(len(unlabeled_examples), dtype=bool)\n","  train_label_masks = np.concatenate([train_label_masks,tmp_masks])\n","\n","# Create labels for the combined dataset (guard against an empty unlabeled set, matching the check above)\n","train_labels = [example[1] for example in labeled_examples] + ([example[1] for example in unlabeled_examples] if unlabeled_examples else [])\n","\n","train_dataloader = generate_Bert_data_loader(train_examples, train_label_masks, label_map, do_shuffle = True, balance_label_examples = apply_balance)\n","\n","#------------------------------\n","# Load the test dataset\n","#------------------------------\n","#The labeled (test) dataset is assigned a mask set to True\n","test_label_masks = np.ones(len(test_examples), dtype=bool)\n","test_labels = [example[1] for example in test_examples]\n","\n","test_dataloader = generate_Bert_data_loader(test_examples, test_label_masks, label_map, do_shuffle = False, balance_label_examples = False)"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"Bm9-est42eWL","executionInfo":{"status":"ok","timestamp":1726187047033,"user_tz":420,"elapsed":1121,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"1c26ebca-9c6c-42d8-dd5b-b9292907d920"},"execution_count":69,"outputs":[{"output_type":"stream","name":"stderr","text":[":57: DeprecationWarning: In future, it will be an error for 'np.bool_' scalars to be interpreted as an index\n","  label_mask_array = torch.tensor(label_mask_array)\n"]}]},
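{"cell_type":"markdown","source":["The extraction function below supports two sentence-embedding strategies: the [CLS] token vector, or a masked mean over all token vectors. The next cell (added for illustration; dummy tensors, not model output) shows the masked-mean arithmetic in isolation."],"metadata":{}},
{"cell_type":"code","source":["# Illustration only: masked mean pooling on dummy tensors (same arithmetic as get_bert_embeddings_from_loader)\n","import torch\n","h = torch.randn(2, 4, 8)                           # [batch, seq_len, hidden]\n","mask = torch.tensor([[1, 1, 0, 0], [1, 1, 1, 0]])  # attention mask; zeros are padding\n","m = mask.unsqueeze(-1).expand(h.size()).float()    # broadcast the mask over the hidden dim\n","pooled = (h * m).sum(1) / m.sum(1).clamp(min=1e-9) # average only over real tokens\n","print(pooled.shape)                                # torch.Size([2, 8])"],"metadata":{},"execution_count":null,"outputs":[]},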
{"cell_type":"code","source":["# Set the device to GPU if available, otherwise CPU\n","device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n","bertModel.to(device)\n","bertModel.eval()  # Ensure the model is in evaluation mode\n","\n","# Function to get BERT embeddings using a DataLoader\n","def get_bert_embeddings_from_loader(dataloader, use_cls_token=False):\n","  embeddings = []\n","\n","  for batch in tqdm(dataloader):  # Use tqdm to show progress\n","    # Unpack the batch from the DataLoader\n","    input_ids, input_masks, labels, label_masks = batch\n","\n","    # Move the input tensors to the device (GPU/CPU)\n","    input_ids = input_ids.to(device)\n","    input_masks = input_masks.to(device)\n","\n","    with torch.no_grad():\n","      # Get the output from BERT\n","      outputs = bertModel(input_ids=input_ids, attention_mask=input_masks)\n","\n","    if use_cls_token:\n","      # Use the [CLS] token for the sentence embedding\n","      batch_embeddings = outputs.last_hidden_state[:, 0, :].detach()  # Shape: [batch_size, hidden_size]\n","    else:\n","      # Use mean pooling for the sentence embedding\n","      mask_expanded = input_masks.unsqueeze(-1).expand(outputs.last_hidden_state.size()).float()\n","      sum_embeddings = torch.sum(outputs.last_hidden_state * mask_expanded, 1)\n","      sum_mask = torch.clamp(mask_expanded.sum(1), min=1e-9)\n","      batch_embeddings = (sum_embeddings / sum_mask).detach()  # Shape: [batch_size, hidden_size]\n","\n","    # Append embeddings for the entire batch\n","    embeddings.extend(batch_embeddings.cpu())\n","\n","  # Return one [hidden_size] tensor per example (clone/detach avoids the torch.tensor-on-tensor warning)\n","  return [embedding.clone().detach() for embedding in embeddings]\n","\n","\n","# Example usage\n","my_train_embeddings = get_bert_embeddings_from_loader(train_dataloader, use_cls_token=True)\n","my_test_embeddings = get_bert_embeddings_from_loader(test_dataloader, use_cls_token=True)\n","\n"],"metadata":{"id":"X4qkRc5HntKu"},"execution_count":null,"outputs":[]},
{"cell_type":"code","source":["my_test_embeddings[0]"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"collapsed":true,"id":"gFMZrOTgsntE","executionInfo":{"status":"ok","timestamp":1726185057631,"user_tz":420,"elapsed":406,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"eb30d14a-df0a-48a6-8504-f5a41eb555da"},"execution_count":46,"outputs":[{"output_type":"execute_result","data":{"text/plain":["tensor([ 2.4728e-01,  5.5689e-02, -6.6209e-02, -2.8055e-01,  4.0048e-01,\n","         ... (760 of the 768 values elided for readability) ...\n","        -2.6209e-01,  4.9786e-01,  1.7458e-01])"]},"metadata":{},"execution_count":46}]},
{"cell_type":"code","source":["my_test_embeddings[0].shape"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"eevyzb12nMmj","executionInfo":{"status":"ok","timestamp":1726185180975,"user_tz":420,"elapsed":2,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"a6280f8b-211f-423f-dbe8-13b71247493e"},"execution_count":49,"outputs":[{"output_type":"execute_result","data":{"text/plain":["torch.Size([768])"]},"metadata":{},"execution_count":49}]},
{"cell_type":"code","source":["import math\n","import torch\n","from torch.utils.data import DataLoader, RandomSampler, SequentialSampler, TensorDataset\n","\n","def generate_my_data_loader(embeddings, labels, label_map, label_masks, do_shuffle=True, balance_label_examples=False):\n","  '''\n","  Generate a DataLoader for the training set using precomputed embeddings.\n","  This is similar to the test data loader but with shuffling and optional balancing.\n","  '''\n","  examples = []\n","\n","  # Count the percentage of labeled examples\n","  num_labeled_examples = sum(label_masks)\n","  label_mask_rate = num_labeled_examples / len(embeddings)\n","\n","  # If required, balance the labeled and unlabeled examples\n","  for index, embedding in enumerate(embeddings):\n","    label = labels[index]\n","    label_mask = label_masks[index]\n","\n","    if label_mask_rate == 1 or not balance_label_examples:\n","      examples.append((embedding, label, label_mask))\n","    else:\n","      if label_mask:\n","        balance = int(1/label_mask_rate)\n","        balance = int(math.log(balance,2))\n","        if balance < 1:\n","          balance = 1\n","        for b in range(0, int(balance)):\n","          examples.append((embedding, label, label_mask))\n","      else:\n","        examples.append((embedding, label, label_mask))\n","\n","  # Prepare the tensors\n","  input_ids = []\n","  label_id_array = []\n","  label_mask_array = []\n","\n","  for (embedding, label, label_mask) in examples:\n","    input_ids.append(embedding)  # Embeddings are already of shape [hidden_size]; no further adjustment needed\n","\n","    label_id_array.append(label_map[label])\n","    label_mask_array.append(label_mask)\n","\n","  # Convert lists to tensors\n","  input_ids = torch.stack(input_ids)  # Shape: [num_examples, hidden_size]\n","  label_id_array = torch.tensor(label_id_array, dtype=torch.long)  # Labels\n","  label_mask_array = torch.tensor(label_mask_array, dtype=torch.bool)  # Label masks\n","\n","  # Build the TensorDataset\n","  dataset = TensorDataset(input_ids, label_id_array, label_mask_array)\n","\n","  # Set up the sampler\n","  sampler = RandomSampler(dataset) if do_shuffle else SequentialSampler(dataset)\n","\n","  # Create the DataLoader\n","  return DataLoader(\n","      dataset,  # The training samples\n","      sampler=sampler,\n","      batch_size=batch_size\n","  )"],"metadata":{"id":"rzJaNm5ztVmk","executionInfo":{"status":"ok","timestamp":1726187212820,"user_tz":420,"elapsed":374,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}}},"execution_count":72,"outputs":[]},
{"cell_type":"code","source":["label_list = [\"1\", \"0\"]\n","label_list.append('UNL')\n","label_list"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"CDq2EHhRvazU","executionInfo":{"status":"ok","timestamp":1726186632684,"user_tz":420,"elapsed":376,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"8b149531-b5fe-4187-9fab-8bd7d488a5c5"},"execution_count":64,"outputs":[{"output_type":"execute_result","data":{"text/plain":["['1', '0', 'UNL']"]},"metadata":{},"execution_count":64}]},
{"cell_type":"code","source":["print(train_label_masks)"],"metadata":{"id":"KBP0YX2twLpp"},"execution_count":null,"outputs":[]},
{"cell_type":"code","source":["print(label_map)"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"4AUZ67EdwP9R","executionInfo":{"status":"ok","timestamp":1726186861879,"user_tz":420,"elapsed":358,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"9ed0f7c1-987d-4d8e-ae77-7f4342006389"},"execution_count":67,"outputs":[{"output_type":"stream","name":"stdout","text":["{'1': 0, '0': 1, 'UNL': 2}\n"]}]},
{"cell_type":"code","source":["label_map = {}\n","for (i, label) in enumerate(label_list):\n","  label_map[label] = i\n","\n","#------------------------------\n","# Combine Embeddings and Load the Train Dataset\n","#------------------------------\n","\n","# Combine labeled and unlabeled embeddings\n","train_embeddings = my_train_embeddings\n","\n","# Generate the train data loader using generate_my_data_loader\n","train_dataloader = generate_my_data_loader(train_embeddings, train_labels, label_map, train_label_masks, do_shuffle=True, balance_label_examples=apply_balance)\n","\n","test_embeddings = my_test_embeddings\n","\n","test_dataloader = generate_my_data_loader(test_embeddings, test_labels, label_map, test_label_masks, do_shuffle=False, balance_label_examples=False)"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"0ZCUdutWuLD1","executionInfo":{"status":"ok","timestamp":1726187223794,"user_tz":420,"elapsed":363,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"501be335-e4ea-4e1e-eaee-0500611ff5da"},"execution_count":73,"outputs":[{"output_type":"stream","name":"stderr","text":[":47: DeprecationWarning: In future, it will be an error for 'np.bool_' scalars to be interpreted as an index\n","  label_mask_array = torch.tensor(label_mask_array, dtype=torch.bool)  # Label masks\n"]}]},
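{"cell_type":"markdown","source":["As a quick check (hypothetical cell, added for illustration): a batch from these embedding loaders should unpack into (embeddings, label_ids, label_masks), with one 768-dimensional embedding per example."],"metadata":{}},
{"cell_type":"code","source":["# Hypothetical sanity check on the embedding-based loaders built above\n","emb_batch, label_batch, mask_batch = next(iter(test_dataloader))\n","print(emb_batch.shape)    # expected: torch.Size([batch_size, 768])\n","print(label_batch.shape)  # expected: torch.Size([batch_size])\n","print(mask_batch.shape)   # expected: torch.Size([batch_size])"],"metadata":{},"execution_count":null,"outputs":[]},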
{"cell_type":"code","source":["from transformers import AutoModel, AutoTokenizer\n","import torch\n","\n","model_name = 'bert-base-uncased'  # the checkpoint shown in the captured output below\n","max_seq_length = 64\n","batch_size = 64\n","\n","# Load the BERT tokenizer and model\n","bertModel = AutoModel.from_pretrained(model_name)\n","bertTokenizer = AutoTokenizer.from_pretrained(model_name)\n","\n","bertModel.eval()\n","\n","# Set the device to GPU if available, otherwise CPU\n","device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n","bertModel.to(device)\n","\n","# Function to get BERT embeddings\n","def get_bert_embedding(texts, use_cls_token=False):\n","  # Tokenize the inputs and move them to the correct device\n","  inputs = bertTokenizer(texts, add_special_tokens=True, return_tensors='pt', truncation=True, max_length=max_seq_length, padding=\"max_length\")\n","  inputs = {key: val.to(device) for key, val in inputs.items()}  # Move inputs to the same device as the model\n","\n","  with torch.no_grad():\n","    outputs = bertModel(**inputs)\n","\n","  if use_cls_token:\n","    # Use the [CLS] token for the sentence embedding\n","    return outputs.last_hidden_state[:, 0, :].detach().cpu().numpy()  # Move the output back to the CPU\n","  else:\n","    # Use mean pooling for the sentence embedding\n","    attention_mask = inputs['attention_mask']\n","    mask_expanded = attention_mask.unsqueeze(-1).expand(outputs.last_hidden_state.size()).float()\n","    sum_embeddings = torch.sum(outputs.last_hidden_state * mask_expanded, 1)\n","    sum_mask = torch.clamp(mask_expanded.sum(1), min=1e-9)\n","    return (sum_embeddings / sum_mask).detach().cpu().numpy()  # Move the result back to the CPU\n","\n","# Example usage\n","#batch_of_reviews = [\"This is a sample review.\", \"Another example review.\", \"A third review for embedding.\"]\n","#embeddings = get_bert_embedding(batch_of_reviews, use_cls_token=True)"],"metadata":{"id":"WjuzQn1DBrhj","colab":{"base_uri":"https://localhost:8080/"},"collapsed":true,"executionInfo":{"status":"ok","timestamp":1726100318498,"user_tz":420,"elapsed":835,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"5f295d9f-a95d-41ae-ddf7-8e4d1b86fe22"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stderr","text":["loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--bert-base-uncased/snapshots/86b5e0934494bd15c9632b12f734a8a67f723594/config.json\n","Model config BertConfig { ... (same configuration as printed earlier; dump truncated) }\n","\n","loading weights file model.safetensors from cache at /root/.cache/huggingface/hub/models--bert-base-uncased/snapshots/86b5e0934494bd15c9632b12f734a8a67f723594/model.safetensors\n","Some weights of the model checkpoint at bert-base-uncased were not used when initializing BertModel: ['cls.predictions.bias', 'cls.predictions.transform.LayerNorm.bias', 'cls.predictions.transform.LayerNorm.weight', 'cls.predictions.transform.dense.bias', 'cls.predictions.transform.dense.weight', 'cls.seq_relationship.bias', 'cls.seq_relationship.weight']\n","- This IS expected if you are initializing BertModel from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n","- This IS NOT expected if you are initializing BertModel from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n","All the weights of BertModel were initialized from the model checkpoint at bert-base-uncased.\n","If your task is similar to the task the model of the checkpoint was trained on, you can already use BertModel for predictions without further training.\n","loading file vocab.txt from cache at /root/.cache/huggingface/hub/models--bert-base-uncased/snapshots/86b5e0934494bd15c9632b12f734a8a67f723594/vocab.txt\n","loading file tokenizer.json from cache at /root/.cache/huggingface/hub/models--bert-base-uncased/snapshots/86b5e0934494bd15c9632b12f734a8a67f723594/tokenizer.json\n","loading file added_tokens.json from cache at None\n","loading file special_tokens_map.json from cache at None\n","loading file tokenizer_config.json from cache at /root/.cache/huggingface/hub/models--bert-base-uncased/snapshots/86b5e0934494bd15c9632b12f734a8a67f723594/tokenizer_config.json\n","(two further identical 'Model config BertConfig' dumps truncated)\n","\n"]}]},
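{"cell_type":"markdown","source":["The `embeddings` inspected in the next cell come from running the example usage above; the uncommented version is repeated here (an illustrative cell mirroring the commented-out lines) so the shape check below is reproducible."],"metadata":{}},
{"cell_type":"code","source":["# Same as the commented-out example usage above, uncommented so `embeddings` exists\n","batch_of_reviews = [\"This is a sample review.\", \"Another example review.\", \"A third review for embedding.\"]\n","embeddings = get_bert_embedding(batch_of_reviews, use_cls_token=True)"],"metadata":{},"execution_count":null,"outputs":[]},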
Phillips","userId":"10136472498761089328"}},"outputId":"e8c0da0c-9959-4d55-815e-17c2ccefdfd7"},"execution_count":null,"outputs":[{"output_type":"execute_result","data":{"text/plain":["(3, 768)"]},"metadata":{},"execution_count":16}]},{"cell_type":"markdown","source":["##Define Functions to get the SpaBERT embedding\n","The following code uses our pre-trained SpaBert model to take spatial data and return the embedding for this"],"metadata":{"id":"dhs3U0gqH_CI"}},{"cell_type":"code","source":["from transformers.models.bert.modeling_bert import BertForMaskedLM\n","from transformers import BertTokenizer\n","from models.spatial_bert_model import SpatialBertConfig\n","from utils.common_utils import load_spatial_bert_pretrained_weights\n","from models.spatial_bert_model import SpatialBertForMaskedLM\n","from models.spatial_bert_model import SpatialBertModel\n","\n","\n","# load dataset we just created\n","data_file_path = '/content/drive/MyDrive/Master_Project_2024_JP/Spacy Notebook/SPABERT_Coordinate_data_combined.json' #This data is the spatial entities along with their neighbor information.\n","pretrained_model = '/content/drive/MyDrive/Master_Project_2024_JP/Spacy Notebook/fine-spabert-base-uncased-finetuned-osm-mn.pth'\n","#pretrained_model = '/content/drive/MyDrive/spaBERT/spabert/notebooks/tutorial_datasets/mlm_mem_keeppos_ep0_iter06000_0.2936.pth'\n","#pretrained_model = '/content/drive/MyDrive/spaBERT/spabert/notebooks/tutorial_datasets/spabert-base-uncased-finetuned-osm-mn.pth'\n","\n","# load bert model and tokenizer\n","bert_model = BertForMaskedLM.from_pretrained('bert-base-uncased')\n","tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')\n","\n","\n","# load pre-trained spabert model and its config\n","config = SpatialBertConfig()\n","config.output_hidden_states = True\n","\n","model = SpatialBertForMaskedLM(config) #Should I be using masked or unmasked for the downstream tasks we are trying to perform?\n","#model = SpatialBertModel(config) #We fine-tuned the Masked version of the model so the weights won't load correctly\n","\n","model.load_state_dict(bert_model.state_dict() , strict = False)\n","\n","pre_trained_model = torch.load(pretrained_model)\n","\n","# load pretrained weights\n","model_keys = model.state_dict()\n","cnt_layers = 0\n","for key in model_keys:\n"," if key in pre_trained_model:\n"," model_keys[key] = pre_trained_model[key]\n"," cnt_layers += 1\n"," else:\n"," print(\"No weight for\", key)\n","print(cnt_layers, 'layers loaded')\n","\n","model.load_state_dict(model_keys)\n","\n","#Select a CPU or GPU\n","device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu')\n","model.to(device)\n","\n","#Set the model to evaluation mode\n","model.eval()"],"metadata":{"id":"U1DhtHhoiIyH"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["!ls /content/drive/MyDrive/spaBERT/spabert/datasets/"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"W6covIWifg2B","executionInfo":{"status":"ok","timestamp":1725052332255,"user_tz":420,"elapsed":680,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"fc07e722-8638-491a-f3e6-e0c8906d5136"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["const.py\t dataset_loader_ver2.py osm_sample_loader.py usgs_os_sample_loader.py\n","dataset_loader.py __init__.py\t\t __pycache__\t\t wikidata_sample_loader.py\n"]}]},{"cell_type":"code","source":["from datasets.osm_sample_loader import PbfMapDataset\n","#from 
"from datasets.dataset_loader_ver2 import SpatialDataset\n","from torch.utils.data import DataLoader\n","\n","# Load data using SpatialDataset\n","spatialDataset = PbfMapDataset(data_file_path = data_file_path,\n"," tokenizer = tokenizer,\n"," max_token_len = 300,\n"," distance_norm_factor = 0.0001,\n"," spatial_dist_fill = 20,\n"," with_type = False,\n"," sep_between_neighbors = False, #Initially False; worth experimenting with later\n"," label_encoder = None, #Initially None; could be set since we do have real/fake review labels\n"," mode = None) #If set to None it will use the full dataset for MLM\n","\n","data_loader = DataLoader(spatialDataset, batch_size=1, num_workers=0, shuffle=False, pin_memory=False, drop_last=True) #Known issue: worker processes may not stop after finishing, so keep num_workers=0 for now"],"metadata":{"id":"00-1ZGpGjnMK"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["for batch in data_loader:\n"," print(batch)\n"," break"],"metadata":{"id":"7oywn007Olmb"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["from tqdm import tqdm\n","\n","# Function to process each entity and get embeddings\n","def process_entity(batch, model, device):\n"," input_ids = batch['masked_input'].to(device)\n"," attention_mask = batch['attention_mask'].to(device)\n"," position_list_x = batch['norm_lng_list'].to(device)\n"," position_list_y = batch['norm_lat_list'].to(device)\n"," sent_position_ids = batch['sent_position_ids'].to(device)\n","\n"," with torch.no_grad():\n"," outputs = model(input_ids=input_ids,\n"," attention_mask=attention_mask,\n"," sent_position_ids=sent_position_ids,\n"," position_list_x=position_list_x,\n"," position_list_y=position_list_y)\n"," #NOTE: we are omitting the pseudo_sentence input here.
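\n"," #\n"," # Added sketch (hedged): downstream cells expect one 768-d vector per entity, so the\n"," # per-token pivot embeddings returned below ([1, pivot_token_len, hidden]) are typically\n"," # pooled into a single vector, e.g.:\n"," # pivot_vector = pivot_embeddings.mean(axis=1).squeeze(0) # mean over pivot tokens -> [hidden]\n"," # TODO: 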
Verify that this is correct\n","\n"," # Extract embeddings\n"," #embeddings = outputs[0] # Extracting the last hidden state from outputs\n"," embeddings = outputs.hidden_states[-1]\n","\n"," pivot_token_len = batch['pivot_token_len'].item()\n"," pivot_embeddings = embeddings[:, :pivot_token_len, :]\n","\n"," return pivot_embeddings.cpu().numpy(), input_ids.cpu().numpy()\n","\n","all_embeddings = []\n","# Process the first 5 rows and print embeddings\n","# NOTE: fix this to make actual batches instead of just one at a time.\n","#for i, batch in enumerate(data_loader):\n","# if i >= 5:\n","# break\n","# embeddings, input_ids = process_entity(batch, model, device)\n","# sequence_length = input_ids.shape[1]\n","#\n","# print(f\"Embeddings for entity {i+1}: {embeddings}\")\n","# print(f\"Shape for entity {i+1}: {embeddings.shape}\")\n","# print(f\"Sequence Length for entity {i+1}: {sequence_length}\")\n","# print(f\"Input IDs for entity {i+1}: {input_ids}\")\n","# print(f\"Decoded Tokens for entity {i+1}: {tokenizer.decode(input_ids[0])}\")\n","# all_embeddings.append(embeddings)\n","\n","\n","#process the entire dataset and store the embeddings (uncomment when ready)\n","all_embeddings = []\n","for batch in tqdm(data_loader):\n"," embeddings = process_entity(batch, model, device)\n"," all_embeddings.append(embeddings)"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"SMySLNvikXco","executionInfo":{"status":"ok","timestamp":1725046815280,"user_tz":420,"elapsed":40044,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"0a69346e-afdf-4746-f835-0217841608c8"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stderr","text":[" 0%| | 0/1513 [00:00:2: FutureWarning: You are using `torch.load` with `weights_only=False` (the current default value), which uses the default pickle module implicitly. It is possible to construct malicious pickle data which will execute arbitrary code during unpickling (See https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). In a future release, the default value for `weights_only` will be flipped to `True`. This limits the functions that could be executed during unpickling. Arbitrary objects will no longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the user via `torch.serialization.add_safe_globals`. We recommend you start setting `weights_only=True` for any use case where you don't have full control of the loaded file. Please open an issue on GitHub for any issues related to this experimental feature.\n"," test_embeddings = torch.load('/content/test_embeddings3.pt')\n",":3: FutureWarning: You are using `torch.load` with `weights_only=False` (the current default value), which uses the default pickle module implicitly. It is possible to construct malicious pickle data which will execute arbitrary code during unpickling (See https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). In a future release, the default value for `weights_only` will be flipped to `True`. This limits the functions that could be executed during unpickling. Arbitrary objects will no longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the user via `torch.serialization.add_safe_globals`. We recommend you start setting `weights_only=True` for any use case where you don't have full control of the loaded file. 
Please open an issue on GitHub for any issues related to this experimental feature.\n"," unlabled_embeddings = torch.load('/content/unlabled_embeddings3.pt')\n",":4: FutureWarning: You are using `torch.load` with `weights_only=False` (the current default value), which uses the default pickle module implicitly. It is possible to construct malicious pickle data which will execute arbitrary code during unpickling (See https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). In a future release, the default value for `weights_only` will be flipped to `True`. This limits the functions that could be executed during unpickling. Arbitrary objects will no longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the user via `torch.serialization.add_safe_globals`. We recommend you start setting `weights_only=True` for any use case where you don't have full control of the loaded file. Please open an issue on GitHub for any issues related to this experimental feature.\n"," labeled_embeddings = torch.load('/content/labled_embeddings3.pt')\n"]}]},{"cell_type":"markdown","source":["##Import Data and create label list"],"metadata":{"id":"qGHkpg80aafB"}},{"cell_type":"code","source":["label_list = [\"1\", \"0\"]\n","label_list.append('UNL')\n","label_list"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"ShSAOUeRIy34","executionInfo":{"status":"ok","timestamp":1726100393007,"user_tz":420,"elapsed":558,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"b50240a2-81c0-438d-9037-8a80bccb932d"},"execution_count":null,"outputs":[{"output_type":"execute_result","data":{"text/plain":["['1', '0', 'UNL']"]},"metadata":{},"execution_count":21}]},{"cell_type":"code","source":["with open('/content/drive/MyDrive/Master_Project_2024_JP/CSC502 Thomas Project/models/GANBERT/data/yelp_test_ns_400.pkl', 'rb') as f:\n"," test_examples = pickle.load(f)\n","\n","with open('/content/drive/MyDrive/Master_Project_2024_JP/CSC502 Thomas Project/models/GANBERT/data/yelp_unlabeled_90_ns_400.pkl', 'rb') as f:\n"," unlabeled_examples = pickle.load(f)\n","\n","with open('/content/drive/MyDrive/Master_Project_2024_JP/CSC502 Thomas Project/models/GANBERT/data/yelp_labeled_90_ns_400.pkl', 'rb') as f:\n"," labeled_examples = pickle.load(f)"],"metadata":{"id":"iNOx8XmeJAJo"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["with open('/content/drive/MyDrive/Master_Project_2024_JP/CSC502 Thomas Project/models/GANBERT/data/yelp_test_s_50.pkl', 'rb') as f:\n"," test_examples = pickle.load(f)\n","\n","with open('/content/drive/MyDrive/Master_Project_2024_JP/CSC502 Thomas Project/models/GANBERT/data/yelp_unlabeled_90_s_50.pkl', 'rb') as f:\n"," unlabeled_examples = pickle.load(f)\n","\n","with open('/content/drive/MyDrive/Master_Project_2024_JP/CSC502 Thomas Project/models/GANBERT/data/yelp_labeled_90_s_50.pkl', 'rb') as f:\n"," labeled_examples = pickle.load(f)"],"metadata":{"id":"QHJzgxPV13Ny"},"execution_count":null,"outputs":[]},{"cell_type":"markdown","source":["##Define our dataloaders"],"metadata":{"id":"D0JPAvjxakxu"}},{"cell_type":"code","source":["#def average_embeddings(review_embeddings):\n","# stacked_embeddings = torch.stack(review_embeddings)\n","# #return torch.mean(stacked_embeddings.squeeze(), dim=0) # Squeeze before averaging\n","# return torch.mean(stacked_embeddings, dim=0) # Squeeze before averaging\n","\n","def format_time(elapsed):\n"," '''\n"," Takes a time in seconds and returns a string 
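hh:mm:ss.\n","\n"," Added example (hedged): format_time(3661) -> '1:01:01'\n","\n"," Format: 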
hh:mm:ss\n"," '''\n"," # Round to the nearest second.\n"," elapsed_rounded = int(round((elapsed)))\n"," # Format as hh:mm:ss\n"," return str(datetime.timedelta(seconds=elapsed_rounded))"],"metadata":{"id":"QVCYTeDze2un","executionInfo":{"status":"ok","timestamp":1726183224688,"user_tz":420,"elapsed":365,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}}},"execution_count":15,"outputs":[]},{"cell_type":"code","source":["\n","#def generate_train_data_loader(embeddings, labels, label_map, label_masks, do_shuffle=True, balance_label_examples=False):\n","# '''\n","# Generate a DataLoader for the training set using precomputed embeddings.\n","# This is similar to the test data loader but with shuffling and optional balancing.\n","# '''\n","# examples = []\n","#\n","# # Count the percentage of labeled examples\n","# num_labeled_examples = sum(label_masks)\n","# label_mask_rate = num_labeled_examples / len(embeddings)\n","#\n","# # Process examples based on the mask and balance settings\n","# for index, embedding in enumerate(embeddings):\n","# label = labels[index]\n","# label_mask = label_masks[index]\n","#\n","# if label_mask_rate == 1 or not balance_label_examples:\n","# examples.append((embedding, label, label_mask))\n","# else:\n","# if label_mask:\n","# balance = int(1 / label_mask_rate)\n","# balance = max(1, int(math.log(balance, 2)))\n","# for _ in range(0, balance):\n","# examples.append((embedding, label, label_mask))\n","# else:\n","# examples.append((embedding, label, label_mask))\n","#\n","# # Prepare the tensors\n","# input_ids = []\n","# input_mask_array = []\n","# label_id_array = []\n","# label_mask_array = []\n","#\n","# for (embedding, label, label_mask) in examples:\n","# # Ensure embeddings have shape [seq_length, hidden_size]\n","# if len(embedding.shape) == 1: # If it's [hidden_size], add sequence length dimension\n","# embedding = embedding.unsqueeze(0) # Shape becomes [1, hidden_size]\n","# input_ids.append(embedding)\n","#\n","# # Create attention mask: 1 for non-zero embeddings, 0 for zero embeddings\n","# attention_mask = torch.where(embedding.abs().sum(dim=-1) > 0, 1, 0)\n","# input_mask_array.append(attention_mask.unsqueeze(0)) # Add batch dimension\n","#\n","# label_id_array.append(label_map[label])\n","# label_mask_array.append(label_mask)\n","#\n","# # Convert lists to tensors\n","# input_ids = torch.stack(input_ids) # Embeddings\n","# input_mask_array = torch.stack(input_mask_array) # Attention masks, [batch_size, seq_length]\n","# label_id_array = torch.tensor(label_id_array, dtype=torch.long) # Labels\n","# label_mask_array = torch.tensor(label_mask_array, dtype=torch.bool) # Label masks\n","#\n","# # Building the TensorDataset, including label masks similar to the original function\n","# dataset = TensorDataset(input_ids, input_mask_array, label_id_array, label_mask_array)\n","#\n","# # Set up the sampler\n","# sampler = RandomSampler(dataset) if do_shuffle else SequentialSampler(dataset)\n","#\n","# # Create DataLoader\n","# return DataLoader(\n","# dataset, # The training samples\n","# sampler=sampler,\n","# batch_size=batch_size\n","# )"],"metadata":{"id":"XnWcH6e3Ptn5"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["import torch\n","from torch.utils.data import DataLoader, RandomSampler, SequentialSampler, TensorDataset\n","\n","def generate_train_data_loader(embeddings, labels, label_map, label_masks, do_shuffle=True, balance_label_examples=False):\n"," '''\n"," Generate a DataLoader for the training set using 
" '''\n"," import math # added: used by the balancing heuristic below, in case it is not imported globally\n","\n"," examples = []\n","\n"," # Count the percentage of labeled examples\n"," num_labeled_examples = sum(label_masks)\n"," label_mask_rate = num_labeled_examples / len(embeddings)\n","\n"," # Apply label balancing if required: with e.g. 10% labeled data, each labeled example\n"," # is duplicated int(log2(1/0.1)) = 3 times so the supervised signal is not swamped\n"," for index, embedding in enumerate(embeddings):\n"," label = labels[index]\n"," label_mask = label_masks[index]\n","\n"," if label_mask_rate == 1 or not balance_label_examples:\n"," examples.append((embedding, label, label_mask))\n"," else:\n"," if label_mask:\n"," balance = int(1/label_mask_rate)\n"," balance = int(math.log(balance,2))\n"," if balance < 1:\n"," balance = 1\n"," for b in range(0, int(balance)):\n"," examples.append((embedding, label, label_mask))\n"," else:\n"," examples.append((embedding, label, label_mask))\n","\n"," # Prepare the tensors\n"," input_ids = []\n"," label_id_array = []\n"," label_mask_array = []\n","\n"," for (embedding, label, label_mask) in examples:\n"," input_ids.append(embedding) # Embeddings are already in shape [hidden_size], no need for further adjustment\n","\n"," label_id_array.append(label_map[label])\n"," label_mask_array.append(label_mask)\n","\n"," # Convert lists to tensors\n"," input_ids = torch.stack(input_ids) # Shape: [batch_size, hidden_size]\n"," label_id_array = torch.tensor(label_id_array, dtype=torch.long) # Labels\n"," label_mask_array = torch.tensor(label_mask_array, dtype=torch.bool) # Label masks\n","\n"," # Build the TensorDataset\n"," dataset = TensorDataset(input_ids, label_id_array, label_mask_array)\n","\n"," # Set up the sampler\n"," sampler = RandomSampler(dataset) if do_shuffle else SequentialSampler(dataset)\n","\n"," # Create DataLoader\n"," return DataLoader(\n"," dataset, # The training samples\n"," sampler=sampler,\n"," batch_size=batch_size\n"," )"],"metadata":{"id":"y2flz0okp7Vf","executionInfo":{"status":"ok","timestamp":1726185216487,"user_tz":420,"elapsed":352,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}}},"execution_count":50,"outputs":[]},{"cell_type":"code","source":["#import torch\n","#\n","#def generate_test_data_loader(embeddings, labels, label_map, label_masks, do_shuffle=False, balance_label_examples=False):\n","# '''\n","# Generate a DataLoader for the test set using precomputed embeddings, similar to the original data loader.\n","# '''\n","# examples = []\n","#\n","# # Count the percentage of labeled examples\n","# num_labeled_examples = sum(label_masks)\n","# label_mask_rate = num_labeled_examples / len(embeddings)\n","#\n","# # Process examples based on the mask and balance settings\n","# for index, embedding in enumerate(embeddings):\n","# label = labels[index]\n","# #print(\"Label: \", label)\n","# label_mask = label_masks[index]\n","#\n","# if label_mask_rate == 1 or not balance_label_examples:\n","# examples.append((embedding, label, label_mask))\n","# else:\n","# if label_mask:\n","# balance = int(1 / label_mask_rate)\n","# balance = max(1, int(math.log(balance, 2)))\n","# for b in range(0, balance):\n","# examples.append((embedding, label, label_mask))\n","# else:\n","# examples.append((embedding, label, label_mask))\n","#\n","# # Prepare the tensors\n","# input_ids = []\n","# input_mask_array = []\n","# label_id_array = []\n","# label_mask_array = []\n","#\n","# for (embedding, label, label_mask) in examples:\n","# # No need for tokenization as embeddings are precomputed\n","# if len(embedding.shape) == 1:\n","# # If the 
embedding is [hidden_size], add the sequence length dimension\n","# embedding = embedding.unsqueeze(0) # Now it's [1, hidden_size]\n","# elif len(embedding.shape) != 2:\n","# raise ValueError(f\"Embedding has invalid shape: {embedding.shape}\")\n","# # Now embedding has shape [seq_length, hidden_size]\n","# input_ids.append(embedding)\n","#\n","# # Generate attention mask\n","# attention_mask = (embedding.abs().sum(dim=-1) > 0).long() # Shape: [seq_length]\n","# input_mask_array.append(attention_mask)\n","#\n","# label_id_array.append(label_map[label])\n","# label_mask_array.append(label_mask)\n","#\n","# # Convert lists to tensors\n","# input_ids = torch.stack(input_ids) # Embeddings\n","# input_mask_array = torch.stack(input_mask_array) # Shape: [batch_size, seq_length]\n","# label_id_array = torch.tensor(label_id_array, dtype=torch.long) # Labels\n","# label_mask_array = torch.tensor(label_mask_array, dtype=torch.bool) # Label masks\n","#\n","# # Building the TensorDataset, including label masks similar to the original function\n","# dataset = TensorDataset(input_ids, input_mask_array, label_id_array, label_mask_array)\n","#\n","# # Set up the sampler\n","# sampler = RandomSampler(dataset) if do_shuffle else SequentialSampler(dataset)\n","#\n","# # Create DataLoader\n","# return DataLoader(\n","# dataset, # The test samples\n","# sampler=sampler,\n","# batch_size=batch_size\n","# )"],"metadata":{"id":"D6jkUNhlnYte"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["import torch\n","from torch.utils.data import DataLoader, RandomSampler, SequentialSampler, TensorDataset\n","\n","def generate_test_data_loader(embeddings, labels, label_map, label_masks, do_shuffle=False, balance_label_examples=False):\n"," '''\n"," Generate a DataLoader for the test set using precomputed embeddings.\n"," '''\n"," examples = []\n","\n"," # Count the percentage of labeled examples\n"," num_labeled_examples = sum(label_masks)\n"," label_mask_rate = num_labeled_examples / len(embeddings)\n","\n"," # Process examples based on the mask and balance settings\n"," for index, embedding in enumerate(embeddings):\n"," label = labels[index]\n"," label_mask = label_masks[index]\n","\n"," if label_mask_rate == 1 or not balance_label_examples:\n"," examples.append((embedding, label, label_mask))\n"," else:\n"," if label_mask:\n"," balance = int(1/label_mask_rate)\n"," balance = int(math.log(balance,2))\n"," if balance < 1:\n"," balance = 1\n"," for b in range(0, int(balance)):\n"," examples.append((embedding, label, label_mask))\n"," else:\n"," examples.append((embedding, label, label_mask))\n","\n"," # Prepare the tensors\n"," input_ids = []\n"," label_id_array = []\n"," label_mask_array = []\n","\n"," for (embedding, label, label_mask) in examples:\n"," input_ids.append(embedding) # No need for sequence length handling, just use the [hidden_size] embedding\n","\n"," label_id_array.append(label_map[label])\n"," label_mask_array.append(label_mask)\n","\n"," # Convert lists to tensors\n"," input_ids = torch.stack(input_ids) # Shape: [batch_size, hidden_size]\n"," label_id_array = torch.tensor(label_id_array, dtype=torch.long) # Shape: [batch_size]\n"," label_mask_array = torch.tensor(label_mask_array, dtype=torch.bool) # Shape: [batch_size]\n","\n"," # Build the TensorDataset (no attention masks)\n"," dataset = TensorDataset(input_ids, label_id_array, label_mask_array)\n","\n"," # Set up the sampler\n"," sampler = RandomSampler(dataset) if do_shuffle else SequentialSampler(dataset)\n","\n"," # Create 
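the DataLoader over (embedding, label_id, label_mask) triples.\n"," # Added note (hedged): no attention masks are needed because the discriminator consumes\n"," # one pooled vector per example; do_shuffle=False keeps batch order aligned with test_labels.\n"," # Create 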
DataLoader\n"," return DataLoader(\n"," dataset,\n"," sampler=sampler,\n"," batch_size=batch_size\n"," )"],"metadata":{"id":"1ybwBdJbqSag","executionInfo":{"status":"ok","timestamp":1726185222260,"user_tz":420,"elapsed":373,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}}},"execution_count":51,"outputs":[]},{"cell_type":"code","source":["label_map = {}\n","for (i, label) in enumerate(label_list):\n"," label_map[label] = i\n","\n","#------------------------------\n","# Combine Embeddings and Load the Train Dataset\n","#------------------------------\n","\n","# Combine labeled and unlabeled embeddings\n","train_embeddings = my_train_embeddings\n","\n","# The labeled (train) dataset is assigned with a mask set to True\n","train_label_masks = np.ones(len(labeled_embeddings), dtype=bool)\n","\n","# If unlabeled examples are available\n","if unlabeled_examples:\n"," # The unlabeled (train) dataset is assigned with a mask set to False\n"," tmp_masks = np.zeros(len(unlabled_embeddings), dtype=bool)\n"," train_label_masks = np.concatenate([train_label_masks, tmp_masks])\n","\n","# Create labels for the combined dataset\n","train_labels = [example[1] for example in labeled_examples] + [example[1] for example in unlabeled_examples]\n","\n","# Average embeddings for each review\n","#train_averaged_embeddings = [\n","# average_embeddings(review_embeddings)\n","# for review_embeddings in train_embeddings\n","#]\n","\n","train_averaged_embeddings = train_embeddings\n","\n","# Generate the train data loader using the modified generate_data_loader function\n","train_dataloader = generate_train_data_loader(train_averaged_embeddings, train_labels, label_map, train_label_masks, do_shuffle=True, balance_label_examples=apply_balance)\n","\n","\n","#------------------------------\n","# Load the test dataset\n","#------------------------------\n","# Average embeddings for each review\n","#averaged_embeddings = [\n","# average_embeddings(review_embeddings)\n","# for review_embeddings in final_ordered_embeddings\n","#]\n","\n","averaged_embeddings = my_test_embeddings\n","\n","#The labeled (test) dataset is assigned with a mask set to True\n","test_label_masks = np.ones(len(test_examples), dtype=bool)\n","test_labels = [example[1] for example in test_examples]\n","test_dataloader = generate_test_data_loader(averaged_embeddings, test_labels, label_map, test_label_masks, do_shuffle=False, balance_label_examples=False)"],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":332},"id":"nvHk1zyKqYl4","executionInfo":{"status":"error","timestamp":1726185402200,"user_tz":420,"elapsed":405,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"f2d5013d-78c5-460e-ec41-e0642890246e"},"execution_count":56,"outputs":[{"output_type":"error","ename":"KeyError","evalue":"True","traceback":["\u001b[0;31m---------------------------------------------------------------------------\u001b[0m","\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)","\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 14\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 15\u001b[0m \u001b[0;31m# Generate the train data loader using the modified generate_data_loader function\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 16\u001b[0;31m \u001b[0mtrain_dataloader\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mgenerate_train_data_loader\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtrain_averaged_embeddings\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0mtrain_label_masks\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlabel_map\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtrain_label_masks\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdo_shuffle\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbalance_label_examples\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mapply_balance\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 17\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 18\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m\u001b[0m in \u001b[0;36mgenerate_train_data_loader\u001b[0;34m(embeddings, labels, label_map, label_masks, do_shuffle, balance_label_examples)\u001b[0m\n\u001b[1;32m 39\u001b[0m \u001b[0minput_ids\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mappend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0membedding\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;31m# Embeddings are already in shape [hidden_size], no need for further adjustment\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 40\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 41\u001b[0;31m \u001b[0mlabel_id_array\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mappend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlabel_map\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mlabel\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 42\u001b[0m \u001b[0mlabel_mask_array\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mappend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlabel_mask\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 43\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;31mKeyError\u001b[0m: True"]}]},{"cell_type":"code","source":["label_map = {}\n","for (i, label) in enumerate(label_list):\n"," label_map[label] = i\n","\n","#------------------------------\n","# Combine Embeddings and Load the Train Dataset\n","#------------------------------\n","\n","# Combine labeled and unlabeled embeddings\n","train_embeddings = my_train_embeddings\n","\n","# The labeled (train) dataset is assigned with a mask set to True\n","train_label_masks = np.ones(len(labeled_embeddings), dtype=bool)\n","\n","# If unlabeled examples are available\n","if unlabeled_examples:\n"," # The unlabeled (train) dataset is assigned with a mask set to False\n"," tmp_masks = np.zeros(len(unlabled_embeddings), dtype=bool)\n"," train_label_masks = np.concatenate([train_label_masks, tmp_masks])\n","\n","# Create labels for the combined dataset\n","train_labels = [example[1] for example in labeled_examples] + [example[1] for example in unlabeled_examples]\n","\n","# Average embeddings for each review\n","#train_averaged_embeddings = [\n","# average_embeddings(review_embeddings)\n","# for review_embeddings in train_embeddings\n","#]\n","\n","train_averaged_embeddings = train_embeddings\n","\n","# Generate the train data loader using the modified generate_data_loader function\n","train_dataloader = generate_train_data_loader(train_averaged_embeddings, train_labels, label_map, train_label_masks, do_shuffle=True, balance_label_examples=apply_balance)\n","\n","\n","#------------------------------\n","# Load the test dataset\n","#------------------------------\n","# Average embeddings for each review\n","#averaged_embeddings = [\n","# average_embeddings(review_embeddings)\n","# for review_embeddings in final_ordered_embeddings\n","#]\n","\n","averaged_embeddings = my_test_embeddings\n","\n","#The labeled (test) dataset is 
assigned with a mask set to True\n","test_label_masks = np.ones(len(test_examples), dtype=bool)\n","test_labels = [example[1] for example in test_examples]\n","test_dataloader = generate_test_data_loader(averaged_embeddings, test_labels, label_map, test_label_masks, do_shuffle=False, balance_label_examples=False)"],"metadata":{"id":"cxNUJ63dx-p3"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["label_map = {}\n","for (i, label) in enumerate(label_list):\n"," label_map[label] = i\n","\n","#------------------------------\n","# Combine Embeddings and Load the Train Dataset\n","#------------------------------\n","\n","# Combine labeled and unlabeled embeddings\n","train_embeddings = labeled_embeddings + unlabled_embeddings\n","\n","# The labeled (train) dataset is assigned with a mask set to True\n","train_label_masks = np.ones(len(labeled_embeddings), dtype=bool)\n","\n","# If unlabeled examples are available\n","if unlabeled_examples:\n"," # The unlabeled (train) dataset is assigned with a mask set to False\n"," tmp_masks = np.zeros(len(unlabled_embeddings), dtype=bool)\n"," train_label_masks = np.concatenate([train_label_masks, tmp_masks])\n","\n","# Create labels for the combined dataset\n","train_labels = [example[1] for example in labeled_examples] + [example[1] for example in unlabeled_examples]\n","\n","# Average embeddings for each review\n","#train_averaged_embeddings = [\n","# average_embeddings(review_embeddings)\n","# for review_embeddings in train_embeddings\n","#]\n","\n","train_averaged_embeddings = train_embeddings\n","\n","# Generate the train data loader using the modified generate_data_loader function\n","train_dataloader = generate_train_data_loader(train_averaged_embeddings, train_labels, label_map, train_label_masks, do_shuffle=True, balance_label_examples=apply_balance)\n","\n","\n","#------------------------------\n","# Load the test dataset\n","#------------------------------\n","# Average embeddings for each review\n","#averaged_embeddings = [\n","# average_embeddings(review_embeddings)\n","# for review_embeddings in final_ordered_embeddings\n","#]\n","\n","averaged_embeddings = test_embeddings\n","\n","#The labeled (test) dataset is assigned with a mask set to True\n","test_label_masks = np.ones(len(test_examples), dtype=bool)\n","test_labels = [example[1] for example in test_examples]\n","test_dataloader = generate_test_data_loader(averaged_embeddings, test_labels, label_map, test_label_masks, do_shuffle=False, balance_label_examples=False)\n","\n"],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":211},"id":"caEk4noCP0Zu","executionInfo":{"status":"error","timestamp":1726185300389,"user_tz":420,"elapsed":362,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"ee2b3aab-a762-4612-cd33-97b2670cdbe3"},"execution_count":53,"outputs":[{"output_type":"error","ename":"NameError","evalue":"name 'labeled_embeddings' is not defined","traceback":["\u001b[0;31m---------------------------------------------------------------------------\u001b[0m","\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)","\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 8\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 9\u001b[0m \u001b[0;31m# Combine labeled and unlabeled embeddings\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 10\u001b[0;31m \u001b[0mtrain_embeddings\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mlabeled_embeddings\u001b[0m 
\u001b[0;34m+\u001b[0m \u001b[0munlabled_embeddings\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 11\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 12\u001b[0m \u001b[0;31m# The labeled (train) dataset is assigned with a mask set to True\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;31mNameError\u001b[0m: name 'labeled_embeddings' is not defined"]}]},{"cell_type":"code","source":["averaged_embeddings[0].shape"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"XBYRHqT0nYeR","executionInfo":{"status":"ok","timestamp":1725598383890,"user_tz":420,"elapsed":291,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"f3014f47-80a3-488e-da60-b0dfe0c0c3f1"},"execution_count":null,"outputs":[{"output_type":"execute_result","data":{"text/plain":["torch.Size([768])"]},"metadata":{},"execution_count":101}]},{"cell_type":"code","source":["test_labels"],"metadata":{"id":"xl_vbZRbLB_P"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["train_labels"],"metadata":{"id":"YQfCdOkhf3un"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["test_label_masks"],"metadata":{"id":"OBgnofLSEY9x"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["train_label_masks\n"],"metadata":{"id":"zORrKDcqDnW8"},"execution_count":null,"outputs":[]},{"cell_type":"markdown","source":["##Set up the Generator and Discriminator"],"metadata":{"id":"PRCttwGna7jK"}},{"cell_type":"code","source":["#------------------------------\n","# The Generator as in\n","# https://www.aclweb.org/anthology/2020.acl-main.191/\n","# https://github.com/crux82/ganbert\n","#------------------------------\n","#class Generator(nn.Module):\n","# def __init__(self, noise_size=100, output_size=512, hidden_sizes=[512], dropout_rate=0.1):\n","# super(Generator, self).__init__()\n","# layers = []\n","# hidden_sizes = [noise_size] + hidden_sizes\n","# for i in range(len(hidden_sizes)-1):\n","# layers.extend([nn.Linear(hidden_sizes[i], hidden_sizes[i+1]), nn.LeakyReLU(0.2, inplace=True), nn.Dropout(dropout_rate)])\n","#\n","# layers.append(nn.Linear(hidden_sizes[-1],output_size))\n","# self.layers = nn.Sequential(*layers)\n","#\n","# def forward(self, noise):\n","# output_rep = self.layers(noise)\n","# return output_rep\n","\n","class Generator(nn.Module):\n"," def __init__(self, noise_size=100, output_size=512, hidden_sizes=[512], dropout_rate=0.1):\n"," super(Generator, self).__init__()\n"," layers = []\n"," hidden_sizes = [noise_size] + hidden_sizes\n"," for i in range(len(hidden_sizes)-1):\n"," layers.extend([nn.Linear(hidden_sizes[i], hidden_sizes[i+1]), nn.LeakyReLU(0.2, inplace=True), nn.Dropout(dropout_rate)])\n","\n"," layers.append(nn.Linear(hidden_sizes[-1], output_size)) # output_size is set by the caller (768 for BERT-base embeddings)\n"," self.layers = nn.Sequential(*layers)\n","\n"," def forward(self, noise):\n"," output_rep = self.layers(noise)\n"," return output_rep\n","\n","#------------------------------\n","# The Discriminator\n","# https://www.aclweb.org/anthology/2020.acl-main.191/\n","# https://github.com/crux82/ganbert\n","#------------------------------\n","#class Discriminator(nn.Module):\n","# def __init__(self, input_size=512, hidden_sizes=[512], num_labels=2, dropout_rate=0.1):\n","# super(Discriminator, self).__init__()\n","# self.input_dropout = nn.Dropout(p=dropout_rate)\n","# layers = []\n","# hidden_sizes = [input_size] + hidden_sizes\n","# for i in range(len(hidden_sizes)-1):\n","# 
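\n","# --- Added note (hedged): expected tensor shapes in this GAN-BERT-style setup ---\n","# noise [B, noise_size] -> Generator -> fake representations [B, hidden_size];\n","# real (Spa)BERT embeddings are [B, hidden_size]; the Discriminator maps\n","# [2B, hidden_size] -> logits [2B, num_labels + 1], where the last column is the fake class.\n","# 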
layers.extend([nn.Linear(hidden_sizes[i], hidden_sizes[i+1]), nn.LeakyReLU(0.2, inplace=True), nn.Dropout(dropout_rate)])\n","#\n","# self.layers = nn.Sequential(*layers) # for the flatten\n","# self.logit = nn.Linear(hidden_sizes[-1],num_labels+1) # +1 for the probability of this sample being fake/real.\n","# self.softmax = nn.Softmax(dim=-1)\n","#\n","# def forward(self, input_rep):\n","# input_rep = self.input_dropout(input_rep)\n","# last_rep = self.layers(input_rep)\n","# logits = self.logit(last_rep)\n","# probs = self.softmax(logits)\n","# return last_rep, logits, probs\n","\n","class Discriminator(nn.Module):\n"," def __init__(self, input_size=512, hidden_sizes=[512], num_labels=2, dropout_rate=0.1):\n"," super(Discriminator, self).__init__()\n"," self.input_dropout = nn.Dropout(p=dropout_rate)\n"," layers = []\n"," hidden_sizes = [input_size] + hidden_sizes\n"," for i in range(len(hidden_sizes)-1):\n"," layers.extend([nn.Linear(hidden_sizes[i], hidden_sizes[i+1]), nn.LeakyReLU(0.2, inplace=True), nn.Dropout(dropout_rate)])\n","\n"," self.layers = nn.Sequential(*layers)\n"," self.logit = nn.Linear(hidden_sizes[-1], num_labels+1) # +1 for the probability of this sample being fake/real.\n"," self.softmax = nn.Softmax(dim=-1)\n","\n"," def forward(self, input_rep):\n"," print(f\"Input shape: {input_rep.shape}\") # Debug: log the incoming batch shape\n"," input_rep = self.input_dropout(input_rep)\n"," last_rep = self.layers(input_rep)\n"," logits = self.logit(last_rep)\n"," probs = self.softmax(logits)\n"," return last_rep, logits, probs"],"metadata":{"id":"yb9XoDRpOeSq","executionInfo":{"status":"ok","timestamp":1726183979509,"user_tz":420,"elapsed":356,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}}},"execution_count":25,"outputs":[]},{"cell_type":"code","source":["# The config file is required to get the dimension of the vector produced by\n","# the underlying transformer\n","config = AutoConfig.from_pretrained(model_name)\n","hidden_size = int(config.hidden_size)\n","# Define the number and width of hidden layers\n","hidden_levels_g = [hidden_size for i in range(0, num_hidden_layers_g)]\n","hidden_levels_d = [hidden_size for i in range(0, num_hidden_layers_d)]\n","\n","#-------------------------------------------------\n","# Instantiate the Generator and Discriminator\n","#-------------------------------------------------\n","generator = Generator(noise_size=noise_size, output_size=768, hidden_sizes=hidden_levels_g, dropout_rate=out_dropout_rate)\n","discriminator = Discriminator(input_size=768, hidden_sizes=hidden_levels_d, num_labels=len(label_list), dropout_rate=out_dropout_rate)\n","\n","# Put everything on the GPU if available\n","if torch.cuda.is_available():\n"," generator.cuda()\n"," discriminator.cuda()\n"," transformer.cuda()\n"," if multi_gpu:\n"," transformer = torch.nn.DataParallel(transformer)\n","\n","# print(config)"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"5qUTFiwJOg2m","executionInfo":{"status":"ok","timestamp":1726184135772,"user_tz":420,"elapsed":1056,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"d1d88c07-3814-45e0-fe6b-2896cf861698","collapsed":true},"execution_count":33,"outputs":[{"output_type":"stream","name":"stderr","text":["loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--bert-base-uncased/snapshots/86b5e0934494bd15c9632b12f734a8a67f723594/config.json\n","Model config BertConfig {\n"," 
\"architectures\": [\n"," \"BertForMaskedLM\"\n"," ],\n"," \"attention_probs_dropout_prob\": 0.1,\n"," \"classifier_dropout\": null,\n"," \"gradient_checkpointing\": false,\n"," \"hidden_act\": \"gelu\",\n"," \"hidden_dropout_prob\": 0.1,\n"," \"hidden_size\": 768,\n"," \"initializer_range\": 0.02,\n"," \"intermediate_size\": 3072,\n"," \"layer_norm_eps\": 1e-12,\n"," \"max_position_embeddings\": 512,\n"," \"model_type\": \"bert\",\n"," \"num_attention_heads\": 12,\n"," \"num_hidden_layers\": 12,\n"," \"pad_token_id\": 0,\n"," \"position_embedding_type\": \"absolute\",\n"," \"transformers_version\": \"4.44.2\",\n"," \"type_vocab_size\": 2,\n"," \"use_cache\": true,\n"," \"vocab_size\": 30522\n","}\n","\n"]}]},{"cell_type":"markdown","source":["##New Attempt\n"],"metadata":{"id":"5d-Z0MU-7q4y"}},{"cell_type":"code","source":["for z in range(1, 3):\n"," label_map = {}\n"," for (i, label) in enumerate(label_list):\n"," label_map[label] = i\n","\n"," # The config file is required to get the dimension of the vector produced by the underlying transformer\n"," config = AutoConfig.from_pretrained(model_name)\n"," hidden_size = int(config.hidden_size)\n"," # Define the number and width of hidden layers\n"," hidden_levels_g = [hidden_size for _ in range(num_hidden_layers_g)]\n"," hidden_levels_d = [hidden_size for _ in range(num_hidden_layers_d)]\n","\n"," #-------------------------------------------------\n"," # Instantiate the Generator and Discriminator\n"," #-------------------------------------------------\n"," generator = Generator(noise_size=noise_size, output_size=hidden_size, hidden_sizes=hidden_levels_g, dropout_rate=out_dropout_rate).to(device)\n"," discriminator = Discriminator(input_size=hidden_size, hidden_sizes=hidden_levels_d, num_labels=len(label_list), dropout_rate=out_dropout_rate).to(device)\n"," transformer = transformer.to(device)\n","\n"," # If using multiple GPUs\n"," if multi_gpu and torch.cuda.device_count() > 1:\n"," transformer = torch.nn.DataParallel(transformer)\n","\n"," ###########################################################\n","\n"," training_stats = []\n"," total_t0 = time.time() # Total training time\n","\n"," # Model parameters\n"," transformer_vars = list(transformer.parameters())\n"," d_vars = transformer_vars + list(discriminator.parameters())\n"," g_vars = list(generator.parameters())\n","\n"," # Optimizer\n"," dis_optimizer = torch.optim.AdamW(d_vars, lr=learning_rate_discriminator)\n"," gen_optimizer = torch.optim.AdamW(g_vars, lr=learning_rate_generator)\n","\n"," # Scheduler\n"," if apply_scheduler:\n"," num_train_examples = len(train_embeddings)\n"," num_train_steps = int(num_train_examples / batch_size * num_train_epochs)\n"," num_warmup_steps = int(num_train_steps * warmup_proportion)\n","\n"," scheduler_d = get_constant_schedule_with_warmup(dis_optimizer, num_warmup_steps=num_warmup_steps)\n"," scheduler_g = get_constant_schedule_with_warmup(gen_optimizer, num_warmup_steps=num_warmup_steps)\n","\n"," # For each epoch...\n"," for epoch_i in range(0, num_train_epochs):\n"," # ========================================\n"," # Training\n"," # ========================================\n"," print(f'\\n======== Epoch {epoch_i + 1} / {num_train_epochs} ========')\n"," print('Training...')\n","\n"," t0 = time.time() # Track time for this epoch\n","\n"," tr_g_loss = 0\n"," tr_d_loss = 0\n","\n"," # Set the models to training mode\n"," transformer.train()\n"," generator.train()\n"," discriminator.train()\n","\n"," for step, batch in 
enumerate(train_dataloader):\n"," if step % print_each_n_step == 0 and not step == 0:\n"," elapsed = format_time(time.time() - t0)\n"," print(f' Batch {step} of {len(train_dataloader)}. Elapsed: {elapsed}.')\n","\n"," # Unpack the batch from the dataloader\n"," b_embeddings = batch[0].to(device).float()\n"," b_labels = batch[1].to(device).long()\n"," b_label_mask = batch[2].to(device).bool()\n","\n"," real_batch_size = b_embeddings.shape[0]\n","\n"," # Generate fake data\n"," noise = torch.zeros(real_batch_size, noise_size, device=device).uniform_(0, 1)\n"," gen_rep = generator(noise)\n","\n"," # Discriminator output\n"," discriminator_input = torch.cat([b_embeddings, gen_rep], dim=0)\n"," features, logits, probs = discriminator(discriminator_input)\n","\n"," features_list = torch.split(features, real_batch_size)\n"," D_real_features = features_list[0]\n"," D_fake_features = features_list[1]\n","\n"," logits_list = torch.split(logits, real_batch_size)\n"," D_real_logits = logits_list[0]\n"," D_fake_logits = logits_list[1]\n","\n"," probs_list = torch.split(probs, real_batch_size)\n"," D_real_probs = probs_list[0]\n"," D_fake_probs = probs_list[1]\n","\n"," # Loss evaluation\n"," g_loss_d = -1 * torch.mean(torch.log(1 - D_fake_probs[:, -1] + epsilon))\n"," g_feat_reg = torch.mean(torch.pow(torch.mean(D_real_features, dim=0) - torch.mean(D_fake_features, dim=0), 2))\n"," g_loss = g_loss_d + g_feat_reg\n","\n"," logits = D_real_logits[:, :-1]\n"," log_probs = F.log_softmax(logits, dim=-1)\n"," label2one_hot = torch.nn.functional.one_hot(b_labels, len(label_list))\n"," per_example_loss = -torch.sum(label2one_hot * log_probs, dim=-1)\n"," per_example_loss = torch.masked_select(per_example_loss, b_label_mask.to(device))\n"," labeled_example_count = per_example_loss.type(torch.float32).numel()\n","\n"," D_L_Supervised = torch.div(torch.sum(per_example_loss.to(device)), labeled_example_count) if labeled_example_count > 0 else 0\n"," D_L_unsupervised1U = -1 * torch.mean(torch.log(1 - D_real_probs[:, -1] + epsilon))\n"," D_L_unsupervised2U = -1 * torch.mean(torch.log(D_fake_probs[:, -1] + epsilon))\n"," d_loss = D_L_Supervised + D_L_unsupervised1U + D_L_unsupervised2U\n","\n"," # Optimization\n"," gen_optimizer.zero_grad()\n"," dis_optimizer.zero_grad()\n"," g_loss.backward(retain_graph=True)\n"," d_loss.backward()\n","\n"," gen_optimizer.step()\n"," dis_optimizer.step()\n","\n"," tr_g_loss += g_loss.item()\n"," tr_d_loss += d_loss.item()\n","\n"," if apply_scheduler:\n"," scheduler_d.step()\n"," scheduler_g.step()\n","\n"," torch.cuda.empty_cache() # Clear CUDA cache\n","\n"," avg_train_loss_g = tr_g_loss / len(train_dataloader)\n"," avg_train_loss_d = tr_d_loss / len(train_dataloader)\n"," training_time = format_time(time.time() - t0)\n","\n"," print(f\" Average training loss generator: {avg_train_loss_g:.3f}\")\n"," print(f\" Average training loss discriminator: {avg_train_loss_d:.3f}\")\n"," print(f\" Training epoch took: {training_time}\")\n","\n"," # ========================================\n"," # TEST ON THE EVALUATION DATASET\n"," # ========================================\n"," print(\"Running Test...\")\n","\n"," t0 = time.time() # Track test time\n","\n"," # Set the models to evaluation mode\n"," transformer.eval()\n"," discriminator.eval()\n"," generator.eval()\n","\n"," total_test_loss = 0\n"," all_preds = []\n"," all_labels_ids = []\n","\n"," nll_loss = torch.nn.CrossEntropyLoss(ignore_index=-1)\n","\n"," for batch in test_dataloader:\n"," b_embeddings = batch[0].to(device).float()\n"," 
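# Added note: each test batch is (pooled embedding, label id, label mask); the mask is unused at test time.\n"," 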
b_labels = batch[1].to(device).long()\n","\n"," with torch.no_grad():\n"," _, logits, probs = discriminator(b_embeddings)\n"," filtered_logits = logits.squeeze(1)\n","\n"," print(f\"filtered_logits shape: {filtered_logits.shape}, b_labels shape: {b_labels.shape}\")\n","\n"," total_test_loss += nll_loss(filtered_logits, b_labels)\n","\n"," _, preds = torch.max(filtered_logits, 1)\n"," all_preds += preds.detach().cpu()\n"," all_labels_ids += b_labels.detach().cpu()\n","\n"," all_preds = torch.stack(all_preds).numpy()\n"," all_labels_ids = torch.stack(all_labels_ids).numpy()\n"," test_accuracy = np.sum(all_preds == all_labels_ids) / len(all_preds)\n"," print(f\" Accuracy: {test_accuracy:.3f}\")\n","\n"," avg_test_loss = total_test_loss / len(test_dataloader)\n"," avg_test_loss = avg_test_loss.item()\n"," test_time = format_time(time.time() - t0)\n","\n"," print(f\" Test Loss: {avg_test_loss:.3f}\")\n"," print(f\" Test took: {test_time}\")\n","\n"," # Record statistics\n"," training_stats.append({\n"," 'epoch': epoch_i + 1,\n"," 'Training Loss generator': avg_train_loss_g,\n"," 'Training Loss discriminator': avg_train_loss_d,\n"," 'Valid. Loss': avg_test_loss,\n"," 'Valid. Accur.': test_accuracy,\n"," 'Training Time': training_time,\n"," 'Test Time': test_time,\n"," 'Preds': all_preds,\n"," 'Labels': all_labels_ids\n"," })\n","\n"," last_pred = []\n"," last_label = []\n"," for stat in training_stats:\n"," last_pred = stat['Preds']\n"," last_label = stat['Labels']\n","\n"," print(\"\\nTraining complete!\")\n"," print(f\"Total training took {format_time(time.time() - total_t0)} (h:mm:ss)\")\n","\n"," with open(f'/content/drive/MyDrive/Master_Project_2024_JP/CSC502 Thomas Project/models/GANBERT/runs/{z}/SpaBert_training_stats_lab_yelp_100L_ns_400R.pkl', 'wb') as f:\n"," pickle.dump(training_stats, f)\n"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"tjMK1EI2_j5G","executionInfo":{"status":"ok","timestamp":1726187321810,"user_tz":420,"elapsed":5440,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"fca5e037-865f-46e2-a9ae-12d0453a81cf"},"execution_count":74,"outputs":[{"output_type":"stream","name":"stderr","text":["loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--bert-base-uncased/snapshots/86b5e0934494bd15c9632b12f734a8a67f723594/config.json\n","Model config BertConfig {\n"," \"_name_or_path\": \"bert-base-uncased\",\n"," \"architectures\": [\n"," \"BertForMaskedLM\"\n"," ],\n"," \"attention_probs_dropout_prob\": 0.1,\n"," \"classifier_dropout\": null,\n"," \"gradient_checkpointing\": false,\n"," \"hidden_act\": \"gelu\",\n"," \"hidden_dropout_prob\": 0.1,\n"," \"hidden_size\": 768,\n"," \"initializer_range\": 0.02,\n"," \"intermediate_size\": 3072,\n"," \"layer_norm_eps\": 1e-12,\n"," \"max_position_embeddings\": 512,\n"," \"model_type\": \"bert\",\n"," \"num_attention_heads\": 12,\n"," \"num_hidden_layers\": 12,\n"," \"pad_token_id\": 0,\n"," \"position_embedding_type\": \"absolute\",\n"," \"transformers_version\": \"4.44.2\",\n"," \"type_vocab_size\": 2,\n"," \"use_cache\": true,\n"," \"vocab_size\": 30522\n","}\n","\n"]},{"output_type":"stream","name":"stdout","text":["\n","======== Epoch 1 / 10 ========\n","Training...\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 
768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n"," Average training loss generator: 0.406\n"," Average training loss discriminator: 2.397\n"," Training epoch took: 0:00:00\n","Running Test...\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([32, 768])\n","filtered_logits shape: torch.Size([32, 4]), b_labels shape: torch.Size([32])\n"," Accuracy: 0.388\n"," Test Loss: 1.058\n"," Test took: 0:00:00\n","\n","======== Epoch 2 / 10 ========\n","Training...\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n"," Average training loss generator: 0.641\n"," Average training loss discriminator: 1.784\n"," Training epoch took: 0:00:00\n","Running Test...\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([32, 768])\n","filtered_logits shape: torch.Size([32, 4]), b_labels shape: torch.Size([32])\n"," Accuracy: 0.438\n"," Test Loss: 0.894\n"," Test took: 0:00:00\n","\n","======== Epoch 3 / 10 ========\n","Training...\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n"," Average training loss generator: 0.821\n"," Average training loss discriminator: 1.485\n"," Training epoch took: 0:00:00\n","Running Test...\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([32, 768])\n","filtered_logits shape: torch.Size([32, 4]), b_labels shape: torch.Size([32])\n"," Accuracy: 0.463\n"," Test Loss: 0.810\n"," Test took: 0:00:00\n","\n","======== Epoch 4 / 10 ========\n","Training...\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n"," Average training loss generator: 0.739\n"," Average training loss discriminator: 1.497\n"," Training epoch took: 0:00:00\n","Running Test...\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: 
torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([32, 768])\n","filtered_logits shape: torch.Size([32, 4]), b_labels shape: torch.Size([32])\n"," Accuracy: 0.475\n"," Test Loss: 0.770\n"," Test took: 0:00:00\n","\n","[Per-batch shape logs condensed: each training epoch printed 'Input shape: torch.Size([128, 768])' once per batch (10 batches); each test pass printed batches of 64, 64 and 32 with 'filtered_logits shape: torch.Size([n, 4]), b_labels shape: torch.Size([n])'.]\n","\n","======== Epoch 5 / 10 ========\n","Training...\n"," Average training loss generator: 0.697\n"," Average training loss discriminator: 1.517\n"," Training epoch took: 0:00:00\n","Running Test...\n"," Accuracy: 0.562\n"," Test Loss: 0.752\n"," Test took: 0:00:00\n","\n","======== Epoch 6 / 10 ========\n","Training...\n"," Average training loss generator: 0.748\n"," Average training loss discriminator: 1.449\n"," Training epoch took: 0:00:00\n","Running Test...\n"," Accuracy: 0.512\n"," Test Loss: 0.735\n"," Test took: 0:00:00\n","\n","======== Epoch 7 / 10 ========\n","Training...\n"," Average training loss generator: 0.733\n"," Average training loss discriminator: 1.443\n"," Training epoch took: 0:00:00\n","Running Test...\n"," Accuracy: 0.556\n"," Test Loss: 0.726\n"," Test took: 0:00:00\n","\n","======== Epoch 8 / 10 ========\n","Training...\n"," Average training loss generator: 0.724\n"," Average training loss discriminator: 1.442\n"," Training epoch took: 0:00:00\n","Running Test...\n"," Accuracy: 0.588\n"," Test Loss: 0.719\n"," Test took: 0:00:00\n","\n","======== Epoch 9 / 10 ========\n","Training...\n"," Average training loss generator: 0.730\n"," Average training loss discriminator: 1.422\n"," Training epoch took: 0:00:00\n","Running Test...\n"," Accuracy: 0.594\n"," Test Loss: 0.715\n"," Test took: 0:00:00\n","\n","======== Epoch 10 / 10 ========\n","Training...\n"," Average training loss generator: 0.722\n"," Average training loss discriminator: 1.421\n"," Training epoch took: 0:00:00\n","Running Test...\n"," Accuracy: 0.594\n"," Test Loss: 0.712\n"," Test took: 0:00:00\n","\n","Training complete!\n","Total training took 0:00:01 (h:mm:ss)\n"]},{"output_type":"stream","name":"stderr","text":["loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--bert-base-uncased/snapshots/86b5e0934494bd15c9632b12f734a8a67f723594/config.json\n","Model config BertConfig {\n"," \"_name_or_path\": \"bert-base-uncased\",\n"," \"architectures\": [\n"," \"BertForMaskedLM\"\n"," ],\n"," \"attention_probs_dropout_prob\": 0.1,\n"," \"classifier_dropout\": null,\n"," \"gradient_checkpointing\": false,\n"," \"hidden_act\": \"gelu\",\n"," \"hidden_dropout_prob\": 0.1,\n"," \"hidden_size\": 768,\n"," \"initializer_range\": 0.02,\n"," \"intermediate_size\": 3072,\n"," \"layer_norm_eps\": 1e-12,\n"," \"max_position_embeddings\": 512,\n"," \"model_type\": \"bert\",\n"," \"num_attention_heads\": 12,\n"," \"num_hidden_layers\": 12,\n"," \"pad_token_id\": 0,\n"," \"position_embedding_type\": \"absolute\",\n"," \"transformers_version\": \"4.44.2\",\n"," \"type_vocab_size\": 2,\n"," \"use_cache\": true,\n"," \"vocab_size\": 30522\n","}\n","\n"]},{"output_type":"stream","name":"stdout","text":["\n","[Per-batch shape logs condensed as above.]\n","\n","======== Epoch 1 / 10 ========\n","Training...\n"," Average training loss generator: 0.394\n"," Average training loss discriminator: 2.534\n"," Training epoch took: 0:00:00\n","Running Test...\n"," Accuracy: 0.519\n"," Test Loss: 1.145\n"," Test took: 0:00:00\n","\n","======== Epoch 2 / 10 ========\n","Training...\n"," Average training loss generator: 0.593\n"," Average training loss discriminator: 1.904\n"," Training epoch took: 0:00:00\n","Running Test...\n"," Accuracy: 0.544\n"," Test Loss: 0.941\n"," Test took: 0:00:00\n","\n","======== Epoch 3 / 10 ========\n","Training...\n"," Average training loss generator: 0.805\n"," Average training loss discriminator: 1.530\n"," Training epoch took: 0:00:00\n","Running Test...\n"," Accuracy: 0.469\n"," Test Loss: 0.836\n"," Test took: 0:00:00\n","\n","======== Epoch 4 / 10 ========\n","Training...\n"," Average training loss generator: 0.759\n"," Average training loss discriminator: 1.503\n"," Training epoch took: 0:00:00\n","Running Test...\n"," Accuracy: 0.556\n"," Test Loss: 0.788\n"," Test took: 0:00:00\n","\n","======== Epoch 5 / 10 ========\n","Training...\n"," Average training loss generator: 0.692\n"," Average training loss discriminator: 1.528\n"," Training epoch took: 0:00:00\n","Running Test...\n"," Accuracy: 0.550\n"," Test Loss: 0.760\n"," Test took: 0:00:00\n","\n","======== Epoch 6 / 10 ========\n","Training...\n"," Average training loss generator: 0.736\n"," Average training loss discriminator: 1.464\n"," Training epoch took: 0:00:00\n","Running Test...\n"," Accuracy: 0.519\n"," Test Loss: 0.738\n"," Test took: 0:00:00\n","\n","======== Epoch 7 / 10 ========\n","Training...\n"," Average training loss generator: 0.735\n"," Average training loss discriminator: 1.444\n"," Training epoch took: 0:00:00\n","Running Test...\n"," Accuracy: 0.606\n"," Test Loss: 0.730\n"," Test took: 0:00:00\n","\n","======== Epoch 8 / 10 ========\n","Training...\n"," Average training loss generator: 0.721\n"," Average training loss discriminator: 1.445\n"," Training epoch took: 0:00:00\n","Running Test...\n"," Accuracy: 0.613\n"," Test Loss: 0.721\n"," Test took: 0:00:00\n","\n","======== Epoch 9 / 10 ========\n","Training...\n"," Average training loss generator: 0.725\n"," Average training loss discriminator: 1.442\n"," Training epoch took: 0:00:00\n","Running Test...\n"," Accuracy: 0.650\n"," Test Loss: 0.719\n"," Test took: 0:00:00\n","\n","======== Epoch 10 / 10 ========\n","Training...\n"," Average training loss generator: 0.724\n"," Average training loss discriminator: 1.424\n"," Training epoch took: 0:00:00\n","Running Test...\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([32, 768])\n","filtered_logits shape: 
torch.Size([32, 4]), b_labels shape: torch.Size([32])\n"," Accuracy: 0.606\n"," Test Loss: 0.712\n"," Test took: 0:00:00\n","\n","Training complete!\n","Total training took 0:00:01 (h:mm:ss)\n"]}]},{"cell_type":"markdown","source":["##Begin the Procedure for Training and Testing the Architecture"],"metadata":{"id":"2UF2zclPbBeJ"}},{"cell_type":"code","source":["for z in range(1, 3):\n"," # Create the label map\n"," label_map = {label: i for i, label in enumerate(label_list)}\n","\n"," train_dataloader = generate_train_data_loader(train_averaged_embeddings, train_labels, label_map, train_label_masks, do_shuffle=True, balance_label_examples=apply_balance)\n"," test_dataloader = generate_test_data_loader(averaged_embeddings, test_labels, label_map, test_label_masks, do_shuffle=False, balance_label_examples=False)\n","\n"," ###########################################################\n","\n"," # Model configuration\n"," config = AutoConfig.from_pretrained(model_name)\n"," hidden_size = int(config.hidden_size)\n"," hidden_levels_g = [hidden_size for _ in range(num_hidden_layers_g)]\n"," hidden_levels_d = [hidden_size for _ in range(num_hidden_layers_d)]\n","\n"," # Instantiate the Generator and Discriminator\n"," generator = Generator(noise_size=noise_size, output_size=hidden_size, hidden_sizes=hidden_levels_g, dropout_rate=out_dropout_rate)\n"," discriminator = Discriminator(input_size=hidden_size, hidden_sizes=hidden_levels_d, num_labels=len(label_list), dropout_rate=out_dropout_rate)\n","\n"," # Use GPU if available\n"," if torch.cuda.is_available():\n"," generator.cuda()\n"," discriminator.cuda()\n"," transformer.cuda()\n"," if multi_gpu:\n"," transformer = torch.nn.DataParallel(transformer)\n","\n"," ###########################################################\n","\n"," training_stats = []\n"," total_t0 = time.time()\n","\n"," # Model parameters\n"," transformer_vars = [i for i in transformer.parameters()]\n"," d_vars = transformer_vars + [v for v in discriminator.parameters()]\n"," g_vars = [v for v in generator.parameters()]\n","\n"," # Optimizer\n"," dis_optimizer = torch.optim.AdamW(d_vars, lr=learning_rate_discriminator)\n"," gen_optimizer = torch.optim.AdamW(g_vars, lr=learning_rate_generator)\n","\n"," # Scheduler\n"," if apply_scheduler:\n"," num_train_examples = len(train_examples)\n"," num_train_steps = int(num_train_examples / batch_size * num_train_epochs)\n"," num_warmup_steps = int(num_train_steps * warmup_proportion)\n","\n"," scheduler_d = get_constant_schedule_with_warmup(dis_optimizer, num_warmup_steps=num_warmup_steps)\n"," scheduler_g = get_constant_schedule_with_warmup(gen_optimizer, num_warmup_steps=num_warmup_steps)\n","\n"," # Training loop\n"," for epoch_i in range(0, num_train_epochs):\n"," print(f'\\n======== Epoch {epoch_i + 1} / {num_train_epochs} ========')\n"," print('Training...')\n","\n"," t0 = time.time()\n"," tr_g_loss = 0\n"," tr_d_loss = 0\n","\n"," transformer.train()\n"," generator.train()\n"," discriminator.train()\n","\n"," for step, batch in enumerate(train_dataloader):\n"," if step % print_each_n_step == 0 and not step == 0:\n"," elapsed = format_time(time.time() - t0)\n"," print(f' Batch {step} of {len(train_dataloader)}. 
Elapsed: {elapsed}.')\n","\n"," # Unpack the batch from the dataloader\n"," b_embeddings = batch[0].to(device).float()\n"," b_labels = batch[1].to(device).long()\n"," b_label_mask = batch[2].to(device).bool()\n","\n"," #print(f\"b_embeddings shape: {b_embeddings.shape}\")\n"," #print(f\"b_labels shape: {b_labels.shape}\")\n"," #print(f\"b_label_mask shape: {b_label_mask.shape}\")\n"," #print(f\"b_labels: {b_labels}\")\n","\n"," real_batch_size = b_embeddings.shape[0]\n","\n"," if len(b_embeddings.shape) == 2:\n"," b_embeddings = b_embeddings.unsqueeze(1) # This makes the shape [batch_size, 1, hidden_size]\n","\n"," # Encode real data in the Transformer\n"," model_outputs = transformer(inputs_embeds=b_embeddings)\n"," hidden_states = model_outputs[-1]\n","\n"," # Generate fake data\n"," noise = torch.zeros(real_batch_size, noise_size, device=device).uniform_(0, 1)\n"," gen_rep = generator(noise)\n","\n"," # Discriminator output\n"," discriminator_input = torch.cat([hidden_states, gen_rep], dim=0)\n"," features, logits, probs = discriminator(discriminator_input)\n","\n"," features_list = torch.split(features, real_batch_size)\n"," D_real_features = features_list[0]\n"," D_fake_features = features_list[1]\n","\n"," logits_list = torch.split(logits, real_batch_size)\n"," D_real_logits = logits_list[0]\n"," D_fake_logits = logits_list[1]\n","\n"," probs_list = torch.split(probs, real_batch_size)\n"," D_real_probs = probs_list[0]\n"," D_fake_probs = probs_list[1]\n","\n"," # Loss evaluation\n"," g_loss_d = -1 * torch.mean(torch.log(1 - D_fake_probs[:, -1] + epsilon))\n"," g_feat_reg = torch.mean(torch.pow(torch.mean(D_real_features, dim=0) - torch.mean(D_fake_features, dim=0), 2))\n"," g_loss = g_loss_d + g_feat_reg\n","\n"," logits = D_real_logits[:, 0:-1]\n"," log_probs = F.log_softmax(logits, dim=-1)\n"," label2one_hot = torch.nn.functional.one_hot(b_labels, len(label_list))\n"," per_example_loss = -torch.sum(label2one_hot * log_probs, dim=-1)\n"," per_example_loss = torch.masked_select(per_example_loss, b_label_mask.to(device))\n"," labeled_example_count = per_example_loss.type(torch.float32).numel()\n","\n"," D_L_Supervised = torch.div(torch.sum(per_example_loss.to(device)), labeled_example_count) if labeled_example_count > 0 else 0\n"," D_L_unsupervised1U = -1 * torch.mean(torch.log(1 - D_real_probs[:, -1] + epsilon))\n"," D_L_unsupervised2U = -1 * torch.mean(torch.log(D_fake_probs[:, -1] + epsilon))\n"," d_loss = D_L_Supervised + D_L_unsupervised1U + D_L_unsupervised2U\n","\n"," # Optimization\n"," gen_optimizer.zero_grad()\n"," dis_optimizer.zero_grad()\n"," g_loss.backward(retain_graph=True)\n"," d_loss.backward()\n","\n"," gen_optimizer.step()\n"," dis_optimizer.step()\n","\n"," tr_g_loss += g_loss.item()\n"," tr_d_loss += d_loss.item()\n","\n"," if apply_scheduler:\n"," scheduler_d.step()\n"," scheduler_g.step()\n","\n"," avg_train_loss_g = tr_g_loss / len(train_dataloader)\n"," avg_train_loss_d = tr_d_loss / len(train_dataloader)\n"," training_time = format_time(time.time() - t0)\n","\n"," print(f\" Average training loss generator: {avg_train_loss_g:.3f}\")\n"," print(f\" Average training loss discriminator: {avg_train_loss_d:.3f}\")\n"," print(f\" Training epoch took: {training_time}\")\n","\n"," # Testing loop\n"," print(\"Running Test...\")\n","\n"," t0 = time.time()\n"," transformer.eval()\n"," discriminator.eval()\n"," generator.eval()\n","\n"," total_test_accuracy = 0\n"," total_test_loss = 0\n"," all_preds = []\n"," all_labels_ids = []\n","\n"," #loss\n","
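 # Test-time loss: CrossEntropyLoss with ignore_index=-1 skips any target\n"," # equal to -1, so masked or padded entries would not contribute. A minimal\n"," # sanity check of that behaviour (hypothetical tensors, torch only):\n"," #   demo_loss = torch.nn.CrossEntropyLoss(ignore_index=-1)\n"," #   demo_logits = torch.zeros(3, 4) # 3 examples, 4 classes\n"," #   demo_labels = torch.tensor([0, -1, 2]) # the -1 entry is ignored\n"," #   demo_loss(demo_logits, demo_labels) # averages over the 2 real labels\n"," nll_loss = 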
torch.nn.CrossEntropyLoss(ignore_index=-1)\n","\n"," for batch in test_dataloader:\n"," b_embeddings = batch[0].to(device).float()\n"," b_labels = batch[1].to(device).long()\n"," b_label_masks = batch[2].to(device).bool()\n","\n"," #print(f\"b_embeddings shape: {b_embeddings.shape}\")\n"," #print(f\"b_labels shape: {b_labels.shape}\")\n"," #print(f\"b_label_mask shape: {b_label_mask.shape}\")\n"," #print(f\"b_lables: { b_labels}\")\n","\n"," with torch.no_grad():\n"," _, logits, probs = discriminator(b_embeddings)\n"," #filtered_logits = logits[:, 0:-1]\n"," filtered_logits = logits.squeeze(1)\n","\n"," print(f\"filtered_logits shape: {filtered_logits.shape}, b_labels shape: {b_labels.shape}\")\n","\n"," total_test_loss += nll_loss(filtered_logits, b_labels)\n","\n"," _, preds = torch.max(filtered_logits, 1)\n"," all_preds += preds.detach().cpu()\n"," all_labels_ids += b_labels.detach().cpu()\n","\n"," all_preds = torch.stack(all_preds).numpy()\n"," all_labels_ids = torch.stack(all_labels_ids).numpy()\n"," test_accuracy = np.sum(all_preds == all_labels_ids) / len(all_preds)\n"," print(f\" Accuracy: {test_accuracy:.3f}\")\n","\n"," avg_test_loss = total_test_loss / len(test_dataloader)\n"," avg_test_loss = avg_test_loss.item()\n"," test_time = format_time(time.time() - t0)\n","\n"," print(f\" Test Loss: {avg_test_loss:.3f}\")\n"," print(f\" Test took: {test_time}\")\n","\n"," # Record statistics\n"," training_stats.append({\n"," 'epoch': epoch_i + 1,\n"," 'Training Loss generator': avg_train_loss_g,\n"," 'Training Loss discriminator': avg_train_loss_d,\n"," 'Valid. Loss': avg_test_loss,\n"," 'Valid. Accur.': test_accuracy,\n"," 'Training Time': training_time,\n"," 'Test Time': test_time,\n"," 'Preds': all_preds,\n"," 'Labels': all_labels_ids\n"," })\n","\n"," last_pred = []\n"," last_label = []\n"," for stat in training_stats:\n"," last_pred = stat['Preds']\n"," last_label = stat['Labels']\n","\n"," print(\"\\nTraining complete!\")\n"," print(f\"Total training took {format_time(time.time() - total_t0)} (h:mm:ss)\")\n","\n"," with open(f'/content/drive/MyDrive/Master_Project_2024_JP/CSC502 Thomas Project/models/GANBERT/runs/{z}/SpaBert_training_stats_lab_yelp_100L_ns_400R.pkl', 'wb') as f:\n"," pickle.dump(training_stats, f)"],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":211},"id":"7USUweU1gauK","executionInfo":{"status":"error","timestamp":1726184121300,"user_tz":420,"elapsed":1003,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"845fe4c2-b464-497c-d271-29942633864f"},"execution_count":32,"outputs":[{"output_type":"error","ename":"NameError","evalue":"name 'train_labels' is not defined","traceback":["\u001b[0;31m---------------------------------------------------------------------------\u001b[0m","\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)","\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0mlabel_map\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m{\u001b[0m\u001b[0mlabel\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mi\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlabel\u001b[0m \u001b[0;32min\u001b[0m \u001b[0menumerate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlabel_list\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 5\u001b[0;31m \u001b[0mtrain_dataloader\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mgenerate_train_data_loader\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtrain_averaged_embeddings\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtrain_labels\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlabel_map\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtrain_label_masks\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdo_shuffle\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbalance_label_examples\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mapply_balance\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 6\u001b[0m \u001b[0mtest_dataloader\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mgenerate_test_data_loader\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0maveraged_embeddings\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtest_labels\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlabel_map\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtest_label_masks\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdo_shuffle\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mFalse\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbalance_label_examples\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mFalse\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 7\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;31mNameError\u001b[0m: name 'train_labels' is not defined"]}]},{"cell_type":"code","source":["##OUTDATED\n","for z in range(1, 3):\n"," label_map = {}\n"," for (i, label) in enumerate(label_list):\n"," label_map[label] = i\n"," #------------------------------\n"," # Load the train dataset\n"," #------------------------------\n"," train_examples = labeled_examples\n"," #The labeled (train) dataset is assigned with a mask set to True\n"," train_label_masks = np.ones(len(labeled_examples), dtype=bool)\n"," #If unlabel examples are available\n"," if unlabeled_examples:\n"," train_examples = train_examples + unlabeled_examples\n"," #The unlabeled (train) dataset is assigned with a mask set to False\n"," tmp_masks = np.zeros(len(unlabeled_examples), dtype=bool)\n"," train_label_masks = np.concatenate([train_label_masks,tmp_masks])\n","\n"," train_dataloader = generate_data_loader(train_examples, train_label_masks, label_map, do_shuffle = True, balance_label_examples = apply_balance)\n","\n"," #------------------------------\n"," # Load the test dataset\n"," #------------------------------\n"," #The labeled (test) dataset is assigned with a mask set to True\n"," test_label_masks = np.ones(len(test_examples), dtype=bool)\n"," #test_dataloader = generate_data_loader(test_examples, test_label_masks, label_map, do_shuffle = False, balance_label_examples = False)\n"," #test_label_masks = [example[1] for example in test_examples]\n","\n"," #test_dataloader = generate_test_data_loader(averaged_embeddings, test_label_masks, label_map, do_shuffle=False)\n"," test_dataloader = generate_test_data_loader(averaged_embeddings, test_labels, label_map, test_label_masks, do_shuffle=False, balance_label_examples=False)\n","\n","\n"," ###########################################################\n","\n","\n"," # The config file is required to get the dimension of the vector produced by\n"," # the underlying transformer\n"," config = AutoConfig.from_pretrained(model_name)\n"," hidden_size = int(config.hidden_size)\n"," # Define the number and width of hidden layers\n"," hidden_levels_g = [hidden_size for i in range(0, num_hidden_layers_g)]\n"," hidden_levels_d = [hidden_size for i in range(0, num_hidden_layers_d)]\n","\n"," #-------------------------------------------------\n"," 
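# Shapes at play (assuming the GAN-BERT reference classes, where the\n"," # Discriminator appends one extra \"fake\" column to its classifier head):\n"," #   Generator:     noise [batch, noise_size] -> gen_rep [batch, hidden_size]\n"," #   Discriminator: input [batch, hidden_size] -> logits [batch, num_labels + 1]\n"," # This is why the loop below slices logits[:,0:-1] for the supervised loss\n"," # and reads probs[:,-1] as the real/fake probability.\n"," #-------------------------------------------------\n"," 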
# Instantiate the Generator and Discriminator\n"," #-------------------------------------------------\n"," generator = Generator(noise_size=noise_size, output_size=hidden_size, hidden_sizes=hidden_levels_g, dropout_rate=out_dropout_rate)\n"," discriminator = Discriminator(input_size=hidden_size, hidden_sizes=hidden_levels_d, num_labels=len(label_list), dropout_rate=out_dropout_rate)\n","\n"," # Put everything in the GPU if available\n"," if torch.cuda.is_available():\n"," generator.cuda()\n"," discriminator.cuda()\n"," transformer.cuda()\n"," if multi_gpu:\n"," transformer = torch.nn.DataParallel(transformer)\n","\n"," # print(config)\n","\n","\n"," ###########################################################\n","\n","\n","\n"," training_stats = []\n","\n"," # Measure the total training time for the whole run.\n"," total_t0 = time.time()\n","\n"," #models parameters\n"," transformer_vars = [i for i in transformer.parameters()]\n"," d_vars = transformer_vars + [v for v in discriminator.parameters()]\n"," g_vars = [v for v in generator.parameters()]\n","\n"," #optimizer\n"," dis_optimizer = torch.optim.AdamW(d_vars, lr=learning_rate_discriminator)\n"," gen_optimizer = torch.optim.AdamW(g_vars, lr=learning_rate_generator)\n","\n"," #scheduler\n"," if apply_scheduler:\n"," num_train_examples = len(train_examples)\n"," num_train_steps = int(num_train_examples / batch_size * num_train_epochs)\n"," num_warmup_steps = int(num_train_steps * warmup_proportion)\n","\n"," scheduler_d = get_constant_schedule_with_warmup(dis_optimizer,\n"," num_warmup_steps = num_warmup_steps)\n"," scheduler_g = get_constant_schedule_with_warmup(gen_optimizer,\n"," num_warmup_steps = num_warmup_steps)\n","\n"," # For each epoch...\n"," for epoch_i in range(0, num_train_epochs):\n"," # ========================================\n"," # Training\n"," # ========================================\n"," # Perform one full pass over the training set.\n"," print(\"\")\n"," print('======== Epoch {:} / {:} ========'.format(epoch_i + 1, num_train_epochs))\n"," print('Training...')\n","\n"," # Measure how long the training epoch takes.\n"," t0 = time.time()\n","\n"," # Reset the total loss for this epoch.\n"," tr_g_loss = 0\n"," tr_d_loss = 0\n","\n"," # Put the model into training mode.\n"," transformer.train()\n"," generator.train()\n"," discriminator.train()\n","\n"," # For each batch of training data...\n"," for step, batch in enumerate(train_dataloader):\n","\n"," # Progress update every print_each_n_step batches.\n"," if step % print_each_n_step == 0 and not step == 0:\n"," # Calculate elapsed time in minutes.\n"," elapsed = format_time(time.time() - t0)\n","\n"," # Report progress.\n"," print(' Batch {:>5,} of {:>5,}. 
Elapsed: {:}.'.format(step, len(train_dataloader), elapsed))\n","\n"," # Unpack this training batch from our dataloader.\n"," b_input_ids = batch[0].to(device)\n"," b_input_mask = batch[1].to(device)\n"," b_labels = batch[2].to(device)\n"," b_label_mask = batch[3].to(device)\n","\n"," real_batch_size = b_input_ids.shape[0]\n","\n"," # Encode real data in the Transformer\n"," model_outputs = transformer(b_input_ids, attention_mask=b_input_mask)\n"," hidden_states = model_outputs[-1]\n","\n"," # Generate fake data that should have the same distribution as the ones\n"," # encoded by the transformer.\n"," # First, noisy inputs are fed to the Generator\n"," noise = torch.zeros(real_batch_size, noise_size, device=device).uniform_(0, 1)\n"," # Generate fake data\n"," gen_rep = generator(noise)\n","\n"," # Generate the output of the Discriminator for real and fake data.\n"," # First, we put together the output of the transformer and the generator\n"," discriminator_input = torch.cat([hidden_states, gen_rep], dim=0)\n"," # Then, we select the output of the discriminator\n"," features, logits, probs = discriminator(discriminator_input)\n","\n"," # Finally, we separate the discriminator's output for the real and fake\n"," # data\n"," features_list = torch.split(features, real_batch_size)\n"," D_real_features = features_list[0]\n"," D_fake_features = features_list[1]\n","\n"," logits_list = torch.split(logits, real_batch_size)\n"," D_real_logits = logits_list[0]\n"," D_fake_logits = logits_list[1]\n","\n"," probs_list = torch.split(probs, real_batch_size)\n"," D_real_probs = probs_list[0]\n"," D_fake_probs = probs_list[1]\n","\n"," #---------------------------------\n"," # LOSS evaluation\n"," #---------------------------------\n"," # Generator's LOSS estimation\n"," g_loss_d = -1 * torch.mean(torch.log(1 - D_fake_probs[:,-1] + epsilon))\n"," g_feat_reg = torch.mean(torch.pow(torch.mean(D_real_features, dim=0) - torch.mean(D_fake_features, dim=0), 2))\n"," g_loss = g_loss_d + g_feat_reg\n","\n"," # Discriminator's LOSS estimation\n"," logits = D_real_logits[:,0:-1]\n"," log_probs = F.log_softmax(logits, dim=-1)\n"," # The discriminator provides an output for labeled and unlabeled real data\n"," # so the loss evaluated for unlabeled data is ignored (masked)\n"," label2one_hot = torch.nn.functional.one_hot(b_labels, len(label_list))\n"," per_example_loss = -torch.sum(label2one_hot * log_probs, dim=-1)\n"," per_example_loss = torch.masked_select(per_example_loss, b_label_mask.to(device))\n"," labeled_example_count = per_example_loss.type(torch.float32).numel()\n","\n"," # It may be the case that a batch does not contain labeled examples,\n"," # so the \"supervised loss\" in this case is not evaluated\n"," if labeled_example_count == 0:\n"," D_L_Supervised = 0\n"," else:\n"," D_L_Supervised = torch.div(torch.sum(per_example_loss.to(device)), labeled_example_count)\n","\n"," D_L_unsupervised1U = -1 * torch.mean(torch.log(1 - D_real_probs[:, -1] + epsilon))\n"," D_L_unsupervised2U = -1 * torch.mean(torch.log(D_fake_probs[:, -1] + epsilon))\n"," d_loss = D_L_Supervised + D_L_unsupervised1U + D_L_unsupervised2U\n","\n"," #---------------------------------\n"," # OPTIMIZATION\n"," #---------------------------------\n"," # Avoid gradient accumulation\n"," gen_optimizer.zero_grad()\n"," dis_optimizer.zero_grad()\n","\n"," # Calculate weight updates\n"," # retain_graph=True is required since the underlying graph will be deleted after backward\n"," g_loss.backward(retain_graph=True)\n","
d_loss.backward()\n","\n"," # Apply modifications\n"," gen_optimizer.step()\n"," dis_optimizer.step()\n","\n"," # A detailed log of the individual losses\n"," #print(\"{0:.4f}\\t{1:.4f}\\t{2:.4f}\\t{3:.4f}\\t{4:.4f}\".\n"," # format(D_L_Supervised, D_L_unsupervised1U, D_L_unsupervised2U,\n"," # g_loss_d, g_feat_reg))\n","\n"," # Save the losses to print them later\n"," tr_g_loss += g_loss.item()\n"," tr_d_loss += d_loss.item()\n","\n"," # Update the learning rate with the scheduler\n"," if apply_scheduler:\n"," scheduler_d.step()\n"," scheduler_g.step()\n","\n"," # Calculate the average loss over all of the batches.\n"," avg_train_loss_g = tr_g_loss / len(train_dataloader)\n"," avg_train_loss_d = tr_d_loss / len(train_dataloader)\n","\n"," # Measure how long this epoch took.\n"," training_time = format_time(time.time() - t0)\n","\n"," print(\"\")\n"," print(\" Average training loss generator: {0:.3f}\".format(avg_train_loss_g))\n"," print(\" Average training loss discriminator: {0:.3f}\".format(avg_train_loss_d))\n"," print(\" Training epoch took: {:}\".format(training_time))\n","\n"," # ========================================\n"," # TEST ON THE EVALUATION DATASET\n"," # ========================================\n"," # After the completion of each training epoch, measure our performance on\n"," # our test set.\n"," print(\"\")\n"," print(\"Running Test...\")\n","\n"," t0 = time.time()\n","\n"," # Put the model in evaluation mode--the dropout layers behave differently\n"," # during evaluation.\n"," transformer.eval() #maybe redundant\n"," discriminator.eval()\n"," generator.eval()\n","\n"," # Tracking variables\n"," total_test_accuracy = 0\n","\n"," total_test_loss = 0\n"," nb_test_steps = 0\n","\n"," all_preds = []\n"," all_labels_ids = []\n","\n"," #loss\n"," nll_loss = torch.nn.CrossEntropyLoss(ignore_index=-1)\n","\n"," # Evaluate data for one epoch\n"," for batch in test_dataloader:\n","\n"," # Unpack this training batch from our dataloader.\n"," #b_input_ids = batch[0].to(device)\n"," #b_input_mask = batch[1].to(device)\n"," #b_labels = batch[2].to(device)\n","\n"," b_embeddings = batch[0].to(device).float() # embeddings from test_dataloader\n"," b_labels = batch[1].to(device).long() # labels from test_dataloader\n"," b_label_masks = batch[2].to(device).bool()\n","\n"," # Since embeddings are precomputed, we don't need to pass through transformer\n"," with torch.no_grad():\n"," # Directly pass the embeddings to the discriminator\n"," _, logits, probs = discriminator(b_embeddings)\n"," filtered_logits = logits[:,0:-1] # Exclude the last logit if it's the fake/real label\n"," total_test_loss += nll_loss(filtered_logits, b_labels)\n","\n"," # Accumulate the predictions and the input labels\n"," _, preds = torch.max(filtered_logits, 1)\n"," all_preds += preds.detach().cpu()\n"," all_labels_ids += b_labels.detach().cpu()\n","\n"," # Report the final accuracy for this validation run.\n"," all_preds = torch.stack(all_preds).numpy()\n"," all_labels_ids = torch.stack(all_labels_ids).numpy()\n"," test_accuracy = np.sum(all_preds == all_labels_ids) / len(all_preds)\n"," print(\" Accuracy: {0:.3f}\".format(test_accuracy))\n","\n"," # Calculate the average loss over all of the batches.\n"," avg_test_loss = total_test_loss / len(test_dataloader)\n"," avg_test_loss = avg_test_loss.item()\n","\n"," # Measure how long the validation run took.\n"," test_time = format_time(time.time() - t0)\n","\n"," print(\" Test Loss: {0:.3f}\".format(avg_test_loss))\n","
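 # Recap of the accuracy computed above: predictions and gold labels are\n"," # accumulated per batch, stacked, and compared elementwise. A tiny worked\n"," # example (hypothetical values, numpy only):\n"," #   preds = np.array([1, 0, 2]); gold = np.array([1, 1, 2])\n"," #   np.sum(preds == gold) / len(preds) # -> 2/3 = 0.667\n"," print(\" Test took: 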
{:}\".format(test_time))\n","\n"," # Record all statistics from this epoch.\n"," training_stats.append(\n"," {\n"," 'epoch': epoch_i + 1,\n"," 'Training Loss generator': avg_train_loss_g,\n"," 'Training Loss discriminator': avg_train_loss_d,\n"," 'Valid. Loss': avg_test_loss,\n"," 'Valid. Accur.': test_accuracy,\n"," 'Training Time': training_time,\n"," 'Test Time': test_time,\n"," 'Preds': all_preds,\n"," 'Labels': all_labels_ids\n"," }\n"," )\n","\n"," last_pred = []\n"," last_label = []\n"," for stat in training_stats:\n"," last_pred = stat['Preds']\n"," last_label = stat['Labels']\n","\n","\n"," print(\"\\nTraining complete!\")\n","\n"," print(\"Total training took {:} (h:mm:ss)\".format(format_time(time.time()-total_t0)))\n","\n"," with open('/content/drive/MyDrive/Master_Project_2024_JP/CSC502 Thomas Project/models/GANBERT/runs/'+ str(z) +'/SpaBert_training_stats_lab_yelp_100L_ns_400R.pkl', 'wb') as f:\n"," pickle.dump(training_stats, f)"],"metadata":{"collapsed":true,"id":"3J9qu2l5lduy","colab":{"base_uri":"https://localhost:8080/","height":211},"executionInfo":{"status":"error","timestamp":1724997854810,"user_tz":420,"elapsed":384,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"02d7dfd6-8846-4133-8870-8d75b70cc36c"},"execution_count":null,"outputs":[{"output_type":"error","ename":"NameError","evalue":"name 'generate_data_loader' is not defined","traceback":["\u001b[0;31m---------------------------------------------------------------------------\u001b[0m","\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)","\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 16\u001b[0m \u001b[0mtrain_label_masks\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mconcatenate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mtrain_label_masks\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mtmp_masks\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 17\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 18\u001b[0;31m \u001b[0mtrain_dataloader\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mgenerate_data_loader\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtrain_examples\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtrain_label_masks\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlabel_map\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdo_shuffle\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mTrue\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbalance_label_examples\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mapply_balance\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 19\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 20\u001b[0m \u001b[0;31m#------------------------------\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;31mNameError\u001b[0m: name 'generate_data_loader' is not defined"]}]},{"cell_type":"code","source":["for z in range(1, 3):\n"," label_map = {}\n"," for (i, label) in enumerate(label_list):\n"," label_map[label] = i\n","\n"," #------------------------------\n"," # Load the train dataset\n"," #------------------------------\n"," train_embeddings = labeled_embeddings + unlabled_embeddings\n"," # The labeled (train) dataset is assigned with a mask set to True\n"," train_label_masks = np.ones(len(labeled_embeddings), dtype=bool)\n"," # If unlabeled examples are available\n"," if unlabeled_examples:\n"," # The unlabeled (train) dataset is assigned with a mask set to 
False\n"," tmp_masks = np.zeros(len(unlabled_embeddings), dtype=bool)\n"," train_label_masks = np.concatenate([train_label_masks, tmp_masks])\n","\n"," # Create labels for the combined dataset because our data is now embeddings only and not the reviews.\n"," train_labels = [example[1] for example in labeled_examples] + [example[1] for example in unlabeled_examples]\n","\n"," # Generate the train data loader using the modified generate_data_loader function\n"," #NOTE: New dataloader because we are sending embeddings instead of the reviews.\n"," train_dataloader = generate_train_data_loader(train_embeddings, train_labels, label_map, train_label_masks, do_shuffle=True, balance_label_examples=apply_balance)\n","\n"," #------------------------------\n"," # Load the test dataset\n"," #------------------------------\n"," #The labeled (test) dataset is assigned with a mask set to True\n"," test_label_masks = np.ones(len(test_examples), dtype=bool)\n","\n"," # Create labels for the test dataset because our data is now embeddings only and not the reviews.\n"," test_labels = [example[1] for example in test_examples]\n"," #NOTE: New dataloader because we are sending embeddings instead of the reviews.\n"," test_dataloader = generate_test_data_loader(averaged_embeddings, test_labels, label_map, test_label_masks, do_shuffle=False, balance_label_examples=False)\n","\n"," ###########################################################\n","\n"," # The config file is required to get the dimension of the vector produced by\n"," # the underlying transformer\n"," config = AutoConfig.from_pretrained(model_name)\n"," hidden_size = int(config.hidden_size)\n"," # Define the number and width of hidden layers\n"," hidden_levels_g = [hidden_size for _ in range(num_hidden_layers_g)]\n"," hidden_levels_d = [hidden_size for _ in range(num_hidden_layers_d)]\n","\n"," #-------------------------------------------------\n"," # Instantiate the Generator and Discriminator\n"," #-------------------------------------------------\n"," generator = Generator(noise_size=noise_size, output_size=hidden_size, hidden_sizes=hidden_levels_g, dropout_rate=out_dropout_rate).to(device)\n"," discriminator = Discriminator(input_size=hidden_size, hidden_sizes=hidden_levels_d, num_labels=len(label_list), dropout_rate=out_dropout_rate).to(device)\n"," transformer = transformer.to(device)\n","\n"," # Put everything in the GPU if available\n"," if multi_gpu and torch.cuda.device_count() > 1:\n"," transformer = torch.nn.DataParallel(transformer)\n","\n"," ###########################################################\n","\n"," training_stats = []\n","\n"," # Measure the total training time for the whole run.\n"," total_t0 = time.time()\n","\n"," # Model parameters\n"," transformer_vars = list(transformer.parameters())\n"," d_vars = transformer_vars + list(discriminator.parameters())\n"," g_vars = list(generator.parameters())\n","\n"," # Optimizer\n"," dis_optimizer = torch.optim.AdamW(d_vars, lr=learning_rate_discriminator)\n"," gen_optimizer = torch.optim.AdamW(g_vars, lr=learning_rate_generator)\n","\n"," # Scheduler\n"," if apply_scheduler:\n"," num_train_examples = len(train_examples)\n"," num_train_steps = int(num_train_examples / batch_size * num_train_epochs)\n"," num_warmup_steps = int(num_train_steps * warmup_proportion)\n","\n"," scheduler_d = get_constant_schedule_with_warmup(dis_optimizer, num_warmup_steps=num_warmup_steps)\n"," scheduler_g = get_constant_schedule_with_warmup(gen_optimizer, num_warmup_steps=num_warmup_steps)\n","\n"," # For 
each epoch...\n"," for epoch_i in range(0, num_train_epochs):\n"," # ========================================\n"," # Training\n"," # ========================================\n"," # Perform one full pass over the training set.\n"," print(f'\\n======== Epoch {epoch_i + 1} / {num_train_epochs} ========')\n"," print('Training...')\n","\n"," # Measure how long the training epoch takes.\n"," t0 = time.time()\n","\n"," # Reset the total loss for this epoch.\n"," tr_g_loss = 0\n"," tr_d_loss = 0\n","\n"," # Put the model into training mode.\n"," transformer.train()\n"," generator.train()\n"," discriminator.train()\n","\n"," # For each batch of training data...\n"," for step, batch in enumerate(train_dataloader):\n","\n"," # Progress update every print_each_n_step batches.\n"," if step % print_each_n_step == 0 and not step == 0:\n"," # Calculate elapsed time in minutes.\n"," elapsed = format_time(time.time() - t0)\n","\n"," # Report progress.\n"," print(f' Batch {step} of {len(train_dataloader)}. Elapsed: {elapsed}.')\n","\n"," # Unpack the batch from the dataloader\n"," b_embeddings = batch[0].to(device).float()\n"," b_input_mask = batch[1].to(device).bool()\n"," b_labels = batch[2].to(device).long()\n"," b_label_mask = batch[3].to(device).bool()\n","\n"," real_batch_size = b_embeddings.shape[0]\n","\n"," # Encode real data in the Transformer\n"," #model_outputs = transformer(inputs_embeds=b_embeddings, attention_mask=b_input_mask)\n"," #hidden_states = model_outputs[-1]\n","\n"," # Generate fake data\n"," noise = torch.zeros(real_batch_size, noise_size, device=device).uniform_(0, 1)\n"," gen_rep = generator(noise)\n","\n"," # Discriminator output\n"," discriminator_input = torch.cat([b_embeddings, gen_rep], dim=0)\n"," features, logits, probs = discriminator(discriminator_input)\n","\n"," features_list = torch.split(features, real_batch_size)\n"," D_real_features = features_list[0]\n"," D_fake_features = features_list[1]\n","\n"," logits_list = torch.split(logits, real_batch_size)\n"," D_real_logits = logits_list[0]\n"," D_fake_logits = logits_list[1]\n","\n"," probs_list = torch.split(probs, real_batch_size)\n"," D_real_probs = probs_list[0]\n"," D_fake_probs = probs_list[1]\n","\n"," # Loss evaluation\n"," g_loss_d = -1 * torch.mean(torch.log(1 - D_fake_probs[:, -1] + epsilon))\n"," g_feat_reg = torch.mean(torch.pow(torch.mean(D_real_features, dim=0) - torch.mean(D_fake_features, dim=0), 2))\n"," g_loss = g_loss_d + g_feat_reg\n","\n"," logits = D_real_logits[:, :-1]\n"," log_probs = F.log_softmax(logits, dim=-1)\n"," label2one_hot = torch.nn.functional.one_hot(b_labels, len(label_list))\n"," per_example_loss = -torch.sum(label2one_hot * log_probs, dim=-1)\n"," per_example_loss = torch.masked_select(per_example_loss, b_label_mask.to(device))\n"," labeled_example_count = per_example_loss.type(torch.float32).numel()\n","\n"," D_L_Supervised = torch.div(torch.sum(per_example_loss.to(device)), labeled_example_count) if labeled_example_count > 0 else 0\n"," D_L_unsupervised1U = -1 * torch.mean(torch.log(1 - D_real_probs[:, -1] + epsilon))\n"," D_L_unsupervised2U = -1 * torch.mean(torch.log(D_fake_probs[:, -1] + epsilon))\n"," d_loss = D_L_Supervised + D_L_unsupervised1U + D_L_unsupervised2U\n","\n"," # Optimization\n"," gen_optimizer.zero_grad()\n"," dis_optimizer.zero_grad()\n"," g_loss.backward(retain_graph=True)\n"," d_loss.backward()\n","\n"," gen_optimizer.step()\n"," dis_optimizer.step()\n","\n"," tr_g_loss += g_loss.item()\n"," tr_d_loss += d_loss.item()\n","\n"," if apply_scheduler:\n"," 
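# Both schedulers step once per batch (not per epoch), matching how\n"," # num_train_steps was computed above: with get_constant_schedule_with_warmup\n"," # the learning rate ramps up for num_warmup_steps batches, then holds constant.\n"," 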
scheduler_d.step()\n"," scheduler_g.step()\n","\n"," torch.cuda.empty_cache() # Clear CUDA cache\n","\n"," avg_train_loss_g = tr_g_loss / len(train_dataloader)\n"," avg_train_loss_d = tr_d_loss / len(train_dataloader)\n"," training_time = format_time(time.time() - t0)\n","\n"," print(f\" Average training loss generator: {avg_train_loss_g:.3f}\")\n"," print(f\" Average training loss discriminator: {avg_train_loss_d:.3f}\")\n"," print(f\" Training epoch took: {training_time}\")\n","\n"," # ========================================\n"," # TEST ON THE EVALUATION DATASET\n"," # ========================================\n"," # After the completion of each training epoch, measure our performance on\n"," # our test set.\n"," print(\"Running Test...\")\n","\n"," t0 = time.time()\n","\n"," # Put the model in evaluation mode--the dropout layers behave differently\n"," # during evaluation.\n"," transformer.eval()\n"," discriminator.eval()\n"," generator.eval()\n","\n"," # Tracking variables\n"," total_test_accuracy = 0\n"," total_test_loss = 0\n"," all_preds = []\n"," all_labels_ids = []\n","\n"," #loss\n"," nll_loss = torch.nn.CrossEntropyLoss(ignore_index=-1)\n","\n"," # Evaluate data for one epoch\n"," for batch in test_dataloader:\n"," # Unpack this training batch from our dataloader.\n"," b_embeddings = batch[0].to(device).float()\n"," b_input_mask = batch[1].to(device)\n"," b_labels = batch[2].to(device).long()\n"," #b_label_masks = batch[2].to(device).bool()\n","\n"," with torch.no_grad():\n"," #model_outputs = transformer(inputs_embeds=b_embeddings, attention_mask=b_input_mask)\n"," #hidden_states = model_outputs.last_hidden_state # Shape: [batch_size, seq_length, hidden_size]\n","\n"," _, logits, probs = discriminator(b_embeddings)\n"," filtered_logits = logits.squeeze(1)\n","\n"," print(f\"filtered_logits shape: {filtered_logits.shape}, b_labels shape: {b_labels.shape}\")\n","\n"," total_test_loss += nll_loss(filtered_logits, b_labels)\n","\n"," _, preds = torch.max(filtered_logits, 1)\n"," all_preds += preds.detach().cpu()\n"," all_labels_ids += b_labels.detach().cpu()\n","\n"," all_preds = torch.stack(all_preds).numpy()\n"," all_labels_ids = torch.stack(all_labels_ids).numpy()\n"," test_accuracy = np.sum(all_preds == all_labels_ids) / len(all_preds)\n"," print(f\" Accuracy: {test_accuracy:.3f}\")\n","\n"," avg_test_loss = total_test_loss / len(test_dataloader)\n"," avg_test_loss = avg_test_loss.item()\n"," test_time = format_time(time.time() - t0)\n","\n"," print(f\" Test Loss: {avg_test_loss:.3f}\")\n"," print(f\" Test took: {test_time}\")\n","\n"," # Record statistics\n"," training_stats.append({\n"," 'epoch': epoch_i + 1,\n"," 'Training Loss generator': avg_train_loss_g,\n"," 'Training Loss discriminator': avg_train_loss_d,\n"," 'Valid. Loss': avg_test_loss,\n"," 'Valid. 
Accur.': test_accuracy,\n"," 'Training Time': training_time,\n"," 'Test Time': test_time,\n"," 'Preds': all_preds,\n"," 'Labels': all_labels_ids\n"," })\n","\n"," last_pred = []\n"," last_label = []\n"," for stat in training_stats:\n"," last_pred = stat['Preds']\n"," last_label = stat['Labels']\n","\n"," print(\"\\nTraining complete!\")\n"," print(f\"Total training took {format_time(time.time() - total_t0)} (h:mm:ss)\")\n","\n"," with open(f'/content/drive/MyDrive/Master_Project_2024_JP/CSC502 Thomas Project/models/GANBERT/runs/{z}/SpaBert_training_stats_lab_yelp_100L_ns_400R.pkl', 'wb') as f:\n"," pickle.dump(training_stats, f)"],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":801},"id":"miU4GShmHE_d","executionInfo":{"status":"error","timestamp":1725596523332,"user_tz":420,"elapsed":272,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"f525ab81-f910-44c6-e1b6-9ffc3b24fb99"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stderr","text":[":51: DeprecationWarning: In future, it will be an error for 'np.bool_' scalars to be interpreted as an index\n"," label_mask_array = torch.tensor(label_mask_array, dtype=torch.bool) # Label masks\n",":57: DeprecationWarning: In future, it will be an error for 'np.bool_' scalars to be interpreted as an index\n"," label_mask_array = torch.tensor(label_mask_array, dtype=torch.bool) # Label masks\n","loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--bert-base-uncased/snapshots/86b5e0934494bd15c9632b12f734a8a67f723594/config.json\n","Model config BertConfig {\n"," \"_name_or_path\": \"bert-base-uncased\",\n"," \"architectures\": [\n"," \"BertForMaskedLM\"\n"," ],\n"," \"attention_probs_dropout_prob\": 0.1,\n"," \"classifier_dropout\": null,\n"," \"gradient_checkpointing\": false,\n"," \"hidden_act\": \"gelu\",\n"," \"hidden_dropout_prob\": 0.1,\n"," \"hidden_size\": 768,\n"," \"initializer_range\": 0.02,\n"," \"intermediate_size\": 3072,\n"," \"layer_norm_eps\": 1e-12,\n"," \"max_position_embeddings\": 512,\n"," \"model_type\": \"bert\",\n"," \"num_attention_heads\": 12,\n"," \"num_hidden_layers\": 12,\n"," \"pad_token_id\": 0,\n"," \"position_embedding_type\": \"absolute\",\n"," \"transformers_version\": \"4.44.2\",\n"," \"type_vocab_size\": 2,\n"," \"use_cache\": true,\n"," \"vocab_size\": 30522\n","}\n","\n"]},{"output_type":"stream","name":"stdout","text":["\n","======== Epoch 1 / 10 ========\n","Training...\n"]},{"output_type":"error","ename":"RuntimeError","evalue":"Tensors must have same number of dimensions: got 3 and 2","traceback":["\u001b[0;31m---------------------------------------------------------------------------\u001b[0m","\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)","\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 129\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 130\u001b[0m \u001b[0;31m# Discriminator output\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 131\u001b[0;31m \u001b[0mdiscriminator_input\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcat\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mb_embeddings\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mgen_rep\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdim\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 132\u001b[0m 
\u001b[0mfeatures\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlogits\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mprobs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mdiscriminator\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdiscriminator_input\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 133\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;31mRuntimeError\u001b[0m: Tensors must have same number of dimensions: got 3 and 2"]}]},{"cell_type":"code","source":["for z in range(1, 3):\n"," label_map = {}\n"," for (i, label) in enumerate(label_list):\n"," label_map[label] = i\n","\n"," #------------------------------\n"," # Load the train dataset\n"," #------------------------------\n"," train_embeddings = labeled_embeddings + unlabled_embeddings\n"," # The labeled (train) dataset is assigned with a mask set to True\n"," train_label_masks = np.ones(len(labeled_embeddings), dtype=bool)\n"," # If unlabeled examples are available\n"," if unlabeled_examples:\n"," # The unlabeled (train) dataset is assigned with a mask set to False\n"," tmp_masks = np.zeros(len(unlabled_embeddings), dtype=bool)\n"," train_label_masks = np.concatenate([train_label_masks, tmp_masks])\n","\n"," # Create labels for the combined dataset because our data is now embeddings only and not the reviews.\n"," train_labels = [example[1] for example in labeled_examples] + [example[1] for example in unlabeled_examples]\n","\n"," # Generate the train data loader using the modified generate_train_data_loader function\n"," train_dataloader = generate_train_data_loader(train_embeddings, train_labels, label_map, train_label_masks, do_shuffle=True, balance_label_examples=apply_balance)\n","\n"," #------------------------------\n"," # Load the test dataset\n"," #------------------------------\n"," test_label_masks = np.ones(len(test_examples), dtype=bool)\n","\n"," # Create labels for the test dataset because our data is now embeddings only and not the reviews.\n"," test_labels = [example[1] for example in test_examples]\n","\n"," # Generate the test data loader using the modified generate_test_data_loader function\n"," test_dataloader = generate_test_data_loader(averaged_embeddings, test_labels, label_map, test_label_masks, do_shuffle=False, balance_label_examples=False)\n","\n"," ###########################################################\n","\n"," # The config file is required to get the dimension of the vector produced by the underlying transformer\n"," config = AutoConfig.from_pretrained(model_name)\n"," hidden_size = int(config.hidden_size)\n"," # Define the number and width of hidden layers\n"," hidden_levels_g = [hidden_size for _ in range(num_hidden_layers_g)]\n"," hidden_levels_d = [hidden_size for _ in range(num_hidden_layers_d)]\n","\n"," #-------------------------------------------------\n"," # Instantiate the Generator and Discriminator\n"," #-------------------------------------------------\n"," generator = Generator(noise_size=noise_size, output_size=hidden_size, hidden_sizes=hidden_levels_g, dropout_rate=out_dropout_rate).to(device)\n"," discriminator = Discriminator(input_size=hidden_size, hidden_sizes=hidden_levels_d, num_labels=len(label_list), dropout_rate=out_dropout_rate).to(device)\n"," transformer = transformer.to(device)\n","\n"," # If using multiple GPUs\n"," if multi_gpu and torch.cuda.device_count() > 1:\n"," transformer = torch.nn.DataParallel(transformer)\n","\n"," ###########################################################\n","\n"," training_stats = []\n"," total_t0 = 
time.time() # Total training time\n","\n"," # Model parameters\n"," transformer_vars = list(transformer.parameters())\n"," d_vars = transformer_vars + list(discriminator.parameters())\n"," g_vars = list(generator.parameters())\n","\n"," # Optimizer\n"," dis_optimizer = torch.optim.AdamW(d_vars, lr=learning_rate_discriminator)\n"," gen_optimizer = torch.optim.AdamW(g_vars, lr=learning_rate_generator)\n","\n"," # Scheduler\n"," if apply_scheduler:\n"," num_train_examples = len(train_embeddings)\n"," num_train_steps = int(num_train_examples / batch_size * num_train_epochs)\n"," num_warmup_steps = int(num_train_steps * warmup_proportion)\n","\n"," scheduler_d = get_constant_schedule_with_warmup(dis_optimizer, num_warmup_steps=num_warmup_steps)\n"," scheduler_g = get_constant_schedule_with_warmup(gen_optimizer, num_warmup_steps=num_warmup_steps)\n","\n"," # For each epoch...\n"," for epoch_i in range(0, num_train_epochs):\n"," # ========================================\n"," # Training\n"," # ========================================\n"," print(f'\\n======== Epoch {epoch_i + 1} / {num_train_epochs} ========')\n"," print('Training...')\n","\n"," t0 = time.time() # Track time for this epoch\n","\n"," tr_g_loss = 0\n"," tr_d_loss = 0\n","\n"," # Set the models to training mode\n"," transformer.train()\n"," generator.train()\n"," discriminator.train()\n","\n"," for step, batch in enumerate(train_dataloader):\n"," if step % print_each_n_step == 0 and not step == 0:\n"," elapsed = format_time(time.time() - t0)\n"," print(f' Batch {step} of {len(train_dataloader)}. Elapsed: {elapsed}.')\n","\n"," # Unpack the batch from the dataloader\n"," b_embeddings = batch[0].to(device).float()\n"," b_labels = batch[1].to(device).long()\n"," b_label_mask = batch[2].to(device).bool()\n","\n"," real_batch_size = b_embeddings.shape[0]\n","\n"," # Generate fake data\n"," noise = torch.zeros(real_batch_size, noise_size, device=device).uniform_(0, 1)\n"," gen_rep = generator(noise)\n","\n"," # Discriminator output\n"," discriminator_input = torch.cat([b_embeddings, gen_rep], dim=0)\n"," features, logits, probs = discriminator(discriminator_input)\n","\n"," features_list = torch.split(features, real_batch_size)\n"," D_real_features = features_list[0]\n"," D_fake_features = features_list[1]\n","\n"," logits_list = torch.split(logits, real_batch_size)\n"," D_real_logits = logits_list[0]\n"," D_fake_logits = logits_list[1]\n","\n"," probs_list = torch.split(probs, real_batch_size)\n"," D_real_probs = probs_list[0]\n"," D_fake_probs = probs_list[1]\n","\n"," # Loss evaluation\n"," g_loss_d = -1 * torch.mean(torch.log(1 - D_fake_probs[:, -1] + epsilon))\n"," g_feat_reg = torch.mean(torch.pow(torch.mean(D_real_features, dim=0) - torch.mean(D_fake_features, dim=0), 2))\n"," g_loss = g_loss_d + g_feat_reg\n","\n"," logits = D_real_logits[:, :-1]\n"," log_probs = F.log_softmax(logits, dim=-1)\n"," label2one_hot = torch.nn.functional.one_hot(b_labels, len(label_list))\n"," per_example_loss = -torch.sum(label2one_hot * log_probs, dim=-1)\n"," per_example_loss = torch.masked_select(per_example_loss, b_label_mask.to(device))\n"," labeled_example_count = per_example_loss.type(torch.float32).numel()\n","\n"," D_L_Supervised = torch.div(torch.sum(per_example_loss.to(device)), labeled_example_count) if labeled_example_count > 0 else 0\n"," D_L_unsupervised1U = -1 * torch.mean(torch.log(1 - D_real_probs[:, -1] + epsilon))\n"," D_L_unsupervised2U = -1 * torch.mean(torch.log(D_fake_probs[:, -1] + epsilon))\n"," d_loss = D_L_Supervised + 
D_L_unsupervised1U + D_L_unsupervised2U\n","\n"," # Optimization\n"," gen_optimizer.zero_grad()\n"," dis_optimizer.zero_grad()\n"," g_loss.backward(retain_graph=True)\n"," d_loss.backward()\n","\n"," gen_optimizer.step()\n"," dis_optimizer.step()\n","\n"," tr_g_loss += g_loss.item()\n"," tr_d_loss += d_loss.item()\n","\n"," if apply_scheduler:\n"," scheduler_d.step()\n"," scheduler_g.step()\n","\n"," torch.cuda.empty_cache() # Clear CUDA cache\n","\n"," avg_train_loss_g = tr_g_loss / len(train_dataloader)\n"," avg_train_loss_d = tr_d_loss / len(train_dataloader)\n"," training_time = format_time(time.time() - t0)\n","\n"," print(f\" Average training loss generator: {avg_train_loss_g:.3f}\")\n"," print(f\" Average training loss discriminator: {avg_train_loss_d:.3f}\")\n"," print(f\" Training epoch took: {training_time}\")\n","\n"," # ========================================\n"," # TEST ON THE EVALUATION DATASET\n"," # ========================================\n"," print(\"Running Test...\")\n","\n"," t0 = time.time() # Track test time\n","\n"," # Set the models to evaluation mode\n"," transformer.eval()\n"," discriminator.eval()\n"," generator.eval()\n","\n"," total_test_loss = 0\n"," all_preds = []\n"," all_labels_ids = []\n","\n"," nll_loss = torch.nn.CrossEntropyLoss(ignore_index=-1)\n","\n"," for batch in test_dataloader:\n"," b_embeddings = batch[0].to(device).float()\n"," b_labels = batch[1].to(device).long()\n","\n"," with torch.no_grad():\n"," _, logits, probs = discriminator(b_embeddings)\n"," filtered_logits = logits.squeeze(1)\n","\n"," print(f\"filtered_logits shape: {filtered_logits.shape}, b_labels shape: {b_labels.shape}\")\n","\n"," total_test_loss += nll_loss(filtered_logits, b_labels)\n","\n"," _, preds = torch.max(filtered_logits, 1)\n"," all_preds += preds.detach().cpu()\n"," all_labels_ids += b_labels.detach().cpu()\n","\n"," all_preds = torch.stack(all_preds).numpy()\n"," all_labels_ids = torch.stack(all_labels_ids).numpy()\n"," test_accuracy = np.sum(all_preds == all_labels_ids) / len(all_preds)\n"," print(f\" Accuracy: {test_accuracy:.3f}\")\n","\n"," avg_test_loss = total_test_loss / len(test_dataloader)\n"," avg_test_loss = avg_test_loss.item()\n"," test_time = format_time(time.time() - t0)\n","\n"," print(f\" Test Loss: {avg_test_loss:.3f}\")\n"," print(f\" Test took: {test_time}\")\n","\n"," # Record statistics\n"," training_stats.append({\n"," 'epoch': epoch_i + 1,\n"," 'Training Loss generator': avg_train_loss_g,\n"," 'Training Loss discriminator': avg_train_loss_d,\n"," 'Valid. Loss': avg_test_loss,\n"," 'Valid. 
Accur.': test_accuracy,\n"," 'Training Time': training_time,\n"," 'Test Time': test_time,\n"," 'Preds': all_preds,\n"," 'Labels': all_labels_ids\n"," })\n","\n"," last_pred = []\n"," last_label = []\n"," for stat in training_stats:\n"," last_pred = stat['Preds']\n"," last_label = stat['Labels']\n","\n"," print(\"\\nTraining complete!\")\n"," print(f\"Total training took {format_time(time.time() - total_t0)} (h:mm:ss)\")\n","\n"," with open(f'/content/drive/MyDrive/Master_Project_2024_JP/CSC502 Thomas Project/models/GANBERT/runs/{z}/SpaBert_training_stats_lab_yelp_100L_ns_400R.pkl', 'wb') as f:\n"," pickle.dump(training_stats, f)\n"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"uw9_4a0OqsGY","executionInfo":{"status":"ok","timestamp":1725600591297,"user_tz":420,"elapsed":2232,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"e7174f4a-c8bd-4c00-e1b3-5c8fcb072469"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stderr","text":[":47: DeprecationWarning: In future, it will be an error for 'np.bool_' scalars to be interpreted as an index\n"," label_mask_array = torch.tensor(label_mask_array, dtype=torch.bool) # Label masks\n",":46: DeprecationWarning: In future, it will be an error for 'np.bool_' scalars to be interpreted as an index\n"," label_mask_array = torch.tensor(label_mask_array, dtype=torch.bool) # Shape: [batch_size]\n","loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--bert-base-uncased/snapshots/86b5e0934494bd15c9632b12f734a8a67f723594/config.json\n","Model config BertConfig {\n"," \"_name_or_path\": \"bert-base-uncased\",\n"," \"architectures\": [\n"," \"BertForMaskedLM\"\n"," ],\n"," \"attention_probs_dropout_prob\": 0.1,\n"," \"classifier_dropout\": null,\n"," \"gradient_checkpointing\": false,\n"," \"hidden_act\": \"gelu\",\n"," \"hidden_dropout_prob\": 0.1,\n"," \"hidden_size\": 768,\n"," \"initializer_range\": 0.02,\n"," \"intermediate_size\": 3072,\n"," \"layer_norm_eps\": 1e-12,\n"," \"max_position_embeddings\": 512,\n"," \"model_type\": \"bert\",\n"," \"num_attention_heads\": 12,\n"," \"num_hidden_layers\": 12,\n"," \"pad_token_id\": 0,\n"," \"position_embedding_type\": \"absolute\",\n"," \"transformers_version\": \"4.44.2\",\n"," \"type_vocab_size\": 2,\n"," \"use_cache\": true,\n"," \"vocab_size\": 30522\n","}\n","\n"]},{"output_type":"stream","name":"stdout","text":["\n","======== Epoch 1 / 10 ========\n","Training...\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n"," Average training loss generator: 0.405\n"," Average training loss discriminator: 2.488\n"," Training epoch took: 0:00:00\n","Running Test...\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([32, 768])\n","filtered_logits shape: torch.Size([32, 4]), b_labels shape: torch.Size([32])\n"," Accuracy: 0.500\n"," Test Loss: 1.104\n"," Test took: 0:00:00\n","\n","======== Epoch 2 / 10 ========\n","Training...\n","Input shape: torch.Size([128, 
768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n"," Average training loss generator: 0.607\n"," Average training loss discriminator: 1.866\n"," Training epoch took: 0:00:00\n","Running Test...\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([32, 768])\n","filtered_logits shape: torch.Size([32, 4]), b_labels shape: torch.Size([32])\n"," Accuracy: 0.600\n"," Test Loss: 0.926\n"," Test took: 0:00:00\n","\n","======== Epoch 3 / 10 ========\n","Training...\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n"," Average training loss generator: 0.794\n"," Average training loss discriminator: 1.505\n"," Training epoch took: 0:00:00\n","Running Test...\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([32, 768])\n","filtered_logits shape: torch.Size([32, 4]), b_labels shape: torch.Size([32])\n"," Accuracy: 0.594\n"," Test Loss: 0.828\n"," Test took: 0:00:00\n","\n","======== Epoch 4 / 10 ========\n","Training...\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n"," Average training loss generator: 0.748\n"," Average training loss discriminator: 1.463\n"," Training epoch took: 0:00:00\n","Running Test...\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([32, 768])\n","filtered_logits shape: torch.Size([32, 4]), b_labels shape: torch.Size([32])\n"," Accuracy: 0.606\n"," Test Loss: 0.773\n"," Test took: 0:00:00\n","\n","======== Epoch 5 / 10 ========\n","Training...\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n"," Average training loss generator: 0.691\n"," Average training loss discriminator: 1.475\n"," 
Training epoch took: 0:00:00\n","Running Test...\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([32, 768])\n","filtered_logits shape: torch.Size([32, 4]), b_labels shape: torch.Size([32])\n"," Accuracy: 0.550\n"," Test Loss: 0.744\n"," Test took: 0:00:00\n","\n","======== Epoch 6 / 10 ========\n","Training...\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n"," Average training loss generator: 0.735\n"," Average training loss discriminator: 1.383\n"," Training epoch took: 0:00:00\n","Running Test...\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([32, 768])\n","filtered_logits shape: torch.Size([32, 4]), b_labels shape: torch.Size([32])\n"," Accuracy: 0.594\n"," Test Loss: 0.732\n"," Test took: 0:00:00\n","\n","======== Epoch 7 / 10 ========\n","Training...\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n"," Average training loss generator: 0.732\n"," Average training loss discriminator: 1.364\n"," Training epoch took: 0:00:00\n","Running Test...\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([32, 768])\n","filtered_logits shape: torch.Size([32, 4]), b_labels shape: torch.Size([32])\n"," Accuracy: 0.537\n"," Test Loss: 0.720\n"," Test took: 0:00:00\n","\n","======== Epoch 8 / 10 ========\n","Training...\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n"," Average training loss generator: 0.718\n"," Average training loss discriminator: 1.334\n"," Training epoch took: 0:00:00\n","Running Test...\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([32, 768])\n","filtered_logits shape: torch.Size([32, 4]), b_labels shape: torch.Size([32])\n"," Accuracy: 0.544\n"," Test Loss: 0.713\n"," Test took: 
0:00:00\n","\n","======== Epoch 9 / 10 ========\n","Training...\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n"," Average training loss generator: 0.728\n"," Average training loss discriminator: 1.304\n"," Training epoch took: 0:00:00\n","Running Test...\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([32, 768])\n","filtered_logits shape: torch.Size([32, 4]), b_labels shape: torch.Size([32])\n"," Accuracy: 0.556\n"," Test Loss: 0.711\n"," Test took: 0:00:00\n","\n","======== Epoch 10 / 10 ========\n","Training...\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n"," Average training loss generator: 0.721\n"," Average training loss discriminator: 1.282\n"," Training epoch took: 0:00:00\n","Running Test...\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([32, 768])\n","filtered_logits shape: torch.Size([32, 4]), b_labels shape: torch.Size([32])\n"," Accuracy: 0.531\n"," Test Loss: 0.706\n"," Test took: 0:00:00\n","\n","Training complete!\n","Total training took 0:00:01 (h:mm:ss)\n"]},{"output_type":"stream","name":"stderr","text":["loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--bert-base-uncased/snapshots/86b5e0934494bd15c9632b12f734a8a67f723594/config.json\n","Model config BertConfig {\n"," \"_name_or_path\": \"bert-base-uncased\",\n"," \"architectures\": [\n"," \"BertForMaskedLM\"\n"," ],\n"," \"attention_probs_dropout_prob\": 0.1,\n"," \"classifier_dropout\": null,\n"," \"gradient_checkpointing\": false,\n"," \"hidden_act\": \"gelu\",\n"," \"hidden_dropout_prob\": 0.1,\n"," \"hidden_size\": 768,\n"," \"initializer_range\": 0.02,\n"," \"intermediate_size\": 3072,\n"," \"layer_norm_eps\": 1e-12,\n"," \"max_position_embeddings\": 512,\n"," \"model_type\": \"bert\",\n"," \"num_attention_heads\": 12,\n"," \"num_hidden_layers\": 12,\n"," \"pad_token_id\": 0,\n"," \"position_embedding_type\": \"absolute\",\n"," \"transformers_version\": \"4.44.2\",\n"," \"type_vocab_size\": 2,\n"," \"use_cache\": true,\n"," \"vocab_size\": 30522\n","}\n","\n"]},{"output_type":"stream","name":"stdout","text":["\n","======== Epoch 1 / 10 ========\n","Training...\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input 
shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n"," Average training loss generator: 0.414\n"," Average training loss discriminator: 2.374\n"," Training epoch took: 0:00:00\n","Running Test...\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([32, 768])\n","filtered_logits shape: torch.Size([32, 4]), b_labels shape: torch.Size([32])\n"," Accuracy: 0.506\n"," Test Loss: 1.048\n"," Test took: 0:00:00\n","\n","======== Epoch 2 / 10 ========\n","Training...\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n"," Average training loss generator: 0.651\n"," Average training loss discriminator: 1.760\n"," Training epoch took: 0:00:00\n","Running Test...\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([32, 768])\n","filtered_logits shape: torch.Size([32, 4]), b_labels shape: torch.Size([32])\n"," Accuracy: 0.550\n"," Test Loss: 0.889\n"," Test took: 0:00:00\n","\n","======== Epoch 3 / 10 ========\n","Training...\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n"," Average training loss generator: 0.812\n"," Average training loss discriminator: 1.453\n"," Training epoch took: 0:00:00\n","Running Test...\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([32, 768])\n","filtered_logits shape: torch.Size([32, 4]), b_labels shape: torch.Size([32])\n"," Accuracy: 0.594\n"," Test Loss: 0.807\n"," Test took: 0:00:00\n","\n","======== Epoch 4 / 10 ========\n","Training...\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n"," Average training loss generator: 0.721\n"," Average training loss discriminator: 1.462\n"," Training epoch took: 0:00:00\n","Running Test...\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), 
b_labels shape: torch.Size([64])\n","Input shape: torch.Size([32, 768])\n","filtered_logits shape: torch.Size([32, 4]), b_labels shape: torch.Size([32])\n"," Accuracy: 0.556\n"," Test Loss: 0.761\n"," Test took: 0:00:00\n","\n","======== Epoch 5 / 10 ========\n","Training...\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n"," Average training loss generator: 0.700\n"," Average training loss discriminator: 1.429\n"," Training epoch took: 0:00:00\n","Running Test...\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([32, 768])\n","filtered_logits shape: torch.Size([32, 4]), b_labels shape: torch.Size([32])\n"," Accuracy: 0.550\n"," Test Loss: 0.737\n"," Test took: 0:00:00\n","\n","======== Epoch 6 / 10 ========\n","Training...\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n"," Average training loss generator: 0.743\n"," Average training loss discriminator: 1.353\n"," Training epoch took: 0:00:00\n","Running Test...\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([32, 768])\n","filtered_logits shape: torch.Size([32, 4]), b_labels shape: torch.Size([32])\n"," Accuracy: 0.544\n"," Test Loss: 0.722\n"," Test took: 0:00:00\n","\n","======== Epoch 7 / 10 ========\n","Training...\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n"," Average training loss generator: 0.723\n"," Average training loss discriminator: 1.339\n"," Training epoch took: 0:00:00\n","Running Test...\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([32, 768])\n","filtered_logits shape: torch.Size([32, 4]), b_labels shape: torch.Size([32])\n"," Accuracy: 0.562\n"," Test Loss: 0.716\n"," Test took: 0:00:00\n","\n","======== Epoch 8 / 10 ========\n","Training...\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: 
torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n"," Average training loss generator: 0.725\n"," Average training loss discriminator: 1.318\n"," Training epoch took: 0:00:00\n","Running Test...\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([32, 768])\n","filtered_logits shape: torch.Size([32, 4]), b_labels shape: torch.Size([32])\n"," Accuracy: 0.525\n"," Test Loss: 0.709\n"," Test took: 0:00:00\n","\n","======== Epoch 9 / 10 ========\n","Training...\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n"," Average training loss generator: 0.723\n"," Average training loss discriminator: 1.295\n"," Training epoch took: 0:00:00\n","Running Test...\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([32, 768])\n","filtered_logits shape: torch.Size([32, 4]), b_labels shape: torch.Size([32])\n"," Accuracy: 0.581\n"," Test Loss: 0.708\n"," Test took: 0:00:00\n","\n","======== Epoch 10 / 10 ========\n","Training...\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n","Input shape: torch.Size([128, 768])\n"," Average training loss generator: 0.719\n"," Average training loss discriminator: 1.273\n"," Training epoch took: 0:00:00\n","Running Test...\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([64, 768])\n","filtered_logits shape: torch.Size([64, 4]), b_labels shape: torch.Size([64])\n","Input shape: torch.Size([32, 768])\n","filtered_logits shape: torch.Size([32, 4]), b_labels shape: torch.Size([32])\n"," Accuracy: 0.531\n"," Test Loss: 0.702\n"," Test took: 0:00:00\n","\n","Training complete!\n","Total training took 0:00:01 (h:mm:ss)\n"]}]},{"cell_type":"markdown","source":["##Review the results"],"metadata":{"id":"6Ab830tHTOey"}},{"cell_type":"code","source":["from collections import Counter\n","print(\"Training labels distribution:\", Counter(train_label_masks))\n","print(\"Test labels distribution:\", Counter(test_label_masks))"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"4eMvBe5AATCy","executionInfo":{"status":"ok","timestamp":1725598605844,"user_tz":420,"elapsed":264,"user":{"displayName":"Jason 
Phillips","userId":"10136472498761089328"}},"outputId":"2a360a31-fd37-4a49-c421-d80ead85c753"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["Training labels distribution: Counter({True: 320, False: 320})\n","Test labels distribution: Counter({True: 160})\n"]}]},{"cell_type":"code","source":["print(\"b_embeddings shape:\", b_embeddings.shape)\n","print(\"b_embeddings dtype:\", b_embeddings.dtype)"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"B1Q7EajF4O6n","executionInfo":{"status":"ok","timestamp":1725598608983,"user_tz":420,"elapsed":569,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"97f4b724-56b1-4bc6-9a15-41d2cb8bf202"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["b_embeddings shape: torch.Size([32, 768])\n","b_embeddings dtype: torch.float32\n"]}]},{"cell_type":"code","source":["print(f\"Original logits shape: {logits.shape}\") # Inspect this shape first"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"68a4meKL7OLi","executionInfo":{"status":"ok","timestamp":1725598611064,"user_tz":420,"elapsed":676,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"30a7f682-2de0-4829-e1a5-e70fb58bac8f"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["Original logits shape: torch.Size([32, 4])\n"]}]},{"cell_type":"code","source":["print(\"filtered_logits shape:\", filtered_logits.shape) # Should be [64, num_classes]\n","print(\"b_labels shape:\", b_labels.shape) # Should be [64]"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"JMkhp8cF4XyD","executionInfo":{"status":"ok","timestamp":1725598614357,"user_tz":420,"elapsed":565,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"75708dca-8628-460d-ec34-e0e4f89dd2c8"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["filtered_logits shape: torch.Size([32, 4])\n","b_labels shape: torch.Size([32])\n"]}]},{"cell_type":"code","source":["last_pred = []\n","last_label = []\n","for stat in training_stats:\n"," last_pred = stat['Preds']\n"," last_label = stat['Labels']\n","\n","\n","print(\"\\nTraining complete!\")\n","\n","print(\"Total training took {:} (h:mm:ss)\".format(format_time(time.time()-total_t0)))"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"cEu2GEqxljdA","executionInfo":{"status":"ok","timestamp":1725598616316,"user_tz":420,"elapsed":273,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"ae4ea6ea-6af8-45d6-f579-186f1ac1d9e5"},"execution_count":null,"outputs":[{"output_type":"stream","name":"stdout","text":["\n","Training complete!\n","Total training took 0:00:56 (h:mm:ss)\n"]}]},{"cell_type":"code","source":["with open('/content/drive/MyDrive/Master_Project_2024_JP/CSC502 Thomas Project/models/GANBERT/runs/1/spaBert_training_stats_lab_ns_100.pkl', 'wb') as f:\n"," pickle.dump(training_stats, f)"],"metadata":{"id":"rIS7L_3X_UIJ"},"execution_count":null,"outputs":[]},{"cell_type":"code","source":["last_pred"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"2osetj-j_cqV","executionInfo":{"status":"ok","timestamp":1726187333172,"user_tz":420,"elapsed":551,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"db1df1fe-180a-4c12-b2b9-089240fb913b"},"execution_count":75,"outputs":[{"output_type":"execute_result","data":{"text/plain":["array([0, 0, 1, 0, 1, 0, 
0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0,\n"," 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0,\n"," 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0,\n"," 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0,\n"," 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0,\n"," 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0,\n"," 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1,\n"," 1, 0, 0, 1, 0, 1])"]},"metadata":{},"execution_count":75}]},{"cell_type":"code","source":["last_label"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"Yt-UtVLs_e8A","executionInfo":{"status":"ok","timestamp":1726187336866,"user_tz":420,"elapsed":375,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"02ea0dc1-a0d5-4b3a-cb69-dbe500c644f6"},"execution_count":76,"outputs":[{"output_type":"execute_result","data":{"text/plain":["array([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n"," 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n"," 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n"," 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1,\n"," 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n"," 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n"," 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n"," 1, 1, 1, 1, 1, 1])"]},"metadata":{},"execution_count":76}]},{"cell_type":"code","source":["from sklearn.metrics import classification_report\n","for z in range(1, 2):\n"," with open('/content/drive/MyDrive/Master_Project_2024_JP/CSC502 Thomas Project/models/GANBERT/runs/'+ str(z) +'/spaBert_training_stats_lab_ns_100.pkl', 'rb') as f:\n"," print(z)\n"," training_stats = pickle.load(f)\n","\n"," last_pred = []\n"," last_label = []\n"," for stat in training_stats:\n"," last_pred = stat['Preds']\n"," last_label = stat['Labels']\n","\n"," target_names = ['real', 'fake']\n"," print(classification_report(last_label, last_pred, target_names=target_names))"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"Q5Piy3NV_m1j","executionInfo":{"status":"ok","timestamp":1726187338775,"user_tz":420,"elapsed":1061,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"d4b7037f-b7f9-4b2b-83fc-b6b290bbce0a"},"execution_count":77,"outputs":[{"output_type":"stream","name":"stdout","text":["1\n"," precision recall f1-score support\n","\n"," real 0.50 0.45 0.47 80\n"," fake 0.50 0.55 0.52 80\n","\n"," accuracy 0.50 160\n"," macro avg 0.50 0.50 0.50 160\n","weighted avg 0.50 0.50 0.50 160\n","\n"]}]},{"cell_type":"code","source":["from sklearn.preprocessing import StandardScaler\n","from sklearn.svm import SVC\n","from sklearn.metrics import confusion_matrix\n","from sklearn.metrics import precision_score, recall_score, f1_score, accuracy_score\n","import matplotlib.pyplot as plt\n","#\n","# Standardize the data set\n","#\n","\n","#\n","# Fit the SVC model\n","#\n","\n","#\n","# Get the predictions\n","#\n","\n","#\n","# Calculate the confusion matrix\n","#\n","conf_matrix = confusion_matrix(y_true=last_label, y_pred=last_pred)\n","#\n","# Print the confusion matrix using Matplotlib\n","#\n","fig, ax = plt.subplots(figsize=(5, 5))\n","ax.matshow(conf_matrix, cmap=plt.cm.Oranges, alpha=0.3)\n","for i in range(conf_matrix.shape[0]):\n"," for j in range(conf_matrix.shape[1]):\n"," ax.text(x=j, y=i,s=conf_matrix[i, j], va='center', 
ha='center', size='xx-large')\n","\n","plt.xlabel('Predictions', fontsize=18)\n","plt.ylabel('Actuals', fontsize=18)\n","plt.title('Confusion Matrix', fontsize=18)\n","plt.show()"],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":507},"id":"xWX1KOtX_6j4","executionInfo":{"status":"ok","timestamp":1726187343249,"user_tz":420,"elapsed":413,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"f46c9eea-1e47-4604-fe6c-03ec0e8097ac"},"execution_count":78,"outputs":[{"output_type":"display_data","data":{"text/plain":["
"],"image/png":"iVBORw0KGgoAAAANSUhEUgAAAcUAAAHqCAYAAAB1O1VnAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA83ElEQVR4nO3dd3hU1d728XuSkALpIJFACE2qR6oU0QMizQYKCgLSbYACgliOShGeg4KPCsKrKF2kicABRT3UhyNNaQoIKBhaAqHEFEISIFnvH+zsQ0whmWQyIN/PdeVysmetvX57InPPnll7jcMYYwQAAOTh7gIAALheEIoAAFgIRQAALIQiAAAWQhEAAAuhCACAhVAEAMBCKAIAYCEUAQCwEIrAn+zZs0ddunRRuXLl5OXlJYfDoXr16rmtng0bNsjhcMjhcLitBuTsyJEj9t/myJEj7i4HRYBQhEukp6dr8eLF6tWrl6pXr67g4GB5e3urbNmyuvvuu/Xaa69p79697i4zm6ioKDVv3lxffPGFTp06paCgIIWFhalMmTLuLu2GlBkYDodDtWrVumb7H3/8MUufPn36FGk9u3fv1ujRo/XBBx8U6X7x1+Hl7gLw17N161b17t1bv/76q72tRIkSCggI0Llz57Rp0yZt2rRJb7/9tjp16qQFCxbI29vbjRX/17Rp05SUlKRq1appw4YNKl++vLtLUsmSJVWjRg13l1FoBw4c0JYtW9SsWbNc28ycOdOlNezevVtjxoxRZGSkhg4dWuj9lShRwv7blChRotD7g/txpogitXLlSrVs2VK//vqrSpcurfHjx+vXX3/VxYsXde7cOV28eFE//vijXn31VQUGBmrp0qW6cOGCu8u27dmzR5LUsWPH6yIQJalx48Y6cOCADhw44O5SnFapUiVJ0qxZs3Jtk5qaqoULF8rhcCgyMrKYKiuc8uXL23+b6+X/FxQOoYgi89tvv+nJJ59UWlqaateurd27d+vVV1/VbbfdZrfx9PRUo0aNNH78eEVFRaljx45urDi7zID29/d3cyV/Lb169ZLD4dCiRYtyfRG0dOlSxcfHq0WLFnaIAsWNUESReeONN5SYmChfX18tW7ZMFSpUyLN9aGioli9frqCgoGz3nTp1SiNGjFCdOnVUqlQplSpVSnXq1NHLL7+s2NjYHPf350kPsbGxGjJkiCpXrixfX1+FhYXpiSeeyPGMq1KlSnI4HNqwYYMkacyYMVk+28rcPnr0aDkcDrVs2TLX47rWxJht27apR48edl2lSpVSZGSkWrRoobFjx+rEiRMF2p87Hq+Cqly5slq0aKHExER9+eWXObbJfOu0b9++ee7rwoULWrBggXr16qV69erplltukY+Pj8LDw/XII4/om2++ybGfw+Gw93306NEsf1+Hw6HRo0fbbfv06WN/pmmM0fTp03X33XerdOnScjgcmj17tqTcJ9qcO3dOFSpUkMPh0COPPJJjPZcvX1bz5s3lcDh0xx13KDU1Nc/jRjExQBE4deqU8fDwMJJM//79C7WvDRs2mODgYCPJSDKlSpUypUqVsn8PCQkx//nPf7L1i4qKstt89dVXpmzZskaSKVmypPHx8bHvCwwMNLt3787St1GjRiYsLMyUKFHCHjMsLMz+2bRpkzHGmFGjRhlJpkWLFrnWv379enusP5s9e7ZxOBz2/T4+PiYwMND+XZKZNWtWvvfnrscrv64+pjlz5hhJ5t57783W7siRI8bhcJiAgACTnJxsWrRoYSSZ3r17Z2s7a9Yse78Oh8MEBQWZkiVLZnkMhw8fnq1fWFiY/Vh7eHhk+fuGhYWZiRMn2m179+5tJJlevXqZzp07231CQkKMh4eH/Te6+jGMiorKMt6GDRvsfxNTpkzJVs/rr79uJBk/Pz+zb9++gj2wcBlCEUViwYIFWZ5gnXXs2DH7Cb527drm+++/t+/buHGjqVGjhpFkQkNDzYkTJ7L0vfoJKiQkxDRv3tz8+OOPxhhjLl26ZFavXm3KlStnJJl77rknx/Ezn4xHjRqV4/2FCcXk5GQTEBBgJJknn3zSHDp0yL7v/PnzZvv27WbEiBHm66+/ztf+rofH61quDsXM43c4HOb333/P0m706NFGknnqqaeMMSbPUFy+fLl56aWXzPfff2+Sk5Pt7TExMWbMmDH2C5t//etf2fpmBmpkZGSedWeGor+/v/Hy8jLvvvuuSUhIMMYYk5SUZGJiYowxeYeiMca8+eabRpLx9fU1P//8s719/fr1dmB+/PHHedaC4kUooki88cYb9pNDdHS00/t57rnn7CfpkydPZrv/+PHj9qv9QYMGZbnv6ieomjVrmgsXLmTrv2LFCrvN8ePHs93vylDctm2bfSZ36dKlXPvnd3/GuP/xupY/n/0+9dRTRpIZOXKk3SYjI8NUqlTJSLLPyPMKxWuZOHGikWTuu+++bPcVNBQlmcmTJ+fa7lqhePnyZdO8eXP7RcuFCxfM2bNnTfny5Y0k06lTp4IeHlyMzxRRJM6dO2ffDg0NdWofxhgtXrxYkvTcc8/p1ltvzdamQoUKeu655yRJCxcuzHVfw4cPl5+fX7bt999/v335R+ZM0+ISHBwsSfZM3MK6ER+vfv36SZLmzJkjY4wkaf369Tpy5Ihq1Kihu+66q9BjPPjgg5KkLVu2KD09vVD7CgkJ0bPPPut0f09PT82fP18hISH65ZdfNGTIEPXr10/R0dGKiIjQ9OnTC1Ufih6hiOtGVFSU4uLiJEmtW7fOtV2bNm0kXQniqKioHNs0adIkx+1eXl665ZZbJMkeq7hUrVpVNWvW1KVLl9SkSRO988472r17t9NP3Dfi49WsWTPVrFlTR48e1dq1ayXlf4LN1WJjYzVq1Cg1a9ZMpUuXtlcecjgcql27tqQrE3L++OOPQtV75513Fvoa2ooVK+rTTz+VJH366adasWKFPD09NW/ePIWEhBRq3yh6hCKKROnSpe3bzj55nj592r6d1zVfV89qvbrP1QICAnLt7+V1Zc2KS5cuFbTEQvH09NTChQtVuXJlHT16VK+++qrq16+vwMBAtWnTRh999FGBrtm8UR+vzPCbNWuWEhMTtXTpUnl6eqpXr1756r9lyxbVrFlTb731lrZu3aq4uDj5+fmpbNmy2VYfSk5OLlStZcuWLVT/TJ07d1bnzp3t31966SX9/e9/L5J9o2gRiigSderUsW/v2rXLjZVc3+rWrasDBw7oyy+/1DPPPKPbb79dKSkpWrNmjQYOHKiaNWsW+9u6xa1nz57y9PTUsmXL9PHHHyslJUXt27dXuXLlrtn38uXL6tatm+Lj41WvXj2tWrVKiYmJSkpKUmxsrE6dOqWtW7fa7TPfonWWp6dnofpnOnLkiNasWWP/vmnTpkK/tQvXIBRRJO699155eFz532nZsmVO7ePqV+V/vlbvalffV1Sv5PMr86wpr2vKEhIS8tyHt7e3OnXqpGnTpmnPnj06c+aMPv74Y4WGhur48ePq3bt3vmq5ER6vnJQrV07t27d
XSkqK3nzzTUn5f+t0y5YtOnr0qDw9PfXVV1/p/vvvz3aWe+rUqSKvuTAygzwhIUHVq1eXj4+Pvv/+e40dO9bdpSEHhCKKRFhYmP320Pz587Ose3otma/mK1eubE/Syfy8KSeZr7hLly6typUrO1uyUzI/Azp+/HiubbZt21agfZYuXVrPPvus3nnnHUlXzrTzMxHnRni8cpM54ebixYsqU6aMOnTokK9+mY/7Lbfckutbxlefkf1Z5gu3wp5BFsSoUaO0detWlSxZUsuXL7f/zuPGjdP3339fbHUgfwhFFJlx48bJ399fKSkp6tSpk6Kjo/Ns/8cff6hz5872mZXD4VDXrl0lXVmYO6dX/DExMZo2bZokqVu3bkV8BNdWt25du46cwu/06dP2pIo/S0tLy3PfV8/+zHzyzsuN8Hjl5uGHH9aIESM0fPhwffDBB/leTDtz9aPY2NgcV+o5ceKEJk+enGv/wMBASVJ8fHzBi3bC+vXr9fbbb0uS3n//fdWqVUtDhgzRgw8+qPT0dPXo0aPQk4FQtAhFFJnq1avrs88+k7e3t/bt26d69erpnXfe0aFDh+w26enp2rVrl0aOHKkqVapo6dKlWfbxj3/8Q8HBwYqLi1Pr1q21efNm+75NmzapdevWio+PV2hoqF599dViO7ZMd911l71Yde/evbV9+3YZY5SRkaENGzaoZcuWysjIyLHvwoUL1bx5c02bNk2///67vT09PV3fffedfTzNmjXL96zE6/3xyk2JEiU0YcIEvfvuu+rRo0e++919990qVaqUjDHq0qWL/Y5E5mPYsmXLPJfDu/322yVJiYmJ9uUsrnLu3Dn17NlTGRkZ6tSpk5555hn7vlmzZqlcuXI6duyYnn76aZfWgQJy3yWS+Kv6/vvvTbVq1bIsu+Xt7W1CQ0PtVTxkLdHVrVs3c/HixSz9N2zYYIKCgnJdtiw4ONhs3Lgx27jXupA6U2RkZI7LqRlz7Yv3jTHm22+/tVdNkbUsmq+vr5Fkbrvttiyr+1zt6uXJZC3xVrp06SyPSXh4uNm/f3+WfvlZ5s1dj9e1ZO6/oH3zunj/o48+yvI4+vv7249/mTJlsiw4kNNx3Xffffb9AQEBJjIy0kRGRpr333/fbpN58f61Fg/I6zHs0KGDkWQiIiJMXFxctr6rV6+2l/z75JNP8vGooDhwpogi17x5cx04cEALFixQjx49VK1aNfn6+iopKUmhoaG6++679frrr2v//v2aP39+trfOWrRoof3792v48OGqVauWMjIyZIxRrVq19NJLL2n//v2655573HR0Urt27fSf//xHDz30kEJCQpSenq6IiAi9+uqr2rFjR44X0UtShw4dNHfuXPXt21d169ZVUFCQEhISFBAQoMaNG2vs2LHat2+fatasWaB6rvfHq6g999xz+vrrr9WyZUv5+/vr8uXLKl++vF544QX99NNP+tvf/pZn/yVLlujFF19U9erVdenSJR09elRHjx4t0rdUp06dqhUrVsjDwyPX6xFbt26tESNGSJKGDh2q/fv3F9n4cJ7DmGL8xBkAgOsYZ4oAAFgIRQAALIQiAAAWQhEAAAuhCACAhVAEAMBCKAIAYCEUAQCwEIpwualTp6pSpUry9fVVkyZN9MMPP7i7JKBYbNy4UQ8//LDCw8PlcDi0fPlyd5eEayAU4VKLFi3SsGHDNGrUKO3cuVN169ZVu3btcv0GeOCvJDk5WXXr1tXUqVPdXQryiWXe4FJNmjTRnXfeqSlTpkiSMjIyFBERoRdeeOG6+tYGwNUcDoeWLVumRx55xN2lIA+cKcJlLl68qB07dqh169b2Ng8PD7Vu3VpbtmxxY2UAkDNCES5z9uxZpaenKywsLMv2sLCwHL8QFwDcjVAEAMBCKMJlypQpI09PT8XGxmbZHhsbm+t3DgKAOxGKcBlvb281bNhQa9eutbdlZGRo7dq1atasmRsrA4Ccebm7APy1DRs2TL1791ajRo3UuHFjffDBB0pOTlbfvn3dXRrgcufPn9ehQ4fs36OiorR7926FhoaqYsWKbqwMueGSDLjclClTNHHiRJ06dUr16tXT5MmT1aRJE3eXBbjchg0bdO+992bb3rt3b82ePbv4C8I1EYoAAFj4TBEAAAuhCACAhVAEAMBCKAIAYCEUAQCwEIoAAFgIRQAALIQiXC4tLU2jR49WWlqau0sB3IJ/AzcOLt6HyyUmJiooKEgJCQkKDAx0dzlAsePfwI2DM0UAACyEIgAAlpviWzIyMjIUExOjgIAAORwOd5dz00lMTMzyX+Bmw78B9zLGKCkpSeHh4fLwyPtc8Kb4TPHEiROKiIhwdxkAADc6fvy4KlSokGebm+JMMSAgQJJ0/MjvCgwMcHM1QPH7bdN6d5cAuM35CxfUsmsfOwvyclOEYuZbpoGBAcz8wk3Jv1RJd5cAuF1+Pj5jog0AABZCEQAAC6EIAICFUAQAwEIoAgBgIRQBALAQigAAWAhFAAAshCIAABZCEQAAC6EIAICFUAQAwEIoAgBgIRQBALAQigAAWAhFAAAshCIAABZCEQAAC6EIAICFUAQAwEIoAgBgIRQBALAQigAAWAhFAAAshCIAABZCEQAAC6EIAICFUAQAwEIoAgBgIRQBALAQigAAWAhFAAAshCIAABZCEQAAC6EIAICFUAQAwEIoAgBgIRQBALAQigAAWAhFAAAshCIAABZCEQAAC6EIAICFUAQAwEIoAgBgIRQBALAQigAAWAhFAAAshCIAABZCEQAAC6EIAICFUAQAwEIoAgBgIRQBALAQigAAWAhFAAAshCIAABZCEQAAC6EIAICFUAQAwEIoAgBgIRQBALAQigAAWAhFAAAshCIAABZCEQAAC6EIAICFUAQAwEIoAgBgIRQBALAQigAAWAhFAAAshCIAABZCEQAAC6EIAICFUAQAwEIoAgBgIRQBALAQigAAWAhFAAAshCIAABZCEQAAC6EIp2zfvkNvjhyt9g88pGo1aiko9Bb5lAxQ+YqV1fHRzlr+r39dcx/x8fEaO+6fatSkmULKhKlkQLCq3FZDnR7rotlz5hbDUQBFJzklRS279lHNVg+pZquHtPTbNfnuO+jNcXa/V99534VV4lq83F0AbkzTZ87UtE+m27/7+/vLw8NDMTExWhEToxUrv1LnTo9qweefqUSJEtn6b9z4Hz3+RHedPn1akuTj4yMfHx9FRR1RVNQR/bxnj/r07lVsxwMU1qQZn+nUmbMF7rfm+y1au2mrCyqCMzhThFOaNW2q9/93onb8sFVJ8eeUFH9OKecTdCzqkEYMHyZJ+nLpMr39zsRsfXfu3KUHHu6o06dPq8PDD2nHD1uVmpyohLgzij93Wt9+vVLdn3iiuA8JcNq+Xw/p8+VfqW6tGgXql5ySov+Z8on8S5VUlYoVXFQdCoJQhFN69+qpoUMGq0GD+vL397e3R0REaMI74/Vkj+6SpNlzs74Nmp6err5PPa3k5GT16N5Ny5cuUYMG9e37g4KC1K5dW701ZlTxHAhQSBkZGRr1/lRJ0qihAw
vUd/LMeTp5+owG931SpUOCXVAdCopQhEvc2aihJCkm5mSW7V99/bV+/nmP/Pz8NPmD9+RwONxRHlBk5i1bqb0Hf9MTHR5Q7duq5rvfvl8Pad6ylapVrYp6dHzQhRWiIAhFuMTmLVc+I6lcuVKW7Z/PXyhJate2jUJDQ4u7LKBIxZ45q0mz5qlMSLCG9uuZ736ZZ5cZxmjkkAHy9PR0YZUoCEIRReb8+fP6+ec9GvTCYC1a/IUk6fmBA7K02bJ1mySpfr16io6O1jPPDVD5ipXlUzJAEZWqqmfvvtqzZ2+x1w44Y9yH05R8IUUvP9dfAf6l8t1v3vKvtPfgb+p8fxvVr1PLhRWioG6oUJw6daoqVaokX19fNWnSRD/88IO7S7rpnThxQg4vHzm8fBQQXFp1GzTS//tomnx9fTV2zGgNHPCc3TY1NVUnTpyQJP3xxx+q17CxPp0+U+fOnZOfn59OnDiheZ/PV8PGTbVw0WJ3HRKQL+s2b9Pq77eocb2/qUObe/PdL/bMWU2a+ZmCAwM1/Ok+risQTrlhQnHRokUaNmyYRo0apZ07d6pu3bpq166dPaUf7uHp6amwsDCFhYXJ29tbkuTl5aXXXnlZgwY+l6VtfHy8fXvylKm6ePGiFs6fp/MJcYo/d1p7du9Uk8aNdenSJfXt/7R+/fXX4jwUIN8upKRq3OSPVcLLSyMHD7h2h6uMm/KJki+kaPjTvRUSFOiiCuGsGyYU33vvPT399NPq27evateurY8//lglS5bUzJkz3V3aTa1cuXI6FX1Mp6KPKeV8gg7+ske9ej6pUWPeUr2GjbVv3y9224yMjCy333t3grp2eVxeXlcul7399jr617Il8vf3V2pqqj6Y/GGxHw+QH5Nnz1PM6TPq/VhHVatUMd/91m/5Qav/s1l1a9fQYw+0dWGFcNYNEYoXL17Ujh071Lp1a3ubh4eHWrdurS1btmRrn5aWpsTExCw/cD0PDw9Vr15dMz6dpmFDh+jYsWPq2aevHYZXX7oRFBSU48X5YWFh6t6tqyRp7br1xVM4UAD7D/2uz75coXJlb9HAnt3y3e9CSqrGTvpInh4eGjVkIDOvr1M3RCiePXtW6enpCgsLy7I9LCxMp06dytZ+/PjxCgoKsn8iIiKKq1RYXnj+yvVau3bt1q5duyVJAQEBdjBWrVol1xl3NapXlyQdP37C9YUCBfQ/Uz5RekaGhvbrKSOj5JSULD+ZLl66pOSUFKWkpkqSpi/6UjGnz6jT/W0UWSE8W7/MF4/p6en2NmOMW47xZvaXXObttdde07Bhw+zfExMTCcZiVr58efv24d9/V8OGDeRwOFSndm1ty+cEKV5J43oUE3tlHsMrb78nvZ17u9HvT9Xo96cqPKys1i2Yaff74uvv9MXX3+Xab+WaDVq5ZoMkac38Gapwa1iubVH0bogzxTJlysjT01OxsbFZtsfGxurWW2/N1t7Hx0eBgYFZflC8oqKi7Nv+pf47Vb31fa0kSYcP/6709PQc+x44eFCSVKlSpAsrBIDsbogzRW9vbzVs2FBr167VI488IunKRI21a9fq+eefd29xN6H09HR5eHjkeSY38X+vrPTv5eWlZs2a2tt7dH9C49+ZoISEBM2aPUdP9e+XpV9sbKzmL1gkSXqgfXsXVA8UzroFeU/uq9nqIUnSP18eqk7t/zsP4u1XXtTbr7yYa7+eL76qH3/aq0fa3ZdnO7jWDXGmKEnDhg3Tp59+qjlz5mj//v0aMGCAkpOT1bdvX3eXdtM5fvy4GjVpppmzZtvXHUpXXqjs3v2TevTsrekzrjxxvDBooEJCQuw2tWrVUv9+fSRJw0e8osVfLNHly5clSfv2/aJHOj2u5ORkhYSE6MWhg4vvoABAN8iZoiR17dpVZ86c0ciRI3Xq1CnVq1dP3377bbbJNygeO3fuUv+nn5Uk+fr6yt/fX0lJSUpLS7Pb9OndSxPeGZ+t7+QP3tfhw79r3foN6tqth3x9feXj46OEhARJV2amfrl4ocLDw4vnYADAcsOEoiQ9//zzvF16HQgPD9eiBZ9r7bp1+uHH7Tp58pTOnTsnX19fVa1aRc2aNlXf3r3UvPldOfb39fXV6u++0afTZ2jO3Hn6Zf9+paamqlq1qrq/fTuNGD6MiVEA3MJhboI5v4mJiQoKClJC3Bkm3eCmdHDjaneXALjN+eQLavRwFyUkJFwzA26YzxQBAHA1QhEAAAuhCACAhVAEAMBCKAIAYCEUAQCwEIoAAFgIRQAALIQiAAAWQhEAAAuhCACAhVAEAMBCKAIAYCEUAQCwEIoAAFgIRQAALIQiAAAWQhEAAAuhCACAhVAEAMBCKAIAYCEUAQCwEIoAAFgIRQAALIQiAAAWQhEAAAuhCACAhVAEAMBCKAIAYCEUAQCwEIoAAFgIRQAALIQiAAAWQhEAAAuhCACAhVAEAMBCKAIAYCEUAQCwEIoAAFgIRQAALIQiAAAWQhEAAAuhCACAhVAEAMBCKAIAYHFpKP7xxx9KSEhw5RAAABQZp0MxJiZGc+fO1bfffpvtvn379qlRo0YqU6aMQkNDdc899+jXX38tVKEAALia06E4c+ZM9e3bVxs2bMiyPSUlRQ888IB27dolY4yMMdq0aZNat26txMTEwtYLAIDLOB2Ka9askSR17do1y/Y5c+bo+PHjCg0N1aeffqp58+apQoUKio6O1tSpUwtXLQAALuR0KB45ckSSVLNmzSzbly5dKofDoX/+85/q37+/unfvrk8//VTGGK1YsaJQxQIA4EpOh+LZs2cVGBgoPz8/e1tGRoY2b94sh8Ohxx57zN7epk0beXh46ODBg4WrFgAAF3I6FNPT05WWlpZl2549e3ThwgXVqVNHISEh/x3Ew0MhISFKTk52vlIAAFzM6VAsV66c0tLSFBUVZW/77rvvJEl33XVXtvbnz59XaGios8MBAOByTodis2bNJEljxoxRRkaGzpw5o48++kgOh0Pt2rXL0jYqKkppaWkqV65c4aoFAMCFnA7FIUOGSJI+++wzBQcHKyIiQkePHlXlypX10EMPZWm7evVqSVKDBg0KUSoAAK7ldCg2btxYM2fOlL+/v86fP6+LFy+qZs2aWrp0qby8vLK0nTt3riTp3nvvLVy1AAC4kMMYYwqzg5SUFO3du1fBwcGqWrWqPDyy5uzFixe1cOFCGWPUsWNHBQcHF2Y4pyQmJiooKEgJcWcUGBhY7OMD7nZw42p3lwC4zfnkC2r0cBclJCRcMwO88rw3H/z8/HTnnXfmer+3t7d69epV2GEAAHA5viUDAAALoQgAgCVfb59WqVKlSAZzOBw6fPhwkewLAICilq9QzFzntLAcDkeR7AcAAFfIVyjOmjXL1XUAAOB2+QrF3r17u7oOAADcjok2AABYCEUAACyEIgAAlkKH4k8//aRnnnlGtWvXVmBgoDw9PXP9+fOaqAAAXE8KlVJTpkzRsGHDlJ6erkIuoQoAgNs5faa4bds2DRkyROnp6Ro4cKBWrVolSQoNDdWaNWs0b9489enTR97e3ipTp
ozmz5+vdevWFVnhAAAUNafPFCdPnixjjIYOHar33nvP3u7t7a1WrVpJkrp3767BgwerXbt2evPNN7Vz587CVwwAgIs4faa4adMmORwO+8uGM/35bdR69erpww8/1OHDhzVx4kRnhwMAwOWcDsXY2Fj5+PgoMjLyvzvz8FBqamq2to8++qhKlCihpUuXOjscAAAu5/TbpyVLlsy2lmlAQIASExOVlpYmHx8fe3uJEiVUsmRJHT161PlKAQBwMafPFMuXL6/ExERdvnzZ3la1alVJ0o8//pilbUxMjBISEpihCgC4rjkdirVq1VJ6err27Nljb2vZsqWMMXrrrbfst1EvXryowYMHS5L+9re/FbJcAABcx+lQbNu2rYwxWrlypb1t0KBB8vHx0dq1a1WhQgU1b95c5cuX17Jly+RwOPT8888XSdEAALiC058pdu7cWSdOnFB4eLi9rXLlypo/f7769u2ruLg4bdmyRdKVCTgjRoxQjx49Cl8xAAAu4jAu+KAvLi5Oq1at0vHjxxUUFKS2bduqWrVqRT1MviUmJiooKEgJcWcUGBjotjoAdzm4cbW7SwDc5nzyBTV6uIsSEhKumQEuWYw0NDRUTz75pCt2DQCAy/AtGQAAWAhFAAAsTr99mrm+aUE4HA6tXbvW2SEBAHApp0Nxw4YN+WqXueqNMSbbCjgAAFxPnA7FUaNG5Xl/QkKCtm3bpi1btqh06dIaMGCAPD09nR0OAACXc1koZlq3bp06deqkX375RUuWLHF2OAAAXM7lE21atWqlSZMmadmyZZo+fbqrhwMAwGnFMvu0a9eu8vT0JBQBANe1YglFX19flSpVSvv37y+O4QAAcEqxhGJ0dDRfHQUAuO65PBRTUlI0cOBASXx1FADg+ub07NO33norz/tTU1N1/Phxfffddzp37pwcDocGDRrk7HAAALic06E4evTofF2Mb4yRh4eH3njjDXXv3t3Z4QAAcDmnQ/Hvf/97nqHo5eWlkJAQ1a1bV126dNFtt93m7FAAABQLly/zBgDAjYJvyQAAwOJ0KL711lt677338t1+8uTJ15ycAwCAOzmMkxcPenh46NZbb1VMTEy+2leuXFnHjh1Tenq6M8MVSmJiooKCgpQQd0aBgYHFPj7gbgc3rnZ3CYDbnE++oEYPd1FCQsI1M4C3TwEAsBRbKMbFxcnX17e4hgMAoMCKJRS/+OILJSUlqWLFisUxHAAATsn3JRmTJk3SpEmTsmw7c+aMqlSpkmsfY4zi4+OVmJgoh8OhBx980PlKAQBwsXyHYnx8vI4cOZJlW3p6erZtubnvvvs0cuTIgtQGAECxyncoPvLII6pUqZKkK2eA/fr1U1BQkD744INc+3h4eCgwMFC33367qlatWthaAQBwqWK7JMOduCQDNzsuycDNrCCXZDi9zFtGRoazXQEAuC5xnSIAABanQ3Hr1q1q0KBBvr4j8amnnlKDBg20fft2Z4cDAMDlnA7F+fPn66efftI999xzzbZNmzbV7t27NX/+fGeHAwDA5ZwOxf/7v/+TJLVt2/aabR999FFJ0vr1650dDgAAl3M6FE+cOKGgoCCFhoZes23p0qUVFBSk6OhoZ4cDAMDlnA7FlJSUAs1ANcYoKSnJ2eEAAHA5p0OxbNmySkpKytd1itHR0UpMTFSZMmWcHQ4AAJdzOhSbNm0qSZo6deo122a2adKkibPDAQDgck6HYv/+/WWM0YQJE/TJJ5/k2m7atGmaMGGCHA6H+vfv7+xwAAC4nNMr2rRp00aPPfaYlixZogEDBmjq1Kl66KGHFBkZKUk6evSoVq5cqX379skYo86dO+v+++8vssIBAChqToeiJM2ZM0cOh0NffPGF9uzZo71792a5P3NZ1SeeeEIzZswozFAAALhcoZZ58/Pz06JFi7RmzRp1795dkZGR8vHxka+vrypVqqQePXpo3bp1mj9/vvz8/IqqZgAAXKJQZ4qZWrVqpVatWuV6f0ZGhr7++mvNmDFDy5cvL4ohAQAockUSirn57bffNGPGDM2dO1exsbGuHAoAgEIr8lC8cOGCFi9erBkzZmjz5s2S/vvZYq1atYp6OAAAikyRheLWrVs1Y8YMLV68WOfPn5d0JQxr1qypxx9/XI8//rhuv/32ohoOAIAiV6hQPHPmjObOnauZM2fqwIEDkv57VuhwOPTjjz+qYcOGha8SAIBiUOBQNMZo1apVmjlzpr766itdvnxZxhj5+fnpkUceUe/evdW+fXtJvF0KALix5DsUDx8+rJkzZ2rOnDk6efKkjDFyOBy6++671atXL3Xp0kUBAQGurBUAAJfKdyjedtttcjgcMsaocuXK6tWrl3r16qXKlSu7sj4AAIpNgd8+HTx4sCZMmCBvb29X1AMAgNvke0UbHx8fGWP04YcfKjw8XIMGDdLWrVtdWRsAAMUq36F48uRJTZ48WXfccYfi4uL00UcfqXnz5qpRo4b++c9/6tixY66sEwAAl8t3KAYHB+v555/Xrl27tGPHDg0YMEBBQUH67bff9Oabb6pKlSpq1aqVZs2a5cp6AQBwGacWBK9fv76mTp2qkydP6rPPPlOLFi1kjNGGDRv01FNP2e3+/e9/6/Lly0VWLAAArlSob8nw8fGxvwnj0KFDev3111W+fHlJsr9DsWzZsurbt69WrVpFQAIArmsOk7kETRExxui7777T9OnTtXLlSl26dEkOh0PSlbdgz507V5TD5UtiYqKCgoKUEHdGgYGBxT4+4G4HN652dwmA25xPvqBGD3dRQkLCNTOgUGeKOXE4HGrfvr2WLFmi6Ohovfvuu6pVq5aMMYqPjy/q4QAAKDJFHopXK1OmjIYNG6a9e/dq8+bN6t+/vyuHAwCgUFz6fYpXa9q0qZo2bVpcwwEAUGAuPVMEAOBGQigCAGAhFAEAsBCKAABYCEUAACyEIgAAFkIRAAALoQgAgIVQBADAQigCAGAhFAEAsBCKAABYCEUAACyEIgAAFkIRAAALoQgAgIVQBADAQigCAGAhFAEAsBCKAABYCEUAACyEIgAAFkIRAAALoQgAgIVQBADAQigCAGAhFAEAsBCKAABYCEUAACyEIgAAFkIRAAALoQgAgIVQBADAQigCAGAhFAEAsBCKAABYCEUAACyEIgAAFkIRTtm+fYfeHDla7R94SNVq1FJQ6C3yKRmg8hUrq+OjnbX8X/+65j7i4+M1dtw/1ahJM4WUCVPJgGBVua2GOj3WRbPnzC2GowCKTnJKilp27aOarR5SzVYPaem3a/Ldd9Cb4+x+r77zvgurxLV4ubsA3Jimz5ypaZ9Mt3/39/eXh4eHYmJitCImRitWfqXOnR7Vgs8/U4kSJbL137jxP3r8ie46ffq0JMnHx0c+Pj6KijqiqKgj+nnPHvXp3avYjgcorEkzPtOpM2cL3G/N91u0dtNWF1QEZ3CmCKc0a9pU7//vRO34YauS4s8pKf6cUs4n6FjUIY0YPkyS9OXSZXr7nYnZ+u7cuUsPPNxRp0+fVoeHH9KOH7YqNTlRCXFnFH/utL79eqW6
P/FEcR8S4LR9vx7S58u/Ut1aNQrULzklRf8z5RP5lyqpKhUruKg6FAShCKf07tVTQ4cMVoMG9eXv729vj4iI0IR3xuvJHt0lSbPnZn0bND09XX2felrJycnq0b2bli9dogYN6tv3BwUFqV27tnprzKjiORCgkDIyMjTq/amSpFFDBxao7+SZ83Ty9BkN7vukSocEu6A6FBShCJe4s1FDSVJMzMks27/6+mv9/PMe+fn5afIH78nhcLijPKDIzFu2UnsP/qYnOjyg2rdVzXe/fb8e0rxlK1WrWhX16PigCytEQRCKcInNW658RlK5cqUs2z+fv1CS1K5tG4WGhhZ3WUCRij1zVpNmzVOZkGAN7dcz3/0yzy4zjNHIIQPk6enpwipREDdEKG7cuFEPP/ywwsPD5XA4tHz5cneXhBycP39eP/+8R4NeGKxFi7+QJD0/cECWNlu2bpMk1a9XT9HR0XrmuQEqX7GyfEoGKKJSVfXs3Vd79uwt9toBZ4z7cJqSL6To5ef6K8C/VL77zVv+lfYe/E2d72+j+nVqubBCFNQNMfs0OTlZdevWVb9+/dSpUyd3l4OrnDhxQhGVsr9l5Ovrq9dfe1UDBzxnb0tNTdWJEyckSX/88YfqNWyss2fPysfHR35+fjpx4oTmfT5fixZ/obmzZ+qJrl2K7TiAglq3eZtWf79Fjev9TR3a3JvvfrFnzmrSzM8UHBio4U/3cV2BcMoNcaZ4//33a9y4cXr00UfdXQr+xNPTU2FhYQoLC5O3t7ckycvLS6+98rIGDXwuS9v4+Hj79uQpU3Xx4kUtnD9P5xPiFH/utPbs3qkmjRvr0qVL6tv/af3666/FeShAvl1ISdW4yR+rhJeXRg4ecO0OVxk35RMlX0jR8Kd7KyQo0EUVwlk3RCgWVFpamhITE7P8wDXKlSunU9HHdCr6mFLOJ+jgL3vUq+eTGjXmLdVr2Fj79v1it83IyMhy+713J6hrl8fl5XXlDYvbb6+jfy1bIn9/f6WmpuqDyR8W+/EA+TF59jzFnD6j3o91VLVKFfPdb/2WH7T6P5tVt3YNPfZAWxdWCGf9JUNx/PjxCgoKsn8iIiLcXdJNwcPDQ9WrV9eMT6dp2NAhOnbsmHr26WuH4dWXbgQFBeV4cX5YWJi6d+sqSVq7bn3xFA4UwP5Dv+uzL1eoXNlbNLBnt3z3u5CSqrGTPpKnh4dGDRnIzOvr1F8yFF977TUlJCTYP8ePH3d3STedF56/cr3Wrl27tWvXbklSQECAHYxVq1bJdcZdjerVJUnHj59wfaFAAf3PlE+UnpGhof16ysgoOSUly0+mi5cuKTklRSmpqZKk6Yu+VMzpM+p0fxtFVgjP1i/zxWN6erq9zRjjlmO8md0QE20KKnPJMLhP+fLl7duHf/9dDRs2kMPhUJ3atbXthx/ytQ9eSeN6FBN7ZWnCV95+T3o793aj35+q0e9PVXhYWa1bMNPu98XX3+mLr7/Ltd/KNRu0cs0GSdKa+TNU4dawIqsd1/aXPFOE+0VFRdm3/Uv9d6p66/taSZIOH/5d6enpOfY9cPCgJKlSpUgXVggA2d0QZ4rnz5/XoUOH7N+joqK0e/duhYaGqmLF/H/IjaKRnp4uDw+PPM/kJv7vlZX+vby81KxZU3t7j+5PaPw7E5SQkKBZs+foqf79svSLjY3V/AWLJEkPtG/vguqBwlm3YGae99ds9ZAk6Z8vD1Wn9q3t7W+/8qLefuXFXPv1fPFV/fjTXj3S7r4828G1bogzxe3bt6t+/fqqX//KGpnDhg1T/fr1NXLkSDdXdnM6fvy4GjVpppmzZtvXHUpXZpTu3v2TevTsrekzrjxxvDBooEJCQuw2tWrVUv9+fSRJw0e8osVfLNHly5clSfv2/aJHOj2u5ORkhYSE6MWhg4vvoABAN8iZYsuWLfnA+Tqzc+cu9X/6WUlXLtT39/dXUlKS0tLS7DZ9evfShHfGZ+s7+YP3dfjw71q3foO6dushX19f+fj4KCEhQdKVmalfLl6o8PDw4jkYALDcEKGI60t4eLgWLfhca9et0w8/btfJk6d07tw5+fr6qmrVKmrWtKn69u6l5s3vyrG/r6+vVn/3jT6dPkNz5s7TL/v3KzU1VdWqVdX97dtpxPBhXEYDwC0c5iY4BUtMTFRQUJAS4s4oMJAVJHDzObhxtbtLANzmfPIFNXq4ixISEq6ZATfEZ4oAABQHQhEAAAuhCACAhVAEAMBCKAIAYCEUAQCwEIoAAFgIRQAALIQiAAAWQhEAAAuhCACAhVAEAMBCKAIAYCEUAQCwEIoAAFgIRQAALIQiAAAWQhEAAAuhCACAhVAEAMBCKAIAYCEUAQCwEIoAAFgIRQAALIQiAAAWQhEAAAuhCACAhVAEAMBCKAIAYCEUAQCwEIoAAFgIRQAALIQiAAAWQhEAAAuhCACAhVAEAMBCKAIAYCEUAQCwEIoAAFgIRQAALIQiAAAWQhEAAAuhCACAhVAEAMBCKAIAYCEUAQCwEIoAAFgIRQAALIQiAAAWQhEAAAuhCACAhVAEAMBCKAIAYCEUAQCwEIoAAFgIRQAALIQiAAAWQhEAAAuhCACAhVAEAMBCKAIAYCEUAQCwEIoAAFgIRQAALIQiAAAWQhEAAAuhCACAhVAEAMBCKAIAYCEUAQCwEIoAAFgIRQAALIQiAAAWQhEAAAuhCACAhVAEAMBCKAIAYCEUAQCwEIoAAFgIRQAALIQiAAAWQhEAAAuhCACAhVAEAMBCKAIAYCEUAQCwEIoAAFi83F1AcTDGSJISE5PcXAngHueTL7i7BMBtzl+48v9/Zhbk5aYIxaSkK2EYUamKmysBALhLUlKSgoKC8mzjMPmJzhtcRkaGYmJiFBAQIIfD4e5ybjqJiYmKiIjQ8ePHFRgY6O5ygGLHvwH3MsYoKSlJ4eHh8vDI+1PDm+JM0cPDQxUqVHB3GTe9wMBAnhBwU+PfgPtc6wwxExNtAACwEIoAAFgIRbicj4+PRo0aJR8fH3eXArgF/wZuHDfFRBsAAPKDM0UAACyEIgAAFkIRAAALoQhcx1q2bCmHw6HRo0dnu69SpUpyOByaPXt2sdY0e/ZsORwOVapUqVjHBYoDoYi/tNGjR8vhcGT78fX1VYUKFdShQwctXrw4X2si/tUdOXJEo0ePzjGAgZvFTbGiDSBJYWFh9u2EhARFR0crOjpaK1eu1OzZs7Vs2bIbasp81apV5evrm++VOq7lyJEjGjNmjCTlGYxBQUGqUaOGypcvXyTjAtcTzhRx0zh16pT9k5ycrL1796pNmzaSpG+++UZvvPGGmyssmLVr1+rAgQN69NFHi3XcRx99VAcOHNDatWuLdVygOBCKuCl5eHioTp06WrFihapVqyZJmjZtmi5fvuzmygC4E6GIm5qvr68ef/xxSVe+VubAgQM6cuSI/dnjkSNHdPjwYT3zzDOqXLmyfHx8sk0wycjI0Oeff64HHnhAYWFh8vb21i233KK2bdtqwYIFeX5emZ6erg8//FANGjRQqVKlFBoaqpYtW2rJkiXXrD0
/E222bdumvn37qlq1aipZsqQCAwNVu3Zt9evXT999912Wfd17773273/+DLZPnz72ffmZaHP48GENGDBAt912m/z8/BQYGKgGDRrorbfeUmJiYo59NmzYYI8nSYcOHVK/fv0UEREhHx8fVahQQU8//bSio6NzHffAgQN65plnVL16dZUsWVK+vr6KiIhQ06ZN9Y9//EMHDhzItS8gSTLAX9ioUaOMJJPX/+pTp06122zatMlERUXZv3/++efG39/fSDIlS5Y0pUqVMpGRkXbfc+fOmb///e92e0kmKCgoy+8dOnQwaWlp2cZNTU017dq1s9t5eHiY4OBg43A4jCTzyiuvmBYtWhhJZtSoUdn6R0ZGGklm1qxZ2e67fPmyGTx4cJY6SpUqZUJCQuz9BwUF2e0bNWpkQkJC7LZhYWFZfgYPHmy3nTVrlpGU5XG42qJFi4yPj4+9r4CAgCy/R0REmF9++SVbv/Xr19tt1q1bZz/uAQEBxsvLy74vPDzcnDhxIlv/f//731nGKVGihAkODs7yGOT0OAJXIxTxl5afUBwxYoTdZv/+/VlC0d/f3zRp0sT8+OOPdvuDBw8aY64ET2Zo1atXz6xcudIkJycbY4w5f/68mTNnjilbtqyRZIYOHZpt3BdffNFIMg6Hw4wbN84kJCQYY4yJjY01AwYMyBKwBQ3Fl19+2T6Gfv362TUbY0x8fLxZvny56dq1a5Y+V4dSXvIKxR07dpgSJUoYSaZ58+bm559/NsYYk56eblasWGHKlStnJJmqVauapKSkXMcPCQkxHTp0MPv37zfGGJOWlmYWLVpkAgICjCTTs2fPbGNXrVrVSDJt27Y1e/bssbenpKSYvXv3mjFjxuT4WAFXIxTxl3atUExISDDh4eFGkgkNDTXp6elZQjEyMjLbk3emuXPnGkmmZs2aJj4+Psc227dvNw6Hw3h7e5vY2Fh7e3R0tH328+abb+bYt1u3bnme4eQWigcPHjQeHh5Gknn55Zdz3HdOiiIU27dvbySZatWq2S8QrrZz5077uCdOnJjr+Pfee69JT0/P1n/y5MlGkvHz8zOXLl2yt8fGxtp9Y2Ji8nnEQHZ8poibUnx8vNauXatWrVopJiZGkjRkyJBs38r9/PPPy9/fP8d9zJgxQ5I0YMCAXC+LaNiwoerUqaOLFy9q/fr19vYlS5bo8uXL8vPz00svvZRjX2evF5wzZ44yMjJUunRp+xKL4hAfH29/TjlixAiVLFkyW5v69eurU6dOkqQFCxbkuq9//OMfOX5DeseOHSVJKSkp+u233+ztAQEBdvuTJ086fxC46RGKuGlcPXEkJCRErVu31o4dOyRJTz75pF5//fVsfZo3b57jvtLT07V161ZJV8Lr1ltvzfXn4MGDkqSjR4/a/bdv3y5JatSoUa7fxF69enWnrgXcvHmzJKlNmzby9fUtcH9n7dy5055U1Lp161zbZV4G8/PPP+vSpUs5tmnSpEmO28PDw+3bcXFx9m0/Pz/dd999kqT27dtr5MiR2rZtmy5evFiwg8BNj4v3cdO4+uJ9Hx8flSlTRvXr11ePHj2yzLy8WtmyZXPcHhcXp7S0NEnSH3/8ka/xL1y4YN8+ffq0JF0z9CpUqJDnbMucnDp1SpIUGRlZoH6FlXlMUt7HVaFCBUnS5cuXFRcXl+XvkikgICDHvl5e/33K+nOgTp8+XR06dNBPP/2ksWPHauzYsfL29tadd96pjh07qn///goNDS3QMeHmQyjippEZFgXh6emZ4/b09HT79jfffKP27ds7XVdRy7yk4WZTsWJF7dy5U6tXr9aqVau0adMm/fTTT9q0aZM2bdqk8ePHa8mSJWrVqpW7S8V1jLdPASeULl3aPmu5+m3R/Mo8A73WWWBBzxIl6dZbb3W6rsK4+qz6xIkTubbLvM/Ly6vIz9w8PDzUrl07TZo0Sdu3b1dcXJw+//xzVaxYUX/88Ye6d+/OW6rIE6EIOKFEiRJq3LixJGnlypUF7t+oUSNJVz5bPH/+fI5tfvvttzzDJTd33XWXJGn16tVKTU3Nd7+rJ7YYJxZIb9Cggb2PvJaAW7NmjSSpbt26KlGiRIHHKYiAgAB1797dnhQVGxurPXv2uHRM3NgIRcBJzzzzjCRp1apVWrVqVZ5tr54UIkmdO3eWp6enUlJS9O677+bY56233nKqrj59+sjT01Pnzp3TqFGj8t3v6gk/8fHxBR43ODhY7dq1kyRNnDgxy2eomX766Sd9+eWXkqRu3boVeIzcXOvsz8/Pz76d06xWIBP/dwBOevLJJ9W6dWsZY/Too49q3Lhx9uUdkpScnKz169dr0KBBqlKlSpa+5cuX16BBgyRJY8eO1fjx45WUlCRJOnPmjJ5//nnNmzfPqW/AqFatmkaMGCFJmjBhgp566qksly8kJiZq0aJF2RYSr169ury9vSVdmbTizNniuHHjVKJECR06dEjt2rWzz8oyMjK0atUqPfDAA7p8+bKqVq2qZ599tsD7z83mzZt1xx136P3339f+/fuVkZEh6coZ7+bNmzVgwABJVyb53HHHHUU2Lv6C3HqVJOBi+VnR5s+uvng/Kioqz7YJCQnmoYceyrKUWGBgYJbl2iQZLy+vbH1TUlJM69at7Taenp5ZlmEr7DJvgwYNylKXv79/rsu8Zerfv7/dvmTJkqZixYomMjLSDB8+3G5zrWXeFi5caLy9vbM8Hr6+vgVa5i0vmW3Wr1+fY19ZS7yVLl06y/JwgYGBZuPGjXnuG+BMESiEwMBArVy5UqtWrVLXrl1VsWJFpaWl6cKFCypfvrzatm2r8ePH29cqXs3X11fffPONJk2apHr16snb21vGGN1zzz1avHix3n77bafr8vT01JQpU/T999+rR48eqlixoi5duiRjjGrXrq3+/fvbb2NeberUqRo9erT+9re/SZKOHTumo0eP6uzZs/keu2vXrtq3b5+effZZVa1aVWlpafLy8lK9evU0ZswY7d27V7Vq1XL62HJy5513avHixRowYIAaNmyoMmXKKDExUb6+vqpXr55efvll7d+/X/fcc0+Rjou/HocxfOU4AAASnykCAGAjFAEAsBCKAABYCEUAACyEIgAAFkIRAAALoQgAgIVQBADAQigCAGAhFAEAsBCKAABYCEUAACyEIgAAFkIRAADL/wfpWIHaZdOW3AAAAABJRU5ErkJggg==\n"},"metadata":{}}]},{"cell_type":"markdown","source":["##Test and Debug the Function"],"metadata":{"id":"JCWtE8CGVNWz"}},{"cell_type":"code","source":["from transformers import BertTokenizer, BertModel\n","import torch\n","import spacy\n","import numpy as np\n","\n","# Load the BERT tokenizer and model\n","tokenizer = 
BertTokenizer.from_pretrained('bert-base-uncased')\n","model = BertModel.from_pretrained('bert-base-uncased')\n","\n","# Load the spaCy model\n","nlp = spacy.load('en_core_web_trf')\n","\n","# Use the first three test reviews\n","examples_fake = test_examples[:3]\n","\n","# Initialize list for ordered embeddings\n","ordered_embeddings = []\n","\n","# Dummy implementation of get_entity_index and all_embeddings for demonstration\n","def get_entity_index(text):\n","    # Replace this with the actual implementation\n","    if text == \"the chicago hilton\":\n","        return 0\n","    return None\n","\n","# Assume all_embeddings contains arrays of varying shapes (1, 1, 768), (1, 2, 768), or (1, 3, 768)\n","all_embeddings = [np.random.rand(1, 3, 768), np.random.rand(1, 1, 768), np.random.rand(1, 2, 768)] # Dummy embeddings for demonstration\n","\n","# Function to get BERT embeddings\n","def get_bert_embedding(text):\n","    inputs = tokenizer(text, return_tensors='pt', truncation=True, max_length=512)\n","    outputs = model(**inputs)\n","    return outputs.last_hidden_state.mean(dim=1).detach().numpy()\n","\n","# Function to get spaBert embeddings\n","def get_spaBert_embedding(text):\n","    text = text.lower() # Convert text to lowercase\n","    entity_index = get_entity_index(text)\n","    if entity_index is None or entity_index >= len(all_embeddings):\n","        return None\n","    else:\n","        embedding = all_embeddings[entity_index]\n","        # Ensure consistent shape by averaging if necessary\n","        return embedding.mean(axis=1)\n","\n","# Step through each sentence and get the embeddings\n","for entry in examples_fake:\n","    review = entry[0]\n","    print(\"Sentence: \" + review)\n","    doc = nlp(review)\n","\n","    entity_spans = [(ent.start, ent.end, ent.text, ent.label_) for ent in doc.ents]\n","\n","    token_embeddings = []\n","\n","    i = 0\n","    while i < len(doc):\n","        if any(start == i for start, end, _, _ in entity_spans):\n","            # If the current token starts an entity, find the full entity\n","            for start, end, text, label in entity_spans:\n","                if start == i:\n","                    if label in ['FAC', 'ORG', 'LOC', 'GPE']:\n","                        # Try to get SpaBert embedding for the whole geo-entity\n","                        spaBert_emb = get_spaBert_embedding(text)\n","                        if spaBert_emb is not None:\n","                            spaBert_emb = spaBert_emb.flatten() # Flatten to a 1-D array\n","                            print(\"Geo-Entity: \" + text)\n","                            print(\"SpaBert Embedding shape: \", spaBert_emb.shape)\n","                            token_embeddings.append((text, spaBert_emb))\n","                        else:\n","                            # If spaBert embedding is None, get one BERT embedding for the whole geo-entity\n","                            bert_emb = get_bert_embedding(text)\n","                            print(\"Fallback BERT Embedding for Geo-Entity: \" + text)\n","                            print(\"BERT Embedding shape: \", bert_emb.shape)\n","                            token_embeddings.append((text, bert_emb))\n","                    else:\n","                        # Get BERT embedding for non-spatial entity\n","                        bert_emb = get_bert_embedding(text)\n","                        print(\"BERT Embedding for: \" + text)\n","                        print(\"BERT Embedding shape: \", bert_emb.shape)\n","                        token_embeddings.append((text, bert_emb))\n","                    i = end # Move the index past the end of the entity\n","                    break\n","        else:\n","            # Get BERT embedding for non-entity token\n","            token = doc[i]\n","            bert_emb = get_bert_embedding(token.text)\n","            print(\"BERT Embedding for: \" + token.text)\n","            print(\"BERT Embedding shape: \", bert_emb.shape)\n","            token_embeddings.append((token.text, bert_emb))\n","            i += 1\n","\n","    # Append embeddings for the current review to the ordered list\n","    ordered_embeddings.append(token_embeddings)\n","\n",
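"# Added sketch (illustrative; not part of the original notebook): the two embedding\n","# paths above return different shapes. get_bert_embedding() yields a (1, 768) array,\n","# while the SpaBERT branch flattens its vectors to (768,). If uniform 1-D vectors are\n","# needed downstream, a hypothetical helper like to_vector() would normalize both:\n","def to_vector(emb):\n","    # torch.as_tensor() accepts NumPy arrays; flatten() returns a 1-D (768,) tensor\n","    return torch.as_tensor(emb).flatten()\n","# e.g. to_vector(bert_emb) and to_vector(spaBert_emb) then share shape (768,)\n","\n",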
"# Convert embeddings to tensors for further processing\n","final_ordered_embeddings = [\n","    [(text, torch.tensor(embedding)) for text, embedding in review_embeddings]\n","    for review_embeddings in ordered_embeddings\n","]\n","\n","# Print the results\n","print(\"Ordered Embeddings:\")\n","for review_embeddings in final_ordered_embeddings:\n","    print(\"New Review:\")\n","    for text, embedding in review_embeddings:\n","        print(f\"{text}: {embedding}\")\n"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"srxwAWePb7cv","executionInfo":{"status":"ok","timestamp":1722482142965,"user_tz":420,"elapsed":28378,"user":{"displayName":"Jason Phillips","userId":"10136472498761089328"}},"outputId":"50a10f83-86f2-4c96-954a-7404f55cbeab","collapsed":true},"execution_count":null,"outputs":[{"output_type":"stream","name":"stderr","text":["/usr/local/lib/python3.10/dist-packages/spacy/util.py:1740: UserWarning: [W111] Jupyter notebook detected: if using `prefer_gpu()` or `require_gpu()`, include it in the same cell right before `spacy.load()` to ensure that the model is loaded on the correct device. More information: http://spacy.io/usage/v3#jupyter-notebook-gpu\n"," warnings.warn(Warnings.W111)\n"]},{"output_type":"stream","name":"stdout","text":["\u001b[1;30;43mStreaming output truncated to the last 5000 lines.\u001b[0m\n"," <remaining per-token embedding tensor printouts omitted>
8.2656e-02,\n"," -4.2981e-01, -5.0778e-01, -9.1735e-02, 2.2126e-01, -3.4921e-02,\n"," 8.9432e-02, -4.6558e-01, -3.0298e-01, 1.5568e-01, 7.7618e-02,\n"," 5.9864e-01, 7.6054e-02, 5.3246e-01, 3.4488e-01, 1.3232e-01,\n"," -5.0681e-01, 4.2888e-02, 9.2467e-02, 2.2621e-01, -6.8085e-02,\n"," 6.4268e-02, 2.9970e-01, 3.0785e-01, 4.3337e-01, -2.4177e-01,\n"," 4.5123e-01, -2.1042e-01, -1.5066e-01, -3.2193e-01, 2.3048e-01,\n"," -3.5644e-01, -4.2225e-01, 1.0407e-01, -1.0613e-01, 4.6739e-02,\n"," -1.2326e-01, 1.7536e-01, 1.8654e-01, 1.5260e-01, 7.4601e-02,\n"," 2.3776e-02, -3.3356e-01, 2.0624e-01, 4.8988e-01, -2.1484e-01,\n"," 2.6085e-01, 4.2907e-03, -1.5448e-01, 1.7374e-01, -1.5047e-01,\n"," -5.1552e-01, -8.5561e-02, -1.2645e-01, 3.6549e-01, -2.8782e-01,\n"," 2.6058e-01, -8.6930e-03, 4.0833e-01, -2.1392e-02, -5.6000e-01,\n"," -3.6655e-01, 7.8567e-02, 4.2874e-01, 1.5124e-01, -6.1984e-01,\n"," -2.3722e-01, -1.3388e-01, -1.2631e-01, 1.6851e-01, 1.8093e-01,\n"," -3.7693e-02, 1.2901e-01, 3.3172e-01, 3.8932e-01, -4.2852e-01,\n"," -3.6691e-01, 3.4901e-02, -1.1733e-01, -7.8055e-03, 2.5049e-02,\n"," 1.5528e-01, -4.6668e-01, -1.4601e-01, 2.7901e-01, -4.0122e-01,\n"," -2.9922e-01, -1.7071e-01, 7.2100e-02, 5.3903e-01, 4.9446e-02,\n"," 1.8611e-01, 3.9184e-01, -4.0948e-01, -3.1001e-01, -3.6400e-01,\n"," 2.2928e-01, 3.2897e-02, 3.5467e-01, 3.2829e-02, -3.3781e-01,\n"," -4.1136e-01, 2.3005e-01, -2.0464e-02, 1.0534e-01, 3.0562e-01,\n"," -2.3885e-01, -1.7667e-01, 5.7814e-01, 1.3691e-01, -6.8672e-01,\n"," -6.7866e-03, -2.8899e-01, -2.5964e-01, -1.1600e-01, -5.0626e-01,\n"," -5.1745e-01, 1.7356e-02, 3.7843e-02, -1.2863e-01, 1.6746e-01,\n"," 5.7741e-01, 4.5241e-01, 2.9691e-01, -2.3909e-01, -1.7628e-01,\n"," 1.2903e-01, -1.0709e-01, -2.2481e-03, -8.4869e-01, -1.6803e-01,\n"," -3.4063e-02, 1.8357e-01, 4.3764e-02, -8.7373e-02, 2.3552e-01,\n"," 1.5954e-01, -3.3332e-02, -1.4616e-01, -2.9096e-02, -5.0226e-03,\n"," 5.0387e-02, 1.9696e-01, 3.2610e-01, -4.1396e-01, 2.4605e-01,\n"," 2.1061e-02, 3.4157e-01, -2.6485e-01, -9.0696e-03, 3.6800e-01,\n"," -3.5547e-01, -3.6937e-01, 1.9005e-01, 1.0360e+00, 8.0834e-01,\n"," -3.2096e-02, -4.2683e-02, 3.5666e-01, -2.1886e-01, -1.6052e-01,\n"," 8.6015e-02, -2.7510e-01, -6.4987e-01, 1.9867e-01, -7.7649e-02,\n"," -9.8748e-02, 5.4568e-02, -6.5018e-02, 1.7677e-01, -3.4066e-01,\n"," 2.3156e-01, -4.6544e-01, -2.8610e-01, -6.3517e-02, 2.6259e-01,\n"," 3.8220e-02, -6.2086e-02, -1.8990e-01, 2.6974e-01, 1.8499e-01,\n"," -3.1193e-01, -3.8158e-02, -2.4080e-01, -7.0333e-01, 6.6644e-02,\n"," 2.3776e-01, 5.2434e-01, -6.0955e-01, 4.3559e-02, 9.2938e-02,\n"," -6.8132e-01, 1.2533e-02, 1.9932e-01, 2.4007e-01, -7.0889e-02,\n"," 4.7627e-01, 4.0209e-01, 3.2402e-01, 2.3604e-01, -4.1696e-01,\n"," -3.3027e-01, -5.6188e-01, 4.7798e-01, 3.1419e-01, 1.4704e-01,\n"," -1.7947e-02, 4.9978e-01, -6.2832e-02, -2.0645e-01, 3.8227e-01,\n"," 2.2955e-01, -2.0701e-01, -3.5132e-01, 1.0823e-01, -1.9474e-02,\n"," 2.7179e-01, -1.4539e-01, -1.2640e-01, -2.8944e-02, 3.2080e-01,\n"," -2.7629e-01, 1.0162e-01, 3.5193e-01, 5.4211e-02, -2.3018e-01,\n"," -1.3879e-01, 4.8029e-02, -2.7097e-01, 1.8626e-01, 1.1639e-01,\n"," 6.3488e-02, -3.1306e-01, -1.1445e+00, -5.0344e-02, 6.6426e-03,\n"," -1.9401e-02, -3.2551e-01, -2.6791e-02, 2.8124e-01, 1.2197e-01,\n"," -4.5488e-01, -2.3495e-01, -8.2608e-02, 4.2248e-01, 3.0545e-01,\n"," 1.7438e-01, 1.9974e-01, 2.1834e-01, 1.2738e-01, -3.3408e-01,\n"," 4.4410e-01, -5.4000e-01, -2.1152e-01, -2.6625e-01, -2.3229e-01,\n"," -1.8770e-01, -5.2992e-02, 5.4430e-01, -7.6142e-01, -3.5234e-02,\n"," 2.1700e-01, 5.3426e-02, 
5.1466e-02, 2.0147e-01, -8.3735e-02,\n"," -2.0607e-02, -5.9429e-01, 9.8292e-02, 1.8464e-01, 2.9018e-01,\n"," -1.3225e-02, 4.4727e-01, 3.4694e-01, 5.9295e-01, -7.6907e-02,\n"," -3.3563e-01, -3.7522e-02, -2.0567e-01, 3.7593e-02, 6.3959e-01,\n"," -1.2071e-01, -1.5493e-01, -6.7366e-01, 1.3272e-01, 2.1831e-01,\n"," -1.2465e+00, 4.9006e-01, 9.5738e-02, -1.9943e-01, -1.3369e-01,\n"," 2.3173e-02, 1.0562e-01, -4.8005e-01, -1.2307e-01, -3.1234e-01,\n"," 1.4729e-01, -6.0690e-02, -4.1739e-01, 3.7265e-02, -6.0652e-01,\n"," 3.1934e-01, 2.4017e-02, 2.5446e-01, 1.0526e-01, 4.9362e-01,\n"," 2.9498e-02, 1.9185e-01, 2.8104e-01, -3.8310e-01, 2.9010e-02,\n"," -3.3226e-01, -2.2402e-01, -1.2438e-01, -2.9204e-01, 3.0043e-01,\n"," 4.3323e-05, -1.9795e-01, -2.6715e+00, 1.4403e-01, 8.2754e-02,\n"," -4.7888e-02, 1.5393e-01, -4.6024e-01, 5.4568e-01, 4.2123e-02,\n"," 1.5056e-01, -1.1054e-01, 8.1319e-02, -6.8825e-01, 2.3358e-01,\n"," 2.1198e-01, -6.2038e-03, -6.5695e-02]])\n","pet: tensor([[-1.1753e-01, -1.9058e-01, -1.4255e-01, -2.1120e-01, -1.6127e-01,\n"," -4.3370e-02, 3.4605e-01, -7.8662e-02, 2.3042e-02, -2.3518e-01,\n"," -5.6055e-02, -7.8579e-02, 3.0462e-02, 1.2073e-02, -3.5568e-01,\n"," -2.2639e-01, -3.1121e-02, 2.0060e-01, 2.2185e-01, 2.0930e-01,\n"," 1.3519e-01, -2.9154e-01, 1.1572e-01, -7.0738e-02, 1.3041e-01,\n"," 2.1452e-01, -2.9310e-01, 4.8277e-02, -2.7578e-01, 5.4618e-02,\n"," -1.4875e-01, -2.4007e-01, 2.5294e-01, 4.6507e-01, -1.2825e-01,\n"," -2.9974e-01, -8.0699e-02, -7.8631e-02, -3.2636e-01, -6.5924e-02,\n"," 3.3836e-02, -6.9477e-02, 5.9497e-02, -1.6523e-01, 2.2343e-01,\n"," -2.1711e-01, -2.8202e-01, 2.4413e-01, -2.2494e-01, 1.9964e-01,\n"," -1.3879e-01, 1.5774e-01, -8.4627e-02, 2.8198e-01, -6.2480e-03,\n"," 7.3649e-02, 2.8430e-01, 9.9791e-02, -2.3266e-01, -1.1902e-01,\n"," -1.7452e-01, 1.0306e-01, -8.4619e-02, 1.1353e-01, 4.4240e-01,\n"," 4.5958e-02, 3.6057e-01, 2.6121e-02, -5.2768e-01, 8.5782e-02,\n"," -2.4788e-01, -1.4006e-01, 2.7123e-01, -1.9901e-02, 2.5135e-01,\n"," 8.2211e-02, -1.7276e-01, 3.9833e-01, 2.1625e-01, 5.3304e-02,\n"," 2.0732e-01, 9.5838e-02, -1.3478e-01, 4.1772e-01, -2.3737e-02,\n"," 2.2211e-01, -1.3746e-01, -1.5004e-01, -1.6962e-01, 1.9584e-01,\n"," 1.8280e-01, -1.4908e-01, 3.5352e-02, 7.7354e-02, 6.6443e-02,\n"," 5.4267e-02, -3.9627e-01, 7.0211e-02, -1.6846e-01, -8.6571e-02,\n"," -1.5498e-01, -2.6894e-01, -1.4246e-01, 3.7578e-01, -1.3995e-01,\n"," -2.4565e-02, -2.7699e-02, -6.9196e-02, 6.7844e-02, -5.8438e-01,\n"," 3.9514e-01, 1.3475e-01, 2.8893e-01, 3.3088e-02, -7.0129e-02,\n"," 2.8087e-01, 8.7138e-02, 3.4312e-02, 9.0724e-02, 3.6819e-02,\n"," -4.1970e-01, -3.0208e-01, 3.7519e-02, 6.0225e-01, 1.2972e-01,\n"," 4.1280e-01, -1.2700e-01, -4.8160e-01, 1.4684e-01, -2.3733e-01,\n"," -1.2955e-01, 3.1048e-01, 2.1254e-01, 1.9369e-01, -1.0270e-01,\n"," 1.2105e-02, 1.9017e-01, 4.9736e-02, -1.1976e-01, -1.3432e-01,\n"," -3.0217e-02, 4.8193e-01, -9.5151e-01, -1.7706e-01, 2.4775e-01,\n"," 1.4874e-01, 1.6147e-01, 1.6795e-01, 1.2859e-01, -1.2160e-01,\n"," 1.6375e-01, 1.6763e-01, -3.5789e-01, -3.3751e-01, -3.1688e-01,\n"," -8.1245e-02, -1.6947e-02, 1.0358e-02, 1.7282e-01, 4.4796e-01,\n"," 3.1591e-01, 2.4263e-01, 4.9991e-02, 1.1235e-01, -2.0668e-01,\n"," -1.1015e-01, -2.4927e-02, 5.2555e-01, 2.2774e-01, 6.8240e-02,\n"," -4.3651e-01, -1.8895e-01, 3.0580e-01, 6.2210e-01, -1.8784e-01,\n"," 2.8747e-01, 8.5789e-02, 1.0787e-01, 6.6216e-03, -1.4267e-01,\n"," -2.8841e+00, 1.7748e-01, 3.6661e-02, -2.8352e-01, 2.0420e-01,\n"," -1.8695e-01, 3.1395e-01, -3.4693e-01, 3.5900e-01, -4.6401e-01,\n"," -3.0928e-01, 
-1.3484e-01, -4.1940e-02, 1.5071e-01, 2.4431e-01,\n"," -1.8370e-01, -8.3949e-03, -1.0815e-01, 3.4886e-01, 1.8172e-02,\n"," 2.9415e-01, -1.7515e-01, 2.8444e-01, 3.7879e-01, -4.8932e-02,\n"," 7.8610e-01, 7.0004e-02, -3.2650e-01, 1.6456e-01, 7.2159e-02,\n"," -6.9577e-01, 4.2778e-01, 3.0247e-01, -3.0741e-01, 4.3254e-01,\n"," -3.9029e-01, 2.9388e-01, -2.4589e-01, -3.8480e-01, -8.2060e-02,\n"," 1.1360e-01, -8.4957e-02, -1.0398e-01, 2.0297e-01, -2.4375e-01,\n"," -3.6573e-01, 2.0354e-01, -4.5006e-02, 1.7091e-01, -9.7588e-03,\n"," -2.3450e-01, -1.9153e-01, 1.1457e-01, 1.1962e-01, -3.8969e-01,\n"," 3.2102e-01, 1.5275e-01, -1.0360e-01, -7.7887e-03, -5.0086e-02,\n"," -1.0807e-01, 1.2984e-01, 5.3984e-01, 9.7129e-02, -2.4622e-01,\n"," -3.3791e-01, 3.7603e-01, 1.8782e-01, 4.9861e-01, -2.2060e-01,\n"," 2.6978e-02, -3.9630e-01, -4.9249e-02, -2.7307e-01, -2.3526e-01,\n"," -7.7667e-02, -6.1610e-02, -3.0604e-01, 3.2165e-01, 1.1782e-01,\n"," 7.4824e-04, 2.3165e-01, 5.6424e-01, -2.1622e-01, 5.8619e-03,\n"," 7.8636e-02, 1.3682e-01, 6.2086e-02, 3.8915e-02, -8.1317e-02,\n"," -1.2051e-01, -8.0143e-02, -1.5526e-01, -1.3199e+00, 1.6328e-01,\n"," -1.0948e-01, 1.9113e-01, 1.8018e-01, 4.2901e-01, -1.4066e-01,\n"," 4.6388e-02, 1.0520e-01, 1.7548e-02, -7.0286e-02, 3.8145e-01,\n"," -4.1850e-01, -4.1174e-01, -4.4598e-01, 1.3714e-01, -5.0955e-01,\n"," -1.1868e-01, -7.0154e-02, 7.0206e-02, 2.1896e-01, -1.1593e-02,\n"," -5.7769e-02, -4.9587e-03, 2.5545e-01, -4.3224e-01, -2.4319e-01,\n"," 8.8821e-02, -1.4057e-01, 1.1532e-01, -2.9394e-01, 2.5214e-01,\n"," 1.6294e-01, 1.5609e-01, -5.6524e-02, -1.6464e+00, -2.9447e-01,\n"," 1.4028e-01, -3.7801e-01, 8.2192e-03, -3.4623e-01, -2.5595e-02,\n"," -6.7016e-02, -8.7282e-02, 2.0293e-01, 2.7765e-01, -5.1887e-01,\n"," 3.7704e-01, 2.0393e-01, 2.2995e-01, 1.2514e-02, -3.1003e-02,\n"," -2.1792e-01, -4.7384e-01, 1.6482e-01, 4.8442e-02, -4.9681e-02,\n"," 2.8802e-01, -3.0315e-02, 1.7627e-01, 3.8701e-01, -2.9742e-01,\n"," 1.4631e-01, -5.4230e-01, 6.5122e-02, -8.5074e-02, -2.8594e-01,\n"," -2.3099e-01, 2.1012e-03, 3.4846e-01, 9.1328e-02, 2.6014e-01,\n"," 1.6746e-01, 8.2186e-01, 1.1069e-01, 1.0567e-01, 2.3986e-01,\n"," 6.6488e-02, -8.2876e-02, -2.4455e-01, -3.6692e-02, 2.4992e-01,\n"," -1.7422e-01, -1.2981e-01, 3.0355e-01, 4.1134e-01, -1.7064e-01,\n"," 1.8952e-01, -1.1807e-01, 5.2106e-02, -1.4800e-02, 1.4011e-01,\n"," 2.8441e-01, -1.5933e-01, -1.3502e-01, 1.5945e-01, -2.3626e-01,\n"," 3.2939e-02, 2.3136e-01, -4.4689e-01, 4.4261e-02, -1.0254e-01,\n"," -3.0993e-01, -2.5479e-01, 1.8709e-01, -4.2888e-02, -3.9811e-01,\n"," -1.8627e-01, -8.7716e-01, -5.4473e-01, 3.9734e-01, -5.2474e-01,\n"," 1.6525e-01, 3.0488e-01, -3.2955e-01, 1.6459e-02, -7.8900e-02,\n"," 2.6964e-02, 3.4870e-01, 1.0267e-01, -1.4881e-01, -1.0231e-01,\n"," -4.0430e-01, 3.3390e-02, -2.3008e-01, -2.0931e-01, -5.6374e-02,\n"," 2.5200e-01, 1.3705e-01, 2.2343e-01, 4.5314e-02, 4.2261e-01,\n"," -3.0075e-01, 1.3390e-01, 4.5477e-02, -5.7855e-01, -1.8362e-01,\n"," 5.2306e-02, -1.3925e-01, 1.0609e-01, 1.7225e-02, -7.2257e-01,\n"," 8.2921e-02, -2.1040e-02, -3.1761e-01, 1.9204e-01, -1.2557e-01,\n"," 1.9676e-01, 1.2368e-01, 4.6391e-02, -6.7358e-02, 2.2318e-01,\n"," 4.6750e-01, 1.8953e-01, 3.2674e-01, -1.5106e-01, 3.2170e-01,\n"," 2.7222e-01, 2.6418e-02, 6.0749e-02, -1.2435e-02, -6.7527e-02,\n"," -3.6812e-01, -4.0337e-01, -8.3406e-02, 1.5634e-01, -4.3034e-02,\n"," 4.4651e-02, -3.1430e-01, -6.7450e-02, 1.4722e-01, 2.4026e-01,\n"," 4.1693e-01, 2.3363e-01, 4.6114e-01, 1.0067e-01, 1.1097e-01,\n"," -5.2192e-01, 3.1823e-01, -1.0478e-01, 2.2204e-01, 
-6.1398e-03,\n"," 8.0866e-02, 5.9015e-01, 3.6707e-01, 3.7564e-01, -2.5740e-01,\n"," 4.0970e-01, 7.9812e-02, 3.4716e-02, -2.6065e-01, 1.7714e-01,\n"," -3.5758e-01, -2.8841e-01, 4.5720e-02, -3.1158e-01, -5.0484e-02,\n"," 3.4163e-02, 2.5759e-01, -1.2582e-01, 2.5163e-04, 2.1328e-01,\n"," -1.6561e-01, -5.0836e-01, 3.2139e-01, 4.0254e-01, -2.1231e-01,\n"," 4.1592e-02, 1.4574e-02, -3.4600e-01, 5.3424e-02, -1.1287e-01,\n"," -4.9698e-01, -2.6382e-02, 5.5593e-02, 1.1204e-01, -1.7035e-01,\n"," 1.3071e-01, 1.0035e-01, 4.0221e-01, 4.8349e-02, -3.6989e-01,\n"," -3.0865e-01, 4.0742e-01, 4.9184e-01, 4.6415e-03, -1.5064e-01,\n"," -1.8198e-01, -1.8446e-01, -2.7118e-01, 1.5143e-01, 2.5412e-01,\n"," 1.0855e-01, 2.3216e-01, 3.4191e-01, 2.0984e-01, -3.3844e-02,\n"," -2.2614e-01, -9.8017e-02, -6.0761e-03, -1.3335e-01, 3.4231e-01,\n"," 4.5813e-01, -5.1720e-01, -4.7818e-01, 2.1231e-01, -4.2324e-01,\n"," -3.7438e-01, -1.9578e-01, 8.6346e-02, 6.1382e-01, 1.5777e-01,\n"," 1.4803e-01, 3.1891e-01, -3.1797e-01, -4.8079e-01, -2.9904e-01,\n"," 7.5277e-02, -7.7568e-02, 1.8260e-01, -1.7052e-01, -5.3166e-01,\n"," -2.2905e-01, 9.9965e-02, 8.8674e-02, 2.2100e-01, 2.3821e-01,\n"," -2.6148e-01, -6.4448e-03, 3.6649e-01, 3.2933e-01, -5.8273e-01,\n"," 4.5943e-02, -3.3525e-01, -2.1579e-01, 5.7436e-02, -4.9912e-01,\n"," -3.5531e-01, -1.5240e-01, 1.1543e-01, 5.4653e-02, 2.3208e-01,\n"," 5.7302e-01, 2.9494e-01, 2.4484e-01, -2.7409e-01, -1.8403e-02,\n"," 7.9628e-02, -1.7583e-02, -5.8299e-02, -4.5151e-01, -3.4850e-01,\n"," 2.6555e-02, 1.6538e-01, 1.3187e-01, -7.6054e-02, 7.4135e-02,\n"," 4.9512e-02, 1.4933e-02, -2.2375e-02, 2.9116e-02, -2.9583e-02,\n"," -1.8371e-01, 2.1525e-01, 2.1788e-01, -1.3803e-01, 1.9193e-01,\n"," 1.8964e-01, 3.9203e-01, -4.4168e-01, -3.3379e-02, 4.6331e-01,\n"," -4.9910e-01, -3.1701e-01, -1.0157e-01, 1.0160e+00, 7.0971e-01,\n"," -7.4534e-02, -9.1023e-03, 1.4531e-01, -1.8842e-01, -1.4315e-01,\n"," -1.2318e-01, -1.9530e-01, -3.6618e-01, 4.1947e-01, -1.0312e-01,\n"," -3.5979e-01, 2.0061e-01, -6.3292e-02, 2.9959e-01, -3.7871e-01,\n"," -7.6888e-02, -3.6421e-01, -5.5796e-04, 4.9564e-02, 4.4603e-01,\n"," -8.6819e-05, -5.1680e-02, 9.7872e-02, 3.0047e-01, 9.8823e-02,\n"," -2.6315e-01, 7.7465e-03, -1.8158e-01, -3.6351e-01, -2.0875e-01,\n"," 2.1281e-01, 7.3532e-01, -6.2573e-01, 3.1954e-01, -1.7914e-01,\n"," -6.8135e-01, -1.3013e-02, 3.9061e-01, 1.9133e-01, -1.7642e-01,\n"," 3.1850e-01, 1.8145e-01, 2.1918e-01, 2.2871e-01, -5.7479e-01,\n"," -2.7105e-01, 7.1082e-02, 4.2167e-01, 2.9609e-01, 1.6295e-01,\n"," -2.1418e-02, 3.3102e-01, 2.6365e-01, -2.1342e-01, 3.3048e-01,\n"," 2.2376e-01, -1.9068e-01, -2.2435e-01, 1.9898e-02, -1.5781e-01,\n"," 5.6756e-02, -2.8271e-03, -8.3910e-02, -3.8621e-02, 1.4286e-01,\n"," -3.3117e-01, 2.1200e-02, 1.6305e-01, -4.2152e-03, -7.8578e-02,\n"," -5.3149e-01, 1.6736e-02, -2.0410e-01, -2.8110e-02, 2.2032e-01,\n"," 1.2070e-02, -1.7500e-01, -1.1821e+00, -1.1400e-02, -2.5523e-01,\n"," -1.4252e-01, -1.1098e-01, 5.2331e-03, 6.6988e-01, 3.1522e-01,\n"," -3.6587e-01, -2.1484e-01, 1.0689e-01, 4.2707e-01, 2.6624e-01,\n"," 1.4765e-01, 3.7280e-02, -2.0889e-02, 1.5557e-01, -6.8058e-02,\n"," 1.5084e-01, -6.5229e-01, -2.8818e-01, -8.0414e-02, -1.8360e-01,\n"," -1.7340e-01, -1.8789e-01, 5.6256e-01, -2.9863e-01, -2.0133e-01,\n"," -3.4376e-01, 1.2683e-01, -5.9783e-02, 2.7311e-01, -2.8293e-01,\n"," 6.9732e-02, -2.9131e-01, 5.9031e-02, -1.3735e-01, 3.4276e-01,\n"," -1.4672e-01, 3.2908e-01, 3.8097e-01, 5.9105e-01, -3.2311e-01,\n"," -1.9467e-01, 4.2301e-02, -8.1523e-02, -2.2980e-02, 4.0276e-01,\n"," -2.1492e-01, 1.2680e-01, 
-3.9339e-01, -1.3606e-01, -3.6342e-02,\n"," -9.9831e-01, 4.3902e-01, -7.1286e-02, -2.6072e-02, 3.5223e-03,\n"," 1.9533e-01, -1.0678e-02, -3.2131e-01, 8.4775e-03, -1.2253e-01,\n"," 4.0220e-01, 3.2753e-01, -2.3378e-01, -9.9193e-02, -3.1437e-01,\n"," 2.3696e-01, -4.9375e-02, -1.1315e-01, 8.9506e-02, 3.4871e-01,\n"," -4.2066e-02, 1.1719e-01, 6.8989e-02, -1.3901e-01, 7.7135e-02,\n"," -4.7364e-03, -3.0045e-01, -2.6808e-01, -1.8846e-01, -1.6031e-01,\n"," 1.2057e-01, -1.9412e-01, -3.0179e+00, -1.5812e-01, 2.1011e-01,\n"," -8.6287e-02, 4.2192e-02, -3.7968e-01, 5.7899e-01, 4.8529e-02,\n"," -1.1639e-01, -1.9069e-01, 1.9664e-01, -4.7617e-01, 1.3494e-01,\n"," -2.7590e-03, -2.0677e-01, 1.5721e-01]])\n","-: tensor([[ 3.0881e-01, -1.8431e-02, -1.8723e-01, -2.0743e-03, -1.3487e-01,\n"," -4.9475e-02, 4.3985e-01, -2.4973e-02, 3.3502e-01, -5.3035e-01,\n"," 9.7750e-03, 1.0307e-02, 7.6879e-03, 8.0298e-02, -8.7145e-02,\n"," -7.2275e-02, 6.6143e-02, 3.4019e-01, 4.5770e-01, 2.2446e-02,\n"," 4.7634e-01, -5.2016e-02, 2.2895e-01, 8.1737e-02, 1.2476e-01,\n"," 1.3980e-01, -2.7710e-01, 1.5021e-01, -2.4448e-01, 2.0755e-02,\n"," 1.9976e-01, 1.7917e-01, -6.6373e-01, 6.0277e-01, 1.3352e-01,\n"," -1.8082e-01, 3.3583e-01, -1.9302e-01, 2.4297e-01, -3.6388e-02,\n"," -1.5318e-02, -5.9102e-02, -2.6925e-01, -1.7659e-01, 1.9031e-04,\n"," -9.3259e-02, -5.6735e-01, -8.8457e-02, -4.9099e-01, -1.9993e-01,\n"," -3.3283e-01, 7.9535e-02, 4.8903e-01, 5.6563e-01, -1.7308e-01,\n"," 3.6665e-01, 1.1027e-01, 1.4337e-01, -2.3334e-01, 2.7151e-01,\n"," 1.0027e-01, 4.7967e-01, 1.4626e-01, 1.1997e-01, 4.6199e-01,\n"," 2.8828e-01, 1.3253e-01, 1.3095e-01, -3.0922e-01, 3.8638e-01,\n"," -4.4984e-01, -5.7286e-01, 1.7795e-01, 2.0185e-01, 3.7316e-01,\n"," -5.2352e-02, -1.0832e-01, 3.8096e-01, 1.5721e-01, -4.3419e-02,\n"," 4.4761e-01, 2.1262e-01, 1.6401e-01, 1.8217e-01, 4.2928e-01,\n"," 2.4439e-01, -2.3081e-01, -1.4977e-01, 1.4295e-01, -2.2510e-01,\n"," -2.3632e-01, -3.0739e-01, -1.4158e-02, 2.4953e-01, -2.8313e-02,\n"," -2.1629e-01, -2.6970e-01, -9.2278e-02, 2.3390e-01, -2.5901e-01,\n"," 3.8780e-03, -5.0197e-01, -1.7892e-01, 2.4655e-02, -2.4109e-01,\n"," -2.8732e-01, -1.4773e-01, -3.2083e-01, 1.5485e-01, -9.4527e-01,\n"," 4.2784e-02, 4.0101e-01, 6.4385e-02, 1.7875e-02, -8.8187e-02,\n"," 5.1481e-01, 1.6086e-01, 1.7307e-01, 3.2073e-01, 5.0476e-01,\n"," -3.2060e-01, 2.0526e-01, -6.6513e-02, 4.9185e-01, -6.0099e-02,\n"," 3.6929e-01, 2.5052e-01, -3.3704e-01, -2.3335e-01, -1.9160e-01,\n"," 6.3221e-02, 5.3876e-01, 1.6195e-01, -1.3072e-01, -2.6529e-01,\n"," -1.3970e-01, 3.2382e-01, 1.7605e-01, -5.3121e-01, -3.8378e-02,\n"," -1.2451e-01, 2.2193e-01, -1.1207e+00, -2.9178e-01, 6.1909e-01,\n"," 1.5995e-01, 1.6181e-01, -7.2952e-02, 5.7034e-01, -2.4582e-01,\n"," 3.1664e-01, 4.4156e-02, -1.0142e-01, 1.0701e-01, -7.7426e-01,\n"," -2.5601e-01, 1.7534e-01, 8.7972e-02, 3.8632e-01, 5.7367e-01,\n"," 5.3502e-01, 2.6261e-01, 5.5710e-01, 3.6893e-01, -2.7515e-02,\n"," 9.8149e-02, -2.2874e-01, 5.5041e-01, 3.5332e-01, 1.3156e-02,\n"," -2.6012e-01, -5.5077e-01, 2.3858e-01, 4.2729e-02, -2.9299e-01,\n"," 4.3785e-01, 1.9440e-01, 1.5930e-01, -3.4150e-02, -4.4707e-01,\n"," -2.7735e+00, -4.8181e-02, 4.7313e-02, -2.4471e-01, 1.8528e-01,\n"," -3.8483e-01, 4.5337e-02, -1.6357e-01, -6.2813e-02, 1.6328e-01,\n"," -1.3078e-01, -1.1456e-01, -2.1758e-01, 5.4536e-01, 4.1796e-01,\n"," -1.3591e-01, -2.7533e-03, -5.1131e-01, 4.3371e-02, -9.0913e-02,\n"," 4.3065e-02, -1.3867e-01, 4.7574e-02, 2.2984e-01, -2.0350e-01,\n"," 1.4777e+00, -2.0221e-01, -4.3939e-01, 2.7812e-01, -5.4217e-02,\n"," -6.2090e-01, 
4.4726e-01, 4.4734e-01, -5.7979e-01, 6.4312e-02,\n"," -2.8265e-01, 3.6540e-01, -3.0646e-01, -3.3377e-02, -8.3762e-02,\n"," -1.4298e-01, -7.7321e-03, 1.4864e-02, 2.1774e-01, -3.4082e-01,\n"," -3.6549e-01, 1.8373e-01, 3.5785e-01, 1.2703e-02, -5.8164e-01,\n"," -2.1877e-01, -3.5604e-01, 1.2492e-01, 2.8744e-01, -2.1492e-02,\n"," 2.1360e-02, 9.0692e-02, -1.5428e-01, -6.4428e-02, -2.3393e-01,\n"," 5.7481e-02, 1.6744e-01, 1.2930e-01, 3.6524e-01, -4.5820e-01,\n"," 1.4073e-01, 2.9452e-01, -1.0094e-01, 2.0882e-01, -7.8174e-02,\n"," -9.5813e-02, -4.7136e-01, -4.6895e-02, -3.4572e-01, 4.3377e-02,\n"," 1.9695e-01, 7.0562e-02, -1.6429e-01, -3.7607e-03, 5.2542e-02,\n"," -1.5150e-01, 1.4484e-01, 2.7844e-01, -8.1543e-02, -3.7834e-01,\n"," 6.4515e-02, 2.0875e-01, -4.1447e-01, -8.8050e-02, 1.3991e-01,\n"," -1.0528e-01, -6.1875e-02, -1.0741e-01, -1.2388e+00, -2.4134e-01,\n"," -3.2277e-01, 5.5973e-01, 1.3245e-01, -7.8635e-02, 8.3189e-02,\n"," 1.3357e-01, 3.9026e-01, -1.7185e-01, 2.5468e-01, 2.5899e-01,\n"," -4.3948e-01, -2.6243e-01, -5.3407e-01, 3.3849e-01, -3.3655e-01,\n"," -2.7990e-01, 1.0349e-02, 1.4530e-01, 3.3844e-01, 1.2926e-02,\n"," -7.0957e-02, 4.4945e-01, -8.6354e-02, -3.8247e-01, 3.0030e-02,\n"," 5.8441e-02, -2.7072e-01, -4.0820e-01, -3.4088e-01, -1.1057e-01,\n"," 6.9281e-02, 1.4232e-03, 1.1199e-01, -2.1359e+00, -8.4013e-02,\n"," 8.6298e-04, -3.1330e-01, -3.1417e-02, -2.0773e-02, 2.5969e-01,\n"," -3.1765e-01, -2.6191e-01, -1.9374e-02, 1.2681e-01, -2.3586e-01,\n"," 2.0729e-01, 2.3280e-01, 5.4218e-01, 3.1464e-01, -4.1739e-02,\n"," 8.8836e-02, -1.8059e-01, 3.1180e-02, -8.8437e-02, 1.0642e-01,\n"," 3.9288e-01, -4.4002e-01, 2.9638e-01, 1.4696e-01, -2.6648e-01,\n"," 1.6345e-01, -5.8564e-01, -7.9970e-02, -8.0786e-02, -4.2115e-01,\n"," -1.5261e-01, 2.6673e-01, -1.3879e-01, 4.9749e-02, 1.1419e-01,\n"," -7.3311e-02, 7.7170e-01, 2.4730e-01, -1.0701e-01, 3.0918e-01,\n"," 1.3267e-01, 2.3126e-01, -1.1653e-01, 4.7934e-01, -2.0004e-01,\n"," -3.1478e-01, -3.5324e-02, -1.8873e-01, 2.5209e-02, -3.1662e-01,\n"," 3.2575e-01, -1.3358e-01, -2.0875e-01, -2.9592e-01, 3.2679e-01,\n"," 4.0825e-02, -3.1138e-01, -9.7667e-02, 1.0777e-01, -4.7067e-01,\n"," 2.9488e-02, 7.3456e-03, -4.5606e-01, 3.5407e-01, -9.8753e-02,\n"," 6.3368e-02, -2.6063e-01, -1.9631e-01, 2.1140e-02, 7.4937e-02,\n"," 8.9374e-03, -7.3127e-01, -3.0261e-01, 6.6611e-02, -6.7388e-01,\n"," -4.2806e-02, 5.9818e-01, -4.7534e-01, -5.8988e-02, -1.9419e-02,\n"," -2.3791e-01, 3.2545e-01, 1.6591e-01, 4.6574e-02, 5.5026e-01,\n"," -1.8627e-01, -2.0221e-01, -1.6992e-01, 2.9499e-01, 1.5326e-01,\n"," 1.9270e-02, 2.4621e-01, 1.2050e-02, 5.2469e-01, 1.6512e-01,\n"," -2.6104e-01, 9.2767e-02, 2.3603e-02, -8.8077e-02, -8.8156e-02,\n"," -2.4892e-02, 1.3019e-01, 1.1889e-02, 1.4010e-01, -7.1788e-01,\n"," 2.6608e-02, 3.6268e-02, -2.5298e-01, 4.2888e-01, -6.4859e-01,\n"," 9.8205e-02, 3.5307e-01, 5.0112e-01, -5.4153e-01, -6.6189e-02,\n"," 2.6129e-02, -1.9491e-02, 2.7243e-01, 3.3352e-01, -3.1321e-02,\n"," 9.8212e-02, -1.8364e-01, 2.1382e-01, 2.3711e-01, 2.1198e-01,\n"," -5.1263e-01, -5.3737e-01, -9.8104e-02, 2.9240e-01, 2.7386e-01,\n"," 1.4614e-01, -7.6071e-01, -1.2294e-01, 5.5545e-02, -9.1988e-02,\n"," 6.0640e-01, 4.6874e-02, 5.5175e-01, 5.5830e-01, 1.6957e-01,\n"," -4.5140e-01, 2.2449e-01, 2.0423e-01, 3.0442e-01, -9.1970e-02,\n"," 2.0044e-01, 1.2277e-01, 4.3866e-01, 1.4634e-01, -3.8025e-01,\n"," 5.7555e-01, -3.6780e-01, -1.4852e-01, -2.9311e-01, 1.8530e-01,\n"," -5.4767e-01, -3.1603e-01, 1.8686e-01, -2.4414e-01, 1.8953e-01,\n"," -2.6241e-01, 3.2963e-01, 2.1885e-01, -8.2357e-03, 
2.0849e-01,\n"," -7.0639e-02, -6.1242e-01, 2.8057e-01, 3.2841e-01, 1.5015e-02,\n"," -1.9113e-01, 6.3136e-02, 2.5264e-01, -7.0410e-02, -6.2326e-03,\n"," -2.8874e-02, 3.9968e-01, 1.1005e-01, -1.2466e-01, -3.1321e-01,\n"," -8.1496e-02, 7.1259e-02, 2.4384e-01, -9.1276e-03, -2.9388e-01,\n"," -3.9513e-01, 1.2320e-01, 4.2580e-01, 2.7149e-01, -3.8357e-01,\n"," -3.9259e-02, -1.9710e-01, -4.7921e-01, -4.0352e-01, 3.6449e-01,\n"," 2.6308e-02, 7.3807e-02, 2.1356e-01, 3.0931e-01, 1.1677e-01,\n"," -4.9016e-02, 1.3813e-01, 1.4178e-01, -1.2268e-01, 2.5576e-01,\n"," 5.1400e-01, -2.7425e-01, -1.8913e-01, 1.0101e-01, -3.3838e-01,\n"," -2.9238e-01, -5.5262e-01, 2.8674e-01, 1.9634e-01, 3.7120e-01,\n"," 1.7354e-01, 4.2791e-01, -1.9697e-01, -5.9127e-01, -3.6937e-01,\n"," -1.9443e-01, 1.0262e-01, -1.0176e-01, 3.5622e-04, -3.4120e-01,\n"," -5.4022e-01, -5.0228e-01, -4.1879e-01, -3.9689e-01, 3.2575e-01,\n"," 1.2836e-01, -5.1145e-03, 3.3047e-01, -1.4841e-01, -2.8841e-01,\n"," 1.3860e-01, -9.3225e-02, 2.4217e-01, 2.2177e-01, -3.9906e-01,\n"," -2.3422e-01, 8.1297e-02, -3.4354e-01, 6.7377e-03, 1.5799e-01,\n"," 2.7954e-01, 2.0366e-01, 2.4677e-01, -3.1699e-01, -3.6054e-01,\n"," 1.9518e-01, -3.6106e-01, -5.6279e-02, -4.3485e-01, 1.5595e-01,\n"," 7.3996e-02, 9.8081e-02, -2.1425e-01, -1.8261e-01, 5.6920e-02,\n"," 8.2795e-02, -2.0711e-01, 6.9064e-02, 2.2939e-01, -9.3168e-02,\n"," 5.8189e-01, 1.2109e-01, 5.0591e-01, -1.0948e-01, 1.5804e-01,\n"," -4.2084e-01, 1.6607e-01, -4.0567e-01, -1.8787e-01, 1.5802e-01,\n"," -2.8762e-01, -6.4372e-01, -1.6994e-01, 1.0742e+00, 6.0754e-01,\n"," -1.2398e-01, -2.3258e-01, 3.9317e-02, -2.6285e-01, -5.0107e-01,\n"," 2.4170e-02, 2.4937e-01, -3.6157e-01, 4.4621e-01, 2.4640e-01,\n"," 3.4309e-02, 3.4567e-01, -3.8077e-01, 1.1273e-01, -4.5777e-01,\n"," -2.5005e-01, -6.6396e-01, 2.6011e-01, -2.9320e-01, 6.6699e-01,\n"," -3.6833e-01, 3.3794e-02, -2.0355e-02, 1.6663e-01, 6.6694e-01,\n"," 1.4021e-01, 2.5975e-01, -1.7444e-02, -1.3142e-01, 4.3643e-01,\n"," 8.4788e-02, 5.8767e-01, -5.7058e-01, -1.0283e-01, -3.5304e-01,\n"," -6.6969e-01, -1.0152e-01, 1.7763e-01, -1.7186e-01, -1.1462e-01,\n"," 2.6949e-01, 3.6778e-01, 1.3292e-01, 1.1114e-01, -4.2783e-01,\n"," -2.0638e-01, -3.5399e-01, 1.0316e-01, 1.4024e-01, 2.1931e-01,\n"," -2.3168e-01, 3.1902e-01, -2.9244e-01, -3.7271e-01, 2.4853e-01,\n"," 3.2360e-01, -1.9291e-01, -9.9434e-02, 5.9524e-02, 9.4724e-02,\n"," 6.3932e-02, 2.7129e-01, 1.3411e-01, -1.4441e-01, -3.0720e-01,\n"," 2.4942e-01, -4.9376e-02, -3.9676e-01, 1.1472e-01, 9.4558e-02,\n"," 2.7924e-02, -1.6204e-01, -3.5314e-01, -6.3576e-01, -5.6745e-02,\n"," 1.4032e-01, -2.2482e-01, -1.1739e+00, 1.5968e-01, -8.4789e-02,\n"," -2.2310e-01, -5.3320e-02, 4.9960e-01, 3.4601e-01, -1.2102e-01,\n"," -1.8815e-01, -3.2725e-01, -7.0873e-02, 4.8036e-02, 2.0354e-01,\n"," -4.2389e-02, -2.6361e-02, 1.3182e-01, 6.5451e-02, 1.4911e-01,\n"," 2.8582e-01, -2.6823e-01, 3.5206e-01, 2.7665e-01, -5.7584e-02,\n"," -7.3346e-02, 2.0605e-01, 2.2716e-01, -8.2873e-01, -3.0445e-01,\n"," -1.5500e-01, 4.6156e-01, -1.0073e-01, 7.4445e-01, -9.4295e-02,\n"," 6.6074e-02, -1.8402e-01, -2.9506e-01, 8.1425e-02, 1.7701e-01,\n"," 1.8796e-01, 6.5443e-01, -3.3569e-02, 6.6976e-01, 1.3333e-01,\n"," -3.0119e-01, -1.0617e-01, -3.3671e-01, 9.6051e-02, -1.4850e-03,\n"," 4.6755e-01, -3.3934e-01, -7.8341e-01, -9.4983e-02, 2.5593e-01,\n"," -1.1166e+00, 7.7357e-01, -8.6369e-02, -2.5843e-01, -8.2375e-02,\n"," 1.6280e-01, -7.4769e-02, -6.1348e-01, 2.4280e-01, -6.2289e-01,\n"," 1.1129e-01, 3.0934e-03, -2.9315e-01, -2.6015e-01, -6.4838e-01,\n"," 5.8760e-01, 5.5165e-01, 
-2.2982e-01, 2.3379e-01, 2.2738e-01,\n"," 3.2897e-01, -2.1704e-01, 5.6538e-01, -3.5777e-01, 3.5868e-01,\n"," -1.4628e-02, 1.7756e-01, -5.6393e-01, -5.4452e-01, -1.5569e-01,\n"," 4.0129e-01, 9.1878e-02, -2.9235e+00, -2.1916e-01, 1.0543e-01,\n"," -2.9480e-01, 1.7017e-01, 2.5921e-02, 5.4776e-01, -6.6025e-02,\n"," 2.8036e-01, -1.1701e-01, 1.3038e-01, -3.1908e-01, -5.0228e-02,\n"," 1.7403e-01, 1.4155e-01, 3.2593e-01]])\n","friendly: tensor([[-2.2140e-01, 4.3990e-02, -2.3101e-01, -1.8569e-01, -1.3231e-01,\n"," -2.7459e-01, 4.2728e-01, 1.6677e-01, -7.7132e-02, -9.4585e-02,\n"," -7.2238e-02, -8.3458e-02, 2.6498e-01, -1.1185e-01, -3.1985e-01,\n"," 1.0869e-02, 1.3324e-02, 5.5499e-02, 3.1895e-01, 2.6277e-01,\n"," 1.7810e-01, -1.7001e-01, -9.3102e-02, 1.6224e-02, 1.1539e-01,\n"," 1.6234e-01, -1.5458e-02, 1.6934e-02, 3.3433e-02, -9.2549e-02,\n"," 1.1977e-01, -2.7937e-01, 4.6653e-02, 1.9252e-01, -9.6098e-02,\n"," -8.1045e-02, 2.3850e-01, 7.7941e-02, -5.9016e-01, 2.7907e-01,\n"," -1.8927e-01, -1.0746e-01, 1.2495e-01, -6.9341e-02, 3.4162e-01,\n"," -3.3531e-01, -1.5507e-02, 1.5959e-01, -1.2481e-01, 2.4653e-01,\n"," -2.5131e-01, 2.2947e-01, -1.9405e-01, 2.3186e-01, 9.5830e-02,\n"," 2.9084e-01, 1.6804e-01, -1.8679e-01, -3.1979e-01, -4.0190e-02,\n"," -3.7584e-01, 2.3215e-01, -1.8206e-01, -1.0988e-02, 3.1072e-01,\n"," 4.4216e-01, 1.9822e-01, -1.7736e-01, -4.3226e-01, 2.0933e-01,\n"," -1.8594e-01, -1.1083e-01, 2.5158e-01, 1.6061e-01, 3.0126e-01,\n"," 4.4919e-01, -2.6176e-01, 3.0893e-01, -1.3984e-01, -1.6927e-01,\n"," 8.1348e-02, -9.7839e-03, -7.0780e-02, 3.1439e-01, 1.6373e-02,\n"," 4.4280e-01, -4.0384e-01, -7.2722e-02, -1.4546e-01, 6.7337e-02,\n"," -1.2445e-01, 1.0607e-01, 4.1447e-02, 5.0354e-02, 2.2233e-01,\n"," 2.9890e-01, -2.4084e-01, 2.8672e-01, -2.4250e-01, -1.4397e-01,\n"," 1.0763e-01, 1.8062e-01, -1.3753e-01, 2.7959e-01, -1.9898e-01,\n"," 1.6520e-01, -7.7052e-02, -3.5875e-01, 1.0365e-01, -2.9368e-01,\n"," 3.6961e-01, 1.4354e-01, 6.3835e-02, -8.1978e-02, -4.2554e-01,\n"," 3.3810e-01, -8.5893e-02, -1.5604e-01, -1.0030e-01, 1.5558e-01,\n"," -5.6946e-01, -5.6166e-01, -2.1474e-01, 5.6901e-01, -1.0066e-01,\n"," 1.2305e-01, -2.8007e-03, -4.0254e-02, 1.2015e-01, -3.2802e-01,\n"," 7.2741e-02, 2.4190e-01, -6.5859e-02, 2.3377e-01, -2.3166e-01,\n"," 2.3591e-01, 2.5592e-01, -1.1789e-01, -4.1824e-01, -8.1399e-03,\n"," 1.3829e-01, 3.6739e-01, -8.8446e-01, -2.8105e-01, 2.6968e-01,\n"," 1.1998e-01, 3.0770e-01, 5.5954e-02, 2.3089e-01, -2.7707e-01,\n"," 6.8921e-02, 1.8563e-01, -2.4531e-01, -2.8641e-01, -5.9113e-01,\n"," -1.6114e-01, 1.4747e-01, 3.1348e-03, 3.5620e-01, 4.8936e-01,\n"," 1.9966e-01, 3.9166e-02, 3.8746e-02, 5.8677e-02, -2.3945e-01,\n"," 3.0113e-01, -2.0508e-01, 7.3599e-01, 1.2720e-01, 2.3369e-01,\n"," -1.9826e-01, -8.8454e-02, 5.5964e-01, 9.3382e-02, -7.0938e-02,\n"," 2.3816e-01, -8.9947e-02, 4.1020e-01, 1.6041e-01, -2.1967e-01,\n"," -3.0013e+00, -9.2743e-03, 1.6197e-01, 1.8251e-01, 3.4978e-01,\n"," -5.7023e-01, 2.4223e-01, -3.8069e-01, -4.7157e-02, -3.7828e-02,\n"," -1.6632e-01, -1.4762e-01, -4.1309e-01, 1.5553e-01, 1.4430e-01,\n"," -1.8244e-01, -2.2679e-02, -2.7880e-01, -3.0313e-01, -2.3532e-01,\n"," 3.6923e-01, 6.1061e-02, 8.0316e-02, 7.4025e-01, 1.3070e-01,\n"," 6.6586e-01, 1.9735e-01, -4.1408e-01, 2.2176e-01, 2.3102e-01,\n"," -7.2303e-01, 7.2317e-01, -9.0471e-02, 1.0948e-02, 1.7466e-01,\n"," -5.1220e-01, 2.4975e-01, -3.2865e-02, -4.0506e-01, 4.5636e-02,\n"," 9.8683e-02, 2.8699e-02, -2.0806e-01, 1.4779e-01, -7.5121e-02,\n"," -2.2047e-01, 1.8203e-01, 2.4130e-01, 4.7734e-02, 3.9481e-01,\n"," 6.5021e-02, 
-2.2825e-01, 1.7691e-01, -1.2419e-01, -5.8243e-01,\n"," 2.1777e-02, -2.2865e-01, -2.4909e-01, -6.6129e-02, -2.0299e-01,\n"," -3.6645e-01, -9.0265e-02, 3.8731e-01, -6.6940e-03, -9.6125e-02,\n"," -9.3349e-02, 1.4660e-01, 1.4350e-01, 3.8704e-01, 2.7450e-01,\n"," -7.7417e-02, -4.4768e-01, 9.4376e-02, -4.9511e-01, -3.5469e-01,\n"," 1.2696e-01, -3.2623e-01, 1.5129e-02, 2.4072e-01, 1.3385e-04,\n"," 3.0715e-02, 5.1471e-01, 6.4129e-01, 7.9104e-02, -4.2079e-01,\n"," -1.1808e-01, 3.4792e-01, -2.6498e-01, -3.8793e-01, -1.5324e-01,\n"," -5.5945e-02, -9.4088e-02, 1.6347e-01, -1.1053e+00, -2.9547e-01,\n"," 6.2842e-02, 1.1614e-01, 2.2374e-01, 1.3019e-01, 2.6067e-01,\n"," 1.5147e-01, 3.7435e-01, -3.7838e-01, 9.6783e-02, 2.0990e-01,\n"," -5.4412e-02, -3.5627e-01, -5.1388e-01, -1.1376e-02, -8.1279e-01,\n"," -3.5639e-01, -7.1313e-03, -1.7301e-02, 1.5902e-01, 3.8770e-01,\n"," -1.0624e-02, 2.4233e-01, 5.3838e-01, 2.2376e-01, -4.8049e-01,\n"," -1.9268e-01, -1.3558e-01, 9.0444e-02, -1.3063e-01, -3.1983e-01,\n"," 3.5327e-01, -1.0118e-01, -1.2608e-01, -9.5633e-01, 9.3933e-02,\n"," 2.2519e-01, -3.9695e-01, 3.2696e-01, -2.1269e-01, 3.3800e-02,\n"," -2.2256e-01, -2.5215e-01, 2.8579e-02, 3.4871e-01, -2.7549e-01,\n"," 1.2763e-01, 1.7217e-01, 1.1703e-01, 9.9974e-02, -8.7098e-02,\n"," -4.2079e-01, -4.5264e-01, 8.6515e-02, -3.1557e-01, 1.4825e-01,\n"," 4.5001e-01, -1.0715e-01, 4.3266e-01, 6.6619e-01, 8.5136e-02,\n"," -1.0335e-01, -3.8336e-01, 1.2168e-02, -1.5803e-01, -6.9077e-01,\n"," -1.3883e-01, 7.4722e-02, 2.7904e-01, -9.2847e-02, 9.3060e-02,\n"," 6.1240e-03, 6.1494e-01, 1.2945e-01, -1.5732e-01, 6.9183e-01,\n"," 1.6385e-01, -2.3872e-01, 1.2606e-01, 6.1848e-02, 2.0207e-01,\n"," -2.2207e-01, -8.5002e-02, 2.3885e-01, 3.5521e-01, 1.0839e-01,\n"," 1.8917e-01, 1.8385e-01, -2.0928e-01, -6.5909e-02, 5.4083e-01,\n"," 8.8611e-02, -1.0496e-01, -2.1398e-01, 1.3426e-01, -5.3383e-01,\n"," -3.0711e-01, 5.4021e-01, -5.5171e-01, -8.2184e-02, -1.6170e-01,\n"," -3.6097e-01, -6.1571e-01, -1.8834e-01, -9.6056e-02, -1.8795e-01,\n"," -5.2093e-02, -5.8610e-01, -4.5821e-01, 9.6426e-02, -5.4460e-01,\n"," -1.2478e-01, 5.1568e-01, -7.7464e-02, 3.5568e-01, 1.1471e-02,\n"," 1.9701e-01, 4.7505e-02, -2.1873e-02, 8.8611e-02, 4.5519e-01,\n"," -1.1944e-01, -1.2359e-01, -2.1752e-01, 5.3304e-02, 1.2860e-01,\n"," 4.9646e-01, 8.4634e-02, -1.0469e-01, -7.9650e-02, 5.6457e-01,\n"," -1.9883e-01, 2.8792e-01, 2.5937e-02, -4.4254e-01, -1.9489e-01,\n"," -1.7583e-03, -2.9220e-02, 2.3755e-01, 2.6710e-01, -7.1081e-01,\n"," 1.0912e-01, -4.4049e-01, -1.3170e-02, 2.7557e-01, -4.3362e-01,\n"," 3.6253e-01, -2.0488e-03, 1.0020e-01, -1.1445e-01, 2.0676e-01,\n"," 5.4378e-01, 3.8726e-02, 3.3511e-01, 3.6056e-02, 4.2015e-01,\n"," 4.5111e-02, 1.3374e-01, 2.3344e-02, -2.5005e-01, -2.3688e-01,\n"," -1.4459e-01, -3.3280e-01, -1.5493e-01, 8.9620e-02, 1.8205e-02,\n"," -1.1040e-02, -1.2356e-01, -8.7857e-02, 2.2025e-01, 1.8475e-01,\n"," -1.7261e-02, 7.8412e-02, 4.0545e-01, 4.1145e-01, 1.7174e-02,\n"," -7.2875e-01, -9.7719e-02, 5.5949e-03, 1.0518e-01, -1.7412e-02,\n"," 1.9229e-01, 3.3224e-01, 5.9189e-01, 1.3673e-02, -1.1727e-01,\n"," 1.2819e-01, -2.0587e-01, -2.3972e-01, -2.2935e-01, 1.9618e-01,\n"," -2.9501e-01, -2.7537e-01, -5.1856e-01, -1.3886e-01, 2.6392e-01,\n"," 5.0197e-02, 2.1378e-01, -2.4227e-01, -4.8913e-02, 5.2089e-01,\n"," -2.1730e-01, -5.5901e-01, 2.5854e-01, 2.6517e-01, -6.7904e-02,\n"," 1.8565e-01, -2.9149e-01, -2.2172e-01, 1.9169e-01, 6.0767e-02,\n"," -2.1403e-01, -5.9168e-02, -4.0758e-01, 4.3262e-02, -2.8231e-01,\n"," -4.2286e-01, -8.9155e-02, 4.5105e-01, -7.4169e-02, 
-2.9578e-01,\n"," -2.3969e-01, 5.4690e-01, 3.0796e-01, -2.2358e-01, 1.3989e-01,\n"," -2.7406e-02, -3.9248e-01, -1.6674e-01, 7.7951e-02, 3.7948e-01,\n"," -6.2921e-02, 3.5362e-01, 2.1037e-01, 4.7416e-01, 2.3264e-02,\n"," -8.3685e-02, 7.0152e-02, -2.5300e-02, 5.1693e-02, 1.8225e-01,\n"," 4.9543e-02, -7.5464e-01, -2.7417e-01, 1.1216e-01, -4.1438e-01,\n"," -4.0393e-01, -2.1570e-01, 1.9089e-01, -9.3532e-02, -5.4360e-02,\n"," 1.1550e-01, 5.3247e-01, -1.2127e-01, -8.6001e-01, -3.6610e-01,\n"," 3.1976e-01, 9.4871e-02, -2.0730e-01, -2.8352e-01, -6.0668e-01,\n"," 7.0774e-02, -3.5478e-01, 5.9646e-04, 1.2759e-01, 1.5255e-01,\n"," -7.5340e-02, 1.7927e-01, 4.3300e-01, 5.4584e-01, -7.5599e-01,\n"," -1.8844e-01, 1.6912e-01, -1.5979e-01, -8.2782e-02, -3.1725e-01,\n"," -2.4296e-01, -3.5661e-01, -1.1066e-01, 8.9850e-02, 2.8180e-01,\n"," 4.8817e-01, 2.3515e-01, -1.4627e-01, -1.9891e-01, 1.9904e-01,\n"," -2.2646e-01, -2.2034e-01, 1.7291e-02, -8.5324e-01, -1.7469e-01,\n"," -6.8533e-02, 1.5723e-01, -8.6268e-02, -2.0334e-01, 1.5038e-01,\n"," 6.0174e-01, 1.5042e-01, 1.8089e-01, -1.5350e-03, 3.7504e-01,\n"," 3.1026e-01, 3.6350e-01, -8.5630e-02, -4.5455e-01, 2.3315e-01,\n"," 2.2723e-01, 7.4062e-02, -7.3979e-01, 4.8461e-02, 3.8633e-01,\n"," -4.2881e-01, -4.3543e-01, 1.8005e-01, 8.1365e-01, 6.2875e-01,\n"," -1.6031e-01, -2.3770e-01, 2.8859e-01, -3.5813e-01, -4.3154e-01,\n"," 2.6327e-01, -3.6213e-01, -3.3508e-01, 3.5189e-01, 1.4123e-01,\n"," -1.6936e-01, 3.3539e-01, -7.9927e-02, 2.9265e-01, -4.6856e-01,\n"," 1.6675e-01, -4.3106e-01, 1.0660e-02, -7.5519e-02, 6.0020e-01,\n"," -1.4558e-01, 3.8719e-02, 3.6328e-01, 2.4681e-01, -6.1141e-02,\n"," -2.8942e-01, 9.6841e-02, -1.5411e-01, -6.3811e-01, 1.5561e-01,\n"," 4.7895e-02, 7.9830e-01, -6.9685e-01, 1.1707e-01, -2.0729e-01,\n"," -6.2589e-01, -7.5294e-02, 5.4696e-01, 3.7261e-02, -9.1875e-02,\n"," 6.1274e-01, 2.5483e-01, 3.4627e-01, 5.6471e-02, -6.4663e-01,\n"," -1.4319e-01, 2.6935e-01, 4.1623e-02, -3.4783e-02, -1.6462e-01,\n"," 5.9048e-02, 3.6063e-01, -9.9687e-03, -5.7519e-02, 4.6143e-01,\n"," 4.7618e-01, -4.4121e-01, -3.5480e-02, 6.8195e-02, 1.1921e-02,\n"," 1.0786e-01, -1.9254e-01, -3.8351e-02, -5.5723e-03, -8.8377e-02,\n"," -1.0217e-01, 5.8904e-01, -5.5207e-02, 2.1552e-01, -1.6631e-01,\n"," -3.7337e-01, 6.4523e-01, -1.8996e-01, -1.4433e-01, 3.3614e-01,\n"," 1.2116e-01, -3.1644e-01, -7.7746e-01, -3.5829e-02, -1.0090e-01,\n"," -2.1016e-01, -3.1180e-01, 2.2537e-01, 4.4257e-01, -2.9897e-01,\n"," -2.5892e-01, -6.5090e-01, 2.3045e-01, -3.9432e-02, 1.9616e-01,\n"," 1.7816e-01, 1.8608e-01, -1.0751e-01, 3.1153e-01, -2.2144e-01,\n"," 3.6135e-01, -6.9430e-01, -2.4637e-01, 1.1455e-01, -6.0557e-02,\n"," -3.0109e-01, -5.5075e-01, 2.0098e-01, -3.7480e-01, -4.4634e-01,\n"," -4.7232e-02, 2.1350e-01, -2.8659e-01, 3.4268e-01, -1.3709e-01,\n"," -2.1269e-02, -2.1056e-01, 2.3961e-02, 6.5842e-02, 5.4921e-01,\n"," 6.4956e-02, 5.2597e-01, 2.6960e-01, 3.4652e-01, -4.2813e-01,\n"," -2.8312e-01, 2.7482e-01, 1.2435e-01, -2.5831e-01, 5.0209e-01,\n"," -1.5110e-01, 6.1736e-02, -5.7234e-01, -1.7913e-01, 3.4566e-01,\n"," -9.7496e-01, 3.1378e-01, -2.1153e-01, -1.6925e-01, -1.1125e-01,\n"," -4.2962e-03, -8.9294e-02, -3.0602e-01, 2.6024e-01, -3.8842e-02,\n"," 4.6106e-01, 4.7882e-01, -1.6437e-01, 4.9871e-02, -1.7363e-01,\n"," 3.0205e-01, 1.6727e-01, -2.0798e-01, 1.0760e-01, 1.6372e-01,\n"," -2.7877e-01, 1.8806e-01, -7.9231e-02, -1.4879e-01, -1.8200e-01,\n"," 2.1610e-02, -2.6716e-02, -3.8582e-01, 1.3587e-01, -7.5767e-02,\n"," 3.2636e-01, -2.2711e-01, -2.4013e+00, -6.5937e-02, -1.2944e-01,\n"," -4.4829e-04, 
1.3269e-01, -2.3314e-01, 3.3533e-01, 1.0924e-01,\n"," 1.9392e-01, -2.3777e-01, 8.5442e-02, -3.4079e-01, -1.5408e-01,\n"," -1.2083e-01, -3.7265e-01, 5.2429e-01]])\n","hotel: tensor([[ 2.7908e-01, 9.4146e-03, -6.8139e-02, 9.6196e-02, 4.0810e-02,\n"," -1.3135e-01, 3.5519e-01, -7.6731e-05, 6.2505e-02, -1.8185e-01,\n"," -6.4107e-02, -1.4285e-01, 5.6907e-02, 3.1357e-01, -2.2882e-03,\n"," -2.2001e-01, -6.7800e-02, 1.5486e-01, 2.0934e-01, 4.6053e-02,\n"," 1.3253e-01, -1.9163e-01, -1.2849e-01, 4.6003e-02, -3.9260e-02,\n"," -2.9305e-02, -3.6985e-01, 1.4145e-01, -3.0623e-01, 1.1645e-02,\n"," 5.6015e-02, -2.2196e-01, -1.7820e-01, 2.7386e-01, 2.7861e-02,\n"," -2.3392e-01, 4.5677e-01, -2.2790e-01, -8.6664e-02, 1.3434e-01,\n"," 1.7727e-01, -1.9236e-01, 2.1800e-01, 5.0067e-02, 5.0691e-02,\n"," -2.5351e-01, -2.0240e-01, -2.2629e-02, -5.7016e-01, 1.4532e-01,\n"," -1.3202e-01, 2.2689e-01, 1.9037e-02, 2.0918e-01, 2.2449e-01,\n"," 3.0113e-01, 7.0975e-02, 1.1823e-01, 1.2308e-01, -2.4572e-02,\n"," 1.4149e-01, 2.2063e-01, -2.6636e-01, -1.5777e-01, 3.7475e-01,\n"," 1.1711e-01, 4.0271e-01, -6.6022e-03, -3.6232e-01, 2.2540e-01,\n"," -4.0738e-03, -3.0458e-01, 4.4289e-01, 9.4033e-02, 2.1036e-01,\n"," 9.6991e-02, -1.7023e-01, 3.6529e-01, 7.3617e-04, -1.1050e-02,\n"," 2.4532e-01, 5.0537e-02, 2.9482e-01, 3.0451e-01, 1.1945e-01,\n"," 2.6726e-01, -1.4071e-01, -1.1701e-01, -1.3758e-01, -7.4477e-02,\n"," -2.7512e-01, 1.0624e-01, -1.6548e-01, 5.7256e-02, -8.3249e-02,\n"," 2.6120e-02, -2.7889e-01, 7.0773e-02, 1.2511e-01, -6.6685e-02,\n"," 1.1182e-03, -6.9043e-02, -1.4900e-01, 1.8673e-01, -1.1305e-01,\n"," -2.3677e-03, -5.7480e-02, -1.1612e-03, 1.9521e-01, -8.8678e-01,\n"," 1.6831e-01, -1.9830e-02, 8.8586e-02, -2.5630e-01, -1.2159e-01,\n"," 2.4695e-01, 2.9738e-01, 1.3840e-02, 5.7116e-02, 2.1598e-01,\n"," -3.7617e-01, -3.1240e-02, 2.6581e-01, 4.4810e-01, -1.5304e-02,\n"," 3.2936e-01, -4.2411e-02, -2.6470e-01, 1.0558e-01, -2.2191e-01,\n"," -2.5228e-01, 4.2098e-01, -1.5084e-02, 3.2047e-01, -1.1528e-01,\n"," 3.7665e-02, 3.3981e-01, 9.5118e-02, -2.9044e-01, -8.9801e-02,\n"," -1.3813e-01, 3.3212e-01, -1.2064e+00, -3.8895e-01, 3.6994e-02,\n"," 1.1402e-01, 3.2849e-01, -1.8504e-01, 3.2643e-01, -3.3489e-01,\n"," 1.5817e-01, 2.9202e-01, -3.1996e-01, 1.2986e-01, -5.0693e-01,\n"," -4.2294e-02, 5.4261e-02, -3.4255e-01, 3.0719e-01, 3.5942e-01,\n"," 2.7544e-01, 2.0953e-01, 2.2484e-01, 3.0299e-01, -2.8448e-01,\n"," -1.4170e-01, -2.5237e-01, 4.7105e-01, 3.7817e-01, 1.5275e-02,\n"," -5.2821e-01, -3.0538e-01, 2.2513e-03, 1.5382e-01, -1.8245e-01,\n"," 2.0200e-01, 9.2305e-03, 9.5428e-02, 5.9873e-02, -1.1206e-01,\n"," -2.9440e+00, -2.6973e-02, -1.6398e-01, 5.0678e-02, 4.3214e-01,\n"," -1.6268e-03, 2.6755e-01, -9.3951e-02, 2.8989e-01, -3.0928e-01,\n"," 5.5731e-02, -1.3277e-01, -3.2752e-01, 4.1419e-01, 3.1291e-01,\n"," -4.8974e-02, -1.3861e-01, -1.2247e-01, 8.6805e-02, 9.6140e-02,\n"," -1.8479e-02, -6.1620e-02, 1.4932e-01, 2.6347e-01, -3.6213e-01,\n"," 9.1182e-01, 1.4730e-01, -2.7555e-01, 1.2911e-01, 1.2100e-01,\n"," -3.1854e-01, 2.8886e-01, 1.4668e-01, -3.6578e-01, 3.3323e-01,\n"," -3.0589e-01, 3.8913e-01, -1.9843e-01, -2.7499e-01, -2.7171e-02,\n"," 2.9917e-01, -1.2832e-01, -1.3541e-01, 1.4934e-01, -1.8621e-01,\n"," -3.2177e-01, 1.9700e-01, 2.3834e-01, 1.4583e-01, -1.2899e-02,\n"," 6.6573e-02, -2.3177e-01, 1.3434e-01, 6.2806e-02, -1.7608e-01,\n"," 1.3308e-01, -9.6075e-02, -7.6201e-02, -1.8530e-01, -1.8173e-01,\n"," -1.4854e-01, -2.1228e-01, 2.2652e-01, 3.1460e-03, -1.7840e-01,\n"," -2.6897e-01, 2.0276e-02, 1.1786e-01, 2.9293e-01, -2.7274e-01,\n"," 
1.2496e-01, -2.8416e-01, 2.6399e-01, -2.8472e-01, 5.8018e-02,\n"," 2.5393e-01, 1.9140e-01, 4.6696e-03, 8.6853e-02, 2.3182e-02,\n"," -8.4032e-02, 2.8723e-01, 1.0975e-01, -9.4958e-02, -2.7702e-01,\n"," 1.1316e-01, 2.8158e-04, -1.5736e-01, -4.3699e-02, -2.0182e-02,\n"," -1.3548e-01, -1.6716e-01, -1.9791e-01, -1.4800e+00, 1.2983e-01,\n"," -1.0172e-01, 8.8242e-02, 1.8721e-01, 3.5029e-01, 3.4374e-02,\n"," 1.0732e-02, 1.2717e-01, -9.3343e-02, 1.7040e-01, 1.9821e-01,\n"," -3.7623e-01, -4.0377e-01, -2.0646e-01, 1.0185e-01, -3.3118e-01,\n"," -2.8616e-02, 2.0963e-02, 1.0173e-01, 1.4556e-01, 3.2291e-01,\n"," -1.2766e-01, 2.9925e-01, 1.3302e-01, -1.8677e-01, -3.1726e-01,\n"," -1.9099e-01, 1.2165e-01, -1.2737e-01, -6.8218e-02, 1.2410e-02,\n"," 1.8145e-01, 1.6566e-01, -2.8658e-02, -2.3838e+00, -4.2838e-02,\n"," 7.9192e-02, -3.4706e-01, 1.6189e-01, -4.1553e-02, 2.3800e-01,\n"," -2.0929e-01, -3.1927e-01, 2.6142e-03, -4.9922e-02, -2.9749e-01,\n"," 2.0382e-01, 1.5181e-01, 5.9291e-02, 2.0820e-01, 4.6690e-04,\n"," -2.6570e-02, -2.7022e-01, -5.9255e-02, 3.4821e-02, 5.1372e-02,\n"," 9.9619e-02, -1.9078e-01, 8.4062e-02, 3.0166e-01, -1.5904e-01,\n"," 1.2045e-01, -5.0262e-01, 1.3861e-02, -9.6578e-02, -1.5591e-02,\n"," -1.6315e-02, 1.5061e-01, 1.1067e-02, 5.7491e-02, -4.7788e-03,\n"," 7.8907e-02, 6.8952e-01, -2.6607e-02, -4.1334e-04, 3.6423e-01,\n"," 1.3369e-01, -2.0541e-01, -1.6803e-01, -2.1445e-01, -5.5468e-02,\n"," -5.7439e-02, 2.6114e-03, 2.2773e-01, 2.1647e-01, -1.6487e-02,\n"," 2.3875e-01, 6.5916e-02, -1.0687e-01, -2.3371e-01, 1.4221e-01,\n"," 3.2152e-01, 4.3083e-02, -6.3169e-02, 1.2417e-01, -1.9511e-01,\n"," 3.6062e-01, 1.3469e-01, -3.6923e-01, 2.0563e-02, 2.5672e-02,\n"," 9.1979e-02, -1.1844e-01, 1.1237e-01, -1.7063e-01, -2.9483e-01,\n"," -1.0183e-01, -7.3483e-01, -6.0830e-01, 1.6242e-01, -3.4425e-01,\n"," 3.5687e-02, 3.5145e-01, -2.3207e-01, 2.0281e-01, 1.0505e-01,\n"," -2.1933e-01, 1.9892e-01, 1.8222e-01, -1.3797e-01, 7.8975e-02,\n"," 5.9763e-03, -2.0051e-01, -1.8499e-01, 1.6888e-01, 1.2403e-01,\n"," 3.7655e-01, -2.0431e-03, -7.7501e-02, 1.1545e-01, 8.6880e-02,\n"," -2.8327e-01, 6.8948e-02, 1.9041e-02, -4.0207e-01, -3.3468e-01,\n"," 2.6100e-01, 6.9018e-02, 9.4044e-02, -1.2524e-01, -2.6535e-01,\n"," -1.6012e-01, -3.0143e-01, 8.0218e-02, 1.3317e-01, -1.4799e-01,\n"," 1.9228e-01, 8.3282e-03, 2.3660e-01, -3.1152e-01, 7.3265e-03,\n"," 6.3997e-01, -4.0138e-02, 2.4381e-01, 9.4616e-02, 9.6614e-02,\n"," 1.2976e-01, -7.6739e-02, -1.9650e-01, 2.8393e-02, 6.2315e-03,\n"," -4.5713e-01, -1.5173e-01, -1.6068e-01, 1.3833e-01, -1.2368e-01,\n"," 7.4510e-02, -4.3046e-01, -1.2454e-01, 7.4271e-02, 1.5003e-01,\n"," 1.7577e-01, 4.6880e-01, 3.4916e-01, 2.1860e-01, 2.6478e-01,\n"," -4.8160e-01, 1.8335e-01, 1.8249e-01, 1.9588e-01, -8.3468e-02,\n"," 1.3145e-01, 4.2193e-01, 3.3501e-01, -3.3734e-02, -2.6534e-01,\n"," 3.1928e-01, -2.6141e-01, -1.6496e-01, -4.1273e-01, 1.2901e-01,\n"," 5.3358e-03, -1.2378e-01, 1.8835e-01, -2.8125e-01, -3.9302e-02,\n"," -1.0920e-01, 2.6754e-01, -2.5838e-01, -1.5763e-01, 1.8603e-01,\n"," -1.3307e-01, -4.4425e-01, 4.9626e-01, 3.6749e-01, 1.7143e-01,\n"," 4.8004e-02, -1.2476e-01, -3.3141e-02, 2.6679e-01, -2.6696e-02,\n"," -1.1989e-01, 2.3537e-01, 1.3474e-01, -1.4793e-01, -2.7368e-01,\n"," -9.3823e-02, 2.0651e-01, 6.1648e-02, -1.3721e-01, -3.4259e-01,\n"," -2.7265e-01, 3.0730e-01, 5.4558e-01, 3.1224e-01, -2.0058e-01,\n"," -2.3653e-01, -7.9165e-03, -3.4498e-01, 2.7827e-02, 1.0043e-01,\n"," 4.9292e-02, 2.3174e-01, 2.1430e-01, 1.5568e-01, 1.3809e-01,\n"," 2.1259e-02, 2.9527e-01, 2.9112e-01, -9.0410e-02, 
2.2258e-01,\n"," 1.8553e-01, -2.9230e-01, 7.1455e-02, -7.2720e-02, -4.6181e-01,\n"," -1.7177e-01, -1.4722e-01, 1.2192e-02, 1.6454e-01, 2.8246e-01,\n"," 5.4141e-02, 1.2595e-01, 1.4988e-02, -2.7499e-01, -1.5692e-01,\n"," 9.1868e-02, -2.1326e-01, -3.5220e-01, 6.5435e-02, -1.1490e-01,\n"," -1.1999e-01, -2.1184e-01, 1.2018e-02, 6.1336e-02, 4.4122e-01,\n"," -1.2325e-01, -6.5252e-02, 7.8522e-01, 2.7108e-01, -3.5041e-01,\n"," -1.0090e-01, 1.5035e-01, 5.5486e-02, -4.2035e-02, -3.5440e-01,\n"," -2.0011e-01, -2.0030e-01, 1.1191e-03, 1.7554e-02, 1.5060e-02,\n"," 3.2884e-01, 1.9185e-01, -5.9172e-02, -2.1824e-01, -2.3935e-01,\n"," -3.4757e-02, -1.3366e-01, 3.8379e-02, -3.7384e-01, 4.9081e-03,\n"," 2.1150e-01, 2.7343e-01, 2.7674e-02, -2.0465e-01, 1.9808e-01,\n"," 3.2879e-01, -8.3666e-02, 2.1155e-01, 1.8294e-01, 1.3882e-01,\n"," 1.2704e-01, 3.4752e-01, 1.4569e-01, 5.9118e-02, 1.7788e-01,\n"," -1.3667e-01, 2.2889e-01, -4.0745e-01, 7.4396e-03, 3.9934e-01,\n"," -5.4115e-01, -2.1316e-01, -8.8560e-02, 8.6319e-01, 4.3814e-01,\n"," -2.6380e-02, -5.4968e-02, 1.0019e-01, -3.4614e-02, -3.5909e-01,\n"," 7.0638e-02, -2.8307e-01, -7.1765e-02, 3.6110e-01, -1.1160e-01,\n"," -3.8690e-02, 2.2902e-01, -2.6513e-01, 4.1167e-02, -2.6095e-01,\n"," 2.1098e-01, -5.3192e-01, 1.9293e-01, -3.3993e-01, 5.1941e-01,\n"," -1.8412e-01, -1.1115e-01, 1.1413e-01, -1.1535e-02, 9.3152e-02,\n"," -2.3419e-01, 5.3310e-02, -1.7998e-01, -2.8301e-01, 1.8439e-03,\n"," 1.0960e-01, 4.4891e-01, -5.6153e-01, 1.1726e-02, -1.8293e-01,\n"," -3.6330e-01, 4.0019e-02, 2.1773e-01, 8.0445e-02, -2.9992e-01,\n"," 3.2632e-01, 1.4702e-01, 2.4235e-01, 2.0898e-01, -5.4193e-01,\n"," -1.1314e-01, -1.0955e-01, 9.2893e-02, 2.5612e-01, 6.7650e-02,\n"," -1.0328e-01, 2.0898e-01, -1.4103e-01, -2.7155e-01, 3.2091e-01,\n"," -1.3414e-01, -1.4792e-01, -3.8316e-01, 4.3988e-01, -5.7879e-02,\n"," -1.1565e-01, -1.5987e-02, 1.6509e-01, 8.2972e-02, 1.7650e-03,\n"," -5.7116e-02, 9.7711e-02, -1.9184e-01, 1.5034e-01, -6.8963e-02,\n"," -8.8647e-02, -2.5219e-01, -3.8885e-02, 7.4059e-03, 1.9836e-02,\n"," -2.8337e-02, 5.2735e-02, -1.2385e+00, 1.9571e-01, -3.1938e-01,\n"," -2.1450e-01, -5.9489e-02, 6.2940e-02, 3.5694e-01, 5.1505e-02,\n"," -2.6662e-02, -1.7313e-01, 2.3534e-01, 2.2318e-01, 2.5299e-01,\n"," -7.9802e-02, -1.2666e-01, 2.5797e-01, -8.1944e-02, -1.1400e-01,\n"," 1.1554e-01, -3.1685e-01, -3.3426e-02, 3.2199e-01, -7.9873e-02,\n"," -9.9847e-02, -1.4852e-01, 3.7052e-01, -3.8195e-01, -1.9745e-01,\n"," -2.2805e-01, 1.1710e-01, -1.1772e-01, 3.0073e-01, -2.1794e-01,\n"," -5.2473e-02, -2.2410e-01, 1.6446e-01, 1.5050e-01, 3.6387e-02,\n"," -1.9868e-02, 2.1043e-02, 1.6206e-01, 3.7841e-01, 1.1470e-02,\n"," -2.2816e-01, 9.2454e-02, 1.0472e-01, 3.3765e-02, 2.5520e-01,\n"," -1.5868e-01, 4.8292e-02, -3.5063e-01, -5.5786e-01, -1.4663e-02,\n"," -1.0565e+00, 3.2953e-01, -9.2671e-02, 8.2238e-03, -2.4343e-01,\n"," 2.0862e-02, -1.1607e-01, 1.5180e-01, -2.0215e-02, -1.1776e-01,\n"," 3.0191e-01, 2.8411e-02, -4.9750e-02, -1.3770e-01, -2.6241e-01,\n"," 5.3569e-02, 2.2279e-01, 4.3742e-02, 3.6482e-02, -8.3218e-03,\n"," 1.2789e-01, 1.2766e-01, -2.2761e-01, 1.5298e-02, 4.3281e-02,\n"," 1.4467e-01, 7.8942e-02, -1.5635e-01, -1.0785e-01, 2.4515e-02,\n"," 2.7228e-01, 8.5516e-03, -3.3191e+00, -4.5669e-01, 1.1550e-01,\n"," -7.0335e-02, 8.2238e-02, -3.1696e-01, 4.7210e-01, -7.9768e-02,\n"," 2.9475e-02, -1.7540e-01, 2.6600e-01, -2.7849e-01, 2.9825e-03,\n"," -1.6589e-01, -5.2135e-02, 1.0111e-01]])\n",",: tensor([[ 1.4571e-01, -8.9117e-02, 2.6450e-02, 3.9113e-03, -6.6716e-02,\n"," -2.5159e-01, 3.8645e-01, -1.3547e-01, 
[... elided: raw notebook cell output listing one long embedding tensor per token, printed for the tokens "something", "that", "is", "unusual", "in", "bigger", and "city" (hundreds of floating-point values per token; numeric values omitted here) ...]
7.4841e-02,\n"," 1.1359e-01, 2.2311e-01, -8.5005e-03, 2.7041e-01, -2.1456e-02,\n"," 2.1174e-01, 2.6674e-01, -3.4778e-01, 3.0289e-02, -5.2117e-01,\n"," -5.0485e-01, 2.6652e-02, -8.6195e-02, 4.9324e-01, 5.5715e-01,\n"," 2.3350e-01, 1.2589e-01, 1.8467e-01, 4.4332e-01, -5.0879e-01,\n"," -5.8399e-02, -8.8752e-02, 5.0095e-01, 2.2406e-01, -3.7185e-02,\n"," -5.6110e-01, -5.1102e-01, 2.6963e-01, 7.1667e-03, -4.9696e-03,\n"," 1.7494e-01, 9.3096e-02, 1.6852e-01, -5.0201e-02, -1.1044e-01,\n"," -2.9265e+00, 8.9145e-02, 1.0170e-01, 1.8802e-01, 3.6103e-01,\n"," -1.2835e-01, 2.3540e-01, -8.1496e-02, -3.5478e-02, -1.5223e-01,\n"," -1.4427e-01, -9.8152e-02, -4.4929e-01, 4.4686e-01, 7.4136e-02,\n"," -2.4612e-05, -1.6273e-01, -2.3318e-01, 1.3428e-01, 8.8852e-03,\n"," 1.9565e-01, -2.3815e-01, 2.0741e-01, 2.6210e-01, -4.0100e-01,\n"," 8.5217e-01, 9.4376e-02, -3.3815e-01, 6.6989e-02, 8.5023e-02,\n"," -6.8046e-01, 3.8425e-01, 1.5000e-02, -3.0018e-01, 3.4435e-01,\n"," -3.0280e-01, 1.8904e-01, -1.5424e-01, -2.5046e-01, -5.3041e-02,\n"," 8.6258e-02, -8.6025e-02, -9.9887e-03, 7.7727e-02, -5.7333e-02,\n"," -3.7498e-01, 2.1617e-01, 1.5811e-01, 1.0071e-01, -1.8806e-01,\n"," 6.7100e-02, -1.7517e-01, 1.1313e-01, -1.2415e-02, -3.2667e-01,\n"," 4.7945e-02, 1.1410e-01, -5.4353e-02, -1.6693e-01, -1.1447e-01,\n"," -1.0119e-01, 9.9737e-02, 2.1567e-01, 1.4232e-02, -5.5527e-03,\n"," -7.8645e-02, 2.0326e-01, 1.6959e-01, 4.1663e-01, -2.1360e-01,\n"," 2.3324e-01, -3.9780e-01, 1.5780e-01, -3.2770e-01, 1.9286e-02,\n"," -2.5596e-02, -6.5021e-02, -2.5253e-01, 3.9182e-01, -9.5231e-02,\n"," 4.8915e-02, 3.0900e-01, 2.7438e-01, 4.9050e-02, -5.1955e-01,\n"," 2.2211e-01, 3.5973e-01, -3.0895e-01, -2.6964e-01, -2.6943e-01,\n"," -1.5427e-01, -1.9934e-01, -1.1509e-01, -1.3118e+00, 6.6489e-02,\n"," 5.0887e-02, 1.9070e-01, 3.0976e-01, 1.0308e-01, 2.6401e-01,\n"," -3.4430e-02, 5.2522e-01, -2.5897e-01, 1.2640e-01, 4.9992e-01,\n"," -2.2633e-01, -4.0639e-01, -2.7267e-01, 1.7534e-01, -5.3960e-01,\n"," -2.6814e-01, -7.9126e-02, -2.1113e-01, 1.9663e-01, 1.6692e-01,\n"," -2.7670e-01, 2.3355e-01, 1.1878e-01, -7.5143e-02, -1.7473e-01,\n"," -2.8222e-01, 6.8024e-02, -1.5694e-01, -5.9684e-02, 5.3592e-03,\n"," 9.0129e-02, 2.9738e-01, 1.0172e-01, -1.5625e+00, 4.7858e-01,\n"," 5.4091e-02, -4.4860e-01, 1.5651e-01, -1.1039e-01, 2.8968e-01,\n"," -1.9247e-01, -2.4453e-01, -6.1673e-02, 2.1246e-01, -4.2871e-01,\n"," 1.6622e-01, 4.4453e-01, 1.8174e-01, 1.2861e-01, -1.5308e-01,\n"," -2.1337e-01, -4.0112e-01, -1.6065e-01, -8.7920e-04, -1.1521e-01,\n"," 9.9958e-03, -2.5647e-02, 3.3922e-01, 8.4968e-02, -2.4215e-01,\n"," 1.1117e-02, -6.1636e-01, 8.7648e-02, -1.0176e-01, -2.0689e-01,\n"," -5.3960e-02, -6.5134e-02, 1.7204e-01, 2.2675e-01, 8.1797e-02,\n"," 1.6667e-01, 5.9704e-01, -6.3083e-02, 1.5942e-01, 3.5049e-01,\n"," 1.2895e-01, -2.1617e-01, 3.1992e-02, -1.0338e-01, -1.9002e-01,\n"," -1.4474e-01, -2.7823e-01, 2.1775e-01, 3.2529e-01, -1.9267e-01,\n"," 2.8317e-01, 5.9233e-02, -2.2867e-01, -1.4598e-01, 2.2228e-01,\n"," 1.7157e-01, -1.6003e-01, -1.9891e-02, 8.0389e-03, -2.2211e-01,\n"," -1.8502e-01, 3.5748e-01, -5.3598e-01, -1.5590e-01, -6.4864e-02,\n"," -1.9399e-01, -5.6983e-02, 1.1108e-01, -3.3147e-01, -2.2596e-01,\n"," -3.2807e-01, -7.1529e-01, -6.2692e-01, 1.7715e-01, -4.2944e-01,\n"," -6.4891e-02, 2.8686e-01, -9.9255e-02, 5.3655e-02, -2.6036e-02,\n"," -2.0315e-01, 3.4009e-01, -1.5218e-02, -1.8835e-02, 1.2750e-01,\n"," -2.9799e-01, -2.3973e-01, -2.6265e-01, 1.0439e-01, -8.7526e-02,\n"," 5.5231e-01, 1.6230e-01, -8.0562e-02, 1.4088e-01, 2.8499e-01,\n"," -7.3408e-02, 
2.0836e-01, 4.6717e-02, -3.5161e-01, -3.0776e-01,\n"," 6.6616e-02, -9.4461e-02, -1.3067e-01, -7.9976e-02, -5.4221e-01,\n"," 6.4611e-03, -5.3490e-01, -1.4665e-01, 1.1148e-01, -1.4252e-01,\n"," 2.1761e-01, 7.2645e-02, 1.5937e-01, -1.8915e-01, 7.8981e-02,\n"," 4.4528e-01, 1.6545e-01, 3.8308e-01, 8.5895e-02, 5.4796e-01,\n"," 2.1666e-01, 6.0269e-03, -6.1313e-02, -1.1141e-01, -2.2961e-01,\n"," -3.4695e-01, -1.6949e-01, -1.6676e-01, 2.7910e-01, -1.2114e-01,\n"," -2.5438e-02, -4.8870e-01, -4.1773e-01, 1.0132e-01, 1.9711e-01,\n"," 4.5764e-02, 2.0124e-01, 4.0981e-01, 3.7013e-01, 8.1666e-02,\n"," -5.0451e-01, -1.0334e-01, -1.6595e-01, 2.6621e-01, -1.9647e-01,\n"," 2.6452e-01, 3.5195e-01, 2.9788e-01, 4.6920e-02, -4.0085e-01,\n"," 2.3970e-01, -2.3397e-01, -1.3124e-01, -3.8254e-01, -1.4385e-02,\n"," 1.3001e-01, 9.1350e-02, 5.2163e-02, -1.1431e-01, 2.8985e-02,\n"," 2.6183e-02, 1.9309e-01, -5.4476e-02, -6.6342e-02, 2.0692e-01,\n"," -9.0088e-02, -1.5511e-01, 2.3532e-01, 3.4158e-01, -2.9381e-01,\n"," -1.2404e-02, -3.1719e-01, -6.1684e-02, 7.6146e-02, 4.8459e-02,\n"," -4.1944e-01, -2.7671e-02, 1.2933e-01, 1.0928e-01, -3.3081e-01,\n"," -2.6981e-01, 1.9682e-01, 3.4162e-01, 3.0753e-01, -4.7364e-01,\n"," -1.3255e-01, 4.9926e-01, 5.0514e-01, 2.2558e-01, -1.8867e-01,\n"," -1.8823e-01, -2.1200e-01, -3.4352e-01, -1.3453e-01, 2.5666e-01,\n"," -8.0164e-03, 2.0640e-01, 2.1423e-01, 2.3456e-01, 2.1601e-02,\n"," -9.8296e-02, 2.0366e-01, 2.9629e-01, -7.5688e-02, -3.8353e-03,\n"," 1.1760e-01, -3.2662e-01, -2.2820e-01, 7.8947e-02, -2.2480e-01,\n"," -3.8251e-01, -2.9521e-01, 1.4773e-01, 3.5240e-01, 1.7457e-01,\n"," 1.5873e-01, 2.2984e-01, -4.4084e-02, -4.0710e-01, -1.8715e-01,\n"," 2.7768e-01, -8.7013e-02, -1.6677e-03, 3.3052e-02, -4.0332e-01,\n"," -4.0735e-01, 2.2249e-02, -1.0771e-01, 9.6825e-02, 3.4495e-01,\n"," -1.3029e-01, 1.1239e-01, 6.9371e-01, 1.5619e-01, -5.0164e-01,\n"," -6.8701e-02, -5.1672e-02, 8.6943e-03, -2.0302e-01, -3.5867e-01,\n"," -2.7493e-01, -1.0831e-01, 6.0862e-03, -6.4747e-02, 7.4181e-02,\n"," 5.7693e-01, 2.4318e-01, 1.7769e-02, -2.4800e-01, -1.7271e-01,\n"," 7.6075e-02, -4.5842e-01, 1.5949e-01, -6.3782e-01, 3.9258e-02,\n"," 3.3248e-01, 3.2967e-01, -6.4068e-02, -3.1198e-01, -1.4475e-01,\n"," 3.3055e-01, 4.1223e-01, -5.6365e-03, 3.7052e-03, 8.7272e-02,\n"," 5.2179e-02, 4.1937e-01, 9.8503e-02, -5.8744e-02, 2.0391e-01,\n"," -1.2306e-01, 5.8633e-02, -4.4961e-01, 1.8556e-02, 4.8460e-01,\n"," -4.5937e-01, -3.7090e-01, -1.0569e-01, 8.9865e-01, 5.2896e-01,\n"," -9.3893e-02, -1.7308e-01, 3.3830e-01, -8.8538e-02, -1.0838e-02,\n"," 7.2850e-02, -4.3787e-01, -2.9519e-01, 2.4360e-01, -1.2008e-01,\n"," -8.9556e-03, 1.0375e-02, -1.3252e-01, 2.7989e-01, -2.2338e-01,\n"," 2.4410e-01, -2.9764e-01, 3.1846e-02, -3.3385e-01, 6.2269e-01,\n"," -1.7479e-03, -1.3874e-01, 2.0045e-01, 1.5485e-01, 2.0061e-01,\n"," -1.9013e-01, 6.1568e-02, 8.4722e-02, -3.8697e-01, 1.1519e-02,\n"," 2.0168e-01, 7.3339e-01, -8.3494e-01, 2.2196e-01, -3.7561e-02,\n"," -4.5345e-01, 1.1189e-01, 2.3846e-01, -1.2189e-01, -2.5804e-01,\n"," 2.6551e-01, 1.5691e-01, 1.6558e-01, 4.3737e-01, -2.9775e-01,\n"," -2.3784e-01, 6.7237e-02, 4.1720e-01, -1.8817e-02, -1.9251e-02,\n"," -2.0949e-01, 3.0568e-01, -3.4132e-02, -1.7074e-01, 1.6810e-01,\n"," -5.8184e-02, -3.1281e-01, -3.6829e-01, 8.4532e-03, 2.5947e-01,\n"," 6.2673e-02, 2.2722e-01, 1.1645e-01, -2.4414e-02, -3.4665e-02,\n"," -1.8847e-01, 3.9765e-02, 2.9171e-02, -3.2623e-03, 1.1180e-01,\n"," -5.6836e-02, 2.0797e-01, -4.9864e-02, -1.1917e-01, 3.3812e-02,\n"," 1.3588e-01, -1.5717e-01, -1.0549e+00, -9.6124e-03, 
-7.5894e-02,\n"," 9.4542e-02, -2.0816e-01, 9.9753e-02, 5.3985e-01, 9.6205e-02,\n"," -1.3531e-01, -6.2185e-01, 1.6573e-01, 1.9278e-01, 3.2458e-01,\n"," 1.0004e-02, -3.0498e-02, 1.6599e-02, 4.5458e-01, -1.4786e-01,\n"," 1.9692e-01, -5.7486e-01, -2.4346e-02, 1.8672e-01, -1.6404e-01,\n"," -2.0172e-01, -1.4482e-01, 3.5525e-01, -3.8628e-01, -3.3673e-01,\n"," -4.6435e-02, 2.1909e-01, -2.4703e-01, 1.1580e-01, -1.6115e-01,\n"," -5.4107e-02, 1.1493e-01, 2.2763e-01, -7.2004e-02, 3.1812e-02,\n"," -6.6241e-02, 3.9888e-01, 5.0630e-01, 9.1150e-01, -2.2802e-01,\n"," -4.2602e-01, 1.6570e-01, 4.1350e-02, 2.7551e-01, 3.9441e-01,\n"," 1.2207e-01, -1.9498e-01, -2.2060e-01, -5.0708e-01, -6.5489e-03,\n"," -1.1121e+00, 2.8840e-01, -3.7084e-02, -1.4813e-01, 5.2060e-02,\n"," 3.1444e-01, -1.6906e-01, -2.1473e-01, 2.1398e-01, -1.7993e-01,\n"," 1.3970e-01, 4.4840e-01, -7.9205e-02, -3.0698e-01, -3.2793e-01,\n"," 3.0821e-01, 2.7152e-01, -7.1823e-02, -5.3214e-02, 7.0550e-03,\n"," -7.4108e-02, -4.6005e-02, 4.7662e-02, 2.9812e-01, -3.5473e-02,\n"," 2.2245e-01, 1.3832e-01, -1.2874e-01, -5.4964e-02, 1.3706e-01,\n"," 2.0267e-01, -1.0338e-01, -3.0676e+00, -3.5641e-01, -4.2746e-04,\n"," -1.7078e-01, 1.4617e-01, 2.1336e-01, 2.5473e-01, -2.0221e-01,\n"," 1.5246e-01, -3.1861e-01, 2.0312e-01, -4.0504e-01, 3.3342e-01,\n"," -2.1787e-01, -2.1454e-01, -2.0649e-01]])\n","4: tensor([[ 1.4372e-01, 1.5270e-01, 2.4131e-01, -6.9664e-02, -2.7103e-01,\n"," -2.6695e-01, 4.0733e-01, -2.7378e-01, 6.6391e-02, -3.0194e-01,\n"," 4.4053e-03, -1.5541e-01, 9.2695e-02, 8.3998e-02, -1.7917e-01,\n"," 1.3287e-01, -1.4409e-01, -9.1436e-02, -1.4845e-01, 1.1903e-01,\n"," 5.1463e-01, 5.7692e-02, 1.5126e-01, 1.4677e-01, 3.7280e-01,\n"," 4.2800e-01, -1.7182e-01, 9.6115e-02, -2.8452e-02, 9.9515e-02,\n"," -2.0470e-01, -5.2359e-02, -8.3856e-02, 3.3243e-01, 2.3429e-02,\n"," -2.2720e-02, 1.4888e-02, -2.7297e-01, -4.0338e-01, -1.0384e-01,\n"," 6.4399e-02, 9.1103e-02, -2.4250e-01, -1.8606e-01, 2.4055e-01,\n"," -1.5063e-02, -5.0907e-01, 1.2987e-01, -6.6550e-02, -8.9741e-02,\n"," -3.4640e-01, 3.1522e-02, -7.9425e-02, 1.7753e-01, 2.5739e-01,\n"," -3.0462e-01, 1.1952e-01, 1.1062e-02, -1.2134e-01, -3.0400e-01,\n"," 8.2881e-02, -5.0133e-02, -1.0412e-02, 1.0648e-01, 2.1607e-01,\n"," 4.7565e-02, 2.2069e-01, 1.4181e-01, -3.8422e-01, 8.0345e-02,\n"," -2.4524e-01, -3.0903e-01, 4.7425e-01, 1.0033e-01, 3.4118e-01,\n"," 7.9084e-02, -1.5808e-01, 1.2627e-01, 3.8779e-01, 2.0160e-01,\n"," 1.9690e-01, 1.8946e-01, 1.5489e-01, 4.1661e-01, -4.4318e-03,\n"," 1.9998e-01, -5.9547e-02, -1.5596e-01, 1.2190e-02, 1.3798e-01,\n"," -2.3583e-01, -1.8739e-02, -2.5567e-01, 1.3427e-01, -8.0426e-02,\n"," -6.6841e-02, -2.9471e-01, -1.5822e-01, -1.3051e-01, -1.9806e-01,\n"," 3.7516e-02, -1.7406e-01, 1.6530e-02, 2.5659e-01, -2.6796e-01,\n"," -9.7912e-02, -3.7017e-03, 1.0449e-01, 8.9260e-02, -4.0290e-01,\n"," 2.7218e-01, 3.7272e-01, -5.4995e-02, -1.9700e-02, -1.1479e-01,\n"," 2.3100e-01, -1.2173e-01, -3.7194e-02, -1.9398e-01, 3.5643e-01,\n"," -5.5972e-01, 8.8335e-02, 2.1672e-01, 5.5382e-01, 1.1712e-03,\n"," -1.4783e-01, -2.0519e-02, -4.7057e-01, -7.1452e-02, -2.6446e-01,\n"," 7.6747e-02, 1.6924e-01, 3.7852e-01, 1.9631e-01, -2.5788e-01,\n"," -2.5394e-01, -2.4489e-01, -3.8283e-02, -3.2391e-01, -4.1164e-02,\n"," 1.0634e-01, 5.4478e-01, -5.9433e-01, -5.0297e-01, -3.5427e-02,\n"," 1.3844e-01, 2.2371e-01, 1.1574e-01, 2.4554e-02, -8.7020e-02,\n"," 3.4395e-01, 5.4346e-01, -4.1168e-01, -3.4535e-01, -3.2702e-01,\n"," -2.5674e-01, -1.9822e-01, 2.5351e-01, 3.8015e-01, 3.9158e-01,\n"," 1.3642e-01, 3.1505e-01, 1.4043e-01, 
3.0917e-01, -4.1089e-02,\n"," 3.0802e-01, -2.2278e-01, 3.5619e-01, 2.0303e-01, 7.3016e-02,\n"," -3.9539e-01, -2.1420e-01, 3.0128e-01, -3.8688e-02, -6.1343e-02,\n"," 6.2798e-02, 2.1389e-01, 1.1895e-01, -7.3058e-02, -2.0887e-01,\n"," -2.8593e+00, 1.5200e-01, -1.2269e-01, -3.6294e-01, -2.6253e-01,\n"," 4.0353e-02, 2.9283e-01, -3.5778e-01, 3.5347e-01, -1.4389e-01,\n"," -1.6263e-04, -2.7828e-01, 1.7744e-01, 1.4140e-01, 1.8693e-01,\n"," -2.7943e-02, 2.8972e-01, -4.7199e-01, -2.6189e-01, -3.5464e-01,\n"," 1.5152e-01, -8.1428e-02, 2.7012e-01, 3.1351e-01, -1.0495e-01,\n"," 7.9566e-01, -5.2903e-02, -2.7874e-01, 1.9338e-01, 2.1632e-01,\n"," -5.7724e-01, 7.8377e-01, -1.2557e-01, -2.7503e-01, 3.2554e-01,\n"," -2.1037e-01, 4.7496e-02, -3.6205e-01, -3.6639e-01, -1.2666e-01,\n"," -2.1995e-01, -6.7785e-02, -2.4990e-01, 1.4138e-01, -2.7664e-01,\n"," -2.4580e-01, 1.3256e-01, -5.4900e-02, 1.6740e-01, 5.6780e-02,\n"," -3.2045e-01, 1.6966e-02, 1.3040e-01, 3.1142e-01, -1.3090e-01,\n"," 4.3494e-01, -1.8191e-01, -1.7604e-01, -5.9547e-02, -3.4092e-01,\n"," 6.0141e-03, -1.5916e-01, 2.3811e-01, -1.7939e-01, -1.8349e-01,\n"," 1.4626e-01, 2.3311e-01, 9.6461e-02, 3.4547e-01, -7.1923e-02,\n"," 1.8251e-01, -3.5212e-01, 3.2447e-02, -4.7872e-01, 3.3935e-01,\n"," -7.8379e-03, 9.0899e-03, -4.3629e-01, 4.5317e-01, 4.1650e-01,\n"," 1.0903e-01, 2.9608e-01, 1.6155e-01, -3.5664e-01, 1.6223e-02,\n"," 5.9893e-01, -1.8970e-02, -1.2956e-01, 7.3759e-02, -7.6905e-02,\n"," -1.0431e-01, -8.9264e-02, -4.2304e-01, -1.1640e+00, -1.7691e-01,\n"," -2.9004e-01, 4.0281e-01, 1.0454e-02, 2.7135e-01, 1.2593e-01,\n"," -4.6309e-01, 1.2270e-01, -3.3244e-01, 3.9066e-01, 1.8301e-01,\n"," -1.1770e-01, -1.7132e-01, -7.5280e-01, 2.7139e-01, -4.6680e-01,\n"," -2.8977e-01, -1.4555e-02, 9.0127e-02, 2.0269e-01, 3.1661e-01,\n"," -2.1811e-02, 1.1659e-01, 2.0175e-02, -3.2395e-02, -2.0124e-01,\n"," -2.1129e-01, -9.7173e-02, -2.7186e-01, -2.1003e-01, 2.3338e-02,\n"," 4.2872e-01, -1.6456e-01, 8.9981e-02, -1.7337e+00, -1.4008e-01,\n"," -3.7648e-01, -1.0341e-01, 2.6074e-01, 1.3659e-01, 2.2192e-01,\n"," -2.6240e-01, 3.3346e-01, -2.3083e-01, 2.9134e-01, -3.6770e-01,\n"," 1.0487e-01, 2.7166e-01, 4.4728e-02, 1.4574e-01, -2.2325e-01,\n"," -3.0678e-01, -4.3902e-01, 4.4862e-01, 1.5819e-02, 8.6182e-02,\n"," 5.3351e-01, -5.1060e-01, 2.0627e-01, 5.4929e-02, -3.9416e-01,\n"," 1.3248e-01, -2.9274e-01, 1.7961e-02, -3.6921e-01, -3.2246e-01,\n"," -2.3955e-01, 9.9404e-02, 2.7190e-02, 1.7161e-01, 1.0348e-01,\n"," 3.9195e-02, 5.7434e-01, 3.4783e-02, -1.4916e-01, 4.4018e-01,\n"," 9.1932e-02, 7.1614e-02, 1.1798e-01, -7.3978e-02, 4.5681e-02,\n"," -4.8140e-01, -1.5101e-01, 5.0167e-01, 1.1644e-01, 1.6681e-02,\n"," 1.8352e-01, -8.9203e-02, 1.3965e-01, -2.0261e-02, 6.8722e-02,\n"," 1.8553e-02, -9.7045e-02, -1.2551e-01, 1.5984e-01, -2.2538e-01,\n"," -1.7417e-01, 2.8017e-01, -6.5093e-01, -2.9347e-02, -1.7431e-01,\n"," 3.3796e-02, -1.8713e-01, -3.0813e-01, -2.4399e-01, 1.2527e-01,\n"," 3.8974e-02, -5.4384e-01, -4.2647e-01, 4.7544e-01, -4.0015e-01,\n"," -1.6522e-03, 5.4680e-01, 6.7714e-02, 4.5438e-02, -1.6284e-01,\n"," 2.5780e-02, 2.1296e-01, -7.3802e-02, 3.3140e-01, -3.1106e-02,\n"," -3.0973e-01, -3.1110e-02, -1.4657e-01, -7.4335e-02, 1.6210e-01,\n"," 4.7235e-01, -2.3911e-01, 1.5029e-01, 6.3438e-02, 2.3911e-01,\n"," -5.2692e-01, -5.1959e-02, 1.5054e-02, -5.3314e-01, -1.9149e-01,\n"," -1.6915e-01, 4.2658e-02, 1.9504e-01, -6.4390e-02, -4.8487e-01,\n"," 9.0178e-02, -7.2234e-01, -1.5735e-01, 5.6376e-01, -1.6120e-01,\n"," 2.9277e-01, -2.9321e-01, 4.8295e-01, -2.9926e-01, -4.1523e-02,\n"," 2.0738e-01, 
-1.4459e-01, -6.0407e-02, -2.0851e-01, 4.4935e-01,\n"," -1.2059e-01, 1.7210e-01, -5.3204e-01, -1.1049e-01, 8.4436e-02,\n"," -5.7446e-01, -1.3228e-01, 2.3758e-01, 1.3626e-01, 6.0630e-02,\n"," 1.6782e-01, -6.3781e-01, -3.0147e-01, 1.4648e-01, 4.2604e-01,\n"," 2.8237e-01, 9.3413e-02, 1.6418e-01, 3.6882e-01, 3.2992e-01,\n"," -5.9550e-01, 4.4938e-01, 5.1788e-01, 3.1840e-01, 3.8932e-02,\n"," 2.7064e-01, 3.9544e-01, 6.2783e-01, 4.6835e-02, -3.2905e-01,\n"," 6.3368e-01, -1.6904e-01, -2.3818e-01, -7.1211e-01, 9.6838e-02,\n"," 7.5521e-02, -2.6024e-01, 1.9698e-01, 1.2197e-01, 3.2710e-01,\n"," -3.5774e-02, -3.7642e-02, 2.5839e-02, 6.8550e-02, 1.8612e-01,\n"," -1.1212e-01, -2.3011e-01, 1.7205e-01, 6.2175e-01, 1.8936e-02,\n"," 1.4899e-01, 6.2772e-02, -9.7795e-02, 2.5738e-01, -1.1773e-01,\n"," -7.1959e-01, 4.2380e-02, 2.3227e-01, 2.8429e-01, -3.0644e-01,\n"," -1.9423e-01, 3.7913e-02, 6.3272e-01, 4.9849e-02, -2.4351e-01,\n"," -5.1694e-01, 3.6001e-01, 2.4383e-01, 2.5070e-01, -9.5266e-02,\n"," -2.8270e-01, -2.9825e-01, -1.5233e-01, 3.4698e-01, 3.5703e-01,\n"," 2.5354e-01, 2.1177e-01, 3.4404e-01, 4.3510e-01, -3.7868e-01,\n"," -9.5449e-02, 9.2271e-02, -6.6478e-02, -2.2979e-01, 3.9944e-01,\n"," 2.0427e-01, -3.1439e-01, -1.5797e-01, 2.4403e-01, -3.0117e-01,\n"," -3.0974e-01, -3.0128e-01, 1.7407e-01, 7.2640e-02, 6.4492e-02,\n"," 2.2294e-01, 2.8952e-01, -3.5746e-01, -3.1911e-01, -3.2260e-01,\n"," 1.5105e-01, -2.4025e-02, 1.6121e-01, -2.9198e-01, -5.6600e-01,\n"," -1.6964e-01, -3.3997e-01, -3.0448e-02, -3.9984e-02, 1.3506e-02,\n"," -1.7384e-01, 1.3862e-01, 6.4986e-01, 1.4770e-01, -2.2942e-01,\n"," 3.5280e-01, 7.1653e-02, 1.2103e-01, -2.4789e-01, -2.4513e-01,\n"," -3.9133e-01, -3.2352e-01, 5.2393e-01, -1.3351e-02, 2.1927e-03,\n"," 4.6060e-01, 1.4228e-01, -8.0325e-02, -2.0315e-02, -2.2391e-01,\n"," 1.5622e-01, -2.1808e-01, 5.1600e-02, -5.7983e-01, -3.5552e-01,\n"," -2.0607e-01, 1.4304e-01, -4.4083e-03, -4.0058e-01, -1.0976e-01,\n"," 4.2306e-01, -2.0331e-01, 2.8215e-01, -4.0330e-02, 1.6139e-01,\n"," 5.4913e-02, 3.3148e-01, 1.9069e-01, -3.2080e-01, 3.3625e-01,\n"," -3.0794e-01, 2.8779e-01, -3.0068e-01, 1.8774e-01, 2.8195e-01,\n"," -3.7653e-01, -5.7248e-01, -1.0889e-02, 7.1206e-01, 7.6275e-01,\n"," -2.6702e-01, -1.8121e-01, 1.7497e-01, -1.1442e-01, -2.1099e-01,\n"," 2.9193e-01, -8.7390e-02, -4.8446e-01, 4.2704e-01, -7.2526e-02,\n"," 3.1291e-02, 6.9506e-02, -2.4147e-01, 3.5460e-01, -2.5448e-01,\n"," 2.0231e-01, -4.0086e-01, 2.6643e-01, -1.0278e-01, 2.8087e-01,\n"," 1.1017e-01, -1.7966e-01, 2.6155e-01, 3.6210e-01, 3.9786e-01,\n"," -2.3134e-01, -1.7778e-03, 2.2224e-02, -6.6807e-01, 2.6089e-01,\n"," 3.2568e-01, 1.7133e-01, -5.3540e-01, -1.8078e-01, -4.6256e-02,\n"," -5.2264e-01, -5.9329e-02, 2.7114e-01, 1.1751e-01, 2.0666e-01,\n"," 2.6315e-01, 2.6359e-01, 4.4779e-01, 2.1477e-01, -1.8758e-01,\n"," -1.3193e-01, -1.7501e-01, -7.9440e-02, 2.8076e-01, 3.3405e-01,\n"," -1.5889e-03, 6.0187e-01, 1.4751e-01, -2.0325e-01, 9.4740e-02,\n"," 3.4586e-01, 1.5664e-01, -2.3871e-01, -6.5295e-02, 1.9541e-01,\n"," -1.0110e-01, 1.5283e-01, -1.7032e-01, 2.8351e-01, -9.4824e-02,\n"," -1.8327e-01, 5.1742e-02, 2.6521e-01, 4.0890e-02, 1.1414e-01,\n"," -3.1734e-01, 1.2534e-01, -2.3619e-01, 1.1304e-01, 2.7675e-01,\n"," 4.6104e-01, 9.8682e-02, -1.0959e+00, 2.1249e-01, 7.3772e-03,\n"," -4.4492e-01, 7.2612e-03, 9.3805e-02, 1.9840e-01, -1.0369e-01,\n"," -1.1757e-01, -3.3771e-01, -1.2509e-01, 1.4479e-01, 2.8065e-01,\n"," -3.1347e-01, -2.4807e-01, 1.3657e-01, 3.2290e-01, -2.1317e-01,\n"," 6.3424e-02, -2.9640e-01, -3.1879e-03, 3.2992e-01, -1.5354e-01,\n"," 
-1.7102e-01, -3.6804e-01, 1.7604e-01, -4.8442e-01, 5.0297e-02,\n"," -1.7808e-01, 7.6641e-02, -2.4768e-01, 2.8126e-01, -3.2884e-01,\n"," -1.6447e-01, -2.7347e-01, 1.1488e-01, 1.7271e-01, 1.4660e-01,\n"," 3.1749e-01, 5.0220e-01, 1.1037e-01, 3.1862e-01, 2.1792e-01,\n"," -1.4004e-01, -6.5812e-02, -1.9215e-01, 1.0413e-01, 2.1987e-01,\n"," -4.9967e-02, 1.4654e-01, -2.8810e-01, -2.2794e-01, -2.7359e-02,\n"," -1.0722e+00, -1.0813e-01, -2.8119e-02, -1.4672e-01, -2.6597e-01,\n"," -9.0721e-02, -1.2326e-01, -2.5824e-01, -1.7012e-01, 3.5358e-02,\n"," 1.0803e-01, 1.9695e-01, -2.1638e-01, -2.3235e-01, -5.4397e-01,\n"," 1.4322e-01, -2.9160e-01, -1.2255e-01, 4.9158e-01, 2.4520e-03,\n"," -2.4856e-01, 2.7421e-01, 2.3838e-01, -4.5021e-01, -2.0612e-01,\n"," -4.4548e-01, -2.8623e-01, -8.6317e-02, -2.5045e-01, 2.6330e-01,\n"," 1.9341e-01, -2.7378e-01, -2.7250e+00, 3.0125e-01, 2.3864e-01,\n"," -9.3118e-02, 1.5710e-01, 1.8249e-01, 4.2867e-01, 2.4945e-01,\n"," -3.4481e-01, -1.9328e-01, 1.1227e-01, -4.7625e-01, 3.4681e-01,\n"," 1.9968e-01, 1.3859e-01, 1.1096e-01]])\n","-: tensor([[ 3.0881e-01, -1.8431e-02, -1.8723e-01, -2.0743e-03, -1.3487e-01,\n"," -4.9475e-02, 4.3985e-01, -2.4973e-02, 3.3502e-01, -5.3035e-01,\n"," 9.7750e-03, 1.0307e-02, 7.6879e-03, 8.0298e-02, -8.7145e-02,\n"," -7.2275e-02, 6.6143e-02, 3.4019e-01, 4.5770e-01, 2.2446e-02,\n"," 4.7634e-01, -5.2016e-02, 2.2895e-01, 8.1737e-02, 1.2476e-01,\n"," 1.3980e-01, -2.7710e-01, 1.5021e-01, -2.4448e-01, 2.0755e-02,\n"," 1.9976e-01, 1.7917e-01, -6.6373e-01, 6.0277e-01, 1.3352e-01,\n"," -1.8082e-01, 3.3583e-01, -1.9302e-01, 2.4297e-01, -3.6388e-02,\n"," -1.5318e-02, -5.9102e-02, -2.6925e-01, -1.7659e-01, 1.9031e-04,\n"," -9.3259e-02, -5.6735e-01, -8.8457e-02, -4.9099e-01, -1.9993e-01,\n"," -3.3283e-01, 7.9535e-02, 4.8903e-01, 5.6563e-01, -1.7308e-01,\n"," 3.6665e-01, 1.1027e-01, 1.4337e-01, -2.3334e-01, 2.7151e-01,\n"," 1.0027e-01, 4.7967e-01, 1.4626e-01, 1.1997e-01, 4.6199e-01,\n"," 2.8828e-01, 1.3253e-01, 1.3095e-01, -3.0922e-01, 3.8638e-01,\n"," -4.4984e-01, -5.7286e-01, 1.7795e-01, 2.0185e-01, 3.7316e-01,\n"," -5.2352e-02, -1.0832e-01, 3.8096e-01, 1.5721e-01, -4.3419e-02,\n"," 4.4761e-01, 2.1262e-01, 1.6401e-01, 1.8217e-01, 4.2928e-01,\n"," 2.4439e-01, -2.3081e-01, -1.4977e-01, 1.4295e-01, -2.2510e-01,\n"," -2.3632e-01, -3.0739e-01, -1.4158e-02, 2.4953e-01, -2.8313e-02,\n"," -2.1629e-01, -2.6970e-01, -9.2278e-02, 2.3390e-01, -2.5901e-01,\n"," 3.8780e-03, -5.0197e-01, -1.7892e-01, 2.4655e-02, -2.4109e-01,\n"," -2.8732e-01, -1.4773e-01, -3.2083e-01, 1.5485e-01, -9.4527e-01,\n"," 4.2784e-02, 4.0101e-01, 6.4385e-02, 1.7875e-02, -8.8187e-02,\n"," 5.1481e-01, 1.6086e-01, 1.7307e-01, 3.2073e-01, 5.0476e-01,\n"," -3.2060e-01, 2.0526e-01, -6.6513e-02, 4.9185e-01, -6.0099e-02,\n"," 3.6929e-01, 2.5052e-01, -3.3704e-01, -2.3335e-01, -1.9160e-01,\n"," 6.3221e-02, 5.3876e-01, 1.6195e-01, -1.3072e-01, -2.6529e-01,\n"," -1.3970e-01, 3.2382e-01, 1.7605e-01, -5.3121e-01, -3.8378e-02,\n"," -1.2451e-01, 2.2193e-01, -1.1207e+00, -2.9178e-01, 6.1909e-01,\n"," 1.5995e-01, 1.6181e-01, -7.2952e-02, 5.7034e-01, -2.4582e-01,\n"," 3.1664e-01, 4.4156e-02, -1.0142e-01, 1.0701e-01, -7.7426e-01,\n"," -2.5601e-01, 1.7534e-01, 8.7972e-02, 3.8632e-01, 5.7367e-01,\n"," 5.3502e-01, 2.6261e-01, 5.5710e-01, 3.6893e-01, -2.7515e-02,\n"," 9.8149e-02, -2.2874e-01, 5.5041e-01, 3.5332e-01, 1.3156e-02,\n"," -2.6012e-01, -5.5077e-01, 2.3858e-01, 4.2729e-02, -2.9299e-01,\n"," 4.3785e-01, 1.9440e-01, 1.5930e-01, -3.4150e-02, -4.4707e-01,\n"," -2.7735e+00, -4.8181e-02, 4.7313e-02, -2.4471e-01, 
1.8528e-01,\n"," -3.8483e-01, 4.5337e-02, -1.6357e-01, -6.2813e-02, 1.6328e-01,\n"," -1.3078e-01, -1.1456e-01, -2.1758e-01, 5.4536e-01, 4.1796e-01,\n"," -1.3591e-01, -2.7533e-03, -5.1131e-01, 4.3371e-02, -9.0913e-02,\n"," 4.3065e-02, -1.3867e-01, 4.7574e-02, 2.2984e-01, -2.0350e-01,\n"," 1.4777e+00, -2.0221e-01, -4.3939e-01, 2.7812e-01, -5.4217e-02,\n"," -6.2090e-01, 4.4726e-01, 4.4734e-01, -5.7979e-01, 6.4312e-02,\n"," -2.8265e-01, 3.6540e-01, -3.0646e-01, -3.3377e-02, -8.3762e-02,\n"," -1.4298e-01, -7.7321e-03, 1.4864e-02, 2.1774e-01, -3.4082e-01,\n"," -3.6549e-01, 1.8373e-01, 3.5785e-01, 1.2703e-02, -5.8164e-01,\n"," -2.1877e-01, -3.5604e-01, 1.2492e-01, 2.8744e-01, -2.1492e-02,\n"," 2.1360e-02, 9.0692e-02, -1.5428e-01, -6.4428e-02, -2.3393e-01,\n"," 5.7481e-02, 1.6744e-01, 1.2930e-01, 3.6524e-01, -4.5820e-01,\n"," 1.4073e-01, 2.9452e-01, -1.0094e-01, 2.0882e-01, -7.8174e-02,\n"," -9.5813e-02, -4.7136e-01, -4.6895e-02, -3.4572e-01, 4.3377e-02,\n"," 1.9695e-01, 7.0562e-02, -1.6429e-01, -3.7607e-03, 5.2542e-02,\n"," -1.5150e-01, 1.4484e-01, 2.7844e-01, -8.1543e-02, -3.7834e-01,\n"," 6.4515e-02, 2.0875e-01, -4.1447e-01, -8.8050e-02, 1.3991e-01,\n"," -1.0528e-01, -6.1875e-02, -1.0741e-01, -1.2388e+00, -2.4134e-01,\n"," -3.2277e-01, 5.5973e-01, 1.3245e-01, -7.8635e-02, 8.3189e-02,\n"," 1.3357e-01, 3.9026e-01, -1.7185e-01, 2.5468e-01, 2.5899e-01,\n"," -4.3948e-01, -2.6243e-01, -5.3407e-01, 3.3849e-01, -3.3655e-01,\n"," -2.7990e-01, 1.0349e-02, 1.4530e-01, 3.3844e-01, 1.2926e-02,\n"," -7.0957e-02, 4.4945e-01, -8.6354e-02, -3.8247e-01, 3.0030e-02,\n"," 5.8441e-02, -2.7072e-01, -4.0820e-01, -3.4088e-01, -1.1057e-01,\n"," 6.9281e-02, 1.4232e-03, 1.1199e-01, -2.1359e+00, -8.4013e-02,\n"," 8.6298e-04, -3.1330e-01, -3.1417e-02, -2.0773e-02, 2.5969e-01,\n"," -3.1765e-01, -2.6191e-01, -1.9374e-02, 1.2681e-01, -2.3586e-01,\n"," 2.0729e-01, 2.3280e-01, 5.4218e-01, 3.1464e-01, -4.1739e-02,\n"," 8.8836e-02, -1.8059e-01, 3.1180e-02, -8.8437e-02, 1.0642e-01,\n"," 3.9288e-01, -4.4002e-01, 2.9638e-01, 1.4696e-01, -2.6648e-01,\n"," 1.6345e-01, -5.8564e-01, -7.9970e-02, -8.0786e-02, -4.2115e-01,\n"," -1.5261e-01, 2.6673e-01, -1.3879e-01, 4.9749e-02, 1.1419e-01,\n"," -7.3311e-02, 7.7170e-01, 2.4730e-01, -1.0701e-01, 3.0918e-01,\n"," 1.3267e-01, 2.3126e-01, -1.1653e-01, 4.7934e-01, -2.0004e-01,\n"," -3.1478e-01, -3.5324e-02, -1.8873e-01, 2.5209e-02, -3.1662e-01,\n"," 3.2575e-01, -1.3358e-01, -2.0875e-01, -2.9592e-01, 3.2679e-01,\n"," 4.0825e-02, -3.1138e-01, -9.7667e-02, 1.0777e-01, -4.7067e-01,\n"," 2.9488e-02, 7.3456e-03, -4.5606e-01, 3.5407e-01, -9.8753e-02,\n"," 6.3368e-02, -2.6063e-01, -1.9631e-01, 2.1140e-02, 7.4937e-02,\n"," 8.9374e-03, -7.3127e-01, -3.0261e-01, 6.6611e-02, -6.7388e-01,\n"," -4.2806e-02, 5.9818e-01, -4.7534e-01, -5.8988e-02, -1.9419e-02,\n"," -2.3791e-01, 3.2545e-01, 1.6591e-01, 4.6574e-02, 5.5026e-01,\n"," -1.8627e-01, -2.0221e-01, -1.6992e-01, 2.9499e-01, 1.5326e-01,\n"," 1.9270e-02, 2.4621e-01, 1.2050e-02, 5.2469e-01, 1.6512e-01,\n"," -2.6104e-01, 9.2767e-02, 2.3603e-02, -8.8077e-02, -8.8156e-02,\n"," -2.4892e-02, 1.3019e-01, 1.1889e-02, 1.4010e-01, -7.1788e-01,\n"," 2.6608e-02, 3.6268e-02, -2.5298e-01, 4.2888e-01, -6.4859e-01,\n"," 9.8205e-02, 3.5307e-01, 5.0112e-01, -5.4153e-01, -6.6189e-02,\n"," 2.6129e-02, -1.9491e-02, 2.7243e-01, 3.3352e-01, -3.1321e-02,\n"," 9.8212e-02, -1.8364e-01, 2.1382e-01, 2.3711e-01, 2.1198e-01,\n"," -5.1263e-01, -5.3737e-01, -9.8104e-02, 2.9240e-01, 2.7386e-01,\n"," 1.4614e-01, -7.6071e-01, -1.2294e-01, 5.5545e-02, -9.1988e-02,\n"," 6.0640e-01, 4.6874e-02, 
5.5175e-01, 5.5830e-01, 1.6957e-01,\n"," -4.5140e-01, 2.2449e-01, 2.0423e-01, 3.0442e-01, -9.1970e-02,\n"," 2.0044e-01, 1.2277e-01, 4.3866e-01, 1.4634e-01, -3.8025e-01,\n"," 5.7555e-01, -3.6780e-01, -1.4852e-01, -2.9311e-01, 1.8530e-01,\n"," -5.4767e-01, -3.1603e-01, 1.8686e-01, -2.4414e-01, 1.8953e-01,\n"," -2.6241e-01, 3.2963e-01, 2.1885e-01, -8.2357e-03, 2.0849e-01,\n"," -7.0639e-02, -6.1242e-01, 2.8057e-01, 3.2841e-01, 1.5015e-02,\n"," -1.9113e-01, 6.3136e-02, 2.5264e-01, -7.0410e-02, -6.2326e-03,\n"," -2.8874e-02, 3.9968e-01, 1.1005e-01, -1.2466e-01, -3.1321e-01,\n"," -8.1496e-02, 7.1259e-02, 2.4384e-01, -9.1276e-03, -2.9388e-01,\n"," -3.9513e-01, 1.2320e-01, 4.2580e-01, 2.7149e-01, -3.8357e-01,\n"," -3.9259e-02, -1.9710e-01, -4.7921e-01, -4.0352e-01, 3.6449e-01,\n"," 2.6308e-02, 7.3807e-02, 2.1356e-01, 3.0931e-01, 1.1677e-01,\n"," -4.9016e-02, 1.3813e-01, 1.4178e-01, -1.2268e-01, 2.5576e-01,\n"," 5.1400e-01, -2.7425e-01, -1.8913e-01, 1.0101e-01, -3.3838e-01,\n"," -2.9238e-01, -5.5262e-01, 2.8674e-01, 1.9634e-01, 3.7120e-01,\n"," 1.7354e-01, 4.2791e-01, -1.9697e-01, -5.9127e-01, -3.6937e-01,\n"," -1.9443e-01, 1.0262e-01, -1.0176e-01, 3.5622e-04, -3.4120e-01,\n"," -5.4022e-01, -5.0228e-01, -4.1879e-01, -3.9689e-01, 3.2575e-01,\n"," 1.2836e-01, -5.1145e-03, 3.3047e-01, -1.4841e-01, -2.8841e-01,\n"," 1.3860e-01, -9.3225e-02, 2.4217e-01, 2.2177e-01, -3.9906e-01,\n"," -2.3422e-01, 8.1297e-02, -3.4354e-01, 6.7377e-03, 1.5799e-01,\n"," 2.7954e-01, 2.0366e-01, 2.4677e-01, -3.1699e-01, -3.6054e-01,\n"," 1.9518e-01, -3.6106e-01, -5.6279e-02, -4.3485e-01, 1.5595e-01,\n"," 7.3996e-02, 9.8081e-02, -2.1425e-01, -1.8261e-01, 5.6920e-02,\n"," 8.2795e-02, -2.0711e-01, 6.9064e-02, 2.2939e-01, -9.3168e-02,\n"," 5.8189e-01, 1.2109e-01, 5.0591e-01, -1.0948e-01, 1.5804e-01,\n"," -4.2084e-01, 1.6607e-01, -4.0567e-01, -1.8787e-01, 1.5802e-01,\n"," -2.8762e-01, -6.4372e-01, -1.6994e-01, 1.0742e+00, 6.0754e-01,\n"," -1.2398e-01, -2.3258e-01, 3.9317e-02, -2.6285e-01, -5.0107e-01,\n"," 2.4170e-02, 2.4937e-01, -3.6157e-01, 4.4621e-01, 2.4640e-01,\n"," 3.4309e-02, 3.4567e-01, -3.8077e-01, 1.1273e-01, -4.5777e-01,\n"," -2.5005e-01, -6.6396e-01, 2.6011e-01, -2.9320e-01, 6.6699e-01,\n"," -3.6833e-01, 3.3794e-02, -2.0355e-02, 1.6663e-01, 6.6694e-01,\n"," 1.4021e-01, 2.5975e-01, -1.7444e-02, -1.3142e-01, 4.3643e-01,\n"," 8.4788e-02, 5.8767e-01, -5.7058e-01, -1.0283e-01, -3.5304e-01,\n"," -6.6969e-01, -1.0152e-01, 1.7763e-01, -1.7186e-01, -1.1462e-01,\n"," 2.6949e-01, 3.6778e-01, 1.3292e-01, 1.1114e-01, -4.2783e-01,\n"," -2.0638e-01, -3.5399e-01, 1.0316e-01, 1.4024e-01, 2.1931e-01,\n"," -2.3168e-01, 3.1902e-01, -2.9244e-01, -3.7271e-01, 2.4853e-01,\n"," 3.2360e-01, -1.9291e-01, -9.9434e-02, 5.9524e-02, 9.4724e-02,\n"," 6.3932e-02, 2.7129e-01, 1.3411e-01, -1.4441e-01, -3.0720e-01,\n"," 2.4942e-01, -4.9376e-02, -3.9676e-01, 1.1472e-01, 9.4558e-02,\n"," 2.7924e-02, -1.6204e-01, -3.5314e-01, -6.3576e-01, -5.6745e-02,\n"," 1.4032e-01, -2.2482e-01, -1.1739e+00, 1.5968e-01, -8.4789e-02,\n"," -2.2310e-01, -5.3320e-02, 4.9960e-01, 3.4601e-01, -1.2102e-01,\n"," -1.8815e-01, -3.2725e-01, -7.0873e-02, 4.8036e-02, 2.0354e-01,\n"," -4.2389e-02, -2.6361e-02, 1.3182e-01, 6.5451e-02, 1.4911e-01,\n"," 2.8582e-01, -2.6823e-01, 3.5206e-01, 2.7665e-01, -5.7584e-02,\n"," -7.3346e-02, 2.0605e-01, 2.2716e-01, -8.2873e-01, -3.0445e-01,\n"," -1.5500e-01, 4.6156e-01, -1.0073e-01, 7.4445e-01, -9.4295e-02,\n"," 6.6074e-02, -1.8402e-01, -2.9506e-01, 8.1425e-02, 1.7701e-01,\n"," 1.8796e-01, 6.5443e-01, -3.3569e-02, 6.6976e-01, 1.3333e-01,\n"," 
-3.0119e-01, -1.0617e-01, -3.3671e-01, 9.6051e-02, -1.4850e-03,\n"," 4.6755e-01, -3.3934e-01, -7.8341e-01, -9.4983e-02, 2.5593e-01,\n"," -1.1166e+00, 7.7357e-01, -8.6369e-02, -2.5843e-01, -8.2375e-02,\n"," 1.6280e-01, -7.4769e-02, -6.1348e-01, 2.4280e-01, -6.2289e-01,\n"," 1.1129e-01, 3.0934e-03, -2.9315e-01, -2.6015e-01, -6.4838e-01,\n"," 5.8760e-01, 5.5165e-01, -2.2982e-01, 2.3379e-01, 2.2738e-01,\n"," 3.2897e-01, -2.1704e-01, 5.6538e-01, -3.5777e-01, 3.5868e-01,\n"," -1.4628e-02, 1.7756e-01, -5.6393e-01, -5.4452e-01, -1.5569e-01,\n"," 4.0129e-01, 9.1878e-02, -2.9235e+00, -2.1916e-01, 1.0543e-01,\n"," -2.9480e-01, 1.7017e-01, 2.5921e-02, 5.4776e-01, -6.6025e-02,\n"," 2.8036e-01, -1.1701e-01, 1.3038e-01, -3.1908e-01, -5.0228e-02,\n"," 1.7403e-01, 1.4155e-01, 3.2593e-01]])\n","diamond: tensor([[ 1.6155e-01, -6.2006e-02, -1.5000e-01, 3.0886e-02, 1.6777e-02,\n"," -2.9198e-02, 2.4209e-01, -2.3379e-01, 5.3452e-02, -2.3526e-01,\n"," -9.6149e-02, 8.7985e-02, 1.4318e-01, 1.3678e-01, -3.0620e-01,\n"," -4.6677e-02, -2.7219e-02, -9.1051e-03, 9.1234e-03, 8.8166e-02,\n"," 7.7598e-02, -8.4719e-02, 1.3375e-01, 1.0031e-01, 2.5690e-02,\n"," 2.3563e-01, -2.2137e-01, -1.0481e-01, -2.0500e-01, 1.6794e-01,\n"," 1.2290e-02, -1.3172e-01, 1.1735e-01, 3.1996e-01, -2.7419e-02,\n"," -1.3261e-01, 6.9343e-02, -1.0631e-01, -2.8758e-01, -1.3452e-01,\n"," 1.1645e-02, -3.6717e-02, 1.4572e-01, -1.0871e-01, 6.7752e-02,\n"," -9.1051e-02, -3.3869e-01, -3.1073e-02, -9.0417e-02, 1.2567e-01,\n"," -1.9024e-01, 1.4569e-01, -3.8745e-02, 3.3764e-01, 1.4760e-01,\n"," 5.0370e-02, 2.2879e-01, 3.2504e-02, -1.1580e-01, -6.6777e-02,\n"," 6.6056e-02, 9.7825e-03, -1.0010e-01, 1.4854e-01, 1.8629e-01,\n"," 5.8362e-02, 4.1709e-01, -1.5232e-01, -3.4063e-01, 4.8405e-02,\n"," 5.8052e-02, -2.6320e-01, 2.3537e-01, -1.3473e-01, 2.1460e-01,\n"," 3.5210e-02, -2.0203e-01, 3.0658e-01, 3.5111e-01, 1.2765e-01,\n"," 2.8540e-01, 1.7238e-01, 1.7749e-01, 2.5244e-01, -9.8107e-02,\n"," 1.9131e-01, -1.3892e-01, -1.8316e-02, -1.0845e-01, 2.1901e-01,\n"," 3.6928e-03, -1.6223e-02, 1.5503e-01, 2.1549e-01, 7.7459e-02,\n"," -9.1703e-02, -2.7171e-01, 5.0915e-02, 1.4361e-01, -2.5122e-01,\n"," -1.3461e-01, -1.9500e-01, -1.8262e-01, 3.6907e-01, -3.1924e-01,\n"," 7.2243e-02, -7.5017e-02, 7.7848e-02, 1.1544e-01, -8.8202e-01,\n"," 3.9885e-01, -6.2385e-02, -3.1391e-02, -1.4352e-01, -1.0472e-01,\n"," 1.3788e-01, 1.5366e-01, 1.4801e-01, 1.8624e-01, 1.9576e-01,\n"," -2.3702e-01, -2.1261e-01, 1.3194e-01, 6.4998e-01, 1.4062e-02,\n"," 1.1957e-01, -1.9939e-01, -3.5114e-01, -2.7650e-03, -3.8376e-01,\n"," -3.1642e-01, 1.6965e-01, 2.1439e-01, 4.3543e-01, -5.8537e-02,\n"," -6.8431e-02, -3.3889e-02, -6.3625e-02, -1.1258e-01, -8.5980e-02,\n"," -4.1167e-02, 4.5939e-01, -1.0091e+00, -4.0449e-01, 1.7593e-01,\n"," -4.1240e-02, 3.0619e-01, 2.4596e-02, 2.8869e-01, -6.0369e-02,\n"," 2.7029e-01, 3.2369e-01, -3.7970e-01, -2.2171e-01, -2.3308e-01,\n"," -2.1764e-01, 4.3358e-02, -1.3117e-02, 2.5879e-01, 4.1362e-01,\n"," 2.9991e-01, 3.5048e-01, 3.0319e-02, 1.8770e-01, -1.5346e-01,\n"," -1.4167e-02, -2.4166e-01, 5.1696e-01, 1.0828e-01, 2.1880e-02,\n"," -3.7915e-01, -3.3599e-01, 2.0425e-01, 3.7316e-01, -2.1216e-01,\n"," 1.6552e-01, 1.7652e-01, 1.6671e-01, -1.9825e-01, -2.5311e-01,\n"," -2.7847e+00, -9.9542e-02, -1.0271e-01, -9.0576e-02, 3.3444e-01,\n"," -6.9123e-02, 1.7905e-01, -2.1512e-01, 3.4969e-01, -2.2629e-01,\n"," -3.7289e-01, -1.7309e-01, -1.0192e-01, 2.2423e-01, 7.7583e-02,\n"," -1.3439e-01, -1.2030e-01, -2.1322e-01, 1.3032e-01, 8.3531e-02,\n"," 1.3636e-01, -7.6187e-02, 2.0385e-01, 2.1101e-01, 
-1.3745e-01,\n"," 8.3567e-01, 1.5643e-01, -3.5256e-01, 1.6974e-01, 1.1594e-03,\n"," -5.4599e-01, 3.8616e-01, 2.0408e-01, -2.3741e-01, 3.7920e-01,\n"," -1.8009e-01, 3.1296e-01, -2.6296e-01, -2.5109e-01, 1.3402e-02,\n"," 1.2154e-01, 5.0769e-02, 2.2573e-03, 1.4103e-03, -1.4490e-01,\n"," -4.2951e-01, 1.9985e-01, 9.4503e-02, 6.1141e-02, -8.7380e-02,\n"," -2.4632e-01, -3.0656e-01, 2.9041e-01, -1.9808e-02, -1.1227e-01,\n"," 2.0792e-01, 1.6686e-01, 1.4599e-01, -3.8115e-02, -1.6481e-01,\n"," -9.0445e-02, 1.1601e-01, 2.1240e-01, 2.0106e-01, -2.9176e-01,\n"," -1.3053e-01, 3.1890e-01, 3.7415e-02, 3.5731e-01, -2.4931e-01,\n"," -7.1683e-02, -4.3466e-01, -8.8263e-02, -1.3551e-01, -1.7927e-01,\n"," 5.8203e-02, -1.2975e-01, -2.0773e-01, 3.5970e-01, 4.0680e-02,\n"," 1.3878e-01, 2.0321e-01, 4.5236e-01, -1.8268e-01, -1.3206e-02,\n"," 3.8553e-01, 3.1719e-01, 5.0518e-02, -1.0016e-01, 4.0758e-02,\n"," -8.5421e-02, -5.2012e-04, 6.0038e-02, -1.4116e+00, -1.1775e-01,\n"," -6.1927e-02, 3.0228e-01, 4.3233e-02, 1.3895e-01, 1.4152e-02,\n"," -1.6804e-01, 1.3765e-01, -3.7362e-02, 6.4547e-02, 2.8603e-01,\n"," -1.6678e-01, -3.6179e-01, -4.9810e-01, 1.3687e-01, -3.1914e-01,\n"," -8.3354e-02, 1.1649e-01, 1.0426e-02, 1.3492e-01, 2.9956e-01,\n"," -1.1194e-02, 5.6056e-02, 9.0633e-02, -6.4260e-01, -2.1332e-01,\n"," -2.0331e-02, 8.8627e-02, -1.8637e-01, -1.4972e-01, 2.3080e-01,\n"," 2.5664e-01, 1.0753e-01, 1.1893e-02, -1.9986e+00, -1.2929e-01,\n"," 1.1760e-01, -4.7291e-01, 1.4547e-01, -3.1767e-01, 1.6647e-01,\n"," 3.9899e-02, -4.1275e-02, -1.6145e-02, 1.1434e-01, -2.2873e-01,\n"," 3.5291e-01, 1.1446e-01, 8.3882e-02, 1.1774e-02, -1.7421e-01,\n"," -2.4409e-01, -3.9251e-01, 1.6896e-01, 8.8537e-03, -2.0875e-01,\n"," 2.7864e-01, -1.3921e-01, 2.3089e-02, 1.5487e-01, -1.0211e-01,\n"," 1.8610e-01, -5.6059e-01, 2.1383e-02, -3.5948e-01, -2.7307e-01,\n"," -2.8188e-01, 1.1330e-01, 2.2731e-01, 1.0505e-01, 1.8506e-01,\n"," 7.9843e-02, 7.9518e-01, -2.7031e-02, 2.7724e-01, 2.7292e-01,\n"," 1.6114e-01, -2.0382e-01, -1.9601e-01, -7.7445e-02, 5.5307e-02,\n"," -3.4659e-01, 1.2376e-02, 2.0118e-01, 4.1644e-01, -1.3287e-01,\n"," 1.7172e-01, -1.2916e-01, -3.0420e-01, -2.1040e-01, -9.5013e-02,\n"," 3.1264e-01, -7.8368e-02, -4.9758e-02, 6.1746e-02, -2.4088e-01,\n"," 3.1686e-01, 3.8709e-01, -7.8150e-01, 7.8537e-02, -7.3150e-02,\n"," -9.1013e-02, -3.7428e-02, -1.4620e-01, -9.2994e-02, -3.7137e-01,\n"," -3.4075e-02, -8.5667e-01, -5.1011e-01, 2.2376e-01, -3.7160e-01,\n"," -6.5752e-02, 1.8427e-01, -1.7117e-01, 2.4116e-01, -2.7424e-02,\n"," 8.4986e-02, 1.8369e-01, 2.0374e-01, -1.7834e-02, 1.7031e-01,\n"," -2.5706e-01, 1.2295e-01, -1.2203e-01, 2.7260e-02, 5.7436e-02,\n"," 2.2619e-01, 1.0386e-01, 2.3086e-01, 1.4760e-01, 3.6100e-01,\n"," -2.1782e-01, 2.7142e-02, 1.7698e-01, -3.7589e-01, -2.8124e-01,\n"," 1.3302e-01, -1.2577e-01, -4.7649e-02, 8.7537e-02, -4.7663e-01,\n"," -1.6076e-01, -1.0944e-01, -1.8792e-01, 1.5006e-01, -1.8122e-01,\n"," 2.4751e-01, 1.6087e-01, 8.5157e-02, -2.8510e-01, -2.1377e-01,\n"," 1.5715e-01, -1.4482e-01, 1.8342e-01, -2.1588e-02, 1.9625e-01,\n"," 2.5457e-01, 1.0668e-01, 2.1085e-02, 3.6033e-02, 4.3402e-02,\n"," -4.3830e-01, -2.7535e-01, 3.0799e-02, 1.1966e-01, 1.5044e-01,\n"," 9.6739e-02, -4.3612e-01, -2.7303e-01, 9.8734e-02, 1.2048e-01,\n"," 2.1290e-01, 2.1660e-01, 3.8917e-01, 4.5810e-01, 1.7736e-01,\n"," -4.5409e-01, 2.4038e-01, 1.4458e-01, 2.5858e-01, -1.3291e-01,\n"," 1.2081e-01, 1.9987e-01, 2.6053e-01, 2.6510e-01, -1.9645e-01,\n"," 3.8082e-01, -4.6392e-02, -2.4017e-02, -2.0708e-01, 2.1349e-02,\n"," -2.0304e-01, -3.1355e-01, 2.5605e-01, 
-1.5252e-01, -4.2663e-02,\n"," 8.2841e-02, 1.4852e-01, 4.3171e-02, -2.5515e-02, 5.9696e-02,\n"," 3.1954e-02, -3.8974e-01, 1.4721e-01, 5.4626e-01, -1.4231e-01,\n"," 1.4410e-01, 1.1249e-01, -1.1134e-01, 2.8159e-01, 5.7671e-02,\n"," -4.4984e-01, 9.1915e-02, 1.4619e-01, 9.5846e-02, -3.4576e-01,\n"," 4.8444e-02, 5.5058e-02, 2.8162e-01, 1.3359e-01, -4.1147e-01,\n"," -2.2814e-01, 2.2646e-01, 5.9465e-01, 3.4965e-01, -3.8291e-01,\n"," -2.6475e-01, -4.3818e-02, -1.2649e-01, 8.8705e-02, 3.5307e-01,\n"," -7.6790e-02, 1.7741e-01, 3.1729e-01, 1.5065e-01, -2.1113e-01,\n"," -2.7623e-01, 2.1714e-01, 1.6124e-01, -1.5851e-01, 1.4791e-01,\n"," 4.3121e-01, -4.1026e-01, -2.9160e-01, 1.3234e-01, -2.3545e-01,\n"," -2.5787e-01, -1.6319e-01, 3.6354e-02, 4.1241e-01, 1.0721e-01,\n"," 1.9513e-01, 2.6139e-01, -1.5938e-01, -2.5043e-01, -2.6829e-01,\n"," 2.5153e-01, 5.7163e-02, 1.2275e-01, -5.5238e-02, -3.6964e-01,\n"," -2.4181e-01, -3.4939e-02, 4.9968e-02, 1.3704e-01, 2.5634e-01,\n"," -2.0681e-01, -3.9144e-02, 4.8878e-01, 1.7427e-01, -6.8812e-01,\n"," 9.9883e-02, -3.0734e-01, -2.1347e-01, -3.1412e-01, -3.7722e-01,\n"," -3.7671e-01, -4.3934e-02, 1.9778e-02, -2.2081e-01, 1.7804e-01,\n"," 6.1392e-01, 2.6825e-01, 2.4625e-01, -2.0385e-01, -1.8708e-01,\n"," 2.9943e-01, -1.8142e-01, 5.5168e-04, -3.6227e-01, 5.9422e-02,\n"," 6.3952e-02, 1.0054e-01, -6.2633e-02, -1.3071e-01, 1.9196e-01,\n"," 2.7584e-01, -1.7983e-01, 2.1253e-02, 6.6244e-02, -4.0689e-02,\n"," -9.4789e-02, 2.1489e-01, 3.4149e-01, -3.7516e-02, 1.7910e-01,\n"," -7.6141e-02, 2.9747e-01, -3.1768e-01, 9.7735e-02, 3.7721e-01,\n"," -2.9060e-01, -4.1847e-01, 1.2051e-02, 9.2768e-01, 5.0307e-01,\n"," -1.9633e-01, -7.0922e-02, 1.4282e-01, -2.5699e-01, -6.5048e-02,\n"," 8.5316e-02, -8.0577e-02, -3.8652e-01, 3.2823e-01, 1.7272e-02,\n"," -1.9758e-01, 8.8534e-02, -2.0298e-01, 2.0466e-01, -2.7140e-01,\n"," 1.5935e-01, -4.2760e-01, 6.2103e-02, -1.0143e-01, 2.3680e-01,\n"," -4.4769e-02, -1.1840e-02, -1.3973e-01, 7.9882e-02, 1.9626e-01,\n"," -9.6507e-02, 6.7166e-02, -9.5862e-02, -4.8592e-01, 3.1385e-03,\n"," 1.9411e-01, 3.8237e-01, -5.3866e-01, 1.1092e-01, 5.7812e-02,\n"," -5.6236e-01, 1.8260e-01, 3.8107e-01, 5.6748e-03, -1.3619e-01,\n"," 4.0926e-01, 2.7191e-01, 2.8821e-01, 2.5143e-01, -4.6042e-01,\n"," -2.1689e-01, -3.9808e-02, 2.6291e-01, 3.1902e-01, 1.5881e-01,\n"," 9.0327e-02, 3.5406e-01, 1.7696e-01, -1.7525e-01, 3.1081e-01,\n"," 4.9704e-02, -2.1773e-01, -2.2470e-01, 3.8713e-02, -6.8153e-02,\n"," 1.7886e-01, 2.4056e-02, 2.5037e-02, -1.5545e-01, 3.8074e-03,\n"," -8.2626e-02, 1.5410e-02, 1.4101e-01, 4.3115e-02, -2.9053e-01,\n"," -2.9652e-01, 1.5493e-01, -4.2695e-02, -1.8727e-01, 1.3930e-01,\n"," 1.0442e-01, -2.4539e-01, -1.1451e+00, -5.5929e-02, -1.1946e-01,\n"," -5.5293e-02, -8.4008e-02, -9.9077e-02, 3.0845e-01, 2.4989e-02,\n"," -2.9476e-01, -2.2857e-01, -7.1070e-02, 2.4382e-01, 2.2943e-01,\n"," -1.6535e-02, 5.8608e-02, -1.1309e-01, -1.4129e-01, -3.9074e-02,\n"," 4.3498e-01, -4.7297e-01, 2.1886e-02, 4.2312e-03, -1.7632e-01,\n"," -1.0953e-01, 1.3565e-02, 4.7293e-01, -4.7238e-01, -7.1958e-02,\n"," -9.5515e-02, 1.4450e-01, -1.4632e-02, 3.3539e-01, -2.4305e-01,\n"," -5.7962e-02, -6.5108e-02, 1.3570e-01, 1.7703e-01, 1.1686e-01,\n"," -1.3157e-01, 3.1411e-01, 2.9696e-01, 2.8327e-01, -2.2373e-01,\n"," -1.6743e-01, 1.4353e-01, -9.5142e-02, -4.9715e-02, 4.8305e-01,\n"," -3.4590e-01, 6.7829e-03, -5.5110e-01, -7.1140e-02, 1.3638e-01,\n"," -1.1708e+00, 3.6722e-01, -1.9811e-02, -2.4107e-01, -1.4952e-01,\n"," 2.9152e-01, 1.1852e-02, -3.4777e-01, 3.1889e-02, -1.3998e-01,\n"," 1.4143e-01, 
1.1046e-01, -7.7125e-02, -1.1550e-01, -4.3812e-01,\n"," 2.1294e-01, 1.1019e-01, 1.0932e-01, 1.9545e-01, 2.0674e-01,\n"," -3.1174e-03, 1.4706e-01, 1.1089e-01, -1.2874e-01, -4.4977e-02,\n"," -8.1229e-02, -1.9227e-01, -1.2100e-01, -2.8690e-01, 1.4305e-01,\n"," 2.7572e-02, -2.7691e-01, -3.0675e+00, -1.2856e-01, 2.4486e-01,\n"," -4.9501e-02, 1.0879e-01, -8.5592e-02, 2.1765e-01, 2.9262e-03,\n"," 1.2153e-02, -2.3550e-01, 1.0965e-01, -4.4805e-01, 7.2454e-02,\n"," 6.0577e-02, -1.9104e-01, -2.3315e-01]])\n","hotels: tensor([[ 2.1570e-01, -3.6560e-03, 2.2070e-02, 1.1095e-01, 2.5285e-01,\n"," -1.9882e-01, 2.5208e-01, 1.1614e-01, -3.8234e-02, -7.0603e-02,\n"," -2.3725e-01, -1.0097e-01, -3.6462e-02, 1.5268e-01, -1.3794e-01,\n"," -1.8959e-01, -1.0833e-01, 3.3954e-02, 3.3592e-02, 6.3228e-02,\n"," 1.3950e-01, 3.2192e-02, 1.2301e-01, 3.0005e-02, -2.2232e-04,\n"," 3.1592e-02, -3.9093e-01, 1.3103e-01, -4.0480e-01, -8.1852e-02,\n"," -2.4360e-02, -6.0579e-02, -1.0232e-01, 3.7015e-01, 6.6290e-02,\n"," -2.1151e-01, 1.7951e-01, -3.5618e-01, -1.0347e-01, -1.6530e-01,\n"," 5.7957e-02, -1.8272e-01, 1.8002e-01, 5.7539e-02, -5.7325e-02,\n"," -2.5141e-01, -2.6370e-01, -2.3859e-02, -4.1974e-01, 5.0567e-02,\n"," -1.7736e-01, 1.9365e-01, 2.6921e-01, 2.1845e-01, 2.6959e-01,\n"," 4.4859e-01, 9.2731e-02, 3.4062e-02, 2.0704e-01, -4.2809e-02,\n"," 1.5843e-01, 2.8457e-01, -1.6724e-01, -6.8417e-02, 4.5857e-01,\n"," -3.7095e-02, 2.7349e-01, -8.1998e-02, -3.7398e-01, 2.8256e-02,\n"," 8.6303e-03, -2.6784e-01, 3.8161e-01, 1.3063e-01, 1.2557e-01,\n"," 3.9083e-02, -1.9260e-01, 2.4832e-01, 4.8675e-02, -9.7626e-02,\n"," 1.4296e-01, 1.8805e-01, 2.5871e-01, 2.6985e-01, 6.6015e-02,\n"," 2.8838e-01, -3.8947e-02, -1.4851e-01, -2.4454e-01, -1.8010e-01,\n"," 5.5977e-02, 6.3045e-02, 2.2144e-01, 9.6634e-02, -5.9859e-02,\n"," -1.0602e-01, -2.8694e-01, -7.9329e-02, 1.2900e-01, -7.8515e-02,\n"," -7.5361e-03, -5.0134e-02, -1.8004e-01, 1.5112e-01, -1.1779e-01,\n"," -1.2647e-01, -1.0062e-01, 2.2550e-01, 1.0960e-01, -7.2830e-01,\n"," 5.5442e-02, -2.2093e-02, 9.0858e-02, -1.5698e-01, -1.6904e-01,\n"," 1.6879e-01, 3.1694e-01, 1.9068e-01, 6.8256e-02, -2.0193e-02,\n"," -3.0307e-01, -1.9163e-01, 3.2432e-01, 5.0430e-01, 1.0770e-01,\n"," 1.8309e-01, 8.5545e-02, -3.4417e-01, 1.0324e-02, -3.6015e-01,\n"," -3.4387e-01, 4.0733e-01, -1.7967e-01, 3.1672e-01, -1.1553e-01,\n"," 3.6285e-02, 1.9377e-01, 6.8579e-02, -2.7265e-01, -1.3898e-01,\n"," -1.5290e-01, 4.6389e-01, -1.0282e+00, -3.0548e-01, 1.6732e-01,\n"," -1.1143e-01, 3.3695e-01, -1.1625e-01, 1.3198e-01, -2.7722e-01,\n"," 2.4101e-01, 2.9062e-01, -2.5042e-01, 4.5858e-02, -3.9232e-01,\n"," 6.9966e-02, 2.3269e-01, -2.7016e-01, 2.2791e-01, 3.2623e-01,\n"," 3.5344e-01, 2.4199e-01, 1.5735e-01, 2.4001e-01, -5.0976e-01,\n"," -1.0288e-01, -3.5309e-01, 4.8604e-01, 1.7118e-01, 7.6825e-02,\n"," -5.8884e-01, -1.9424e-01, 8.0746e-02, -4.9592e-02, -1.3407e-01,\n"," 9.6672e-02, 6.7221e-02, 2.0306e-01, 2.0445e-01, 5.2293e-02,\n"," -2.9323e+00, -9.8120e-02, 3.9653e-02, -3.0875e-02, 3.8918e-01,\n"," -2.2295e-01, 6.4385e-02, -5.5887e-02, 3.5564e-01, -3.1393e-01,\n"," 2.7911e-02, 5.2292e-03, -2.7949e-01, 2.5896e-01, 2.9909e-01,\n"," -1.5645e-01, -6.6614e-02, -1.4716e-01, 1.3896e-02, -3.2215e-02,\n"," 2.4755e-03, -7.9098e-03, 1.0726e-01, 3.3981e-01, -2.8338e-01,\n"," 8.1519e-01, 2.1378e-01, -3.1875e-01, 1.3456e-01, 5.1999e-02,\n"," -2.4553e-01, 6.4865e-02, 2.0653e-01, -3.8864e-01, 3.1717e-01,\n"," -3.6816e-01, 2.0379e-01, -3.3624e-03, -3.7629e-01, -1.2752e-01,\n"," 3.5937e-01, -6.3614e-02, -1.6655e-01, 2.5790e-01, -1.0386e-01,\n"," 
-3.7854e-01, 3.2419e-01, 9.1966e-02, 3.4107e-01, -4.0249e-03,\n"," 1.3570e-01, -1.7789e-01, 6.8757e-02, 1.0811e-01, -1.9535e-01,\n"," 2.3298e-01, -1.0943e-01, -2.3920e-02, -2.1872e-01, -2.8408e-01,\n"," -1.1572e-01, -1.5993e-01, 1.5863e-01, 4.7460e-02, 4.0635e-03,\n"," -2.4289e-01, 4.9126e-02, 1.2022e-01, 2.8167e-01, -1.5977e-01,\n"," 1.4688e-01, -4.3731e-01, 1.3364e-01, -2.7100e-01, 3.2295e-01,\n"," 1.1755e-01, 2.2072e-01, -3.4294e-02, 1.6573e-01, -3.1135e-03,\n"," 1.2076e-02, 3.8071e-01, 2.0388e-01, -1.0366e-01, -2.7496e-01,\n"," 8.1956e-02, 1.7138e-01, -1.6189e-01, 8.5068e-02, 7.1523e-03,\n"," -9.4681e-02, -1.1791e-01, -2.4895e-01, -1.1682e+00, 3.4507e-02,\n"," -2.6511e-01, 1.6986e-01, 2.4967e-01, 3.0307e-01, 1.5750e-01,\n"," -4.6939e-05, 2.1036e-01, -7.3852e-02, -1.2204e-02, 9.2119e-02,\n"," -4.2223e-01, -3.7698e-01, -1.7462e-01, 1.6576e-02, -2.9434e-01,\n"," -3.8980e-02, 5.3365e-02, 9.2417e-02, 2.6443e-02, 1.9231e-01,\n"," -1.7320e-01, 1.2355e-01, 1.3880e-01, -3.5874e-01, -3.0139e-01,\n"," -2.0057e-01, -1.3062e-01, -2.5657e-01, -9.6853e-02, -1.0054e-01,\n"," 2.7120e-01, 2.7214e-01, -2.1171e-01, -2.3078e+00, 3.3603e-02,\n"," 1.3179e-02, -4.5747e-01, 5.3018e-02, -2.6425e-01, 2.4304e-01,\n"," -1.9266e-01, -4.6219e-01, -5.4490e-02, 1.0331e-01, -2.9292e-01,\n"," 2.4471e-01, 2.4409e-01, 9.6816e-02, 2.3245e-01, 6.8937e-02,\n"," -1.1625e-01, -4.3554e-01, 2.0867e-01, 5.3127e-02, -7.2709e-02,\n"," 1.0633e-01, -2.3572e-01, -1.0199e-02, 3.7410e-01, -4.2983e-02,\n"," 1.5433e-01, -4.9769e-01, -2.1472e-02, -1.5463e-01, 5.8364e-02,\n"," -1.9376e-01, 2.0142e-01, 6.7117e-02, 7.7030e-02, -1.3188e-01,\n"," 1.9811e-01, 4.4363e-01, -5.9953e-02, 7.9544e-02, 3.5104e-01,\n"," 1.2504e-01, -9.4078e-02, -5.3964e-03, -3.1075e-01, -3.9127e-02,\n"," -3.3037e-02, -2.3994e-02, 3.1047e-01, 3.4039e-01, -1.5766e-02,\n"," 3.3204e-01, 1.0676e-01, -2.2375e-01, -5.7133e-01, 1.0138e-01,\n"," 2.0435e-01, -5.0145e-02, -1.5755e-01, 2.3209e-01, -8.6250e-02,\n"," 2.2020e-01, 1.4109e-01, -3.5746e-01, 5.6239e-02, -1.6297e-01,\n"," -5.6742e-02, -3.4379e-02, 1.9575e-01, -9.9260e-02, -3.9623e-01,\n"," 6.2562e-02, -7.1248e-01, -3.9377e-01, -2.0632e-02, -2.2444e-01,\n"," 9.9969e-02, 2.8997e-01, -2.6505e-01, 1.5017e-01, 1.1252e-01,\n"," -1.6456e-01, 1.0271e-01, 1.9498e-01, -3.2985e-01, -1.0661e-01,\n"," 9.5078e-02, -1.1951e-01, -2.8301e-01, -3.4268e-02, 1.9306e-01,\n"," 4.9405e-01, 1.4748e-01, 1.5288e-01, 1.4747e-01, 4.6642e-02,\n"," -3.0930e-01, 2.8735e-01, -1.4130e-01, -5.1815e-01, -4.0514e-01,\n"," 3.5453e-01, 1.8386e-02, 1.2902e-01, -1.8731e-01, -7.7586e-02,\n"," -1.5061e-01, -7.3409e-02, -5.0642e-02, 3.7083e-02, -4.7849e-02,\n"," 1.4699e-01, -4.5312e-02, 5.4181e-02, -4.3411e-01, 5.9038e-03,\n"," 8.4630e-01, -1.5539e-01, 3.1623e-01, 1.0130e-01, 1.5353e-01,\n"," 2.4451e-01, -1.9247e-01, -1.2670e-02, 1.2449e-01, 7.7742e-02,\n"," -6.7119e-01, -2.9274e-02, -1.0558e-01, 2.2203e-01, -1.4609e-02,\n"," 1.6024e-01, -3.1849e-01, -1.4772e-01, 1.1975e-01, 1.6068e-01,\n"," 7.8020e-02, 3.6274e-01, 1.8564e-01, 1.6025e-01, 2.2517e-01,\n"," -4.3273e-01, 2.1032e-01, 3.4930e-01, 3.8263e-01, -1.7119e-01,\n"," 2.5032e-01, 5.0948e-01, 3.7617e-01, -1.1174e-01, -3.3749e-01,\n"," 1.9723e-01, -1.7074e-01, -2.8823e-02, -4.7160e-01, 9.4035e-02,\n"," 1.6955e-02, -2.0696e-01, 9.9739e-02, -2.0576e-01, -1.2083e-01,\n"," -1.3293e-01, 2.8561e-01, -2.1156e-01, -2.4828e-02, 7.3753e-02,\n"," -9.9816e-02, -4.3120e-01, 4.4179e-01, 4.1298e-01, 1.0279e-01,\n"," 1.0016e-01, -2.4505e-01, -8.2889e-02, 2.5555e-01, -6.8615e-02,\n"," -2.7643e-02, 8.6024e-02, 1.2670e-01, 
[… cell output truncated: raw embedding dump. The cell prints each token or entity span followed by its embedding tensor — '.', 'Thanks', ',', 'Chicago Hilton', ',', 'for', 'a' — each a single-row float tensor of several hundred dimensions, consistent with BERT's 768-dimensional hidden size. Two details are recoverable from the numbers: the two ',' entries print byte-identical vectors, and the two-word span 'Chicago Hilton' receives one embedding for the whole span. The full numeric output is omitted. …]
-6.0955e-01, 4.3559e-02, 9.2938e-02,\n"," -6.8132e-01, 1.2533e-02, 1.9932e-01, 2.4007e-01, -7.0889e-02,\n"," 4.7627e-01, 4.0209e-01, 3.2402e-01, 2.3604e-01, -4.1696e-01,\n"," -3.3027e-01, -5.6188e-01, 4.7798e-01, 3.1419e-01, 1.4704e-01,\n"," -1.7947e-02, 4.9978e-01, -6.2832e-02, -2.0645e-01, 3.8227e-01,\n"," 2.2955e-01, -2.0701e-01, -3.5132e-01, 1.0823e-01, -1.9474e-02,\n"," 2.7179e-01, -1.4539e-01, -1.2640e-01, -2.8944e-02, 3.2080e-01,\n"," -2.7629e-01, 1.0162e-01, 3.5193e-01, 5.4211e-02, -2.3018e-01,\n"," -1.3879e-01, 4.8029e-02, -2.7097e-01, 1.8626e-01, 1.1639e-01,\n"," 6.3488e-02, -3.1306e-01, -1.1445e+00, -5.0344e-02, 6.6426e-03,\n"," -1.9401e-02, -3.2551e-01, -2.6791e-02, 2.8124e-01, 1.2197e-01,\n"," -4.5488e-01, -2.3495e-01, -8.2608e-02, 4.2248e-01, 3.0545e-01,\n"," 1.7438e-01, 1.9974e-01, 2.1834e-01, 1.2738e-01, -3.3408e-01,\n"," 4.4410e-01, -5.4000e-01, -2.1152e-01, -2.6625e-01, -2.3229e-01,\n"," -1.8770e-01, -5.2992e-02, 5.4430e-01, -7.6142e-01, -3.5234e-02,\n"," 2.1700e-01, 5.3426e-02, 5.1466e-02, 2.0147e-01, -8.3735e-02,\n"," -2.0607e-02, -5.9429e-01, 9.8292e-02, 1.8464e-01, 2.9018e-01,\n"," -1.3225e-02, 4.4727e-01, 3.4694e-01, 5.9295e-01, -7.6907e-02,\n"," -3.3563e-01, -3.7522e-02, -2.0567e-01, 3.7593e-02, 6.3959e-01,\n"," -1.2071e-01, -1.5493e-01, -6.7366e-01, 1.3272e-01, 2.1831e-01,\n"," -1.2465e+00, 4.9006e-01, 9.5738e-02, -1.9943e-01, -1.3369e-01,\n"," 2.3173e-02, 1.0562e-01, -4.8005e-01, -1.2307e-01, -3.1234e-01,\n"," 1.4729e-01, -6.0690e-02, -4.1739e-01, 3.7265e-02, -6.0652e-01,\n"," 3.1934e-01, 2.4017e-02, 2.5446e-01, 1.0526e-01, 4.9362e-01,\n"," 2.9498e-02, 1.9185e-01, 2.8104e-01, -3.8310e-01, 2.9010e-02,\n"," -3.3226e-01, -2.2402e-01, -1.2438e-01, -2.9204e-01, 3.0043e-01,\n"," 4.3323e-05, -1.9795e-01, -2.6715e+00, 1.4403e-01, 8.2754e-02,\n"," -4.7888e-02, 1.5393e-01, -4.6024e-01, 5.4568e-01, 4.2123e-02,\n"," 1.5056e-01, -1.1054e-01, 8.1319e-02, -6.8825e-01, 2.3358e-01,\n"," 2.1198e-01, -6.2038e-03, -6.5695e-02]])\n","great: tensor([[ 5.7431e-01, 1.9164e-01, -1.8754e-01, -2.3192e-02, -3.3144e-02,\n"," -4.2427e-01, 9.2702e-02, 1.2195e-01, 2.5497e-02, -1.2869e-01,\n"," 1.7284e-02, -1.5694e-01, 3.2545e-01, 4.3922e-01, -4.2822e-01,\n"," 1.4612e-02, 4.8873e-02, 3.2975e-02, -3.6575e-01, 3.8539e-02,\n"," 1.8049e-01, -1.0000e-01, 3.1012e-02, 2.2641e-01, -5.6927e-03,\n"," 2.4046e-01, -8.3418e-02, -2.2131e-01, -3.5041e-01, 5.2985e-02,\n"," 3.0451e-01, -2.1979e-01, 9.4220e-02, 1.6996e-01, -4.1905e-01,\n"," -3.1938e-01, 2.9158e-02, 1.7880e-02, -3.2741e-01, -2.3340e-01,\n"," 1.3141e-01, -1.6498e-01, -3.6058e-02, 2.1404e-01, -2.2658e-03,\n"," 6.9935e-02, -4.2061e-01, 2.2666e-01, -9.6132e-02, 5.7861e-02,\n"," -2.1683e-01, 1.4229e-01, -8.5008e-02, 1.1579e-01, 1.6202e-01,\n"," -9.0846e-03, -2.5552e-02, -2.8070e-02, -5.0926e-02, -2.1818e-01,\n"," -5.1639e-02, -3.0378e-02, -2.8221e-02, 7.9349e-03, 1.7066e-01,\n"," -2.8445e-01, 4.4001e-01, -9.1570e-02, -5.8364e-01, 4.3963e-01,\n"," -2.0715e-01, -4.3801e-01, 1.5373e-01, -3.9465e-01, 1.8026e-01,\n"," -4.5150e-02, -4.0153e-02, 7.1727e-02, 3.1871e-01, -9.7930e-02,\n"," 3.3969e-01, -7.4856e-02, 1.6387e-02, 4.9908e-01, 8.9668e-03,\n"," 2.2782e-01, -1.8109e-01, -2.5170e-01, -3.0309e-01, 3.5944e-01,\n"," 1.5028e-01, -4.6065e-03, -7.6882e-02, 6.5206e-02, 4.0874e-01,\n"," -1.8183e-01, -9.2809e-02, -1.3372e-01, -4.2534e-01, -4.6305e-01,\n"," 6.0728e-02, -4.7746e-01, 2.4412e-02, 1.9432e-01, -8.1694e-02,\n"," -6.0316e-02, 2.5021e-02, -9.1332e-02, 3.0858e-01, -5.6897e-01,\n"," 4.9072e-01, 2.2801e-01, 1.8385e-02, -3.1081e-01, -1.5558e-01,\n"," 
3.1241e-01, -8.3333e-02, -2.4148e-02, 1.8456e-01, 6.2148e-02,\n"," -1.0950e-01, -1.1845e-01, 2.0308e-01, 7.3062e-01, 3.2526e-01,\n"," 8.6733e-02, -1.0712e-01, -4.4861e-01, -3.8973e-01, -3.5150e-01,\n"," 9.1467e-02, 2.7658e-01, 1.3101e-01, 3.6151e-01, -3.8670e-01,\n"," 2.8540e-01, -2.3473e-02, -3.6605e-01, -1.4143e-01, -4.5606e-02,\n"," -2.3581e-02, 3.2713e-01, -8.6155e-01, -1.8926e-01, 2.5121e-01,\n"," -1.0632e-01, 9.9057e-02, -4.4983e-02, -2.3239e-01, 1.5129e-01,\n"," 2.8624e-01, 3.0741e-01, -4.3489e-01, -5.7859e-01, -4.0522e-02,\n"," 1.3654e-01, 2.0398e-02, -1.8005e-01, 3.1941e-01, 2.6968e-01,\n"," 3.0523e-01, 1.8358e-01, 9.0000e-02, 4.4626e-02, -1.3558e-01,\n"," 3.6986e-02, -1.0682e-01, 4.9628e-01, 7.2158e-02, 1.6356e-03,\n"," 1.1443e-01, -1.5441e-01, 5.6293e-01, 2.2562e-01, -3.0484e-01,\n"," 1.1625e-01, 4.0329e-01, 1.7270e-01, 7.0206e-02, -9.0116e-02,\n"," -2.8653e+00, -1.2126e-01, 2.0799e-02, 8.8935e-02, 2.1779e-01,\n"," -4.0015e-02, 2.3575e-01, -3.4777e-01, 1.5727e-01, -1.3969e-01,\n"," -2.3802e-01, -1.9463e-01, -1.9539e-01, 2.3705e-01, 4.5818e-01,\n"," -4.5655e-02, 3.4435e-01, 1.6476e-01, 2.2664e-01, 1.0557e-01,\n"," -9.7499e-02, 2.1227e-01, 2.7376e-01, 1.4411e-01, -1.3117e-01,\n"," 8.2428e-01, 1.2286e-01, 1.8777e-01, -1.0293e-01, 2.3548e-02,\n"," -5.0193e-01, 3.8760e-01, 3.8828e-01, -2.9236e-01, 1.8100e-01,\n"," -1.7188e-01, 1.3353e-01, -4.5083e-02, -6.3074e-01, 1.7914e-03,\n"," -3.2464e-02, -3.6643e-01, -2.8939e-01, 5.3406e-02, -1.8828e-01,\n"," -4.9695e-01, 2.6187e-01, 2.6533e-01, 1.1272e-01, -1.8282e-01,\n"," -2.5481e-02, -2.1176e-01, 1.3476e-01, 3.2038e-01, -2.4023e-01,\n"," 1.0125e-01, 8.6988e-02, 1.1939e-01, 9.6572e-02, 3.8972e-02,\n"," -2.8083e-01, 5.8705e-01, 4.6411e-01, 4.3399e-02, -4.4295e-01,\n"," -3.7753e-02, 2.7768e-01, 3.2619e-01, 3.3698e-01, -3.9053e-02,\n"," 9.4018e-03, -6.8362e-01, 1.8676e-01, -2.6045e-01, 1.2021e-01,\n"," -2.1374e-01, -9.2004e-03, -1.5286e-01, 2.5554e-01, -1.3878e-01,\n"," 2.5563e-01, 2.2613e-01, 5.3545e-01, -3.1555e-01, 5.7766e-02,\n"," 4.2045e-01, 1.6658e-01, 2.4343e-01, -8.1519e-02, 5.6482e-02,\n"," 1.3430e-01, 5.1277e-02, 6.7104e-02, -1.1842e+00, -1.5370e-02,\n"," -5.0485e-01, 3.3094e-01, -8.9898e-02, 2.7565e-01, -2.9571e-01,\n"," -2.7504e-01, 4.5342e-01, -5.5138e-01, -1.3167e-01, 5.5663e-01,\n"," -5.2889e-01, -4.5397e-01, -3.9627e-01, 6.8161e-02, -3.0560e-01,\n"," 1.7183e-02, 2.3824e-03, 1.4206e-01, -2.8351e-02, 1.9707e-01,\n"," 2.9646e-02, 2.6521e-01, 2.2559e-01, -5.6359e-01, -1.1023e-01,\n"," -1.8744e-01, 1.9778e-01, 1.3378e-01, -1.0447e-01, -5.2405e-02,\n"," -1.0277e-01, 7.7495e-02, 5.3203e-02, -2.2121e+00, -1.7573e-01,\n"," 4.0557e-01, -5.6109e-01, 1.9904e-01, -1.4364e-01, 4.1440e-02,\n"," 1.3987e-01, -2.4952e-01, 3.8494e-01, 3.2943e-01, -3.8643e-01,\n"," 9.3265e-03, 2.3910e-01, -2.4171e-02, -1.0735e-01, -1.7311e-01,\n"," -8.2682e-02, -4.2769e-01, 1.5029e-01, -6.5501e-02, -5.4652e-02,\n"," 1.1426e-01, -1.0944e-01, 4.0805e-01, 2.3541e-01, -1.1498e-01,\n"," -1.1809e-02, -2.4777e-01, 3.3199e-01, -1.4205e-01, -3.2046e-01,\n"," 5.5121e-03, 2.7528e-02, 3.6725e-01, -8.5507e-02, 2.1336e-01,\n"," 7.2632e-03, 5.7979e-01, -1.3616e-01, 3.9562e-01, 2.6197e-01,\n"," -1.6144e-01, -1.5096e-01, 1.7043e-01, -1.9630e-01, 1.3453e-01,\n"," -1.9526e-01, 2.4261e-01, 2.4373e-02, 3.1630e-01, -1.8591e-01,\n"," 2.0888e-01, -6.4573e-02, -1.7874e-01, -3.2162e-01, 3.3620e-02,\n"," 4.6140e-01, -9.1783e-02, -2.8741e-01, 5.9026e-02, -9.2735e-02,\n"," 3.0668e-01, 4.2574e-01, -3.6158e-01, 2.0720e-01, -4.9980e-02,\n"," -3.0522e-01, 5.9532e-02, 2.0595e-01, 1.4293e-01, 
-4.3341e-01,\n"," -1.1885e-01, -6.4778e-01, -3.0789e-01, 8.4337e-02, -2.2820e-01,\n"," 5.0785e-02, 3.8240e-01, -1.1523e-01, 4.2097e-02, -1.7712e-01,\n"," 6.6614e-02, 2.2788e-01, 5.7668e-02, -5.3699e-02, -8.3347e-02,\n"," -4.4803e-01, 6.3763e-02, -1.1602e-01, 1.3357e-01, -8.9365e-02,\n"," -3.0433e-02, 1.2558e-01, 2.6552e-01, -5.3965e-02, 4.0283e-01,\n"," -7.2930e-02, 3.5485e-01, 2.3743e-01, -4.0119e-01, -2.4222e-01,\n"," 3.6762e-01, 2.2012e-01, -3.4634e-02, 2.2409e-01, -2.8511e-01,\n"," 2.4974e-02, -3.1040e-02, -9.0689e-02, 1.5612e-01, -9.2719e-02,\n"," 3.5948e-01, 2.3376e-01, -2.5735e-02, -3.0555e-01, -1.4845e-01,\n"," 4.0779e-01, -1.7079e-01, 2.4068e-01, 7.4956e-02, 1.6568e-01,\n"," 1.6340e-02, 1.3469e-01, -3.9102e-03, 9.5507e-02, -1.6751e-01,\n"," -1.8609e-01, -3.2093e-01, -1.8883e-01, 1.4392e-01, -1.5248e-01,\n"," 1.1331e-02, -1.3478e-01, -1.9432e-01, 9.7377e-02, 8.1026e-02,\n"," 1.7335e-01, -5.4757e-02, 4.5375e-01, 1.2474e-01, 3.4753e-01,\n"," -6.7031e-01, 2.1889e-01, 1.0351e-01, 5.8670e-01, -1.5927e-01,\n"," 3.0422e-01, 2.2164e-01, 3.1795e-01, 9.5430e-02, -4.2168e-01,\n"," 1.0080e-01, -2.8956e-01, -2.2010e-02, -3.6277e-02, 1.7971e-01,\n"," 7.0761e-02, -2.3480e-01, 1.6407e-01, -1.3584e-01, -2.0928e-01,\n"," 8.0733e-02, 4.6044e-01, -2.1567e-01, -5.7440e-02, 2.6239e-02,\n"," 6.5092e-02, -6.5842e-01, 3.6330e-01, 7.6407e-01, 1.3089e-02,\n"," 1.1518e-01, 1.7128e-01, -1.3274e-01, 2.8134e-01, -2.0158e-01,\n"," -3.0554e-01, 8.8005e-03, 2.5132e-02, -2.7891e-03, -3.4370e-01,\n"," 2.3581e-01, -1.0477e-01, 2.6807e-01, 3.4876e-01, -4.0320e-01,\n"," -1.1308e-01, 4.4866e-01, 2.0838e-01, 3.6975e-02, -1.1645e-01,\n"," -3.3967e-01, -1.8935e-01, -3.1574e-01, -1.3743e-01, 4.3977e-01,\n"," -3.0309e-01, -8.0291e-02, 5.1819e-01, 3.6785e-01, -3.3126e-01,\n"," -2.4524e-01, 8.6269e-02, -9.1076e-03, -2.4349e-01, 2.2232e-01,\n"," 2.7943e-01, -2.4525e-01, -9.5140e-02, -1.1430e-01, -1.2572e-01,\n"," -1.6906e-01, -2.2465e-01, 6.8530e-03, -1.4381e-03, 3.3728e-01,\n"," 3.9097e-02, 2.8640e-01, -3.3373e-01, -7.4484e-01, -1.2005e-01,\n"," -5.3801e-02, -8.8926e-02, -1.3052e-01, -2.0915e-01, -2.5706e-01,\n"," -2.0149e-01, -9.2171e-02, 3.8527e-02, 7.4536e-02, 2.8930e-01,\n"," -2.2077e-01, 6.1283e-02, 5.1312e-01, 4.1752e-02, -1.6167e-01,\n"," 1.0927e-02, -2.5302e-01, -1.9158e-02, 1.2199e-01, -2.6958e-01,\n"," -2.3878e-01, 4.2105e-02, 2.9992e-01, -1.2303e-02, -1.5718e-02,\n"," 4.4536e-01, 2.6495e-01, -1.1164e-02, -2.0164e-01, 5.0153e-02,\n"," 8.6035e-02, -3.9584e-02, -3.3959e-02, -7.9913e-01, -2.4635e-01,\n"," 2.2661e-01, -2.1404e-01, 4.1934e-02, -2.2292e-01, 3.6141e-01,\n"," 2.4583e-01, -8.3686e-02, 9.4343e-02, 2.2001e-01, 1.2757e-01,\n"," -1.0038e-01, 2.9767e-01, 3.6522e-01, -9.7050e-03, 2.7892e-01,\n"," 1.2481e-02, 3.4263e-01, -2.7831e-01, -1.1027e-01, -3.8570e-02,\n"," -5.6544e-01, -3.0504e-01, -1.5216e-01, 8.4985e-01, 4.3030e-01,\n"," -2.1809e-01, -5.9455e-02, 2.3733e-01, -4.1538e-01, -1.6242e-01,\n"," -1.9986e-02, 8.5527e-03, -8.8672e-02, 2.6909e-01, -6.8136e-03,\n"," -2.5903e-01, 8.6179e-03, -2.3708e-01, 3.0168e-01, -2.9473e-01,\n"," 3.9822e-01, -5.3824e-01, -4.7912e-02, 8.1808e-02, -1.3850e-01,\n"," 1.1161e-01, -1.3888e-01, -2.2186e-01, 2.6530e-01, 5.7524e-02,\n"," 5.7438e-02, 3.1535e-02, 6.4863e-02, -3.4873e-01, -3.5817e-01,\n"," -6.5261e-02, 4.2077e-01, -6.1623e-01, 1.6331e-01, 5.3552e-02,\n"," -1.5918e-01, 1.6303e-01, 1.7387e-01, 3.4868e-01, -2.3876e-03,\n"," 3.0844e-01, 3.1976e-02, 3.8208e-01, 4.2314e-01, -6.6127e-01,\n"," -1.0415e-01, 3.0180e-01, 4.0130e-01, 1.6690e-01, 1.6062e-01,\n"," -3.5926e-01, 3.8968e-01, 
1.9234e-01, -4.3776e-01, 4.1277e-01,\n"," 1.6816e-02, -7.9441e-02, -1.9234e-01, 1.2213e-01, 1.1859e-02,\n"," 1.8517e-01, 1.2426e-01, -2.8530e-01, 1.9071e-01, 5.3927e-02,\n"," -4.6412e-01, 8.3037e-02, 2.1964e-01, 4.4940e-02, -4.9034e-02,\n"," 1.4855e-01, -7.2764e-02, -1.5266e-02, 5.9917e-02, 2.3634e-01,\n"," 1.2532e-01, -2.2831e-01, -1.0537e+00, 4.6518e-03, -1.6454e-01,\n"," -2.9918e-01, -3.8454e-01, 4.1624e-01, 4.9712e-01, -1.7687e-01,\n"," -1.9686e-01, -3.8384e-01, 1.7435e-01, 7.8088e-02, 1.4085e-01,\n"," 1.1658e-01, -9.4390e-02, -2.6349e-01, 2.7308e-01, -9.3847e-02,\n"," 2.7568e-01, -4.9938e-01, -1.2556e-01, 6.0537e-02, -1.4510e-01,\n"," -2.5401e-01, -9.9282e-02, 4.0180e-01, -4.0304e-01, -4.1169e-02,\n"," -3.8168e-01, 1.2327e-01, -3.6538e-01, 7.6268e-02, -6.2857e-01,\n"," -5.3653e-01, -2.2818e-02, 2.8053e-01, -2.4774e-01, 3.7471e-01,\n"," -2.1917e-01, 8.6387e-02, 3.1972e-01, 6.0891e-01, -1.2907e-02,\n"," -2.7872e-01, -4.1747e-03, -7.6699e-02, 1.0740e-02, 6.6301e-01,\n"," -4.1128e-01, 5.9723e-02, -2.3514e-01, -2.0318e-01, 2.0505e-01,\n"," -1.2864e+00, 4.0370e-01, -8.2330e-02, -3.3015e-01, -1.4985e-01,\n"," -7.2439e-02, -2.0961e-03, -1.0174e-02, -8.6985e-02, 1.3807e-01,\n"," 3.0949e-02, 1.8596e-01, -1.7005e-01, 1.9757e-01, -1.5595e-01,\n"," 7.6485e-02, -9.6714e-02, 2.2981e-01, -1.3113e-01, 4.9814e-01,\n"," -2.8492e-01, 4.3790e-02, -6.4279e-02, -3.7948e-02, 2.8154e-01,\n"," -4.0435e-01, -8.3353e-02, -2.2007e-01, 4.9082e-02, -3.7620e-02,\n"," 7.0862e-02, -1.1645e-01, -2.6428e+00, -1.6954e-01, -2.5078e-02,\n"," -1.8765e-01, 6.9015e-02, 3.3812e-02, 3.8982e-01, 6.2746e-02,\n"," -2.8406e-01, -3.3360e-01, 1.5519e-01, -4.0652e-01, 1.1808e-01,\n"," 8.5500e-02, -3.2080e-02, -1.2888e-01]])\n","stay: tensor([[ 3.0475e-01, 5.6359e-02, -2.7604e-02, -1.9285e-01, -2.1676e-01,\n"," -1.9165e-01, 4.9000e-01, 3.3701e-02, 2.1618e-01, -3.0227e-01,\n"," -3.6060e-01, -2.6145e-02, 1.4889e-01, -1.4172e-02, -3.3832e-01,\n"," -3.0139e-01, 6.9056e-02, 1.4431e-01, 6.5665e-02, 2.9249e-01,\n"," 3.5601e-01, 4.5449e-02, 2.6788e-01, -1.9088e-02, 1.0037e-01,\n"," 8.2252e-02, -2.9565e-01, 3.3226e-02, -1.7069e-01, 4.3960e-03,\n"," -6.9311e-02, -7.9257e-02, -1.1397e-01, 2.7313e-01, 4.9994e-03,\n"," 2.2871e-03, 2.7392e-02, -2.6215e-02, -2.0427e-01, -1.0333e-01,\n"," -3.7773e-02, -1.1644e-01, -3.0812e-03, -9.1205e-02, 3.0341e-01,\n"," -1.7735e-01, -2.4123e-01, 1.2794e-01, -3.4580e-01, -3.9530e-02,\n"," -3.1661e-01, 1.7872e-01, 2.1222e-01, 8.8524e-02, -7.4009e-02,\n"," 6.1652e-02, 2.8390e-01, 1.4206e-02, -1.2332e-01, 3.2350e-02,\n"," -7.1194e-02, -1.2116e-01, 8.0719e-02, 2.1994e-01, -2.4487e-02,\n"," 1.4040e-01, 2.6718e-01, -2.1045e-01, -3.0027e-01, 1.9599e-01,\n"," -1.5498e-01, -2.0514e-01, 6.4325e-02, -1.0655e-02, 3.7282e-02,\n"," -1.1064e-02, -3.1705e-01, 2.5362e-01, 2.1484e-01, 1.2662e-01,\n"," 1.2297e-01, 2.5643e-01, -2.0218e-01, 2.6500e-01, 6.0977e-02,\n"," 9.7933e-02, -1.8008e-01, -1.7516e-01, -1.2079e-01, 2.8691e-01,\n"," -3.1124e-01, -9.1477e-03, 9.3638e-02, 3.1453e-01, 8.2289e-02,\n"," -2.6727e-01, -5.1779e-02, 1.3083e-01, 2.3484e-02, -5.0914e-02,\n"," -2.2548e-01, -4.2878e-01, 7.9015e-02, 2.0434e-01, -1.4057e-01,\n"," 5.6614e-02, -1.4631e-01, -3.7978e-02, 2.2757e-01, -8.2352e-01,\n"," 3.0375e-01, -7.4646e-02, 6.4290e-02, -4.0537e-02, -2.4714e-02,\n"," 3.4173e-01, 1.9767e-02, -4.1221e-02, 1.5369e-01, 2.3468e-02,\n"," -3.9274e-01, -1.9642e-01, 1.2407e-01, 6.5294e-01, 1.7042e-01,\n"," 1.0289e-01, 2.0161e-02, -3.0910e-01, -1.5574e-01, -4.2241e-01,\n"," -3.0305e-02, 2.7232e-01, 4.6674e-02, 1.5858e-01, -1.0775e-01,\n"," 
2.4423e-01, 4.0037e-02, 1.7267e-02, -2.3609e-01, -1.4645e-01,\n"," 2.9392e-02, 4.7722e-01, -1.0637e+00, -1.5893e-01, 5.1442e-01,\n"," -8.7774e-02, 2.3794e-02, -2.2263e-01, 1.6081e-01, -9.1426e-02,\n"," 1.7117e-01, 3.9864e-02, -1.0562e-01, -2.1436e-01, -5.7568e-01,\n"," -2.7094e-01, -5.8258e-02, 1.7669e-01, 2.2346e-01, 3.7170e-01,\n"," 2.5159e-01, 1.5615e-01, 3.8084e-01, 8.7079e-02, -1.3320e-01,\n"," 8.0391e-02, -2.3105e-01, 4.3232e-01, 1.4812e-01, 1.6841e-01,\n"," -2.1615e-01, -1.8324e-01, 5.2329e-01, 3.6613e-01, -3.0057e-01,\n"," 1.4418e-01, 3.8161e-01, 1.0453e-01, -1.4400e-01, -2.4235e-01,\n"," -3.0416e+00, 1.0280e-01, -1.3614e-01, -1.1066e-01, -5.4239e-02,\n"," 5.6517e-02, 1.8853e-01, -4.2164e-01, 1.5300e-01, -9.4600e-02,\n"," -8.5063e-02, 5.8901e-02, -3.4300e-01, 1.8420e-01, 7.9056e-02,\n"," -4.5342e-01, -1.1119e-01, 1.6009e-01, 4.5335e-02, 2.4542e-01,\n"," 2.5844e-01, -1.8898e-01, 8.2355e-02, 2.3886e-01, -1.8211e-02,\n"," 1.0841e+00, 1.3233e-01, -2.5181e-01, 1.8900e-01, 7.4329e-02,\n"," -7.1764e-01, 6.9680e-01, 1.8978e-01, -2.5828e-01, 2.0615e-01,\n"," -2.7426e-01, 2.3385e-01, -3.2775e-01, -3.7878e-01, 6.8414e-02,\n"," 1.1855e-01, 2.1191e-02, -3.3991e-01, -6.5007e-02, -6.4582e-02,\n"," -7.2403e-01, 2.8704e-01, -3.6623e-02, 2.3528e-01, -1.8515e-01,\n"," -2.3418e-01, -3.4477e-01, 4.5353e-02, 2.8982e-01, -1.7479e-01,\n"," 2.3007e-01, -4.1198e-02, -1.6896e-01, -3.4254e-01, -1.2006e-01,\n"," 6.6093e-02, 1.2104e-01, 4.3662e-01, 1.9159e-01, -3.1039e-01,\n"," -2.8118e-02, 4.0240e-01, -2.9295e-03, 4.7831e-01, -2.1923e-01,\n"," 1.3698e-02, -7.4794e-01, 4.8226e-02, -2.2438e-01, -4.6369e-02,\n"," -1.2814e-01, 2.4358e-02, -1.4319e-01, 3.0668e-01, -3.7682e-03,\n"," 2.0698e-01, 2.2889e-01, 6.6213e-01, -1.0151e-01, -2.0170e-01,\n"," 1.9460e-01, -9.7419e-03, 1.3862e-02, 2.7271e-02, -2.5903e-01,\n"," -4.6063e-02, -1.1153e-01, 5.3272e-02, -1.2521e+00, -1.2284e-01,\n"," 1.0387e-01, 5.2780e-01, 1.8999e-01, 2.5649e-01, -1.1308e-01,\n"," 3.1139e-02, 3.9057e-02, -2.4997e-01, 2.2854e-01, 4.7960e-01,\n"," -4.8438e-01, -2.7522e-01, -3.5835e-01, 2.6246e-01, -3.6271e-01,\n"," -3.2132e-02, -1.4199e-01, 9.2578e-02, 1.4855e-01, 1.4184e-01,\n"," 8.7581e-02, 2.9523e-01, 3.2039e-01, -5.1024e-01, -4.3607e-02,\n"," -2.2706e-01, -1.0194e-01, 7.4125e-02, -1.9225e-02, 7.6662e-02,\n"," 1.0690e-01, 1.5526e-02, -2.2056e-02, -2.1569e+00, 2.6423e-03,\n"," 2.2293e-01, -5.2899e-01, 1.7528e-01, -2.3841e-01, 1.2040e-01,\n"," 9.1807e-02, -3.0850e-01, 6.4843e-03, 1.2229e-01, -4.0442e-01,\n"," 4.9645e-02, 2.2126e-01, 7.7104e-02, 2.2524e-01, 1.6828e-01,\n"," -1.2970e-01, -3.8679e-01, 8.6409e-02, -1.8827e-01, 3.9675e-02,\n"," 5.0894e-01, -2.3341e-01, 2.4244e-01, 4.6355e-01, -1.3526e-01,\n"," -2.5299e-03, -3.6449e-01, 1.2798e-03, -2.9312e-01, -2.1008e-01,\n"," -9.0415e-02, -2.9668e-01, 1.5017e-01, 1.4907e-01, 1.2379e-01,\n"," 1.4916e-01, 6.6189e-01, -1.4536e-01, 5.6482e-02, 4.2772e-01,\n"," 1.7789e-01, -3.8356e-03, 5.4739e-02, -2.9682e-02, 1.9472e-03,\n"," -1.1598e-01, 3.4623e-02, 3.4540e-01, 1.8058e-01, -1.7671e-02,\n"," 1.4040e-01, -1.7936e-01, -9.7992e-02, -2.9644e-02, 1.2041e-01,\n"," 2.7977e-01, 1.9510e-01, -3.7284e-02, 7.9157e-02, -8.2692e-02,\n"," 1.8387e-02, 2.2079e-01, -3.8164e-01, 5.2447e-02, -1.4043e-01,\n"," -1.3637e-01, -1.6422e-01, 2.5074e-01, -1.4910e-01, -1.0704e-01,\n"," -1.3807e-01, -8.1637e-01, -4.7416e-01, 4.2311e-01, -3.2444e-01,\n"," -1.6352e-01, 4.0114e-01, -1.1745e-01, 1.7039e-01, -5.1924e-02,\n"," 2.5855e-02, 3.5829e-01, 1.6106e-02, -3.7804e-03, 5.4749e-01,\n"," -2.5566e-01, 9.0195e-02, -2.2395e-01, -6.0129e-02, 
5.1541e-02,\n"," 1.2938e-01, 5.2610e-03, 2.1147e-01, 2.2453e-01, 4.1287e-01,\n"," -5.2111e-01, 1.9885e-01, -9.0872e-02, -8.0238e-02, -1.7539e-01,\n"," 1.2844e-01, 1.1240e-01, 1.9449e-01, 1.5447e-01, -8.4929e-01,\n"," -1.0477e-02, -2.8405e-01, 8.3808e-02, 6.9209e-02, -2.5824e-02,\n"," 2.7522e-01, 6.9259e-02, 8.5380e-02, -5.2347e-01, 2.3403e-02,\n"," 4.5715e-01, -1.2982e-01, 3.3678e-01, 1.8335e-01, 1.6673e-01,\n"," 1.8511e-01, -1.5972e-01, -1.1676e-01, -5.7253e-02, -9.0901e-02,\n"," -3.8977e-01, -1.6337e-01, 8.0570e-03, -5.0973e-02, 9.1482e-02,\n"," 2.5262e-02, -2.1527e-01, -2.8551e-01, 2.9124e-02, 2.8063e-01,\n"," 3.1237e-01, -2.8631e-02, 2.7609e-01, 4.9781e-02, -1.0251e-01,\n"," -5.5993e-01, 9.1842e-02, 8.5154e-02, 2.0962e-01, -2.5965e-01,\n"," 1.4061e-02, 1.6528e-01, 4.2732e-01, 2.7239e-01, -2.4391e-01,\n"," 3.6977e-01, -1.8716e-01, -7.5479e-02, -3.2414e-01, 1.7257e-03,\n"," -1.4906e-02, -1.7603e-02, 3.5783e-03, -3.1280e-01, 1.4611e-01,\n"," -1.1202e-02, 3.4059e-01, -3.0817e-02, 9.7597e-02, 6.6521e-02,\n"," -4.0500e-02, -5.4269e-01, 4.3523e-02, 4.6876e-01, -1.0835e-01,\n"," 9.8870e-02, 2.0054e-01, -7.0503e-02, 1.3906e-01, -1.1977e-01,\n"," -4.3026e-01, 9.3799e-02, -1.2848e-01, 2.0636e-01, -2.6579e-01,\n"," -6.2737e-02, -2.6968e-01, 3.7450e-01, 1.2909e-01, -2.8626e-01,\n"," -4.4842e-01, 3.3727e-01, 3.5825e-01, 2.0317e-01, -9.9578e-02,\n"," -4.0766e-01, -2.2971e-01, -1.2175e-01, -1.2616e-03, 1.5693e-01,\n"," -1.1271e-01, 3.3961e-01, 3.9748e-01, 2.3247e-01, -1.9204e-01,\n"," -3.5251e-01, 1.9646e-01, 1.0590e-01, -1.4477e-01, 3.2860e-01,\n"," 2.7868e-01, -3.0529e-01, -1.2858e-01, 1.1552e-01, -2.6180e-01,\n"," -3.3588e-01, -9.6102e-02, 1.8334e-01, -1.0177e-02, 3.2758e-01,\n"," -5.0906e-02, 3.7359e-01, -4.0904e-01, -2.3900e-01, -4.5981e-01,\n"," 3.2548e-01, -4.1850e-02, 2.0103e-01, -2.8601e-02, -5.0207e-01,\n"," -8.7608e-02, 1.4866e-01, -3.6716e-03, -1.1642e-02, -2.8697e-02,\n"," -2.5781e-01, 6.0971e-02, 4.2931e-01, 1.4112e-01, -4.1469e-01,\n"," 1.3608e-02, -3.4631e-01, -3.5479e-01, -5.1323e-02, -2.9481e-01,\n"," -3.9405e-01, -1.3861e-01, -1.3201e-01, -9.7586e-02, -5.4282e-03,\n"," 7.0579e-01, 2.6976e-01, 7.7209e-02, -3.4969e-02, -1.2567e-01,\n"," 2.6743e-01, -1.2320e-01, -1.7116e-01, -4.3149e-01, -3.4478e-01,\n"," 3.4015e-01, 2.8494e-01, -9.1116e-02, 7.1580e-03, 2.5925e-01,\n"," 4.0651e-01, 1.2284e-02, 9.6674e-02, 2.3500e-01, 1.9087e-01,\n"," 4.3552e-02, 2.7167e-01, 1.6385e-01, -1.4139e-01, 4.1448e-02,\n"," -1.5096e-01, 6.0363e-02, -3.7279e-01, 2.3603e-01, 3.8137e-01,\n"," -3.6169e-01, -1.7727e-01, 1.1121e-01, 9.9978e-01, 5.3659e-01,\n"," -1.6146e-01, -2.5422e-01, 3.8176e-01, -1.7197e-01, 3.7783e-02,\n"," -5.0422e-03, -2.5781e-01, -2.6017e-01, 2.8333e-02, -1.1817e-01,\n"," 8.5938e-02, 3.1072e-01, 1.1897e-01, 4.0625e-01, -2.4935e-01,\n"," 3.0252e-01, -4.2784e-01, 1.3383e-01, 3.5118e-02, 2.7027e-01,\n"," 8.5636e-02, -2.5121e-02, -4.7674e-02, 3.2693e-01, 2.1119e-01,\n"," -3.4612e-02, 4.5811e-02, 8.0860e-02, -4.5689e-01, -2.0858e-02,\n"," 2.2850e-01, 4.3421e-01, -5.4575e-01, 1.0054e-01, 5.7462e-02,\n"," -5.5616e-01, 1.2933e-02, 5.0027e-01, -3.3040e-01, -2.0912e-01,\n"," 2.5622e-01, 2.9408e-01, 2.3228e-01, 1.2110e-01, -5.0553e-01,\n"," 1.4128e-01, 3.7934e-02, 1.9310e-01, 2.1726e-01, 2.3594e-01,\n"," -1.4181e-01, 3.1117e-01, 1.1840e-01, -8.3246e-02, 4.2331e-01,\n"," 9.7039e-03, -3.8864e-01, -5.9477e-02, 2.1229e-01, 2.6222e-02,\n"," 1.8022e-01, -1.3997e-01, -2.6306e-01, -6.5664e-02, -4.0136e-02,\n"," -2.4329e-01, 2.1671e-01, 1.7996e-01, 1.3994e-01, -1.9297e-01,\n"," -3.1866e-01, -6.6364e-02, 
-2.3854e-01, -1.5514e-01, 2.9210e-01,\n"," 1.5848e-01, -2.5062e-01, -1.1472e+00, -8.3809e-02, 2.5902e-02,\n"," -6.9011e-02, -2.3920e-01, 1.1408e-02, 3.3316e-01, 1.9724e-01,\n"," -3.9588e-01, -3.6583e-01, 1.2669e-01, 3.0471e-01, 4.9315e-01,\n"," 9.4052e-02, -7.2203e-02, 2.6788e-02, 2.0846e-01, -3.9496e-01,\n"," 3.4240e-01, -5.6771e-01, -2.2639e-01, 1.9587e-01, -3.5553e-02,\n"," -7.1803e-02, -1.4699e-01, 3.7449e-01, -4.8030e-01, -2.1795e-01,\n"," -1.4971e-01, 1.4112e-01, 1.3875e-01, 1.9227e-01, -3.9241e-01,\n"," 7.1393e-02, -1.9654e-01, 3.0894e-01, 1.8944e-01, 2.1121e-01,\n"," 8.6905e-02, 9.0270e-02, 2.9329e-01, 7.7499e-01, -3.7485e-01,\n"," -1.8080e-01, -6.5290e-02, 4.5483e-04, 1.8948e-01, 3.7782e-01,\n"," -2.8742e-01, 2.2605e-02, -1.2761e-01, -7.6839e-02, 8.6998e-02,\n"," -1.1674e+00, 2.7771e-01, 3.3645e-02, -1.8896e-01, -4.8772e-02,\n"," -1.5474e-01, 1.2255e-01, -3.8284e-01, -1.2495e-02, -8.7996e-02,\n"," 7.3786e-02, 2.7042e-01, -2.2702e-01, -3.3917e-01, -4.2124e-01,\n"," 3.5238e-01, -3.0693e-02, -9.3727e-03, 1.3420e-01, 1.1853e-01,\n"," 1.4671e-02, -1.6529e-01, 3.6840e-02, -5.6659e-02, 8.4581e-02,\n"," -3.1482e-01, -5.6638e-02, -2.6329e-01, 1.0451e-01, 9.8631e-02,\n"," -1.5277e-01, -1.0151e-01, -2.9521e+00, -6.9152e-02, 1.9580e-01,\n"," 5.0828e-02, -2.3532e-02, -1.7281e-01, 3.9476e-01, -7.4202e-02,\n"," -2.8629e-02, -2.3775e-01, 4.7718e-02, -2.5063e-01, 2.0703e-01,\n"," -3.6303e-02, -4.6855e-01, 7.0360e-02]])\n","!: tensor([[ 2.4765e-01, -1.1723e-01, -2.4293e-01, -2.1635e-01, -1.3210e-01,\n"," -4.7216e-01, 6.5651e-01, -6.5096e-02, 1.0135e-01, -4.0674e-01,\n"," -3.4119e-01, -1.4655e-02, -1.5393e-01, 4.0751e-01, -8.5867e-02,\n"," 1.1310e-01, -3.1916e-04, 2.7419e-01, 3.1587e-02, 1.9669e-01,\n"," -1.0966e-02, 5.8827e-02, 3.0703e-02, 1.2696e-01, 3.8363e-01,\n"," -1.4981e-02, 5.6733e-02, -2.1438e-01, -4.1531e-01, 2.5270e-01,\n"," 3.1749e-01, -4.7157e-02, -5.2512e-01, 2.5751e-01, -5.7355e-01,\n"," -7.5381e-02, 2.0764e-01, 2.0120e-01, -2.9802e-01, -1.6422e-01,\n"," -2.7132e-01, -3.6882e-01, -7.0249e-02, 1.9728e-01, -6.0711e-02,\n"," -3.6120e-01, -5.1592e-01, -1.2106e-01, -5.7533e-01, 9.4820e-02,\n"," -1.2780e-01, 2.2365e-01, 8.9924e-03, 9.7074e-02, 1.5584e-01,\n"," -6.9135e-02, -6.8671e-02, -3.1125e-01, -2.4214e-01, -2.4834e-01,\n"," 2.7670e-01, 2.0801e-01, 2.5183e-01, -9.3627e-02, -4.2075e-02,\n"," 5.7245e-02, 3.0578e-01, 2.8017e-01, -8.0165e-01, 2.2216e-01,\n"," 8.7582e-02, -3.3201e-01, 3.9753e-01, 9.5786e-02, 2.9013e-01,\n"," 3.8791e-02, -5.8178e-02, 4.5687e-01, -1.0820e-01, -2.5800e-01,\n"," 2.0343e-01, 1.6860e-01, 2.0440e-01, 5.3335e-01, -2.1836e-01,\n"," -7.4015e-02, 9.0435e-02, -3.1666e-01, -2.5572e-01, 7.2074e-01,\n"," 9.9923e-02, -2.0799e-01, 1.6978e-01, 2.8869e-03, 5.8025e-01,\n"," -2.5313e-01, 6.4398e-02, 9.2205e-02, -6.1625e-02, -2.2648e-01,\n"," 1.1488e-01, -6.1813e-01, 5.3098e-02, 1.7602e-01, -6.6378e-02,\n"," 1.1510e-01, 4.8208e-01, 8.9769e-02, 2.7152e-01, -5.2829e-01,\n"," 3.1391e-01, -4.6735e-02, 2.5152e-01, -4.0532e-01, -1.3971e-01,\n"," 2.9883e-01, 3.8366e-01, 3.8822e-01, 2.2172e-01, 5.6543e-02,\n"," 1.0712e-02, -8.0983e-02, 4.5322e-01, 7.1336e-01, 9.0812e-02,\n"," 3.7677e-02, -6.9215e-02, -9.3699e-02, -2.7645e-01, -2.4348e-01,\n"," -6.0003e-02, 5.8002e-01, 2.2850e-01, -4.6147e-02, -3.1597e-01,\n"," 2.2554e-01, 2.2759e-01, -2.6675e-01, -3.9104e-01, -4.2697e-02,\n"," 1.7684e-01, 3.0451e-01, -7.0662e-01, -1.8984e-01, 3.1687e-01,\n"," -2.2923e-01, -4.6172e-02, -2.9228e-01, 2.1640e-01, -1.0876e-01,\n"," -6.8914e-02, -3.1099e-01, -3.1099e-01, -7.4027e-01, -5.3075e-01,\n"," 
-3.4151e-01, -3.3379e-01, 1.4549e-01, 1.7762e-01, 6.1169e-01,\n"," 4.3505e-01, -5.4240e-02, 8.0837e-02, 1.5249e-01, -7.1513e-02,\n"," -3.3862e-01, -1.2946e-01, 1.0066e+00, -1.4760e-01, 1.7320e-01,\n"," -1.4561e-01, -2.9352e-01, 8.6265e-01, 7.8968e-01, 2.0555e-01,\n"," 1.1266e-01, 5.9774e-01, 1.0538e-01, -3.8357e-02, -3.8097e-01,\n"," -2.8720e+00, 6.1334e-02, 1.2403e-01, -1.9333e-01, 2.6971e-01,\n"," -2.8630e-02, 4.2030e-01, -3.9729e-01, 7.3100e-02, -3.1910e-01,\n"," -6.9828e-02, -2.2634e-01, -2.9546e-01, 1.0499e-01, 4.6602e-01,\n"," -1.8561e-01, -1.0024e-02, -3.1581e-01, 2.5540e-01, 2.3636e-01,\n"," 5.2436e-01, -3.2712e-02, 2.9063e-01, 7.3067e-03, 3.9730e-02,\n"," 1.0193e+00, 1.4844e-02, -7.4340e-02, 1.7033e-01, 9.4369e-02,\n"," -8.6039e-01, 7.7678e-01, 2.5785e-01, -3.8490e-01, -2.1316e-02,\n"," -4.1974e-02, 2.7585e-01, 2.0180e-02, -2.7692e-01, 9.6549e-02,\n"," 8.8455e-02, -3.3242e-01, -5.3826e-01, -9.8565e-02, -2.4928e-01,\n"," -3.6300e-01, 2.0111e-01, 8.8413e-02, 3.3173e-01, -5.0910e-01,\n"," -1.7245e-01, -3.2734e-01, 3.5102e-01, 4.2331e-01, -5.0536e-01,\n"," 8.5138e-02, -9.5035e-02, -1.1805e-01, -3.5665e-01, -3.0839e-01,\n"," -5.2741e-02, 7.1581e-01, 4.9237e-01, 2.5008e-01, -1.5259e-01,\n"," 1.6519e-01, 5.6711e-01, -1.6877e-03, 3.9424e-01, -1.8432e-01,\n"," -1.8111e-01, -6.2313e-01, 2.2869e-01, -6.5868e-01, 1.5652e-01,\n"," -9.7501e-02, 1.9716e-01, 1.5999e-01, 2.6841e-01, -2.6286e-01,\n"," 4.6072e-01, -1.1079e-02, 4.8828e-01, 7.6910e-02, -2.9342e-01,\n"," -5.0304e-02, 4.9156e-01, 2.2918e-01, -8.2844e-03, -9.6683e-02,\n"," 1.4288e-02, -1.4309e-02, 1.4039e-01, -4.8651e-01, -9.3715e-02,\n"," -3.7450e-01, 5.6247e-01, -8.1327e-02, 3.2218e-02, -1.3738e-01,\n"," -1.1800e-01, 2.9963e-01, -5.5857e-01, 3.4827e-03, 5.3874e-01,\n"," -4.5156e-01, -3.7872e-02, -4.5521e-01, 5.0643e-01, -2.6296e-02,\n"," -2.3988e-01, -3.1555e-02, -6.9799e-02, -2.3009e-01, 3.0719e-01,\n"," 4.8275e-03, 2.6006e-01, 4.9318e-01, -4.7361e-01, -1.0552e-01,\n"," 1.8031e-01, -1.7696e-01, -2.7578e-01, -2.5828e-01, -2.5864e-01,\n"," 2.8763e-02, -6.3738e-03, 1.2556e-01, -1.4778e+00, 1.5809e-01,\n"," 5.6360e-01, -5.3969e-01, 2.1972e-01, -1.0256e-01, 1.1245e-01,\n"," -1.2830e-01, -5.1237e-01, -2.4383e-01, 1.8523e-01, -5.7237e-01,\n"," 5.0537e-01, 5.4445e-01, 5.3121e-02, -3.5255e-01, 1.1667e-01,\n"," -4.4850e-01, -4.5355e-01, -5.5171e-02, -7.2067e-02, -2.2038e-01,\n"," 6.0413e-01, -6.8963e-03, 2.4243e-01, 1.4308e-01, -3.0295e-01,\n"," 1.3932e-02, -2.9025e-01, -2.6792e-01, -3.5926e-01, -5.2837e-01,\n"," -3.4889e-01, 2.1694e-01, 5.5567e-02, 1.1214e-01, 3.6070e-02,\n"," 1.0186e-01, 4.9937e-01, -4.1917e-01, 3.2880e-01, 2.0285e-01,\n"," 6.9234e-02, -1.1835e-01, 2.2557e-01, -3.1079e-01, -2.5085e-01,\n"," -1.8064e-01, 3.6065e-02, 1.5877e-01, 3.0306e-01, -4.9504e-01,\n"," 1.9805e-01, -2.5071e-01, -7.7159e-02, -2.6687e-01, 3.8725e-01,\n"," 4.3887e-01, 1.9059e-01, -3.7599e-01, 6.2170e-01, 3.3303e-02,\n"," -8.6096e-02, 4.1120e-01, -5.1740e-01, -1.4495e-01, -4.3125e-01,\n"," 1.3965e-01, -2.0175e-01, 9.3296e-03, 1.5754e-01, -4.3702e-02,\n"," -6.4056e-01, -6.3995e-01, -2.1357e-01, 4.1608e-01, -2.6353e-01,\n"," 9.5555e-02, 4.1969e-01, -1.5755e-01, -1.5832e-01, -3.2000e-01,\n"," 2.5349e-01, -4.7365e-02, -2.0910e-01, 1.1157e-01, 4.2027e-01,\n"," -4.5068e-01, -1.2019e-01, -3.2362e-01, -1.4408e-01, 1.3404e-01,\n"," -9.8479e-02, 1.1147e-01, -2.6287e-02, 2.1978e-01, 6.6917e-01,\n"," -5.8631e-01, 3.0597e-01, -3.4541e-02, -3.1144e-01, -3.2330e-02,\n"," 4.5913e-01, -1.5572e-01, -2.4202e-01, 1.1681e-01, -4.6817e-01,\n"," -5.8213e-02, -2.5967e-02, 1.0121e-01, 
2.8433e-01, -2.7939e-01,\n"," 1.7377e-01, -5.4009e-02, 1.4878e-02, -1.1114e-01, 2.1159e-01,\n"," 8.0033e-03, -2.9802e-01, 1.4545e-01, 3.0560e-02, 1.4260e-01,\n"," -2.3344e-01, 5.5985e-02, 4.7803e-01, -1.4175e-01, 2.1685e-01,\n"," -3.6624e-01, -1.0799e-02, -2.3781e-01, 2.3829e-01, -3.7137e-01,\n"," -5.9305e-03, -2.6328e-02, -2.3738e-01, 1.7365e-01, -7.6041e-03,\n"," 5.1147e-01, -4.0053e-01, 6.5634e-01, 1.6980e-01, 3.6331e-01,\n"," -7.8556e-01, 3.9335e-01, 1.5107e-01, 5.6601e-01, -3.2165e-01,\n"," 2.6671e-01, 1.7235e-01, 6.1412e-01, -3.0673e-01, -5.2400e-01,\n"," 2.0473e-01, -3.5616e-01, -2.9019e-02, -3.6222e-01, 4.7045e-02,\n"," -3.4876e-01, -3.0287e-01, -3.3723e-01, -1.3332e-01, 1.2958e-01,\n"," -2.3090e-01, 2.7430e-01, 2.2121e-01, 7.2729e-02, 4.0355e-02,\n"," 1.2132e-01, -4.1609e-01, 2.1902e-01, 3.9454e-01, -3.3366e-01,\n"," 3.6393e-01, 3.0675e-02, 1.3271e-01, 1.9554e-01, -2.0074e-01,\n"," -3.9919e-01, 2.3147e-01, -4.6680e-02, 3.0573e-01, -2.3409e-02,\n"," -3.3541e-02, -2.0361e-02, 1.8162e-01, -1.6056e-01, -1.3642e-01,\n"," -2.5296e-01, 2.6629e-01, 3.1777e-01, 1.9849e-01, -1.5943e-01,\n"," -5.2253e-01, -5.7177e-01, -2.6499e-01, -4.8492e-03, 2.5690e-01,\n"," -1.4764e-01, 6.6715e-02, 5.0746e-01, 5.6703e-01, -3.5170e-01,\n"," -2.8620e-01, 1.4681e-01, -6.6729e-02, -8.3342e-02, 1.4725e-01,\n"," 1.7021e-01, -1.1823e-01, -7.9831e-02, 4.6950e-02, -3.7097e-01,\n"," -5.6263e-01, -2.2626e-01, -1.6523e-01, -1.2022e-01, 1.3029e-01,\n"," 6.5303e-02, 3.8104e-01, -6.0062e-01, -7.0631e-01, -2.5899e-01,\n"," 9.4268e-02, 4.8327e-01, -2.5539e-01, -3.5539e-01, -6.8635e-01,\n"," -2.4004e-01, 7.4311e-02, -2.8185e-01, 2.3663e-01, 2.5141e-01,\n"," -4.4993e-02, -2.1460e-02, 1.7046e-01, 1.6410e-01, -4.9287e-01,\n"," -2.8358e-02, -4.1604e-01, 1.4980e-01, 1.7722e-02, -5.5980e-01,\n"," -2.6359e-01, 1.4530e-01, -1.6204e-02, -8.3184e-02, 1.8872e-01,\n"," 3.8833e-01, 4.0844e-01, 9.0047e-02, -2.0215e-01, 2.1359e-01,\n"," -1.4876e-02, 2.1712e-01, 3.2910e-02, -1.2180e+00, -1.0210e-01,\n"," 1.3111e-01, 3.2296e-01, -1.9728e-01, -4.1255e-01, 1.2833e-01,\n"," 1.8172e-01, -7.8031e-02, 3.0701e-01, 2.0108e-01, 1.7847e-01,\n"," 6.1376e-02, 4.6001e-01, 7.4010e-01, -4.2488e-01, 2.0210e-01,\n"," -2.1802e-01, 3.9862e-01, -1.3940e-01, 4.8126e-02, 1.5087e-01,\n"," -5.8276e-01, -2.2824e-01, 2.0540e-01, 6.2027e-01, 6.9434e-01,\n"," -7.3159e-02, -1.3641e-01, 2.1514e-01, -1.9128e-01, 5.0071e-02,\n"," -4.1375e-02, 8.2217e-02, -1.7921e-01, -9.3085e-02, 8.4690e-02,\n"," -7.9604e-02, -7.0880e-03, -7.0792e-02, 2.7408e-02, -4.6207e-01,\n"," 6.7466e-02, -7.5703e-01, -1.5563e-01, -1.2149e-01, -1.0664e-02,\n"," 3.2026e-03, -8.7811e-02, -1.8524e-01, 1.2954e-01, 1.7395e-01,\n"," 2.7243e-01, 2.9678e-02, 3.0159e-01, -4.9886e-01, -1.4746e-01,\n"," 1.9762e-01, 2.8110e-01, -4.2347e-01, 8.3471e-02, -2.9546e-02,\n"," -6.1113e-01, 1.9005e-01, 4.5182e-01, 7.7572e-02, -4.2409e-02,\n"," 4.2516e-01, 6.4087e-01, 4.1942e-01, 5.1882e-01, -4.0334e-01,\n"," -2.2627e-01, -5.1663e-03, 4.5979e-01, 2.3518e-01, 7.2026e-02,\n"," -2.3063e-01, 8.0060e-01, -1.6929e-01, -7.2189e-01, 2.0956e-01,\n"," 4.5984e-01, -3.1200e-01, -3.5127e-01, -1.0583e-01, 6.2176e-02,\n"," 4.3347e-01, -6.0807e-02, 2.6328e-01, 1.4977e-01, 2.3745e-01,\n"," -3.5506e-01, 2.6664e-01, 2.0116e-01, 2.6193e-01, 6.6278e-02,\n"," -9.0874e-02, 1.2584e-01, 4.5560e-01, -6.8599e-02, 1.1144e-02,\n"," -8.8994e-02, -4.9053e-02, -6.4847e-01, 5.2192e-02, 7.3579e-02,\n"," -7.1107e-02, -3.5480e-01, 1.6951e-01, 5.6647e-01, -1.1146e-01,\n"," -3.7864e-01, -5.1839e-01, 5.4160e-02, 4.2911e-01, 8.4946e-02,\n"," -2.1057e-01, 
-9.4043e-02, 3.1234e-02, 1.8734e-01, -3.4632e-01,\n"," 2.8265e-02, -7.1655e-01, -1.7668e-01, -8.4936e-02, 1.6303e-01,\n"," -3.4236e-01, 4.7172e-02, 7.5039e-01, -6.3567e-01, -2.1190e-01,\n"," -1.1310e-01, -3.5336e-02, -1.2725e-01, 6.0579e-02, -3.0158e-01,\n"," -4.5995e-01, -1.3900e-01, 1.4207e-01, -1.9504e-01, 4.2449e-01,\n"," 2.0711e-01, 3.6467e-01, 4.5925e-01, 9.2332e-01, -2.4856e-01,\n"," -1.2232e-01, 1.9377e-01, -2.6700e-01, -8.4578e-02, 4.2214e-01,\n"," 4.3623e-02, 3.9745e-02, -5.4367e-01, -9.0686e-02, 4.6217e-01,\n"," -1.3321e+00, 3.1523e-01, -1.3472e-01, -3.2936e-01, 1.2022e-01,\n"," 3.1897e-02, -2.8501e-01, -3.1467e-01, -2.0373e-02, -2.6907e-01,\n"," -1.1179e-01, 3.1092e-02, -6.1231e-01, 9.8211e-02, -4.2775e-01,\n"," 4.1630e-01, -2.2150e-01, 3.6637e-01, 8.5311e-02, 7.2983e-01,\n"," 2.5472e-01, 1.4389e-01, 1.4172e-02, -1.3177e-01, -3.2431e-01,\n"," -3.6021e-01, -1.5830e-01, -2.1587e-01, 5.3296e-01, 3.5912e-01,\n"," -1.8317e-02, -1.0013e-01, -2.1327e+00, -1.6673e-01, 1.6375e-01,\n"," -1.2370e-01, 4.0220e-01, -1.5303e-01, 1.0278e-01, -3.8122e-02,\n"," -4.7337e-02, -4.4114e-01, -1.8625e-01, -8.6430e-01, 3.6100e-01,\n"," 2.4993e-01, -3.5293e-02, -1.2965e-01]])\n"]}]}]} \ No newline at end of file